anv: Make blorp update the clear color.
mesa.git: src/intel/vulkan/anv_descriptor_set.c

/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <assert.h>
#include <stdbool.h>
#include <string.h>
#include <unistd.h>
#include <fcntl.h>

#include "util/mesa-sha1.h"

#include "anv_private.h"

/*
 * Descriptor set layouts.
 */

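/* Check whether a set layout fits in our binding table budget: tally the
 * worst-case number of binding table surfaces each shader stage would need
 * and compare it against the per-stage limit at the bottom of this function.
 */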
void anv_GetDescriptorSetLayoutSupport(
    VkDevice                                    device,
    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
    VkDescriptorSetLayoutSupport*               pSupport)
{
   uint32_t surface_count[MESA_SHADER_STAGES] = { 0, };

   for (uint32_t b = 0; b < pCreateInfo->bindingCount; b++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[b];

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
         /* There is no real limit on samplers */
         break;

      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         if (binding->pImmutableSamplers) {
            for (uint32_t i = 0; i < binding->descriptorCount; i++) {
               ANV_FROM_HANDLE(anv_sampler, sampler,
                               binding->pImmutableSamplers[i]);
               anv_foreach_stage(s, binding->stageFlags)
                  surface_count[s] += sampler->n_planes;
            }
         }
         break;

      default:
         anv_foreach_stage(s, binding->stageFlags)
            surface_count[s] += binding->descriptorCount;
         break;
      }
   }

   bool supported = true;
   for (unsigned s = 0; s < MESA_SHADER_STAGES; s++) {
      /* Our maximum binding table size is 250 and we need to reserve 8 for
       * render targets.  240 is a nice round number.
       */
      if (surface_count[s] >= 240)
         supported = false;
   }

   pSupport->supported = supported;
}

VkResult anv_CreateDescriptorSetLayout(
    VkDevice                                    _device,
    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorSetLayout*                      pSetLayout)
{
   ANV_FROM_HANDLE(anv_device, device, _device);

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);

   uint32_t max_binding = 0;
   uint32_t immutable_sampler_count = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      max_binding = MAX2(max_binding, pCreateInfo->pBindings[j].binding);
      if (pCreateInfo->pBindings[j].pImmutableSamplers)
         immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
   }

   struct anv_descriptor_set_layout *set_layout;
   struct anv_descriptor_set_binding_layout *bindings;
   struct anv_sampler **samplers;

   /* We need to allocate descriptor set layouts off the device allocator
    * with DEVICE scope because they are reference counted and may not be
    * destroyed when vkDestroyDescriptorSetLayout is called.
    */
   ANV_MULTIALLOC(ma);
   anv_multialloc_add(&ma, &set_layout, 1);
   anv_multialloc_add(&ma, &bindings, max_binding + 1);
   anv_multialloc_add(&ma, &samplers, immutable_sampler_count);

   if (!anv_multialloc_alloc(&ma, &device->alloc,
                             VK_SYSTEM_ALLOCATION_SCOPE_DEVICE))
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   memset(set_layout, 0, sizeof(*set_layout));
   set_layout->ref_cnt = 1;
   set_layout->binding_count = max_binding + 1;

   for (uint32_t b = 0; b <= max_binding; b++) {
      /* Initialize all binding_layout entries to -1 */
      memset(&set_layout->binding[b], -1, sizeof(set_layout->binding[b]));

      set_layout->binding[b].array_size = 0;
      set_layout->binding[b].immutable_samplers = NULL;
   }

   /* Initialize all samplers to 0 */
   memset(samplers, 0, immutable_sampler_count * sizeof(*samplers));

   uint32_t sampler_count[MESA_SHADER_STAGES] = { 0, };
   uint32_t surface_count[MESA_SHADER_STAGES] = { 0, };
   uint32_t image_count[MESA_SHADER_STAGES] = { 0, };
   uint32_t buffer_count = 0;
   uint32_t dynamic_offset_count = 0;

   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[j];
      uint32_t b = binding->binding;
      /* We temporarily store the pointer to the binding in the
       * immutable_samplers pointer.  This provides us with a quick-and-dirty
       * way to sort the bindings by binding number.
       */
      set_layout->binding[b].immutable_samplers = (void *)binding;
   }

   for (uint32_t b = 0; b <= max_binding; b++) {
      const VkDescriptorSetLayoutBinding *binding =
         (void *)set_layout->binding[b].immutable_samplers;

      if (binding == NULL)
         continue;

      if (binding->descriptorCount == 0)
         continue;

#ifndef NDEBUG
      set_layout->binding[b].type = binding->descriptorType;
#endif
      set_layout->binding[b].array_size = binding->descriptorCount;
      set_layout->binding[b].descriptor_index = set_layout->size;
      set_layout->size += binding->descriptorCount;

      /* Sampler-bearing descriptors get per-stage sampler slots. */
      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         anv_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].sampler_index = sampler_count[s];
            sampler_count[s] += binding->descriptorCount;
         }
         break;
      default:
         break;
      }

      /* Anything backed by surface state gets per-stage surface slots;
       * buffer descriptors additionally get buffer view slots.
       */
      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         set_layout->binding[b].buffer_index = buffer_count;
         buffer_count += binding->descriptorCount;
         /* fall through */

      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         anv_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].surface_index = surface_count[s];
            surface_count[s] += binding->descriptorCount;
         }
         break;
      default:
         break;
      }

      /* Dynamic buffers get dynamic offset slots. */
      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         set_layout->binding[b].dynamic_offset_index = dynamic_offset_count;
         dynamic_offset_count += binding->descriptorCount;
         break;
      default:
         break;
      }

      /* Storage images and storage texel buffers also get per-stage image
       * slots.
       */
      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         anv_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].image_index = image_count[s];
            image_count[s] += binding->descriptorCount;
         }
         break;
      default:
         break;
      }

      if (binding->pImmutableSamplers) {
         set_layout->binding[b].immutable_samplers = samplers;
         samplers += binding->descriptorCount;

         for (uint32_t i = 0; i < binding->descriptorCount; i++)
            set_layout->binding[b].immutable_samplers[i] =
               anv_sampler_from_handle(binding->pImmutableSamplers[i]);
      } else {
         set_layout->binding[b].immutable_samplers = NULL;
      }

      set_layout->shader_stages |= binding->stageFlags;
   }

   set_layout->buffer_count = buffer_count;
   set_layout->dynamic_offset_count = dynamic_offset_count;

   *pSetLayout = anv_descriptor_set_layout_to_handle(set_layout);

   return VK_SUCCESS;
}

void anv_DestroyDescriptorSetLayout(
    VkDevice                                    _device,
    VkDescriptorSetLayout                       _set_layout,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout, _set_layout);

   if (!set_layout)
      return;

   anv_descriptor_set_layout_unref(device, set_layout);
}

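/* Hash the layout structure, including the trailing array of per-binding
 * layouts, so that two identical layouts hash identically.
 */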
static void
sha1_update_descriptor_set_layout(struct mesa_sha1 *ctx,
                                  const struct anv_descriptor_set_layout *layout)
{
   size_t size = sizeof(*layout) +
                 sizeof(layout->binding[0]) * layout->binding_count;
   _mesa_sha1_update(ctx, layout, size);
}

/*
 * Pipeline layouts.  These have nothing to do with the pipeline.  They are
 * just multiple descriptor set layouts pasted together.
 */

VkResult anv_CreatePipelineLayout(
    VkDevice                                    _device,
    const VkPipelineLayoutCreateInfo*           pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkPipelineLayout*                           pPipelineLayout)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_pipeline_layout *layout;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);

   layout = vk_alloc2(&device->alloc, pAllocator, sizeof(*layout), 8,
                      VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (layout == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   layout->num_sets = pCreateInfo->setLayoutCount;

   unsigned dynamic_offset_count = 0;

   memset(layout->stage, 0, sizeof(layout->stage));
   for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {
      ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout,
                      pCreateInfo->pSetLayouts[set]);
      layout->set[set].layout = set_layout;
      anv_descriptor_set_layout_ref(set_layout);

      layout->set[set].dynamic_offset_start = dynamic_offset_count;
      for (uint32_t b = 0; b < set_layout->binding_count; b++) {
         if (set_layout->binding[b].dynamic_offset_index < 0)
            continue;

         dynamic_offset_count += set_layout->binding[b].array_size;
         for (gl_shader_stage s = 0; s < MESA_SHADER_STAGES; s++) {
            if (set_layout->binding[b].stage[s].surface_index >= 0)
               layout->stage[s].has_dynamic_offsets = true;
         }
      }
   }

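   /* Hash everything that affects how the layout is consumed.  The result
    * can stand in for the full layout when hashing pipelines (e.g. for
    * pipeline cache keys).
    */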
   struct mesa_sha1 ctx;
   _mesa_sha1_init(&ctx);
   for (unsigned s = 0; s < layout->num_sets; s++) {
      sha1_update_descriptor_set_layout(&ctx, layout->set[s].layout);
      _mesa_sha1_update(&ctx, &layout->set[s].dynamic_offset_start,
                        sizeof(layout->set[s].dynamic_offset_start));
   }
   _mesa_sha1_update(&ctx, &layout->num_sets, sizeof(layout->num_sets));
   for (unsigned s = 0; s < MESA_SHADER_STAGES; s++) {
      _mesa_sha1_update(&ctx, &layout->stage[s].has_dynamic_offsets,
                        sizeof(layout->stage[s].has_dynamic_offsets));
   }
   _mesa_sha1_final(&ctx, layout->sha1);

   *pPipelineLayout = anv_pipeline_layout_to_handle(layout);

   return VK_SUCCESS;
}

void anv_DestroyPipelineLayout(
    VkDevice                                    _device,
    VkPipelineLayout                            _pipelineLayout,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_pipeline_layout, pipeline_layout, _pipelineLayout);

   if (!pipeline_layout)
      return;

   for (uint32_t i = 0; i < pipeline_layout->num_sets; i++)
      anv_descriptor_set_layout_unref(device, pipeline_layout->set[i].layout);

   vk_free2(&device->alloc, pAllocator, pipeline_layout);
}

/*
 * Descriptor pools.
 *
 * These are implemented using a big pool of memory and a free-list for the
 * host memory allocations, plus a state_stream and a free list for the
 * buffer view surface state.  The spec allows us to fail to allocate due to
 * fragmentation in all cases but two: 1) after pool reset, allocating up
 * until the pool size with no freeing must succeed and 2) allocating and
 * freeing only descriptor sets with the same layout.  Case 1) is easy
 * enough, and the free list lets us recycle blocks for case 2).
 */

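/* Sentinel for an empty free list.  Offset 0 into the pool is a valid
 * allocation, and since entry offsets are sums of structure sizes, an odd
 * offset like 1 can never be the start of a real entry.
 */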
#define EMPTY 1

VkResult anv_CreateDescriptorPool(
    VkDevice                                    _device,
    const VkDescriptorPoolCreateInfo*           pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorPool*                           pDescriptorPool)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_descriptor_pool *pool;

   uint32_t descriptor_count = 0;
   uint32_t buffer_count = 0;
   for (uint32_t i = 0; i < pCreateInfo->poolSizeCount; i++) {
      switch (pCreateInfo->pPoolSizes[i].type) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         buffer_count += pCreateInfo->pPoolSizes[i].descriptorCount;
         /* fall through */
      default:
         descriptor_count += pCreateInfo->pPoolSizes[i].descriptorCount;
         break;
      }
   }

   const size_t pool_size =
      pCreateInfo->maxSets * sizeof(struct anv_descriptor_set) +
      descriptor_count * sizeof(struct anv_descriptor) +
      buffer_count * sizeof(struct anv_buffer_view);
   const size_t total_size = sizeof(*pool) + pool_size;

   pool = vk_alloc2(&device->alloc, pAllocator, total_size, 8,
                    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!pool)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   pool->size = pool_size;
   pool->next = 0;
   pool->free_list = EMPTY;

   anv_state_stream_init(&pool->surface_state_stream,
                         &device->surface_state_pool, 4096);
   pool->surface_state_free_list = NULL;

   *pDescriptorPool = anv_descriptor_pool_to_handle(pool);

   return VK_SUCCESS;
}

void anv_DestroyDescriptorPool(
    VkDevice                                    _device,
    VkDescriptorPool                            _pool,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, _pool);

   if (!pool)
      return;

   anv_state_stream_finish(&pool->surface_state_stream);
   vk_free2(&device->alloc, pAllocator, pool);
}

VkResult anv_ResetDescriptorPool(
    VkDevice                                    _device,
    VkDescriptorPool                            descriptorPool,
    VkDescriptorPoolResetFlags                  flags)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);

   pool->next = 0;
   pool->free_list = EMPTY;
   anv_state_stream_finish(&pool->surface_state_stream);
   anv_state_stream_init(&pool->surface_state_stream,
                         &device->surface_state_pool, 4096);
   pool->surface_state_free_list = NULL;

   return VK_SUCCESS;
}

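/* Free-list entries live in the memory of the freed descriptor set itself;
 * next is the pool-relative offset of the next free block.
 */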
struct pool_free_list_entry {
   uint32_t next;
   uint32_t size;
};

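/* Host memory required for a set with the given layout: the set structure
 * itself, one anv_descriptor per descriptor, and one anv_buffer_view per
 * buffer descriptor.
 */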
size_t
anv_descriptor_set_layout_size(const struct anv_descriptor_set_layout *layout)
{
   return
      sizeof(struct anv_descriptor_set) +
      layout->size * sizeof(struct anv_descriptor) +
      layout->buffer_count * sizeof(struct anv_buffer_view);
}

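/* Number of hardware binding table entries for one binding.  With immutable
 * samplers, a single descriptor may need one entry per sampler plane (e.g.
 * for multi-planar formats); otherwise it is just the array size.
 */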
size_t
anv_descriptor_set_binding_layout_get_hw_size(const struct anv_descriptor_set_binding_layout *binding)
{
   if (!binding->immutable_samplers)
      return binding->array_size;

   uint32_t total_plane_count = 0;
   for (uint32_t i = 0; i < binding->array_size; i++)
      total_plane_count += binding->immutable_samplers[i]->n_planes;

   return total_plane_count;
}

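/* Recycled buffer view surface states are chained through their own state
 * memory: this entry is written into the state's CPU mapping when the state
 * goes on the free list.
 */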
struct surface_state_free_list_entry {
   void *next;
   struct anv_state state;
};

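/* Allocate a set from the pool: first try a bump allocation from the unused
 * tail of the pool, then fall back to a first-fit walk of the free list.
 */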
VkResult
anv_descriptor_set_create(struct anv_device *device,
                          struct anv_descriptor_pool *pool,
                          struct anv_descriptor_set_layout *layout,
                          struct anv_descriptor_set **out_set)
{
   struct anv_descriptor_set *set;
   const size_t size = anv_descriptor_set_layout_size(layout);

   set = NULL;
   if (size <= pool->size - pool->next) {
      set = (struct anv_descriptor_set *) (pool->data + pool->next);
      pool->next += size;
   } else {
      struct pool_free_list_entry *entry;
      uint32_t *link = &pool->free_list;
      for (uint32_t f = pool->free_list; f != EMPTY; f = entry->next) {
         entry = (struct pool_free_list_entry *) (pool->data + f);
         if (size <= entry->size) {
            *link = entry->next;
            set = (struct anv_descriptor_set *) entry;
            break;
         }
         link = &entry->next;
      }
   }

   if (set == NULL) {
      if (pool->free_list != EMPTY) {
         return vk_error(VK_ERROR_FRAGMENTED_POOL);
      } else {
         return vk_error(VK_ERROR_OUT_OF_POOL_MEMORY);
      }
   }

   set->layout = layout;
   anv_descriptor_set_layout_ref(layout);

   set->size = size;
   set->buffer_views =
      (struct anv_buffer_view *) &set->descriptors[layout->size];
   set->buffer_count = layout->buffer_count;

   /* By defining the descriptors to be zero now, we can later verify that
    * a descriptor has not been populated with user data.
    */
   memset(set->descriptors, 0, sizeof(struct anv_descriptor) * layout->size);

   /* Go through and fill out immutable samplers if we have any */
   struct anv_descriptor *desc = set->descriptors;
   for (uint32_t b = 0; b < layout->binding_count; b++) {
      if (layout->binding[b].immutable_samplers) {
         for (uint32_t i = 0; i < layout->binding[b].array_size; i++) {
            /* The type will get changed to COMBINED_IMAGE_SAMPLER in
             * UpdateDescriptorSets if needed.  However, if the descriptor
             * set has an immutable sampler, UpdateDescriptorSets may never
             * touch it, so we need to make sure it's 100% valid now.
             */
            desc[i] = (struct anv_descriptor) {
               .type = VK_DESCRIPTOR_TYPE_SAMPLER,
               .sampler = layout->binding[b].immutable_samplers[i],
            };
         }
      }
      desc += layout->binding[b].array_size;
   }

   /* Allocate surface state for the buffer views. */
   for (uint32_t b = 0; b < layout->buffer_count; b++) {
      struct surface_state_free_list_entry *entry =
         pool->surface_state_free_list;
      struct anv_state state;

      if (entry) {
         state = entry->state;
         pool->surface_state_free_list = entry->next;
         assert(state.alloc_size == 64);
      } else {
         state = anv_state_stream_alloc(&pool->surface_state_stream, 64, 64);
      }

      set->buffer_views[b].surface_state = state;
   }

   *out_set = set;

   return VK_SUCCESS;
}

void
anv_descriptor_set_destroy(struct anv_device *device,
                           struct anv_descriptor_pool *pool,
                           struct anv_descriptor_set *set)
{
   anv_descriptor_set_layout_unref(device, set->layout);

   /* Put the buffer view surface state back on the free list. */
   for (uint32_t b = 0; b < set->buffer_count; b++) {
      struct surface_state_free_list_entry *entry =
         set->buffer_views[b].surface_state.map;
      entry->next = pool->surface_state_free_list;
      entry->state = set->buffer_views[b].surface_state;
      pool->surface_state_free_list = entry;
   }

   /* Put the descriptor set allocation back on the free list. */
   const uint32_t index = (char *) set - pool->data;
   if (index + set->size == pool->next) {
      pool->next = index;
   } else {
      struct pool_free_list_entry *entry = (struct pool_free_list_entry *) set;
      entry->next = pool->free_list;
      entry->size = set->size;
      pool->free_list = (char *) entry - pool->data;
   }
}

VkResult anv_AllocateDescriptorSets(
    VkDevice                                    _device,
    const VkDescriptorSetAllocateInfo*          pAllocateInfo,
    VkDescriptorSet*                            pDescriptorSets)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, pAllocateInfo->descriptorPool);

   VkResult result = VK_SUCCESS;
   struct anv_descriptor_set *set;
   uint32_t i;

   for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      ANV_FROM_HANDLE(anv_descriptor_set_layout, layout,
                      pAllocateInfo->pSetLayouts[i]);

      result = anv_descriptor_set_create(device, pool, layout, &set);
      if (result != VK_SUCCESS)
         break;

      pDescriptorSets[i] = anv_descriptor_set_to_handle(set);
   }

   if (result != VK_SUCCESS)
      anv_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
                             i, pDescriptorSets);

   return result;
}

VkResult anv_FreeDescriptorSets(
    VkDevice                                    _device,
    VkDescriptorPool                            descriptorPool,
    uint32_t                                    count,
    const VkDescriptorSet*                      pDescriptorSets)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);

   for (uint32_t i = 0; i < count; i++) {
      ANV_FROM_HANDLE(anv_descriptor_set, set, pDescriptorSets[i]);

      if (!set)
         continue;

      anv_descriptor_set_destroy(device, pool, set);
   }

   return VK_SUCCESS;
}

void
anv_descriptor_set_write_image_view(struct anv_descriptor_set *set,
                                    const struct gen_device_info * const devinfo,
                                    const VkDescriptorImageInfo * const info,
                                    VkDescriptorType type,
                                    uint32_t binding,
                                    uint32_t element)
{
   const struct anv_descriptor_set_binding_layout *bind_layout =
      &set->layout->binding[binding];
   struct anv_descriptor *desc =
      &set->descriptors[bind_layout->descriptor_index + element];
   struct anv_image_view *image_view = NULL;
   struct anv_sampler *sampler = NULL;

   assert(type == bind_layout->type);

   switch (type) {
   case VK_DESCRIPTOR_TYPE_SAMPLER:
      sampler = anv_sampler_from_handle(info->sampler);
      break;

   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      image_view = anv_image_view_from_handle(info->imageView);
      sampler = anv_sampler_from_handle(info->sampler);
      break;

   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      image_view = anv_image_view_from_handle(info->imageView);
      break;

   default:
      unreachable("invalid descriptor type");
   }

   /* If this descriptor has an immutable sampler, we don't want to stomp on
    * it.
    */
   sampler = bind_layout->immutable_samplers ?
             bind_layout->immutable_samplers[element] :
             sampler;

   *desc = (struct anv_descriptor) {
      .type = type,
      .layout = info->imageLayout,
      .image_view = image_view,
      .sampler = sampler,
   };
}

void
anv_descriptor_set_write_buffer_view(struct anv_descriptor_set *set,
                                     VkDescriptorType type,
                                     struct anv_buffer_view *buffer_view,
                                     uint32_t binding,
                                     uint32_t element)
{
   const struct anv_descriptor_set_binding_layout *bind_layout =
      &set->layout->binding[binding];
   struct anv_descriptor *desc =
      &set->descriptors[bind_layout->descriptor_index + element];

   assert(type == bind_layout->type);

   *desc = (struct anv_descriptor) {
      .type = type,
      .buffer_view = buffer_view,
   };
}

void
anv_descriptor_set_write_buffer(struct anv_descriptor_set *set,
                                struct anv_device *device,
                                struct anv_state_stream *alloc_stream,
                                VkDescriptorType type,
                                struct anv_buffer *buffer,
                                uint32_t binding,
                                uint32_t element,
                                VkDeviceSize offset,
                                VkDeviceSize range)
{
   const struct anv_descriptor_set_binding_layout *bind_layout =
      &set->layout->binding[binding];
   struct anv_descriptor *desc =
      &set->descriptors[bind_layout->descriptor_index + element];

   assert(type == bind_layout->type);

   if (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
       type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) {
      *desc = (struct anv_descriptor) {
         .type = type,
         .buffer = buffer,
         .offset = offset,
         .range = range,
      };
   } else {
      struct anv_buffer_view *bview =
         &set->buffer_views[bind_layout->buffer_index + element];

      bview->format = anv_isl_format_for_descriptor_type(type);
      bview->bo = buffer->bo;
      bview->offset = buffer->offset + offset;
      bview->range = anv_buffer_get_range(buffer, offset, range);

      /* If we're writing descriptors through a push command, we need to
       * allocate the surface state from the command buffer.  Otherwise it
       * will be allocated by the descriptor pool when calling
       * vkAllocateDescriptorSets.
       */
      if (alloc_stream)
         bview->surface_state = anv_state_stream_alloc(alloc_stream, 64, 64);

      anv_fill_buffer_surface_state(device, bview->surface_state,
                                    bview->format,
                                    bview->offset, bview->range, 1);

      *desc = (struct anv_descriptor) {
         .type = type,
         .buffer_view = bview,
      };
   }
}

void anv_UpdateDescriptorSets(
    VkDevice                                    _device,
    uint32_t                                    descriptorWriteCount,
    const VkWriteDescriptorSet*                 pDescriptorWrites,
    uint32_t                                    descriptorCopyCount,
    const VkCopyDescriptorSet*                  pDescriptorCopies)
{
   ANV_FROM_HANDLE(anv_device, device, _device);

   for (uint32_t i = 0; i < descriptorWriteCount; i++) {
      const VkWriteDescriptorSet *write = &pDescriptorWrites[i];
      ANV_FROM_HANDLE(anv_descriptor_set, set, write->dstSet);

      switch (write->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            anv_descriptor_set_write_image_view(set, &device->info,
                                                write->pImageInfo + j,
                                                write->descriptorType,
                                                write->dstBinding,
                                                write->dstArrayElement + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            ANV_FROM_HANDLE(anv_buffer_view, bview,
                            write->pTexelBufferView[j]);

            anv_descriptor_set_write_buffer_view(set,
                                                 write->descriptorType,
                                                 bview,
                                                 write->dstBinding,
                                                 write->dstArrayElement + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            assert(write->pBufferInfo[j].buffer);
            ANV_FROM_HANDLE(anv_buffer, buffer, write->pBufferInfo[j].buffer);
            assert(buffer);

            anv_descriptor_set_write_buffer(set,
                                            device,
                                            NULL,
                                            write->descriptorType,
                                            buffer,
                                            write->dstBinding,
                                            write->dstArrayElement + j,
                                            write->pBufferInfo[j].offset,
                                            write->pBufferInfo[j].range);
         }
         break;

      default:
         break;
      }
   }

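   /* Copies are plain struct assignments; any buffer view or sampler the
    * source descriptor references is shared with the destination rather
    * than duplicated.
    */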
   for (uint32_t i = 0; i < descriptorCopyCount; i++) {
      const VkCopyDescriptorSet *copy = &pDescriptorCopies[i];
      ANV_FROM_HANDLE(anv_descriptor_set, src, copy->srcSet);
      ANV_FROM_HANDLE(anv_descriptor_set, dst, copy->dstSet);

      const struct anv_descriptor_set_binding_layout *src_layout =
         &src->layout->binding[copy->srcBinding];
      struct anv_descriptor *src_desc =
         &src->descriptors[src_layout->descriptor_index];
      src_desc += copy->srcArrayElement;

      const struct anv_descriptor_set_binding_layout *dst_layout =
         &dst->layout->binding[copy->dstBinding];
      struct anv_descriptor *dst_desc =
         &dst->descriptors[dst_layout->descriptor_index];
      dst_desc += copy->dstArrayElement;

      for (uint32_t j = 0; j < copy->descriptorCount; j++)
         dst_desc[j] = src_desc[j];
   }
}

/*
 * Descriptor update templates.
 */

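/* Apply a template to a set by dispatching each entry to the same write
 * helpers vkUpdateDescriptorSets uses.  data points at the application's
 * packed update blob; each entry addresses it via its offset and stride.
 */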
void
anv_descriptor_set_write_template(struct anv_descriptor_set *set,
                                  struct anv_device *device,
                                  struct anv_state_stream *alloc_stream,
                                  const struct anv_descriptor_update_template *template,
                                  const void *data)
{
   const struct anv_descriptor_set_layout *layout = set->layout;

   for (uint32_t i = 0; i < template->entry_count; i++) {
      const struct anv_descriptor_template_entry *entry =
         &template->entries[i];
      const struct anv_descriptor_set_binding_layout *bind_layout =
         &layout->binding[entry->binding];
      struct anv_descriptor *desc = &set->descriptors[bind_layout->descriptor_index];
      desc += entry->array_element;

      switch (entry->type) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorImageInfo *info =
               data + entry->offset + j * entry->stride;
            anv_descriptor_set_write_image_view(set, &device->info,
                                                info, entry->type,
                                                entry->binding,
                                                entry->array_element + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkBufferView *_bview =
               data + entry->offset + j * entry->stride;
            ANV_FROM_HANDLE(anv_buffer_view, bview, *_bview);

            anv_descriptor_set_write_buffer_view(set,
                                                 entry->type,
                                                 bview,
                                                 entry->binding,
                                                 entry->array_element + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorBufferInfo *info =
               data + entry->offset + j * entry->stride;
            ANV_FROM_HANDLE(anv_buffer, buffer, info->buffer);

            anv_descriptor_set_write_buffer(set,
                                            device,
                                            alloc_stream,
                                            entry->type,
                                            buffer,
                                            entry->binding,
                                            entry->array_element + j,
                                            info->offset, info->range);
         }
         break;

      default:
         break;
      }
   }
}

VkResult anv_CreateDescriptorUpdateTemplate(
    VkDevice                                    _device,
    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_descriptor_update_template *template;

   size_t size = sizeof(*template) +
      pCreateInfo->descriptorUpdateEntryCount * sizeof(template->entries[0]);
   template = vk_alloc2(&device->alloc, pAllocator, size, 8,
                        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (template == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   template->bind_point = pCreateInfo->pipelineBindPoint;

   if (pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET)
      template->set = pCreateInfo->set;

   template->entry_count = pCreateInfo->descriptorUpdateEntryCount;
   for (uint32_t i = 0; i < template->entry_count; i++) {
      const VkDescriptorUpdateTemplateEntryKHR *pEntry =
         &pCreateInfo->pDescriptorUpdateEntries[i];

      template->entries[i] = (struct anv_descriptor_template_entry) {
         .type = pEntry->descriptorType,
         .binding = pEntry->dstBinding,
         .array_element = pEntry->dstArrayElement,
         .array_count = pEntry->descriptorCount,
         .offset = pEntry->offset,
         .stride = pEntry->stride,
      };
   }

   *pDescriptorUpdateTemplate =
      anv_descriptor_update_template_to_handle(template);

   return VK_SUCCESS;
}

void anv_DestroyDescriptorUpdateTemplate(
    VkDevice                                    _device,
    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_update_template, template,
                   descriptorUpdateTemplate);

   vk_free2(&device->alloc, pAllocator, template);
}

void anv_UpdateDescriptorSetWithTemplate(
    VkDevice                                    _device,
    VkDescriptorSet                             descriptorSet,
    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
    const void*                                 pData)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_set, set, descriptorSet);
   ANV_FROM_HANDLE(anv_descriptor_update_template, template,
                   descriptorUpdateTemplate);

   anv_descriptor_set_write_template(set, device, NULL, template, pData);
}