anv/apply_pipeline_layout: Add to the bind map instead of replacing it
src/intel/vulkan/anv_descriptor_set.c
/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <assert.h>
#include <stdbool.h>
#include <string.h>
#include <unistd.h>
#include <fcntl.h>

#include "util/mesa-sha1.h"

#include "anv_private.h"

/*
 * Descriptor set layouts.
 */

void anv_GetDescriptorSetLayoutSupport(
    VkDevice                                    device,
    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
    VkDescriptorSetLayoutSupport*               pSupport)
{
   uint32_t surface_count[MESA_SHADER_STAGES] = { 0, };

   for (uint32_t b = 0; b < pCreateInfo->bindingCount; b++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[b];

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
         /* There is no real limit on samplers */
         break;

      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         if (binding->pImmutableSamplers) {
            for (uint32_t i = 0; i < binding->descriptorCount; i++) {
               ANV_FROM_HANDLE(anv_sampler, sampler,
                               binding->pImmutableSamplers[i]);
               anv_foreach_stage(s, binding->stageFlags)
                  surface_count[s] += sampler->n_planes;
            }
         }
         break;

      default:
         anv_foreach_stage(s, binding->stageFlags)
            surface_count[s] += binding->descriptorCount;
         break;
      }
   }

   bool supported = true;
   for (unsigned s = 0; s < MESA_SHADER_STAGES; s++) {
      /* Our maximum binding table size is 250 and we need to reserve 8 for
       * render targets. 240 is a nice round number.
       */
      if (surface_count[s] >= 240)
         supported = false;
   }

   pSupport->supported = supported;
}

VkResult anv_CreateDescriptorSetLayout(
    VkDevice                                    _device,
    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorSetLayout*                      pSetLayout)
{
   ANV_FROM_HANDLE(anv_device, device, _device);

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);

   uint32_t max_binding = 0;
   uint32_t immutable_sampler_count = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      max_binding = MAX2(max_binding, pCreateInfo->pBindings[j].binding);
      if (pCreateInfo->pBindings[j].pImmutableSamplers)
         immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
   }

   struct anv_descriptor_set_layout *set_layout;
   struct anv_descriptor_set_binding_layout *bindings;
   struct anv_sampler **samplers;

   /* We need to allocate descriptor set layouts off the device allocator
    * with DEVICE scope because they are reference counted and may not be
    * destroyed when vkDestroyDescriptorSetLayout is called.
    */
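   /* Both pipeline layouts and descriptor sets take such references; see
    * the anv_descriptor_set_layout_ref calls in anv_CreatePipelineLayout
    * and anv_descriptor_set_create below.
    */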
   ANV_MULTIALLOC(ma);
   anv_multialloc_add(&ma, &set_layout, 1);
   anv_multialloc_add(&ma, &bindings, max_binding + 1);
   anv_multialloc_add(&ma, &samplers, immutable_sampler_count);

   if (!anv_multialloc_alloc(&ma, &device->alloc,
                             VK_SYSTEM_ALLOCATION_SCOPE_DEVICE))
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   memset(set_layout, 0, sizeof(*set_layout));
   set_layout->ref_cnt = 1;
   set_layout->binding_count = max_binding + 1;

   for (uint32_t b = 0; b <= max_binding; b++) {
      /* Initialize all binding_layout entries to -1 */
      memset(&set_layout->binding[b], -1, sizeof(set_layout->binding[b]));

      set_layout->binding[b].array_size = 0;
      set_layout->binding[b].immutable_samplers = NULL;
   }

   /* Initialize all samplers to 0 */
   memset(samplers, 0, immutable_sampler_count * sizeof(*samplers));

   uint32_t sampler_count[MESA_SHADER_STAGES] = { 0, };
   uint32_t surface_count[MESA_SHADER_STAGES] = { 0, };
   uint32_t image_count[MESA_SHADER_STAGES] = { 0, };
   uint32_t buffer_count = 0;
   uint32_t dynamic_offset_count = 0;

   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[j];
      uint32_t b = binding->binding;
      /* We temporarily store the pointer to the binding in the
       * immutable_samplers pointer. This provides us with a quick-and-dirty
       * way to sort the bindings by binding number.
       */
      set_layout->binding[b].immutable_samplers = (void *)binding;
   }

   for (uint32_t b = 0; b <= max_binding; b++) {
      const VkDescriptorSetLayoutBinding *binding =
         (void *)set_layout->binding[b].immutable_samplers;

      if (binding == NULL)
         continue;

      if (binding->descriptorCount == 0)
         continue;

#ifndef NDEBUG
      set_layout->binding[b].type = binding->descriptorType;
#endif
      set_layout->binding[b].array_size = binding->descriptorCount;
      set_layout->binding[b].descriptor_index = set_layout->size;
      set_layout->size += binding->descriptorCount;

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         anv_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].sampler_index = sampler_count[s];
            sampler_count[s] += binding->descriptorCount;
         }
         break;
      default:
         break;
      }

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         set_layout->binding[b].buffer_index = buffer_count;
         buffer_count += binding->descriptorCount;
         /* fall through */

      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         anv_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].surface_index = surface_count[s];
            surface_count[s] += binding->descriptorCount;
         }
         break;
      default:
         break;
      }

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         set_layout->binding[b].dynamic_offset_index = dynamic_offset_count;
         dynamic_offset_count += binding->descriptorCount;
         break;
      default:
         break;
      }

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         anv_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].image_index = image_count[s];
            image_count[s] += binding->descriptorCount;
         }
         break;
      default:
         break;
      }

      if (binding->pImmutableSamplers) {
         set_layout->binding[b].immutable_samplers = samplers;
         samplers += binding->descriptorCount;

         for (uint32_t i = 0; i < binding->descriptorCount; i++)
            set_layout->binding[b].immutable_samplers[i] =
               anv_sampler_from_handle(binding->pImmutableSamplers[i]);
      } else {
         set_layout->binding[b].immutable_samplers = NULL;
      }

      set_layout->shader_stages |= binding->stageFlags;
   }

   set_layout->buffer_count = buffer_count;
   set_layout->dynamic_offset_count = dynamic_offset_count;

   *pSetLayout = anv_descriptor_set_layout_to_handle(set_layout);

   return VK_SUCCESS;
}

void anv_DestroyDescriptorSetLayout(
    VkDevice                                    _device,
    VkDescriptorSetLayout                       _set_layout,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout, _set_layout);

   if (!set_layout)
      return;

   anv_descriptor_set_layout_unref(device, set_layout);
}

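/* Hash a fixed-size value into the given SHA-1 context. */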
#define SHA1_UPDATE_VALUE(ctx, x) _mesa_sha1_update(ctx, &(x), sizeof(x));

static void
sha1_update_immutable_sampler(struct mesa_sha1 *ctx,
                              const struct anv_sampler *sampler)
{
   if (!sampler->conversion)
      return;

   /* The only thing that affects the shader is ycbcr conversion */
   _mesa_sha1_update(ctx, sampler->conversion,
                     sizeof(*sampler->conversion));
}

static void
sha1_update_descriptor_set_binding_layout(struct mesa_sha1 *ctx,
   const struct anv_descriptor_set_binding_layout *layout)
{
   SHA1_UPDATE_VALUE(ctx, layout->array_size);
   SHA1_UPDATE_VALUE(ctx, layout->descriptor_index);
   SHA1_UPDATE_VALUE(ctx, layout->dynamic_offset_index);
   SHA1_UPDATE_VALUE(ctx, layout->buffer_index);
   _mesa_sha1_update(ctx, layout->stage, sizeof(layout->stage));

   if (layout->immutable_samplers) {
      for (uint16_t i = 0; i < layout->array_size; i++)
         sha1_update_immutable_sampler(ctx, layout->immutable_samplers[i]);
   }
}

static void
sha1_update_descriptor_set_layout(struct mesa_sha1 *ctx,
                                  const struct anv_descriptor_set_layout *layout)
{
   SHA1_UPDATE_VALUE(ctx, layout->binding_count);
   SHA1_UPDATE_VALUE(ctx, layout->size);
   SHA1_UPDATE_VALUE(ctx, layout->shader_stages);
   SHA1_UPDATE_VALUE(ctx, layout->buffer_count);
   SHA1_UPDATE_VALUE(ctx, layout->dynamic_offset_count);

   for (uint16_t i = 0; i < layout->binding_count; i++)
      sha1_update_descriptor_set_binding_layout(ctx, &layout->binding[i]);
}

/*
 * Pipeline layouts.  These have nothing to do with the pipeline.  They are
 * just multiple descriptor set layouts pasted together.
 */
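
/* anv_CreatePipelineLayout hashes everything in the layout that can affect
 * compiled shader code (see the sha1_update_* helpers above) into
 * layout->sha1, which can then serve as part of a pipeline-cache key.
 */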

VkResult anv_CreatePipelineLayout(
    VkDevice                                    _device,
    const VkPipelineLayoutCreateInfo*           pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkPipelineLayout*                           pPipelineLayout)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_pipeline_layout *layout;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);

   layout = vk_alloc2(&device->alloc, pAllocator, sizeof(*layout), 8,
                      VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (layout == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   layout->num_sets = pCreateInfo->setLayoutCount;

   unsigned dynamic_offset_count = 0;

   memset(layout->stage, 0, sizeof(layout->stage));
   for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {
      ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout,
                      pCreateInfo->pSetLayouts[set]);
      layout->set[set].layout = set_layout;
      anv_descriptor_set_layout_ref(set_layout);

      layout->set[set].dynamic_offset_start = dynamic_offset_count;
      for (uint32_t b = 0; b < set_layout->binding_count; b++) {
         if (set_layout->binding[b].dynamic_offset_index < 0)
            continue;

         dynamic_offset_count += set_layout->binding[b].array_size;
         for (gl_shader_stage s = 0; s < MESA_SHADER_STAGES; s++) {
            if (set_layout->binding[b].stage[s].surface_index >= 0)
               layout->stage[s].has_dynamic_offsets = true;
         }
      }
   }

   struct mesa_sha1 ctx;
   _mesa_sha1_init(&ctx);
   for (unsigned s = 0; s < layout->num_sets; s++) {
      sha1_update_descriptor_set_layout(&ctx, layout->set[s].layout);
      _mesa_sha1_update(&ctx, &layout->set[s].dynamic_offset_start,
                        sizeof(layout->set[s].dynamic_offset_start));
   }
   _mesa_sha1_update(&ctx, &layout->num_sets, sizeof(layout->num_sets));
   for (unsigned s = 0; s < MESA_SHADER_STAGES; s++) {
      _mesa_sha1_update(&ctx, &layout->stage[s].has_dynamic_offsets,
                        sizeof(layout->stage[s].has_dynamic_offsets));
   }
   _mesa_sha1_final(&ctx, layout->sha1);

   *pPipelineLayout = anv_pipeline_layout_to_handle(layout);

   return VK_SUCCESS;
}

void anv_DestroyPipelineLayout(
    VkDevice                                    _device,
    VkPipelineLayout                            _pipelineLayout,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_pipeline_layout, pipeline_layout, _pipelineLayout);

   if (!pipeline_layout)
      return;

   for (uint32_t i = 0; i < pipeline_layout->num_sets; i++)
      anv_descriptor_set_layout_unref(device, pipeline_layout->set[i].layout);

   vk_free2(&device->alloc, pAllocator, pipeline_layout);
}

/*
 * Descriptor pools.
 *
 * These are implemented using a big pool of memory and a free-list for the
 * host memory allocations and a state_stream and a free list for the buffer
 * view surface state.  The spec allows us to fail to allocate due to
 * fragmentation in all cases but two: 1) after pool reset, allocating up
 * until the pool size with no freeing must succeed and 2) allocating and
 * freeing only descriptor sets with the same layout.  Case 1) is easy enough,
 * and the free lists let us recycle blocks for case 2).
 */
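
/* Pool memory is handed out from pool->data with a simple bump allocator
 * (pool->next).  Freed sets are threaded onto pool->free_list by writing a
 * pool_free_list_entry into the freed block itself.  List links are byte
 * offsets into pool->data; offset 0 is a valid allocation, so the
 * end-of-list sentinel EMPTY below is 1 rather than 0.
 */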

#define EMPTY 1

VkResult anv_CreateDescriptorPool(
    VkDevice                                    _device,
    const VkDescriptorPoolCreateInfo*           pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorPool*                           pDescriptorPool)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_descriptor_pool *pool;

   uint32_t descriptor_count = 0;
   uint32_t buffer_count = 0;
   for (uint32_t i = 0; i < pCreateInfo->poolSizeCount; i++) {
      switch (pCreateInfo->pPoolSizes[i].type) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         buffer_count += pCreateInfo->pPoolSizes[i].descriptorCount;
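         /* fall through */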
      default:
         descriptor_count += pCreateInfo->pPoolSizes[i].descriptorCount;
         break;
      }
   }

   const size_t pool_size =
      pCreateInfo->maxSets * sizeof(struct anv_descriptor_set) +
      descriptor_count * sizeof(struct anv_descriptor) +
      buffer_count * sizeof(struct anv_buffer_view);
   const size_t total_size = sizeof(*pool) + pool_size;

   pool = vk_alloc2(&device->alloc, pAllocator, total_size, 8,
                    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!pool)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   pool->size = pool_size;
   pool->next = 0;
   pool->free_list = EMPTY;

   anv_state_stream_init(&pool->surface_state_stream,
                         &device->surface_state_pool, 4096);
   pool->surface_state_free_list = NULL;

   *pDescriptorPool = anv_descriptor_pool_to_handle(pool);

   return VK_SUCCESS;
}

void anv_DestroyDescriptorPool(
    VkDevice                                    _device,
    VkDescriptorPool                            _pool,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, _pool);

   if (!pool)
      return;

   anv_state_stream_finish(&pool->surface_state_stream);
   vk_free2(&device->alloc, pAllocator, pool);
}

VkResult anv_ResetDescriptorPool(
    VkDevice                                    _device,
    VkDescriptorPool                            descriptorPool,
    VkDescriptorPoolResetFlags                  flags)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);

   pool->next = 0;
   pool->free_list = EMPTY;
   anv_state_stream_finish(&pool->surface_state_stream);
   anv_state_stream_init(&pool->surface_state_stream,
                         &device->surface_state_pool, 4096);
   pool->surface_state_free_list = NULL;

   return VK_SUCCESS;
}

struct pool_free_list_entry {
   uint32_t next;
   uint32_t size;
};

size_t
anv_descriptor_set_layout_size(const struct anv_descriptor_set_layout *layout)
{
   return
      sizeof(struct anv_descriptor_set) +
      layout->size * sizeof(struct anv_descriptor) +
      layout->buffer_count * sizeof(struct anv_buffer_view);
}

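/* A freed surface state's CPU mapping is reused to hold this entry (see
 * anv_descriptor_set_destroy), chaining free surface states together.
 */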
struct surface_state_free_list_entry {
   void *next;
   struct anv_state state;
};

VkResult
anv_descriptor_set_create(struct anv_device *device,
                          struct anv_descriptor_pool *pool,
                          struct anv_descriptor_set_layout *layout,
                          struct anv_descriptor_set **out_set)
{
   struct anv_descriptor_set *set;
   const size_t size = anv_descriptor_set_layout_size(layout);

   set = NULL;
   if (size <= pool->size - pool->next) {
      set = (struct anv_descriptor_set *) (pool->data + pool->next);
      pool->next += size;
   } else {
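      /* The bump allocator is out of space; do a first-fit walk of the
       * free list and unlink the first block that is big enough.
       */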
      struct pool_free_list_entry *entry;
      uint32_t *link = &pool->free_list;
      for (uint32_t f = pool->free_list; f != EMPTY; f = entry->next) {
         entry = (struct pool_free_list_entry *) (pool->data + f);
         if (size <= entry->size) {
            *link = entry->next;
            set = (struct anv_descriptor_set *) entry;
            break;
         }
         link = &entry->next;
      }
   }

   if (set == NULL) {
      if (pool->free_list != EMPTY) {
         return vk_error(VK_ERROR_FRAGMENTED_POOL);
      } else {
         return vk_error(VK_ERROR_OUT_OF_POOL_MEMORY);
      }
   }

   set->layout = layout;
   anv_descriptor_set_layout_ref(layout);

   set->size = size;
   set->buffer_views =
      (struct anv_buffer_view *) &set->descriptors[layout->size];
   set->buffer_count = layout->buffer_count;

   /* By defining the descriptors to be zero now, we can later verify that
    * a descriptor has not been populated with user data.
    */
   memset(set->descriptors, 0, sizeof(struct anv_descriptor) * layout->size);

   /* Go through and fill out immutable samplers if we have any */
   struct anv_descriptor *desc = set->descriptors;
   for (uint32_t b = 0; b < layout->binding_count; b++) {
      if (layout->binding[b].immutable_samplers) {
         for (uint32_t i = 0; i < layout->binding[b].array_size; i++) {
            /* The type will get changed to COMBINED_IMAGE_SAMPLER in
             * UpdateDescriptorSets if needed.  However, if the descriptor
             * set has an immutable sampler, UpdateDescriptorSets may never
             * touch it, so we need to make sure it's 100% valid now.
             */
            desc[i] = (struct anv_descriptor) {
               .type = VK_DESCRIPTOR_TYPE_SAMPLER,
               .sampler = layout->binding[b].immutable_samplers[i],
            };
         }
      }
      desc += layout->binding[b].array_size;
   }

   /* Allocate surface state for the buffer views. */
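   /* Surface states are always allocated with 64-byte size and alignment,
    * so any entry recycled from the free list can back any buffer view.
    */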
   for (uint32_t b = 0; b < layout->buffer_count; b++) {
      struct surface_state_free_list_entry *entry =
         pool->surface_state_free_list;
      struct anv_state state;

      if (entry) {
         state = entry->state;
         pool->surface_state_free_list = entry->next;
         assert(state.alloc_size == 64);
      } else {
         state = anv_state_stream_alloc(&pool->surface_state_stream, 64, 64);
      }

      set->buffer_views[b].surface_state = state;
   }

   *out_set = set;

   return VK_SUCCESS;
}

void
anv_descriptor_set_destroy(struct anv_device *device,
                           struct anv_descriptor_pool *pool,
                           struct anv_descriptor_set *set)
{
   anv_descriptor_set_layout_unref(device, set->layout);

   /* Put the buffer view surface state back on the free list. */
   for (uint32_t b = 0; b < set->buffer_count; b++) {
      struct surface_state_free_list_entry *entry =
         set->buffer_views[b].surface_state.map;
      entry->next = pool->surface_state_free_list;
      entry->state = set->buffer_views[b].surface_state;
      pool->surface_state_free_list = entry;
   }

   /* Put the descriptor set allocation back on the free list. */
   const uint32_t index = (char *) set - pool->data;
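   /* If this set was the most recent bump allocation, just roll the bump
    * pointer back; otherwise thread the block onto the free list.
    */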
   if (index + set->size == pool->next) {
      pool->next = index;
   } else {
      struct pool_free_list_entry *entry = (struct pool_free_list_entry *) set;
      entry->next = pool->free_list;
      entry->size = set->size;
      pool->free_list = (char *) entry - pool->data;
   }
}

VkResult anv_AllocateDescriptorSets(
    VkDevice                                    _device,
    const VkDescriptorSetAllocateInfo*          pAllocateInfo,
    VkDescriptorSet*                            pDescriptorSets)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, pAllocateInfo->descriptorPool);

   VkResult result = VK_SUCCESS;
   struct anv_descriptor_set *set;
   uint32_t i;

   for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      ANV_FROM_HANDLE(anv_descriptor_set_layout, layout,
                      pAllocateInfo->pSetLayouts[i]);

      result = anv_descriptor_set_create(device, pool, layout, &set);
      if (result != VK_SUCCESS)
         break;

      pDescriptorSets[i] = anv_descriptor_set_to_handle(set);
   }

   if (result != VK_SUCCESS)
      anv_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
                             i, pDescriptorSets);

   return result;
}

VkResult anv_FreeDescriptorSets(
    VkDevice                                    _device,
    VkDescriptorPool                            descriptorPool,
    uint32_t                                    count,
    const VkDescriptorSet*                      pDescriptorSets)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);

   for (uint32_t i = 0; i < count; i++) {
      ANV_FROM_HANDLE(anv_descriptor_set, set, pDescriptorSets[i]);

      if (!set)
         continue;

      anv_descriptor_set_destroy(device, pool, set);
   }

   return VK_SUCCESS;
}

void
anv_descriptor_set_write_image_view(struct anv_descriptor_set *set,
                                    const struct gen_device_info * const devinfo,
                                    const VkDescriptorImageInfo * const info,
                                    VkDescriptorType type,
                                    uint32_t binding,
                                    uint32_t element)
{
   const struct anv_descriptor_set_binding_layout *bind_layout =
      &set->layout->binding[binding];
   struct anv_descriptor *desc =
      &set->descriptors[bind_layout->descriptor_index + element];
   struct anv_image_view *image_view = NULL;
   struct anv_sampler *sampler = NULL;

   assert(type == bind_layout->type);

   switch (type) {
   case VK_DESCRIPTOR_TYPE_SAMPLER:
      sampler = anv_sampler_from_handle(info->sampler);
      break;

   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      image_view = anv_image_view_from_handle(info->imageView);
      sampler = anv_sampler_from_handle(info->sampler);
      break;

   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      image_view = anv_image_view_from_handle(info->imageView);
      break;

   default:
      unreachable("invalid descriptor type");
   }

   /* If this descriptor has an immutable sampler, we don't want to stomp on
    * it.
    */
   sampler = bind_layout->immutable_samplers ?
             bind_layout->immutable_samplers[element] :
             sampler;

   *desc = (struct anv_descriptor) {
      .type = type,
      .layout = info->imageLayout,
      .image_view = image_view,
      .sampler = sampler,
   };
}

void
anv_descriptor_set_write_buffer_view(struct anv_descriptor_set *set,
                                     VkDescriptorType type,
                                     struct anv_buffer_view *buffer_view,
                                     uint32_t binding,
                                     uint32_t element)
{
   const struct anv_descriptor_set_binding_layout *bind_layout =
      &set->layout->binding[binding];
   struct anv_descriptor *desc =
      &set->descriptors[bind_layout->descriptor_index + element];

   assert(type == bind_layout->type);

   *desc = (struct anv_descriptor) {
      .type = type,
      .buffer_view = buffer_view,
   };
}

void
anv_descriptor_set_write_buffer(struct anv_descriptor_set *set,
                                struct anv_device *device,
                                struct anv_state_stream *alloc_stream,
                                VkDescriptorType type,
                                struct anv_buffer *buffer,
                                uint32_t binding,
                                uint32_t element,
                                VkDeviceSize offset,
                                VkDeviceSize range)
{
   const struct anv_descriptor_set_binding_layout *bind_layout =
      &set->layout->binding[binding];
   struct anv_descriptor *desc =
      &set->descriptors[bind_layout->descriptor_index + element];

   assert(type == bind_layout->type);

   if (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
       type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) {
      *desc = (struct anv_descriptor) {
         .type = type,
         .buffer = buffer,
         .offset = offset,
         .range = range,
      };
   } else {
      struct anv_buffer_view *bview =
         &set->buffer_views[bind_layout->buffer_index + element];

      bview->format = anv_isl_format_for_descriptor_type(type);
      bview->range = anv_buffer_get_range(buffer, offset, range);
      bview->address = anv_address_add(buffer->address, offset);

      /* If we're writing descriptors through a push command, we need to
       * allocate the surface state from the command buffer. Otherwise it will
       * be allocated by the descriptor pool when calling
       * vkAllocateDescriptorSets. */
      if (alloc_stream)
         bview->surface_state = anv_state_stream_alloc(alloc_stream, 64, 64);

      anv_fill_buffer_surface_state(device, bview->surface_state,
                                    bview->format,
                                    bview->address, bview->range, 1);

      *desc = (struct anv_descriptor) {
         .type = type,
         .buffer_view = bview,
      };
   }
}

void anv_UpdateDescriptorSets(
    VkDevice                                    _device,
    uint32_t                                    descriptorWriteCount,
    const VkWriteDescriptorSet*                 pDescriptorWrites,
    uint32_t                                    descriptorCopyCount,
    const VkCopyDescriptorSet*                  pDescriptorCopies)
{
   ANV_FROM_HANDLE(anv_device, device, _device);

   for (uint32_t i = 0; i < descriptorWriteCount; i++) {
      const VkWriteDescriptorSet *write = &pDescriptorWrites[i];
      ANV_FROM_HANDLE(anv_descriptor_set, set, write->dstSet);

      switch (write->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            anv_descriptor_set_write_image_view(set, &device->info,
                                                write->pImageInfo + j,
                                                write->descriptorType,
                                                write->dstBinding,
                                                write->dstArrayElement + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            ANV_FROM_HANDLE(anv_buffer_view, bview,
                            write->pTexelBufferView[j]);

            anv_descriptor_set_write_buffer_view(set,
                                                 write->descriptorType,
                                                 bview,
                                                 write->dstBinding,
                                                 write->dstArrayElement + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            assert(write->pBufferInfo[j].buffer);
            ANV_FROM_HANDLE(anv_buffer, buffer, write->pBufferInfo[j].buffer);
            assert(buffer);

            anv_descriptor_set_write_buffer(set,
                                            device,
                                            NULL,
                                            write->descriptorType,
                                            buffer,
                                            write->dstBinding,
                                            write->dstArrayElement + j,
                                            write->pBufferInfo[j].offset,
                                            write->pBufferInfo[j].range);
         }
         break;

      default:
         break;
      }
   }

   for (uint32_t i = 0; i < descriptorCopyCount; i++) {
      const VkCopyDescriptorSet *copy = &pDescriptorCopies[i];
      ANV_FROM_HANDLE(anv_descriptor_set, src, copy->srcSet);
      ANV_FROM_HANDLE(anv_descriptor_set, dst, copy->dstSet);

      const struct anv_descriptor_set_binding_layout *src_layout =
         &src->layout->binding[copy->srcBinding];
      struct anv_descriptor *src_desc =
         &src->descriptors[src_layout->descriptor_index];
      src_desc += copy->srcArrayElement;

      const struct anv_descriptor_set_binding_layout *dst_layout =
         &dst->layout->binding[copy->dstBinding];
      struct anv_descriptor *dst_desc =
         &dst->descriptors[dst_layout->descriptor_index];
      dst_desc += copy->dstArrayElement;

      for (uint32_t j = 0; j < copy->descriptorCount; j++)
         dst_desc[j] = src_desc[j];
   }
}

/*
 * Descriptor update templates.
 */

void
anv_descriptor_set_write_template(struct anv_descriptor_set *set,
                                  struct anv_device *device,
                                  struct anv_state_stream *alloc_stream,
                                  const struct anv_descriptor_update_template *template,
                                  const void *data)
{
   for (uint32_t i = 0; i < template->entry_count; i++) {
      const struct anv_descriptor_template_entry *entry =
         &template->entries[i];

      switch (entry->type) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorImageInfo *info =
               data + entry->offset + j * entry->stride;
            anv_descriptor_set_write_image_view(set, &device->info,
                                                info, entry->type,
                                                entry->binding,
                                                entry->array_element + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkBufferView *_bview =
               data + entry->offset + j * entry->stride;
            ANV_FROM_HANDLE(anv_buffer_view, bview, *_bview);

            anv_descriptor_set_write_buffer_view(set,
                                                 entry->type,
                                                 bview,
                                                 entry->binding,
                                                 entry->array_element + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorBufferInfo *info =
               data + entry->offset + j * entry->stride;
            ANV_FROM_HANDLE(anv_buffer, buffer, info->buffer);

            anv_descriptor_set_write_buffer(set,
                                            device,
                                            alloc_stream,
                                            entry->type,
                                            buffer,
                                            entry->binding,
                                            entry->array_element + j,
                                            info->offset, info->range);
         }
         break;

      default:
         break;
      }
   }
}

VkResult anv_CreateDescriptorUpdateTemplate(
    VkDevice                                    _device,
    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_descriptor_update_template *template;

   size_t size = sizeof(*template) +
      pCreateInfo->descriptorUpdateEntryCount * sizeof(template->entries[0]);
   template = vk_alloc2(&device->alloc, pAllocator, size, 8,
                        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (template == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   template->bind_point = pCreateInfo->pipelineBindPoint;

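   /* Per the Vulkan spec, pCreateInfo->set is only meaningful for
    * DESCRIPTOR_SET templates; it is ignored for push descriptors.
    */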
   if (pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET)
      template->set = pCreateInfo->set;

   template->entry_count = pCreateInfo->descriptorUpdateEntryCount;
   for (uint32_t i = 0; i < template->entry_count; i++) {
      const VkDescriptorUpdateTemplateEntryKHR *pEntry =
         &pCreateInfo->pDescriptorUpdateEntries[i];

      template->entries[i] = (struct anv_descriptor_template_entry) {
         .type = pEntry->descriptorType,
         .binding = pEntry->dstBinding,
         .array_element = pEntry->dstArrayElement,
         .array_count = pEntry->descriptorCount,
         .offset = pEntry->offset,
         .stride = pEntry->stride,
      };
   }

   *pDescriptorUpdateTemplate =
      anv_descriptor_update_template_to_handle(template);

   return VK_SUCCESS;
}

void anv_DestroyDescriptorUpdateTemplate(
    VkDevice                                    _device,
    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_update_template, template,
                   descriptorUpdateTemplate);

   vk_free2(&device->alloc, pAllocator, template);
}

void anv_UpdateDescriptorSetWithTemplate(
    VkDevice                                    _device,
    VkDescriptorSet                             descriptorSet,
    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
    const void*                                 pData)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_set, set, descriptorSet);
   ANV_FROM_HANDLE(anv_descriptor_update_template, template,
                   descriptorUpdateTemplate);

   anv_descriptor_set_write_template(set, device, NULL, template, pData);
}