anv: drop unused local vars
[mesa.git] / src / intel / vulkan / anv_descriptor_set.c
/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <assert.h>
#include <stdbool.h>
#include <string.h>
#include <unistd.h>
#include <fcntl.h>

#include "util/mesa-sha1.h"

#include "anv_private.h"

/*
 * Descriptor set layouts.
 */

void anv_GetDescriptorSetLayoutSupport(
    VkDevice                                    device,
    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
    VkDescriptorSetLayoutSupport*               pSupport)
{
   uint32_t surface_count[MESA_SHADER_STAGES] = { 0, };

   for (uint32_t b = 0; b < pCreateInfo->bindingCount; b++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[b];

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
         /* There is no real limit on samplers */
         break;

      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         if (binding->pImmutableSamplers) {
            for (uint32_t i = 0; i < binding->descriptorCount; i++) {
               ANV_FROM_HANDLE(anv_sampler, sampler,
                               binding->pImmutableSamplers[i]);
               anv_foreach_stage(s, binding->stageFlags)
                  surface_count[s] += sampler->n_planes;
            }
         }
         break;

      default:
         anv_foreach_stage(s, binding->stageFlags)
            surface_count[s] += binding->descriptorCount;
         break;
      }
   }

   bool supported = true;
   for (unsigned s = 0; s < MESA_SHADER_STAGES; s++) {
      /* Our maximum binding table size is 250 and we need to reserve 8 for
       * render targets, which leaves 242.  240 is a nice round number.
       */
      if (surface_count[s] >= 240)
         supported = false;
   }

   pSupport->supported = supported;
}
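
/* For example, a layout whose only binding holds 250 STORAGE_BUFFER
 * descriptors visible to the fragment stage yields
 * surface_count[fragment] == 250 >= 240, so pSupport->supported comes back
 * false; 239 or fewer would be reported as supported.
 */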

VkResult anv_CreateDescriptorSetLayout(
    VkDevice                                    _device,
    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorSetLayout*                      pSetLayout)
{
   ANV_FROM_HANDLE(anv_device, device, _device);

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);

   uint32_t max_binding = 0;
   uint32_t immutable_sampler_count = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      max_binding = MAX2(max_binding, pCreateInfo->pBindings[j].binding);
      if (pCreateInfo->pBindings[j].pImmutableSamplers)
         immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
   }

   struct anv_descriptor_set_layout *set_layout;
   struct anv_descriptor_set_binding_layout *bindings;
   struct anv_sampler **samplers;

   /* We need to allocate descriptor set layouts off the device allocator
    * with DEVICE scope because they are reference counted and may not be
    * destroyed when vkDestroyDescriptorSetLayout is called.
    */
   ANV_MULTIALLOC(ma);
   anv_multialloc_add(&ma, &set_layout, 1);
   anv_multialloc_add(&ma, &bindings, max_binding + 1);
   anv_multialloc_add(&ma, &samplers, immutable_sampler_count);

   if (!anv_multialloc_alloc(&ma, &device->alloc,
                             VK_SYSTEM_ALLOCATION_SCOPE_DEVICE))
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   memset(set_layout, 0, sizeof(*set_layout));
   set_layout->ref_cnt = 1;
   set_layout->binding_count = max_binding + 1;

   for (uint32_t b = 0; b <= max_binding; b++) {
      /* Initialize all binding_layout entries to -1 */
      memset(&set_layout->binding[b], -1, sizeof(set_layout->binding[b]));

      set_layout->binding[b].array_size = 0;
      set_layout->binding[b].immutable_samplers = NULL;
   }

   /* Initialize all samplers to 0 */
   memset(samplers, 0, immutable_sampler_count * sizeof(*samplers));

   uint32_t sampler_count[MESA_SHADER_STAGES] = { 0, };
   uint32_t surface_count[MESA_SHADER_STAGES] = { 0, };
   uint32_t image_count[MESA_SHADER_STAGES] = { 0, };
   uint32_t buffer_count = 0;
   uint32_t dynamic_offset_count = 0;

   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[j];
      uint32_t b = binding->binding;
      /* We temporarily store the pointer to the binding in the
       * immutable_samplers pointer.  This provides us with a quick-and-dirty
       * way to sort the bindings by binding number.
       */
      set_layout->binding[b].immutable_samplers = (void *)binding;
   }
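
   /* Illustration: if pBindings arrives as { binding 2, binding 0 }, the
    * loop above stashes each VkDescriptorSetLayoutBinding pointer in slot 2
    * and slot 0 respectively, so the walk below visits bindings in numerical
    * order; slot 1 keeps its NULL immutable_samplers and is skipped.
    */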

   for (uint32_t b = 0; b <= max_binding; b++) {
      const VkDescriptorSetLayoutBinding *binding =
         (void *)set_layout->binding[b].immutable_samplers;

      if (binding == NULL)
         continue;

      if (binding->descriptorCount == 0)
         continue;

#ifndef NDEBUG
      set_layout->binding[b].type = binding->descriptorType;
#endif
      set_layout->binding[b].array_size = binding->descriptorCount;
      set_layout->binding[b].descriptor_index = set_layout->size;
      set_layout->size += binding->descriptorCount;

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         anv_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].sampler_index = sampler_count[s];
            sampler_count[s] += binding->descriptorCount;
         }
         break;
      default:
         break;
      }

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         set_layout->binding[b].buffer_index = buffer_count;
         buffer_count += binding->descriptorCount;
         /* fall through */

      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         anv_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].surface_index = surface_count[s];
            surface_count[s] += binding->descriptorCount;
         }
         break;
      default:
         break;
      }

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         set_layout->binding[b].dynamic_offset_index = dynamic_offset_count;
         dynamic_offset_count += binding->descriptorCount;
         break;
      default:
         break;
      }

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         anv_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].image_index = image_count[s];
            image_count[s] += binding->descriptorCount;
         }
         break;
      default:
         break;
      }

      if (binding->pImmutableSamplers) {
         set_layout->binding[b].immutable_samplers = samplers;
         samplers += binding->descriptorCount;

         for (uint32_t i = 0; i < binding->descriptorCount; i++)
            set_layout->binding[b].immutable_samplers[i] =
               anv_sampler_from_handle(binding->pImmutableSamplers[i]);
      } else {
         set_layout->binding[b].immutable_samplers = NULL;
      }

      set_layout->shader_stages |= binding->stageFlags;
   }

   set_layout->buffer_count = buffer_count;
   set_layout->dynamic_offset_count = dynamic_offset_count;

   *pSetLayout = anv_descriptor_set_layout_to_handle(set_layout);

   return VK_SUCCESS;
}
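
/* Note: binding numbers may be sparse.  A create info with bindings { 0, 5 }
 * produces binding_count == 6; the unused slots 1-4 keep array_size == 0 and
 * the -1 indices from the memset above, which later code (e.g. the
 * dynamic_offset_index < 0 check in anv_CreatePipelineLayout) treats as
 * "binding absent".
 */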

void anv_DestroyDescriptorSetLayout(
    VkDevice                                    _device,
    VkDescriptorSetLayout                       _set_layout,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout, _set_layout);

   if (!set_layout)
      return;

   anv_descriptor_set_layout_unref(device, set_layout);
}

#define SHA1_UPDATE_VALUE(ctx, x) _mesa_sha1_update(ctx, &(x), sizeof(x));

static void
sha1_update_immutable_sampler(struct mesa_sha1 *ctx,
                              const struct anv_sampler *sampler)
{
   if (!sampler->conversion)
      return;

   /* The only thing that affects the shader is ycbcr conversion */
   _mesa_sha1_update(ctx, sampler->conversion,
                     sizeof(*sampler->conversion));
}

static void
sha1_update_descriptor_set_binding_layout(struct mesa_sha1 *ctx,
   const struct anv_descriptor_set_binding_layout *layout)
{
   SHA1_UPDATE_VALUE(ctx, layout->array_size);
   SHA1_UPDATE_VALUE(ctx, layout->descriptor_index);
   SHA1_UPDATE_VALUE(ctx, layout->dynamic_offset_index);
   SHA1_UPDATE_VALUE(ctx, layout->buffer_index);
   _mesa_sha1_update(ctx, layout->stage, sizeof(layout->stage));

   if (layout->immutable_samplers) {
      for (uint16_t i = 0; i < layout->array_size; i++)
         sha1_update_immutable_sampler(ctx, layout->immutable_samplers[i]);
   }
}

static void
sha1_update_descriptor_set_layout(struct mesa_sha1 *ctx,
                                  const struct anv_descriptor_set_layout *layout)
{
   SHA1_UPDATE_VALUE(ctx, layout->binding_count);
   SHA1_UPDATE_VALUE(ctx, layout->size);
   SHA1_UPDATE_VALUE(ctx, layout->shader_stages);
   SHA1_UPDATE_VALUE(ctx, layout->buffer_count);
   SHA1_UPDATE_VALUE(ctx, layout->dynamic_offset_count);

   for (uint16_t i = 0; i < layout->binding_count; i++)
      sha1_update_descriptor_set_binding_layout(ctx, &layout->binding[i]);
}

/*
 * Pipeline layouts.  These have nothing to do with the pipeline.  They are
 * just multiple descriptor set layouts pasted together.
 */
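
/* Illustrative layout math: with two sets where set 0 has three dynamic
 * buffer descriptors and set 1 has two, set[0].dynamic_offset_start is 0 and
 * set[1].dynamic_offset_start is 3, so the five dynamic offsets the
 * application passes to vkCmdBindDescriptorSets map to one contiguous range
 * for the whole pipeline layout.
 */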

VkResult anv_CreatePipelineLayout(
    VkDevice                                    _device,
    const VkPipelineLayoutCreateInfo*           pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkPipelineLayout*                           pPipelineLayout)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_pipeline_layout *layout;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);

   layout = vk_alloc2(&device->alloc, pAllocator, sizeof(*layout), 8,
                      VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (layout == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   layout->num_sets = pCreateInfo->setLayoutCount;

   unsigned dynamic_offset_count = 0;

   memset(layout->stage, 0, sizeof(layout->stage));
   for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {
      ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout,
                      pCreateInfo->pSetLayouts[set]);
      layout->set[set].layout = set_layout;
      anv_descriptor_set_layout_ref(set_layout);

      layout->set[set].dynamic_offset_start = dynamic_offset_count;
      for (uint32_t b = 0; b < set_layout->binding_count; b++) {
         if (set_layout->binding[b].dynamic_offset_index < 0)
            continue;

         dynamic_offset_count += set_layout->binding[b].array_size;
         for (gl_shader_stage s = 0; s < MESA_SHADER_STAGES; s++) {
            if (set_layout->binding[b].stage[s].surface_index >= 0)
               layout->stage[s].has_dynamic_offsets = true;
         }
      }
   }

   struct mesa_sha1 ctx;
   _mesa_sha1_init(&ctx);
   for (unsigned s = 0; s < layout->num_sets; s++) {
      sha1_update_descriptor_set_layout(&ctx, layout->set[s].layout);
      _mesa_sha1_update(&ctx, &layout->set[s].dynamic_offset_start,
                        sizeof(layout->set[s].dynamic_offset_start));
   }
   _mesa_sha1_update(&ctx, &layout->num_sets, sizeof(layout->num_sets));
   for (unsigned s = 0; s < MESA_SHADER_STAGES; s++) {
      _mesa_sha1_update(&ctx, &layout->stage[s].has_dynamic_offsets,
                        sizeof(layout->stage[s].has_dynamic_offsets));
   }
   _mesa_sha1_final(&ctx, layout->sha1);

   *pPipelineLayout = anv_pipeline_layout_to_handle(layout);

   return VK_SUCCESS;
}

void anv_DestroyPipelineLayout(
    VkDevice                                    _device,
    VkPipelineLayout                            _pipelineLayout,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_pipeline_layout, pipeline_layout, _pipelineLayout);

   if (!pipeline_layout)
      return;

   for (uint32_t i = 0; i < pipeline_layout->num_sets; i++)
      anv_descriptor_set_layout_unref(device, pipeline_layout->set[i].layout);

   vk_free2(&device->alloc, pAllocator, pipeline_layout);
}

/*
 * Descriptor pools.
 *
 * These are implemented using a big pool of memory and a free list for the
 * host memory allocations, plus a state_stream and a free list for the
 * buffer view surface state.  The spec allows us to fail to allocate due to
 * fragmentation in all cases but two: 1) after pool reset, allocating up
 * until the pool size with no freeing must succeed and 2) allocating and
 * freeing only descriptor sets with the same layout.  Case 1) is easy
 * enough, and the free lists let us recycle blocks for case 2).
 */
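
/* Guarantee 2) in practice: free a set, then allocate another with the same
 * layout; the sizes match, so the freed block on pool->free_list satisfies
 * the first-fit scan in anv_descriptor_set_create and is reused, and the
 * allocation cannot fail due to fragmentation.
 */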

#define EMPTY 1

VkResult anv_CreateDescriptorPool(
    VkDevice                                    _device,
    const VkDescriptorPoolCreateInfo*           pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorPool*                           pDescriptorPool)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_descriptor_pool *pool;

   uint32_t descriptor_count = 0;
   uint32_t buffer_count = 0;
   for (uint32_t i = 0; i < pCreateInfo->poolSizeCount; i++) {
      switch (pCreateInfo->pPoolSizes[i].type) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         buffer_count += pCreateInfo->pPoolSizes[i].descriptorCount;
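         /* fall through: buffer descriptors also count as descriptors */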
      default:
         descriptor_count += pCreateInfo->pPoolSizes[i].descriptorCount;
         break;
      }
   }

   const size_t pool_size =
      pCreateInfo->maxSets * sizeof(struct anv_descriptor_set) +
      descriptor_count * sizeof(struct anv_descriptor) +
      buffer_count * sizeof(struct anv_buffer_view);
   const size_t total_size = sizeof(*pool) + pool_size;

   pool = vk_alloc2(&device->alloc, pAllocator, total_size, 8,
                    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!pool)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   pool->size = pool_size;
   pool->next = 0;
   pool->free_list = EMPTY;

   anv_state_stream_init(&pool->surface_state_stream,
                         &device->surface_state_pool, 4096);
   pool->surface_state_free_list = NULL;

   *pDescriptorPool = anv_descriptor_pool_to_handle(pool);

   return VK_SUCCESS;
}

void anv_DestroyDescriptorPool(
    VkDevice                                    _device,
    VkDescriptorPool                            _pool,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, _pool);

   if (!pool)
      return;

   anv_state_stream_finish(&pool->surface_state_stream);
   vk_free2(&device->alloc, pAllocator, pool);
}

VkResult anv_ResetDescriptorPool(
    VkDevice                                    _device,
    VkDescriptorPool                            descriptorPool,
    VkDescriptorPoolResetFlags                  flags)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);

   pool->next = 0;
   pool->free_list = EMPTY;
   anv_state_stream_finish(&pool->surface_state_stream);
   anv_state_stream_init(&pool->surface_state_stream,
                         &device->surface_state_pool, 4096);
   pool->surface_state_free_list = NULL;

   return VK_SUCCESS;
}

struct pool_free_list_entry {
   uint32_t next;
   uint32_t size;
};

size_t
anv_descriptor_set_layout_size(const struct anv_descriptor_set_layout *layout)
{
   return
      sizeof(struct anv_descriptor_set) +
      layout->size * sizeof(struct anv_descriptor) +
      layout->buffer_count * sizeof(struct anv_buffer_view);
}
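
/* Sketch of the arithmetic: a layout with size == 4 descriptors of which
 * buffer_count == 2 are buffer descriptors occupies one header, four
 * struct anv_descriptor slots, and two struct anv_buffer_view slots; the
 * buffer views live immediately after the descriptor array (see how
 * set->buffer_views is computed in anv_descriptor_set_create).
 */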

size_t
anv_descriptor_set_binding_layout_get_hw_size(const struct anv_descriptor_set_binding_layout *binding)
{
   if (!binding->immutable_samplers)
      return binding->array_size;

   uint32_t total_plane_count = 0;
   for (uint32_t i = 0; i < binding->array_size; i++)
      total_plane_count += binding->immutable_samplers[i]->n_planes;

   return total_plane_count;
}
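
/* E.g. a binding of three immutable YCbCr samplers whose format uses two
 * planes reports a hardware size of 6 even though array_size is 3, since
 * each plane needs its own binding table entry.
 */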

struct surface_state_free_list_entry {
   void *next;
   struct anv_state state;
};
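
/* Recycled surface states double as their own free-list nodes: when a set is
 * destroyed, the entry above is written into the CPU map of the freed
 * 64-byte surface state (see anv_descriptor_set_destroy), so no extra host
 * memory is needed to track them.
 */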

VkResult
anv_descriptor_set_create(struct anv_device *device,
                          struct anv_descriptor_pool *pool,
                          struct anv_descriptor_set_layout *layout,
                          struct anv_descriptor_set **out_set)
{
   struct anv_descriptor_set *set;
   const size_t size = anv_descriptor_set_layout_size(layout);

   set = NULL;
   if (size <= pool->size - pool->next) {
      set = (struct anv_descriptor_set *) (pool->data + pool->next);
      pool->next += size;
   } else {
      struct pool_free_list_entry *entry;
      uint32_t *link = &pool->free_list;
      for (uint32_t f = pool->free_list; f != EMPTY; f = entry->next) {
         entry = (struct pool_free_list_entry *) (pool->data + f);
         if (size <= entry->size) {
            *link = entry->next;
            set = (struct anv_descriptor_set *) entry;
            break;
         }
         link = &entry->next;
      }
   }

   if (set == NULL) {
      if (pool->free_list != EMPTY) {
         return vk_error(VK_ERROR_FRAGMENTED_POOL);
      } else {
         return vk_error(VK_ERROR_OUT_OF_POOL_MEMORY);
      }
   }

   set->layout = layout;
   anv_descriptor_set_layout_ref(layout);

   set->size = size;
   set->buffer_views =
      (struct anv_buffer_view *) &set->descriptors[layout->size];
   set->buffer_count = layout->buffer_count;

   /* By defining the descriptors to be zero now, we can later verify that
    * a descriptor has not been populated with user data.
    */
   memset(set->descriptors, 0, sizeof(struct anv_descriptor) * layout->size);

   /* Go through and fill out immutable samplers if we have any */
   struct anv_descriptor *desc = set->descriptors;
   for (uint32_t b = 0; b < layout->binding_count; b++) {
      if (layout->binding[b].immutable_samplers) {
         for (uint32_t i = 0; i < layout->binding[b].array_size; i++) {
            /* The type will get changed to COMBINED_IMAGE_SAMPLER in
             * UpdateDescriptorSets if needed.  However, if the descriptor
             * set has an immutable sampler, UpdateDescriptorSets may never
             * touch it, so we need to make sure it's 100% valid now.
             */
            desc[i] = (struct anv_descriptor) {
               .type = VK_DESCRIPTOR_TYPE_SAMPLER,
               .sampler = layout->binding[b].immutable_samplers[i],
            };
         }
      }
      desc += layout->binding[b].array_size;
   }

   /* Allocate surface state for the buffer views. */
   for (uint32_t b = 0; b < layout->buffer_count; b++) {
      struct surface_state_free_list_entry *entry =
         pool->surface_state_free_list;
      struct anv_state state;

      if (entry) {
         state = entry->state;
         pool->surface_state_free_list = entry->next;
         assert(state.alloc_size == 64);
      } else {
         state = anv_state_stream_alloc(&pool->surface_state_stream, 64, 64);
      }

      set->buffer_views[b].surface_state = state;
   }

   *out_set = set;

   return VK_SUCCESS;
}

void
anv_descriptor_set_destroy(struct anv_device *device,
                           struct anv_descriptor_pool *pool,
                           struct anv_descriptor_set *set)
{
   anv_descriptor_set_layout_unref(device, set->layout);

   /* Put the buffer view surface state back on the free list. */
   for (uint32_t b = 0; b < set->buffer_count; b++) {
      struct surface_state_free_list_entry *entry =
         set->buffer_views[b].surface_state.map;
      entry->next = pool->surface_state_free_list;
      entry->state = set->buffer_views[b].surface_state;
      pool->surface_state_free_list = entry;
   }

   /* Put the descriptor set allocation back on the free list. */
   const uint32_t index = (char *) set - pool->data;
   if (index + set->size == pool->next) {
      pool->next = index;
   } else {
      struct pool_free_list_entry *entry = (struct pool_free_list_entry *) set;
      entry->next = pool->free_list;
      entry->size = set->size;
      pool->free_list = (char *) entry - pool->data;
   }
}
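
/* Walk-through: allocate sets A then B back to back, then free them in that
 * order.  Freeing A pushes its block onto pool->free_list (B still follows
 * it in memory), while freeing B afterwards simply rewinds pool->next, since
 * B was the most recent bump allocation; A's block stays on the free list
 * for reuse.
 */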

VkResult anv_AllocateDescriptorSets(
    VkDevice                                    _device,
    const VkDescriptorSetAllocateInfo*          pAllocateInfo,
    VkDescriptorSet*                            pDescriptorSets)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, pAllocateInfo->descriptorPool);

   VkResult result = VK_SUCCESS;
   struct anv_descriptor_set *set;
   uint32_t i;

   for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      ANV_FROM_HANDLE(anv_descriptor_set_layout, layout,
                      pAllocateInfo->pSetLayouts[i]);

      result = anv_descriptor_set_create(device, pool, layout, &set);
      if (result != VK_SUCCESS)
         break;

      pDescriptorSets[i] = anv_descriptor_set_to_handle(set);
   }

   if (result != VK_SUCCESS)
      anv_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
                             i, pDescriptorSets);

   return result;
}

VkResult anv_FreeDescriptorSets(
    VkDevice                                    _device,
    VkDescriptorPool                            descriptorPool,
    uint32_t                                    count,
    const VkDescriptorSet*                      pDescriptorSets)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);

   for (uint32_t i = 0; i < count; i++) {
      ANV_FROM_HANDLE(anv_descriptor_set, set, pDescriptorSets[i]);

      if (!set)
         continue;

      anv_descriptor_set_destroy(device, pool, set);
   }

   return VK_SUCCESS;
}

void
anv_descriptor_set_write_image_view(struct anv_descriptor_set *set,
                                    const struct gen_device_info * const devinfo,
                                    const VkDescriptorImageInfo * const info,
                                    VkDescriptorType type,
                                    uint32_t binding,
                                    uint32_t element)
{
   const struct anv_descriptor_set_binding_layout *bind_layout =
      &set->layout->binding[binding];
   struct anv_descriptor *desc =
      &set->descriptors[bind_layout->descriptor_index + element];
   struct anv_image_view *image_view = NULL;
   struct anv_sampler *sampler = NULL;

   assert(type == bind_layout->type);

   switch (type) {
   case VK_DESCRIPTOR_TYPE_SAMPLER:
      sampler = anv_sampler_from_handle(info->sampler);
      break;

   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      image_view = anv_image_view_from_handle(info->imageView);
      sampler = anv_sampler_from_handle(info->sampler);
      break;

   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      image_view = anv_image_view_from_handle(info->imageView);
      break;

   default:
      unreachable("invalid descriptor type");
   }

   /* If this descriptor has an immutable sampler, we don't want to stomp on
    * it.
    */
   sampler = bind_layout->immutable_samplers ?
             bind_layout->immutable_samplers[element] :
             sampler;

   *desc = (struct anv_descriptor) {
      .type = type,
      .layout = info->imageLayout,
      .image_view = image_view,
      .sampler = sampler,
   };
}
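
/* Consequence of the immutable-sampler guard above: writing a
 * COMBINED_IMAGE_SAMPLER into a binding created with pImmutableSamplers
 * updates only the image view; the sampler recorded in the descriptor is
 * always the layout's immutable one, whatever info->sampler holds.
 */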

void
anv_descriptor_set_write_buffer_view(struct anv_descriptor_set *set,
                                     VkDescriptorType type,
                                     struct anv_buffer_view *buffer_view,
                                     uint32_t binding,
                                     uint32_t element)
{
   const struct anv_descriptor_set_binding_layout *bind_layout =
      &set->layout->binding[binding];
   struct anv_descriptor *desc =
      &set->descriptors[bind_layout->descriptor_index + element];

   assert(type == bind_layout->type);

   *desc = (struct anv_descriptor) {
      .type = type,
      .buffer_view = buffer_view,
   };
}

void
anv_descriptor_set_write_buffer(struct anv_descriptor_set *set,
                                struct anv_device *device,
                                struct anv_state_stream *alloc_stream,
                                VkDescriptorType type,
                                struct anv_buffer *buffer,
                                uint32_t binding,
                                uint32_t element,
                                VkDeviceSize offset,
                                VkDeviceSize range)
{
   const struct anv_descriptor_set_binding_layout *bind_layout =
      &set->layout->binding[binding];
   struct anv_descriptor *desc =
      &set->descriptors[bind_layout->descriptor_index + element];

   assert(type == bind_layout->type);

   if (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
       type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) {
      *desc = (struct anv_descriptor) {
         .type = type,
         .buffer = buffer,
         .offset = offset,
         .range = range,
      };
   } else {
      struct anv_buffer_view *bview =
         &set->buffer_views[bind_layout->buffer_index + element];

      bview->format = anv_isl_format_for_descriptor_type(type);
      bview->range = anv_buffer_get_range(buffer, offset, range);
      bview->address = anv_address_add(buffer->address, offset);

      /* If we're writing descriptors through a push command, we need to
       * allocate the surface state from the command buffer.  Otherwise it
       * will be allocated by the descriptor pool when calling
       * vkAllocateDescriptorSets.
       */
      if (alloc_stream)
         bview->surface_state = anv_state_stream_alloc(alloc_stream, 64, 64);

      anv_fill_buffer_surface_state(device, bview->surface_state,
                                    bview->format,
                                    bview->address, bview->range, 1);

      *desc = (struct anv_descriptor) {
         .type = type,
         .buffer_view = bview,
      };
   }
}

void anv_UpdateDescriptorSets(
    VkDevice                                    _device,
    uint32_t                                    descriptorWriteCount,
    const VkWriteDescriptorSet*                 pDescriptorWrites,
    uint32_t                                    descriptorCopyCount,
    const VkCopyDescriptorSet*                  pDescriptorCopies)
{
   ANV_FROM_HANDLE(anv_device, device, _device);

   for (uint32_t i = 0; i < descriptorWriteCount; i++) {
      const VkWriteDescriptorSet *write = &pDescriptorWrites[i];
      ANV_FROM_HANDLE(anv_descriptor_set, set, write->dstSet);

      switch (write->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            anv_descriptor_set_write_image_view(set, &device->info,
                                                write->pImageInfo + j,
                                                write->descriptorType,
                                                write->dstBinding,
                                                write->dstArrayElement + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            ANV_FROM_HANDLE(anv_buffer_view, bview,
                            write->pTexelBufferView[j]);

            anv_descriptor_set_write_buffer_view(set,
                                                 write->descriptorType,
                                                 bview,
                                                 write->dstBinding,
                                                 write->dstArrayElement + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            assert(write->pBufferInfo[j].buffer);
            ANV_FROM_HANDLE(anv_buffer, buffer, write->pBufferInfo[j].buffer);
            assert(buffer);

            anv_descriptor_set_write_buffer(set,
                                            device,
                                            NULL,
                                            write->descriptorType,
                                            buffer,
                                            write->dstBinding,
                                            write->dstArrayElement + j,
                                            write->pBufferInfo[j].offset,
                                            write->pBufferInfo[j].range);
         }
         break;

      default:
         break;
      }
   }

   for (uint32_t i = 0; i < descriptorCopyCount; i++) {
      const VkCopyDescriptorSet *copy = &pDescriptorCopies[i];
      ANV_FROM_HANDLE(anv_descriptor_set, src, copy->srcSet);
      ANV_FROM_HANDLE(anv_descriptor_set, dst, copy->dstSet);

      const struct anv_descriptor_set_binding_layout *src_layout =
         &src->layout->binding[copy->srcBinding];
      struct anv_descriptor *src_desc =
         &src->descriptors[src_layout->descriptor_index];
      src_desc += copy->srcArrayElement;

      const struct anv_descriptor_set_binding_layout *dst_layout =
         &dst->layout->binding[copy->dstBinding];
      struct anv_descriptor *dst_desc =
         &dst->descriptors[dst_layout->descriptor_index];
      dst_desc += copy->dstArrayElement;

      for (uint32_t j = 0; j < copy->descriptorCount; j++)
         dst_desc[j] = src_desc[j];
   }
}

/*
 * Descriptor update templates.
 */
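
/* Usage sketch (illustrative, not from this file): an application lays out
 * its descriptor data in a host blob and describes it with offset/stride
 * pairs, e.g.
 *
 *    struct { VkDescriptorBufferInfo ubo; } data[8];
 *    // one template entry: offset = 0, stride = sizeof(data[0]),
 *    // descriptorCount = 8, descriptorType = UNIFORM_BUFFER
 *    vkUpdateDescriptorSetWithTemplate(dev, set, template, data);
 *
 * anv_descriptor_set_write_template below then walks
 * entry->offset + j * entry->stride over that blob for each array element.
 */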

void
anv_descriptor_set_write_template(struct anv_descriptor_set *set,
                                  struct anv_device *device,
                                  struct anv_state_stream *alloc_stream,
                                  const struct anv_descriptor_update_template *template,
                                  const void *data)
{
   for (uint32_t i = 0; i < template->entry_count; i++) {
      const struct anv_descriptor_template_entry *entry =
         &template->entries[i];

      switch (entry->type) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorImageInfo *info =
               data + entry->offset + j * entry->stride;
            anv_descriptor_set_write_image_view(set, &device->info,
                                                info, entry->type,
                                                entry->binding,
                                                entry->array_element + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkBufferView *_bview =
               data + entry->offset + j * entry->stride;
            ANV_FROM_HANDLE(anv_buffer_view, bview, *_bview);

            anv_descriptor_set_write_buffer_view(set,
                                                 entry->type,
                                                 bview,
                                                 entry->binding,
                                                 entry->array_element + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorBufferInfo *info =
               data + entry->offset + j * entry->stride;
            ANV_FROM_HANDLE(anv_buffer, buffer, info->buffer);

            anv_descriptor_set_write_buffer(set,
                                            device,
                                            alloc_stream,
                                            entry->type,
                                            buffer,
                                            entry->binding,
                                            entry->array_element + j,
                                            info->offset, info->range);
         }
         break;

      default:
         break;
      }
   }
}

VkResult anv_CreateDescriptorUpdateTemplate(
    VkDevice                                    _device,
    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_descriptor_update_template *template;

   size_t size = sizeof(*template) +
      pCreateInfo->descriptorUpdateEntryCount * sizeof(template->entries[0]);
   template = vk_alloc2(&device->alloc, pAllocator, size, 8,
                        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (template == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   template->bind_point = pCreateInfo->pipelineBindPoint;

   if (pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET)
      template->set = pCreateInfo->set;

   template->entry_count = pCreateInfo->descriptorUpdateEntryCount;
   for (uint32_t i = 0; i < template->entry_count; i++) {
      const VkDescriptorUpdateTemplateEntryKHR *pEntry =
         &pCreateInfo->pDescriptorUpdateEntries[i];

      template->entries[i] = (struct anv_descriptor_template_entry) {
         .type = pEntry->descriptorType,
         .binding = pEntry->dstBinding,
         .array_element = pEntry->dstArrayElement,
         .array_count = pEntry->descriptorCount,
         .offset = pEntry->offset,
         .stride = pEntry->stride,
      };
   }

   *pDescriptorUpdateTemplate =
      anv_descriptor_update_template_to_handle(template);

   return VK_SUCCESS;
}

void anv_DestroyDescriptorUpdateTemplate(
    VkDevice                                    _device,
    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_update_template, template,
                   descriptorUpdateTemplate);

   vk_free2(&device->alloc, pAllocator, template);
}

void anv_UpdateDescriptorSetWithTemplate(
    VkDevice                                    _device,
    VkDescriptorSet                             descriptorSet,
    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
    const void*                                 pData)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_set, set, descriptorSet);
   ANV_FROM_HANDLE(anv_descriptor_update_template, template,
                   descriptorUpdateTemplate);

   anv_descriptor_set_write_template(set, device, NULL, template, pData);
}