/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <assert.h>
#include <stdbool.h>
#include <string.h>
#include <unistd.h>
#include <fcntl.h>

#include "util/mesa-sha1.h"

#include "anv_private.h"

/*
 * Descriptor set layouts.
 */
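/* A set layout records, for each binding, where that binding's descriptors
 * live: an index into the set's flat descriptor array plus per-stage
 * sampler, surface, and image table indices.  Immutable samplers, when
 * present, are stored in a trailing array in the same allocation.
 */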

VkResult anv_CreateDescriptorSetLayout(
    VkDevice                                    _device,
    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorSetLayout*                      pSetLayout)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_descriptor_set_layout *set_layout;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);

   uint32_t max_binding = 0;
   uint32_t immutable_sampler_count = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      max_binding = MAX2(max_binding, pCreateInfo->pBindings[j].binding);
      if (pCreateInfo->pBindings[j].pImmutableSamplers)
         immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
   }

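   /* The layout, its binding array, and any immutable sampler pointers all
    * live in a single allocation: the binding array directly follows the
    * struct and the sampler pointers follow the binding array.
    */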
   size_t size = sizeof(struct anv_descriptor_set_layout) +
                 (max_binding + 1) * sizeof(set_layout->binding[0]) +
                 immutable_sampler_count * sizeof(struct anv_sampler *);

   set_layout = vk_alloc2(&device->alloc, pAllocator, size, 8,
                          VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!set_layout)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   /* We just allocate all the samplers at the end of the struct */
   struct anv_sampler **samplers =
      (struct anv_sampler **)&set_layout->binding[max_binding + 1];

   memset(set_layout, 0, sizeof(*set_layout));
   set_layout->binding_count = max_binding + 1;

   for (uint32_t b = 0; b <= max_binding; b++) {
      /* Initialize all binding_layout entries to -1 */
      memset(&set_layout->binding[b], -1, sizeof(set_layout->binding[b]));

      set_layout->binding[b].array_size = 0;
      set_layout->binding[b].immutable_samplers = NULL;
   }

   /* Initialize all samplers to 0 */
   memset(samplers, 0, immutable_sampler_count * sizeof(*samplers));

   uint32_t sampler_count[MESA_SHADER_STAGES] = { 0, };
   uint32_t surface_count[MESA_SHADER_STAGES] = { 0, };
   uint32_t image_count[MESA_SHADER_STAGES] = { 0, };
   uint32_t buffer_count = 0;
   uint32_t dynamic_offset_count = 0;

   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[j];
      uint32_t b = binding->binding;
      /* We temporarily store the pointer to the binding in the
       * immutable_samplers pointer. This provides us with a quick-and-dirty
       * way to sort the bindings by binding number.
       */
      set_layout->binding[b].immutable_samplers = (void *)binding;
   }

   for (uint32_t b = 0; b <= max_binding; b++) {
      const VkDescriptorSetLayoutBinding *binding =
         (void *)set_layout->binding[b].immutable_samplers;

      if (binding == NULL)
         continue;

      assert(binding->descriptorCount > 0);
#ifndef NDEBUG
      set_layout->binding[b].type = binding->descriptorType;
#endif
      set_layout->binding[b].array_size = binding->descriptorCount;
      set_layout->binding[b].descriptor_index = set_layout->size;
      set_layout->size += binding->descriptorCount;

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         anv_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].sampler_index = sampler_count[s];
            sampler_count[s] += binding->descriptorCount;
         }
         break;
      default:
         break;
      }

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         set_layout->binding[b].buffer_index = buffer_count;
         buffer_count += binding->descriptorCount;
         /* fall through */

      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         anv_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].surface_index = surface_count[s];
            surface_count[s] += binding->descriptorCount;
         }
         break;
      default:
         break;
      }

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         set_layout->binding[b].dynamic_offset_index = dynamic_offset_count;
         dynamic_offset_count += binding->descriptorCount;
         break;
      default:
         break;
      }

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         anv_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].image_index = image_count[s];
            image_count[s] += binding->descriptorCount;
         }
         break;
      default:
         break;
      }

      if (binding->pImmutableSamplers) {
         set_layout->binding[b].immutable_samplers = samplers;
         samplers += binding->descriptorCount;

         for (uint32_t i = 0; i < binding->descriptorCount; i++)
            set_layout->binding[b].immutable_samplers[i] =
               anv_sampler_from_handle(binding->pImmutableSamplers[i]);
      } else {
         set_layout->binding[b].immutable_samplers = NULL;
      }

      set_layout->shader_stages |= binding->stageFlags;
   }

   set_layout->buffer_count = buffer_count;
   set_layout->dynamic_offset_count = dynamic_offset_count;

   *pSetLayout = anv_descriptor_set_layout_to_handle(set_layout);

   return VK_SUCCESS;
}

void anv_DestroyDescriptorSetLayout(
    VkDevice                                    _device,
    VkDescriptorSetLayout                       _set_layout,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout, _set_layout);

   if (!set_layout)
      return;

   vk_free2(&device->alloc, pAllocator, set_layout);
}

static void
sha1_update_descriptor_set_layout(struct mesa_sha1 *ctx,
                                  const struct anv_descriptor_set_layout *layout)
{
   size_t size = sizeof(*layout) +
                 sizeof(layout->binding[0]) * layout->binding_count;
   _mesa_sha1_update(ctx, layout, size);
}

/*
 * Pipeline layouts.  These have nothing to do with the pipeline.  They are
 * just multiple descriptor set layouts pasted together.
 */

VkResult anv_CreatePipelineLayout(
    VkDevice                                    _device,
    const VkPipelineLayoutCreateInfo*           pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkPipelineLayout*                           pPipelineLayout)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_pipeline_layout *layout;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);

   layout = vk_alloc2(&device->alloc, pAllocator, sizeof(*layout), 8,
                      VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (layout == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   layout->num_sets = pCreateInfo->setLayoutCount;

   unsigned dynamic_offset_count = 0;

   memset(layout->stage, 0, sizeof(layout->stage));
   for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {
      ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout,
                      pCreateInfo->pSetLayouts[set]);
      layout->set[set].layout = set_layout;

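      /* Dynamic offsets are numbered consecutively across all sets in the
       * pipeline layout, so record where this set's run begins.
       */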
      layout->set[set].dynamic_offset_start = dynamic_offset_count;
      for (uint32_t b = 0; b < set_layout->binding_count; b++) {
         if (set_layout->binding[b].dynamic_offset_index < 0)
            continue;

         dynamic_offset_count += set_layout->binding[b].array_size;
         for (gl_shader_stage s = 0; s < MESA_SHADER_STAGES; s++) {
            if (set_layout->binding[b].stage[s].surface_index >= 0)
               layout->stage[s].has_dynamic_offsets = true;
         }
      }
   }

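   /* Fingerprint the layout.  The SHA-1 covers everything a compiled
    * shader depends on, so it can stand in for the full layout when the
    * driver builds cache keys.
    */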
   struct mesa_sha1 *ctx = _mesa_sha1_init();
   for (unsigned s = 0; s < layout->num_sets; s++) {
      sha1_update_descriptor_set_layout(ctx, layout->set[s].layout);
      _mesa_sha1_update(ctx, &layout->set[s].dynamic_offset_start,
                        sizeof(layout->set[s].dynamic_offset_start));
   }
   _mesa_sha1_update(ctx, &layout->num_sets, sizeof(layout->num_sets));
   for (unsigned s = 0; s < MESA_SHADER_STAGES; s++) {
      _mesa_sha1_update(ctx, &layout->stage[s].has_dynamic_offsets,
                        sizeof(layout->stage[s].has_dynamic_offsets));
   }
   _mesa_sha1_final(ctx, layout->sha1);

   *pPipelineLayout = anv_pipeline_layout_to_handle(layout);

   return VK_SUCCESS;
}

void anv_DestroyPipelineLayout(
    VkDevice                                    _device,
    VkPipelineLayout                            _pipelineLayout,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_pipeline_layout, pipeline_layout, _pipelineLayout);

   if (!pipeline_layout)
      return;

   vk_free2(&device->alloc, pAllocator, pipeline_layout);
}

/*
 * Descriptor pools.
 *
 * These are implemented using a big block of memory plus a free list for
 * the host allocations, and a state stream plus a free list for the buffer
 * view surface state.  The spec allows us to fail to allocate due to
 * fragmentation in all cases but two: 1) after pool reset, allocating up
 * until the pool size with no freeing must succeed and 2) allocating and
 * freeing only descriptor sets with the same layout.  Case 1) is easy
 * enough, and the free lists let us recycle blocks for case 2).
 */

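/* Sentinel for an empty free list.  Offset 1 into pool->data can never be
 * the start of a real entry: the first allocation begins at offset 0 and
 * every entry is more than one byte long.
 */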
#define EMPTY 1

VkResult anv_CreateDescriptorPool(
    VkDevice                                    _device,
    const VkDescriptorPoolCreateInfo*           pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorPool*                           pDescriptorPool)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_descriptor_pool *pool;

   uint32_t descriptor_count = 0;
   uint32_t buffer_count = 0;
   for (uint32_t i = 0; i < pCreateInfo->poolSizeCount; i++) {
      switch (pCreateInfo->pPoolSizes[i].type) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         buffer_count += pCreateInfo->pPoolSizes[i].descriptorCount;
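         /* fall through */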
      default:
         descriptor_count += pCreateInfo->pPoolSizes[i].descriptorCount;
         break;
      }
   }

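   /* Size the pool for the worst case: every set, descriptor, and buffer
    * view the application asked for may be live at the same time.
    */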
   const size_t size =
      sizeof(*pool) +
      pCreateInfo->maxSets * sizeof(struct anv_descriptor_set) +
      descriptor_count * sizeof(struct anv_descriptor) +
      buffer_count * sizeof(struct anv_buffer_view);

   pool = vk_alloc2(&device->alloc, pAllocator, size, 8,
                    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!pool)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   pool->size = size;
   pool->next = 0;
   pool->free_list = EMPTY;

   anv_state_stream_init(&pool->surface_state_stream,
                         &device->surface_state_block_pool);
   pool->surface_state_free_list = NULL;

   *pDescriptorPool = anv_descriptor_pool_to_handle(pool);

   return VK_SUCCESS;
}

void anv_DestroyDescriptorPool(
    VkDevice                                    _device,
    VkDescriptorPool                            _pool,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, _pool);

   if (!pool)
      return;

   anv_state_stream_finish(&pool->surface_state_stream);
   vk_free2(&device->alloc, pAllocator, pool);
}

VkResult anv_ResetDescriptorPool(
    VkDevice                                    _device,
    VkDescriptorPool                            descriptorPool,
    VkDescriptorPoolResetFlags                  flags)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);

   pool->next = 0;
   pool->free_list = EMPTY;
   anv_state_stream_finish(&pool->surface_state_stream);
   anv_state_stream_init(&pool->surface_state_stream,
                         &device->surface_state_block_pool);
   pool->surface_state_free_list = NULL;

   return VK_SUCCESS;
}

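/* A freed descriptor set is overwritten in place with one of these, so the
 * free list needs no storage of its own.  'next' is the offset of the next
 * entry in pool->data, or EMPTY at the end of the list.
 */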
struct pool_free_list_entry {
   uint32_t next;
   uint32_t size;
};

static size_t
layout_size(const struct anv_descriptor_set_layout *layout)
{
   return
      sizeof(struct anv_descriptor_set) +
      layout->size * sizeof(struct anv_descriptor) +
      layout->buffer_count * sizeof(struct anv_buffer_view);
}

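/* Recycled buffer view surface states are chained through their own CPU
 * mapping: on free, the first bytes of the state's map are reused as one
 * of these entries (see anv_descriptor_set_destroy).
 */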
struct surface_state_free_list_entry {
   void *next;
   struct anv_state state;
};

VkResult
anv_descriptor_set_create(struct anv_device *device,
                          struct anv_descriptor_pool *pool,
                          const struct anv_descriptor_set_layout *layout,
                          struct anv_descriptor_set **out_set)
{
   struct anv_descriptor_set *set;
   const size_t size = layout_size(layout);

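   /* First try a cheap bump allocation from the unused tail of the pool;
    * if the set does not fit there, fall back to a first-fit walk of the
    * free list.
    */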
   set = NULL;
   if (size <= pool->size - pool->next) {
      set = (struct anv_descriptor_set *) (pool->data + pool->next);
      pool->next += size;
   } else {
      struct pool_free_list_entry *entry;
      uint32_t *link = &pool->free_list;
      for (uint32_t f = pool->free_list; f != EMPTY; f = entry->next) {
         entry = (struct pool_free_list_entry *) (pool->data + f);
         if (size <= entry->size) {
            *link = entry->next;
            set = (struct anv_descriptor_set *) entry;
            break;
         }
         link = &entry->next;
      }
   }

   if (set == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   set->size = size;
   set->layout = layout;
   set->buffer_views =
      (struct anv_buffer_view *) &set->descriptors[layout->size];
   set->buffer_count = layout->buffer_count;

   /* By defining the descriptors to be zero now, we can later verify that
    * a descriptor has not been populated with user data.
    */
   memset(set->descriptors, 0, sizeof(struct anv_descriptor) * layout->size);

   /* Go through and fill out immutable samplers if we have any */
   struct anv_descriptor *desc = set->descriptors;
   for (uint32_t b = 0; b < layout->binding_count; b++) {
      if (layout->binding[b].immutable_samplers) {
         for (uint32_t i = 0; i < layout->binding[b].array_size; i++) {
            /* The type will get changed to COMBINED_IMAGE_SAMPLER in
             * UpdateDescriptorSets if needed.  However, if the descriptor
             * set has an immutable sampler, UpdateDescriptorSets may never
             * touch it, so we need to make sure it's 100% valid now.
             */
            desc[i] = (struct anv_descriptor) {
               .type = VK_DESCRIPTOR_TYPE_SAMPLER,
               .sampler = layout->binding[b].immutable_samplers[i],
            };
         }
      }
      desc += layout->binding[b].array_size;
   }

   /* Allocate surface state for the buffer views. */
   for (uint32_t b = 0; b < layout->buffer_count; b++) {
      struct surface_state_free_list_entry *entry =
         pool->surface_state_free_list;
      struct anv_state state;

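      /* Buffer view surface states are always allocated as 64 bytes with
       * 64-byte alignment, so a recycled state is interchangeable with a
       * freshly allocated one.
       */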
      if (entry) {
         state = entry->state;
         pool->surface_state_free_list = entry->next;
         assert(state.alloc_size == 64);
      } else {
         state = anv_state_stream_alloc(&pool->surface_state_stream, 64, 64);
      }

      set->buffer_views[b].surface_state = state;
   }

   *out_set = set;

   return VK_SUCCESS;
}

void
anv_descriptor_set_destroy(struct anv_device *device,
                           struct anv_descriptor_pool *pool,
                           struct anv_descriptor_set *set)
{
   /* Put the buffer view surface state back on the free list. */
   for (uint32_t b = 0; b < set->buffer_count; b++) {
      struct surface_state_free_list_entry *entry =
         set->buffer_views[b].surface_state.map;
      entry->next = pool->surface_state_free_list;
      entry->state = set->buffer_views[b].surface_state;
      pool->surface_state_free_list = entry;
   }

   /* Put the descriptor set allocation back on the free list. */
   const uint32_t index = (char *) set - pool->data;
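   /* If this set was the most recent allocation, just roll the bump
    * pointer back; otherwise turn the set's memory into a free-list entry.
    */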
   if (index + set->size == pool->next) {
      pool->next = index;
   } else {
      struct pool_free_list_entry *entry = (struct pool_free_list_entry *) set;
      entry->next = pool->free_list;
      entry->size = set->size;
      pool->free_list = (char *) entry - pool->data;
   }
}

VkResult anv_AllocateDescriptorSets(
    VkDevice                                    _device,
    const VkDescriptorSetAllocateInfo*          pAllocateInfo,
    VkDescriptorSet*                            pDescriptorSets)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, pAllocateInfo->descriptorPool);

   VkResult result = VK_SUCCESS;
   struct anv_descriptor_set *set;
   uint32_t i;

   for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      ANV_FROM_HANDLE(anv_descriptor_set_layout, layout,
                      pAllocateInfo->pSetLayouts[i]);

      result = anv_descriptor_set_create(device, pool, layout, &set);
      if (result != VK_SUCCESS)
         break;

      pDescriptorSets[i] = anv_descriptor_set_to_handle(set);
   }

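   /* If any allocation failed, free the sets created so far so a failed
    * call leaks nothing.
    */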
   if (result != VK_SUCCESS)
      anv_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
                             i, pDescriptorSets);

   return result;
}

VkResult anv_FreeDescriptorSets(
    VkDevice                                    _device,
    VkDescriptorPool                            descriptorPool,
    uint32_t                                    count,
    const VkDescriptorSet*                      pDescriptorSets)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);

   for (uint32_t i = 0; i < count; i++) {
      ANV_FROM_HANDLE(anv_descriptor_set, set, pDescriptorSets[i]);

      if (!set)
         continue;

      anv_descriptor_set_destroy(device, pool, set);
   }

   return VK_SUCCESS;
}

void anv_UpdateDescriptorSets(
    VkDevice                                    _device,
    uint32_t                                    descriptorWriteCount,
    const VkWriteDescriptorSet*                 pDescriptorWrites,
    uint32_t                                    descriptorCopyCount,
    const VkCopyDescriptorSet*                  pDescriptorCopies)
{
   ANV_FROM_HANDLE(anv_device, device, _device);

   for (uint32_t i = 0; i < descriptorWriteCount; i++) {
      const VkWriteDescriptorSet *write = &pDescriptorWrites[i];
      ANV_FROM_HANDLE(anv_descriptor_set, set, write->dstSet);
      const struct anv_descriptor_set_binding_layout *bind_layout =
         &set->layout->binding[write->dstBinding];
      struct anv_descriptor *desc =
         &set->descriptors[bind_layout->descriptor_index];
      desc += write->dstArrayElement;

      assert(write->descriptorType == bind_layout->type);

      switch (write->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            ANV_FROM_HANDLE(anv_sampler, sampler,
                            write->pImageInfo[j].sampler);

            desc[j] = (struct anv_descriptor) {
               .type = VK_DESCRIPTOR_TYPE_SAMPLER,
               .sampler = sampler,
            };
         }
         break;

      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            ANV_FROM_HANDLE(anv_image_view, iview,
                            write->pImageInfo[j].imageView);
            ANV_FROM_HANDLE(anv_sampler, sampler,
                            write->pImageInfo[j].sampler);

            desc[j].type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
            desc[j].image_view = iview;

            /* If this descriptor has an immutable sampler, we don't want
             * to stomp on it.
             */
            if (sampler)
               desc[j].sampler = sampler;
         }
         break;

      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            ANV_FROM_HANDLE(anv_image_view, iview,
                            write->pImageInfo[j].imageView);

            desc[j] = (struct anv_descriptor) {
               .type = write->descriptorType,
               .image_view = iview,
            };
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            ANV_FROM_HANDLE(anv_buffer_view, bview,
                            write->pTexelBufferView[j]);

            desc[j] = (struct anv_descriptor) {
               .type = write->descriptorType,
               .buffer_view = bview,
            };
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            assert(write->pBufferInfo[j].buffer);
            ANV_FROM_HANDLE(anv_buffer, buffer, write->pBufferInfo[j].buffer);
            assert(buffer);

            struct anv_buffer_view *view =
               &set->buffer_views[bind_layout->buffer_index];
            view += write->dstArrayElement + j;

            view->format =
               anv_isl_format_for_descriptor_type(write->descriptorType);
            view->bo = buffer->bo;
            view->offset = buffer->offset + write->pBufferInfo[j].offset;

            /* For buffers with dynamic offsets, we use the full possible
             * range in the surface state and do the actual range-checking
             * in the shader.
             */
            if (bind_layout->dynamic_offset_index >= 0 ||
                write->pBufferInfo[j].range == VK_WHOLE_SIZE)
               view->range = buffer->size - write->pBufferInfo[j].offset;
            else
               view->range = write->pBufferInfo[j].range;

            anv_fill_buffer_surface_state(device, view->surface_state,
                                          view->format,
                                          view->offset, view->range, 1);

            desc[j] = (struct anv_descriptor) {
               .type = write->descriptorType,
               .buffer_view = view,
            };
         }
         break;

      default:
         break;
      }
   }

   for (uint32_t i = 0; i < descriptorCopyCount; i++) {
      const VkCopyDescriptorSet *copy = &pDescriptorCopies[i];
      ANV_FROM_HANDLE(anv_descriptor_set, src, copy->srcSet);
      ANV_FROM_HANDLE(anv_descriptor_set, dst, copy->dstSet);

      const struct anv_descriptor_set_binding_layout *src_layout =
         &src->layout->binding[copy->srcBinding];
      struct anv_descriptor *src_desc =
         &src->descriptors[src_layout->descriptor_index];
      src_desc += copy->srcArrayElement;

      const struct anv_descriptor_set_binding_layout *dst_layout =
         &dst->layout->binding[copy->dstBinding];
      struct anv_descriptor *dst_desc =
         &dst->descriptors[dst_layout->descriptor_index];
      dst_desc += copy->dstArrayElement;

      for (uint32_t j = 0; j < copy->descriptorCount; j++)
         dst_desc[j] = src_desc[j];
   }
}