Added a few more stubs so that control reaches DestroyDevice().
[mesa.git] src/intel/vulkan/anv_descriptor_set.c
1 /*
2 * Copyright © 2015 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 */
23
24 #include <assert.h>
25 #include <stdbool.h>
26 #include <string.h>
27 #include <unistd.h>
28 #include <fcntl.h>
29
30 #include "util/mesa-sha1.h"
31 #include "vk_util.h"
32
33 #include "anv_private.h"
34
35 /*
36 * Descriptor set layouts.
37 */
38
39 static enum anv_descriptor_data
40 anv_descriptor_data_for_type(const struct anv_physical_device *device,
41 VkDescriptorType type)
42 {
43 enum anv_descriptor_data data = 0;
44
45 switch (type) {
46 case VK_DESCRIPTOR_TYPE_SAMPLER:
47 data = ANV_DESCRIPTOR_SAMPLER_STATE;
48 if (device->has_bindless_samplers)
49 data |= ANV_DESCRIPTOR_SAMPLED_IMAGE;
50 break;
51
52 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
53 data = ANV_DESCRIPTOR_SURFACE_STATE |
54 ANV_DESCRIPTOR_SAMPLER_STATE;
55 if (device->has_bindless_images || device->has_bindless_samplers)
56 data |= ANV_DESCRIPTOR_SAMPLED_IMAGE;
57 break;
58
59 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
60 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
61 data = ANV_DESCRIPTOR_SURFACE_STATE;
62 if (device->has_bindless_images)
63 data |= ANV_DESCRIPTOR_SAMPLED_IMAGE;
64 break;
65
66 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
67 data = ANV_DESCRIPTOR_SURFACE_STATE;
68 break;
69
70 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
71 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
72 data = ANV_DESCRIPTOR_SURFACE_STATE;
73 if (device->info.gen < 9)
74 data |= ANV_DESCRIPTOR_IMAGE_PARAM;
75 if (device->has_bindless_images)
76 data |= ANV_DESCRIPTOR_STORAGE_IMAGE;
77 break;
78
79 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
80 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
81 data = ANV_DESCRIPTOR_SURFACE_STATE |
82 ANV_DESCRIPTOR_BUFFER_VIEW;
83 break;
84
85 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
86 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
87 data = ANV_DESCRIPTOR_SURFACE_STATE;
88 break;
89
90 case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:
91 data = ANV_DESCRIPTOR_INLINE_UNIFORM;
92 break;
93
94 default:
95 unreachable("Unsupported descriptor type");
96 }
97
98 /* On gen8 and above when we have softpin enabled, we also need to push
99 * SSBO address ranges so that we can use A64 messages in the shader.
100 */
101 if (device->has_a64_buffer_access &&
102 (type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER ||
103 type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC))
104 data |= ANV_DESCRIPTOR_ADDRESS_RANGE;
105
106    /* On Ivy Bridge and Bay Trail, we need to swizzle textures in the shader.
107     * Do not handle VK_DESCRIPTOR_TYPE_STORAGE_IMAGE and
108     * VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT because they already must
109     * have an identity swizzle.
110     */
111 if (device->info.gen == 7 && !device->info.is_haswell &&
112 (type == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE ||
113 type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER))
114 data |= ANV_DESCRIPTOR_TEXTURE_SWIZZLE;
115
116 return data;
117 }
118
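/* For example, a VK_DESCRIPTOR_TYPE_STORAGE_BUFFER on a device with
 * has_a64_buffer_access gets SURFACE_STATE | BUFFER_VIEW | ADDRESS_RANGE
 * above; of those, only ADDRESS_RANGE takes space in the descriptor buffer,
 * so anv_descriptor_data_size() below returns
 * sizeof(struct anv_address_range_descriptor) for such a binding.
 */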
119 static unsigned
120 anv_descriptor_data_size(enum anv_descriptor_data data)
121 {
122 unsigned size = 0;
123
124 if (data & ANV_DESCRIPTOR_SAMPLED_IMAGE)
125 size += sizeof(struct anv_sampled_image_descriptor);
126
127 if (data & ANV_DESCRIPTOR_STORAGE_IMAGE)
128 size += sizeof(struct anv_storage_image_descriptor);
129
130 if (data & ANV_DESCRIPTOR_IMAGE_PARAM)
131 size += BRW_IMAGE_PARAM_SIZE * 4;
132
133 if (data & ANV_DESCRIPTOR_ADDRESS_RANGE)
134 size += sizeof(struct anv_address_range_descriptor);
135
136 if (data & ANV_DESCRIPTOR_TEXTURE_SWIZZLE)
137 size += sizeof(struct anv_texture_swizzle_descriptor);
138
139 return size;
140 }
141
142 static bool
143 anv_needs_descriptor_buffer(VkDescriptorType desc_type,
144 enum anv_descriptor_data desc_data)
145 {
146 if (desc_type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT ||
147 anv_descriptor_data_size(desc_data) > 0)
148 return true;
149 return false;
150 }
151
152 /** Returns the size in bytes of each descriptor with the given layout */
153 unsigned
154 anv_descriptor_size(const struct anv_descriptor_set_binding_layout *layout)
155 {
156 if (layout->data & ANV_DESCRIPTOR_INLINE_UNIFORM) {
157 assert(layout->data == ANV_DESCRIPTOR_INLINE_UNIFORM);
158 return layout->array_size;
159 }
160
161 unsigned size = anv_descriptor_data_size(layout->data);
162
163 /* For multi-planar bindings, we make every descriptor consume the maximum
164 * number of planes so we don't have to bother with walking arrays and
165 * adding things up every time. Fortunately, YCbCr samplers aren't all
166 * that common and likely won't be in the middle of big arrays.
167 */
168 if (layout->max_plane_count > 1)
169 size *= layout->max_plane_count;
170
171 return size;
172 }
173
174 /** Returns the size in bytes of each descriptor of the given type
175 *
176 * This version of the function does not have access to the entire layout so
177 * it may only work on certain descriptor types where the descriptor size is
178 * entirely determined by the descriptor type. Whenever possible, code should
179 * use anv_descriptor_size() instead.
180 */
181 unsigned
182 anv_descriptor_type_size(const struct anv_physical_device *pdevice,
183 VkDescriptorType type)
184 {
185 assert(type != VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT &&
186 type != VK_DESCRIPTOR_TYPE_SAMPLER &&
187 type != VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE &&
188 type != VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
189
190 return anv_descriptor_data_size(anv_descriptor_data_for_type(pdevice, type));
191 }
192
193 static bool
194 anv_descriptor_data_supports_bindless(const struct anv_physical_device *pdevice,
195 enum anv_descriptor_data data,
196 bool sampler)
197 {
198 if (data & ANV_DESCRIPTOR_ADDRESS_RANGE) {
199 assert(pdevice->has_a64_buffer_access);
200 return true;
201 }
202
203 if (data & ANV_DESCRIPTOR_SAMPLED_IMAGE) {
204 assert(pdevice->has_bindless_images || pdevice->has_bindless_samplers);
205 return sampler ? pdevice->has_bindless_samplers :
206 pdevice->has_bindless_images;
207 }
208
209 if (data & ANV_DESCRIPTOR_STORAGE_IMAGE) {
210 assert(pdevice->has_bindless_images);
211 return true;
212 }
213
214 return false;
215 }
216
217 bool
218 anv_descriptor_supports_bindless(const struct anv_physical_device *pdevice,
219 const struct anv_descriptor_set_binding_layout *binding,
220 bool sampler)
221 {
222 return anv_descriptor_data_supports_bindless(pdevice, binding->data,
223 sampler);
224 }
225
226 bool
227 anv_descriptor_requires_bindless(const struct anv_physical_device *pdevice,
228 const struct anv_descriptor_set_binding_layout *binding,
229 bool sampler)
230 {
231 if (pdevice->always_use_bindless)
232 return anv_descriptor_supports_bindless(pdevice, binding, sampler);
233
234 static const VkDescriptorBindingFlagBitsEXT flags_requiring_bindless =
235 VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT |
236 VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT |
237 VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT;
238
239 return (binding->flags & flags_requiring_bindless) != 0;
240 }
241
242 void anv_GetDescriptorSetLayoutSupport(
243 VkDevice _device,
244 const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
245 VkDescriptorSetLayoutSupport* pSupport)
246 {
247 ANV_FROM_HANDLE(anv_device, device, _device);
248 const struct anv_physical_device *pdevice = device->physical;
249
250 uint32_t surface_count[MESA_SHADER_STAGES] = { 0, };
251 bool needs_descriptor_buffer = false;
252
253 for (uint32_t b = 0; b < pCreateInfo->bindingCount; b++) {
254 const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[b];
255
256 enum anv_descriptor_data desc_data =
257 anv_descriptor_data_for_type(pdevice, binding->descriptorType);
258
259 if (anv_needs_descriptor_buffer(binding->descriptorType, desc_data))
260 needs_descriptor_buffer = true;
261
262 switch (binding->descriptorType) {
263 case VK_DESCRIPTOR_TYPE_SAMPLER:
264 /* There is no real limit on samplers */
265 break;
266
267 case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:
268 /* Inline uniforms don't use a binding */
269 break;
270
271 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
272 if (anv_descriptor_data_supports_bindless(pdevice, desc_data, false))
273 break;
274
275 if (binding->pImmutableSamplers) {
276 for (uint32_t i = 0; i < binding->descriptorCount; i++) {
277 ANV_FROM_HANDLE(anv_sampler, sampler,
278 binding->pImmutableSamplers[i]);
279 anv_foreach_stage(s, binding->stageFlags)
280 surface_count[s] += sampler->n_planes;
281 }
282 } else {
283 anv_foreach_stage(s, binding->stageFlags)
284 surface_count[s] += binding->descriptorCount;
285 }
286 break;
287
288 default:
289 if (anv_descriptor_data_supports_bindless(pdevice, desc_data, false))
290 break;
291
292 anv_foreach_stage(s, binding->stageFlags)
293 surface_count[s] += binding->descriptorCount;
294 break;
295 }
296 }
297
298 for (unsigned s = 0; s < MESA_SHADER_STAGES; s++) {
299 if (needs_descriptor_buffer)
300 surface_count[s] += 1;
301 }
302
303 bool supported = true;
304 for (unsigned s = 0; s < MESA_SHADER_STAGES; s++) {
305 /* Our maximum binding table size is 240 and we need to reserve 8 for
306 * render targets.
307 */
308 if (surface_count[s] > MAX_BINDING_TABLE_SIZE - MAX_RTS)
309 supported = false;
310 }
311
312 pSupport->supported = supported;
313 }
314
315 VkResult anv_CreateDescriptorSetLayout(
316 VkDevice _device,
317 const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
318 const VkAllocationCallbacks* pAllocator,
319 VkDescriptorSetLayout* pSetLayout)
320 {
321 ANV_FROM_HANDLE(anv_device, device, _device);
322
323 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);
324
325 uint32_t max_binding = 0;
326 uint32_t immutable_sampler_count = 0;
327 for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
328 max_binding = MAX2(max_binding, pCreateInfo->pBindings[j].binding);
329
330 /* From the Vulkan 1.1.97 spec for VkDescriptorSetLayoutBinding:
331 *
332 * "If descriptorType specifies a VK_DESCRIPTOR_TYPE_SAMPLER or
333 * VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER type descriptor, then
334 * pImmutableSamplers can be used to initialize a set of immutable
335 * samplers. [...] If descriptorType is not one of these descriptor
336 * types, then pImmutableSamplers is ignored.
337 *
338 * We need to be careful here and only parse pImmutableSamplers if we
339 * have one of the right descriptor types.
340 */
341 VkDescriptorType desc_type = pCreateInfo->pBindings[j].descriptorType;
342 if ((desc_type == VK_DESCRIPTOR_TYPE_SAMPLER ||
343 desc_type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) &&
344 pCreateInfo->pBindings[j].pImmutableSamplers)
345 immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
346 }
347
348 struct anv_descriptor_set_layout *set_layout;
349 struct anv_descriptor_set_binding_layout *bindings;
350 struct anv_sampler **samplers;
351
352    /* We need to allocate descriptor set layouts off the device allocator
353 * with DEVICE scope because they are reference counted and may not be
354 * destroyed when vkDestroyDescriptorSetLayout is called.
355 */
356 ANV_MULTIALLOC(ma);
357 anv_multialloc_add(&ma, &set_layout, 1);
358 anv_multialloc_add(&ma, &bindings, max_binding + 1);
359 anv_multialloc_add(&ma, &samplers, immutable_sampler_count);
360
361 if (!anv_multialloc_alloc(&ma, &device->vk.alloc,
362 VK_SYSTEM_ALLOCATION_SCOPE_DEVICE))
363 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
364
365 memset(set_layout, 0, sizeof(*set_layout));
366 vk_object_base_init(&device->vk, &set_layout->base,
367 VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT);
368 set_layout->ref_cnt = 1;
369 set_layout->binding_count = max_binding + 1;
370
371 for (uint32_t b = 0; b <= max_binding; b++) {
372 /* Initialize all binding_layout entries to -1 */
373 memset(&set_layout->binding[b], -1, sizeof(set_layout->binding[b]));
374
375 set_layout->binding[b].flags = 0;
376 set_layout->binding[b].data = 0;
377 set_layout->binding[b].max_plane_count = 0;
378 set_layout->binding[b].array_size = 0;
379 set_layout->binding[b].immutable_samplers = NULL;
380 }
381
382 /* Initialize all samplers to 0 */
383 memset(samplers, 0, immutable_sampler_count * sizeof(*samplers));
384
385 uint32_t buffer_view_count = 0;
386 uint32_t dynamic_offset_count = 0;
387 uint32_t descriptor_buffer_size = 0;
388
389 for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
390 const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[j];
391 uint32_t b = binding->binding;
392 /* We temporarily store pCreateInfo->pBindings[] index (plus one) in the
393 * immutable_samplers pointer. This provides us with a quick-and-dirty
394 * way to sort the bindings by binding number.
395 */
396 set_layout->binding[b].immutable_samplers = (void *)(uintptr_t)(j + 1);
397 }
398
399 const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT *binding_flags_info =
400 vk_find_struct_const(pCreateInfo->pNext,
401 DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT);
402
403 for (uint32_t b = 0; b <= max_binding; b++) {
404 /* We stashed the pCreateInfo->pBindings[] index (plus one) in the
405 * immutable_samplers pointer. Check for NULL (empty binding) and then
406 * reset it and compute the index.
407 */
408 if (set_layout->binding[b].immutable_samplers == NULL)
409 continue;
410 const uint32_t info_idx =
411 (uintptr_t)(void *)set_layout->binding[b].immutable_samplers - 1;
412 set_layout->binding[b].immutable_samplers = NULL;
413
414 const VkDescriptorSetLayoutBinding *binding =
415 &pCreateInfo->pBindings[info_idx];
416
417 if (binding->descriptorCount == 0)
418 continue;
419
420 #ifndef NDEBUG
421 set_layout->binding[b].type = binding->descriptorType;
422 #endif
423
424 if (binding_flags_info && binding_flags_info->bindingCount > 0) {
425 assert(binding_flags_info->bindingCount == pCreateInfo->bindingCount);
426 set_layout->binding[b].flags =
427 binding_flags_info->pBindingFlags[info_idx];
428 }
429
430 set_layout->binding[b].data =
431 anv_descriptor_data_for_type(device->physical,
432 binding->descriptorType);
433 set_layout->binding[b].array_size = binding->descriptorCount;
434 set_layout->binding[b].descriptor_index = set_layout->size;
435 set_layout->size += binding->descriptorCount;
436
437 if (set_layout->binding[b].data & ANV_DESCRIPTOR_BUFFER_VIEW) {
438 set_layout->binding[b].buffer_view_index = buffer_view_count;
439 buffer_view_count += binding->descriptorCount;
440 }
441
442 switch (binding->descriptorType) {
443 case VK_DESCRIPTOR_TYPE_SAMPLER:
444 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
445 set_layout->binding[b].max_plane_count = 1;
446 if (binding->pImmutableSamplers) {
447 set_layout->binding[b].immutable_samplers = samplers;
448 samplers += binding->descriptorCount;
449
450 for (uint32_t i = 0; i < binding->descriptorCount; i++) {
451 ANV_FROM_HANDLE(anv_sampler, sampler,
452 binding->pImmutableSamplers[i]);
453
454 set_layout->binding[b].immutable_samplers[i] = sampler;
455 if (set_layout->binding[b].max_plane_count < sampler->n_planes)
456 set_layout->binding[b].max_plane_count = sampler->n_planes;
457 }
458 }
459 break;
460
461 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
462 set_layout->binding[b].max_plane_count = 1;
463 break;
464
465 default:
466 break;
467 }
468
469 switch (binding->descriptorType) {
470 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
471 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
472 set_layout->binding[b].dynamic_offset_index = dynamic_offset_count;
473 set_layout->dynamic_offset_stages[dynamic_offset_count] = binding->stageFlags;
474 dynamic_offset_count += binding->descriptorCount;
475 assert(dynamic_offset_count < MAX_DYNAMIC_BUFFERS);
476 break;
477
478 default:
479 break;
480 }
481
482 if (binding->descriptorType ==
483 VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
484 /* Inline uniform blocks are specified to use the descriptor array
485 * size as the size in bytes of the block.
486 */
487 descriptor_buffer_size = align_u32(descriptor_buffer_size, 32);
488 set_layout->binding[b].descriptor_offset = descriptor_buffer_size;
489 descriptor_buffer_size += binding->descriptorCount;
490 } else {
491 set_layout->binding[b].descriptor_offset = descriptor_buffer_size;
492 descriptor_buffer_size += anv_descriptor_size(&set_layout->binding[b]) *
493 binding->descriptorCount;
494 }
495
496 set_layout->shader_stages |= binding->stageFlags;
497 }
498
499 set_layout->buffer_view_count = buffer_view_count;
500 set_layout->dynamic_offset_count = dynamic_offset_count;
501 set_layout->descriptor_buffer_size = descriptor_buffer_size;
502
503 *pSetLayout = anv_descriptor_set_layout_to_handle(set_layout);
504
505 return VK_SUCCESS;
506 }
507
508 void
509 anv_descriptor_set_layout_destroy(struct anv_device *device,
510 struct anv_descriptor_set_layout *layout)
511 {
512 assert(layout->ref_cnt == 0);
513 vk_object_base_finish(&layout->base);
514 vk_free(&device->vk.alloc, layout);
515 }
516
517 void anv_DestroyDescriptorSetLayout(
518 VkDevice _device,
519 VkDescriptorSetLayout _set_layout,
520 const VkAllocationCallbacks* pAllocator)
521 {
522 ANV_FROM_HANDLE(anv_device, device, _device);
523 ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout, _set_layout);
524
525 if (!set_layout)
526 return;
527
528 anv_descriptor_set_layout_unref(device, set_layout);
529 }
530
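/* These helpers fold the descriptor set layout fields that can affect the
 * generated shaders (including any YCbCr conversion on immutable samplers)
 * into the pipeline layout SHA-1 computed in anv_CreatePipelineLayout()
 * below.
 */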
531 #define SHA1_UPDATE_VALUE(ctx, x) _mesa_sha1_update(ctx, &(x), sizeof(x));
532
533 static void
534 sha1_update_immutable_sampler(struct mesa_sha1 *ctx,
535 const struct anv_sampler *sampler)
536 {
537 if (!sampler->conversion)
538 return;
539
540 /* The only thing that affects the shader is ycbcr conversion */
541 _mesa_sha1_update(ctx, sampler->conversion,
542 sizeof(*sampler->conversion));
543 }
544
545 static void
546 sha1_update_descriptor_set_binding_layout(struct mesa_sha1 *ctx,
547 const struct anv_descriptor_set_binding_layout *layout)
548 {
549 SHA1_UPDATE_VALUE(ctx, layout->flags);
550 SHA1_UPDATE_VALUE(ctx, layout->data);
551 SHA1_UPDATE_VALUE(ctx, layout->max_plane_count);
552 SHA1_UPDATE_VALUE(ctx, layout->array_size);
553 SHA1_UPDATE_VALUE(ctx, layout->descriptor_index);
554 SHA1_UPDATE_VALUE(ctx, layout->dynamic_offset_index);
555 SHA1_UPDATE_VALUE(ctx, layout->buffer_view_index);
556 SHA1_UPDATE_VALUE(ctx, layout->descriptor_offset);
557
558 if (layout->immutable_samplers) {
559 for (uint16_t i = 0; i < layout->array_size; i++)
560 sha1_update_immutable_sampler(ctx, layout->immutable_samplers[i]);
561 }
562 }
563
564 static void
565 sha1_update_descriptor_set_layout(struct mesa_sha1 *ctx,
566 const struct anv_descriptor_set_layout *layout)
567 {
568 SHA1_UPDATE_VALUE(ctx, layout->binding_count);
569 SHA1_UPDATE_VALUE(ctx, layout->size);
570 SHA1_UPDATE_VALUE(ctx, layout->shader_stages);
571 SHA1_UPDATE_VALUE(ctx, layout->buffer_view_count);
572 SHA1_UPDATE_VALUE(ctx, layout->dynamic_offset_count);
573 SHA1_UPDATE_VALUE(ctx, layout->descriptor_buffer_size);
574
575 for (uint16_t i = 0; i < layout->binding_count; i++)
576 sha1_update_descriptor_set_binding_layout(ctx, &layout->binding[i]);
577 }
578
579 /*
580 * Pipeline layouts. These have nothing to do with the pipeline. They are
581 * just multiple descriptor set layouts pasted together
582 */
583
584 VkResult anv_CreatePipelineLayout(
585 VkDevice _device,
586 const VkPipelineLayoutCreateInfo* pCreateInfo,
587 const VkAllocationCallbacks* pAllocator,
588 VkPipelineLayout* pPipelineLayout)
589 {
590 ANV_FROM_HANDLE(anv_device, device, _device);
591 struct anv_pipeline_layout *layout;
592
593 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);
594
595 layout = vk_alloc2(&device->vk.alloc, pAllocator, sizeof(*layout), 8,
596 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
597 if (layout == NULL)
598 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
599
600 vk_object_base_init(&device->vk, &layout->base,
601 VK_OBJECT_TYPE_PIPELINE_LAYOUT);
602 layout->num_sets = pCreateInfo->setLayoutCount;
603
604 unsigned dynamic_offset_count = 0;
605
606 for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {
607 ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout,
608 pCreateInfo->pSetLayouts[set]);
609 layout->set[set].layout = set_layout;
610 anv_descriptor_set_layout_ref(set_layout);
611
612 layout->set[set].dynamic_offset_start = dynamic_offset_count;
613 for (uint32_t b = 0; b < set_layout->binding_count; b++) {
614 if (set_layout->binding[b].dynamic_offset_index < 0)
615 continue;
616
617 dynamic_offset_count += set_layout->binding[b].array_size;
618 }
619 }
620 assert(dynamic_offset_count < MAX_DYNAMIC_BUFFERS);
621
622 struct mesa_sha1 ctx;
623 _mesa_sha1_init(&ctx);
624 for (unsigned s = 0; s < layout->num_sets; s++) {
625 sha1_update_descriptor_set_layout(&ctx, layout->set[s].layout);
626 _mesa_sha1_update(&ctx, &layout->set[s].dynamic_offset_start,
627 sizeof(layout->set[s].dynamic_offset_start));
628 }
629 _mesa_sha1_update(&ctx, &layout->num_sets, sizeof(layout->num_sets));
630 _mesa_sha1_final(&ctx, layout->sha1);
631
632 *pPipelineLayout = anv_pipeline_layout_to_handle(layout);
633
634 return VK_SUCCESS;
635 }
636
637 void anv_DestroyPipelineLayout(
638 VkDevice _device,
639 VkPipelineLayout _pipelineLayout,
640 const VkAllocationCallbacks* pAllocator)
641 {
642 ANV_FROM_HANDLE(anv_device, device, _device);
643 ANV_FROM_HANDLE(anv_pipeline_layout, pipeline_layout, _pipelineLayout);
644
645 if (!pipeline_layout)
646 return;
647
648 for (uint32_t i = 0; i < pipeline_layout->num_sets; i++)
649 anv_descriptor_set_layout_unref(device, pipeline_layout->set[i].layout);
650
651 vk_object_base_finish(&pipeline_layout->base);
652 vk_free2(&device->vk.alloc, pAllocator, pipeline_layout);
653 }
654
655 /*
656 * Descriptor pools.
657 *
658 * These are implemented using a big pool of memory and a free-list for the
659 * host memory allocations and a state_stream and a free list for the buffer
660 * view surface state. The spec allows us to fail to allocate due to
661 * fragmentation in all cases but two: 1) after pool reset, allocating up
662 * until the pool size with no freeing must succeed and 2) allocating and
663  * freeing only descriptor sets with the same layout. Case 1) is easy enough,
664  * and the free lists let us recycle blocks for case 2).
665 */
666
667 /* The vma heap reserves 0 to mean NULL; we have to offset by some amount to
668 * ensure we can allocate the entire BO without hitting zero. The actual
669 * amount doesn't matter.
670 */
671 #define POOL_HEAP_OFFSET 64
672
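/* Sentinel for pool->free_list meaning "no free blocks".  Offset 0 into
 * pool->data is a valid allocation, so 0 cannot be used as the terminator.
 */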
673 #define EMPTY 1
674
675 VkResult anv_CreateDescriptorPool(
676 VkDevice _device,
677 const VkDescriptorPoolCreateInfo* pCreateInfo,
678 const VkAllocationCallbacks* pAllocator,
679 VkDescriptorPool* pDescriptorPool)
680 {
681 ANV_FROM_HANDLE(anv_device, device, _device);
682 struct anv_descriptor_pool *pool;
683
684 const VkDescriptorPoolInlineUniformBlockCreateInfoEXT *inline_info =
685 vk_find_struct_const(pCreateInfo->pNext,
686 DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT);
687
688 uint32_t descriptor_count = 0;
689 uint32_t buffer_view_count = 0;
690 uint32_t descriptor_bo_size = 0;
691 for (uint32_t i = 0; i < pCreateInfo->poolSizeCount; i++) {
692 enum anv_descriptor_data desc_data =
693 anv_descriptor_data_for_type(device->physical,
694 pCreateInfo->pPoolSizes[i].type);
695
696 if (desc_data & ANV_DESCRIPTOR_BUFFER_VIEW)
697 buffer_view_count += pCreateInfo->pPoolSizes[i].descriptorCount;
698
699 unsigned desc_data_size = anv_descriptor_data_size(desc_data) *
700 pCreateInfo->pPoolSizes[i].descriptorCount;
701
702 /* Combined image sampler descriptors can take up to 3 slots if they
703 * hold a YCbCr image.
704 */
705 if (pCreateInfo->pPoolSizes[i].type ==
706 VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
707 desc_data_size *= 3;
708
709 if (pCreateInfo->pPoolSizes[i].type ==
710 VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
711 /* Inline uniform blocks are specified to use the descriptor array
712 * size as the size in bytes of the block.
713 */
714 assert(inline_info);
715 desc_data_size += pCreateInfo->pPoolSizes[i].descriptorCount;
716 }
717
718 descriptor_bo_size += desc_data_size;
719
720 descriptor_count += pCreateInfo->pPoolSizes[i].descriptorCount;
721 }
722 /* We have to align descriptor buffer allocations to 32B so that we can
723 * push descriptor buffers. This means that each descriptor buffer
724 * allocated may burn up to 32B of extra space to get the right alignment.
725 * (Technically, it's at most 28B because we're always going to start at
726 * least 4B aligned but we're being conservative here.) Allocate enough
727 * extra space that we can chop it into maxSets pieces and align each one
728 * of them to 32B.
729 */
730 descriptor_bo_size += 32 * pCreateInfo->maxSets;
731 /* We align inline uniform blocks to 32B */
732 if (inline_info)
733 descriptor_bo_size += 32 * inline_info->maxInlineUniformBlockBindings;
734 descriptor_bo_size = ALIGN(descriptor_bo_size, 4096);
735
736 const size_t pool_size =
737 pCreateInfo->maxSets * sizeof(struct anv_descriptor_set) +
738 descriptor_count * sizeof(struct anv_descriptor) +
739 buffer_view_count * sizeof(struct anv_buffer_view);
740 const size_t total_size = sizeof(*pool) + pool_size;
741
742 pool = vk_alloc2(&device->vk.alloc, pAllocator, total_size, 8,
743 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
744 if (!pool)
745 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
746
747 vk_object_base_init(&device->vk, &pool->base,
748 VK_OBJECT_TYPE_DESCRIPTOR_POOL);
749 pool->size = pool_size;
750 pool->next = 0;
751 pool->free_list = EMPTY;
752
753 if (descriptor_bo_size > 0) {
754 VkResult result = anv_device_alloc_bo(device,
755 descriptor_bo_size,
756 ANV_BO_ALLOC_MAPPED |
757 ANV_BO_ALLOC_SNOOPED,
758 0 /* explicit_address */,
759 &pool->bo);
760 if (result != VK_SUCCESS) {
761 vk_free2(&device->vk.alloc, pAllocator, pool);
762 return result;
763 }
764
765 util_vma_heap_init(&pool->bo_heap, POOL_HEAP_OFFSET, descriptor_bo_size);
766 } else {
767 pool->bo = NULL;
768 }
769
770 anv_state_stream_init(&pool->surface_state_stream,
771 &device->surface_state_pool, 4096);
772 pool->surface_state_free_list = NULL;
773
774 list_inithead(&pool->desc_sets);
775
776 *pDescriptorPool = anv_descriptor_pool_to_handle(pool);
777
778 return VK_SUCCESS;
779 }
780
781 void anv_DestroyDescriptorPool(
782 VkDevice _device,
783 VkDescriptorPool _pool,
784 const VkAllocationCallbacks* pAllocator)
785 {
786 ANV_FROM_HANDLE(anv_device, device, _device);
787 ANV_FROM_HANDLE(anv_descriptor_pool, pool, _pool);
788
789 if (!pool)
790 return;
791
792 list_for_each_entry_safe(struct anv_descriptor_set, set,
793 &pool->desc_sets, pool_link) {
794 anv_descriptor_set_layout_unref(device, set->layout);
795 }
796
797 if (pool->bo)
798 anv_device_release_bo(device, pool->bo);
799 anv_state_stream_finish(&pool->surface_state_stream);
800
801 vk_object_base_finish(&pool->base);
802 vk_free2(&device->vk.alloc, pAllocator, pool);
803 }
804
805 VkResult anv_ResetDescriptorPool(
806 VkDevice _device,
807 VkDescriptorPool descriptorPool,
808 VkDescriptorPoolResetFlags flags)
809 {
810 ANV_FROM_HANDLE(anv_device, device, _device);
811 ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);
812
813 list_for_each_entry_safe(struct anv_descriptor_set, set,
814 &pool->desc_sets, pool_link) {
815 anv_descriptor_set_layout_unref(device, set->layout);
816 }
817 list_inithead(&pool->desc_sets);
818
819 pool->next = 0;
820 pool->free_list = EMPTY;
821
822 if (pool->bo) {
823 util_vma_heap_finish(&pool->bo_heap);
824 util_vma_heap_init(&pool->bo_heap, POOL_HEAP_OFFSET, pool->bo->size);
825 }
826
827 anv_state_stream_finish(&pool->surface_state_stream);
828 anv_state_stream_init(&pool->surface_state_stream,
829 &device->surface_state_pool, 4096);
830 pool->surface_state_free_list = NULL;
831
832 return VK_SUCCESS;
833 }
834
835 struct pool_free_list_entry {
836 uint32_t next;
837 uint32_t size;
838 };
839
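/* Host memory for descriptor sets is carved out of pool->data with a simple
 * bump allocator (pool->next).  Freed sets are turned into
 * pool_free_list_entry records in place and chained onto pool->free_list,
 * which is searched first-fit on the next allocation.
 */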
840 static VkResult
841 anv_descriptor_pool_alloc_set(struct anv_descriptor_pool *pool,
842 uint32_t size,
843 struct anv_descriptor_set **set)
844 {
845 if (size <= pool->size - pool->next) {
846 *set = (struct anv_descriptor_set *) (pool->data + pool->next);
847 (*set)->size = size;
848 pool->next += size;
849 return VK_SUCCESS;
850 } else {
851 struct pool_free_list_entry *entry;
852 uint32_t *link = &pool->free_list;
853 for (uint32_t f = pool->free_list; f != EMPTY; f = entry->next) {
854 entry = (struct pool_free_list_entry *) (pool->data + f);
855 if (size <= entry->size) {
856 *link = entry->next;
857 *set = (struct anv_descriptor_set *) entry;
858 (*set)->size = entry->size;
859 return VK_SUCCESS;
860 }
861 link = &entry->next;
862 }
863
864 if (pool->free_list != EMPTY) {
865 return vk_error(VK_ERROR_FRAGMENTED_POOL);
866 } else {
867 return vk_error(VK_ERROR_OUT_OF_POOL_MEMORY);
868 }
869 }
870 }
871
872 static void
873 anv_descriptor_pool_free_set(struct anv_descriptor_pool *pool,
874 struct anv_descriptor_set *set)
875 {
876 /* Put the descriptor set allocation back on the free list. */
877 const uint32_t index = (char *) set - pool->data;
878 if (index + set->size == pool->next) {
879 pool->next = index;
880 } else {
881 struct pool_free_list_entry *entry = (struct pool_free_list_entry *) set;
882 entry->next = pool->free_list;
883 entry->size = set->size;
884 pool->free_list = (char *) entry - pool->data;
885 }
886 }
887
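/* Buffer view surface states are 64B states allocated from the pool's
 * surface_state_stream.  When one is freed, the start of its CPU mapping is
 * reused to hold this entry so the state can be chained onto
 * pool->surface_state_free_list and recycled later.
 */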
888 struct surface_state_free_list_entry {
889 void *next;
890 struct anv_state state;
891 };
892
893 static struct anv_state
894 anv_descriptor_pool_alloc_state(struct anv_descriptor_pool *pool)
895 {
896 struct surface_state_free_list_entry *entry =
897 pool->surface_state_free_list;
898
899 if (entry) {
900 struct anv_state state = entry->state;
901 pool->surface_state_free_list = entry->next;
902 assert(state.alloc_size == 64);
903 return state;
904 } else {
905 return anv_state_stream_alloc(&pool->surface_state_stream, 64, 64);
906 }
907 }
908
909 static void
910 anv_descriptor_pool_free_state(struct anv_descriptor_pool *pool,
911 struct anv_state state)
912 {
913 /* Put the buffer view surface state back on the free list. */
914 struct surface_state_free_list_entry *entry = state.map;
915 entry->next = pool->surface_state_free_list;
916 entry->state = state;
917 pool->surface_state_free_list = entry;
918 }
919
920 size_t
921 anv_descriptor_set_layout_size(const struct anv_descriptor_set_layout *layout)
922 {
923 return
924 sizeof(struct anv_descriptor_set) +
925 layout->size * sizeof(struct anv_descriptor) +
926 layout->buffer_view_count * sizeof(struct anv_buffer_view);
927 }
928
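/* Creating a set involves up to three allocations: the host-side struct plus
 * descriptor and buffer view arrays carved from pool->data, an optional
 * slice of the pool's descriptor BO (desc_mem) when the layout has a
 * descriptor buffer, and one surface state per buffer view.
 */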
929 VkResult
930 anv_descriptor_set_create(struct anv_device *device,
931 struct anv_descriptor_pool *pool,
932 struct anv_descriptor_set_layout *layout,
933 struct anv_descriptor_set **out_set)
934 {
935 struct anv_descriptor_set *set;
936 const size_t size = anv_descriptor_set_layout_size(layout);
937
938 VkResult result = anv_descriptor_pool_alloc_set(pool, size, &set);
939 if (result != VK_SUCCESS)
940 return result;
941
942 if (layout->descriptor_buffer_size) {
943 /* Align the size to 32 so that alignment gaps don't cause extra holes
944 * in the heap which can lead to bad performance.
945 */
946 uint32_t set_buffer_size = ALIGN(layout->descriptor_buffer_size, 32);
947 uint64_t pool_vma_offset =
948 util_vma_heap_alloc(&pool->bo_heap, set_buffer_size, 32);
949 if (pool_vma_offset == 0) {
950 anv_descriptor_pool_free_set(pool, set);
951 return vk_error(VK_ERROR_FRAGMENTED_POOL);
952 }
953 assert(pool_vma_offset >= POOL_HEAP_OFFSET &&
954 pool_vma_offset - POOL_HEAP_OFFSET <= INT32_MAX);
955 set->desc_mem.offset = pool_vma_offset - POOL_HEAP_OFFSET;
956 set->desc_mem.alloc_size = set_buffer_size;
957 set->desc_mem.map = pool->bo->map + set->desc_mem.offset;
958
959 set->desc_surface_state = anv_descriptor_pool_alloc_state(pool);
960 anv_fill_buffer_surface_state(device, set->desc_surface_state,
961 ISL_FORMAT_R32G32B32A32_FLOAT,
962 (struct anv_address) {
963 .bo = pool->bo,
964 .offset = set->desc_mem.offset,
965 },
966 layout->descriptor_buffer_size, 1);
967 } else {
968 set->desc_mem = ANV_STATE_NULL;
969 set->desc_surface_state = ANV_STATE_NULL;
970 }
971
972 vk_object_base_init(&device->vk, &set->base,
973 VK_OBJECT_TYPE_DESCRIPTOR_SET);
974 set->pool = pool;
975 set->layout = layout;
976 anv_descriptor_set_layout_ref(layout);
977
978 set->buffer_views =
979 (struct anv_buffer_view *) &set->descriptors[layout->size];
980 set->buffer_view_count = layout->buffer_view_count;
981
982 /* By defining the descriptors to be zero now, we can later verify that
983 * a descriptor has not been populated with user data.
984 */
985 memset(set->descriptors, 0, sizeof(struct anv_descriptor) * layout->size);
986
987 /* Go through and fill out immutable samplers if we have any */
988 struct anv_descriptor *desc = set->descriptors;
989 for (uint32_t b = 0; b < layout->binding_count; b++) {
990 if (layout->binding[b].immutable_samplers) {
991 for (uint32_t i = 0; i < layout->binding[b].array_size; i++) {
992 /* The type will get changed to COMBINED_IMAGE_SAMPLER in
993 * UpdateDescriptorSets if needed. However, if the descriptor
994 * set has an immutable sampler, UpdateDescriptorSets may never
995 * touch it, so we need to make sure it's 100% valid now.
996 *
997 * We don't need to actually provide a sampler because the helper
998 * will always write in the immutable sampler regardless of what
999 * is in the sampler parameter.
1000 */
1001 VkDescriptorImageInfo info = { };
1002 anv_descriptor_set_write_image_view(device, set, &info,
1003 VK_DESCRIPTOR_TYPE_SAMPLER,
1004 b, i);
1005 }
1006 }
1007 desc += layout->binding[b].array_size;
1008 }
1009
1010 /* Allocate surface state for the buffer views. */
1011 for (uint32_t b = 0; b < layout->buffer_view_count; b++) {
1012 set->buffer_views[b].surface_state =
1013 anv_descriptor_pool_alloc_state(pool);
1014 }
1015
1016 list_addtail(&set->pool_link, &pool->desc_sets);
1017
1018 *out_set = set;
1019
1020 return VK_SUCCESS;
1021 }
1022
1023 void
1024 anv_descriptor_set_destroy(struct anv_device *device,
1025 struct anv_descriptor_pool *pool,
1026 struct anv_descriptor_set *set)
1027 {
1028 anv_descriptor_set_layout_unref(device, set->layout);
1029
1030 if (set->desc_mem.alloc_size) {
1031 util_vma_heap_free(&pool->bo_heap,
1032 (uint64_t)set->desc_mem.offset + POOL_HEAP_OFFSET,
1033 set->desc_mem.alloc_size);
1034 anv_descriptor_pool_free_state(pool, set->desc_surface_state);
1035 }
1036
1037 for (uint32_t b = 0; b < set->buffer_view_count; b++)
1038 anv_descriptor_pool_free_state(pool, set->buffer_views[b].surface_state);
1039
1040 list_del(&set->pool_link);
1041
1042 vk_object_base_finish(&set->base);
1043 anv_descriptor_pool_free_set(pool, set);
1044 }
1045
1046 VkResult anv_AllocateDescriptorSets(
1047 VkDevice _device,
1048 const VkDescriptorSetAllocateInfo* pAllocateInfo,
1049 VkDescriptorSet* pDescriptorSets)
1050 {
1051 ANV_FROM_HANDLE(anv_device, device, _device);
1052 ANV_FROM_HANDLE(anv_descriptor_pool, pool, pAllocateInfo->descriptorPool);
1053
1054 VkResult result = VK_SUCCESS;
1055 struct anv_descriptor_set *set;
1056 uint32_t i;
1057
1058 for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
1059 ANV_FROM_HANDLE(anv_descriptor_set_layout, layout,
1060 pAllocateInfo->pSetLayouts[i]);
1061
1062 result = anv_descriptor_set_create(device, pool, layout, &set);
1063 if (result != VK_SUCCESS)
1064 break;
1065
1066 pDescriptorSets[i] = anv_descriptor_set_to_handle(set);
1067 }
1068
1069 if (result != VK_SUCCESS)
1070 anv_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
1071 i, pDescriptorSets);
1072
1073 return result;
1074 }
1075
1076 VkResult anv_FreeDescriptorSets(
1077 VkDevice _device,
1078 VkDescriptorPool descriptorPool,
1079 uint32_t count,
1080 const VkDescriptorSet* pDescriptorSets)
1081 {
1082 ANV_FROM_HANDLE(anv_device, device, _device);
1083 ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);
1084
1085 for (uint32_t i = 0; i < count; i++) {
1086 ANV_FROM_HANDLE(anv_descriptor_set, set, pDescriptorSets[i]);
1087
1088 if (!set)
1089 continue;
1090
1091 anv_descriptor_set_destroy(device, pool, set);
1092 }
1093
1094 return VK_SUCCESS;
1095 }
1096
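/* Writes the legacy brw_image_param block.  This is only used for bindings
 * with ANV_DESCRIPTOR_IMAGE_PARAM set, i.e. storage images and storage texel
 * buffers on gen < 9 (see anv_descriptor_data_for_type).
 */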
1097 static void
1098 anv_descriptor_set_write_image_param(uint32_t *param_desc_map,
1099 const struct brw_image_param *param)
1100 {
1101 #define WRITE_PARAM_FIELD(field, FIELD) \
1102 for (unsigned i = 0; i < ARRAY_SIZE(param->field); i++) \
1103 param_desc_map[BRW_IMAGE_PARAM_##FIELD##_OFFSET + i] = param->field[i]
1104
1105 WRITE_PARAM_FIELD(offset, OFFSET);
1106 WRITE_PARAM_FIELD(size, SIZE);
1107 WRITE_PARAM_FIELD(stride, STRIDE);
1108 WRITE_PARAM_FIELD(tiling, TILING);
1109 WRITE_PARAM_FIELD(swizzling, SWIZZLING);
1110 WRITE_PARAM_FIELD(size, SIZE);
1111
1112 #undef WRITE_PARAM_FIELD
1113 }
1114
1115 static uint32_t
1116 anv_surface_state_to_handle(struct anv_state state)
1117 {
1118 /* Bits 31:12 of the bindless surface offset in the extended message
1119     * descriptor are bits 25:6 of the byte-based address.
1120 */
1121 assert(state.offset >= 0);
1122 uint32_t offset = state.offset;
1123 assert((offset & 0x3f) == 0 && offset < (1 << 26));
1124 return offset << 6;
1125 }
1126
1127 void
1128 anv_descriptor_set_write_image_view(struct anv_device *device,
1129 struct anv_descriptor_set *set,
1130 const VkDescriptorImageInfo * const info,
1131 VkDescriptorType type,
1132 uint32_t binding,
1133 uint32_t element)
1134 {
1135 const struct anv_descriptor_set_binding_layout *bind_layout =
1136 &set->layout->binding[binding];
1137 struct anv_descriptor *desc =
1138 &set->descriptors[bind_layout->descriptor_index + element];
1139 struct anv_image_view *image_view = NULL;
1140 struct anv_sampler *sampler = NULL;
1141
1142 /* We get called with just VK_DESCRIPTOR_TYPE_SAMPLER as part of descriptor
1143 * set initialization to set the bindless samplers.
1144 */
1145 assert(type == bind_layout->type ||
1146 type == VK_DESCRIPTOR_TYPE_SAMPLER);
1147
1148 switch (type) {
1149 case VK_DESCRIPTOR_TYPE_SAMPLER:
1150 sampler = bind_layout->immutable_samplers ?
1151 bind_layout->immutable_samplers[element] :
1152 anv_sampler_from_handle(info->sampler);
1153 break;
1154
1155 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
1156 image_view = anv_image_view_from_handle(info->imageView);
1157 sampler = bind_layout->immutable_samplers ?
1158 bind_layout->immutable_samplers[element] :
1159 anv_sampler_from_handle(info->sampler);
1160 break;
1161
1162 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
1163 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
1164 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
1165 image_view = anv_image_view_from_handle(info->imageView);
1166 break;
1167
1168 default:
1169 unreachable("invalid descriptor type");
1170 }
1171
1172 *desc = (struct anv_descriptor) {
1173 .type = type,
1174 .layout = info->imageLayout,
1175 .image_view = image_view,
1176 .sampler = sampler,
1177 };
1178
1179 void *desc_map = set->desc_mem.map + bind_layout->descriptor_offset +
1180 element * anv_descriptor_size(bind_layout);
1181 memset(desc_map, 0, anv_descriptor_size(bind_layout));
1182
1183 if (bind_layout->data & ANV_DESCRIPTOR_SAMPLED_IMAGE) {
1184 struct anv_sampled_image_descriptor desc_data[3];
1185 memset(desc_data, 0, sizeof(desc_data));
1186
1187 if (image_view) {
1188 for (unsigned p = 0; p < image_view->n_planes; p++) {
1189 struct anv_surface_state sstate =
1190 (desc->layout == VK_IMAGE_LAYOUT_GENERAL) ?
1191 image_view->planes[p].general_sampler_surface_state :
1192 image_view->planes[p].optimal_sampler_surface_state;
1193 desc_data[p].image = anv_surface_state_to_handle(sstate.state);
1194 }
1195 }
1196
1197 if (sampler) {
1198 for (unsigned p = 0; p < sampler->n_planes; p++)
1199 desc_data[p].sampler = sampler->bindless_state.offset + p * 32;
1200 }
1201
1202       /* We may have max_plane_count == 0 if this isn't a sampled image, but it
1203        * can be no more than the size of our array of handles.
1204 */
1205 assert(bind_layout->max_plane_count <= ARRAY_SIZE(desc_data));
1206 memcpy(desc_map, desc_data,
1207 MAX2(1, bind_layout->max_plane_count) * sizeof(desc_data[0]));
1208 }
1209
1210 if (image_view == NULL)
1211 return;
1212
1213 if (bind_layout->data & ANV_DESCRIPTOR_STORAGE_IMAGE) {
1214 assert(!(bind_layout->data & ANV_DESCRIPTOR_IMAGE_PARAM));
1215 assert(image_view->n_planes == 1);
1216 struct anv_storage_image_descriptor desc_data = {
1217 .read_write = anv_surface_state_to_handle(
1218 image_view->planes[0].storage_surface_state.state),
1219 .write_only = anv_surface_state_to_handle(
1220 image_view->planes[0].writeonly_storage_surface_state.state),
1221 };
1222 memcpy(desc_map, &desc_data, sizeof(desc_data));
1223 }
1224
1225 if (bind_layout->data & ANV_DESCRIPTOR_IMAGE_PARAM) {
1226 /* Storage images can only ever have one plane */
1227 assert(image_view->n_planes == 1);
1228 const struct brw_image_param *image_param =
1229 &image_view->planes[0].storage_image_param;
1230
1231 anv_descriptor_set_write_image_param(desc_map, image_param);
1232 }
1233
1234 if (bind_layout->data & ANV_DESCRIPTOR_TEXTURE_SWIZZLE) {
1235 assert(!(bind_layout->data & ANV_DESCRIPTOR_SAMPLED_IMAGE));
1236 assert(image_view);
1237 struct anv_texture_swizzle_descriptor desc_data[3];
1238 memset(desc_data, 0, sizeof(desc_data));
1239
1240 for (unsigned p = 0; p < image_view->n_planes; p++) {
1241 desc_data[p] = (struct anv_texture_swizzle_descriptor) {
1242 .swizzle = {
1243 (uint8_t)image_view->planes[p].isl.swizzle.r,
1244 (uint8_t)image_view->planes[p].isl.swizzle.g,
1245 (uint8_t)image_view->planes[p].isl.swizzle.b,
1246 (uint8_t)image_view->planes[p].isl.swizzle.a,
1247 },
1248 };
1249 }
1250 memcpy(desc_map, desc_data,
1251 MAX2(1, bind_layout->max_plane_count) * sizeof(desc_data[0]));
1252 }
1253 }
1254
1255 void
1256 anv_descriptor_set_write_buffer_view(struct anv_device *device,
1257 struct anv_descriptor_set *set,
1258 VkDescriptorType type,
1259 struct anv_buffer_view *buffer_view,
1260 uint32_t binding,
1261 uint32_t element)
1262 {
1263 const struct anv_descriptor_set_binding_layout *bind_layout =
1264 &set->layout->binding[binding];
1265 struct anv_descriptor *desc =
1266 &set->descriptors[bind_layout->descriptor_index + element];
1267
1268 assert(type == bind_layout->type);
1269
1270 void *desc_map = set->desc_mem.map + bind_layout->descriptor_offset +
1271 element * anv_descriptor_size(bind_layout);
1272
1273 if (buffer_view == NULL) {
1274 *desc = (struct anv_descriptor) { .type = type, };
1275 memset(desc_map, 0, anv_descriptor_size(bind_layout));
1276 return;
1277 }
1278
1279 *desc = (struct anv_descriptor) {
1280 .type = type,
1281 .buffer_view = buffer_view,
1282 };
1283
1284 if (bind_layout->data & ANV_DESCRIPTOR_SAMPLED_IMAGE) {
1285 struct anv_sampled_image_descriptor desc_data = {
1286 .image = anv_surface_state_to_handle(buffer_view->surface_state),
1287 };
1288 memcpy(desc_map, &desc_data, sizeof(desc_data));
1289 }
1290
1291 if (bind_layout->data & ANV_DESCRIPTOR_STORAGE_IMAGE) {
1292 assert(!(bind_layout->data & ANV_DESCRIPTOR_IMAGE_PARAM));
1293 struct anv_storage_image_descriptor desc_data = {
1294 .read_write = anv_surface_state_to_handle(
1295 buffer_view->storage_surface_state),
1296 .write_only = anv_surface_state_to_handle(
1297 buffer_view->writeonly_storage_surface_state),
1298 };
1299 memcpy(desc_map, &desc_data, sizeof(desc_data));
1300 }
1301
1302 if (bind_layout->data & ANV_DESCRIPTOR_IMAGE_PARAM) {
1303 anv_descriptor_set_write_image_param(desc_map,
1304 &buffer_view->storage_image_param);
1305 }
1306 }
1307
1308 void
1309 anv_descriptor_set_write_buffer(struct anv_device *device,
1310 struct anv_descriptor_set *set,
1311 struct anv_state_stream *alloc_stream,
1312 VkDescriptorType type,
1313 struct anv_buffer *buffer,
1314 uint32_t binding,
1315 uint32_t element,
1316 VkDeviceSize offset,
1317 VkDeviceSize range)
1318 {
1319 const struct anv_descriptor_set_binding_layout *bind_layout =
1320 &set->layout->binding[binding];
1321 struct anv_descriptor *desc =
1322 &set->descriptors[bind_layout->descriptor_index + element];
1323
1324 assert(type == bind_layout->type);
1325
1326 void *desc_map = set->desc_mem.map + bind_layout->descriptor_offset +
1327 element * anv_descriptor_size(bind_layout);
1328
1329 if (buffer == NULL) {
1330 *desc = (struct anv_descriptor) { .type = type, };
1331 memset(desc_map, 0, anv_descriptor_size(bind_layout));
1332 return;
1333 }
1334
1335 struct anv_address bind_addr = anv_address_add(buffer->address, offset);
1336 uint64_t bind_range = anv_buffer_get_range(buffer, offset, range);
1337
1338 /* We report a bounds checking alignment of 32B for the sake of block
1339 * messages which read an entire register worth at a time.
1340 */
1341 if (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ||
1342 type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC)
1343 bind_range = align_u64(bind_range, ANV_UBO_ALIGNMENT);
1344
1345 if (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
1346 type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) {
1347 *desc = (struct anv_descriptor) {
1348 .type = type,
1349 .buffer = buffer,
1350 .offset = offset,
1351 .range = range,
1352 };
1353 } else {
1354 assert(bind_layout->data & ANV_DESCRIPTOR_BUFFER_VIEW);
1355 struct anv_buffer_view *bview =
1356 &set->buffer_views[bind_layout->buffer_view_index + element];
1357
1358 bview->format = anv_isl_format_for_descriptor_type(type);
1359 bview->range = bind_range;
1360 bview->address = bind_addr;
1361
1362 /* If we're writing descriptors through a push command, we need to
1363 * allocate the surface state from the command buffer. Otherwise it will
1364 * be allocated by the descriptor pool when calling
1365 * vkAllocateDescriptorSets. */
1366 if (alloc_stream)
1367 bview->surface_state = anv_state_stream_alloc(alloc_stream, 64, 64);
1368
1369 anv_fill_buffer_surface_state(device, bview->surface_state,
1370 bview->format, bind_addr, bind_range, 1);
1371
1372 *desc = (struct anv_descriptor) {
1373 .type = type,
1374 .buffer_view = bview,
1375 };
1376 }
1377
1378 if (bind_layout->data & ANV_DESCRIPTOR_ADDRESS_RANGE) {
1379 struct anv_address_range_descriptor desc_data = {
1380 .address = anv_address_physical(bind_addr),
1381 .range = bind_range,
1382 };
1383 memcpy(desc_map, &desc_data, sizeof(desc_data));
1384 }
1385 }
1386
1387 void
1388 anv_descriptor_set_write_inline_uniform_data(struct anv_device *device,
1389 struct anv_descriptor_set *set,
1390 uint32_t binding,
1391 const void *data,
1392 size_t offset,
1393 size_t size)
1394 {
1395 const struct anv_descriptor_set_binding_layout *bind_layout =
1396 &set->layout->binding[binding];
1397
1398 assert(bind_layout->data & ANV_DESCRIPTOR_INLINE_UNIFORM);
1399
1400 void *desc_map = set->desc_mem.map + bind_layout->descriptor_offset;
1401
1402 memcpy(desc_map + offset, data, size);
1403 }
1404
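/* As an illustration (hypothetical application code), a write such as
 *
 *    VkDescriptorBufferInfo info = { .buffer = buf, .offset = 0, .range = 256 };
 *    VkWriteDescriptorSet write = {
 *       .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
 *       .dstSet = set, .dstBinding = 0, .dstArrayElement = 0,
 *       .descriptorCount = 1,
 *       .descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
 *       .pBufferInfo = &info,
 *    };
 *    vkUpdateDescriptorSets(device, 1, &write, 0, NULL);
 *
 * ends up in the VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER case below and is written
 * out through anv_descriptor_set_write_buffer().
 */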
1405 void anv_UpdateDescriptorSets(
1406 VkDevice _device,
1407 uint32_t descriptorWriteCount,
1408 const VkWriteDescriptorSet* pDescriptorWrites,
1409 uint32_t descriptorCopyCount,
1410 const VkCopyDescriptorSet* pDescriptorCopies)
1411 {
1412 ANV_FROM_HANDLE(anv_device, device, _device);
1413
1414 for (uint32_t i = 0; i < descriptorWriteCount; i++) {
1415 const VkWriteDescriptorSet *write = &pDescriptorWrites[i];
1416 ANV_FROM_HANDLE(anv_descriptor_set, set, write->dstSet);
1417
1418 switch (write->descriptorType) {
1419 case VK_DESCRIPTOR_TYPE_SAMPLER:
1420 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
1421 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
1422 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
1423 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
1424 for (uint32_t j = 0; j < write->descriptorCount; j++) {
1425 anv_descriptor_set_write_image_view(device, set,
1426 write->pImageInfo + j,
1427 write->descriptorType,
1428 write->dstBinding,
1429 write->dstArrayElement + j);
1430 }
1431 break;
1432
1433 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
1434 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
1435 for (uint32_t j = 0; j < write->descriptorCount; j++) {
1436 ANV_FROM_HANDLE(anv_buffer_view, bview,
1437 write->pTexelBufferView[j]);
1438
1439 anv_descriptor_set_write_buffer_view(device, set,
1440 write->descriptorType,
1441 bview,
1442 write->dstBinding,
1443 write->dstArrayElement + j);
1444 }
1445 break;
1446
1447 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
1448 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
1449 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
1450 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
1451 for (uint32_t j = 0; j < write->descriptorCount; j++) {
1452 ANV_FROM_HANDLE(anv_buffer, buffer, write->pBufferInfo[j].buffer);
1453
1454 anv_descriptor_set_write_buffer(device, set,
1455 NULL,
1456 write->descriptorType,
1457 buffer,
1458 write->dstBinding,
1459 write->dstArrayElement + j,
1460 write->pBufferInfo[j].offset,
1461 write->pBufferInfo[j].range);
1462 }
1463 break;
1464
1465 case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT: {
1466 const VkWriteDescriptorSetInlineUniformBlockEXT *inline_write =
1467 vk_find_struct_const(write->pNext,
1468 WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT);
1469 assert(inline_write->dataSize == write->descriptorCount);
1470 anv_descriptor_set_write_inline_uniform_data(device, set,
1471 write->dstBinding,
1472 inline_write->pData,
1473 write->dstArrayElement,
1474 inline_write->dataSize);
1475 break;
1476 }
1477
1478 default:
1479 break;
1480 }
1481 }
1482
1483 for (uint32_t i = 0; i < descriptorCopyCount; i++) {
1484 const VkCopyDescriptorSet *copy = &pDescriptorCopies[i];
1485 ANV_FROM_HANDLE(anv_descriptor_set, src, copy->srcSet);
1486 ANV_FROM_HANDLE(anv_descriptor_set, dst, copy->dstSet);
1487
1488 const struct anv_descriptor_set_binding_layout *src_layout =
1489 &src->layout->binding[copy->srcBinding];
1490 struct anv_descriptor *src_desc =
1491 &src->descriptors[src_layout->descriptor_index];
1492 src_desc += copy->srcArrayElement;
1493
1494 const struct anv_descriptor_set_binding_layout *dst_layout =
1495 &dst->layout->binding[copy->dstBinding];
1496 struct anv_descriptor *dst_desc =
1497 &dst->descriptors[dst_layout->descriptor_index];
1498 dst_desc += copy->dstArrayElement;
1499
1500 if (src_layout->data & ANV_DESCRIPTOR_INLINE_UNIFORM) {
1501 assert(src_layout->data == ANV_DESCRIPTOR_INLINE_UNIFORM);
1502 memcpy(dst->desc_mem.map + dst_layout->descriptor_offset +
1503 copy->dstArrayElement,
1504 src->desc_mem.map + src_layout->descriptor_offset +
1505 copy->srcArrayElement,
1506 copy->descriptorCount);
1507 } else {
1508 for (uint32_t j = 0; j < copy->descriptorCount; j++)
1509 dst_desc[j] = src_desc[j];
1510
1511 unsigned desc_size = anv_descriptor_size(src_layout);
1512 if (desc_size > 0) {
1513 assert(desc_size == anv_descriptor_size(dst_layout));
1514 memcpy(dst->desc_mem.map + dst_layout->descriptor_offset +
1515 copy->dstArrayElement * desc_size,
1516 src->desc_mem.map + src_layout->descriptor_offset +
1517 copy->srcArrayElement * desc_size,
1518 copy->descriptorCount * desc_size);
1519 }
1520 }
1521 }
1522 }
1523
1524 /*
1525 * Descriptor update templates.
1526 */
1527
1528 void
1529 anv_descriptor_set_write_template(struct anv_device *device,
1530 struct anv_descriptor_set *set,
1531 struct anv_state_stream *alloc_stream,
1532 const struct anv_descriptor_update_template *template,
1533 const void *data)
1534 {
1535 for (uint32_t i = 0; i < template->entry_count; i++) {
1536 const struct anv_descriptor_template_entry *entry =
1537 &template->entries[i];
1538
1539 switch (entry->type) {
1540 case VK_DESCRIPTOR_TYPE_SAMPLER:
1541 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
1542 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
1543 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
1544 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
1545 for (uint32_t j = 0; j < entry->array_count; j++) {
1546 const VkDescriptorImageInfo *info =
1547 data + entry->offset + j * entry->stride;
1548 anv_descriptor_set_write_image_view(device, set,
1549 info, entry->type,
1550 entry->binding,
1551 entry->array_element + j);
1552 }
1553 break;
1554
1555 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
1556 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
1557 for (uint32_t j = 0; j < entry->array_count; j++) {
1558 const VkBufferView *_bview =
1559 data + entry->offset + j * entry->stride;
1560 ANV_FROM_HANDLE(anv_buffer_view, bview, *_bview);
1561
1562 anv_descriptor_set_write_buffer_view(device, set,
1563 entry->type,
1564 bview,
1565 entry->binding,
1566 entry->array_element + j);
1567 }
1568 break;
1569
1570 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
1571 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
1572 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
1573 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
1574 for (uint32_t j = 0; j < entry->array_count; j++) {
1575 const VkDescriptorBufferInfo *info =
1576 data + entry->offset + j * entry->stride;
1577 ANV_FROM_HANDLE(anv_buffer, buffer, info->buffer);
1578
1579 anv_descriptor_set_write_buffer(device, set,
1580 alloc_stream,
1581 entry->type,
1582 buffer,
1583 entry->binding,
1584 entry->array_element + j,
1585 info->offset, info->range);
1586 }
1587 break;
1588
1589 case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:
1590 anv_descriptor_set_write_inline_uniform_data(device, set,
1591 entry->binding,
1592 data + entry->offset,
1593 entry->array_element,
1594 entry->array_count);
1595 break;
1596
1597 default:
1598 break;
1599 }
1600 }
1601 }
1602
1603 VkResult anv_CreateDescriptorUpdateTemplate(
1604 VkDevice _device,
1605 const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
1606 const VkAllocationCallbacks* pAllocator,
1607 VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate)
1608 {
1609 ANV_FROM_HANDLE(anv_device, device, _device);
1610 struct anv_descriptor_update_template *template;
1611
1612 size_t size = sizeof(*template) +
1613 pCreateInfo->descriptorUpdateEntryCount * sizeof(template->entries[0]);
1614 template = vk_alloc2(&device->vk.alloc, pAllocator, size, 8,
1615 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
1616 if (template == NULL)
1617 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
1618
1619 vk_object_base_init(&device->vk, &template->base,
1620 VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE);
1621 template->bind_point = pCreateInfo->pipelineBindPoint;
1622
1623 if (pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET)
1624 template->set = pCreateInfo->set;
1625
1626 template->entry_count = pCreateInfo->descriptorUpdateEntryCount;
1627 for (uint32_t i = 0; i < template->entry_count; i++) {
1628 const VkDescriptorUpdateTemplateEntry *pEntry =
1629 &pCreateInfo->pDescriptorUpdateEntries[i];
1630
1631 template->entries[i] = (struct anv_descriptor_template_entry) {
1632 .type = pEntry->descriptorType,
1633 .binding = pEntry->dstBinding,
1634 .array_element = pEntry->dstArrayElement,
1635 .array_count = pEntry->descriptorCount,
1636 .offset = pEntry->offset,
1637 .stride = pEntry->stride,
1638 };
1639 }
1640
1641 *pDescriptorUpdateTemplate =
1642 anv_descriptor_update_template_to_handle(template);
1643
1644 return VK_SUCCESS;
1645 }
1646
1647 void anv_DestroyDescriptorUpdateTemplate(
1648 VkDevice _device,
1649 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
1650 const VkAllocationCallbacks* pAllocator)
1651 {
1652 ANV_FROM_HANDLE(anv_device, device, _device);
1653 ANV_FROM_HANDLE(anv_descriptor_update_template, template,
1654 descriptorUpdateTemplate);
1655
1656 vk_object_base_finish(&template->base);
1657 vk_free2(&device->vk.alloc, pAllocator, template);
1658 }
1659
1660 void anv_UpdateDescriptorSetWithTemplate(
1661 VkDevice _device,
1662 VkDescriptorSet descriptorSet,
1663 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
1664 const void* pData)
1665 {
1666 ANV_FROM_HANDLE(anv_device, device, _device);
1667 ANV_FROM_HANDLE(anv_descriptor_set, set, descriptorSet);
1668 ANV_FROM_HANDLE(anv_descriptor_update_template, template,
1669 descriptorUpdateTemplate);
1670
1671 anv_descriptor_set_write_template(device, set, NULL, template, pData);
1672 }