/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */
30 #include "anv_private.h"
/*
 * Descriptor set layouts.
 */
36 VkResult
anv_CreateDescriptorSetLayout(
38 const VkDescriptorSetLayoutCreateInfo
* pCreateInfo
,
39 const VkAllocationCallbacks
* pAllocator
,
40 VkDescriptorSetLayout
* pSetLayout
)
42 ANV_FROM_HANDLE(anv_device
, device
, _device
);
43 struct anv_descriptor_set_layout
*set_layout
;
45 assert(pCreateInfo
->sType
== VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO
);
47 uint32_t max_binding
= 0;
48 uint32_t immutable_sampler_count
= 0;
49 for (uint32_t j
= 0; j
< pCreateInfo
->bindingCount
; j
++) {
50 max_binding
= MAX2(max_binding
, pCreateInfo
->pBindings
[j
].binding
);
51 if (pCreateInfo
->pBindings
[j
].pImmutableSamplers
)
52 immutable_sampler_count
+= pCreateInfo
->pBindings
[j
].descriptorCount
;
55 size_t size
= sizeof(struct anv_descriptor_set_layout
) +
56 (max_binding
+ 1) * sizeof(set_layout
->binding
[0]) +
57 immutable_sampler_count
* sizeof(struct anv_sampler
*);
59 set_layout
= anv_alloc2(&device
->alloc
, pAllocator
, size
, 8,
60 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT
);
62 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
64 /* We just allocate all the samplers at the end of the struct */
65 struct anv_sampler
**samplers
=
66 (struct anv_sampler
**)&set_layout
->binding
[max_binding
+ 1];
68 set_layout
->binding_count
= max_binding
+ 1;
69 set_layout
->shader_stages
= 0;
72 for (uint32_t b
= 0; b
<= max_binding
; b
++) {
73 /* Initialize all binding_layout entries to -1 */
74 memset(&set_layout
->binding
[b
], -1, sizeof(set_layout
->binding
[b
]));
76 set_layout
->binding
[b
].immutable_samplers
= NULL
;
79 /* Initialize all samplers to 0 */
80 memset(samplers
, 0, immutable_sampler_count
* sizeof(*samplers
));
82 uint32_t sampler_count
[MESA_SHADER_STAGES
] = { 0, };
83 uint32_t surface_count
[MESA_SHADER_STAGES
] = { 0, };
84 uint32_t image_count
[MESA_SHADER_STAGES
] = { 0, };
85 uint32_t buffer_count
= 0;
86 uint32_t dynamic_offset_count
= 0;
88 for (uint32_t j
= 0; j
< pCreateInfo
->bindingCount
; j
++) {
89 const VkDescriptorSetLayoutBinding
*binding
= &pCreateInfo
->pBindings
[j
];
90 uint32_t b
= binding
->binding
;
92 assert(binding
->descriptorCount
> 0);
93 set_layout
->binding
[b
].array_size
= binding
->descriptorCount
;
94 set_layout
->binding
[b
].descriptor_index
= set_layout
->size
;
95 set_layout
->size
+= binding
->descriptorCount
;
97 switch (binding
->descriptorType
) {
98 case VK_DESCRIPTOR_TYPE_SAMPLER
:
99 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
100 anv_foreach_stage(s
, binding
->stageFlags
) {
101 set_layout
->binding
[b
].stage
[s
].sampler_index
= sampler_count
[s
];
102 sampler_count
[s
] += binding
->descriptorCount
;
109 switch (binding
->descriptorType
) {
110 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
:
111 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
:
112 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
113 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
114 set_layout
->binding
[b
].buffer_index
= buffer_count
;
115 buffer_count
+= binding
->descriptorCount
;
118 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
119 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
:
120 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
121 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER
:
122 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
:
123 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
:
124 anv_foreach_stage(s
, binding
->stageFlags
) {
125 set_layout
->binding
[b
].stage
[s
].surface_index
= surface_count
[s
];
126 surface_count
[s
] += binding
->descriptorCount
;
133 switch (binding
->descriptorType
) {
134 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
135 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
136 set_layout
->binding
[b
].dynamic_offset_index
= dynamic_offset_count
;
137 dynamic_offset_count
+= binding
->descriptorCount
;
143 switch (binding
->descriptorType
) {
144 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
145 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
:
146 anv_foreach_stage(s
, binding
->stageFlags
) {
147 set_layout
->binding
[b
].stage
[s
].image_index
= image_count
[s
];
148 image_count
[s
] += binding
->descriptorCount
;
155 if (binding
->pImmutableSamplers
) {
156 set_layout
->binding
[b
].immutable_samplers
= samplers
;
157 samplers
+= binding
->descriptorCount
;
159 for (uint32_t i
= 0; i
< binding
->descriptorCount
; i
++)
160 set_layout
->binding
[b
].immutable_samplers
[i
] =
161 anv_sampler_from_handle(binding
->pImmutableSamplers
[i
]);
163 set_layout
->binding
[b
].immutable_samplers
= NULL
;
166 set_layout
->shader_stages
|= binding
->stageFlags
;
169 set_layout
->buffer_count
= buffer_count
;
170 set_layout
->dynamic_offset_count
= dynamic_offset_count
;
172 *pSetLayout
= anv_descriptor_set_layout_to_handle(set_layout
);
177 void anv_DestroyDescriptorSetLayout(
179 VkDescriptorSetLayout _set_layout
,
180 const VkAllocationCallbacks
* pAllocator
)
182 ANV_FROM_HANDLE(anv_device
, device
, _device
);
183 ANV_FROM_HANDLE(anv_descriptor_set_layout
, set_layout
, _set_layout
);
185 anv_free2(&device
->alloc
, pAllocator
, set_layout
);
/*
 * Pipeline layouts.  These have nothing to do with the pipeline.  They are
 * just multiple descriptor set layouts pasted together.
 */
193 VkResult
anv_CreatePipelineLayout(
195 const VkPipelineLayoutCreateInfo
* pCreateInfo
,
196 const VkAllocationCallbacks
* pAllocator
,
197 VkPipelineLayout
* pPipelineLayout
)
199 ANV_FROM_HANDLE(anv_device
, device
, _device
);
200 struct anv_pipeline_layout l
, *layout
;
202 assert(pCreateInfo
->sType
== VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO
);
204 l
.num_sets
= pCreateInfo
->setLayoutCount
;
206 unsigned dynamic_offset_count
= 0;
208 memset(l
.stage
, 0, sizeof(l
.stage
));
209 for (uint32_t set
= 0; set
< pCreateInfo
->setLayoutCount
; set
++) {
210 ANV_FROM_HANDLE(anv_descriptor_set_layout
, set_layout
,
211 pCreateInfo
->pSetLayouts
[set
]);
212 l
.set
[set
].layout
= set_layout
;
214 l
.set
[set
].dynamic_offset_start
= dynamic_offset_count
;
215 for (uint32_t b
= 0; b
< set_layout
->binding_count
; b
++) {
216 if (set_layout
->binding
[b
].dynamic_offset_index
>= 0)
217 dynamic_offset_count
+= set_layout
->binding
[b
].array_size
;
220 for (gl_shader_stage s
= 0; s
< MESA_SHADER_STAGES
; s
++) {
221 l
.set
[set
].stage
[s
].surface_start
= l
.stage
[s
].surface_count
;
222 l
.set
[set
].stage
[s
].sampler_start
= l
.stage
[s
].sampler_count
;
223 l
.set
[set
].stage
[s
].image_start
= l
.stage
[s
].image_count
;
225 for (uint32_t b
= 0; b
< set_layout
->binding_count
; b
++) {
226 unsigned array_size
= set_layout
->binding
[b
].array_size
;
228 if (set_layout
->binding
[b
].stage
[s
].surface_index
>= 0) {
229 l
.stage
[s
].surface_count
+= array_size
;
231 if (set_layout
->binding
[b
].dynamic_offset_index
>= 0)
232 l
.stage
[s
].has_dynamic_offsets
= true;
235 if (set_layout
->binding
[b
].stage
[s
].sampler_index
>= 0)
236 l
.stage
[s
].sampler_count
+= array_size
;
238 if (set_layout
->binding
[b
].stage
[s
].image_index
>= 0)
239 l
.stage
[s
].image_count
+= array_size
;
244 unsigned num_bindings
= 0;
245 for (gl_shader_stage s
= 0; s
< MESA_SHADER_STAGES
; s
++) {
246 num_bindings
+= l
.stage
[s
].surface_count
+
247 l
.stage
[s
].sampler_count
+
248 l
.stage
[s
].image_count
;
251 size_t size
= sizeof(*layout
) + num_bindings
* sizeof(layout
->entries
[0]);
253 layout
= anv_alloc2(&device
->alloc
, pAllocator
, size
, 8,
254 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT
);
256 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
258 /* Now we can actually build our surface and sampler maps */
259 struct anv_pipeline_binding
*entry
= layout
->entries
;
260 for (gl_shader_stage s
= 0; s
< MESA_SHADER_STAGES
; s
++) {
261 l
.stage
[s
].surface_to_descriptor
= entry
;
262 entry
+= l
.stage
[s
].surface_count
;
263 l
.stage
[s
].sampler_to_descriptor
= entry
;
264 entry
+= l
.stage
[s
].sampler_count
;
265 entry
+= l
.stage
[s
].image_count
;
269 for (uint32_t set
= 0; set
< pCreateInfo
->setLayoutCount
; set
++) {
270 struct anv_descriptor_set_layout
*set_layout
= l
.set
[set
].layout
;
272 for (uint32_t b
= 0; b
< set_layout
->binding_count
; b
++) {
273 unsigned array_size
= set_layout
->binding
[b
].array_size
;
274 unsigned set_offset
= set_layout
->binding
[b
].descriptor_index
;
276 if (set_layout
->binding
[b
].stage
[s
].surface_index
>= 0) {
277 assert(surface
== l
.set
[set
].stage
[s
].surface_start
+
278 set_layout
->binding
[b
].stage
[s
].surface_index
);
279 for (unsigned i
= 0; i
< array_size
; i
++) {
280 l
.stage
[s
].surface_to_descriptor
[surface
+ i
].set
= set
;
281 l
.stage
[s
].surface_to_descriptor
[surface
+ i
].offset
= set_offset
+ i
;
283 surface
+= array_size
;
286 if (set_layout
->binding
[b
].stage
[s
].sampler_index
>= 0) {
287 assert(sampler
== l
.set
[set
].stage
[s
].sampler_start
+
288 set_layout
->binding
[b
].stage
[s
].sampler_index
);
289 for (unsigned i
= 0; i
< array_size
; i
++) {
290 l
.stage
[s
].sampler_to_descriptor
[sampler
+ i
].set
= set
;
291 l
.stage
[s
].sampler_to_descriptor
[sampler
+ i
].offset
= set_offset
+ i
;
293 sampler
+= array_size
;
299 /* Finally, we're done setting it up, copy into the allocated version */
302 *pPipelineLayout
= anv_pipeline_layout_to_handle(layout
);
307 void anv_DestroyPipelineLayout(
309 VkPipelineLayout _pipelineLayout
,
310 const VkAllocationCallbacks
* pAllocator
)
312 ANV_FROM_HANDLE(anv_device
, device
, _device
);
313 ANV_FROM_HANDLE(anv_pipeline_layout
, pipeline_layout
, _pipelineLayout
);
315 anv_free2(&device
->alloc
, pAllocator
, pipeline_layout
);
/*
 * Descriptor pools.  These are implemented as a no-op for now.
 */
322 VkResult
anv_CreateDescriptorPool(
324 const VkDescriptorPoolCreateInfo
* pCreateInfo
,
325 const VkAllocationCallbacks
* pAllocator
,
326 VkDescriptorPool
* pDescriptorPool
)
328 anv_finishme("VkDescriptorPool is a stub");
329 *pDescriptorPool
= (VkDescriptorPool
)1;
333 void anv_DestroyDescriptorPool(
335 VkDescriptorPool _pool
,
336 const VkAllocationCallbacks
* pAllocator
)
338 anv_finishme("VkDescriptorPool is a stub: free the pool's descriptor sets");
341 VkResult
anv_ResetDescriptorPool(
343 VkDescriptorPool descriptorPool
,
344 VkDescriptorPoolResetFlags flags
)
346 anv_finishme("VkDescriptorPool is a stub: free the pool's descriptor sets");
351 anv_descriptor_set_create(struct anv_device
*device
,
352 const struct anv_descriptor_set_layout
*layout
,
353 struct anv_descriptor_set
**out_set
)
355 struct anv_descriptor_set
*set
;
356 size_t size
= sizeof(*set
) + layout
->size
* sizeof(set
->descriptors
[0]);
358 set
= anv_alloc(&device
->alloc
/* XXX: Use the pool */, size
, 8,
359 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT
);
361 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
363 /* A descriptor set may not be 100% filled. Clear the set so we can can
364 * later detect holes in it.
366 memset(set
, 0, size
);
368 set
->layout
= layout
;
370 /* Go through and fill out immutable samplers if we have any */
371 struct anv_descriptor
*desc
= set
->descriptors
;
372 for (uint32_t b
= 0; b
< layout
->binding_count
; b
++) {
373 if (layout
->binding
[b
].immutable_samplers
) {
374 for (uint32_t i
= 0; i
< layout
->binding
[b
].array_size
; i
++)
375 desc
[i
].sampler
= layout
->binding
[b
].immutable_samplers
[i
];
377 desc
+= layout
->binding
[b
].array_size
;
380 /* XXX: Use the pool */
382 anv_alloc(&device
->alloc
,
383 sizeof(set
->buffer_views
[0]) * layout
->buffer_count
, 8,
384 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT
);
385 if (!set
->buffer_views
) {
386 anv_free(&device
->alloc
, set
);
387 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
390 for (uint32_t b
= 0; b
< layout
->buffer_count
; b
++) {
391 set
->buffer_views
[b
].surface_state
=
392 anv_state_pool_alloc(&device
->surface_state_pool
, 64, 64);
394 set
->buffer_count
= layout
->buffer_count
;
401 anv_descriptor_set_destroy(struct anv_device
*device
,
402 struct anv_descriptor_set
*set
)
404 /* XXX: Use the pool */
405 for (uint32_t b
= 0; b
< set
->buffer_count
; b
++)
406 anv_state_pool_free(&device
->surface_state_pool
,
407 set
->buffer_views
[b
].surface_state
);
409 anv_free(&device
->alloc
, set
->buffer_views
);
410 anv_free(&device
->alloc
, set
);
413 VkResult
anv_AllocateDescriptorSets(
415 const VkDescriptorSetAllocateInfo
* pAllocateInfo
,
416 VkDescriptorSet
* pDescriptorSets
)
418 ANV_FROM_HANDLE(anv_device
, device
, _device
);
420 VkResult result
= VK_SUCCESS
;
421 struct anv_descriptor_set
*set
;
424 for (i
= 0; i
< pAllocateInfo
->descriptorSetCount
; i
++) {
425 ANV_FROM_HANDLE(anv_descriptor_set_layout
, layout
,
426 pAllocateInfo
->pSetLayouts
[i
]);
428 result
= anv_descriptor_set_create(device
, layout
, &set
);
429 if (result
!= VK_SUCCESS
)
432 pDescriptorSets
[i
] = anv_descriptor_set_to_handle(set
);
435 if (result
!= VK_SUCCESS
)
436 anv_FreeDescriptorSets(_device
, pAllocateInfo
->descriptorPool
,
442 VkResult
anv_FreeDescriptorSets(
444 VkDescriptorPool descriptorPool
,
446 const VkDescriptorSet
* pDescriptorSets
)
448 ANV_FROM_HANDLE(anv_device
, device
, _device
);
450 for (uint32_t i
= 0; i
< count
; i
++) {
451 ANV_FROM_HANDLE(anv_descriptor_set
, set
, pDescriptorSets
[i
]);
453 anv_descriptor_set_destroy(device
, set
);
459 void anv_UpdateDescriptorSets(
461 uint32_t descriptorWriteCount
,
462 const VkWriteDescriptorSet
* pDescriptorWrites
,
463 uint32_t descriptorCopyCount
,
464 const VkCopyDescriptorSet
* pDescriptorCopies
)
466 ANV_FROM_HANDLE(anv_device
, device
, _device
);
468 for (uint32_t i
= 0; i
< descriptorWriteCount
; i
++) {
469 const VkWriteDescriptorSet
*write
= &pDescriptorWrites
[i
];
470 ANV_FROM_HANDLE(anv_descriptor_set
, set
, write
->dstSet
);
471 const struct anv_descriptor_set_binding_layout
*bind_layout
=
472 &set
->layout
->binding
[write
->dstBinding
];
473 struct anv_descriptor
*desc
=
474 &set
->descriptors
[bind_layout
->descriptor_index
];
475 desc
+= write
->dstArrayElement
;
477 switch (write
->descriptorType
) {
478 case VK_DESCRIPTOR_TYPE_SAMPLER
:
479 for (uint32_t j
= 0; j
< write
->descriptorCount
; j
++) {
480 ANV_FROM_HANDLE(anv_sampler
, sampler
,
481 write
->pImageInfo
[j
].sampler
);
483 desc
[j
] = (struct anv_descriptor
) {
484 .type
= VK_DESCRIPTOR_TYPE_SAMPLER
,
490 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
491 for (uint32_t j
= 0; j
< write
->descriptorCount
; j
++) {
492 ANV_FROM_HANDLE(anv_image_view
, iview
,
493 write
->pImageInfo
[j
].imageView
);
494 ANV_FROM_HANDLE(anv_sampler
, sampler
,
495 write
->pImageInfo
[j
].sampler
);
497 desc
[j
].type
= VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
;
498 desc
[j
].image_view
= iview
;
500 /* If this descriptor has an immutable sampler, we don't want
504 desc
[j
].sampler
= sampler
;
508 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
:
509 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
510 for (uint32_t j
= 0; j
< write
->descriptorCount
; j
++) {
511 ANV_FROM_HANDLE(anv_image_view
, iview
,
512 write
->pImageInfo
[j
].imageView
);
514 desc
[j
] = (struct anv_descriptor
) {
515 .type
= write
->descriptorType
,
521 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER
:
522 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
:
523 for (uint32_t j
= 0; j
< write
->descriptorCount
; j
++) {
524 ANV_FROM_HANDLE(anv_buffer_view
, bview
,
525 write
->pTexelBufferView
[j
]);
527 desc
[j
] = (struct anv_descriptor
) {
528 .type
= write
->descriptorType
,
529 .buffer_view
= bview
,
534 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
:
535 anv_finishme("input attachments not implemented");
538 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
:
539 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
:
540 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
541 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
542 for (uint32_t j
= 0; j
< write
->descriptorCount
; j
++) {
543 assert(write
->pBufferInfo
[j
].buffer
);
544 ANV_FROM_HANDLE(anv_buffer
, buffer
, write
->pBufferInfo
[j
].buffer
);
547 struct anv_buffer_view
*view
=
548 &set
->buffer_views
[bind_layout
->buffer_index
];
549 view
+= write
->dstArrayElement
+ j
;
551 const struct anv_format
*format
=
552 anv_format_for_descriptor_type(write
->descriptorType
);
554 view
->format
= format
->surface_format
;
555 view
->bo
= buffer
->bo
;
556 view
->offset
= buffer
->offset
+ write
->pBufferInfo
[j
].offset
;
558 /* For buffers with dynamic offsets, we use the full possible
559 * range in the surface state and do the actual range-checking
562 if (bind_layout
->dynamic_offset_index
>= 0 ||
563 write
->pBufferInfo
[j
].range
== VK_WHOLE_SIZE
)
564 view
->range
= buffer
->size
- write
->pBufferInfo
[j
].offset
;
566 view
->range
= write
->pBufferInfo
[j
].range
;
568 anv_fill_buffer_surface_state(device
, view
->surface_state
,
570 view
->offset
, view
->range
, 1);
572 desc
[j
] = (struct anv_descriptor
) {
573 .type
= write
->descriptorType
,
584 for (uint32_t i
= 0; i
< descriptorCopyCount
; i
++) {
585 const VkCopyDescriptorSet
*copy
= &pDescriptorCopies
[i
];
586 ANV_FROM_HANDLE(anv_descriptor_set
, src
, copy
->dstSet
);
587 ANV_FROM_HANDLE(anv_descriptor_set
, dest
, copy
->dstSet
);
588 for (uint32_t j
= 0; j
< copy
->descriptorCount
; j
++) {
589 dest
->descriptors
[copy
->dstBinding
+ j
] =
590 src
->descriptors
[copy
->srcBinding
+ j
];
592 dest
->buffer_count
= src
->buffer_count
;