/*
 * Copyright © 2019 Red Hat.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include "val_private.h"
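
/* Descriptor set layouts, pipeline layouts, descriptor pools, and
 * descriptor set allocation/update for the val Vulkan frontend.
 * Set layouts are flattened into running per-stage counts so that
 * Vulkan (set, binding) pairs can later be mapped onto gallium
 * resource slots.
 */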

VkResult val_CreateDescriptorSetLayout(
    VkDevice                                    _device,
    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorSetLayout*                      pSetLayout)
{
   VAL_FROM_HANDLE(val_device, device, _device);
   struct val_descriptor_set_layout *set_layout;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);
   uint32_t max_binding = 0;
   uint32_t immutable_sampler_count = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      max_binding = MAX2(max_binding, pCreateInfo->pBindings[j].binding);
      if (pCreateInfo->pBindings[j].pImmutableSamplers)
         immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
   }

   size_t size = sizeof(struct val_descriptor_set_layout) +
                 (max_binding + 1) * sizeof(set_layout->binding[0]) +
                 immutable_sampler_count * sizeof(struct val_sampler *);

   set_layout = vk_zalloc2(&device->alloc, pAllocator, size, 8,
                           VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!set_layout)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vk_object_base_init(&device->vk, &set_layout->base,
                       VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT);
   /* We just allocate all the samplers at the end of the struct */
   struct val_sampler **samplers =
      (struct val_sampler **)&set_layout->binding[max_binding + 1];

   set_layout->binding_count = max_binding + 1;
   set_layout->shader_stages = 0;
   set_layout->size = 0;

   uint32_t dynamic_offset_count = 0;
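
   /* For each binding, record its flat descriptor range and, per shader
    * stage, the first slot index of each resource type it consumes. The
    * running per-stage counts are what later binding code uses to place
    * descriptors into gallium resource slots.
    */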
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[j];
      uint32_t b = binding->binding;

      set_layout->binding[b].array_size = binding->descriptorCount;
      set_layout->binding[b].descriptor_index = set_layout->size;
      set_layout->binding[b].type = binding->descriptorType;
      set_layout->binding[b].valid = true;
      set_layout->size += binding->descriptorCount;

      for (gl_shader_stage stage = MESA_SHADER_VERTEX; stage < MESA_SHADER_STAGES; stage++) {
         set_layout->binding[b].stage[stage].const_buffer_index = -1;
         set_layout->binding[b].stage[stage].shader_buffer_index = -1;
         set_layout->binding[b].stage[stage].sampler_index = -1;
         set_layout->binding[b].stage[stage].sampler_view_index = -1;
         set_layout->binding[b].stage[stage].image_index = -1;
      }

      if (binding->descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
          binding->descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) {
         set_layout->binding[b].dynamic_index = dynamic_offset_count;
         dynamic_offset_count += binding->descriptorCount;
      }
      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         val_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].sampler_index = set_layout->stage[s].sampler_count;
            set_layout->stage[s].sampler_count += binding->descriptorCount;
         }
         break;
      default:
         break;
      }

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
         val_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].const_buffer_index = set_layout->stage[s].const_buffer_count;
            set_layout->stage[s].const_buffer_count += binding->descriptorCount;
         }
         break;
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         val_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].shader_buffer_index = set_layout->stage[s].shader_buffer_count;
            set_layout->stage[s].shader_buffer_count += binding->descriptorCount;
         }
         break;
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         val_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].image_index = set_layout->stage[s].image_count;
            set_layout->stage[s].image_count += binding->descriptorCount;
         }
         break;
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
         val_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].sampler_view_index = set_layout->stage[s].sampler_view_count;
            set_layout->stage[s].sampler_view_count += binding->descriptorCount;
         }
         break;
      default:
         break;
      }

      if (binding->pImmutableSamplers) {
         set_layout->binding[b].immutable_samplers = samplers;
         samplers += binding->descriptorCount;

         for (uint32_t i = 0; i < binding->descriptorCount; i++)
            set_layout->binding[b].immutable_samplers[i] =
               val_sampler_from_handle(binding->pImmutableSamplers[i]);
      } else {
         set_layout->binding[b].immutable_samplers = NULL;
      }

      set_layout->shader_stages |= binding->stageFlags;
   }

   set_layout->dynamic_offset_count = dynamic_offset_count;

   *pSetLayout = val_descriptor_set_layout_to_handle(set_layout);

   return VK_SUCCESS;
}
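
/* Vulkan allows destroying VK_NULL_HANDLE, so the destroy entry points
 * below bail out early on a NULL handle instead of dereferencing it.
 */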
void val_DestroyDescriptorSetLayout(
    VkDevice                                    _device,
    VkDescriptorSetLayout                       _set_layout,
    const VkAllocationCallbacks*                pAllocator)
{
   VAL_FROM_HANDLE(val_device, device, _device);
   VAL_FROM_HANDLE(val_descriptor_set_layout, set_layout, _set_layout);

   if (!_set_layout)
      return;
   vk_object_base_finish(&set_layout->base);
   vk_free2(&device->alloc, pAllocator, set_layout);
}
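
/* A pipeline layout here just records the set layouts and the combined
 * push-constant size (rounded up to 16 bytes below).
 */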
VkResult val_CreatePipelineLayout(
    VkDevice                                    _device,
    const VkPipelineLayoutCreateInfo*           pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkPipelineLayout*                           pPipelineLayout)
{
   VAL_FROM_HANDLE(val_device, device, _device);
   struct val_pipeline_layout *layout;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);

   layout = vk_alloc2(&device->alloc, pAllocator, sizeof(*layout), 8,
                      VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (layout == NULL)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vk_object_base_init(&device->vk, &layout->base,
                       VK_OBJECT_TYPE_PIPELINE_LAYOUT);
   layout->num_sets = pCreateInfo->setLayoutCount;

   for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {
      VAL_FROM_HANDLE(val_descriptor_set_layout, set_layout,
                      pCreateInfo->pSetLayouts[set]);
      layout->set[set].layout = set_layout;
   }

   layout->push_constant_size = 0;
   for (unsigned i = 0; i < pCreateInfo->pushConstantRangeCount; ++i) {
      const VkPushConstantRange *range = pCreateInfo->pPushConstantRanges + i;
      layout->push_constant_size = MAX2(layout->push_constant_size,
                                        range->offset + range->size);
   }
   layout->push_constant_size = align(layout->push_constant_size, 16);
   *pPipelineLayout = val_pipeline_layout_to_handle(layout);

   return VK_SUCCESS;
}

void val_DestroyPipelineLayout(
    VkDevice                                    _device,
    VkPipelineLayout                            _pipelineLayout,
    const VkAllocationCallbacks*                pAllocator)
{
   VAL_FROM_HANDLE(val_device, device, _device);
   VAL_FROM_HANDLE(val_pipeline_layout, pipeline_layout, _pipelineLayout);

   if (!_pipelineLayout)
      return;
   vk_object_base_finish(&pipeline_layout->base);
   vk_free2(&device->alloc, pAllocator, pipeline_layout);
}
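
/* Internal helper: descriptors are stored as a flat array tail-allocated
 * after struct val_descriptor_set; layout->size is the total descriptor
 * count across all bindings.
 */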
VkResult
val_descriptor_set_create(struct val_device *device,
                          const struct val_descriptor_set_layout *layout,
                          struct val_descriptor_set **out_set)
{
   struct val_descriptor_set *set;
   size_t size = sizeof(*set) + layout->size * sizeof(set->descriptors[0]);

   set = vk_alloc(&device->alloc /* XXX: Use the pool */, size, 8,
                  VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!set)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   /* A descriptor set may not be 100% filled. Clear the set so we can
    * later detect holes in it.
    */
   memset(set, 0, size);

   vk_object_base_init(&device->vk, &set->base,
                       VK_OBJECT_TYPE_DESCRIPTOR_SET);
   set->layout = layout;

   /* Go through and fill out immutable samplers if we have any */
   struct val_descriptor *desc = set->descriptors;
   for (uint32_t b = 0; b < layout->binding_count; b++) {
      if (layout->binding[b].immutable_samplers) {
         for (uint32_t i = 0; i < layout->binding[b].array_size; i++)
            desc[i].sampler = layout->binding[b].immutable_samplers[i];
      }
      desc += layout->binding[b].array_size;
   }

   *out_set = set;

   return VK_SUCCESS;
}

void
val_descriptor_set_destroy(struct val_device *device,
                           struct val_descriptor_set *set)
{
   vk_object_base_finish(&set->base);
   vk_free(&device->alloc, set);
}
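
/* Sets are created one at a time; on failure, every set allocated so far
 * is freed via val_FreeDescriptorSets before the error is returned.
 */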
VkResult val_AllocateDescriptorSets(
    VkDevice                                    _device,
    const VkDescriptorSetAllocateInfo*          pAllocateInfo,
    VkDescriptorSet*                            pDescriptorSets)
{
   VAL_FROM_HANDLE(val_device, device, _device);
   VAL_FROM_HANDLE(val_descriptor_pool, pool, pAllocateInfo->descriptorPool);
   VkResult result = VK_SUCCESS;
   struct val_descriptor_set *set;
   uint32_t i;

   for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      VAL_FROM_HANDLE(val_descriptor_set_layout, layout,
                      pAllocateInfo->pSetLayouts[i]);

      result = val_descriptor_set_create(device, layout, &set);
      if (result != VK_SUCCESS)
         break;

      list_addtail(&set->link, &pool->sets);
      pDescriptorSets[i] = val_descriptor_set_to_handle(set);
   }

   if (result != VK_SUCCESS)
      val_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
                             i, pDescriptorSets);

   return result;
}

VkResult val_FreeDescriptorSets(
    VkDevice                                    _device,
    VkDescriptorPool                            descriptorPool,
    uint32_t                                    count,
    const VkDescriptorSet*                      pDescriptorSets)
{
   VAL_FROM_HANDLE(val_device, device, _device);
   for (uint32_t i = 0; i < count; i++) {
      VAL_FROM_HANDLE(val_descriptor_set, set, pDescriptorSets[i]);

      if (!set)
         continue;
      list_del(&set->link);
      val_descriptor_set_destroy(device, set);
   }
   return VK_SUCCESS;
}
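
/* Writes are processed first, then copies. Each write resolves the
 * destination binding's flat descriptor index from the set layout and
 * fills the descriptors according to the descriptor type.
 */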
void val_UpdateDescriptorSets(
    VkDevice                                    _device,
    uint32_t                                    descriptorWriteCount,
    const VkWriteDescriptorSet*                 pDescriptorWrites,
    uint32_t                                    descriptorCopyCount,
    const VkCopyDescriptorSet*                  pDescriptorCopies)
{
   for (uint32_t i = 0; i < descriptorWriteCount; i++) {
      const VkWriteDescriptorSet *write = &pDescriptorWrites[i];
      VAL_FROM_HANDLE(val_descriptor_set, set, write->dstSet);
      const struct val_descriptor_set_binding_layout *bind_layout =
         &set->layout->binding[write->dstBinding];
      struct val_descriptor *desc =
         &set->descriptors[bind_layout->descriptor_index];
      desc += write->dstArrayElement;

      switch (write->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            VAL_FROM_HANDLE(val_sampler, sampler,
                            write->pImageInfo[j].sampler);

            desc[j] = (struct val_descriptor) {
               .type = VK_DESCRIPTOR_TYPE_SAMPLER,
               .sampler = sampler,
            };
         }
         break;

      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            VAL_FROM_HANDLE(val_image_view, iview,
                            write->pImageInfo[j].imageView);
            VAL_FROM_HANDLE(val_sampler, sampler,
                            write->pImageInfo[j].sampler);

            desc[j].type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
            desc[j].image_view = iview;

            /* If this descriptor has an immutable sampler, we don't want
             * to stomp on it.
             */
            if (sampler)
               desc[j].sampler = sampler;
         }
         break;

      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            VAL_FROM_HANDLE(val_image_view, iview,
                            write->pImageInfo[j].imageView);

            desc[j] = (struct val_descriptor) {
               .type = write->descriptorType,
               .image_view = iview,
            };
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            VAL_FROM_HANDLE(val_buffer_view, bview,
                            write->pTexelBufferView[j]);

            desc[j] = (struct val_descriptor) {
               .type = write->descriptorType,
               .buffer_view = bview,
            };
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            assert(write->pBufferInfo[j].buffer);
            VAL_FROM_HANDLE(val_buffer, buffer, write->pBufferInfo[j].buffer);

            desc[j] = (struct val_descriptor) {
               .type = write->descriptorType,
               .buf.offset = write->pBufferInfo[j].offset,
               .buf.buffer = buffer,
               .buf.range = write->pBufferInfo[j].range,
            };
         }
         break;

      default:
         break;
      }
   }

   for (uint32_t i = 0; i < descriptorCopyCount; i++) {
      const VkCopyDescriptorSet *copy = &pDescriptorCopies[i];
      VAL_FROM_HANDLE(val_descriptor_set, src, copy->srcSet);
      VAL_FROM_HANDLE(val_descriptor_set, dst, copy->dstSet);

      const struct val_descriptor_set_binding_layout *src_layout =
         &src->layout->binding[copy->srcBinding];
      struct val_descriptor *src_desc =
         &src->descriptors[src_layout->descriptor_index];
      src_desc += copy->srcArrayElement;

      const struct val_descriptor_set_binding_layout *dst_layout =
         &dst->layout->binding[copy->dstBinding];
      struct val_descriptor *dst_desc =
         &dst->descriptors[dst_layout->descriptor_index];
      dst_desc += copy->dstArrayElement;

      for (uint32_t j = 0; j < copy->descriptorCount; j++)
         dst_desc[j] = src_desc[j];
   }
}
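
/* The pool does no real suballocation; it only tracks the sets allocated
 * from it in a list so they can be freed on reset/destroy.
 */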
VkResult val_CreateDescriptorPool(
    VkDevice                                    _device,
    const VkDescriptorPoolCreateInfo*           pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorPool*                           pDescriptorPool)
{
   VAL_FROM_HANDLE(val_device, device, _device);
   struct val_descriptor_pool *pool;
   size_t size = sizeof(struct val_descriptor_pool);
   pool = vk_zalloc2(&device->alloc, pAllocator, size, 8,
                     VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!pool)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vk_object_base_init(&device->vk, &pool->base,
                       VK_OBJECT_TYPE_DESCRIPTOR_POOL);
   pool->flags = pCreateInfo->flags;
   list_inithead(&pool->sets);
   *pDescriptorPool = val_descriptor_pool_to_handle(pool);
   return VK_SUCCESS;
}
static void val_reset_descriptor_pool(struct val_device *device,
                                      struct val_descriptor_pool *pool)
{
   struct val_descriptor_set *set, *tmp;
   LIST_FOR_EACH_ENTRY_SAFE(set, tmp, &pool->sets, link) {
      list_del(&set->link);
      vk_free(&device->alloc, set);
   }
}

void val_DestroyDescriptorPool(
    VkDevice                                    _device,
    VkDescriptorPool                            _pool,
    const VkAllocationCallbacks*                pAllocator)
{
   VAL_FROM_HANDLE(val_device, device, _device);
   VAL_FROM_HANDLE(val_descriptor_pool, pool, _pool);

   if (!_pool)
      return;

   val_reset_descriptor_pool(device, pool);
   vk_object_base_finish(&pool->base);
   vk_free2(&device->alloc, pAllocator, pool);
}

VkResult val_ResetDescriptorPool(
    VkDevice                                    _device,
    VkDescriptorPool                            _pool,
    VkDescriptorPoolResetFlags                  flags)
{
   VAL_FROM_HANDLE(val_device, device, _device);
   VAL_FROM_HANDLE(val_descriptor_pool, pool, _pool);

   val_reset_descriptor_pool(device, pool);
   return VK_SUCCESS;
}
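
/* Stub: no per-layout support checks are performed here. */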
void val_GetDescriptorSetLayoutSupport(VkDevice device,
                                       const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
                                       VkDescriptorSetLayoutSupport* pSupport)
{
}