/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */
30 #include "anv_private.h"
/*
 * Descriptor set layouts.
 */
36 VkResult
anv_CreateDescriptorSetLayout(
38 const VkDescriptorSetLayoutCreateInfo
* pCreateInfo
,
39 VkDescriptorSetLayout
* pSetLayout
)
41 ANV_FROM_HANDLE(anv_device
, device
, _device
);
42 struct anv_descriptor_set_layout
*set_layout
;
45 assert(pCreateInfo
->sType
== VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO
);
47 uint32_t immutable_sampler_count
= 0;
48 for (uint32_t b
= 0; b
< pCreateInfo
->count
; b
++) {
49 if (pCreateInfo
->pBinding
[b
].pImmutableSamplers
)
50 immutable_sampler_count
+= pCreateInfo
->pBinding
[b
].arraySize
;
53 size_t size
= sizeof(struct anv_descriptor_set_layout
) +
54 pCreateInfo
->count
* sizeof(set_layout
->binding
[0]) +
55 immutable_sampler_count
* sizeof(struct anv_sampler
*);
57 set_layout
= anv_device_alloc(device
, size
, 8,
58 VK_SYSTEM_ALLOC_TYPE_API_OBJECT
);
60 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
62 /* We just allocate all the samplers at the end of the struct */
63 struct anv_sampler
**samplers
=
64 (struct anv_sampler
**)&set_layout
->binding
[pCreateInfo
->count
];
66 set_layout
->binding_count
= pCreateInfo
->count
;
67 set_layout
->shader_stages
= 0;
70 /* Initialize all binding_layout entries to -1 */
71 memset(set_layout
->binding
, -1,
72 pCreateInfo
->count
* sizeof(set_layout
->binding
[0]));
74 /* Initialize all samplers to 0 */
75 memset(samplers
, 0, immutable_sampler_count
* sizeof(*samplers
));
77 uint32_t sampler_count
[VK_SHADER_STAGE_NUM
] = { 0, };
78 uint32_t surface_count
[VK_SHADER_STAGE_NUM
] = { 0, };
79 uint32_t dynamic_offset_count
= 0;
81 for (uint32_t b
= 0; b
< pCreateInfo
->count
; b
++) {
82 uint32_t array_size
= MAX2(1, pCreateInfo
->pBinding
[b
].arraySize
);
83 set_layout
->binding
[b
].array_size
= array_size
;
84 set_layout
->binding
[b
].descriptor_index
= set_layout
->size
;
85 set_layout
->size
+= array_size
;
87 switch (pCreateInfo
->pBinding
[b
].descriptorType
) {
88 case VK_DESCRIPTOR_TYPE_SAMPLER
:
89 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
90 for_each_bit(s
, pCreateInfo
->pBinding
[b
].stageFlags
) {
91 set_layout
->binding
[b
].stage
[s
].sampler_index
= sampler_count
[s
];
92 sampler_count
[s
] += array_size
;
99 switch (pCreateInfo
->pBinding
[b
].descriptorType
) {
100 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
101 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
:
102 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
103 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER
:
104 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
:
105 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
:
106 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
:
107 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
108 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
109 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
:
110 for_each_bit(s
, pCreateInfo
->pBinding
[b
].stageFlags
) {
111 set_layout
->binding
[b
].stage
[s
].surface_index
= surface_count
[s
];
112 surface_count
[s
] += array_size
;
119 switch (pCreateInfo
->pBinding
[b
].descriptorType
) {
120 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
121 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
122 set_layout
->binding
[b
].dynamic_offset_index
= dynamic_offset_count
;
123 dynamic_offset_count
+= array_size
;
129 if (pCreateInfo
->pBinding
[b
].pImmutableSamplers
) {
130 set_layout
->binding
[b
].immutable_samplers
= samplers
;
131 samplers
+= array_size
;
133 for (uint32_t i
= 0; i
< array_size
; i
++)
134 set_layout
->binding
[b
].immutable_samplers
[i
] =
135 anv_sampler_from_handle(pCreateInfo
->pBinding
[b
].pImmutableSamplers
[i
]);
137 set_layout
->binding
[b
].immutable_samplers
= NULL
;
140 set_layout
->shader_stages
|= pCreateInfo
->pBinding
[b
].stageFlags
;
143 set_layout
->dynamic_offset_count
= dynamic_offset_count
;
145 *pSetLayout
= anv_descriptor_set_layout_to_handle(set_layout
);
150 void anv_DestroyDescriptorSetLayout(
152 VkDescriptorSetLayout _set_layout
)
154 ANV_FROM_HANDLE(anv_device
, device
, _device
);
155 ANV_FROM_HANDLE(anv_descriptor_set_layout
, set_layout
, _set_layout
);
157 anv_device_free(device
, set_layout
);
/*
 * Pipeline layouts.  These have nothing to do with the pipeline.  They are
 * just multiple descriptor set layouts pasted together.
 */
165 VkResult
anv_CreatePipelineLayout(
167 const VkPipelineLayoutCreateInfo
* pCreateInfo
,
168 VkPipelineLayout
* pPipelineLayout
)
170 ANV_FROM_HANDLE(anv_device
, device
, _device
);
171 struct anv_pipeline_layout l
, *layout
;
173 assert(pCreateInfo
->sType
== VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO
);
175 l
.num_sets
= pCreateInfo
->descriptorSetCount
;
177 unsigned dynamic_offset_count
= 0;
179 memset(l
.stage
, 0, sizeof(l
.stage
));
180 for (uint32_t set
= 0; set
< pCreateInfo
->descriptorSetCount
; set
++) {
181 ANV_FROM_HANDLE(anv_descriptor_set_layout
, set_layout
,
182 pCreateInfo
->pSetLayouts
[set
]);
183 l
.set
[set
].layout
= set_layout
;
185 l
.set
[set
].dynamic_offset_start
= dynamic_offset_count
;
186 for (uint32_t b
= 0; b
< set_layout
->binding_count
; b
++) {
187 if (set_layout
->binding
[b
].dynamic_offset_index
>= 0)
188 dynamic_offset_count
+= set_layout
->binding
[b
].array_size
;
191 for (VkShaderStage s
= 0; s
< VK_SHADER_STAGE_NUM
; s
++) {
192 l
.set
[set
].stage
[s
].surface_start
= l
.stage
[s
].surface_count
;
193 l
.set
[set
].stage
[s
].sampler_start
= l
.stage
[s
].sampler_count
;
195 for (uint32_t b
= 0; b
< set_layout
->binding_count
; b
++) {
196 unsigned array_size
= set_layout
->binding
[b
].array_size
;
198 if (set_layout
->binding
[b
].stage
[s
].surface_index
>= 0) {
199 l
.stage
[s
].surface_count
+= array_size
;
201 if (set_layout
->binding
[b
].dynamic_offset_index
>= 0)
202 l
.stage
[s
].has_dynamic_offsets
= true;
205 if (set_layout
->binding
[b
].stage
[s
].sampler_index
>= 0)
206 l
.stage
[s
].sampler_count
+= array_size
;
211 unsigned num_bindings
= 0;
212 for (VkShaderStage s
= 0; s
< VK_SHADER_STAGE_NUM
; s
++)
213 num_bindings
+= l
.stage
[s
].surface_count
+ l
.stage
[s
].sampler_count
;
215 size_t size
= sizeof(*layout
) + num_bindings
* sizeof(layout
->entries
[0]);
217 layout
= anv_device_alloc(device
, size
, 8, VK_SYSTEM_ALLOC_TYPE_API_OBJECT
);
219 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
221 /* Now we can actually build our surface and sampler maps */
222 struct anv_pipeline_binding
*entry
= layout
->entries
;
223 for (VkShaderStage s
= 0; s
< VK_SHADER_STAGE_NUM
; s
++) {
224 l
.stage
[s
].surface_to_descriptor
= entry
;
225 entry
+= l
.stage
[s
].surface_count
;
226 l
.stage
[s
].sampler_to_descriptor
= entry
;
227 entry
+= l
.stage
[s
].sampler_count
;
231 for (uint32_t set
= 0; set
< pCreateInfo
->descriptorSetCount
; set
++) {
232 struct anv_descriptor_set_layout
*set_layout
= l
.set
[set
].layout
;
234 for (uint32_t b
= 0; b
< set_layout
->binding_count
; b
++) {
235 unsigned array_size
= set_layout
->binding
[b
].array_size
;
236 unsigned set_offset
= set_layout
->binding
[b
].descriptor_index
;
238 if (set_layout
->binding
[b
].stage
[s
].surface_index
>= 0) {
239 assert(surface
== l
.set
[set
].stage
[s
].surface_start
+
240 set_layout
->binding
[b
].stage
[s
].surface_index
);
241 for (unsigned i
= 0; i
< array_size
; i
++) {
242 l
.stage
[s
].surface_to_descriptor
[surface
+ i
].set
= set
;
243 l
.stage
[s
].surface_to_descriptor
[surface
+ i
].offset
= set_offset
+ i
;
245 surface
+= array_size
;
248 if (set_layout
->binding
[b
].stage
[s
].sampler_index
>= 0) {
249 assert(sampler
== l
.set
[set
].stage
[s
].sampler_start
+
250 set_layout
->binding
[b
].stage
[s
].sampler_index
);
251 for (unsigned i
= 0; i
< array_size
; i
++) {
252 l
.stage
[s
].sampler_to_descriptor
[sampler
+ i
].set
= set
;
253 l
.stage
[s
].sampler_to_descriptor
[sampler
+ i
].offset
= set_offset
+ i
;
255 sampler
+= array_size
;
261 /* Finally, we're done setting it up, copy into the allocated version */
264 *pPipelineLayout
= anv_pipeline_layout_to_handle(layout
);
269 void anv_DestroyPipelineLayout(
271 VkPipelineLayout _pipelineLayout
)
273 ANV_FROM_HANDLE(anv_device
, device
, _device
);
274 ANV_FROM_HANDLE(anv_pipeline_layout
, pipeline_layout
, _pipelineLayout
);
276 anv_device_free(device
, pipeline_layout
);
/*
 * Descriptor pools.  These are a no-op for now.
 */
283 VkResult
anv_CreateDescriptorPool(
285 const VkDescriptorPoolCreateInfo
* pCreateInfo
,
286 VkDescriptorPool
* pDescriptorPool
)
288 anv_finishme("VkDescriptorPool is a stub");
289 *pDescriptorPool
= (VkDescriptorPool
)1;
293 void anv_DestroyDescriptorPool(
295 VkDescriptorPool _pool
)
297 anv_finishme("VkDescriptorPool is a stub: free the pool's descriptor sets");
300 VkResult
anv_ResetDescriptorPool(
302 VkDescriptorPool descriptorPool
)
304 anv_finishme("VkDescriptorPool is a stub: free the pool's descriptor sets");
309 anv_descriptor_set_create(struct anv_device
*device
,
310 const struct anv_descriptor_set_layout
*layout
,
311 struct anv_descriptor_set
**out_set
)
313 struct anv_descriptor_set
*set
;
314 size_t size
= sizeof(*set
) + layout
->size
* sizeof(set
->descriptors
[0]);
316 set
= anv_device_alloc(device
, size
, 8, VK_SYSTEM_ALLOC_TYPE_API_OBJECT
);
318 return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY
);
320 /* A descriptor set may not be 100% filled. Clear the set so we can can
321 * later detect holes in it.
323 memset(set
, 0, size
);
325 set
->layout
= layout
;
327 /* Go through and fill out immutable samplers if we have any */
328 struct anv_descriptor
*desc
= set
->descriptors
;
329 for (uint32_t b
= 0; b
< layout
->binding_count
; b
++) {
330 if (layout
->binding
[b
].immutable_samplers
) {
331 for (uint32_t i
= 0; i
< layout
->binding
[b
].array_size
; i
++)
332 desc
[i
].sampler
= layout
->binding
[b
].immutable_samplers
[i
];
334 desc
+= layout
->binding
[b
].array_size
;
/**
 * Free a descriptor set previously created by anv_descriptor_set_create.
 * The descriptors array lives in the same allocation as the set.
 */
void
anv_descriptor_set_destroy(struct anv_device *device,
                           struct anv_descriptor_set *set)
{
   anv_device_free(device, set);
}
349 VkResult
anv_AllocDescriptorSets(
351 VkDescriptorPool descriptorPool
,
352 VkDescriptorSetUsage setUsage
,
354 const VkDescriptorSetLayout
* pSetLayouts
,
355 VkDescriptorSet
* pDescriptorSets
)
357 ANV_FROM_HANDLE(anv_device
, device
, _device
);
359 VkResult result
= VK_SUCCESS
;
360 struct anv_descriptor_set
*set
;
363 for (i
= 0; i
< count
; i
++) {
364 ANV_FROM_HANDLE(anv_descriptor_set_layout
, layout
, pSetLayouts
[i
]);
366 result
= anv_descriptor_set_create(device
, layout
, &set
);
367 if (result
!= VK_SUCCESS
)
370 pDescriptorSets
[i
] = anv_descriptor_set_to_handle(set
);
373 if (result
!= VK_SUCCESS
)
374 anv_FreeDescriptorSets(_device
, descriptorPool
, i
, pDescriptorSets
);
379 VkResult
anv_FreeDescriptorSets(
381 VkDescriptorPool descriptorPool
,
383 const VkDescriptorSet
* pDescriptorSets
)
385 ANV_FROM_HANDLE(anv_device
, device
, _device
);
387 for (uint32_t i
= 0; i
< count
; i
++) {
388 ANV_FROM_HANDLE(anv_descriptor_set
, set
, pDescriptorSets
[i
]);
390 anv_descriptor_set_destroy(device
, set
);
396 void anv_UpdateDescriptorSets(
399 const VkWriteDescriptorSet
* pDescriptorWrites
,
401 const VkCopyDescriptorSet
* pDescriptorCopies
)
403 for (uint32_t i
= 0; i
< writeCount
; i
++) {
404 const VkWriteDescriptorSet
*write
= &pDescriptorWrites
[i
];
405 ANV_FROM_HANDLE(anv_descriptor_set
, set
, write
->destSet
);
406 const struct anv_descriptor_set_binding_layout
*bind_layout
=
407 &set
->layout
->binding
[write
->destBinding
];
408 struct anv_descriptor
*desc
=
409 &set
->descriptors
[bind_layout
->descriptor_index
];
411 switch (write
->descriptorType
) {
412 case VK_DESCRIPTOR_TYPE_SAMPLER
:
413 for (uint32_t j
= 0; j
< write
->count
; j
++) {
414 ANV_FROM_HANDLE(anv_sampler
, sampler
,
415 write
->pDescriptors
[j
].sampler
);
417 desc
[j
] = (struct anv_descriptor
) {
418 .type
= VK_DESCRIPTOR_TYPE_SAMPLER
,
424 case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
:
425 for (uint32_t j
= 0; j
< write
->count
; j
++) {
426 ANV_FROM_HANDLE(anv_image_view
, iview
,
427 write
->pDescriptors
[j
].imageView
);
428 ANV_FROM_HANDLE(anv_sampler
, sampler
,
429 write
->pDescriptors
[j
].sampler
);
431 desc
[j
].type
= VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
;
432 desc
[j
].image_view
= iview
;
434 /* If this descriptor has an immutable sampler, we don't want
438 desc
[j
].sampler
= sampler
;
442 case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE
:
443 case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE
:
444 for (uint32_t j
= 0; j
< write
->count
; j
++) {
445 ANV_FROM_HANDLE(anv_image_view
, iview
,
446 write
->pDescriptors
[j
].imageView
);
448 desc
[j
] = (struct anv_descriptor
) {
449 .type
= write
->descriptorType
,
455 case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER
:
456 case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER
:
457 anv_finishme("texel buffers not implemented");
460 case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT
:
461 anv_finishme("input attachments not implemented");
464 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER
:
465 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER
:
466 case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC
:
467 case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC
:
468 for (uint32_t j
= 0; j
< write
->count
; j
++) {
469 assert(write
->pDescriptors
[j
].bufferInfo
.buffer
);
470 ANV_FROM_HANDLE(anv_buffer
, buffer
,
471 write
->pDescriptors
[j
].bufferInfo
.buffer
);
474 desc
[j
] = (struct anv_descriptor
) {
475 .type
= write
->descriptorType
,
477 .offset
= write
->pDescriptors
[j
].bufferInfo
.offset
,
478 .range
= write
->pDescriptors
[j
].bufferInfo
.range
,
481 /* For buffers with dynamic offsets, we use the full possible
482 * range in the surface state and do the actual range-checking
485 if (bind_layout
->dynamic_offset_index
>= 0)
486 desc
[j
].range
= buffer
->size
- desc
[j
].offset
;
494 for (uint32_t i
= 0; i
< copyCount
; i
++) {
495 const VkCopyDescriptorSet
*copy
= &pDescriptorCopies
[i
];
496 ANV_FROM_HANDLE(anv_descriptor_set
, src
, copy
->destSet
);
497 ANV_FROM_HANDLE(anv_descriptor_set
, dest
, copy
->destSet
);
498 for (uint32_t j
= 0; j
< copy
->count
; j
++) {
499 dest
->descriptors
[copy
->destBinding
+ j
] =
500 src
->descriptors
[copy
->srcBinding
+ j
];