/*
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */
#include <assert.h>
#include <stdbool.h>
#include <string.h>
#include "util/mesa-sha1.h"
#include "radv_private.h"
#include "sid.h"
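
/* Note: a radv sampler's hardware state is 4 dwords (16 bytes); every
 * comparison and copy of sampler state in this file moves exactly that
 * 16-byte blob.
 */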
static bool has_equal_immutable_samplers(const VkSampler *samplers, uint32_t count)
{
	if (!samplers)
		return false;
	for(uint32_t i = 1; i < count; ++i) {
		if (memcmp(radv_sampler_from_handle(samplers[0])->state,
		           radv_sampler_from_handle(samplers[i])->state, 16)) {
			return false;
		}
	}
	return true;
}
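
/* Descriptor sizes used throughout this file (in bytes, as written to the
 * descriptor BO):
 *   buffer / texel buffer / sampler   16  (one 4-dword descriptor)
 *   image / input attachment          64  (main descriptor + fmask descriptor)
 *   combined image+sampler            96  (64-byte image part + 32 bytes
 *                                         reserved for the sampler, of which
 *                                         16 are actually written)
 * Dynamic uniform/storage buffers occupy no BO space at all; they live in the
 * set's dynamic_descriptors array instead.
 */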
VkResult radv_CreateDescriptorSetLayout(
	VkDevice                                    _device,
	const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
	const VkAllocationCallbacks*                pAllocator,
	VkDescriptorSetLayout*                      pSetLayout)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	struct radv_descriptor_set_layout *set_layout;

	assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);

	uint32_t max_binding = 0;
	uint32_t immutable_sampler_count = 0;
	for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
		max_binding = MAX2(max_binding, pCreateInfo->pBindings[j].binding);
		if (pCreateInfo->pBindings[j].pImmutableSamplers)
			immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
	}

	uint32_t samplers_offset = sizeof(struct radv_descriptor_set_layout) +
		(max_binding + 1) * sizeof(set_layout->binding[0]);
	size_t size = samplers_offset + immutable_sampler_count * 4 * sizeof(uint32_t);

	set_layout = vk_alloc2(&device->alloc, pAllocator, size, 8,
			       VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
	if (!set_layout)
		return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

	set_layout->flags = pCreateInfo->flags;

	/* We just allocate all the samplers at the end of the struct */
	uint32_t *samplers = (uint32_t*)&set_layout->binding[max_binding + 1];

	set_layout->binding_count = max_binding + 1;
	set_layout->shader_stages = 0;
	set_layout->dynamic_shader_stages = 0;
	set_layout->has_immutable_samplers = false;
	set_layout->size = 0;

	memset(set_layout->binding, 0, size - sizeof(struct radv_descriptor_set_layout));

	uint32_t buffer_count = 0;
	uint32_t dynamic_offset_count = 0;

	for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
		const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[j];
		uint32_t b = binding->binding;
		uint32_t alignment;
		unsigned binding_buffer_count = 0;

		switch (binding->descriptorType) {
		case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
		case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
			assert(!(pCreateInfo->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));
			set_layout->binding[b].dynamic_offset_count = 1;
			set_layout->dynamic_shader_stages |= binding->stageFlags;
			set_layout->binding[b].size = 0;
			binding_buffer_count = 1;
			alignment = 1;
			break;
		case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
		case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
		case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
		case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
			set_layout->binding[b].size = 16;
			binding_buffer_count = 1;
			alignment = 16;
			break;
		case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
		case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
		case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
			/* main descriptor + fmask descriptor */
			set_layout->binding[b].size = 64;
			binding_buffer_count = 1;
			alignment = 32;
			break;
		case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
			/* main descriptor + fmask descriptor + sampler */
			set_layout->binding[b].size = 96;
			binding_buffer_count = 1;
			alignment = 32;
			break;
		case VK_DESCRIPTOR_TYPE_SAMPLER:
			set_layout->binding[b].size = 16;
			alignment = 16;
			break;
		default:
			unreachable("unknown descriptor type\n");
			break;
		}

		set_layout->size = align(set_layout->size, alignment);
		set_layout->binding[b].type = binding->descriptorType;
		set_layout->binding[b].array_size = binding->descriptorCount;
		set_layout->binding[b].offset = set_layout->size;
		set_layout->binding[b].buffer_offset = buffer_count;
		set_layout->binding[b].dynamic_offset_offset = dynamic_offset_count;

		if (binding->pImmutableSamplers) {
			set_layout->binding[b].immutable_samplers_offset = samplers_offset;
			set_layout->binding[b].immutable_samplers_equal =
				has_equal_immutable_samplers(binding->pImmutableSamplers, binding->descriptorCount);
			set_layout->has_immutable_samplers = true;

			for (uint32_t i = 0; i < binding->descriptorCount; i++)
				memcpy(samplers + 4 * i, &radv_sampler_from_handle(binding->pImmutableSamplers[i])->state, 16);

			/* Don't reserve space for the samplers if they're not accessed. */
			if (set_layout->binding[b].immutable_samplers_equal) {
				if (binding->descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
					set_layout->binding[b].size -= 32;
				else if (binding->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER)
					set_layout->binding[b].size -= 16;
			}

			samplers += 4 * binding->descriptorCount;
			samplers_offset += 4 * sizeof(uint32_t) * binding->descriptorCount;
		}

		set_layout->size += binding->descriptorCount * set_layout->binding[b].size;
		buffer_count += binding->descriptorCount * binding_buffer_count;
		dynamic_offset_count += binding->descriptorCount *
			set_layout->binding[b].dynamic_offset_count;
		set_layout->shader_stages |= binding->stageFlags;
	}

	set_layout->buffer_count = buffer_count;
	set_layout->dynamic_offset_count = dynamic_offset_count;

	*pSetLayout = radv_descriptor_set_layout_to_handle(set_layout);

	return VK_SUCCESS;
}
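
/* Worked example (illustrative only): a layout with
 *   binding 0: descriptorCount 2, UNIFORM_BUFFER         -> offset  0, 2 * 16 bytes
 *   binding 1: descriptorCount 3, COMBINED_IMAGE_SAMPLER -> offset 32, 3 * 96 bytes
 * gives set_layout->size == 320: the running size is aligned to 16 before
 * binding 0 and to 32 before binding 1 (32 is already 32-aligned).
 */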
void radv_DestroyDescriptorSetLayout(
	VkDevice                                    _device,
	VkDescriptorSetLayout                       _set_layout,
	const VkAllocationCallbacks*                pAllocator)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	RADV_FROM_HANDLE(radv_descriptor_set_layout, set_layout, _set_layout);

	if (!set_layout)
		return;

	vk_free2(&device->alloc, pAllocator, set_layout);
}
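
/* VK_KHR_maintenance3: dry-run the same size computation as
 * radv_CreateDescriptorSetLayout without allocating anything, rejecting the
 * layout instead of overflowing. align_u64 wrapping to 0 and the division
 * check below are both uint64 overflow guards on the running size.
 */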
void radv_GetDescriptorSetLayoutSupport(VkDevice device,
                                        const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
                                        VkDescriptorSetLayoutSupport* pSupport)
{
	bool supported = true;
	uint64_t size = 0;

	for (uint32_t i = 0; i < pCreateInfo->bindingCount; i++) {
		const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[i];

		if (binding->descriptorCount == 0)
			continue;

		uint64_t descriptor_size = 0;
		uint64_t descriptor_alignment = 1;
		switch (binding->descriptorType) {
		case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
		case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
			break;
		case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
		case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
		case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
		case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
			descriptor_size = 16;
			descriptor_alignment = 16;
			break;
		case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
		case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
		case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
			descriptor_size = 64;
			descriptor_alignment = 32;
			break;
		case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
			/* Match radv_CreateDescriptorSetLayout: when all immutable
			 * samplers are equal, the sampler words are not stored in
			 * the set, so only the 64-byte image part remains. */
			if (has_equal_immutable_samplers(binding->pImmutableSamplers, binding->descriptorCount)) {
				descriptor_size = 64;
			} else {
				descriptor_size = 96;
			}
			descriptor_alignment = 32;
			break;
		case VK_DESCRIPTOR_TYPE_SAMPLER:
			if (!has_equal_immutable_samplers(binding->pImmutableSamplers, binding->descriptorCount)) {
				descriptor_size = 16;
				descriptor_alignment = 16;
			}
			break;
		default:
			unreachable("unknown descriptor type\n");
			break;
		}

		if (size && !align_u64(size, descriptor_alignment)) {
			supported = false;
		}
		size = align_u64(size, descriptor_alignment);

		if (descriptor_size && (UINT64_MAX - size) / descriptor_size < binding->descriptorCount) {
			supported = false;
		}
		size += binding->descriptorCount * descriptor_size;
	}

	pSupport->supported = supported;
}
/*
 * Pipeline layouts.  These have nothing to do with the pipeline.  They are
 * just multiple descriptor set layouts pasted together.
 */
VkResult radv_CreatePipelineLayout(
	VkDevice                                    _device,
	const VkPipelineLayoutCreateInfo*           pCreateInfo,
	const VkAllocationCallbacks*                pAllocator,
	VkPipelineLayout*                           pPipelineLayout)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	struct radv_pipeline_layout *layout;
	struct mesa_sha1 ctx;

	assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);

	layout = vk_alloc2(&device->alloc, pAllocator, sizeof(*layout), 8,
			   VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
	if (layout == NULL)
		return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

	layout->num_sets = pCreateInfo->setLayoutCount;

	unsigned dynamic_offset_count = 0;

	_mesa_sha1_init(&ctx);
	for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {
		RADV_FROM_HANDLE(radv_descriptor_set_layout, set_layout,
				 pCreateInfo->pSetLayouts[set]);
		layout->set[set].layout = set_layout;

		layout->set[set].dynamic_offset_start = dynamic_offset_count;
		for (uint32_t b = 0; b < set_layout->binding_count; b++) {
			dynamic_offset_count += set_layout->binding[b].array_size * set_layout->binding[b].dynamic_offset_count;
			if (set_layout->binding[b].immutable_samplers_offset)
				_mesa_sha1_update(&ctx, radv_immutable_samplers(set_layout, set_layout->binding + b),
				                  set_layout->binding[b].array_size * 4 * sizeof(uint32_t));
		}
		_mesa_sha1_update(&ctx, set_layout->binding,
				  sizeof(set_layout->binding[0]) * set_layout->binding_count);
	}

	layout->dynamic_offset_count = dynamic_offset_count;
	layout->push_constant_size = 0;

	for (unsigned i = 0; i < pCreateInfo->pushConstantRangeCount; ++i) {
		const VkPushConstantRange *range = pCreateInfo->pPushConstantRanges + i;
		layout->push_constant_size = MAX2(layout->push_constant_size,
						  range->offset + range->size);
	}

	layout->push_constant_size = align(layout->push_constant_size, 16);
	_mesa_sha1_update(&ctx, &layout->push_constant_size,
			  sizeof(layout->push_constant_size));
	_mesa_sha1_final(&ctx, layout->sha1);

	*pPipelineLayout = radv_pipeline_layout_to_handle(layout);

	return VK_SUCCESS;
}
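
/* The sha1 computed above fingerprints the layout contents (binding metadata,
 * immutable sampler state and push constant size), so identical layouts hash
 * identically; other parts of the driver can key caches off layout->sha1.
 */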
void radv_DestroyPipelineLayout(
	VkDevice                                    _device,
	VkPipelineLayout                            _pipelineLayout,
	const VkAllocationCallbacks*                pAllocator)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	RADV_FROM_HANDLE(radv_pipeline_layout, pipeline_layout, _pipelineLayout);

	if (!pipeline_layout)
		return;

	vk_free2(&device->alloc, pAllocator, pipeline_layout);
}
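
/* Descriptor set allocation. Sets are suballocated from the pool's BO. Pools
 * created without VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT use a pure
 * bump allocator (host_memory_base/ptr/end) and never free individual sets;
 * freeable pools instead keep a sorted entries[] array that is first-fit
 * scanned for gaps when linear allocation no longer fits.
 */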
static VkResult
radv_descriptor_set_create(struct radv_device *device,
			   struct radv_descriptor_pool *pool,
			   const struct radv_descriptor_set_layout *layout,
			   struct radv_descriptor_set **out_set)
{
	struct radv_descriptor_set *set;
	unsigned range_offset = sizeof(struct radv_descriptor_set) +
		sizeof(struct radeon_winsys_bo *) * layout->buffer_count;
	unsigned mem_size = range_offset +
		sizeof(struct radv_descriptor_range) * layout->dynamic_offset_count;

	if (pool->host_memory_base) {
		if (pool->host_memory_end - pool->host_memory_ptr < mem_size)
			return vk_error(VK_ERROR_OUT_OF_POOL_MEMORY_KHR);

		set = (struct radv_descriptor_set*)pool->host_memory_ptr;
		pool->host_memory_ptr += mem_size;
	} else {
		set = vk_alloc2(&device->alloc, NULL, mem_size, 8,
				VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);

		if (!set)
			return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
	}

	memset(set, 0, mem_size);

	if (layout->dynamic_offset_count) {
		set->dynamic_descriptors = (struct radv_descriptor_range*)((uint8_t*)set + range_offset);
	}

	set->layout = layout;
	if (layout->size) {
		uint32_t layout_size = align_u32(layout->size, 32);
		set->size = layout->size;

		if (!pool->host_memory_base && pool->entry_count == pool->max_entry_count) {
			vk_free2(&device->alloc, NULL, set);
			return vk_error(VK_ERROR_OUT_OF_POOL_MEMORY_KHR);
		}

		/* try to allocate linearly first, so that we don't spend
		 * time looking for gaps if the app only allocates &
		 * resets via the pool. */
		if (pool->current_offset + layout_size <= pool->size) {
			set->bo = pool->bo;
			set->mapped_ptr = (uint32_t*)(pool->mapped_ptr + pool->current_offset);
			set->va = radv_buffer_get_va(set->bo) + pool->current_offset;
			if (!pool->host_memory_base) {
				pool->entries[pool->entry_count].offset = pool->current_offset;
				pool->entries[pool->entry_count].size = layout_size;
				pool->entries[pool->entry_count].set = set;
				pool->entry_count++;
			}
			pool->current_offset += layout_size;
		} else if (!pool->host_memory_base) {
			uint64_t offset = 0;
			int index;

			for (index = 0; index < pool->entry_count; ++index) {
				if (pool->entries[index].offset - offset >= layout_size)
					break;
				offset = pool->entries[index].offset + pool->entries[index].size;
			}

			if (pool->size - offset < layout_size) {
				vk_free2(&device->alloc, NULL, set);
				return vk_error(VK_ERROR_OUT_OF_POOL_MEMORY_KHR);
			}

			set->bo = pool->bo;
			set->mapped_ptr = (uint32_t*)(pool->mapped_ptr + offset);
			set->va = radv_buffer_get_va(set->bo) + offset;
			memmove(&pool->entries[index + 1], &pool->entries[index],
				sizeof(pool->entries[0]) * (pool->entry_count - index));
			pool->entries[index].offset = offset;
			pool->entries[index].size = layout_size;
			pool->entries[index].set = set;
			pool->entry_count++;
		} else
			return vk_error(VK_ERROR_OUT_OF_POOL_MEMORY_KHR);
	}

	if (layout->has_immutable_samplers) {
		for (unsigned i = 0; i < layout->binding_count; ++i) {
			if (!layout->binding[i].immutable_samplers_offset ||
			    layout->binding[i].immutable_samplers_equal)
				continue;

			unsigned offset = layout->binding[i].offset / 4;
			if (layout->binding[i].type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
				offset += 16;

			const uint32_t *samplers = (const uint32_t*)((const char*)layout + layout->binding[i].immutable_samplers_offset);
			for (unsigned j = 0; j < layout->binding[i].array_size; ++j) {
				memcpy(set->mapped_ptr + offset, samplers + 4 * j, 16);
				offset += layout->binding[i].size / 4;
			}
		}
	}

	*out_set = set;
	return VK_SUCCESS;
}
static void
radv_descriptor_set_destroy(struct radv_device *device,
			    struct radv_descriptor_pool *pool,
			    struct radv_descriptor_set *set,
			    bool free_bo)
{
	assert(!pool->host_memory_base);

	if (free_bo && set->size && !pool->host_memory_base) {
		uint32_t offset = (uint8_t*)set->mapped_ptr - pool->mapped_ptr;
		for (int i = 0; i < pool->entry_count; ++i) {
			if (pool->entries[i].offset == offset) {
				memmove(&pool->entries[i], &pool->entries[i+1],
					sizeof(pool->entries[i]) * (pool->entry_count - i - 1));
				--pool->entry_count;
				break;
			}
		}
	}

	vk_free2(&device->alloc, NULL, set);
}
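
/* Pool sizing below is conservative: bo_count counts one winsys BO pointer per
 * non-sampler descriptor and range_count one radv_descriptor_range per dynamic
 * buffer; both only matter for the host-memory (non-freeable) path, where the
 * per-set structs are carved out of the pool allocation itself.
 */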
VkResult radv_CreateDescriptorPool(
	VkDevice                                    _device,
	const VkDescriptorPoolCreateInfo*           pCreateInfo,
	const VkAllocationCallbacks*                pAllocator,
	VkDescriptorPool*                           pDescriptorPool)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	struct radv_descriptor_pool *pool;
	int size = sizeof(struct radv_descriptor_pool);
	uint64_t bo_size = 0, bo_count = 0, range_count = 0;

	for (unsigned i = 0; i < pCreateInfo->poolSizeCount; ++i) {
		if (pCreateInfo->pPoolSizes[i].type != VK_DESCRIPTOR_TYPE_SAMPLER)
			bo_count += pCreateInfo->pPoolSizes[i].descriptorCount;

		switch(pCreateInfo->pPoolSizes[i].type) {
		case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
		case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
			range_count += pCreateInfo->pPoolSizes[i].descriptorCount;
			break;
		case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
		case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
		case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
		case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
		case VK_DESCRIPTOR_TYPE_SAMPLER:
			/* 32 as we may need to align for images */
			bo_size += 32 * pCreateInfo->pPoolSizes[i].descriptorCount;
			break;
		case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
		case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
		case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
			bo_size += 64 * pCreateInfo->pPoolSizes[i].descriptorCount;
			break;
		case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
			bo_size += 96 * pCreateInfo->pPoolSizes[i].descriptorCount;
			break;
		default:
			unreachable("unknown descriptor type\n");
			break;
		}
	}

	if (!(pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT)) {
		uint64_t host_size = pCreateInfo->maxSets * sizeof(struct radv_descriptor_set);
		host_size += sizeof(struct radeon_winsys_bo*) * bo_count;
		host_size += sizeof(struct radv_descriptor_range) * range_count;
		size += host_size;
	} else {
		size += sizeof(struct radv_descriptor_pool_entry) * pCreateInfo->maxSets;
	}

	pool = vk_alloc2(&device->alloc, pAllocator, size, 8,
			 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
	if (!pool)
		return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

	memset(pool, 0, sizeof(*pool));

	if (!(pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT)) {
		pool->host_memory_base = (uint8_t*)pool + sizeof(struct radv_descriptor_pool);
		pool->host_memory_ptr = pool->host_memory_base;
		pool->host_memory_end = (uint8_t*)pool + size;
	}

	if (bo_size) {
		pool->bo = device->ws->buffer_create(device->ws, bo_size, 32,
						     RADEON_DOMAIN_VRAM,
						     RADEON_FLAG_NO_INTERPROCESS_SHARING |
						     RADEON_FLAG_READ_ONLY);
		pool->mapped_ptr = (uint8_t*)device->ws->buffer_map(pool->bo);
	}
	pool->size = bo_size;
	pool->max_entry_count = pCreateInfo->maxSets;

	*pDescriptorPool = radv_descriptor_pool_to_handle(pool);
	return VK_SUCCESS;
}
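
/* Sizing sketch (illustrative): maxSets = 2 with one pool size of
 * { COMBINED_IMAGE_SAMPLER, 4 } yields bo_size = 4 * 96 = 384 bytes of GPU
 * memory, bo_count = 4 and range_count = 0; without the FREE_DESCRIPTOR_SET
 * bit the host allocation additionally reserves 2 radv_descriptor_set structs
 * and 4 BO pointers.
 */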
void radv_DestroyDescriptorPool(
	VkDevice                                    _device,
	VkDescriptorPool                            _pool,
	const VkAllocationCallbacks*                pAllocator)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	RADV_FROM_HANDLE(radv_descriptor_pool, pool, _pool);

	if (!pool)
		return;

	if (!pool->host_memory_base) {
		for(int i = 0; i < pool->entry_count; ++i) {
			radv_descriptor_set_destroy(device, pool, pool->entries[i].set, false);
		}
	}

	if (pool->bo)
		device->ws->buffer_destroy(pool->bo);
	vk_free2(&device->alloc, pAllocator, pool);
}
VkResult radv_ResetDescriptorPool(
	VkDevice                                    _device,
	VkDescriptorPool                            descriptorPool,
	VkDescriptorPoolResetFlags                  flags)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	RADV_FROM_HANDLE(radv_descriptor_pool, pool, descriptorPool);

	if (!pool->host_memory_base) {
		for(int i = 0; i < pool->entry_count; ++i) {
			radv_descriptor_set_destroy(device, pool, pool->entries[i].set, false);
		}
		pool->entry_count = 0;
	}

	pool->current_offset = 0;
	pool->host_memory_ptr = pool->host_memory_base;

	return VK_SUCCESS;
}
VkResult radv_AllocateDescriptorSets(
	VkDevice                                    _device,
	const VkDescriptorSetAllocateInfo*          pAllocateInfo,
	VkDescriptorSet*                            pDescriptorSets)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	RADV_FROM_HANDLE(radv_descriptor_pool, pool, pAllocateInfo->descriptorPool);

	VkResult result = VK_SUCCESS;
	uint32_t i;
	struct radv_descriptor_set *set = NULL;

	/* allocate a set of buffers for each shader to contain descriptors */
	for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
		RADV_FROM_HANDLE(radv_descriptor_set_layout, layout,
				 pAllocateInfo->pSetLayouts[i]);

		assert(!(layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));

		result = radv_descriptor_set_create(device, pool, layout, &set);
		if (result != VK_SUCCESS)
			break;

		pDescriptorSets[i] = radv_descriptor_set_to_handle(set);
	}

	if (result != VK_SUCCESS)
		radv_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
					i, pDescriptorSets);

	return result;
}
VkResult radv_FreeDescriptorSets(
	VkDevice                                    _device,
	VkDescriptorPool                            descriptorPool,
	uint32_t                                    count,
	const VkDescriptorSet*                      pDescriptorSets)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	RADV_FROM_HANDLE(radv_descriptor_pool, pool, descriptorPool);

	for (uint32_t i = 0; i < count; i++) {
		RADV_FROM_HANDLE(radv_descriptor_set, set, pDescriptorSets[i]);

		if (set && !pool->host_memory_base)
			radv_descriptor_set_destroy(device, pool, set, true);
	}
	return VK_SUCCESS;
}
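
/* The write_* helpers below fill one descriptor each. When they run on behalf
 * of push descriptors (cmd_buffer != NULL) the referenced BO is added straight
 * to the command stream; otherwise it is recorded in the set's buffer list so
 * it can be added when the set is bound.
 */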
static void write_texel_buffer_descriptor(struct radv_device *device,
					  struct radv_cmd_buffer *cmd_buffer,
					  unsigned *dst,
					  struct radeon_winsys_bo **buffer_list,
					  const VkBufferView _buffer_view)
{
	RADV_FROM_HANDLE(radv_buffer_view, buffer_view, _buffer_view);

	memcpy(dst, buffer_view->state, 4 * 4);

	if (cmd_buffer)
		radv_cs_add_buffer(device->ws, cmd_buffer->cs, buffer_view->bo, 7);
	else
		*buffer_list = buffer_view->bo;
}
static void write_buffer_descriptor(struct radv_device *device,
				    struct radv_cmd_buffer *cmd_buffer,
				    unsigned *dst,
				    struct radeon_winsys_bo **buffer_list,
				    const VkDescriptorBufferInfo *buffer_info)
{
	RADV_FROM_HANDLE(radv_buffer, buffer, buffer_info->buffer);
	uint64_t va = radv_buffer_get_va(buffer->bo);
	uint32_t range = buffer_info->range;

	if (buffer_info->range == VK_WHOLE_SIZE)
		range = buffer->size - buffer_info->offset;

	va += buffer_info->offset + buffer->offset;
	dst[0] = va;
	dst[1] = S_008F04_BASE_ADDRESS_HI(va >> 32);
	dst[2] = range;
	dst[3] = S_008F0C_DST_SEL_X(V_008F0C_SQ_SEL_X) |
		 S_008F0C_DST_SEL_Y(V_008F0C_SQ_SEL_Y) |
		 S_008F0C_DST_SEL_Z(V_008F0C_SQ_SEL_Z) |
		 S_008F0C_DST_SEL_W(V_008F0C_SQ_SEL_W) |
		 S_008F0C_NUM_FORMAT(V_008F0C_BUF_NUM_FORMAT_FLOAT) |
		 S_008F0C_DATA_FORMAT(V_008F0C_BUF_DATA_FORMAT_32);

	if (cmd_buffer)
		radv_cs_add_buffer(device->ws, cmd_buffer->cs, buffer->bo, 7);
	else
		*buffer_list = buffer->bo;
}
static void write_dynamic_buffer_descriptor(struct radv_device *device,
					    struct radv_descriptor_range *range,
					    struct radeon_winsys_bo **buffer_list,
					    const VkDescriptorBufferInfo *buffer_info)
{
	RADV_FROM_HANDLE(radv_buffer, buffer, buffer_info->buffer);
	uint64_t va = radv_buffer_get_va(buffer->bo);
	unsigned size = buffer_info->range;

	if (buffer_info->range == VK_WHOLE_SIZE)
		size = buffer->size - buffer_info->offset;

	va += buffer_info->offset + buffer->offset;
	range->va = va;
	range->size = size;

	*buffer_list = buffer->bo;
}
static void
write_image_descriptor(struct radv_device *device,
		       struct radv_cmd_buffer *cmd_buffer,
		       unsigned *dst,
		       struct radeon_winsys_bo **buffer_list,
		       VkDescriptorType descriptor_type,
		       const VkDescriptorImageInfo *image_info)
{
	RADV_FROM_HANDLE(radv_image_view, iview, image_info->imageView);
	uint32_t *descriptor;

	if (descriptor_type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) {
		descriptor = iview->storage_descriptor;
	} else {
		descriptor = iview->descriptor;
	}

	memcpy(dst, descriptor, 16 * 4);

	if (cmd_buffer)
		radv_cs_add_buffer(device->ws, cmd_buffer->cs, iview->bo, 7);
	else
		*buffer_list = iview->bo;
}
static void
write_combined_image_sampler_descriptor(struct radv_device *device,
					struct radv_cmd_buffer *cmd_buffer,
					unsigned *dst,
					struct radeon_winsys_bo **buffer_list,
					VkDescriptorType descriptor_type,
					const VkDescriptorImageInfo *image_info,
					bool has_sampler)
{
	RADV_FROM_HANDLE(radv_sampler, sampler, image_info->sampler);

	write_image_descriptor(device, cmd_buffer, dst, buffer_list, descriptor_type, image_info);
	/* copy over sampler state */
	if (has_sampler)
		memcpy(dst + 16, sampler->state, 16);
}
static void
write_sampler_descriptor(struct radv_device *device,
			 unsigned *dst,
			 const VkDescriptorImageInfo *image_info)
{
	RADV_FROM_HANDLE(radv_sampler, sampler, image_info->sampler);

	memcpy(dst, sampler->state, 16);
}
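
/* radv_update_descriptor_sets handles both vkUpdateDescriptorSets (cmd_buffer
 * == NULL, dstSetOverride == VK_NULL_HANDLE) and the push-descriptor path,
 * which passes the command buffer plus an override set to write into instead
 * of writeset->dstSet.
 */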
void radv_update_descriptor_sets(
	struct radv_device*                         device,
	struct radv_cmd_buffer*                     cmd_buffer,
	VkDescriptorSet                             dstSetOverride,
	uint32_t                                    descriptorWriteCount,
	const VkWriteDescriptorSet*                 pDescriptorWrites,
	uint32_t                                    descriptorCopyCount,
	const VkCopyDescriptorSet*                  pDescriptorCopies)
{
	uint32_t i, j;

	for (i = 0; i < descriptorWriteCount; i++) {
		const VkWriteDescriptorSet *writeset = &pDescriptorWrites[i];
		RADV_FROM_HANDLE(radv_descriptor_set, set,
		                 dstSetOverride ? dstSetOverride : writeset->dstSet);
		const struct radv_descriptor_set_binding_layout *binding_layout =
			set->layout->binding + writeset->dstBinding;
		uint32_t *ptr = set->mapped_ptr;
		struct radeon_winsys_bo **buffer_list = set->descriptors;
		/* Immutable samplers are not copied into push descriptors when they are
		 * allocated, so if we are writing push descriptors we have to copy the
		 * immutable samplers into them now.
		 */
		const bool copy_immutable_samplers = cmd_buffer &&
			binding_layout->immutable_samplers_offset && !binding_layout->immutable_samplers_equal;
		const uint32_t *samplers = radv_immutable_samplers(set->layout, binding_layout);

		ptr += binding_layout->offset / 4;
		ptr += binding_layout->size * writeset->dstArrayElement / 4;
		buffer_list += binding_layout->buffer_offset;
		buffer_list += writeset->dstArrayElement;
		for (j = 0; j < writeset->descriptorCount; ++j) {
			switch(writeset->descriptorType) {
			case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
			case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
				unsigned idx = writeset->dstArrayElement + j;
				idx += binding_layout->dynamic_offset_offset;
				assert(!(set->layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));
				write_dynamic_buffer_descriptor(device, set->dynamic_descriptors + idx,
								buffer_list, writeset->pBufferInfo + j);
				break;
			}
			case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
			case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
				write_buffer_descriptor(device, cmd_buffer, ptr, buffer_list,
							writeset->pBufferInfo + j);
				break;
			case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
			case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
				write_texel_buffer_descriptor(device, cmd_buffer, ptr, buffer_list,
							      writeset->pTexelBufferView[j]);
				break;
			case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
			case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
			case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
				write_image_descriptor(device, cmd_buffer, ptr, buffer_list,
						       writeset->descriptorType,
						       writeset->pImageInfo + j);
				break;
			case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
				write_combined_image_sampler_descriptor(device, cmd_buffer, ptr, buffer_list,
									writeset->descriptorType,
									writeset->pImageInfo + j,
									!binding_layout->immutable_samplers_offset);
				if (copy_immutable_samplers) {
					const unsigned idx = writeset->dstArrayElement + j;
					memcpy(ptr + 16, samplers + 4 * idx, 16);
				}
				break;
			case VK_DESCRIPTOR_TYPE_SAMPLER:
				if (!binding_layout->immutable_samplers_offset) {
					write_sampler_descriptor(device, ptr,
								 writeset->pImageInfo + j);
				} else if (copy_immutable_samplers) {
					unsigned idx = writeset->dstArrayElement + j;
					memcpy(ptr, samplers + 4 * idx, 16);
				}
				break;
			default:
				unreachable("unimplemented descriptor type");
				break;
			}
			ptr += binding_layout->size / 4;
			++buffer_list;
		}
	}

	for (i = 0; i < descriptorCopyCount; i++) {
		const VkCopyDescriptorSet *copyset = &pDescriptorCopies[i];
		RADV_FROM_HANDLE(radv_descriptor_set, src_set,
		                 copyset->srcSet);
		RADV_FROM_HANDLE(radv_descriptor_set, dst_set,
		                 copyset->dstSet);
		const struct radv_descriptor_set_binding_layout *src_binding_layout =
			src_set->layout->binding + copyset->srcBinding;
		const struct radv_descriptor_set_binding_layout *dst_binding_layout =
			dst_set->layout->binding + copyset->dstBinding;
		uint32_t *src_ptr = src_set->mapped_ptr;
		uint32_t *dst_ptr = dst_set->mapped_ptr;
		struct radeon_winsys_bo **src_buffer_list = src_set->descriptors;
		struct radeon_winsys_bo **dst_buffer_list = dst_set->descriptors;

		src_ptr += src_binding_layout->offset / 4;
		dst_ptr += dst_binding_layout->offset / 4;

		src_ptr += src_binding_layout->size * copyset->srcArrayElement / 4;
		dst_ptr += dst_binding_layout->size * copyset->dstArrayElement / 4;

		src_buffer_list += src_binding_layout->buffer_offset;
		src_buffer_list += copyset->srcArrayElement;

		dst_buffer_list += dst_binding_layout->buffer_offset;
		dst_buffer_list += copyset->dstArrayElement;

		for (j = 0; j < copyset->descriptorCount; ++j) {
			switch (src_binding_layout->type) {
			case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
			case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
				unsigned src_idx = copyset->srcArrayElement + j;
				unsigned dst_idx = copyset->dstArrayElement + j;
				struct radv_descriptor_range *src_range, *dst_range;
				src_idx += src_binding_layout->dynamic_offset_offset;
				dst_idx += dst_binding_layout->dynamic_offset_offset;

				src_range = src_set->dynamic_descriptors + src_idx;
				dst_range = dst_set->dynamic_descriptors + dst_idx;
				*dst_range = *src_range;
				break;
			}
			default:
				memcpy(dst_ptr, src_ptr, src_binding_layout->size);
			}
			src_ptr += src_binding_layout->size / 4;
			dst_ptr += dst_binding_layout->size / 4;
			dst_buffer_list[j] = src_buffer_list[j];
		}
	}
}
void radv_UpdateDescriptorSets(
	VkDevice                                    _device,
	uint32_t                                    descriptorWriteCount,
	const VkWriteDescriptorSet*                 pDescriptorWrites,
	uint32_t                                    descriptorCopyCount,
	const VkCopyDescriptorSet*                  pDescriptorCopies)
{
	RADV_FROM_HANDLE(radv_device, device, _device);

	radv_update_descriptor_sets(device, NULL, VK_NULL_HANDLE, descriptorWriteCount, pDescriptorWrites,
				    descriptorCopyCount, pDescriptorCopies);
}
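
/* Descriptor update templates precompute, per template entry, the source
 * offset/stride into the application-provided pData blob and the destination
 * offset/stride into the set (or into dynamic_descriptors for dynamic
 * buffers), so applying a template is a straight strided copy loop.
 */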
VkResult radv_CreateDescriptorUpdateTemplate(VkDevice _device,
					     const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
					     const VkAllocationCallbacks *pAllocator,
					     VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	RADV_FROM_HANDLE(radv_descriptor_set_layout, set_layout, pCreateInfo->descriptorSetLayout);
	const uint32_t entry_count = pCreateInfo->descriptorUpdateEntryCount;
	const size_t size = sizeof(struct radv_descriptor_update_template) +
		sizeof(struct radv_descriptor_update_template_entry) * entry_count;
	struct radv_descriptor_update_template *templ;
	uint32_t i;

	templ = vk_alloc2(&device->alloc, pAllocator, size, 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
	if (!templ)
		return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

	templ->entry_count = entry_count;
	templ->bind_point = pCreateInfo->pipelineBindPoint;

	for (i = 0; i < entry_count; i++) {
		const VkDescriptorUpdateTemplateEntryKHR *entry = &pCreateInfo->pDescriptorUpdateEntries[i];
		const struct radv_descriptor_set_binding_layout *binding_layout =
			set_layout->binding + entry->dstBinding;
		const uint32_t buffer_offset = binding_layout->buffer_offset + entry->dstArrayElement;
		const uint32_t *immutable_samplers = NULL;
		uint32_t dst_offset;
		uint32_t dst_stride;

		/* dst_offset is an offset into dynamic_descriptors when the descriptor
		   is dynamic, and an offset into mapped_ptr otherwise */
		switch (entry->descriptorType) {
		case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
		case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
			assert(pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR);
			dst_offset = binding_layout->dynamic_offset_offset + entry->dstArrayElement;
			dst_stride = 0; /* Not used */
			break;
		default:
			switch (entry->descriptorType) {
			case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
			case VK_DESCRIPTOR_TYPE_SAMPLER:
				/* Immutable samplers are copied into push descriptors when they are pushed */
				if (pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR &&
				    binding_layout->immutable_samplers_offset && !binding_layout->immutable_samplers_equal) {
					immutable_samplers = radv_immutable_samplers(set_layout, binding_layout) + entry->dstArrayElement * 4;
				}
				break;
			default:
				break;
			}
			dst_offset = binding_layout->offset / 4 + binding_layout->size * entry->dstArrayElement / 4;
			dst_stride = binding_layout->size / 4;
			break;
		}

		templ->entry[i] = (struct radv_descriptor_update_template_entry) {
			.descriptor_type = entry->descriptorType,
			.descriptor_count = entry->descriptorCount,
			.src_offset = entry->offset,
			.src_stride = entry->stride,
			.dst_offset = dst_offset,
			.dst_stride = dst_stride,
			.buffer_offset = buffer_offset,
			.has_sampler = !binding_layout->immutable_samplers_offset,
			.immutable_samplers = immutable_samplers
		};
	}

	*pDescriptorUpdateTemplate = radv_descriptor_update_template_to_handle(templ);
	return VK_SUCCESS;
}
void radv_DestroyDescriptorUpdateTemplate(VkDevice _device,
					  VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
					  const VkAllocationCallbacks *pAllocator)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	RADV_FROM_HANDLE(radv_descriptor_update_template, templ, descriptorUpdateTemplate);

	if (!templ)
		return;

	vk_free2(&device->alloc, pAllocator, templ);
}
void radv_update_descriptor_set_with_template(struct radv_device *device,
					      struct radv_cmd_buffer *cmd_buffer,
					      struct radv_descriptor_set *set,
					      VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
					      const void *pData)
{
	RADV_FROM_HANDLE(radv_descriptor_update_template, templ, descriptorUpdateTemplate);
	uint32_t i;

	for (i = 0; i < templ->entry_count; ++i) {
		struct radeon_winsys_bo **buffer_list = set->descriptors + templ->entry[i].buffer_offset;
		uint32_t *pDst = set->mapped_ptr + templ->entry[i].dst_offset;
		const uint8_t *pSrc = ((const uint8_t *) pData) + templ->entry[i].src_offset;
		uint32_t j;

		for (j = 0; j < templ->entry[i].descriptor_count; ++j) {
			switch (templ->entry[i].descriptor_type) {
			case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
			case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
				const unsigned idx = templ->entry[i].dst_offset + j;
				assert(!(set->layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));
				write_dynamic_buffer_descriptor(device, set->dynamic_descriptors + idx,
								buffer_list, (struct VkDescriptorBufferInfo *) pSrc);
				break;
			}
			case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
			case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
				write_buffer_descriptor(device, cmd_buffer, pDst, buffer_list,
							(struct VkDescriptorBufferInfo *) pSrc);
				break;
			case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
			case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
				write_texel_buffer_descriptor(device, cmd_buffer, pDst, buffer_list,
							      *(VkBufferView *) pSrc);
				break;
			case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
			case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
			case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
				write_image_descriptor(device, cmd_buffer, pDst, buffer_list,
						       templ->entry[i].descriptor_type,
						       (struct VkDescriptorImageInfo *) pSrc);
				break;
			case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
				write_combined_image_sampler_descriptor(device, cmd_buffer, pDst, buffer_list,
									templ->entry[i].descriptor_type,
									(struct VkDescriptorImageInfo *) pSrc,
									templ->entry[i].has_sampler);
				if (templ->entry[i].immutable_samplers)
					memcpy(pDst + 16, templ->entry[i].immutable_samplers + 4 * j, 16);
				break;
			case VK_DESCRIPTOR_TYPE_SAMPLER:
				if (templ->entry[i].has_sampler)
					write_sampler_descriptor(device, pDst,
								 (struct VkDescriptorImageInfo *) pSrc);
				else if (templ->entry[i].immutable_samplers)
					memcpy(pDst, templ->entry[i].immutable_samplers + 4 * j, 16);
				break;
			default:
				unreachable("unimplemented descriptor type");
				break;
			}
			pSrc += templ->entry[i].src_stride;
			pDst += templ->entry[i].dst_stride;
			++buffer_list;
		}
	}
}
void radv_UpdateDescriptorSetWithTemplate(VkDevice _device,
					  VkDescriptorSet descriptorSet,
					  VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
					  const void *pData)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	RADV_FROM_HANDLE(radv_descriptor_set, set, descriptorSet);

	radv_update_descriptor_set_with_template(device, NULL, set, descriptorUpdateTemplate, pData);
}
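
/* radv does not support sampler Y'CbCr conversion, so conversion objects are
 * created as NULL handles and destruction is a no-op.
 */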
VkResult radv_CreateSamplerYcbcrConversion(VkDevice device,
					   const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
					   const VkAllocationCallbacks* pAllocator,
					   VkSamplerYcbcrConversion* pYcbcrConversion)
{
	*pYcbcrConversion = VK_NULL_HANDLE;
	return VK_SUCCESS;
}


void radv_DestroySamplerYcbcrConversion(VkDevice device,
					VkSamplerYcbcrConversion ycbcrConversion,
					const VkAllocationCallbacks* pAllocator)
{
	/* Do nothing. */
}