/*
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */
#include <assert.h>
#include <stdbool.h>
#include <string.h>
#include "util/mesa-sha1.h"
#include "radv_private.h"
#include "sid.h"
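
/*
 * Descriptor set handling for RADV: set layouts, pipeline layouts,
 * descriptor pools, set allocation, vkUpdateDescriptorSets and
 * VK_KHR_descriptor_update_template. Descriptors are written directly,
 * in the hardware format, into a mapped buffer object owned by the pool.
 */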

VkResult radv_CreateDescriptorSetLayout(
	VkDevice                                    _device,
	const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
	const VkAllocationCallbacks*                pAllocator,
	VkDescriptorSetLayout*                      pSetLayout)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	struct radv_descriptor_set_layout *set_layout;

	assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);

	uint32_t max_binding = 0;
	uint32_t immutable_sampler_count = 0;
	for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
		max_binding = MAX2(max_binding, pCreateInfo->pBindings[j].binding);
		if (pCreateInfo->pBindings[j].pImmutableSamplers)
			immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
	}

	uint32_t samplers_offset = sizeof(struct radv_descriptor_set_layout) +
		(max_binding + 1) * sizeof(set_layout->binding[0]);
	size_t size = samplers_offset + immutable_sampler_count * 4 * sizeof(uint32_t);

	set_layout = vk_alloc2(&device->alloc, pAllocator, size, 8,
			       VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
	if (!set_layout)
		return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

	set_layout->flags = pCreateInfo->flags;

	/* We just allocate all the samplers at the end of the struct */
	uint32_t *samplers = (uint32_t*)&set_layout->binding[max_binding + 1];

	set_layout->binding_count = max_binding + 1;
	set_layout->shader_stages = 0;
	set_layout->dynamic_shader_stages = 0;
	set_layout->has_immutable_samplers = false;

	memset(set_layout->binding, 0, size - sizeof(struct radv_descriptor_set_layout));

	uint32_t buffer_count = 0;
	uint32_t dynamic_offset_count = 0;

	for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
		const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[j];
		uint32_t b = binding->binding;
		uint32_t alignment;
		unsigned binding_buffer_count = 0;

		switch (binding->descriptorType) {
		case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
		case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
			assert(!(pCreateInfo->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));
			set_layout->binding[b].dynamic_offset_count = 1;
			set_layout->dynamic_shader_stages |= binding->stageFlags;
			set_layout->binding[b].size = 0;
			binding_buffer_count = 1;
			alignment = 1;
			break;
		case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
		case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
		case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
		case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
			set_layout->binding[b].size = 16;
			binding_buffer_count = 1;
			alignment = 16;
			break;
		case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
		case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
		case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
			/* main descriptor + fmask descriptor */
			set_layout->binding[b].size = 64;
			binding_buffer_count = 1;
			alignment = 32;
			break;
		case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
			/* main descriptor + fmask descriptor + sampler */
			set_layout->binding[b].size = 96;
			binding_buffer_count = 1;
			alignment = 32;
			break;
		case VK_DESCRIPTOR_TYPE_SAMPLER:
			set_layout->binding[b].size = 16;
			alignment = 16;
			break;
		default:
			unreachable("unknown descriptor type\n");
		}

		set_layout->size = align(set_layout->size, alignment);
		assert(binding->descriptorCount > 0);
		set_layout->binding[b].type = binding->descriptorType;
		set_layout->binding[b].array_size = binding->descriptorCount;
		set_layout->binding[b].offset = set_layout->size;
		set_layout->binding[b].buffer_offset = buffer_count;
		set_layout->binding[b].dynamic_offset_offset = dynamic_offset_count;

		if (binding->pImmutableSamplers) {
			set_layout->binding[b].immutable_samplers_offset = samplers_offset;
			set_layout->binding[b].immutable_samplers_equal = true;
			set_layout->has_immutable_samplers = true;

			for (uint32_t i = 0; i < binding->descriptorCount; i++)
				memcpy(samplers + 4 * i, &radv_sampler_from_handle(binding->pImmutableSamplers[i])->state, 16);
			for (uint32_t i = 1; i < binding->descriptorCount; i++)
				if (memcmp(samplers + 4 * i, samplers, 16) != 0)
					set_layout->binding[b].immutable_samplers_equal = false;

			/* Don't reserve space for the samplers if they're not accessed. */
			if (set_layout->binding[b].immutable_samplers_equal) {
				if (binding->descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
					set_layout->binding[b].size -= 32;
				else if (binding->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER)
					set_layout->binding[b].size -= 16;
			}

			samplers += 4 * binding->descriptorCount;
			samplers_offset += 4 * sizeof(uint32_t) * binding->descriptorCount;
		}

		set_layout->size += binding->descriptorCount * set_layout->binding[b].size;
		buffer_count += binding->descriptorCount * binding_buffer_count;
		dynamic_offset_count += binding->descriptorCount *
			set_layout->binding[b].dynamic_offset_count;
		set_layout->shader_stages |= binding->stageFlags;
	}

	set_layout->buffer_count = buffer_count;
	set_layout->dynamic_offset_count = dynamic_offset_count;

	*pSetLayout = radv_descriptor_set_layout_to_handle(set_layout);

	return VK_SUCCESS;
}

void radv_DestroyDescriptorSetLayout(
	VkDevice                                    _device,
	VkDescriptorSetLayout                       _set_layout,
	const VkAllocationCallbacks*                pAllocator)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	RADV_FROM_HANDLE(radv_descriptor_set_layout, set_layout, _set_layout);

	if (!set_layout)
		return;

	vk_free2(&device->alloc, pAllocator, set_layout);
}

/*
 * Pipeline layouts. These have nothing to do with the pipeline. They are
 * just multiple descriptor set layouts pasted together.
 */

VkResult radv_CreatePipelineLayout(
	VkDevice                                    _device,
	const VkPipelineLayoutCreateInfo*           pCreateInfo,
	const VkAllocationCallbacks*                pAllocator,
	VkPipelineLayout*                           pPipelineLayout)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	struct radv_pipeline_layout *layout;
	struct mesa_sha1 ctx;

	assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);

	layout = vk_alloc2(&device->alloc, pAllocator, sizeof(*layout), 8,
			   VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
	if (layout == NULL)
		return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

	layout->num_sets = pCreateInfo->setLayoutCount;

	unsigned dynamic_offset_count = 0;

	_mesa_sha1_init(&ctx);
	for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {
		RADV_FROM_HANDLE(radv_descriptor_set_layout, set_layout,
				 pCreateInfo->pSetLayouts[set]);
		layout->set[set].layout = set_layout;

		layout->set[set].dynamic_offset_start = dynamic_offset_count;
		for (uint32_t b = 0; b < set_layout->binding_count; b++) {
			dynamic_offset_count += set_layout->binding[b].array_size * set_layout->binding[b].dynamic_offset_count;
			if (set_layout->binding[b].immutable_samplers_offset)
				_mesa_sha1_update(&ctx, radv_immutable_samplers(set_layout, set_layout->binding + b),
						  set_layout->binding[b].array_size * 4 * sizeof(uint32_t));
		}
		_mesa_sha1_update(&ctx, set_layout->binding,
				  sizeof(set_layout->binding[0]) * set_layout->binding_count);
	}

	layout->dynamic_offset_count = dynamic_offset_count;
	layout->push_constant_size = 0;
	for (unsigned i = 0; i < pCreateInfo->pushConstantRangeCount; ++i) {
		const VkPushConstantRange *range = pCreateInfo->pPushConstantRanges + i;
		layout->push_constant_size = MAX2(layout->push_constant_size,
						  range->offset + range->size);
	}

	layout->push_constant_size = align(layout->push_constant_size, 16);
	_mesa_sha1_update(&ctx, &layout->push_constant_size,
			  sizeof(layout->push_constant_size));
	_mesa_sha1_final(&ctx, layout->sha1);
	*pPipelineLayout = radv_pipeline_layout_to_handle(layout);

	return VK_SUCCESS;
}

void radv_DestroyPipelineLayout(
	VkDevice                                    _device,
	VkPipelineLayout                            _pipelineLayout,
	const VkAllocationCallbacks*                pAllocator)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	RADV_FROM_HANDLE(radv_pipeline_layout, pipeline_layout, _pipelineLayout);

	if (!pipeline_layout)
		return;
	vk_free2(&device->alloc, pAllocator, pipeline_layout);
}
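
/*
 * Descriptor sets are sub-allocated from their pool: the host-side struct
 * comes from the pool's host memory block when the pool was created without
 * FREE_DESCRIPTOR_SET_BIT, and the descriptor memory comes from the pool's
 * mapped BO, bump-allocated from current_offset when possible and otherwise
 * placed first-fit into a gap in the vram_list.
 */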

static VkResult
radv_descriptor_set_create(struct radv_device *device,
			   struct radv_descriptor_pool *pool,
			   const struct radv_descriptor_set_layout *layout,
			   struct radv_descriptor_set **out_set)
{
	struct radv_descriptor_set *set;
	unsigned range_offset = sizeof(struct radv_descriptor_set) +
		sizeof(struct radeon_winsys_bo *) * layout->buffer_count;
	unsigned mem_size = range_offset +
		sizeof(struct radv_descriptor_range) * layout->dynamic_offset_count;

	if (pool->host_memory_base) {
		if (pool->host_memory_end - pool->host_memory_ptr < mem_size)
			return vk_error(VK_ERROR_OUT_OF_POOL_MEMORY_KHR);

		set = (struct radv_descriptor_set*)pool->host_memory_ptr;
		pool->host_memory_ptr += mem_size;
	} else {
		set = vk_alloc2(&device->alloc, NULL, mem_size, 8,
				VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);

		if (!set)
			return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
	}

	memset(set, 0, mem_size);

	if (layout->dynamic_offset_count) {
		set->dynamic_descriptors = (struct radv_descriptor_range*)((uint8_t*)set + range_offset);
	}

	set->layout = layout;

	uint32_t layout_size = align_u32(layout->size, 32);
	set->size = layout->size;

	/* try to allocate linearly first, so that we don't spend
	 * time looking for gaps if the app only allocates &
	 * resets via the pool. */
	if (pool->current_offset + layout_size <= pool->size) {
		set->bo = pool->bo;
		set->mapped_ptr = (uint32_t*)(pool->mapped_ptr + pool->current_offset);
		set->va = radv_buffer_get_va(set->bo) + pool->current_offset;
		pool->current_offset += layout_size;
		list_addtail(&set->vram_list, &pool->vram_list);
	} else if (!pool->host_memory_base) {
		uint64_t offset = 0;
		struct list_head *prev = &pool->vram_list;
		struct radv_descriptor_set *cur;

		assert(!pool->host_memory_base);
		LIST_FOR_EACH_ENTRY(cur, &pool->vram_list, vram_list) {
			uint64_t start = (uint8_t*)cur->mapped_ptr - pool->mapped_ptr;
			if (start - offset >= layout_size)
				break;

			offset = start + cur->size;
			prev = &cur->vram_list;
		}

		if (pool->size - offset < layout_size) {
			vk_free2(&device->alloc, NULL, set);
			return vk_error(VK_ERROR_OUT_OF_POOL_MEMORY_KHR);
		}

		set->bo = pool->bo;
		set->mapped_ptr = (uint32_t*)(pool->mapped_ptr + offset);
		set->va = radv_buffer_get_va(set->bo) + offset;
		list_add(&set->vram_list, prev);
	} else
		return vk_error(VK_ERROR_OUT_OF_POOL_MEMORY_KHR);

	if (layout->has_immutable_samplers) {
		for (unsigned i = 0; i < layout->binding_count; ++i) {
			if (!layout->binding[i].immutable_samplers_offset ||
			    layout->binding[i].immutable_samplers_equal)
				continue;

			unsigned offset = layout->binding[i].offset / 4;
			if (layout->binding[i].type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
				offset += 16;

			const uint32_t *samplers = (const uint32_t*)((const char*)layout + layout->binding[i].immutable_samplers_offset);
			for (unsigned j = 0; j < layout->binding[i].array_size; ++j) {
				memcpy(set->mapped_ptr + offset, samplers + 4 * j, 16);
				offset += layout->binding[i].size / 4;
			}
		}
	}

	*out_set = set;
	return VK_SUCCESS;
}

static void
radv_descriptor_set_destroy(struct radv_device *device,
			    struct radv_descriptor_pool *pool,
			    struct radv_descriptor_set *set,
			    bool free_bo)
{
	assert(!pool->host_memory_base);

	if (free_bo && set->size)
		list_del(&set->vram_list);
	vk_free2(&device->alloc, NULL, set);
}
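
/*
 * Pool sizing: buffer-like descriptors and samplers reserve 32 bytes each
 * (to keep image alignment possible), image descriptors 64 bytes and
 * combined image+sampler descriptors 96 bytes. Pools created without
 * FREE_DESCRIPTOR_SET_BIT also carve the host-side set structs out of the
 * pool allocation itself.
 */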

VkResult radv_CreateDescriptorPool(
	VkDevice                                    _device,
	const VkDescriptorPoolCreateInfo*           pCreateInfo,
	const VkAllocationCallbacks*                pAllocator,
	VkDescriptorPool*                           pDescriptorPool)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	struct radv_descriptor_pool *pool;
	int size = sizeof(struct radv_descriptor_pool);
	uint64_t bo_size = 0, bo_count = 0, range_count = 0;

	for (unsigned i = 0; i < pCreateInfo->poolSizeCount; ++i) {
		if (pCreateInfo->pPoolSizes[i].type != VK_DESCRIPTOR_TYPE_SAMPLER)
			bo_count += pCreateInfo->pPoolSizes[i].descriptorCount;

		switch(pCreateInfo->pPoolSizes[i].type) {
		case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
		case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
			range_count += pCreateInfo->pPoolSizes[i].descriptorCount;
			break;
		case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
		case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
		case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
		case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
		case VK_DESCRIPTOR_TYPE_SAMPLER:
			/* 32 as we may need to align for images */
			bo_size += 32 * pCreateInfo->pPoolSizes[i].descriptorCount;
			break;
		case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
		case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
		case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
			bo_size += 64 * pCreateInfo->pPoolSizes[i].descriptorCount;
			break;
		case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
			bo_size += 96 * pCreateInfo->pPoolSizes[i].descriptorCount;
			break;
		default:
			unreachable("unknown descriptor type\n");
		}
	}

	if (!(pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT)) {
		uint64_t host_size = pCreateInfo->maxSets * sizeof(struct radv_descriptor_set);
		host_size += sizeof(struct radeon_winsys_bo*) * bo_count;
		host_size += sizeof(struct radv_descriptor_range) * range_count;
		size += host_size;
	}

	pool = vk_alloc2(&device->alloc, pAllocator, size, 8,
			 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
	if (!pool)
		return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

	memset(pool, 0, sizeof(*pool));

	if (!(pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT)) {
		pool->host_memory_base = (uint8_t*)pool + sizeof(struct radv_descriptor_pool);
		pool->host_memory_ptr = pool->host_memory_base;
		pool->host_memory_end = (uint8_t*)pool + size;
	}

	if (bo_size) {
		pool->bo = device->ws->buffer_create(device->ws, bo_size,
						     32, RADEON_DOMAIN_VRAM, 0);
		pool->mapped_ptr = (uint8_t*)device->ws->buffer_map(pool->bo);
	}
	pool->size = bo_size;

	list_inithead(&pool->vram_list);
	*pDescriptorPool = radv_descriptor_pool_to_handle(pool);
	return VK_SUCCESS;
}

void radv_DestroyDescriptorPool(
	VkDevice                                    _device,
	VkDescriptorPool                            _pool,
	const VkAllocationCallbacks*                pAllocator)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	RADV_FROM_HANDLE(radv_descriptor_pool, pool, _pool);

	if (!pool)
		return;

	if (!pool->host_memory_base) {
		list_for_each_entry_safe(struct radv_descriptor_set, set,
					 &pool->vram_list, vram_list) {
			radv_descriptor_set_destroy(device, pool, set, false);
		}
	}

	if (pool->bo)
		device->ws->buffer_destroy(pool->bo);
	vk_free2(&device->alloc, pAllocator, pool);
}

VkResult radv_ResetDescriptorPool(
	VkDevice                                    _device,
	VkDescriptorPool                            descriptorPool,
	VkDescriptorPoolResetFlags                  flags)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	RADV_FROM_HANDLE(radv_descriptor_pool, pool, descriptorPool);

	if (!pool->host_memory_base) {
		list_for_each_entry_safe(struct radv_descriptor_set, set,
					 &pool->vram_list, vram_list) {
			radv_descriptor_set_destroy(device, pool, set, false);
		}
	}

	list_inithead(&pool->vram_list);

	pool->current_offset = 0;
	pool->host_memory_ptr = pool->host_memory_base;

	return VK_SUCCESS;
}

VkResult radv_AllocateDescriptorSets(
	VkDevice                                    _device,
	const VkDescriptorSetAllocateInfo*          pAllocateInfo,
	VkDescriptorSet*                            pDescriptorSets)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	RADV_FROM_HANDLE(radv_descriptor_pool, pool, pAllocateInfo->descriptorPool);

	VkResult result = VK_SUCCESS;
	uint32_t i;
	struct radv_descriptor_set *set;

	/* allocate a set of buffers for each shader to contain descriptors */
	for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
		RADV_FROM_HANDLE(radv_descriptor_set_layout, layout,
				 pAllocateInfo->pSetLayouts[i]);

		assert(!(layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));

		result = radv_descriptor_set_create(device, pool, layout, &set);
		if (result != VK_SUCCESS)
			break;

		pDescriptorSets[i] = radv_descriptor_set_to_handle(set);
	}

	if (result != VK_SUCCESS)
		radv_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
					i, pDescriptorSets);
	return result;
}

VkResult radv_FreeDescriptorSets(
	VkDevice                                    _device,
	VkDescriptorPool                            descriptorPool,
	uint32_t                                    count,
	const VkDescriptorSet*                      pDescriptorSets)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	RADV_FROM_HANDLE(radv_descriptor_pool, pool, descriptorPool);

	for (uint32_t i = 0; i < count; i++) {
		RADV_FROM_HANDLE(radv_descriptor_set, set, pDescriptorSets[i]);

		if (set && !pool->host_memory_base)
			radv_descriptor_set_destroy(device, pool, set, true);
	}
	return VK_SUCCESS;
}
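
/*
 * The write_*_descriptor helpers store hardware descriptor dwords at `dst`
 * inside a set's mapped memory. When a command buffer is given (the push
 * descriptor path) the referenced BO is added to the command stream;
 * otherwise it is recorded in the set's buffer list.
 */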

static void write_texel_buffer_descriptor(struct radv_device *device,
					  struct radv_cmd_buffer *cmd_buffer,
					  unsigned *dst,
					  struct radeon_winsys_bo **buffer_list,
					  const VkBufferView _buffer_view)
{
	RADV_FROM_HANDLE(radv_buffer_view, buffer_view, _buffer_view);

	memcpy(dst, buffer_view->state, 4 * 4);

	if (cmd_buffer)
		device->ws->cs_add_buffer(cmd_buffer->cs, buffer_view->bo, 7);
	else
		*buffer_list = buffer_view->bo;
}

static void write_buffer_descriptor(struct radv_device *device,
				    struct radv_cmd_buffer *cmd_buffer,
				    unsigned *dst,
				    struct radeon_winsys_bo **buffer_list,
				    const VkDescriptorBufferInfo *buffer_info)
{
	RADV_FROM_HANDLE(radv_buffer, buffer, buffer_info->buffer);
	uint64_t va = radv_buffer_get_va(buffer->bo);
	uint32_t range = buffer_info->range;

	if (buffer_info->range == VK_WHOLE_SIZE)
		range = buffer->size - buffer_info->offset;

	va += buffer_info->offset + buffer->offset;
	dst[0] = va;
	dst[1] = S_008F04_BASE_ADDRESS_HI(va >> 32);
	dst[2] = range;
	dst[3] = S_008F0C_DST_SEL_X(V_008F0C_SQ_SEL_X) |
		 S_008F0C_DST_SEL_Y(V_008F0C_SQ_SEL_Y) |
		 S_008F0C_DST_SEL_Z(V_008F0C_SQ_SEL_Z) |
		 S_008F0C_DST_SEL_W(V_008F0C_SQ_SEL_W) |
		 S_008F0C_NUM_FORMAT(V_008F0C_BUF_NUM_FORMAT_FLOAT) |
		 S_008F0C_DATA_FORMAT(V_008F0C_BUF_DATA_FORMAT_32);

	if (cmd_buffer)
		device->ws->cs_add_buffer(cmd_buffer->cs, buffer->bo, 7);
	else
		*buffer_list = buffer->bo;
}

static void write_dynamic_buffer_descriptor(struct radv_device *device,
					    struct radv_descriptor_range *range,
					    struct radeon_winsys_bo **buffer_list,
					    const VkDescriptorBufferInfo *buffer_info)
{
	RADV_FROM_HANDLE(radv_buffer, buffer, buffer_info->buffer);
	uint64_t va = radv_buffer_get_va(buffer->bo);
	unsigned size = buffer_info->range;

	if (buffer_info->range == VK_WHOLE_SIZE)
		size = buffer->size - buffer_info->offset;

	va += buffer_info->offset + buffer->offset;
	range->va = va;
	range->size = size;

	*buffer_list = buffer->bo;
}

static void
write_image_descriptor(struct radv_device *device,
		       struct radv_cmd_buffer *cmd_buffer,
		       unsigned *dst,
		       struct radeon_winsys_bo **buffer_list,
		       VkDescriptorType descriptor_type,
		       const VkDescriptorImageInfo *image_info)
{
	RADV_FROM_HANDLE(radv_image_view, iview, image_info->imageView);

	if (descriptor_type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) {
		memcpy(dst, iview->storage_descriptor, 8 * 4);
		memcpy(dst + 8, iview->storage_fmask_descriptor, 8 * 4);
	} else {
		memcpy(dst, iview->descriptor, 8 * 4);
		memcpy(dst + 8, iview->fmask_descriptor, 8 * 4);
	}

	if (cmd_buffer)
		device->ws->cs_add_buffer(cmd_buffer->cs, iview->bo, 7);
	else
		*buffer_list = iview->bo;
}

static void
write_combined_image_sampler_descriptor(struct radv_device *device,
					struct radv_cmd_buffer *cmd_buffer,
					unsigned *dst,
					struct radeon_winsys_bo **buffer_list,
					VkDescriptorType descriptor_type,
					const VkDescriptorImageInfo *image_info,
					bool has_sampler)
{
	RADV_FROM_HANDLE(radv_sampler, sampler, image_info->sampler);

	write_image_descriptor(device, cmd_buffer, dst, buffer_list, descriptor_type, image_info);
	/* copy over sampler state */
	if (has_sampler)
		memcpy(dst + 16, sampler->state, 16);
}

static void
write_sampler_descriptor(struct radv_device *device,
			 unsigned *dst,
			 const VkDescriptorImageInfo *image_info)
{
	RADV_FROM_HANDLE(radv_sampler, sampler, image_info->sampler);

	memcpy(dst, sampler->state, 16);
}
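
/*
 * Common write path: vkUpdateDescriptorSets calls this with no command
 * buffer and no set override; the push-descriptor path is expected to pass
 * both, which is why immutable samplers are copied here in that case.
 */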

void radv_update_descriptor_sets(
	struct radv_device*                         device,
	struct radv_cmd_buffer*                     cmd_buffer,
	VkDescriptorSet                             dstSetOverride,
	uint32_t                                    descriptorWriteCount,
	const VkWriteDescriptorSet*                 pDescriptorWrites,
	uint32_t                                    descriptorCopyCount,
	const VkCopyDescriptorSet*                  pDescriptorCopies)
{
	uint32_t i, j;

	for (i = 0; i < descriptorWriteCount; i++) {
		const VkWriteDescriptorSet *writeset = &pDescriptorWrites[i];
		RADV_FROM_HANDLE(radv_descriptor_set, set,
				 dstSetOverride ? dstSetOverride : writeset->dstSet);
		const struct radv_descriptor_set_binding_layout *binding_layout =
			set->layout->binding + writeset->dstBinding;
		uint32_t *ptr = set->mapped_ptr;
		struct radeon_winsys_bo **buffer_list = set->descriptors;
		/* Immutable samplers are not copied into push descriptors when they are
		 * allocated, so if we are writing push descriptors we have to copy the
		 * immutable samplers into them now.
		 */
		const bool copy_immutable_samplers = cmd_buffer &&
			binding_layout->immutable_samplers_offset && !binding_layout->immutable_samplers_equal;
		const uint32_t *samplers = radv_immutable_samplers(set->layout, binding_layout);

		ptr += binding_layout->offset / 4;
		ptr += binding_layout->size * writeset->dstArrayElement / 4;
		buffer_list += binding_layout->buffer_offset;
		buffer_list += writeset->dstArrayElement;
		for (j = 0; j < writeset->descriptorCount; ++j) {
			switch(writeset->descriptorType) {
			case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
			case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
				unsigned idx = writeset->dstArrayElement + j;
				idx += binding_layout->dynamic_offset_offset;
				assert(!(set->layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));
				write_dynamic_buffer_descriptor(device, set->dynamic_descriptors + idx,
								buffer_list, writeset->pBufferInfo + j);
				break;
			}
			case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
			case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
				write_buffer_descriptor(device, cmd_buffer, ptr, buffer_list,
							writeset->pBufferInfo + j);
				break;
			case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
			case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
				write_texel_buffer_descriptor(device, cmd_buffer, ptr, buffer_list,
							      writeset->pTexelBufferView[j]);
				break;
			case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
			case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
			case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
				write_image_descriptor(device, cmd_buffer, ptr, buffer_list,
						       writeset->descriptorType,
						       writeset->pImageInfo + j);
				break;
			case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
				write_combined_image_sampler_descriptor(device, cmd_buffer, ptr, buffer_list,
									writeset->descriptorType,
									writeset->pImageInfo + j,
									!binding_layout->immutable_samplers_offset);
				if (copy_immutable_samplers) {
					const unsigned idx = writeset->dstArrayElement + j;
					memcpy(ptr + 16, samplers + 4 * idx, 16);
				}
				break;
			case VK_DESCRIPTOR_TYPE_SAMPLER:
				if (!binding_layout->immutable_samplers_offset) {
					write_sampler_descriptor(device, ptr,
								 writeset->pImageInfo + j);
				} else if (copy_immutable_samplers) {
					unsigned idx = writeset->dstArrayElement + j;
					memcpy(ptr, samplers + 4 * idx, 16);
				}
				break;
			default:
				unreachable("unimplemented descriptor type");
			}
			ptr += binding_layout->size / 4;
			++buffer_list;
		}
	}

	if (descriptorCopyCount)
		radv_finishme("copy descriptors");
}

void radv_UpdateDescriptorSets(
	VkDevice                                    _device,
	uint32_t                                    descriptorWriteCount,
	const VkWriteDescriptorSet*                 pDescriptorWrites,
	uint32_t                                    descriptorCopyCount,
	const VkCopyDescriptorSet*                  pDescriptorCopies)
{
	RADV_FROM_HANDLE(radv_device, device, _device);

	radv_update_descriptor_sets(device, NULL, VK_NULL_HANDLE, descriptorWriteCount, pDescriptorWrites,
				    descriptorCopyCount, pDescriptorCopies);
}
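
/*
 * Update templates precompute, per entry, where the application data at
 * src_offset/src_stride lands in the set: either a dword offset and stride
 * into mapped_ptr, or an index into dynamic_descriptors for dynamic
 * buffers. Applying a template then reduces to the copy loop in
 * radv_update_descriptor_set_with_template().
 */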

VkResult radv_CreateDescriptorUpdateTemplateKHR(VkDevice _device,
						const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
						const VkAllocationCallbacks *pAllocator,
						VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	RADV_FROM_HANDLE(radv_descriptor_set_layout, set_layout, pCreateInfo->descriptorSetLayout);
	const uint32_t entry_count = pCreateInfo->descriptorUpdateEntryCount;
	const size_t size = sizeof(struct radv_descriptor_update_template) +
		sizeof(struct radv_descriptor_update_template_entry) * entry_count;
	struct radv_descriptor_update_template *templ;
	uint32_t i;

	templ = vk_alloc2(&device->alloc, pAllocator, size, 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
	if (!templ)
		return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

	templ->entry_count = entry_count;

	for (i = 0; i < entry_count; i++) {
		const VkDescriptorUpdateTemplateEntryKHR *entry = &pCreateInfo->pDescriptorUpdateEntries[i];
		const struct radv_descriptor_set_binding_layout *binding_layout =
			set_layout->binding + entry->dstBinding;
		const uint32_t buffer_offset = binding_layout->buffer_offset + entry->dstArrayElement;
		const uint32_t *immutable_samplers = NULL;
		uint32_t dst_offset;
		uint32_t dst_stride;

		/* dst_offset is an offset into dynamic_descriptors when the descriptor
		   is dynamic, and an offset into mapped_ptr otherwise */
		switch (entry->descriptorType) {
		case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
		case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
			assert(pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR);
			dst_offset = binding_layout->dynamic_offset_offset + entry->dstArrayElement;
			dst_stride = 0; /* Not used */
			break;
		default:
			switch (entry->descriptorType) {
			case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
			case VK_DESCRIPTOR_TYPE_SAMPLER:
				/* Immutable samplers are copied into push descriptors when they are pushed */
				if (pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR &&
				    binding_layout->immutable_samplers_offset && !binding_layout->immutable_samplers_equal) {
					immutable_samplers = radv_immutable_samplers(set_layout, binding_layout) + entry->dstArrayElement * 4;
				}
				break;
			default:
				break;
			}
			dst_offset = binding_layout->offset / 4 + binding_layout->size * entry->dstArrayElement / 4;
			dst_stride = binding_layout->size / 4;
			break;
		}

		templ->entry[i] = (struct radv_descriptor_update_template_entry) {
			.descriptor_type = entry->descriptorType,
			.descriptor_count = entry->descriptorCount,
			.src_offset = entry->offset,
			.src_stride = entry->stride,
			.dst_offset = dst_offset,
			.dst_stride = dst_stride,
			.buffer_offset = buffer_offset,
			.has_sampler = !binding_layout->immutable_samplers_offset,
			.immutable_samplers = immutable_samplers
		};
	}

	*pDescriptorUpdateTemplate = radv_descriptor_update_template_to_handle(templ);
	return VK_SUCCESS;
}

void radv_DestroyDescriptorUpdateTemplateKHR(VkDevice _device,
					     VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
					     const VkAllocationCallbacks *pAllocator)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	RADV_FROM_HANDLE(radv_descriptor_update_template, templ, descriptorUpdateTemplate);

	if (!templ)
		return;

	vk_free2(&device->alloc, pAllocator, templ);
}

void radv_update_descriptor_set_with_template(struct radv_device *device,
					      struct radv_cmd_buffer *cmd_buffer,
					      struct radv_descriptor_set *set,
					      VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
					      const void *pData)
{
	RADV_FROM_HANDLE(radv_descriptor_update_template, templ, descriptorUpdateTemplate);
	uint32_t i, j;

	for (i = 0; i < templ->entry_count; ++i) {
		struct radeon_winsys_bo **buffer_list = set->descriptors + templ->entry[i].buffer_offset;
		uint32_t *pDst = set->mapped_ptr + templ->entry[i].dst_offset;
		const uint8_t *pSrc = ((const uint8_t *) pData) + templ->entry[i].src_offset;

		for (j = 0; j < templ->entry[i].descriptor_count; ++j) {
			switch (templ->entry[i].descriptor_type) {
			case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
			case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
				const unsigned idx = templ->entry[i].dst_offset + j;
				assert(!(set->layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));
				write_dynamic_buffer_descriptor(device, set->dynamic_descriptors + idx,
								buffer_list, (struct VkDescriptorBufferInfo *) pSrc);
				break;
			}
			case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
			case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
				write_buffer_descriptor(device, cmd_buffer, pDst, buffer_list,
							(struct VkDescriptorBufferInfo *) pSrc);
				break;
			case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
			case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
				write_texel_buffer_descriptor(device, cmd_buffer, pDst, buffer_list,
							      *(VkBufferView *) pSrc);
				break;
			case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
			case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
			case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
				write_image_descriptor(device, cmd_buffer, pDst, buffer_list,
						       templ->entry[i].descriptor_type,
						       (struct VkDescriptorImageInfo *) pSrc);
				break;
			case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
				write_combined_image_sampler_descriptor(device, cmd_buffer, pDst, buffer_list,
									templ->entry[i].descriptor_type,
									(struct VkDescriptorImageInfo *) pSrc,
									templ->entry[i].has_sampler);
				if (templ->entry[i].immutable_samplers)
					memcpy(pDst + 16, templ->entry[i].immutable_samplers + 4 * j, 16);
				break;
			case VK_DESCRIPTOR_TYPE_SAMPLER:
				if (templ->entry[i].has_sampler)
					write_sampler_descriptor(device, pDst,
								 (struct VkDescriptorImageInfo *) pSrc);
				else if (templ->entry[i].immutable_samplers)
					memcpy(pDst, templ->entry[i].immutable_samplers + 4 * j, 16);
				break;
			default:
				unreachable("unimplemented descriptor type");
			}
			pSrc += templ->entry[i].src_stride;
			pDst += templ->entry[i].dst_stride;
			++buffer_list;
		}
	}
}

void radv_UpdateDescriptorSetWithTemplateKHR(VkDevice _device,
					     VkDescriptorSet descriptorSet,
					     VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
					     const void *pData)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	RADV_FROM_HANDLE(radv_descriptor_set, set, descriptorSet);

	radv_update_descriptor_set_with_template(device, NULL, set, descriptorUpdateTemplate, pData);
}