+
+ for (i = 0; i < descriptorCopyCount; i++) {
+ const VkCopyDescriptorSet *copyset = &pDescriptorCopies[i];
+ RADV_FROM_HANDLE(radv_descriptor_set, src_set,
+ copyset->srcSet);
+ RADV_FROM_HANDLE(radv_descriptor_set, dst_set,
+ copyset->dstSet);
+ const struct radv_descriptor_set_binding_layout *src_binding_layout =
+ src_set->layout->binding + copyset->srcBinding;
+ const struct radv_descriptor_set_binding_layout *dst_binding_layout =
+ dst_set->layout->binding + copyset->dstBinding;
+ uint32_t *src_ptr = src_set->mapped_ptr;
+ uint32_t *dst_ptr = dst_set->mapped_ptr;
+ struct radeon_winsys_bo **src_buffer_list = src_set->descriptors;
+ struct radeon_winsys_bo **dst_buffer_list = dst_set->descriptors;
+
+ src_ptr += src_binding_layout->offset / 4;
+ dst_ptr += dst_binding_layout->offset / 4;
+
+ src_ptr += src_binding_layout->size * copyset->srcArrayElement / 4;
+ dst_ptr += dst_binding_layout->size * copyset->dstArrayElement / 4;
+
+ src_buffer_list += src_binding_layout->buffer_offset;
+ src_buffer_list += copyset->srcArrayElement;
+
+ dst_buffer_list += dst_binding_layout->buffer_offset;
+ dst_buffer_list += copyset->dstArrayElement;
+
+ for (j = 0; j < copyset->descriptorCount; ++j) {
+ switch (src_binding_layout->type) {
+ case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
+ case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
+ unsigned src_idx = copyset->srcArrayElement + j;
+ unsigned dst_idx = copyset->dstArrayElement + j;
+ struct radv_descriptor_range *src_range, *dst_range;
+ src_idx += src_binding_layout->dynamic_offset_offset;
+ dst_idx += dst_binding_layout->dynamic_offset_offset;
+
+ src_range = src_set->dynamic_descriptors + src_idx;
+ dst_range = dst_set->dynamic_descriptors + dst_idx;
+ *dst_range = *src_range;
+ break;
+ }
+ default:
+ memcpy(dst_ptr, src_ptr, src_binding_layout->size);
+ }
+ src_ptr += src_binding_layout->size / 4;
+ dst_ptr += dst_binding_layout->size / 4;
+
+ if (src_binding_layout->type != VK_DESCRIPTOR_TYPE_SAMPLER) {
+ /* Sampler descriptors don't have a buffer list. */
+ dst_buffer_list[j] = src_buffer_list[j];
+ }
+ }
+ }
+}
+
+void radv_UpdateDescriptorSets(
+ VkDevice _device,
+ uint32_t descriptorWriteCount,
+ const VkWriteDescriptorSet* pDescriptorWrites,
+ uint32_t descriptorCopyCount,
+ const VkCopyDescriptorSet* pDescriptorCopies)
+{
+ RADV_FROM_HANDLE(radv_device, device, _device);
+
+ radv_update_descriptor_sets(device, NULL, VK_NULL_HANDLE, descriptorWriteCount, pDescriptorWrites,
+ descriptorCopyCount, pDescriptorCopies);
+}
+
/* Builds a descriptor update template: for every update entry we precompute
 * the destination offset and stride inside a radv_descriptor_set, so that a
 * later templated update (radv_update_descriptor_set_with_template) reduces
 * to a flat copy loop with no layout lookups.
 *
 * Returns VK_SUCCESS, or VK_ERROR_OUT_OF_HOST_MEMORY if the template
 * allocation fails.
 */
VkResult radv_CreateDescriptorUpdateTemplate(VkDevice _device,
                                             const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
                                             const VkAllocationCallbacks *pAllocator,
                                             VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate)
{
	RADV_FROM_HANDLE(radv_device, device, _device);
	RADV_FROM_HANDLE(radv_descriptor_set_layout, set_layout, pCreateInfo->descriptorSetLayout);
	const uint32_t entry_count = pCreateInfo->descriptorUpdateEntryCount;
	/* Template struct plus a trailing array of per-entry records. */
	const size_t size = sizeof(struct radv_descriptor_update_template) +
		sizeof(struct radv_descriptor_update_template_entry) * entry_count;
	struct radv_descriptor_update_template *templ;
	uint32_t i;

	templ = vk_alloc2(&device->alloc, pAllocator, size, 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
	if (!templ)
		return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

	templ->entry_count = entry_count;
	/* Recorded so push-descriptor updates can pick the right bind point. */
	templ->bind_point = pCreateInfo->pipelineBindPoint;

	for (i = 0; i < entry_count; i++) {
		const VkDescriptorUpdateTemplateEntry *entry = &pCreateInfo->pDescriptorUpdateEntries[i];
		const struct radv_descriptor_set_binding_layout *binding_layout =
			set_layout->binding + entry->dstBinding;
		/* Index into the set's BO list (one slot per array element). */
		const uint32_t buffer_offset = binding_layout->buffer_offset + entry->dstArrayElement;
		const uint32_t *immutable_samplers = NULL;
		uint32_t dst_offset;
		uint32_t dst_stride;

		/* dst_offset is an offset into dynamic_descriptors when the descriptor
		   is dynamic, and an offset into mapped_ptr otherwise */
		switch (entry->descriptorType) {
		case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
		case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
			/* Dynamic descriptors are not legal in push descriptors. */
			assert(pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET);
			dst_offset = binding_layout->dynamic_offset_offset + entry->dstArrayElement;
			dst_stride = 0; /* Not used */
			break;
		default:
			switch (entry->descriptorType) {
			case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
			case VK_DESCRIPTOR_TYPE_SAMPLER:
				/* Immutable samplers are copied into push descriptors when they are pushed */
				if (pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR &&
				    binding_layout->immutable_samplers_offset && !binding_layout->immutable_samplers_equal) {
					/* 4 dwords per sampler (see the 16-byte copies at update time). */
					immutable_samplers = radv_immutable_samplers(set_layout, binding_layout) + entry->dstArrayElement * 4;
				}
				break;
			default:
				break;
			}
			/* mapped_ptr is a uint32_t pointer, so byte offsets become dwords. */
			dst_offset = binding_layout->offset / 4;
			if (entry->descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT)
				/* For inline uniform blocks dstArrayElement is a byte
				 * offset (per the Vulkan spec), not an element index. */
				dst_offset += entry->dstArrayElement / 4;
			else
				dst_offset += binding_layout->size * entry->dstArrayElement / 4;

			dst_stride = binding_layout->size / 4;
			break;
		}

		templ->entry[i] = (struct radv_descriptor_update_template_entry) {
			.descriptor_type = entry->descriptorType,
			.descriptor_count = entry->descriptorCount,
			.src_offset = entry->offset,
			.src_stride = entry->stride,
			.dst_offset = dst_offset,
			.dst_stride = dst_stride,
			.buffer_offset = buffer_offset,
			/* A binding with immutable samplers never carries a
			 * sampler in the application-provided data. */
			.has_sampler = !binding_layout->immutable_samplers_offset,
			.sampler_offset = radv_combined_image_descriptor_sampler_offset(binding_layout),
			.immutable_samplers = immutable_samplers
		};
	}

	*pDescriptorUpdateTemplate = radv_descriptor_update_template_to_handle(templ);
	return VK_SUCCESS;
}
+
+void radv_DestroyDescriptorUpdateTemplate(VkDevice _device,
+ VkDescriptorUpdateTemplate descriptorUpdateTemplate,
+ const VkAllocationCallbacks *pAllocator)
+{
+ RADV_FROM_HANDLE(radv_device, device, _device);
+ RADV_FROM_HANDLE(radv_descriptor_update_template, templ, descriptorUpdateTemplate);
+
+ if (!templ)
+ return;
+
+ vk_free2(&device->alloc, pAllocator, templ);
+}
+
/* Applies a templated update to a descriptor set: for each template entry,
 * walks descriptorCount source records in pData (using the precomputed
 * src_offset/src_stride) and writes the corresponding hardware descriptors
 * into the set (using dst_offset/dst_stride and buffer_offset computed at
 * template-creation time).
 *
 * cmd_buffer may be NULL for a plain (non-push) update; it is forwarded to
 * the write_* helpers.
 */
void radv_update_descriptor_set_with_template(struct radv_device *device,
                                              struct radv_cmd_buffer *cmd_buffer,
                                              struct radv_descriptor_set *set,
                                              VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                              const void *pData)
{
	RADV_FROM_HANDLE(radv_descriptor_update_template, templ, descriptorUpdateTemplate);
	uint32_t i;

	for (i = 0; i < templ->entry_count; ++i) {
		/* Per-entry cursors into the set's BO list and mapped memory. */
		struct radeon_winsys_bo **buffer_list = set->descriptors + templ->entry[i].buffer_offset;
		uint32_t *pDst = set->mapped_ptr + templ->entry[i].dst_offset;
		const uint8_t *pSrc = ((const uint8_t *) pData) + templ->entry[i].src_offset;
		uint32_t j;

		if (templ->entry[i].descriptor_type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
			/* Inline uniform blocks are raw bytes; descriptor_count
			 * is the byte count, copied in one shot. */
			memcpy((uint8_t*)pDst, pSrc, templ->entry[i].descriptor_count);
			continue;
		}

		for (j = 0; j < templ->entry[i].descriptor_count; ++j) {
			switch (templ->entry[i].descriptor_type) {
			case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
			case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
				/* For dynamic descriptors dst_offset indexes
				 * dynamic_descriptors, not mapped_ptr. */
				const unsigned idx = templ->entry[i].dst_offset + j;
				assert(!(set->layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));
				write_dynamic_buffer_descriptor(device, set->dynamic_descriptors + idx,
								buffer_list, (struct VkDescriptorBufferInfo *) pSrc);
				break;
			}
			case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
			case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
				write_buffer_descriptor(device, cmd_buffer, pDst, buffer_list,
							(struct VkDescriptorBufferInfo *) pSrc);
				break;
			case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
			case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
				write_texel_buffer_descriptor(device, cmd_buffer, pDst, buffer_list,
							      *(VkBufferView *) pSrc);
				break;
			case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
			case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
			case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
				/* 64 is the image descriptor size in bytes passed
				 * to the helper. */
				write_image_descriptor(device, cmd_buffer, 64, pDst, buffer_list,
						       templ->entry[i].descriptor_type,
						       (struct VkDescriptorImageInfo *) pSrc);
				break;
			case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
				write_combined_image_sampler_descriptor(device, cmd_buffer, templ->entry[i].sampler_offset,
									pDst, buffer_list, templ->entry[i].descriptor_type,
									(struct VkDescriptorImageInfo *) pSrc,
									templ->entry[i].has_sampler);
				if (templ->entry[i].immutable_samplers) {
					/* Overwrite the 16-byte sampler part with the
					 * immutable sampler recorded in the template. */
					memcpy((char*)pDst + templ->entry[i].sampler_offset, templ->entry[i].immutable_samplers + 4 * j, 16);
				}
				break;
			case VK_DESCRIPTOR_TYPE_SAMPLER:
				if (templ->entry[i].has_sampler)
					write_sampler_descriptor(device, pDst,
								 (struct VkDescriptorImageInfo *) pSrc);
				else if (templ->entry[i].immutable_samplers)
					memcpy(pDst, templ->entry[i].immutable_samplers + 4 * j, 16);
				break;
			default:
				unreachable("unimplemented descriptor type");
				break;
			}
			/* Advance to the next array element on both sides. */
			pSrc += templ->entry[i].src_stride;
			pDst += templ->entry[i].dst_stride;
			++buffer_list;
		}
	}
}
+
+void radv_UpdateDescriptorSetWithTemplate(VkDevice _device,
+ VkDescriptorSet descriptorSet,
+ VkDescriptorUpdateTemplate descriptorUpdateTemplate,
+ const void *pData)
+{
+ RADV_FROM_HANDLE(radv_device, device, _device);
+ RADV_FROM_HANDLE(radv_descriptor_set, set, descriptorSet);
+
+ radv_update_descriptor_set_with_template(device, NULL, set, descriptorUpdateTemplate, pData);
+}
+
+
+VkResult radv_CreateSamplerYcbcrConversion(VkDevice _device,
+ const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
+ const VkAllocationCallbacks* pAllocator,
+ VkSamplerYcbcrConversion* pYcbcrConversion)
+{
+ RADV_FROM_HANDLE(radv_device, device, _device);
+ struct radv_sampler_ycbcr_conversion *conversion = NULL;
+
+ conversion = vk_zalloc2(&device->alloc, pAllocator, sizeof(*conversion), 8,
+ VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
+
+ if (conversion == NULL)
+ return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+
+ conversion->format = pCreateInfo->format;
+ conversion->ycbcr_model = pCreateInfo->ycbcrModel;
+ conversion->ycbcr_range = pCreateInfo->ycbcrRange;
+ conversion->components = pCreateInfo->components;
+ conversion->chroma_offsets[0] = pCreateInfo->xChromaOffset;
+ conversion->chroma_offsets[1] = pCreateInfo->yChromaOffset;
+ conversion->chroma_filter = pCreateInfo->chromaFilter;
+
+ *pYcbcrConversion = radv_sampler_ycbcr_conversion_to_handle(conversion);
+ return VK_SUCCESS;
+}
+
+
+void radv_DestroySamplerYcbcrConversion(VkDevice _device,
+ VkSamplerYcbcrConversion ycbcrConversion,
+ const VkAllocationCallbacks* pAllocator)
+{
+ RADV_FROM_HANDLE(radv_device, device, _device);
+ RADV_FROM_HANDLE(radv_sampler_ycbcr_conversion, ycbcr_conversion, ycbcrConversion);
+
+ if (ycbcr_conversion)
+ vk_free2(&device->alloc, pAllocator, ycbcr_conversion);