if (cmd_buffer == NULL)
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
- cmd_buffer->_loader_data.loaderMagic = ICD_LOADER_MAGIC;
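+ /* vk_object_base_init() replaces the hand-rolled loader-magic setup.
+  * Per vulkan/util/vk_object.h at this point it roughly does:
+  *
+  *    base->_loader_data.loaderMagic = ICD_LOADER_MAGIC;
+  *    base->type = obj_type;
+  *
+  * so the explicit loaderMagic assignment above becomes redundant. */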
+ vk_object_base_init(&device->vk, &cmd_buffer->base,
+ VK_OBJECT_TYPE_COMMAND_BUFFER);
+
cmd_buffer->device = device;
cmd_buffer->pool = pool;
cmd_buffer->level = level;
for (unsigned i = 0; i < MAX_BIND_POINTS; i++)
free(cmd_buffer->descriptors[i].push_set.set.mapped_ptr);
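+ /* Tear down the common base object last, immediately before the
+  * backing allocation is freed; every destroy path below follows
+  * the same order. */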
+ vk_object_base_finish(&cmd_buffer->base);
+
vk_free(&cmd_buffer->pool->alloc, cmd_buffer);
}
list_addtail(&cmd_buffer->pool_link, &pool->cmd_buffers);
result = radv_reset_cmd_buffer(cmd_buffer);
- cmd_buffer->_loader_data.loaderMagic = ICD_LOADER_MAGIC;
cmd_buffer->level = pAllocateInfo->level;
pCommandBuffers[i] = radv_cmd_buffer_to_handle(cmd_buffer);
if (pool == NULL)
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+ vk_object_base_init(&device->vk, &pool->base,
+ VK_OBJECT_TYPE_COMMAND_POOL);
+
if (pAllocator)
pool->alloc = *pAllocator;
else
radv_cmd_buffer_destroy(cmd_buffer);
}
+ vk_object_base_finish(&pool->base);
vk_free2(&device->vk.alloc, pAllocator, pool);
}
if (!set_layout)
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+ vk_object_base_init(&device->vk, &set_layout->base,
+ VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT);
+
set_layout->flags = pCreateInfo->flags;
set_layout->layout_size = size;
VkDescriptorSetLayoutBinding *bindings = create_sorted_bindings(pCreateInfo->pBindings,
pCreateInfo->bindingCount);
if (!bindings) {
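+ /* Error paths have to unwind the base init too before freeing. */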
+ vk_object_base_finish(&set_layout->base);
vk_free2(&device->vk.alloc, pAllocator, set_layout);
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
}
if (!set_layout)
return;
+ vk_object_base_finish(&set_layout->base);
vk_free2(&device->vk.alloc, pAllocator, set_layout);
}
if (layout == NULL)
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+ vk_object_base_init(&device->vk, &layout->base,
+ VK_OBJECT_TYPE_PIPELINE_LAYOUT);
+
layout->num_sets = pCreateInfo->setLayoutCount;
unsigned dynamic_offset_count = 0;
if (!pipeline_layout)
return;
+
+ vk_object_base_finish(&pipeline_layout->base);
vk_free2(&device->vk.alloc, pAllocator, pipeline_layout);
}
memset(set, 0, mem_size);
+ vk_object_base_init(&device->vk, &set->base,
+ VK_OBJECT_TYPE_DESCRIPTOR_SET);
+
if (layout->dynamic_offset_count) {
set->dynamic_descriptors = (struct radv_descriptor_range*)((uint8_t*)set + range_offset);
}
}
}
}
+ vk_object_base_finish(&set->base);
vk_free2(&device->vk.alloc, NULL, set);
}
memset(pool, 0, sizeof(*pool));
+ vk_object_base_init(&device->vk, &pool->base,
+ VK_OBJECT_TYPE_DESCRIPTOR_POOL);
+
if (!(pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT)) {
pool->host_memory_base = (uint8_t*)pool + sizeof(struct radv_descriptor_pool);
pool->host_memory_ptr = pool->host_memory_base;
if (pool->bo)
device->ws->buffer_destroy(pool->bo);
+
+ vk_object_base_finish(&pool->base);
vk_free2(&device->vk.alloc, pAllocator, pool);
}
if (!templ)
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+ vk_object_base_init(&device->vk, &templ->base,
+ VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE);
+
templ->entry_count = entry_count;
if (pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR) {
if (!templ)
return;
+ vk_object_base_finish(&templ->base);
vk_free2(&device->vk.alloc, pAllocator, templ);
}
if (conversion == NULL)
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+ vk_object_base_init(&device->vk, &conversion->base,
+ VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION);
+
conversion->format = pCreateInfo->format;
conversion->ycbcr_model = pCreateInfo->ycbcrModel;
conversion->ycbcr_range = pCreateInfo->ycbcrRange;
RADV_FROM_HANDLE(radv_device, device, _device);
RADV_FROM_HANDLE(radv_sampler_ycbcr_conversion, ycbcr_conversion, ycbcrConversion);
- if (ycbcr_conversion)
- vk_free2(&device->vk.alloc, pAllocator, ycbcr_conversion);
+ if (!ycbcr_conversion)
+ return;
+
+ vk_object_base_finish(&ycbcr_conversion->base);
+ vk_free2(&device->vk.alloc, pAllocator, ycbcr_conversion);
}
#include "radv_constants.h"
+#include "vulkan/util/vk_object.h"
+
#include <vulkan/vulkan.h>
struct radv_descriptor_set_binding_layout {
};
struct radv_descriptor_set_layout {
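+ /* Kept first for consistency with the dispatchable objects; for a
+  * non-dispatchable handle like this the loader imposes no layout
+  * requirement, so first-member placement is convention, not ABI. */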
+ struct vk_object_base base;
+
/* The create flags for this descriptor set layout */
VkDescriptorSetLayoutCreateFlags flags;
};
struct radv_pipeline_layout {
+ struct vk_object_base base;
struct {
struct radv_descriptor_set_layout *layout;
uint32_t size;
if (!instance)
return vk_error(NULL, VK_ERROR_OUT_OF_HOST_MEMORY);
- instance->_loader_data.loaderMagic = ICD_LOADER_MAGIC;
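+ /* An instance has no parent vk_device, hence NULL for the device. */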
+ vk_object_base_init(NULL, &instance->base, VK_OBJECT_TYPE_INSTANCE);
if (pAllocator)
instance->alloc = *pAllocator;
vk_debug_report_instance_destroy(&instance->debug_report_callbacks);
+ vk_object_base_finish(&instance->base);
vk_free(&instance->alloc, instance);
}
mem->bo = NULL;
}
+ vk_object_base_finish(&mem->base);
vk_free2(&device->vk.alloc, pAllocator, mem);
}
if (mem == NULL)
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+ vk_object_base_init(&device->vk, &mem->base,
+ VK_OBJECT_TYPE_DEVICE_MEMORY);
+
if (wsi_info && wsi_info->implicit_sync)
flags |= RADEON_FLAG_IMPLICIT_SYNC;
if (!fence)
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+ vk_object_base_init(&device->vk, &fence->base, VK_OBJECT_TYPE_FENCE);
+
fence->fence_wsi = NULL;
fence->temp_syncobj = 0;
if (device->always_use_syncobj || handleTypes) {
device->ws->destroy_fence(fence->fence);
if (fence->fence_wsi)
fence->fence_wsi->destroy(fence->fence_wsi);
+
+ vk_object_base_finish(&fence->base);
vk_free2(&device->vk.alloc, pAllocator, fence);
}
if (!sem)
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+ vk_object_base_init(&device->vk, &sem->base,
+ VK_OBJECT_TYPE_SEMAPHORE);
+
sem->temporary.kind = RADV_SEMAPHORE_NONE;
sem->permanent.kind = RADV_SEMAPHORE_NONE;
radv_destroy_semaphore_part(device, &sem->temporary);
radv_destroy_semaphore_part(device, &sem->permanent);
+ vk_object_base_finish(&sem->base);
vk_free2(&device->vk.alloc, pAllocator, sem);
}
if (!event)
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+ vk_object_base_init(&device->vk, &event->base, VK_OBJECT_TYPE_EVENT);
+
event->bo = device->ws->buffer_create(device->ws, 8, 8,
RADEON_DOMAIN_GTT,
RADEON_FLAG_VA_UNCACHED | RADEON_FLAG_CPU_ACCESS | RADEON_FLAG_NO_INTERPROCESS_SHARING,
if (!event)
return;
device->ws->buffer_destroy(event->bo);
+ vk_object_base_finish(&event->base);
vk_free2(&device->vk.alloc, pAllocator, event);
}
if (buffer == NULL)
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+ vk_object_base_init(&device->vk, &buffer->base, VK_OBJECT_TYPE_BUFFER);
+
buffer->size = pCreateInfo->size;
buffer->usage = pCreateInfo->usage;
buffer->bo = NULL;
if (buffer->flags & VK_BUFFER_CREATE_SPARSE_BINDING_BIT)
device->ws->buffer_destroy(buffer->bo);
+ vk_object_base_finish(&buffer->base);
vk_free2(&device->vk.alloc, pAllocator, buffer);
}
if (framebuffer == NULL)
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+ vk_object_base_init(&device->vk, &framebuffer->base,
+ VK_OBJECT_TYPE_FRAMEBUFFER);
+
framebuffer->attachment_count = pCreateInfo->attachmentCount;
framebuffer->width = pCreateInfo->width;
framebuffer->height = pCreateInfo->height;
if (!fb)
return;
+ vk_object_base_finish(&fb->base);
vk_free2(&device->vk.alloc, pAllocator, fb);
}
if (!sampler)
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+ vk_object_base_init(&device->vk, &sampler->base,
+ VK_OBJECT_TYPE_SAMPLER);
+
radv_init_sampler(device, sampler, pCreateInfo);
sampler->ycbcr_sampler = ycbcr_conversion ? radv_sampler_ycbcr_conversion_from_handle(ycbcr_conversion->conversion): NULL;
if (!sampler)
return;
+ vk_object_base_finish(&sampler->base);
vk_free2(&device->vk.alloc, pAllocator, sampler);
}
if (!image)
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+ vk_object_base_init(&device->vk, &image->base, VK_OBJECT_TYPE_IMAGE);
+
image->type = pCreateInfo->imageType;
image->info.width = pCreateInfo->extent.width;
image->info.height = pCreateInfo->extent.height;
if (image->owned_memory != VK_NULL_HANDLE)
radv_FreeMemory(_device, image->owned_memory, pAllocator);
+ vk_object_base_finish(&image->base);
vk_free2(&device->vk.alloc, pAllocator, image);
}
if (view == NULL)
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+ vk_object_base_init(&device->vk, &view->base,
+ VK_OBJECT_TYPE_IMAGE_VIEW);
+
radv_image_view_init(view, device, pCreateInfo, NULL);
*pView = radv_image_view_to_handle(view);
if (!iview)
return;
+
+ vk_object_base_finish(&iview->base);
vk_free2(&device->vk.alloc, pAllocator, iview);
}
if (!view)
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+ vk_object_base_init(&device->vk, &view->base,
+ VK_OBJECT_TYPE_BUFFER_VIEW);
+
radv_buffer_view_init(view, device, pCreateInfo);
*pView = radv_buffer_view_to_handle(view);
if (!view)
return;
+ vk_object_base_finish(&view->base);
vk_free2(&device->vk.alloc, pAllocator, view);
}
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
memset(pass, 0, size);
+
+ vk_object_base_init(&device->vk, &pass->base,
+ VK_OBJECT_TYPE_RENDER_PASS);
+
pass->attachment_count = pCreateInfo->attachmentCount;
pass->subpass_count = pCreateInfo->subpassCount;
pass->attachments = (void *) pass + attachments_offset;
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
memset(pass, 0, size);
+
+ vk_object_base_init(&device->vk, &pass->base,
+ VK_OBJECT_TYPE_RENDER_PASS);
+
pass->attachment_count = pCreateInfo->attachmentCount;
pass->subpass_count = pCreateInfo->subpassCount;
pass->attachments = (void *) pass + attachments_offset;
if (!_pass)
return;
+
+ vk_object_base_finish(&pass->base);
vk_free2(&device->vk.alloc, pAllocator, pass->subpass_attachments);
vk_free2(&device->vk.alloc, pAllocator, pass);
}
if(pipeline->cs.buf)
free(pipeline->cs.buf);
+
+ vk_object_base_finish(&pipeline->base);
vk_free2(&device->vk.alloc, allocator, pipeline);
}
if (pipeline == NULL)
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+ vk_object_base_init(&device->vk, &pipeline->base,
+ VK_OBJECT_TYPE_PIPELINE);
+
result = radv_pipeline_init(pipeline, device, cache,
pCreateInfo, extra);
if (result != VK_SUCCESS) {
if (pipeline == NULL)
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+ vk_object_base_init(&device->vk, &pipeline->base,
+ VK_OBJECT_TYPE_PIPELINE);
+
pipeline->device = device;
pipeline->layout = radv_pipeline_layout_from_handle(pCreateInfo->layout);
assert(pipeline->layout);
if (cache == NULL)
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+ vk_object_base_init(&device->vk, &cache->base,
+ VK_OBJECT_TYPE_PIPELINE_CACHE);
+
if (pAllocator)
cache->alloc = *pAllocator;
else
return;
radv_pipeline_cache_finish(cache);
+ vk_object_base_finish(&cache->base);
vk_free2(&device->vk.alloc, pAllocator, cache);
}
};
struct radv_instance {
- VK_LOADER_DATA _loader_data;
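+ /* Must remain the first member: the loader dereferences dispatchable
+  * handles expecting VK_LOADER_DATA at offset zero, and vk_object_base
+  * (as of this series) embeds that loader data as its first field. */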
+ struct vk_object_base base;
VkAllocationCallbacks alloc;
struct cache_entry;
struct radv_pipeline_cache {
+ struct vk_object_base base;
struct radv_device * device;
pthread_mutex_t mutex;
};
struct radv_device_memory {
+ struct vk_object_base base;
struct radeon_winsys_bo *bo;
/* for dedicated allocations */
struct radv_image *image;
};
struct radv_descriptor_set {
+ struct vk_object_base base;
const struct radv_descriptor_set_layout *layout;
uint32_t size;
uint32_t buffer_count;
};
struct radv_descriptor_pool {
+ struct vk_object_base base;
struct radeon_winsys_bo *bo;
uint8_t *mapped_ptr;
uint64_t current_offset;
};
struct radv_descriptor_update_template {
+ struct vk_object_base base;
uint32_t entry_count;
VkPipelineBindPoint bind_point;
struct radv_descriptor_update_template_entry entry[0];
};
struct radv_buffer {
+ struct vk_object_base base;
VkDeviceSize size;
VkBufferUsageFlags usage;
};
struct radv_cmd_pool {
+ struct vk_object_base base;
VkAllocationCallbacks alloc;
struct list_head cmd_buffers;
struct list_head free_cmd_buffers;
};
struct radv_cmd_buffer {
- VK_LOADER_DATA _loader_data;
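+ /* Must be first, same loader-dispatch requirement as radv_instance. */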
+ struct vk_object_base base;
struct radv_device * device;
uint32_t z);
struct radv_event {
+ struct vk_object_base base;
struct radeon_winsys_bo *bo;
uint64_t *map;
};
#define SI_GS_PER_ES 128
struct radv_pipeline {
+ struct vk_object_base base;
struct radv_device * device;
struct radv_dynamic_state dynamic_state;
};
struct radv_image {
+ struct vk_object_base base;
VkImageType type;
/* The original VkFormat provided by the client. This may not match any
* of the actual surface formats.
};
struct radv_image_view {
+ struct vk_object_base base;
struct radv_image *image; /**< VkImageViewCreateInfo::image */
struct radeon_winsys_bo *bo;
VkFormat radv_get_aspect_format(struct radv_image *image, VkImageAspectFlags mask);
struct radv_sampler_ycbcr_conversion {
+ struct vk_object_base base;
VkFormat format;
VkSamplerYcbcrModelConversion ycbcr_model;
VkSamplerYcbcrRange ycbcr_range;
};
struct radv_buffer_view {
+ struct vk_object_base base;
struct radeon_winsys_bo *bo;
VkFormat vk_format;
uint64_t range; /**< VkBufferViewCreateInfo::range */
}
struct radv_sampler {
+ struct vk_object_base base;
uint32_t state[4];
struct radv_sampler_ycbcr_conversion *ycbcr_sampler;
};
struct radv_framebuffer {
+ struct vk_object_base base;
uint32_t width;
uint32_t height;
uint32_t layers;
};
struct radv_render_pass {
+ struct vk_object_base base;
uint32_t attachment_count;
uint32_t subpass_count;
struct radv_subpass_attachment * subpass_attachments;
void radv_device_finish_meta(struct radv_device *device);
struct radv_query_pool {
+ struct vk_object_base base;
struct radeon_winsys_bo *bo;
uint32_t stride;
uint32_t availability_offset;
};
struct radv_semaphore {
+ struct vk_object_base base;
struct radv_semaphore_part permanent;
struct radv_semaphore_part temporary;
};
const VkImageSubresourceRange *range);
struct radv_fence {
+ struct vk_object_base base;
struct radeon_winsys_fence *fence;
struct wsi_fence *fence_wsi;
if (!pool)
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+ vk_object_base_init(&device->vk, &pool->base,
+ VK_OBJECT_TYPE_QUERY_POOL);
switch(pCreateInfo->queryType) {
case VK_QUERY_TYPE_OCCLUSION:
return;
device->ws->buffer_destroy(pool->bo);
+
+ vk_object_base_finish(&pool->base);
vk_free2(&device->vk.alloc, pAllocator, pool);
}
if (module == NULL)
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
+ vk_object_base_init(&device->vk, &module->base,
+ VK_OBJECT_TYPE_SHADER_MODULE);
+
module->nir = NULL;
module->size = pCreateInfo->codeSize;
memcpy(module->data, pCreateInfo->pCode, module->size);
if (!module)
return;
+ vk_object_base_finish(&module->base);
vk_free2(&device->vk.alloc, pAllocator, module);
}
#include "nir/nir.h"
#include "vulkan/vulkan.h"
+#include "vulkan/util/vk_object.h"
struct radv_device;
struct radv_shader_module {
+ struct vk_object_base base;
struct nir_shader *nir;
unsigned char sha1[20];
uint32_t size;