if (cmd_buffer == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
+ vk_object_base_init(&device->vk, &cmd_buffer->base,
+ VK_OBJECT_TYPE_COMMAND_BUFFER);
+
cmd_buffer->batch.status = VK_SUCCESS;
- cmd_buffer->_loader_data.loaderMagic = ICD_LOADER_MAGIC;
cmd_buffer->device = device;
cmd_buffer->pool = pool;
cmd_buffer->level = level;
anv_cmd_state_finish(cmd_buffer);
+ vk_object_base_finish(&cmd_buffer->base);
vk_free(&cmd_buffer->pool->alloc, cmd_buffer);
}
if (pool == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
+ vk_object_base_init(&device->vk, &pool->base, VK_OBJECT_TYPE_COMMAND_POOL);
+
if (pAllocator)
pool->alloc = *pAllocator;
else
pool->alloc = device->vk.alloc;
anv_cmd_buffer_destroy(cmd_buffer);
}
+ vk_object_base_finish(&pool->base);
vk_free2(&device->vk.alloc, pAllocator, pool);
}
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
memset(set_layout, 0, sizeof(*set_layout));
+ vk_object_base_init(&device->vk, &set_layout->base,
+ VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT);
set_layout->ref_cnt = 1;
set_layout->binding_count = max_binding + 1;
return VK_SUCCESS;
}
+void
+anv_descriptor_set_layout_destroy(struct anv_device *device,
+ struct anv_descriptor_set_layout *layout)
+{
+ assert(layout->ref_cnt == 0);
+ vk_object_base_finish(&layout->base);
+ vk_free(&device->vk.alloc, layout);
+}
+
void anv_DestroyDescriptorSetLayout(
VkDevice _device,
VkDescriptorSetLayout _set_layout,
if (layout == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
+ vk_object_base_init(&device->vk, &layout->base,
+ VK_OBJECT_TYPE_PIPELINE_LAYOUT);
layout->num_sets = pCreateInfo->setLayoutCount;
unsigned dynamic_offset_count = 0;
for (uint32_t i = 0; i < pipeline_layout->num_sets; i++)
anv_descriptor_set_layout_unref(device, pipeline_layout->set[i].layout);
+ vk_object_base_finish(&pipeline_layout->base);
vk_free2(&device->vk.alloc, pAllocator, pipeline_layout);
}
if (!pool)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
+ vk_object_base_init(&device->vk, &pool->base,
+ VK_OBJECT_TYPE_DESCRIPTOR_POOL);
pool->size = pool_size;
pool->next = 0;
pool->free_list = EMPTY;
anv_device_release_bo(device, pool->bo);
anv_state_stream_finish(&pool->surface_state_stream);
+ vk_object_base_finish(&pool->base);
vk_free2(&device->vk.alloc, pAllocator, pool);
}
uint64_t pool_vma_offset =
util_vma_heap_alloc(&pool->bo_heap, set_buffer_size, 32);
if (pool_vma_offset == 0) {
anv_descriptor_pool_free_set(pool, set);
return vk_error(VK_ERROR_FRAGMENTED_POOL);
}
assert(pool_vma_offset >= POOL_HEAP_OFFSET &&
set->desc_surface_state = ANV_STATE_NULL;
}
+ vk_object_base_init(&device->vk, &set->base,
+ VK_OBJECT_TYPE_DESCRIPTOR_SET);
set->pool = pool;
set->layout = layout;
anv_descriptor_set_layout_ref(layout);
list_del(&set->pool_link);
+ vk_object_base_finish(&set->base);
anv_descriptor_pool_free_set(pool, set);
}
if (template == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
+ vk_object_base_init(&device->vk, &template->base,
+ VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE);
template->bind_point = pCreateInfo->pipelineBindPoint;
if (pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET)
ANV_FROM_HANDLE(anv_descriptor_update_template, template,
descriptorUpdateTemplate);
+ vk_object_base_finish(&template->base);
vk_free2(&device->vk.alloc, pAllocator, template);
}
goto fail_fd;
}
- device->_loader_data.loaderMagic = ICD_LOADER_MAGIC;
+ vk_object_base_init(NULL, &device->base, VK_OBJECT_TYPE_PHYSICAL_DEVICE);
device->instance = instance;
assert(strlen(path) < ARRAY_SIZE(device->path));
close(device->local_fd);
if (device->master_fd >= 0)
close(device->master_fd);
+ vk_object_base_finish(&device->base);
vk_free(&device->instance->alloc, device);
}
if (!instance)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
- instance->_loader_data.loaderMagic = ICD_LOADER_MAGIC;
+ vk_object_base_init(NULL, &instance->base, VK_OBJECT_TYPE_INSTANCE);
if (pAllocator)
instance->alloc = *pAllocator;
driDestroyOptionCache(&instance->dri_options);
driDestroyOptionInfo(&instance->available_dri_options);
+ vk_object_base_finish(&instance->base);
vk_free(&instance->alloc, instance);
}
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
assert(pAllocateInfo->memoryTypeIndex < pdevice->memory.type_count);
+ vk_object_base_init(&device->vk, &mem->base, VK_OBJECT_TYPE_DEVICE_MEMORY);
mem->type = mem_type;
mem->map = NULL;
mem->map_size = 0;
AHardwareBuffer_release(mem->ahw);
#endif
+ vk_object_base_finish(&mem->base);
vk_free2(&device->vk.alloc, pAllocator, mem);
}
if (event == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
+ vk_object_base_init(&device->vk, &event->base, VK_OBJECT_TYPE_EVENT);
event->state = anv_state_pool_alloc(&device->dynamic_state_pool,
sizeof(uint64_t), 8);
*(uint64_t *)event->state.map = VK_EVENT_RESET;
return;
anv_state_pool_free(&device->dynamic_state_pool, event->state);
+
+ vk_object_base_finish(&event->base);
vk_free2(&device->vk.alloc, pAllocator, event);
}
if (buffer == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
+ vk_object_base_init(&device->vk, &buffer->base, VK_OBJECT_TYPE_BUFFER);
buffer->size = pCreateInfo->size;
buffer->usage = pCreateInfo->usage;
buffer->address = ANV_NULL_ADDRESS;
if (!buffer)
return;
+ vk_object_base_finish(&buffer->base);
vk_free2(&device->vk.alloc, pAllocator, buffer);
}
sampler->bindless_state);
}
+ vk_object_base_finish(&sampler->base);
vk_free2(&device->vk.alloc, pAllocator, sampler);
}
framebuffer->attachment_count = 0;
}
+ vk_object_base_init(&device->vk, &framebuffer->base,
+ VK_OBJECT_TYPE_FRAMEBUFFER);
+
framebuffer->width = pCreateInfo->width;
framebuffer->height = pCreateInfo->height;
framebuffer->layers = pCreateInfo->layers;
if (!fb)
return;
+ vk_object_base_finish(&fb->base);
vk_free2(&device->vk.alloc, pAllocator, fb);
}
memset(conversion, 0, sizeof(*conversion));
+ vk_object_base_init(&device->vk, &conversion->base,
+ VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION);
conversion->format = anv_get_format(pCreateInfo->format);
conversion->ycbcr_model = pCreateInfo->ycbcrModel;
conversion->ycbcr_range = pCreateInfo->ycbcrRange;
if (!conversion)
return;
+ vk_object_base_finish(&conversion->base);
vk_free2(&device->vk.alloc, pAllocator, conversion);
}
if (!image)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
+ vk_object_base_init(&device->vk, &image->base, VK_OBJECT_TYPE_IMAGE);
image->type = pCreateInfo->imageType;
image->extent = pCreateInfo->extent;
image->vk_format = pCreateInfo->format;
}
}
+ vk_object_base_finish(&image->base);
vk_free2(&device->vk.alloc, pAllocator, image);
}
if (iview == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
+ vk_object_base_init(&device->vk, &iview->base, VK_OBJECT_TYPE_IMAGE_VIEW);
+
const VkImageSubresourceRange *range = &pCreateInfo->subresourceRange;
assert(range->layerCount > 0);
+ vk_object_base_init(&device->vk, &view->base, VK_OBJECT_TYPE_BUFFER_VIEW);
+
/* TODO: Handle the format swizzle? */
view->format = anv_get_isl_format(&device->info, pCreateInfo->format,
VK_IMAGE_ASPECT_COLOR_BIT,
VK_IMAGE_TILING_LINEAR);
anv_state_pool_free(&device->surface_state_pool,
view->writeonly_storage_surface_state);
+ vk_object_base_finish(&view->base);
vk_free2(&device->vk.alloc, pAllocator, view);
}
* each array member of anv_subpass must be a valid pointer if not NULL.
*/
memset(pass, 0, ma.size);
+ vk_object_base_init(&device->vk, &pass->base, VK_OBJECT_TYPE_RENDER_PASS);
pass->attachment_count = pCreateInfo->attachmentCount;
pass->subpass_count = pCreateInfo->subpassCount;
pass->attachments = attachments;
* each array member of anv_subpass must be a valid pointer if not NULL.
*/
memset(pass, 0, ma.size);
+ vk_object_base_init(&device->vk, &pass->base, VK_OBJECT_TYPE_RENDER_PASS);
pass->attachment_count = pCreateInfo->attachmentCount;
pass->subpass_count = pCreateInfo->subpassCount;
pass->attachments = attachments;
ANV_FROM_HANDLE(anv_device, device, _device);
ANV_FROM_HANDLE(anv_render_pass, pass, _pass);
+ vk_object_base_finish(&pass->base);
vk_free2(&device->vk.alloc, pAllocator, pass);
}
if (module == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
+ vk_object_base_init(&device->vk, &module->base,
+ VK_OBJECT_TYPE_SHADER_MODULE);
module->size = pCreateInfo->codeSize;
memcpy(module->data, pCreateInfo->pCode, module->size);
if (!module)
return;
+ vk_object_base_finish(&module->base);
vk_free2(&device->vk.alloc, pAllocator, module);
}
unreachable("invalid pipeline type");
}
+ vk_object_base_finish(&pipeline->base);
vk_free2(&device->vk.alloc, pAllocator, pipeline);
}
if (alloc == NULL)
alloc = &device->vk.alloc;
+ vk_object_base_init(&device->vk, &pipeline->base.base,
+ VK_OBJECT_TYPE_PIPELINE);
pipeline->base.device = device;
pipeline->base.type = ANV_PIPELINE_GRAPHICS;
struct anv_device *device,
bool cache_enabled)
{
+ vk_object_base_init(&device->vk, &cache->base,
+ VK_OBJECT_TYPE_PIPELINE_CACHE);
cache->device = device;
pthread_mutex_init(&cache->mutex, NULL);
_mesa_hash_table_destroy(cache->nir_cache, NULL);
}
+
+ vk_object_base_finish(&cache->base);
}
static struct anv_shader_bin *
};
struct anv_physical_device {
- VK_LOADER_DATA _loader_data;
+ struct vk_object_base base;
/* Link in anv_instance::physical_devices */
struct list_head link;
};
struct anv_instance {
- VK_LOADER_DATA _loader_data;
+ struct vk_object_base base;
VkAllocationCallbacks alloc;
};
struct anv_queue {
- VK_LOADER_DATA _loader_data;
+ struct vk_object_base base;
struct anv_device * device;
};
struct anv_pipeline_cache {
+ struct vk_object_base base;
struct anv_device * device;
pthread_mutex_t mutex;
}))
struct anv_device_memory {
+ struct vk_object_base base;
+
struct list_head link;
struct anv_bo * bo;
bool sampler);
struct anv_descriptor_set_layout {
+ struct vk_object_base base;
+
/* Descriptor set layouts can be destroyed at almost any time */
uint32_t ref_cnt;
struct anv_descriptor_set_binding_layout binding[0];
};
+void anv_descriptor_set_layout_destroy(struct anv_device *device,
+ struct anv_descriptor_set_layout *layout);
+
static inline void
anv_descriptor_set_layout_ref(struct anv_descriptor_set_layout *layout)
{
assert(layout && layout->ref_cnt >= 1);
p_atomic_inc(&layout->ref_cnt);
}

static inline void
anv_descriptor_set_layout_unref(struct anv_device *device,
struct anv_descriptor_set_layout *layout)
{
assert(layout && layout->ref_cnt >= 1);
if (p_atomic_dec_zero(&layout->ref_cnt))
- vk_free(&device->vk.alloc, layout);
+ anv_descriptor_set_layout_destroy(device, layout);
}
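/* A minimal sketch (not part of the patch) of the lifetime rule the refcount
 * above enforces: descriptor sets take a reference on their layout (see the
 * anv_descriptor_set_layout_ref() call in anv_descriptor_set_create), so
 * vkDestroyDescriptorSetLayout only drops a reference, and whichever unref
 * lands last is the one that reaches anv_descriptor_set_layout_destroy().
 * example_set_lifetime is a hypothetical name used for illustration only.
 */
static inline void
example_set_lifetime(struct anv_device *device,
                     struct anv_descriptor_set_layout *layout)
{
   anv_descriptor_set_layout_ref(layout);            /* at set creation */
   /* ... the app may destroy the VkDescriptorSetLayout here ... */
   anv_descriptor_set_layout_unref(device, layout);  /* at set destruction;
                                                      * the last unref frees */
}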
struct anv_descriptor {
};
struct anv_descriptor_set {
+ struct vk_object_base base;
+
struct anv_descriptor_pool *pool;
struct anv_descriptor_set_layout *layout;
uint32_t size;
};
struct anv_buffer_view {
+ struct vk_object_base base;
+
enum isl_format format; /**< VkBufferViewCreateInfo::format */
uint64_t range; /**< VkBufferViewCreateInfo::range */
};
struct anv_descriptor_pool {
+ struct vk_object_base base;
+
uint32_t size;
uint32_t next;
uint32_t free_list;
};
struct anv_descriptor_update_template {
+ struct vk_object_base base;
+
VkPipelineBindPoint bind_point;
/* The descriptor set this template corresponds to. This value is only
};
struct anv_pipeline_layout {
+ struct vk_object_base base;
+
struct {
struct anv_descriptor_set_layout *layout;
uint32_t dynamic_offset_start;
};
struct anv_buffer {
+ struct vk_object_base base;
+
struct anv_device * device;
VkDeviceSize size;
};
struct anv_cmd_pool {
+ struct vk_object_base base;
VkAllocationCallbacks alloc;
struct list_head cmd_buffers;
};
};
struct anv_cmd_buffer {
- VK_LOADER_DATA _loader_data;
+ struct vk_object_base base;
struct anv_device * device;
};
struct anv_fence {
+ struct vk_object_base base;
+
/* Permanent fence state. Every fence has some form of permanent state
* (type != ANV_SEMAPHORE_TYPE_NONE). This may be a BO to fence on (for
* cross-process fences) or it could just be a dummy for use internally.
struct anv_fence *fence);
struct anv_event {
+ struct vk_object_base base;
uint64_t semaphore;
struct anv_state state;
};
};
struct anv_semaphore {
+ struct vk_object_base base;
+
uint32_t refcount;
/* Permanent semaphore state. Every semaphore has some form of permanent
struct anv_semaphore *semaphore);
struct anv_shader_module {
+ struct vk_object_base base;
+
unsigned char sha1[20];
uint32_t size;
char data[0];
};
struct anv_pipeline {
+ struct vk_object_base base;
+
struct anv_device * device;
struct anv_batch batch;
};
struct anv_image {
+ struct vk_object_base base;
+
VkImageType type; /**< VkImageCreateInfo::imageType */
/* The original VkFormat provided by the client. This may not match any
* of the actual surface formats.
}
struct anv_image_view {
+ struct vk_object_base base;
+
const struct anv_image *image; /**< VkImageViewCreateInfo::image */
VkImageAspectFlags aspect_mask;
struct anv_ycbcr_conversion {
+ struct vk_object_base base;
+
const struct anv_format * format;
VkSamplerYcbcrModelConversion ycbcr_model;
VkSamplerYcbcrRange ycbcr_range;
};
struct anv_sampler {
+ struct vk_object_base base;
+
uint32_t state[3][4];
uint32_t n_planes;
struct anv_ycbcr_conversion *conversion;
};
struct anv_framebuffer {
+ struct vk_object_base base;
+
uint32_t width;
uint32_t height;
uint32_t layers;
};
struct anv_render_pass {
+ struct vk_object_base base;
+
uint32_t attachment_count;
uint32_t subpass_count;
/* An array of subpass_count+1 flushes, one per subpass boundary */
#define ANV_PIPELINE_STATISTICS_MASK 0x000007ff
struct anv_query_pool {
+ struct vk_object_base base;
+
VkQueryType type;
VkQueryPipelineStatisticFlags pipeline_statistics;
/** Stride between slots, in bytes */
ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_sampler, VkSampler)
ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_semaphore, VkSemaphore)
ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_shader_module, VkShaderModule)
-ANV_DEFINE_NONDISP_HANDLE_CASTS(vk_debug_report_callback, VkDebugReportCallbackEXT)
ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_ycbcr_conversion, VkSamplerYcbcrConversion)
/* Gen-specific function declarations */
VkResult
anv_queue_init(struct anv_device *device, struct anv_queue *queue)
{
- queue->_loader_data.loaderMagic = ICD_LOADER_MAGIC;
+ vk_object_base_init(&device->vk, &queue->base, VK_OBJECT_TYPE_QUEUE);
queue->device = device;
queue->flags = 0;
void
anv_queue_finish(struct anv_queue *queue)
{
+ vk_object_base_finish(&queue->base);
}
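/* Note, not from the patch: anv_queue is embedded in anv_device rather than
 * heap-allocated per call, which is why this init/finish pair has no matching
 * vk_alloc/vk_free; finish only tears down the embedded base.
 */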
static VkResult
if (fence == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
+ vk_object_base_init(&device->vk, &fence->base, VK_OBJECT_TYPE_FENCE);
+
if (device->physical->has_syncobj_wait) {
fence->permanent.type = ANV_FENCE_TYPE_SYNCOBJ;
anv_fence_impl_cleanup(device, &fence->temporary);
anv_fence_impl_cleanup(device, &fence->permanent);
+ vk_object_base_finish(&fence->base);
vk_free2(&device->vk.alloc, pAllocator, fence);
}
if (semaphore == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
+ vk_object_base_init(&device->vk, &semaphore->base, VK_OBJECT_TYPE_SEMAPHORE);
+
p_atomic_set(&semaphore->refcount, 1);
const VkExportSemaphoreCreateInfo *export =
anv_semaphore_impl_cleanup(device, &semaphore->temporary);
anv_semaphore_impl_cleanup(device, &semaphore->permanent);
+
+ vk_object_base_finish(&semaphore->base);
vk_free(&device->vk.alloc, semaphore);
}
if (pipeline == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
+ vk_object_base_init(&device->vk, &pipeline->base.base,
+ VK_OBJECT_TYPE_PIPELINE);
pipeline->base.device = device;
pipeline->base.type = ANV_PIPELINE_COMPUTE;
if (pool == NULL)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
+ vk_object_base_init(&device->vk, &pool->base, VK_OBJECT_TYPE_QUERY_POOL);
pool->type = pCreateInfo->queryType;
pool->pipeline_statistics = pipeline_statistics;
pool->stride = uint64s_per_slot * sizeof(uint64_t);
return;
anv_device_release_bo(device, pool->bo);
+ vk_object_base_finish(&pool->base);
vk_free2(&device->vk.alloc, pAllocator, pool);
}
if (!sampler)
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
+ vk_object_base_init(&device->vk, &sampler->base, VK_OBJECT_TYPE_SAMPLER);
sampler->n_planes = 1;
uint32_t border_color_stride = GEN_IS_HASWELL ? 512 : 64;
#include "vk_object.h"
+void
+vk_object_base_init(UNUSED struct vk_device *device,
+ struct vk_object_base *base,
+ UNUSED VkObjectType obj_type)
+{
+ base->_loader_data.loaderMagic = ICD_LOADER_MAGIC;
+}
+
+void
+vk_object_base_finish(UNUSED struct vk_object_base *base)
+{
+}
+
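/* A minimal sketch (not part of the patch) of the pattern every conversion
 * hunk above follows: initialize the embedded base as soon as allocation
 * succeeds, and finish it immediately before freeing. example_thing,
 * example_create, and example_destroy are hypothetical names;
 * VK_OBJECT_TYPE_UNKNOWN stands in for the object's real type, and
 * vk_alloc()/vk_free() are assumed to come from Mesa's vk_alloc.h.
 */
struct example_thing {
   struct vk_object_base base;
};

static VkResult
example_create(struct vk_device *device, struct example_thing **out)
{
   struct example_thing *thing =
      vk_alloc(&device->alloc, sizeof(*thing), 8,
               VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (thing == NULL)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   vk_object_base_init(device, &thing->base, VK_OBJECT_TYPE_UNKNOWN);
   *out = thing;
   return VK_SUCCESS;
}

static void
example_destroy(struct vk_device *device, struct example_thing *thing)
{
   vk_object_base_finish(&thing->base);
   vk_free(&device->alloc, thing);
}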
void
vk_device_init(struct vk_device *device,
UNUSED const VkDeviceCreateInfo *pCreateInfo,
const VkAllocationCallbacks *instance_alloc,
const VkAllocationCallbacks *device_alloc)
{
- device->_loader_data.loaderMagic = ICD_LOADER_MAGIC;
+ vk_object_base_init(device, &device->base, VK_OBJECT_TYPE_DEVICE);
if (device_alloc)
device->alloc = *device_alloc;
else
void
vk_device_finish(UNUSED struct vk_device *device)
{
+ vk_object_base_finish(&device->base);
}
extern "C" {
#endif
-struct vk_device {
+struct vk_device;
+
+struct vk_object_base {
VK_LOADER_DATA _loader_data;
+};
+
+void vk_object_base_init(UNUSED struct vk_device *device,
+ struct vk_object_base *base,
+ UNUSED VkObjectType obj_type);
+void vk_object_base_finish(UNUSED struct vk_object_base *base);
+
+
+struct vk_device {
+ struct vk_object_base base;
VkAllocationCallbacks alloc;
};
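/* Hedged aside, not part of the patch: for dispatchable handles (instance,
 * physical device, device, queue, command buffer) the ICD loader expects the
 * VK_LOADER_DATA dispatch slot at offset 0 of the object, which is why the
 * embedded vk_object_base that now carries it is kept as the first member of
 * every struct converted above. example_dispatchable_object is hypothetical:
 */
struct example_dispatchable_object {
   struct vk_object_base base;   /* must remain at offset 0 */
   /* driver-private state follows */
};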