dump_words(words.data(), words.size());
}
-std::unique_ptr<pipeline::Shader_module> load_shader(const char *filename)
+std::unique_ptr<pipeline::Shader_module> load_shader(vulkan::Vulkan_device &device,
+ const char *filename)
{
std::cerr << "loading " << filename << std::endl;
auto file = load_file(filename);
.codeSize = file->size() * sizeof(spirv::Word),
.pCode = file->data(),
};
- return pipeline::Shader_module::make(shader_module_create_info);
+ return pipeline::Shader_module::create(device, shader_module_create_info);
}
-std::unique_ptr<pipeline::Pipeline_layout> make_pipeline_layout()
+std::unique_ptr<pipeline::Pipeline_layout> make_pipeline_layout(vulkan::Vulkan_device &device)
{
VkPipelineLayoutCreateInfo pipeline_layout_create_info = {
.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
.pushConstantRangeCount = 0,
.pPushConstantRanges = nullptr,
};
- return pipeline::Pipeline_layout::make(pipeline_layout_create_info);
+ return pipeline::Pipeline_layout::create(device, pipeline_layout_create_info);
}
template <typename Integer_type>
}
try
{
- auto vertex_shader = load_shader(vertex_shader_filename);
- auto fragment_shader = load_shader(fragment_shader_filename);
+ VkInstanceCreateInfo instance_create_info{
+ .sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
+ .pNext = nullptr,
+ .flags = 0,
+ .pApplicationInfo = nullptr,
+ .enabledLayerCount = 0,
+ .ppEnabledLayerNames = nullptr,
+ .enabledExtensionCount = 0,
+ .ppEnabledExtensionNames = nullptr,
+ };
+ auto vulkan_instance = util::get<std::unique_ptr<vulkan::Vulkan_instance>>(
+ vulkan::Vulkan_instance::create(instance_create_info));
+ constexpr std::size_t device_queue_create_info_count = 1;
+ constexpr std::size_t queue_count = 1;
+ float queue_priorities[queue_count] = {};
+ VkDeviceQueueCreateInfo device_queue_create_infos[device_queue_create_info_count] = {
+ {
+ .sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
+ .pNext = nullptr,
+ .flags = 0,
+ .queueFamilyIndex = 0,
+ .queueCount = queue_count,
+ .pQueuePriorities = queue_priorities,
+ },
+ };
+ VkDeviceCreateInfo device_create_info{
+ .sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
+ .pNext = nullptr,
+ .flags = 0,
+ .queueCreateInfoCount = device_queue_create_info_count,
+ .pQueueCreateInfos = device_queue_create_infos,
+ .enabledLayerCount = 0,
+ .ppEnabledLayerNames = nullptr,
+ .enabledExtensionCount = 0,
+ .ppEnabledExtensionNames = nullptr,
+ .pEnabledFeatures = nullptr,
+ };
+ auto vulkan_device = util::get<std::unique_ptr<vulkan::Vulkan_device>>(
+ vulkan::Vulkan_device::create(vulkan_instance->physical_device, device_create_info));
+ auto vertex_shader = load_shader(*vulkan_device, vertex_shader_filename);
+ auto fragment_shader = load_shader(*vulkan_device, fragment_shader_filename);
auto vertexes = load_wavefront_obj_file(vertexes_filename);
- auto pipeline_layout = make_pipeline_layout();
+ auto pipeline_layout = make_pipeline_layout(*vulkan_device);
constexpr std::size_t main_color_attachment_index = 0;
constexpr std::size_t attachment_count = main_color_attachment_index + 1;
VkAttachmentDescription attachments[attachment_count] = {};
.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE,
.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE,
.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED,
- .finalLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
+ .finalLayout = VK_IMAGE_LAYOUT_GENERAL,
};
constexpr std::size_t color_attachment_count = 1;
VkAttachmentReference color_attachment_references[color_attachment_count] = {
.dependencyCount = 0,
.pDependencies = nullptr,
};
- auto render_pass = pipeline::Render_pass::make(render_pass_create_info);
+ auto render_pass =
+ vulkan::Vulkan_render_pass::create(*vulkan_device, render_pass_create_info);
constexpr std::size_t stage_index_vertex = 0;
constexpr std::size_t stage_index_fragment = stage_index_vertex + 1;
constexpr std::size_t stage_count = stage_index_fragment + 1;
.basePipelineHandle = VK_NULL_HANDLE,
.basePipelineIndex = -1,
};
- auto graphics_pipeline =
- pipeline::Graphics_pipeline::make(nullptr, graphics_pipeline_create_info);
+ auto graphics_pipeline = pipeline::Graphics_pipeline::create(
+ *vulkan_device, nullptr, graphics_pipeline_create_info);
VkImageCreateInfo image_create_info = {
.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
.pNext = nullptr,
}
}
-std::unique_ptr<Graphics_pipeline> Graphics_pipeline::make(
- Pipeline_cache *pipeline_cache, const VkGraphicsPipelineCreateInfo &create_info)
+std::unique_ptr<Graphics_pipeline> Graphics_pipeline::create(
+ vulkan::Vulkan_device &,
+ Pipeline_cache *pipeline_cache,
+ const VkGraphicsPipelineCreateInfo &create_info)
{
assert(create_info.sType == VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO);
- auto *render_pass = Render_pass::from_handle(create_info.renderPass);
+ auto *render_pass = vulkan::Vulkan_render_pass::from_handle(create_info.renderPass);
assert(render_pass);
auto *pipeline_layout = Pipeline_layout::from_handle(create_info.layout);
assert(pipeline_layout);
reinterpret_cast<Fragment_shader_function>(shader_entry_point_address);
#warning finish implementing Graphics_pipeline::make
continue;
-#warning finish implementing Graphics_pipeline::make
- throw std::runtime_error("creating fragment shaders is not implemented");
case spirv::Execution_model::geometry:
#warning finish implementing Graphics_pipeline::make
throw std::runtime_error("creating geometry shaders is not implemented");
{
#warning finish implementing Pipeline_cache
public:
- static std::unique_ptr<Pipeline_cache> make(
- const VkPipelineCacheCreateInfo &pipeline_cache_create_info)
+ static std::unique_ptr<Pipeline_cache> create(vulkan::Vulkan_device &,
+ const VkPipelineCacheCreateInfo &create_info)
{
-#warning finish implementing Pipeline_cache::make
+ assert(create_info.sType == VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO);
+ assert(create_info.initialDataSize == 0 || create_info.pInitialData);
+#warning finish implementing Pipeline_cache::create
return std::make_unique<Pipeline_cache>();
}
};
-class Render_pass : public vulkan::Vulkan_nondispatchable_object<Render_pass, VkRenderPass>
-{
-#warning finish implementing Render_pass
-public:
- static std::unique_ptr<Render_pass> make(const VkRenderPassCreateInfo &render_pass_create_info)
- {
-#warning finish implementing Render_pass::make
- return std::make_unique<Render_pass>();
- }
-};
-
class Pipeline_layout
    : public vulkan::Vulkan_nondispatchable_object<Pipeline_layout, VkPipelineLayout>
{
#warning finish implementing Pipeline_layout
public:
-    static std::unique_ptr<Pipeline_layout> make(
-        const VkPipelineLayoutCreateInfo &pipeline_layout_create_info)
+    // Stub: create_info is currently ignored; the device parameter is unnamed/unused
+    // and exists for signature symmetry with the other create() factories.
+    static std::unique_ptr<Pipeline_layout> create(vulkan::Vulkan_device &,
+                                                   const VkPipelineLayoutCreateInfo &create_info)
    {
-#warning finish implementing Pipeline_layout::make
+#warning finish implementing Pipeline_layout::create
        return std::make_unique<Pipeline_layout>();
    }
};
assert(byte_count % sizeof(spirv::Word) == 0);
return byte_count / sizeof(spirv::Word);
}
- static std::unique_ptr<Shader_module> make(const VkShaderModuleCreateInfo &create_info)
+ static std::unique_ptr<Shader_module> create(vulkan::Vulkan_device &,
+ const VkShaderModuleCreateInfo &create_info)
{
struct Code_deleter
{
{
}
virtual ~Pipeline() = default;
- static std::unique_ptr<Pipeline> move_from_handle(VkPipeline pipeline) noexcept
- {
- return std::unique_ptr<Pipeline>(from_handle(pipeline));
- }
- static Pipeline *from_handle(VkPipeline pipeline) noexcept
- {
- return reinterpret_cast<Pipeline *>(pipeline);
- }
protected:
static llvm_wrapper::Module optimize_module(llvm_wrapper::Module module,
std::uint32_t instance_id,
const vulkan::Vulkan_image &color_attachment,
void *const *bindings);
- static std::unique_ptr<Graphics_pipeline> make(Pipeline_cache *pipeline_cache,
- const VkGraphicsPipelineCreateInfo &create_info);
+ static std::unique_ptr<Graphics_pipeline> create(
+ vulkan::Vulkan_device &,
+ Pipeline_cache *pipeline_cache,
+ const VkGraphicsPipelineCreateInfo &create_info);
static std::unique_ptr<Graphics_pipeline> move_from_handle(VkPipeline pipeline) noexcept
{
return std::unique_ptr<Graphics_pipeline>(from_handle(pipeline));
#include <type_traits>
#include <vector>
#include <algorithm>
+#include <atomic>
namespace kazan
{
std::unique_ptr<Vulkan_image> Vulkan_image::create(Vulkan_device &device,
const VkImageCreateInfo &create_info)
{
-#warning finish implementing Vulkan_image::create
return std::make_unique<Vulkan_image>(Vulkan_image_descriptor(create_info));
}
+// Creates a buffer object with no bound memory; validation of create_info
+// (sType, flags, size != 0) happens in the Vulkan_buffer_descriptor constructor.
+// Memory binding is deferred to vkBindBufferMemory. `device` is currently unused.
+std::unique_ptr<Vulkan_buffer> Vulkan_buffer::create(Vulkan_device &device,
+                                                     const VkBufferCreateInfo &create_info)
+{
+    return std::make_unique<Vulkan_buffer>(Vulkan_buffer_descriptor(create_info));
+}
+
std::unique_ptr<Vulkan_image_view> Vulkan_image_view::create(
    Vulkan_device &device, const VkImageViewCreateInfo &create_info)
{
    assert(create_info.sType == VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO);
-    assert(create_info.image);
-    return std::make_unique<Vulkan_image_view>(*Vulkan_image::from_handle(create_info.image),
+    auto *image = Vulkan_image::from_handle(create_info.image);
+    assert(image);
+    // Resolve the VK_REMAINING_* sentinels to concrete counts before validating,
+    // so the stored subresource range is always fully specified.
+    VkImageSubresourceRange subresource_range = create_info.subresourceRange;
+    assert(subresource_range.baseArrayLayer < image->descriptor.array_layers);
+    assert(subresource_range.baseMipLevel < image->descriptor.mip_levels);
+    if(subresource_range.layerCount == VK_REMAINING_ARRAY_LAYERS)
+        subresource_range.layerCount =
+            image->descriptor.array_layers - subresource_range.baseArrayLayer;
+    if(subresource_range.levelCount == VK_REMAINING_MIP_LEVELS)
+        subresource_range.levelCount =
+            image->descriptor.mip_levels - subresource_range.baseMipLevel;
+    assert(subresource_range.layerCount != 0);
+    assert(subresource_range.levelCount != 0);
+    // Subtraction form avoids unsigned overflow when checking range fits in image.
+    assert(image->descriptor.array_layers - subresource_range.baseArrayLayer
+           >= subresource_range.layerCount);
+    assert(image->descriptor.mip_levels - subresource_range.baseMipLevel
+           >= subresource_range.levelCount);
+    assert(create_info.viewType == VK_IMAGE_VIEW_TYPE_2D
+           && "image view with create_info.viewType != VK_IMAGE_VIEW_TYPE_2D is not implemented");
+    assert(is_identity_component_mapping(create_info.components)
+           && "image view with non-identity swizzle is not implemented");
+    return std::make_unique<Vulkan_image_view>(*image,
                                               create_info.viewType,
                                               create_info.format,
-                                               create_info.components,
-                                               create_info.subresourceRange);
+                                               normalize_component_mapping(create_info.components),
+                                               subresource_range);
+}
+
+// Validates a VkRenderPassCreateInfo against this implementation's current
+// restrictions (exactly one subpass, one used color attachment, no input/
+// resolve/preserve attachments, external-only dependencies) and records the
+// attachment descriptions plus which attachment indices are the color and
+// optional depth/stencil targets.
+std::unique_ptr<Vulkan_render_pass> Vulkan_render_pass::create(
+    Vulkan_device &device, const VkRenderPassCreateInfo &create_info)
+{
+    assert(create_info.sType == VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO);
+    assert(create_info.attachmentCount == 0 || create_info.pAttachments);
+    assert(create_info.subpassCount != 0);
+    assert(create_info.pSubpasses);
+    assert(create_info.dependencyCount == 0 || create_info.pDependencies);
+
+    assert(create_info.subpassCount == 1 && "render pass not implemented for subpassCount != 1");
+    // Copy attachment descriptions; Vulkan_framebuffer::create later validates
+    // its image views against these.
+    std::vector<VkAttachmentDescription> attachments(
+        create_info.pAttachments, create_info.pAttachments + create_info.attachmentCount);
+    util::optional<std::uint32_t> color_attachment_index;
+    util::optional<std::uint32_t> depth_stencil_attachment_index;
+
+    for(std::uint32_t i = 0; i < create_info.subpassCount; i++)
+    {
+        auto &subpass = create_info.pSubpasses[i];
+        assert(subpass.inputAttachmentCount == 0 || subpass.pInputAttachments);
+        assert(subpass.colorAttachmentCount == 0 || subpass.pColorAttachments);
+        assert(subpass.preserveAttachmentCount == 0 || subpass.pPreserveAttachments);
+        assert(subpass.pipelineBindPoint == VK_PIPELINE_BIND_POINT_GRAPHICS);
+
+        assert(subpass.flags == 0
+               && "render pass not implemented for VkSubpassDescription::flags != 0");
+        assert(
+            subpass.inputAttachmentCount == 0
+            && "render pass not implemented for VkSubpassDescription::inputAttachmentCount != 0");
+        assert(
+            subpass.pResolveAttachments == nullptr
+            && "render pass not implemented for VkSubpassDescription::pResolveAttachments != nullptr");
+        if(subpass.pDepthStencilAttachment)
+        {
+            assert(subpass.pDepthStencilAttachment->attachment != VK_ATTACHMENT_UNUSED);
+            assert(subpass.pDepthStencilAttachment->attachment < create_info.attachmentCount);
+            auto &depth_stencil_attachment =
+                create_info.pAttachments[subpass.pDepthStencilAttachment->attachment];
+            assert(depth_stencil_attachment.flags == 0
+                   && "render pass not implemented for depth_stencil_attachment.flags != 0");
+            switch(depth_stencil_attachment.format)
+            {
+            case VK_FORMAT_D32_SFLOAT:
+            case VK_FORMAT_D32_SFLOAT_S8_UINT:
+                break;
+            default:
+                assert(!"depth-stencil attachment format not implemented");
+            }
+            assert(depth_stencil_attachment.samples == VK_SAMPLE_COUNT_1_BIT
+                   && "render pass not implemented for depth_stencil_attachment.samples != VK_SAMPLE_COUNT_1_BIT");
+            assert(depth_stencil_attachment.loadOp == VK_ATTACHMENT_LOAD_OP_CLEAR
+                   && "render pass not implemented for depth_stencil_attachment.loadOp != VK_ATTACHMENT_LOAD_OP_CLEAR");
+            assert(depth_stencil_attachment.stencilLoadOp == VK_ATTACHMENT_LOAD_OP_DONT_CARE
+                   && "render pass not implemented for depth_stencil_attachment.stencilLoadOp != VK_ATTACHMENT_LOAD_OP_DONT_CARE");
+            depth_stencil_attachment_index = subpass.pDepthStencilAttachment->attachment;
+        }
+        assert(
+            subpass.preserveAttachmentCount == 0
+            && "render pass not implemented for VkSubpassDescription::preserveAttachmentCount != "
+               "0");
+        // Count attachments that are not VK_ATTACHMENT_UNUSED; exactly one must remain.
+        std::size_t valid_color_attachment_count = 0;
+        for(std::uint32_t j = 0; j < subpass.colorAttachmentCount; j++)
+        {
+            auto &color_attachment_reference = subpass.pColorAttachments[j];
+            if(color_attachment_reference.attachment == VK_ATTACHMENT_UNUSED)
+                continue;
+            valid_color_attachment_count++;
+            assert(color_attachment_reference.attachment < create_info.attachmentCount);
+            auto &color_attachment =
+                create_info.pAttachments[color_attachment_reference.attachment];
+            assert(color_attachment.flags == 0
+                   && "render pass not implemented for color_attachment.flags != 0");
+            assert(color_attachment.format == VK_FORMAT_B8G8R8A8_UNORM
+                   && "render pass not implemented for color_attachment.format != VK_FORMAT_B8G8R8A8_UNORM");
+            assert(color_attachment.samples == VK_SAMPLE_COUNT_1_BIT
+                   && "render pass not implemented for color_attachment.samples != VK_SAMPLE_COUNT_1_BIT");
+            assert(color_attachment.loadOp == VK_ATTACHMENT_LOAD_OP_CLEAR
+                   && "render pass not implemented for color_attachment.loadOp != VK_ATTACHMENT_LOAD_OP_CLEAR");
+            assert(color_attachment.stencilLoadOp == VK_ATTACHMENT_LOAD_OP_DONT_CARE
+                   && "render pass not implemented for color_attachment.stencilLoadOp != VK_ATTACHMENT_LOAD_OP_DONT_CARE");
+            color_attachment_index = color_attachment_reference.attachment;
+#warning implement non-linear image layouts
+        }
+        assert(valid_color_attachment_count == 1
+               && "render pass not implemented for valid_color_attachment_count != 1");
+    }
+    // Dependencies are only validated, not executed: with a single subpass only
+    // external dependencies can occur, and those are no-ops for this implementation.
+    for(std::uint32_t i = 0; i < create_info.dependencyCount; i++)
+    {
+        auto &dependency = create_info.pDependencies[i];
+        assert(dependency.srcSubpass == VK_SUBPASS_EXTERNAL
+               || dependency.dstSubpass == VK_SUBPASS_EXTERNAL
+               || dependency.srcSubpass <= dependency.dstSubpass);
+        assert(dependency.srcSubpass != VK_SUBPASS_EXTERNAL
+               || dependency.dstSubpass != VK_SUBPASS_EXTERNAL);
+        assert(dependency.srcSubpass == VK_SUBPASS_EXTERNAL
+               || dependency.srcSubpass < create_info.subpassCount);
+        assert(dependency.dstSubpass == VK_SUBPASS_EXTERNAL
+               || dependency.dstSubpass < create_info.subpassCount);
+
+        assert((dependency.srcSubpass == VK_SUBPASS_EXTERNAL
+                || dependency.dstSubpass == VK_SUBPASS_EXTERNAL)
+               && "intra-render-pass subpass dependencies are not implemented");
+    }
+#warning finish implementing Vulkan_render_pass::create
+    if(depth_stencil_attachment_index)
+    {
+        // Warn exactly once per process; relaxed ordering suffices for a
+        // single best-effort log-once flag.
+        static std::atomic_bool wrote_warning{false};
+        if(!wrote_warning.exchange(true, std::memory_order::memory_order_relaxed))
+            std::cerr << "depth stencil attachments not supported" << std::endl;
+    }
+    // NOTE(review): *color_attachment_index is only guaranteed engaged by the
+    // valid_color_attachment_count assert above; with NDEBUG that check vanishes.
+    return std::make_unique<Vulkan_render_pass>(
+        std::move(attachments), *color_attachment_index, depth_stencil_attachment_index);
+}
+
+// Validates that every attachment image view matches the render pass's
+// attachment descriptions (format, sample count) and the framebuffer
+// dimensions, then records non-owning pointers to the views.
+std::unique_ptr<Vulkan_framebuffer> Vulkan_framebuffer::create(
+    Vulkan_device &device, const VkFramebufferCreateInfo &create_info)
+{
+    assert(create_info.sType == VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO);
+    assert(create_info.renderPass);
+    auto *render_pass = Vulkan_render_pass::from_handle(create_info.renderPass);
+    assert(create_info.attachmentCount == render_pass->attachments.size());
+    assert(create_info.attachmentCount == 0 || create_info.pAttachments);
+    std::vector<Vulkan_image_view *> attachments;
+    attachments.reserve(create_info.attachmentCount);
+    for(std::uint32_t i = 0; i < create_info.attachmentCount; i++)
+    {
+        auto *attachment = Vulkan_image_view::from_handle(create_info.pAttachments[i]);
+        assert(attachment);
+        assert(attachment->format == render_pass->attachments[i].format);
+        assert(attachment->base_image.descriptor.samples == render_pass->attachments[i].samples);
+        assert(is_identity_component_mapping(attachment->components));
+        assert(attachment->subresource_range.levelCount == 1);
+        // Implementation restriction: attachment images must exactly match the
+        // framebuffer's width/height/layer count.
+        assert(attachment->base_image.descriptor.extent.width == create_info.width
+               && "non-matching image dimensions in framebuffer is not implemented");
+        assert(attachment->base_image.descriptor.extent.height == create_info.height
+               && "non-matching image dimensions in framebuffer is not implemented");
+        assert(attachment->subresource_range.layerCount == create_info.layers
+               && "non-matching image layer count in framebuffer is not implemented");
+        attachments.push_back(attachment);
+    }
+    return std::make_unique<Vulkan_framebuffer>(*render_pass,
+                                                std::move(attachments),
+                                                create_info.width,
+                                                create_info.height,
+                                                create_info.layers);
}
void Vulkan_command_buffer::Command::on_record_end(Vulkan_command_buffer &command_buffer)
static_cast<void>(command_buffer);
}
-
Vulkan_command_buffer::Vulkan_command_buffer(
std::list<std::unique_ptr<Vulkan_command_buffer>>::iterator iter,
Vulkan_command_pool &command_pool,
#include "vulkan/vulkan.h"
#include "vulkan/vk_icd.h"
-#include "vulkan/remove_xlib_macros.h"
+#include "remove_xlib_macros.h"
+#include "util.h"
#include "util/enum.h"
#include "util/string_view.h"
#include "util/variant.h"
struct Vulkan_device;
-struct Vulkan_device_memory
- : public Vulkan_nondispatchable_object<Vulkan_device_memory, VkDeviceMemory>
-{
- static constexpr std::size_t alignment = 64;
- std::shared_ptr<void> memory;
- explicit Vulkan_device_memory(std::shared_ptr<void> memory) noexcept : memory(std::move(memory))
- {
- }
- static std::shared_ptr<void> allocate(VkDeviceSize size)
- {
- if(static_cast<std::size_t>(size) != size)
- throw std::bad_alloc();
- typedef util::Aligned_memory_allocator<alignment> Allocator;
- return std::shared_ptr<void>(Allocator::allocate(size), Allocator::Deleter{});
- }
- static std::unique_ptr<Vulkan_device_memory> create(Vulkan_device &device,
- const VkMemoryAllocateInfo &allocate_info)
- {
- static_cast<void>(device);
- assert(allocate_info.sType == VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO);
- constexpr std::uint32_t main_memory_type_index = 0;
- assert(allocate_info.memoryTypeIndex == main_memory_type_index);
- assert(allocate_info.allocationSize != 0);
- return std::make_unique<Vulkan_device_memory>(allocate(allocate_info.allocationSize));
- }
-};
-
struct Vulkan_instance;
struct Vulkan_physical_device
heap_size = std::numeric_limits<VkDeviceSize>::max();
return heap_size;
}
+ static constexpr std::size_t main_memory_type_index = 0;
Vulkan_physical_device(Vulkan_instance &instance) noexcept
: instance(instance),
properties{
}
};
+// Device memory backed by plain host heap allocations (this is a software
+// renderer). Moved below Vulkan_physical_device so it can reference
+// Vulkan_physical_device::main_memory_type_index.
+struct Vulkan_device_memory
+    : public Vulkan_nondispatchable_object<Vulkan_device_memory, VkDeviceMemory>
+{
+    // Fixed over-alignment for all allocations so any Vulkan resource can bind here.
+    static constexpr std::size_t alignment = 64;
+    std::shared_ptr<void> memory;
+    explicit Vulkan_device_memory(std::shared_ptr<void> memory) noexcept : memory(std::move(memory))
+    {
+    }
+    static std::shared_ptr<void> allocate(VkDeviceSize size)
+    {
+        // Guard against VkDeviceSize (64-bit) values that don't fit in size_t.
+        if(static_cast<std::size_t>(size) != size)
+            throw std::bad_alloc();
+        typedef util::Aligned_memory_allocator<alignment> Allocator;
+        return std::shared_ptr<void>(Allocator::allocate(size), Allocator::Deleter{});
+    }
+    static std::unique_ptr<Vulkan_device_memory> create(Vulkan_device &device,
+                                                        const VkMemoryAllocateInfo &allocate_info)
+    {
+        static_cast<void>(device);
+        assert(allocate_info.sType == VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO);
+        // Only one memory type is advertised, so only it may be requested.
+        assert(allocate_info.memoryTypeIndex == Vulkan_physical_device::main_memory_type_index);
+        assert(allocate_info.allocationSize != 0);
+        return std::make_unique<Vulkan_device_memory>(allocate(allocate_info.allocationSize));
+    }
+};
+
struct Vulkan_instance : public Vulkan_dispatchable_object<Vulkan_instance, VkInstance>
{
Vulkan_instance(const Vulkan_instance &) = delete;
}
constexpr VkMemoryRequirements get_memory_requirements() const noexcept
{
- constexpr std::size_t main_memory_type_index = 0;
auto memory_properties = get_memory_properties();
return {
.size = memory_properties.size,
.alignment = memory_properties.alignment,
- .memoryTypeBits = 1UL << main_memory_type_index,
+ .memoryTypeBits = 1UL << Vulkan_physical_device::main_memory_type_index,
};
}
};
const VkImageCreateInfo &create_info);
};
+// Immutable description of a buffer, extracted and validated from
+// VkBufferCreateInfo at creation time.
+struct Vulkan_buffer_descriptor
+{
+    VkDeviceSize size;
+    // No VkBufferCreateFlags are supported yet (no sparse binding etc.).
+    static constexpr VkBufferCreateFlags supported_flags = 0;
+    constexpr explicit Vulkan_buffer_descriptor(const VkBufferCreateInfo &create_info) noexcept
+        : size(create_info.size)
+    {
+        assert(create_info.sType == VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO);
+        assert((create_info.flags & ~supported_flags) == 0);
+        assert(create_info.size != 0);
+    }
+    constexpr VkMemoryRequirements get_memory_requirements() const noexcept
+    {
+        return {
+            .size = size,
+            .alignment = util::get_max_align_alignment(),
+            .memoryTypeBits = 1UL << Vulkan_physical_device::main_memory_type_index,
+        };
+    }
+};
+
+// A buffer object: its descriptor plus a pointer into bound device memory.
+// `memory` is null until vkBindBufferMemory supplies an (offset-adjusted)
+// shared_ptr aliasing the Vulkan_device_memory allocation.
+struct Vulkan_buffer : public Vulkan_nondispatchable_object<Vulkan_buffer, VkBuffer>
+{
+    Vulkan_buffer_descriptor descriptor;
+    std::shared_ptr<void> memory;
+    Vulkan_buffer(const Vulkan_buffer_descriptor &descriptor,
+                  std::shared_ptr<void> memory = nullptr) noexcept : descriptor(descriptor),
+                                                                     memory(std::move(memory))
+    {
+    }
+    virtual ~Vulkan_buffer() = default;
+    static std::unique_ptr<Vulkan_buffer> create(Vulkan_device &device,
+                                                 const VkBufferCreateInfo &create_info);
+};
+
struct Vulkan_image_view : public Vulkan_nondispatchable_object<Vulkan_image_view, VkImageView>
{
Vulkan_image &base_image;
const VkImageViewCreateInfo &create_info);
};
+// Render pass state: a copy of the attachment descriptions plus the indices of
+// the single color attachment and optional depth/stencil attachment selected
+// by create() (single-subpass-only implementation).
+struct Vulkan_render_pass : public Vulkan_nondispatchable_object<Vulkan_render_pass, VkRenderPass>
+{
+#warning finish implementing Vulkan_render_pass
+    std::vector<VkAttachmentDescription> attachments;
+    std::uint32_t color_attachment_index;
+    util::optional<std::uint32_t> depth_stencil_attachment_index;
+    Vulkan_render_pass(std::vector<VkAttachmentDescription> attachments,
+                       std::uint32_t color_attachment_index,
+                       util::optional<std::uint32_t> depth_stencil_attachment_index) noexcept
+        : attachments(std::move(attachments)),
+          color_attachment_index(color_attachment_index),
+          depth_stencil_attachment_index(depth_stencil_attachment_index)
+    {
+    }
+    static std::unique_ptr<Vulkan_render_pass> create(Vulkan_device &,
+                                                      const VkRenderPassCreateInfo &create_info);
+};
+
+// Framebuffer state: the render pass it was created against, non-owning
+// pointers to the attachment image views (callers must keep them alive per the
+// Vulkan lifetime rules), and the framebuffer dimensions.
+struct Vulkan_framebuffer : public Vulkan_nondispatchable_object<Vulkan_framebuffer, VkFramebuffer>
+{
+#warning finish implementing Vulkan_framebuffer
+    Vulkan_render_pass &render_pass;
+    std::vector<Vulkan_image_view *> attachments;
+    std::uint32_t width;
+    std::uint32_t height;
+    std::uint32_t layers;
+    Vulkan_framebuffer(Vulkan_render_pass &render_pass,
+                       std::vector<Vulkan_image_view *> attachments,
+                       std::uint32_t width,
+                       std::uint32_t height,
+                       std::uint32_t layers) noexcept : render_pass(render_pass),
+                                                        attachments(std::move(attachments)),
+                                                        width(width),
+                                                        height(height),
+                                                        layers(layers)
+    {
+    }
+    static std::unique_ptr<Vulkan_framebuffer> create(Vulkan_device &,
+                                                      const VkFramebufferCreateInfo &create_info);
+};
+
struct Vulkan_command_pool;
struct Vulkan_command_buffer
retval.insert(spirv::Execution_model::vertex);
return retval;
}
+
+// Replaces each VK_COMPONENT_SWIZZLE_IDENTITY with the concrete swizzle it is
+// equivalent to (R/G/B/A for the respective channel), so downstream code never
+// has to special-case IDENTITY.
+constexpr VkComponentMapping normalize_component_mapping(
+    VkComponentMapping component_mapping) noexcept
+{
+    if(component_mapping.r == VK_COMPONENT_SWIZZLE_IDENTITY)
+        component_mapping.r = VK_COMPONENT_SWIZZLE_R;
+    if(component_mapping.g == VK_COMPONENT_SWIZZLE_IDENTITY)
+        component_mapping.g = VK_COMPONENT_SWIZZLE_G;
+    if(component_mapping.b == VK_COMPONENT_SWIZZLE_IDENTITY)
+        component_mapping.b = VK_COMPONENT_SWIZZLE_B;
+    if(component_mapping.a == VK_COMPONENT_SWIZZLE_IDENTITY)
+        component_mapping.a = VK_COMPONENT_SWIZZLE_A;
+    return component_mapping;
+}
+
+// True if the mapping is a no-op swizzle, treating VK_COMPONENT_SWIZZLE_IDENTITY
+// and the explicit R/G/B/A per-channel values as equivalent.
+constexpr bool is_identity_component_mapping(const VkComponentMapping &component_mapping) noexcept
+{
+    auto normalized = normalize_component_mapping(component_mapping);
+    if(normalized.r != VK_COMPONENT_SWIZZLE_R)
+        return false;
+    if(normalized.g != VK_COMPONENT_SWIZZLE_G)
+        return false;
+    if(normalized.b != VK_COMPONENT_SWIZZLE_B)
+        return false;
+    if(normalized.a != VK_COMPONENT_SWIZZLE_A)
+        return false;
+    return true;
+}
}
}
extern "C" VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory(VkDevice device,
                                                             VkBuffer buffer,
                                                             VkDeviceMemory memory,
-                                                             VkDeviceSize memoryOffset)
+                                                             VkDeviceSize memory_offset)
{
-#warning finish implementing vkBindBufferMemory
-    assert(!"vkBindBufferMemory is not implemented");
+    assert(device);
+    assert(buffer);
+    assert(memory);
+    return vulkan_icd::catch_exceptions_and_return_result(
+        [&]()
+        {
+            auto *buffer_pointer = vulkan::Vulkan_buffer::from_handle(buffer);
+            auto *device_memory = vulkan::Vulkan_device_memory::from_handle(memory);
+            // Per spec a buffer may be bound at most once.
+            assert(!buffer_pointer->memory);
+            // shared_ptr aliasing constructor: shares ownership of the whole
+            // allocation but points memory_offset bytes into it.
+            buffer_pointer->memory = std::shared_ptr<void>(
+                device_memory->memory,
+                static_cast<unsigned char *>(device_memory->memory.get()) + memory_offset);
+            return VK_SUCCESS;
+        });
}
extern "C" VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory(VkDevice device,
}
extern "C" VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements(
-    VkDevice device, VkBuffer buffer, VkMemoryRequirements *pMemoryRequirements)
+    VkDevice device, VkBuffer buffer, VkMemoryRequirements *memory_requirements)
{
-#warning finish implementing vkGetBufferMemoryRequirements
-    assert(!"vkGetBufferMemoryRequirements is not implemented");
+    assert(device);
+    assert(buffer);
+    assert(memory_requirements);
+    // Requirements are derived purely from the buffer's descriptor (size,
+    // max_align alignment, the single supported memory type).
+    *memory_requirements =
+        vulkan::Vulkan_buffer::from_handle(buffer)->descriptor.get_memory_requirements();
}
extern "C" VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements(
}
extern "C" VKAPI_ATTR VkResult VKAPI_CALL vkCreateBuffer(VkDevice device,
-                                                         const VkBufferCreateInfo *pCreateInfo,
+                                                         const VkBufferCreateInfo *create_info,
                                                         const VkAllocationCallbacks *allocator,
-                                                         VkBuffer *pBuffer)
+                                                         VkBuffer *buffer)
{
    validate_allocator(allocator);
-#warning finish implementing vkCreateBuffer
-    assert(!"vkCreateBuffer is not implemented");
+    assert(device);
+    assert(create_info);
+    assert(buffer);
+    // Exceptions (e.g. std::bad_alloc) are mapped to VkResult error codes.
+    return vulkan_icd::catch_exceptions_and_return_result(
+        [&]()
+        {
+            auto create_result = vulkan::Vulkan_buffer::create(
+                *vulkan::Vulkan_device::from_handle(device), *create_info);
+            *buffer = move_to_handle(std::move(create_result));
+            return VK_SUCCESS;
+        });
}
extern "C" VKAPI_ATTR void VKAPI_CALL vkDestroyBuffer(VkDevice device,
const VkAllocationCallbacks *allocator)
{
validate_allocator(allocator);
-#warning finish implementing vkDestroyBuffer
- assert(!"vkDestroyBuffer is not implemented");
+ assert(device);
+ vulkan::Vulkan_buffer::move_from_handle(buffer).reset();
}
extern "C" VKAPI_ATTR VkResult VKAPI_CALL
extern "C" VKAPI_ATTR VkResult VKAPI_CALL
vkCreatePipelineCache(VkDevice device,
- const VkPipelineCacheCreateInfo *pCreateInfo,
+ const VkPipelineCacheCreateInfo *create_info,
const VkAllocationCallbacks *allocator,
- VkPipelineCache *pPipelineCache)
+ VkPipelineCache *pipeline_cache)
{
validate_allocator(allocator);
-#warning finish implementing vkCreatePipelineCache
- assert(!"vkCreatePipelineCache is not implemented");
+ assert(device);
+ assert(create_info);
+ assert(pipeline_cache);
+ return vulkan_icd::catch_exceptions_and_return_result(
+ [&]()
+ {
+ auto create_result = pipeline::Pipeline_cache::create(
+ *vulkan::Vulkan_device::from_handle(device), *create_info);
+ *pipeline_cache = move_to_handle(std::move(create_result));
+ return VK_SUCCESS;
+ });
}
extern "C" VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineCache(VkDevice device,
-                                                             VkPipelineCache pipelineCache,
+                                                             VkPipelineCache pipeline_cache,
                                                             const VkAllocationCallbacks *allocator)
{
    validate_allocator(allocator);
-#warning finish implementing vkDestroyPipelineCache
-    assert(!"vkDestroyPipelineCache is not implemented");
+    assert(device);
+    // Reclaim ownership from the handle and destroy; null handles are a no-op.
+    pipeline::Pipeline_cache::move_from_handle(pipeline_cache).reset();
}
extern "C" VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineCacheData(VkDevice device,
extern "C" VKAPI_ATTR VkResult VKAPI_CALL
vkCreateFramebuffer(VkDevice device,
- const VkFramebufferCreateInfo *pCreateInfo,
+ const VkFramebufferCreateInfo *create_info,
const VkAllocationCallbacks *allocator,
- VkFramebuffer *pFramebuffer)
+ VkFramebuffer *framebuffer)
{
validate_allocator(allocator);
-#warning finish implementing vkCreateFramebuffer
- assert(!"vkCreateFramebuffer is not implemented");
+ assert(device);
+ assert(create_info);
+ assert(framebuffer);
+ return vulkan_icd::catch_exceptions_and_return_result(
+ [&]()
+ {
+ auto create_result = vulkan::Vulkan_framebuffer::create(
+ *vulkan::Vulkan_device::from_handle(device), *create_info);
+ *framebuffer = move_to_handle(std::move(create_result));
+ return VK_SUCCESS;
+ });
}
extern "C" VKAPI_ATTR void VKAPI_CALL vkDestroyFramebuffer(VkDevice device,
const VkAllocationCallbacks *allocator)
{
validate_allocator(allocator);
-#warning finish implementing vkDestroyFramebuffer
- assert(!"vkDestroyFramebuffer is not implemented");
+ assert(device);
+ vulkan::Vulkan_framebuffer::move_from_handle(framebuffer).reset();
}
extern "C" VKAPI_ATTR VkResult VKAPI_CALL
vkCreateRenderPass(VkDevice device,
- const VkRenderPassCreateInfo *pCreateInfo,
+ const VkRenderPassCreateInfo *create_info,
const VkAllocationCallbacks *allocator,
- VkRenderPass *pRenderPass)
+ VkRenderPass *render_pass)
{
validate_allocator(allocator);
-#warning finish implementing vkCreateRenderPass
- assert(!"vkCreateRenderPass is not implemented");
+ assert(device);
+ assert(create_info);
+ assert(render_pass);
+ return vulkan_icd::catch_exceptions_and_return_result(
+ [&]()
+ {
+ auto create_result = vulkan::Vulkan_render_pass::create(
+ *vulkan::Vulkan_device::from_handle(device), *create_info);
+ *render_pass = move_to_handle(std::move(create_result));
+ return VK_SUCCESS;
+ });
}
extern "C" VKAPI_ATTR void VKAPI_CALL vkDestroyRenderPass(VkDevice device,
-                                                          VkRenderPass renderPass,
+                                                          VkRenderPass render_pass,
                                                          const VkAllocationCallbacks *allocator)
{
    validate_allocator(allocator);
-#warning finish implementing vkDestroyRenderPass
-    assert(!"vkDestroyRenderPass is not implemented");
+    assert(device);
+    // Reclaim ownership from the handle and destroy; null handles are a no-op.
+    vulkan::Vulkan_render_pass::move_from_handle(render_pass).reset();
}
extern "C" VKAPI_ATTR void VKAPI_CALL vkGetRenderAreaGranularity(VkDevice device,
assert(!"vkCmdDispatchIndirect is not implemented");
}
-extern "C" VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer(VkCommandBuffer commandBuffer,
-                                                      VkBuffer srcBuffer,
-                                                      VkBuffer dstBuffer,
-                                                      uint32_t regionCount,
-                                                      const VkBufferCopy *pRegions)
+extern "C" VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer(VkCommandBuffer command_buffer,
+                                                      VkBuffer src_buffer,
+                                                      VkBuffer dst_buffer,
+                                                      uint32_t region_count,
+                                                      const VkBufferCopy *regions)
{
-#warning finish implementing vkCmdCopyBuffer
-    assert(!"vkCmdCopyBuffer is not implemented");
+    assert(command_buffer);
+    assert(src_buffer);
+    assert(dst_buffer);
+    assert(region_count > 0);
+    assert(regions);
+    auto command_buffer_pointer = vulkan::Vulkan_command_buffer::from_handle(command_buffer);
+    command_buffer_pointer->record_command_and_keep_errors(
+        [&]()
+        {
+            auto src_buffer_pointer = vulkan::Vulkan_buffer::from_handle(src_buffer);
+            auto dst_buffer_pointer = vulkan::Vulkan_buffer::from_handle(dst_buffer);
+            // Validate every region up front at record time; the subtraction
+            // form avoids unsigned overflow in offset + size checks.
+            for(std::uint32_t i = 0; i < region_count; i++)
+            {
+                auto &region = regions[i];
+                assert(region.size <= src_buffer_pointer->descriptor.size);
+                assert(src_buffer_pointer->descriptor.size - region.size >= region.srcOffset);
+                assert(region.size <= dst_buffer_pointer->descriptor.size);
+                assert(dst_buffer_pointer->descriptor.size - region.size >= region.dstOffset);
+                // silence unused-variable warning when asserts are compiled out
+                static_cast<void>(region);
+            }
+            // Deferred command: the regions are copied into the command object
+            // because the caller's array need not outlive this call.
+            struct Copy_buffer_command final : public vulkan::Vulkan_command_buffer::Command
+            {
+                vulkan::Vulkan_buffer &src_buffer;
+                vulkan::Vulkan_buffer &dst_buffer;
+                std::vector<VkBufferCopy> regions;
+                Copy_buffer_command(vulkan::Vulkan_buffer &src_buffer,
+                                    vulkan::Vulkan_buffer &dst_buffer,
+                                    std::vector<VkBufferCopy> regions) noexcept
+                    : src_buffer(src_buffer),
+                      dst_buffer(dst_buffer),
+                      regions(std::move(regions))
+                {
+                }
+                virtual void run(
+                    vulkan::Vulkan_command_buffer::Running_state &state) noexcept override
+                {
+                    static_cast<void>(state);
+                    // NOTE(review): memcpy assumes regions don't overlap, which
+                    // the Vulkan spec requires of valid vkCmdCopyBuffer input.
+                    for(auto &region : regions)
+                        std::memcpy(static_cast<unsigned char *>(dst_buffer.memory.get())
+                                        + region.dstOffset,
+                                    static_cast<const unsigned char *>(src_buffer.memory.get())
+                                        + region.srcOffset,
+                                    region.size);
+                }
+            };
+            command_buffer_pointer->commands.push_back(std::make_unique<Copy_buffer_command>(
+                *src_buffer_pointer,
+                *dst_buffer_pointer,
+                std::vector<VkBufferCopy>(regions, regions + region_count)));
+        });
}
extern "C" VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage(VkCommandBuffer commandBuffer,