From f7b2047a3d18fff31e43d4a0399a67cbe930c0a6 Mon Sep 17 00:00:00 2001 From: Keith Leonardo Date: Sun, 3 Nov 2024 00:05:19 +1100 Subject: [PATCH] Implement Texture binding system for Uniforms * Implement MemoryPool to manage all device memory allocations * Remove Buffer class * Rename VkResult_log to VkResult_check * Remove redundant/verbose documentation for internal submodules (annoying to maintain) --- src/Dynamo.hpp | 1 + src/Graphics/Mesh.hpp | 2 +- src/Graphics/Renderer.cpp | 59 +++-- src/Graphics/Renderer.hpp | 41 +++- src/Graphics/Texture.hpp | 104 +++++++++ src/Graphics/Vulkan/Buffer.cpp | 113 ---------- src/Graphics/Vulkan/Buffer.hpp | 121 ---------- src/Graphics/Vulkan/FrameContext.cpp | 32 +++ src/Graphics/Vulkan/FrameContext.hpp | 56 +---- src/Graphics/Vulkan/FramebufferCache.hpp | 18 -- src/Graphics/Vulkan/MaterialRegistry.cpp | 23 +- src/Graphics/Vulkan/MaterialRegistry.hpp | 71 +----- src/Graphics/Vulkan/MemoryPool.cpp | 133 +++++++++++ src/Graphics/Vulkan/MemoryPool.hpp | 67 ++++++ src/Graphics/Vulkan/MeshRegistry.cpp | 157 ++++++------- src/Graphics/Vulkan/MeshRegistry.hpp | 50 +---- src/Graphics/Vulkan/PhysicalDevice.cpp | 2 +- src/Graphics/Vulkan/PhysicalDevice.hpp | 48 +--- src/Graphics/Vulkan/ShaderRegistry.cpp | 10 +- src/Graphics/Vulkan/ShaderRegistry.hpp | 78 +------ src/Graphics/Vulkan/Swapchain.cpp | 2 +- src/Graphics/Vulkan/Swapchain.hpp | 17 -- src/Graphics/Vulkan/TextureRegistry.cpp | 127 +++++++++++ src/Graphics/Vulkan/TextureRegistry.hpp | 69 ++++++ src/Graphics/Vulkan/UniformRegistry.cpp | 209 ++++++++++------- src/Graphics/Vulkan/UniformRegistry.hpp | 121 ++++------ src/Graphics/Vulkan/Utils.cpp | 272 +++++++++++++++++++---- src/Graphics/Vulkan/Utils.hpp | 252 ++++----------------- src/Utils/Allocator.cpp | 3 +- 29 files changed, 1178 insertions(+), 1080 deletions(-) create mode 100644 src/Graphics/Texture.hpp delete mode 100644 src/Graphics/Vulkan/Buffer.cpp delete mode 100644 src/Graphics/Vulkan/Buffer.hpp create mode 100644 src/Graphics/Vulkan/FrameContext.cpp create mode 100644 src/Graphics/Vulkan/MemoryPool.cpp create mode 100644 src/Graphics/Vulkan/MemoryPool.hpp create mode 100644 src/Graphics/Vulkan/TextureRegistry.cpp create mode 100644 src/Graphics/Vulkan/TextureRegistry.hpp diff --git a/src/Dynamo.hpp b/src/Dynamo.hpp index 1affd05..5e2c6d2 100644 --- a/src/Dynamo.hpp +++ b/src/Dynamo.hpp @@ -7,6 +7,7 @@ #include #include #include +#include #include #include #include diff --git a/src/Graphics/Mesh.hpp b/src/Graphics/Mesh.hpp index e3daa95..be63d88 100644 --- a/src/Graphics/Mesh.hpp +++ b/src/Graphics/Mesh.hpp @@ -28,7 +28,7 @@ namespace Dynamo::Graphics { * */ struct MeshDescriptor { - using AttributeBuffer = std::vector; + using AttributeBuffer = std::vector; std::vector attributes; unsigned vertex_count; diff --git a/src/Graphics/Renderer.cpp b/src/Graphics/Renderer.cpp index 1c002da..ac1067d 100644 --- a/src/Graphics/Renderer.cpp +++ b/src/Graphics/Renderer.cpp @@ -9,7 +9,7 @@ namespace Dynamo::Graphics { _surface = _display.create_vulkan_surface(_instance); // Create the logical device - _physical = PhysicalDevice::select(_instance, _surface); + _physical = PhysicalDevice::select_best(_instance, _surface); _device = VkDevice_create(_physical); // Build the swapchain and its views @@ -20,10 +20,12 @@ namespace Dynamo::Graphics { _transfer_pool = VkCommandPool_create(_device, _physical.transfer_queues); // Vulkan object registries - _meshes = MeshRegistry(_device, _physical, _transfer_pool); + _memory = MemoryPool(_device, _physical); _shaders 
= ShaderRegistry(_device); - _materials = MaterialRegistry(_device, root_asset_directory + "/vulkan_cache.bin"); + _meshes = MeshRegistry(_device, _physical, _transfer_pool); _uniforms = UniformRegistry(_device, _physical, _transfer_pool); + _textures = TextureRegistry(_device, _physical, _transfer_pool); + _materials = MaterialRegistry(_device, root_asset_directory + "/vulkan_cache.bin"); _framebuffers = FramebufferCache(_device); // Frame contexts @@ -46,10 +48,12 @@ namespace Dynamo::Graphics { // High-level objects _frame_contexts.destroy(); _framebuffers.destroy(); - _uniforms.destroy(); _materials.destroy(); + _textures.destroy(_memory); + _uniforms.destroy(_memory); + _meshes.destroy(_memory); _shaders.destroy(); - _meshes.destroy(); + _memory.destroy(); _swapchain.destroy(); // Vulkan core objects @@ -79,19 +83,31 @@ namespace Dynamo::Graphics { _clear.color.float32[3] = color.a; } - Mesh Renderer::build_mesh(const MeshDescriptor &descriptor) { return _meshes.build(descriptor); } + Mesh Renderer::build_mesh(const MeshDescriptor &descriptor) { return _meshes.build(descriptor, _memory); } - void Renderer::destroy_mesh(Mesh mesh) { _meshes.destroy(mesh); } + void Renderer::destroy_mesh(Mesh mesh) { _meshes.destroy(mesh, _memory); } Shader Renderer::build_shader(const ShaderDescriptor &descriptor) { return _shaders.build(descriptor); } void Renderer::destroy_shader(Shader shader) { _shaders.destroy(shader); } + Texture Renderer::build_texture(const TextureDescriptor &descriptor) { + return _textures.build(descriptor, _memory); + } + + void Renderer::destroy_texture(Texture texture) { _textures.destroy(texture, _memory); } + Material Renderer::build_material(const MaterialDescriptor &descriptor) { - return _materials.build(descriptor, _swapchain, _shaders, _uniforms); + return _materials.build(descriptor, _swapchain, _shaders, _uniforms, _memory); } - void Renderer::destroy_material(Material material) { _materials.destroy(material, _uniforms); } + void Renderer::destroy_material(Material material) { + // Free allocated descriptor / push constant uniforms + MaterialInstance &instance = _materials.get(material); + for (Uniform uniform : instance.uniforms) { + _uniforms.free(uniform, _memory); + } + } std::optional Renderer::get_uniform(Material material, const std::string &name) { MaterialInstance &instance = _materials.get(material); @@ -104,7 +120,14 @@ namespace Dynamo::Graphics { return {}; } - void Renderer::write_uniform(Uniform uniform, void *data) { _uniforms.write(uniform, data); } + void Renderer::write_uniform(Uniform uniform, void *data, unsigned index, unsigned count) { + _uniforms.write(uniform, data, index, count); + } + + void Renderer::bind_texture(Uniform uniform, Texture texture, unsigned index) { + const TextureInstance &instance = _textures.get(texture); + _uniforms.bind(uniform, instance, index); + } void Renderer::draw(const Model &model) { _models.push_back(model); } @@ -123,18 +146,18 @@ namespace Dynamo::Graphics { rebuild_swapchain(); return; } else if (acquire_result != VK_SUCCESS && acquire_result != VK_SUBOPTIMAL_KHR) { - VkResult_log("Acquire Image", acquire_result); + VkResult_check("Acquire Image", acquire_result); } - VkResult_log("Reset Fence", vkResetFences(_device, 1, &frame.sync_fence)); - VkResult_log("Reset Command Buffer", vkResetCommandBuffer(frame.command_buffer, 0)); + VkResult_check("Reset Fence", vkResetFences(_device, 1, &frame.sync_fence)); + VkResult_check("Reset Command Buffer", vkResetCommandBuffer(frame.command_buffer, 0)); 
VkCommandBufferBeginInfo begin_info = {}; begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO; begin_info.flags = 0; begin_info.pInheritanceInfo = nullptr; - VkResult_log("Begin Command Recording", vkBeginCommandBuffer(frame.command_buffer, &begin_info)); + VkResult_check("Begin Command Recording", vkBeginCommandBuffer(frame.command_buffer, &begin_info)); // Group models by material and geometry std::sort(_models.begin(), _models.end(), [](const Model &a, const Model &b) { @@ -161,7 +184,7 @@ namespace Dynamo::Graphics { VkPipeline prev_pipeline = VK_NULL_HANDLE; Mesh prev_mesh = reinterpret_cast(-1); for (Model model : _models) { - const MeshAllocation &mesh = _meshes.get(model.mesh); + const MeshInstance &mesh = _meshes.get(model.mesh); const MaterialInstance &material = _materials.get(model.material); // Rebind renderpass if changed @@ -248,7 +271,7 @@ namespace Dynamo::Graphics { if (prev_renderpass != VK_NULL_HANDLE) { vkCmdEndRenderPass(frame.command_buffer); } - VkResult_log("End Command Buffer", vkEndCommandBuffer(frame.command_buffer)); + VkResult_check("End Command Buffer", vkEndCommandBuffer(frame.command_buffer)); // Submit commands VkQueue queue; @@ -265,7 +288,7 @@ namespace Dynamo::Graphics { submit_info.pSignalSemaphores = &frame.sync_render_done; submit_info.pWaitDstStageMask = &wait_stage_mask; - VkResult_log("Graphics Submit", vkQueueSubmit(queue, 1, &submit_info, frame.sync_fence)); + VkResult_check("Graphics Submit", vkQueueSubmit(queue, 1, &submit_info, frame.sync_fence)); // Present the render VkPresentInfoKHR present_info = {}; @@ -282,7 +305,7 @@ namespace Dynamo::Graphics { if (present_result == VK_ERROR_OUT_OF_DATE_KHR || present_result == VK_SUBOPTIMAL_KHR) { rebuild_swapchain(); } else if (present_result != VK_SUCCESS) { - VkResult_log("Present Render", present_result); + VkResult_check("Present Render", present_result); } } } // namespace Dynamo::Graphics \ No newline at end of file diff --git a/src/Graphics/Renderer.hpp b/src/Graphics/Renderer.hpp index e8b7944..1ca95c5 100644 --- a/src/Graphics/Renderer.hpp +++ b/src/Graphics/Renderer.hpp @@ -5,7 +5,7 @@ #include #include #include -#include +#include #include #include #include @@ -13,6 +13,7 @@ #include #include #include +#include #include #include @@ -38,10 +39,12 @@ namespace Dynamo::Graphics { VkCommandPool _graphics_pool; VkCommandPool _transfer_pool; + MemoryPool _memory; MeshRegistry _meshes; ShaderRegistry _shaders; MaterialRegistry _materials; UniformRegistry _uniforms; + TextureRegistry _textures; FramebufferCache _framebuffers; FrameContextList _frame_contexts; @@ -51,11 +54,11 @@ namespace Dynamo::Graphics { // TODO - Fixes: // * Pre-defined render pass ---- Define a default render pass to handle the no-draw case - // * Let Buffer take in fallback memory types, only throw when all options exhausted + // * Memory defragmentation stategy // TODO - Features: + // * Live update texture? --- Support non-shader-optimal image layouts // * Depth-stencil buffer ---- Update jukebox to showcase 3d perspective (to visualize depth buffering) - // * Texture system ---- Similar to shaders / meshes, generate a handle and return // * Draw-to-texture ---- overload render(), render(Texture texture) /** @@ -116,6 +119,21 @@ namespace Dynamo::Graphics { */ void destroy_shader(Shader shader); + /** + * @brief Build a texture. + * + * @param descriptor + * @return Texture + */ + Texture build_texture(const TextureDescriptor &descriptor); + + /** + * @brief Free texture resources. 
+ * + * @param texture + */ + void destroy_texture(Texture texture); + /** * @brief Build a material. * @@ -143,12 +161,25 @@ namespace Dynamo::Graphics { /** * @brief Write to a uniform. * - * Data must match the size of the uniform variable. + * If the uniform is an array, an index offset and count can be provided. * * @param uniform * @param data + * @param index + * @param count + */ + void write_uniform(Uniform uniform, void *data, unsigned index = 0, unsigned count = 1); + + /** + * @brief Bind a texture to a uniform variable. + * + * If the uniform is an array, an index offset can be provided. + * + * @param uniform + * @param texture + * @param index */ - void write_uniform(Uniform uniform, void *data); + void bind_texture(Uniform uniform, Texture texture, unsigned index = 0); /** * @brief Draw a model in the current frame. diff --git a/src/Graphics/Texture.hpp b/src/Graphics/Texture.hpp new file mode 100644 index 0000000..0251fa7 --- /dev/null +++ b/src/Graphics/Texture.hpp @@ -0,0 +1,104 @@ +#pragma once + +#include + +#include + +namespace Dynamo::Graphics { + /** + * @brief Texture resource handle. + * + */ + DYN_DEFINE_ID_TYPE(Texture); + + /** + * @brief Texture formats. + * + */ + enum class TextureFormat { + F32_R_Norm, + U8_RGB_Norm, + U8_RGBA_Norm, + }; + + /** + * @brief Texture filter modes. + * + */ + enum class TextureFilter { + Nearest, + Linear, + }; + + /** + * @brief Texture addressing modes. + * + */ + enum class TextureAddressMode { + Repeat, + RepeatMirror, + Clamp, + ClampMirror, + ClampBorder, + }; + + /** + * @brief Texture descriptor. + * + */ + struct TextureDescriptor { + /** + * @brief Texture unit byte buffer. + * + */ + std::vector texels; + + /** + * @brief Width of the texture in texture units. + * + */ + unsigned width = 0; + + /** + * @brief Height of the texture in texture units. + * + */ + unsigned height = 0; + + /** + * @brief Format of the texture determines how the byte buffer is interpreted. + * + */ + TextureFormat format = TextureFormat::U8_RGBA_Norm; + + /** + * @brief Minification filter. + * + */ + TextureFilter min_filter = TextureFilter::Nearest; + + /** + * @brief Magnification filter. + * + */ + TextureFilter mag_filter = TextureFilter::Nearest; + + /** + * @brief How U coordinates are addressed outside [0, 1). + * + */ + TextureAddressMode u_address_mode = TextureAddressMode::Repeat; + + /** + * @brief How V coordinates are addressed outside [0, 1). + * + */ + TextureAddressMode v_address_mode = TextureAddressMode::Repeat; + + /** + * @brief How W coordinates are addressed outside [0, 1). 
+ * + */ + TextureAddressMode w_address_mode = TextureAddressMode::Repeat; + }; +} // namespace Dynamo::Graphics \ No newline at end of file diff --git a/src/Graphics/Vulkan/Buffer.cpp b/src/Graphics/Vulkan/Buffer.cpp deleted file mode 100644 index 7696396..0000000 --- a/src/Graphics/Vulkan/Buffer.cpp +++ /dev/null @@ -1,113 +0,0 @@ -#include -#include - -namespace Dynamo::Graphics::Vulkan { - Buffer::Buffer(VkDevice device, - const PhysicalDevice &physical, - VkCommandBuffer command_buffer, - VkBufferUsageFlagBits usage, - VkMemoryPropertyFlags properties) : - _device(device), - _physical_settings(physical.memory), _command_buffer(command_buffer), - _usage(usage | VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT), _properties(properties), - _allocator(MIN_ALLOCATION_SIZE) { - _handle = VkBuffer_create(_device, _usage, _allocator.capacity(), nullptr, 0); - _memory = allocate(_handle); - vkBindBufferMemory(_device, _handle, _memory, 0); - vkGetDeviceQueue(_device, physical.transfer_queues.index, 0, &_transfer_queue); - } - - VkDeviceMemory Buffer::allocate(VkBuffer buffer) { - vkGetBufferMemoryRequirements(_device, buffer, &_requirements); - - unsigned type_index; - for (type_index = 0; type_index < _physical_settings.memoryTypeCount; type_index++) { - VkMemoryType type = _physical_settings.memoryTypes[type_index]; - if ((_requirements.memoryTypeBits & (1 << type_index)) && - ((_properties & type.propertyFlags) == _properties)) { - break; - } - } - if (type_index == _physical_settings.memoryTypeCount) { - Log::error("Vulkan could not find suitable memory type for buffer."); - } - - VkDeviceMemory memory = VkDeviceMemory_allocate(_device, type_index, _requirements.size); - if (_properties & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) { - VkResult_log( - "Map Device Memory", - vkMapMemory(_device, memory, 0, _allocator.capacity(), 0, reinterpret_cast(&_mapped))); - DYN_ASSERT(_mapped != nullptr); - } else { - _mapped = nullptr; - } - return memory; - } - - VkBuffer Buffer::handle() const { return _handle; } - - unsigned Buffer::capacity() const { return _allocator.capacity(); } - - unsigned Buffer::reserve(unsigned size) { - std::optional result = _allocator.reserve(size, _requirements.alignment); - if (result.has_value()) { - return result.value(); - } else { - unsigned curr = _allocator.capacity(); - unsigned next = std::max(curr + size, curr * 2); - resize(align_size(next, _requirements.alignment)); - - // If this fails, we're in trouble... 
- return _allocator.reserve(size, _requirements.alignment).value(); - } - } - - void Buffer::free(unsigned block_offset) { _allocator.free(block_offset); } - - unsigned Buffer::size(unsigned block_offset) const { return _allocator.size(block_offset); } - - void Buffer::resize(unsigned size) { - // Do not resize if target is less than the current capacity - if (size < _allocator.capacity()) return; - - // Allocate new memory and buffer - VkBuffer next_handle = VkBuffer_create(_device, _usage, size, nullptr, 0); - VkDeviceMemory next_memory = allocate(next_handle); - vkBindBufferMemory(_device, next_handle, next_memory, 0); - - // Copy contents to new buffer - VkBufferCopy region; - region.dstOffset = 0; - region.srcOffset = 0; - region.size = _allocator.capacity(); - - VkBuffer_immediate_copy(_handle, next_handle, _transfer_queue, _command_buffer, ®ion, 1); - vkQueueWaitIdle(_transfer_queue); - - // Destroy old resources - vkDestroyBuffer(_device, _handle, nullptr); - vkFreeMemory(_device, _memory, nullptr); - - // Reassign - _handle = next_handle; - _memory = next_memory; - - // Update the allocator - _allocator.grow(size); - } - - void *Buffer::get_mapped(unsigned block_offset) { - DYN_ASSERT(_allocator.is_reserved(block_offset) && _mapped != nullptr); - return _mapped + block_offset; - } - - void Buffer::copy_to(Buffer &dst, VkBufferCopy *regions, unsigned region_count) { - VkBuffer_immediate_copy(_handle, dst._handle, _transfer_queue, _command_buffer, regions, region_count); - vkQueueWaitIdle(_transfer_queue); - } - - void Buffer::destroy() { - vkDestroyBuffer(_device, _handle, nullptr); - vkFreeMemory(_device, _memory, nullptr); - } -} // namespace Dynamo::Graphics::Vulkan \ No newline at end of file diff --git a/src/Graphics/Vulkan/Buffer.hpp b/src/Graphics/Vulkan/Buffer.hpp deleted file mode 100644 index c55aad1..0000000 --- a/src/Graphics/Vulkan/Buffer.hpp +++ /dev/null @@ -1,121 +0,0 @@ -#pragma once - -#include - -#include -#include - -namespace Dynamo::Graphics::Vulkan { - /** - * @brief 256M minimum buffer allocation size. - * - */ - constexpr unsigned MIN_ALLOCATION_SIZE = 256 * (1 << 20); - - /** - * @brief Dynamic Vulkan buffer. - * - */ - class Buffer { - VkDevice _device; - VkPhysicalDeviceMemoryProperties _physical_settings; - VkCommandBuffer _command_buffer; - VkQueue _transfer_queue; - - VkBufferUsageFlags _usage; - VkMemoryPropertyFlags _properties; - VkMemoryRequirements _requirements; - - Allocator _allocator; - - VkDeviceMemory _memory; - VkBuffer _handle; - - char *_mapped; - - /** - * @brief Allocate memory for a buffer. - * - * @param buffer - * @return VkDeviceMemory - */ - VkDeviceMemory allocate(VkBuffer buffer); - - public: - Buffer(VkDevice device, - const PhysicalDevice &physical, - VkCommandBuffer command_buffer, - VkBufferUsageFlagBits usage, - VkMemoryPropertyFlags properties); - Buffer() = default; - - /** - * @brief Get the buffer handle. - * - * @return VkBuffer - */ - VkBuffer handle() const; - - /** - * @brief Get the capacity of the buffer. - * - * @return unsigned - */ - unsigned capacity() const; - - /** - * @brief Reserve a block of memory. - * - * @param size - * @param alignment - * @return unsigned - */ - unsigned reserve(unsigned size); - - /** - * @brief Free an allocated block. - * - * @param block_offset - * @return unsigned - */ - void free(unsigned block_offset); - - /** - * @brief Get the size of an allocated block. 
- * - * @param offset - * @return unsigned - */ - unsigned size(unsigned block_offset) const; - - /** - * @brief Resize the buffer. - * - * @param size - */ - void resize(unsigned size); - - /** - * @brief Get a pointer to mapped memory. - * - * @param block_offset - * @return void* - */ - void *get_mapped(unsigned block_offset); - - /** - * @brief Copy contents to another buffer. - * - * @param dst - * @param regions - * @param region_count - */ - void copy_to(Buffer &dst, VkBufferCopy *regions, unsigned region_count); - - /** - * @brief Destroy the buffer and free underlying memory. - * - */ - void destroy(); - }; -} // namespace Dynamo::Graphics::Vulkan \ No newline at end of file diff --git a/src/Graphics/Vulkan/FrameContext.cpp b/src/Graphics/Vulkan/FrameContext.cpp new file mode 100644 index 0000000..65b9220 --- /dev/null +++ b/src/Graphics/Vulkan/FrameContext.cpp @@ -0,0 +1,32 @@ +#include + +namespace Dynamo::Graphics::Vulkan { + FrameContextList::FrameContextList(VkDevice device, VkCommandPool command_pool) : _device(device), _index(0) { + std::array buffers; + VkCommandBuffer_allocate(device, + command_pool, + VK_COMMAND_BUFFER_LEVEL_PRIMARY, + buffers.data(), + MAX_FRAMES_IN_FLIGHT); + for (unsigned i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) { + _contexts[i].sync_fence = VkFence_create(device); + _contexts[i].sync_render_start = VkSemaphore_create(device); + _contexts[i].sync_render_done = VkSemaphore_create(device); + _contexts[i].command_buffer = buffers[i]; + } + } + + const FrameContext &FrameContextList::next() { + FrameContext &context = _contexts[_index]; + _index = (_index + 1) % MAX_FRAMES_IN_FLIGHT; + return context; + } + + void FrameContextList::destroy() { + for (const FrameContext &context : _contexts) { + vkDestroyFence(_device, context.sync_fence, nullptr); + vkDestroySemaphore(_device, context.sync_render_start, nullptr); + vkDestroySemaphore(_device, context.sync_render_done, nullptr); + } + } +} // namespace Dynamo::Graphics::Vulkan \ No newline at end of file diff --git a/src/Graphics/Vulkan/FrameContext.hpp b/src/Graphics/Vulkan/FrameContext.hpp index 3e1e77b..7a78a60 100644 --- a/src/Graphics/Vulkan/FrameContext.hpp +++ b/src/Graphics/Vulkan/FrameContext.hpp @@ -5,16 +5,9 @@ #include namespace Dynamo::Graphics::Vulkan { - /** - * @brief Maximum number of frames in flight. - * - */ + // Can't be too high or we'll experience latency constexpr unsigned MAX_FRAMES_IN_FLIGHT = 3; - /** - * @brief Render frame context. - * - */ struct FrameContext { VkFence sync_fence; VkSemaphore sync_render_start; @@ -22,58 +15,17 @@ namespace Dynamo::Graphics::Vulkan { VkCommandBuffer command_buffer; }; - /** - * @brief Frame context list. - * - */ class FrameContextList { VkDevice _device; std::array _contexts; unsigned _index; public: - /** - * @brief Initialize the context objects. - * - * @param command_pool - */ - FrameContextList(VkDevice device, VkCommandPool command_pool) : _device(device), _index(0) { - std::array buffers; - VkCommandBuffer_allocate(device, - command_pool, - VK_COMMAND_BUFFER_LEVEL_PRIMARY, - buffers.data(), - MAX_FRAMES_IN_FLIGHT); - for (unsigned i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) { - _contexts[i].sync_fence = VkFence_create(device); - _contexts[i].sync_render_start = VkSemaphore_create(device); - _contexts[i].sync_render_done = VkSemaphore_create(device); - _contexts[i].command_buffer = buffers[i]; - } - } + FrameContextList(VkDevice device, VkCommandPool command_pool); FrameContextList() = default; - /** - * @brief Grab the next context object. 
- * - * @return const FrameContext& - */ - const FrameContext &next() { - FrameContext &context = _contexts[_index]; - _index = (_index + 1) % MAX_FRAMES_IN_FLIGHT; - return context; - } + const FrameContext &next(); - /** - * @brief Destroy the context objects. - * - */ - void destroy() { - for (const FrameContext &context : _contexts) { - vkDestroyFence(_device, context.sync_fence, nullptr); - vkDestroySemaphore(_device, context.sync_render_start, nullptr); - vkDestroySemaphore(_device, context.sync_render_done, nullptr); - } - } + void destroy(); }; } // namespace Dynamo::Graphics::Vulkan \ No newline at end of file diff --git a/src/Graphics/Vulkan/FramebufferCache.hpp b/src/Graphics/Vulkan/FramebufferCache.hpp index 13938bb..8968299 100644 --- a/src/Graphics/Vulkan/FramebufferCache.hpp +++ b/src/Graphics/Vulkan/FramebufferCache.hpp @@ -5,10 +5,6 @@ #include namespace Dynamo::Graphics::Vulkan { - /** - * @brief Framebuffer configuration settings. - * - */ struct FramebufferSettings { VkImageView view; VkExtent2D extent; @@ -31,10 +27,6 @@ namespace Dynamo::Graphics::Vulkan { }; }; - /** - * @brief Framebuffer cache. - * - */ class FramebufferCache { VkDevice _device; std::unordered_map _cache; @@ -43,18 +35,8 @@ namespace Dynamo::Graphics::Vulkan { FramebufferCache(VkDevice device); FramebufferCache() = default; - /** - * @brief Build a framebuffer. - * - * @param settings - * @return VkFramebuffer - */ VkFramebuffer get(const FramebufferSettings &settings); - /** - * @brief Destroy all framebuffers, invalidating existing handles. - * - */ void destroy(); }; } // namespace Dynamo::Graphics::Vulkan \ No newline at end of file diff --git a/src/Graphics/Vulkan/MaterialRegistry.cpp b/src/Graphics/Vulkan/MaterialRegistry.cpp index babcb8f..e7e0843 100644 --- a/src/Graphics/Vulkan/MaterialRegistry.cpp +++ b/src/Graphics/Vulkan/MaterialRegistry.cpp @@ -17,7 +17,7 @@ namespace Dynamo::Graphics::Vulkan { cache_info.initialDataSize = size; cache_info.pInitialData = buffer.data(); - VkResult_log("Create Pipeline Cache", vkCreatePipelineCache(_device, &cache_info, nullptr, &_pipeline_cache)); + VkResult_check("Create Pipeline Cache", vkCreatePipelineCache(_device, &cache_info, nullptr, &_pipeline_cache)); _ofstream.open(filename, std::ios::trunc | std::ios::binary); } @@ -64,7 +64,7 @@ namespace Dynamo::Graphics::Vulkan { VkRenderPass renderpass; VkResult result = vkCreateRenderPass(_device, &renderpass_info, nullptr, &renderpass); - VkResult_log("Create Render Pass", result); + VkResult_check("Create Render Pass", result); return renderpass; } @@ -189,14 +189,15 @@ namespace Dynamo::Graphics::Vulkan { // Build and cache VkPipeline pipeline; VkResult result = vkCreateGraphicsPipelines(_device, _pipeline_cache, 1, &pipeline_info, nullptr, &pipeline); - VkResult_log("Create Graphics Pipeline", result); + VkResult_check("Create Graphics Pipeline", result); return pipeline; } Material MaterialRegistry::build(const MaterialDescriptor &descriptor, const Swapchain &swapchain, const ShaderRegistry &shaders, - UniformRegistry &uniforms) { + UniformRegistry &uniforms, + MemoryPool &memory) { MaterialInstance instance; const ShaderModule &vertex_module = shaders.get(descriptor.vertex); @@ -212,8 +213,8 @@ namespace Dynamo::Graphics::Vulkan { for (const DescriptorSet &set : module.descriptor_sets) { layout_settings.descriptor_layouts.push_back(set.layout); // TODO: What if we have duplicate set layouts? Can we reuse? 
- DescriptorAllocation allocation = uniforms.allocate(set); - instance.descriptor_sets.push_back(allocation.set); + DescriptorAllocation allocation = uniforms.allocate(set, memory); + instance.descriptor_sets.push_back(allocation.descriptor_set); for (Uniform uniform : allocation.uniforms) { instance.uniforms.push_back(uniform); } @@ -275,14 +276,6 @@ namespace Dynamo::Graphics::Vulkan { MaterialInstance &MaterialRegistry::get(Material material) { return _instances.get(material); } - void MaterialRegistry::destroy(Material material, UniformRegistry &uniforms) { - MaterialInstance &instance = _instances.get(material); - for (Uniform uniform : instance.uniforms) { - uniforms.free(uniform); - } - _instances.get(material); - } - void MaterialRegistry::destroy() { // Clean up pipelines vkDestroyPipelineCache(_device, _pipeline_cache, nullptr); @@ -301,7 +294,7 @@ namespace Dynamo::Graphics::Vulkan { for (const auto &[key, renderpass] : _renderpasses) { vkDestroyRenderPass(_device, renderpass, nullptr); } - _layouts.clear(); + _renderpasses.clear(); // Clear all instances _instances.clear(); diff --git a/src/Graphics/Vulkan/MaterialRegistry.hpp b/src/Graphics/Vulkan/MaterialRegistry.hpp index 8637378..8d8706a 100644 --- a/src/Graphics/Vulkan/MaterialRegistry.hpp +++ b/src/Graphics/Vulkan/MaterialRegistry.hpp @@ -7,7 +7,6 @@ #include #include -#include #include #include #include @@ -15,10 +14,6 @@ #include namespace Dynamo::Graphics::Vulkan { - /** - * @brief Render pass configuration settings. - * - */ struct RenderPassSettings { VkFormat color_format; VkFormat depth_format; @@ -49,10 +44,6 @@ namespace Dynamo::Graphics::Vulkan { }; }; - /** - * @brief Pipeline layout settings. - * - */ struct PipelineLayoutSettings { std::vector descriptor_layouts; std::vector push_constant_ranges; @@ -98,10 +89,6 @@ namespace Dynamo::Graphics::Vulkan { }; }; - /** - * @brief Graphics pipeline configuration settings. - * - */ struct GraphicsPipelineSettings { VkPrimitiveTopology topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST; VkPolygonMode polygon_mode = VK_POLYGON_MODE_FILL; @@ -134,10 +121,6 @@ namespace Dynamo::Graphics::Vulkan { }; }; - /** - * @brief Material instance with references to allocated Vulkan resources. - * - */ struct MaterialInstance { VkRenderPass renderpass; VkPipelineLayout layout; @@ -148,10 +131,6 @@ namespace Dynamo::Graphics::Vulkan { std::vector push_constant_offsets; }; - /** - * @brief Material registry caches Vulkan objects associated with a material. - * - */ class MaterialRegistry { VkDevice _device; std::ofstream _ofstream; @@ -163,68 +142,24 @@ namespace Dynamo::Graphics::Vulkan { SparseArray _instances; - /** - * @brief Create a Vulkan render pass. - * - * @param settings - * @return VkRenderPass - */ VkRenderPass build_renderpass(const RenderPassSettings &settings) const; - /** - * @brief Create a Vulkan pipeline. - * - * @param settings - * @return VkPipeline - */ VkPipeline build_pipeline(const GraphicsPipelineSettings &settings) const; public: MaterialRegistry(VkDevice device, const std::string &filename); MaterialRegistry() = default; - /** - * @brief Build a material and its resources. - * - * @param descriptor - * @param swapchain - * @param shaders - * @param uniforms - * @return MaterialInstance - */ Material build(const MaterialDescriptor &descriptor, const Swapchain &swapchain, const ShaderRegistry &shaders, - UniformRegistry &uniforms); - - /** - * @brief Get a material instance. 
- * - * @param material - * @return MaterialInstance& - */ + UniformRegistry &uniforms, + MemoryPool &memory); + MaterialInstance &get(Material material); - /** - * @brief Destroy a material instance. - * - * Pipeline, layout, and render pass are preserved, only uniforms are freed. - * - * @param material - * @param uniforms - */ - void destroy(Material material, UniformRegistry &uniforms); - - /** - * @brief Destroy all Vulkan resources. - * - */ void destroy(); - /** - * @brief Write the pipeline cache to disk. - * - */ void write_to_disk(); }; } // namespace Dynamo::Graphics::Vulkan \ No newline at end of file diff --git a/src/Graphics/Vulkan/MemoryPool.cpp b/src/Graphics/Vulkan/MemoryPool.cpp new file mode 100644 index 0000000..7b28851 --- /dev/null +++ b/src/Graphics/Vulkan/MemoryPool.cpp @@ -0,0 +1,133 @@ +#include +#include +#include + +namespace Dynamo::Graphics::Vulkan { + MemoryPool::MemoryPool(VkDevice device, const PhysicalDevice &physical) : + _device(device), _physical(&physical), _groups(physical.memory.memoryTypeCount) {} + + unsigned MemoryPool::find_type_index(const VkMemoryRequirements &requirements, + VkMemoryPropertyFlags properties) const { + unsigned type_index = 0; + while (type_index < _physical->memory.memoryTypeCount) { + VkMemoryType type = _physical->memory.memoryTypes[type_index]; + bool has_type = requirements.memoryTypeBits & (1 << type_index); + bool has_properties = (properties & type.propertyFlags) == properties; + if (has_type && has_properties) { + break; + } + type_index++; + } + DYN_ASSERT(type_index < _groups.size()); + return type_index; + } + + MemoryPool::VirtualMemory MemoryPool::allocate_memory(const VkMemoryRequirements &requirements, + VkMemoryPropertyFlags properties) { + VirtualMemory allocation; + allocation.key.type = find_type_index(requirements, properties); + allocation.mapped = nullptr; + + MemoryGroup &group = _groups[allocation.key.type]; + for (allocation.key.index = 0; allocation.key.index < group.size(); allocation.key.index++) { + Memory &memory = group[allocation.key.index]; + + std::optional result = memory.allocator.reserve(requirements.size, requirements.alignment); + if (result.has_value()) { + allocation.memory = memory.handle; + allocation.key.offset = result.value(); + if (memory.mapped) { + allocation.mapped = static_cast(memory.mapped) + allocation.key.offset; + } + } + } + + // None found, allocate new memory + VkDeviceSize heap_size = std::max(requirements.size, MEMORY_ALLOCATION_SIZE); + allocation.memory = VkDeviceMemory_allocate(_device, allocation.key.type, heap_size); + if (properties & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) { + VkResult_check("Map Memory", vkMapMemory(_device, allocation.memory, 0, heap_size, 0, &allocation.mapped)); + } + group.push_back({allocation.memory, heap_size, allocation.mapped}); + + Memory &memory = group.back(); + allocation.key.offset = memory.allocator.reserve(requirements.size, requirements.alignment).value(); + return allocation; + } + + VirtualBuffer MemoryPool::build(VkBufferUsageFlags usage, VkMemoryPropertyFlags properties, unsigned size) { + // Create the buffer + VirtualBuffer buffer; + buffer.buffer = VkBuffer_create(_device, usage, size, nullptr, 0); + + // Allocate memory and bind to buffer + VkMemoryRequirements requirements; + vkGetBufferMemoryRequirements(_device, buffer.buffer, &requirements); + + VirtualMemory memory = allocate_memory(requirements, properties); + buffer.key = memory.key; + buffer.offset = 0; // TODO: Suballocation + buffer.mapped = memory.mapped; + + 
vkBindBufferMemory(_device, buffer.buffer, memory.memory, buffer.offset); + return buffer; + } + + VirtualImage MemoryPool::build(const TextureDescriptor &descriptor) { + // Create the image + VkExtent3D extent; + extent.width = descriptor.width; + extent.height = descriptor.height; + extent.depth = 1; + + VkFormat format = convert_texture_format(descriptor.format); + VkImageUsageFlags usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT; + + VirtualImage image; + image.image = VkImage_create(_device, + extent, + format, + VK_IMAGE_LAYOUT_UNDEFINED, + VK_IMAGE_TYPE_2D, + VK_IMAGE_TILING_OPTIMAL, + usage, + VK_SAMPLE_COUNT_1_BIT, + 1, + 1, + nullptr, + 0); + + // Allocate memory and bind to image + VkMemoryRequirements requirements; + vkGetImageMemoryRequirements(_device, image.image, &requirements); + + VirtualMemory memory = allocate_memory(requirements, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT); + image.key = memory.key; + image.mapped = memory.mapped; + + vkBindImageMemory(_device, image.image, memory.memory, image.key.offset); + return image; + } + + void MemoryPool::free(const VirtualBuffer &allocation) { + vkDestroyBuffer(_device, allocation.buffer, nullptr); + Memory &memory = _groups[allocation.key.type][allocation.key.index]; + memory.allocator.free(allocation.key.offset); + } + + void MemoryPool::free(const VirtualImage &allocation) { + vkDestroyImage(_device, allocation.image, nullptr); + Memory &memory = _groups[allocation.key.type][allocation.key.index]; + memory.allocator.free(allocation.key.offset); + } + + void MemoryPool::destroy() { + // Free device memory + for (const MemoryGroup &group : _groups) { + for (const Memory &memory : group) { + vkFreeMemory(_device, memory.handle, nullptr); + } + } + _groups.clear(); + } +} // namespace Dynamo::Graphics::Vulkan \ No newline at end of file diff --git a/src/Graphics/Vulkan/MemoryPool.hpp b/src/Graphics/Vulkan/MemoryPool.hpp new file mode 100644 index 0000000..f5712f5 --- /dev/null +++ b/src/Graphics/Vulkan/MemoryPool.hpp @@ -0,0 +1,67 @@ +#pragma once + +#include + +#include +#include +#include + +namespace Dynamo::Graphics::Vulkan { + // We only have 4096 guaranteed allocations. 512M * 4096 is approx. 2T, so this should be enough. 
+ constexpr VkDeviceSize MEMORY_ALLOCATION_SIZE = 512 * (1 << 20); + + struct AllocationKey { + unsigned offset; + unsigned type; + unsigned index; + }; + + struct VirtualBuffer { + VkBuffer buffer; + AllocationKey key; + unsigned offset; + void *mapped; + }; + + struct VirtualImage { + VkImage image; + AllocationKey key; + void *mapped; + }; + + class MemoryPool { + struct VirtualMemory { + VkDeviceMemory memory; + AllocationKey key; + void *mapped; + }; + struct Memory { + VkDeviceMemory handle; + Allocator allocator; + void *mapped; + }; + using MemoryGroup = std::vector; + + VkDevice _device; + const PhysicalDevice *_physical; + std::vector _groups; + + unsigned find_type_index(const VkMemoryRequirements &requirements, VkMemoryPropertyFlags properties) const; + + VirtualMemory allocate_memory(const VkMemoryRequirements &requirements, VkMemoryPropertyFlags properties); + + public: + MemoryPool(VkDevice device, const PhysicalDevice &physical); + MemoryPool() = default; + + VirtualBuffer build(VkBufferUsageFlags usage, VkMemoryPropertyFlags properties, unsigned size); + + VirtualImage build(const TextureDescriptor &descriptor); + + void free(const VirtualBuffer &allocation); + + void free(const VirtualImage &allocation); + + void destroy(); + }; +}; // namespace Dynamo::Graphics::Vulkan \ No newline at end of file diff --git a/src/Graphics/Vulkan/MeshRegistry.cpp b/src/Graphics/Vulkan/MeshRegistry.cpp index ee0fb4c..56b7cd6 100644 --- a/src/Graphics/Vulkan/MeshRegistry.cpp +++ b/src/Graphics/Vulkan/MeshRegistry.cpp @@ -3,125 +3,110 @@ namespace Dynamo::Graphics::Vulkan { MeshRegistry::MeshRegistry(VkDevice device, const PhysicalDevice &physical, VkCommandPool transfer_pool) { - std::array transfer_commands; - VkCommandBuffer_allocate(_device, transfer_pool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, transfer_commands.data(), 3); - - _vertex = Buffer(device, - physical, - transfer_commands[0], - VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, - VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT); - _index = Buffer(device, - physical, - transfer_commands[1], - VK_BUFFER_USAGE_INDEX_BUFFER_BIT, - VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT); - _staging = Buffer(device, - physical, - transfer_commands[2], - VK_BUFFER_USAGE_TRANSFER_SRC_BIT, - VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT); + VkCommandBuffer_allocate(_device, transfer_pool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, &_command_buffer, 1); + vkGetDeviceQueue(_device, physical.transfer_queues.index, 0, &_transfer_queue); } - Mesh MeshRegistry::build(const MeshDescriptor &descriptor) { - // Set the vertex, instance, and index counts - MeshAllocation allocation; - allocation.index_buffer = _index.handle(); - allocation.vertex_count = descriptor.vertex_count; - allocation.instance_count = descriptor.instance_count; - allocation.index_count = descriptor.indices.size(); - // Write attributes to the buffers - for (auto &attribute : descriptor.attributes) { - unsigned offset = _vertex.reserve(attribute.size()); - allocation.attribute_offsets.push_back(offset); - allocation.buffers.push_back(_vertex.handle()); + void MeshRegistry::write_local_buffer(MemoryPool &memory, const void *src, VirtualBuffer &dst, unsigned size) { + VkBufferCopy region; + region.srcOffset = 0; + region.dstOffset = dst.offset; + region.size = size; - VkBufferCopy region; - region.srcOffset = 0; - region.dstOffset = offset; - region.size = attribute.size(); + VirtualBuffer staging = memory.build(VK_BUFFER_USAGE_TRANSFER_SRC_BIT, + VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | 
VK_MEMORY_PROPERTY_HOST_COHERENT_BIT, + size); + std::memcpy(staging.mapped, src, size); - unsigned staging_offset = _staging.reserve(attribute.size()); - void *ptr = _staging.get_mapped(staging_offset); - std::memcpy(ptr, attribute.data(), attribute.size()); + VkCommandBuffer_immediate_start(_command_buffer); + vkCmdCopyBuffer(_command_buffer, staging.buffer, dst.buffer, 1, ®ion); + VkCommandBuffer_immediate_end(_command_buffer, _transfer_queue); - _staging.copy_to(_vertex, ®ion, 1); - _staging.free(staging_offset); + memory.free(staging); + } + + Mesh MeshRegistry::build(const MeshDescriptor &descriptor, MemoryPool &memory) { + // Set the vertex, instance, and index counts + MeshInstance instance; + instance.vertex_count = descriptor.vertex_count; + instance.instance_count = descriptor.instance_count; + instance.index_count = descriptor.indices.size(); + + // Write attributes to the buffers + for (auto &attribute : descriptor.attributes) { + VirtualBuffer vertex = memory.build(VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT, + VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, + attribute.size()); + instance.attribute_offsets.push_back(vertex.offset); + instance.buffers.push_back(vertex.buffer); + instance.virtual_buffers.push_back(vertex); + + write_local_buffer(memory, attribute.data(), vertex, attribute.size()); } // Write index array, if available switch (descriptor.index_type) { case IndexType::U16: { + instance.index_type = VK_INDEX_TYPE_UINT16; + std::vector u16_indices; for (unsigned index : descriptor.indices) { u16_indices.push_back(index); } - unsigned size = u16_indices.size() * sizeof(u16_indices[0]); - allocation.index_offset = _index.reserve(size); - - VkBufferCopy region; - region.srcOffset = 0; - region.dstOffset = allocation.index_offset; - region.size = size; - - unsigned staging_offset = _staging.reserve(size); - void *ptr = _staging.get_mapped(staging_offset); - std::memcpy(ptr, u16_indices.data(), size); - - _staging.copy_to(_index, ®ion, 1); - _staging.free(staging_offset); - - allocation.index_type = VK_INDEX_TYPE_UINT16; + unsigned size = u16_indices.size() * 2; + VirtualBuffer index = memory.build(VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT, + VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, + size); + instance.index_buffer = index.buffer; + instance.index_offset = index.offset; + instance.virtual_buffers.push_back(index); + + write_local_buffer(memory, u16_indices.data(), index, size); break; } case IndexType::U32: { + instance.index_type = VK_INDEX_TYPE_UINT32; + std::vector u32_indices; for (unsigned index : descriptor.indices) { u32_indices.push_back(index); } - unsigned size = u32_indices.size() * sizeof(u32_indices[0]); - allocation.index_offset = _index.reserve(size); - - VkBufferCopy region; - region.srcOffset = 0; - region.dstOffset = allocation.index_offset; - region.size = size; - - unsigned staging_offset = _staging.reserve(size); - void *ptr = _staging.get_mapped(staging_offset); - std::memcpy(ptr, u32_indices.data(), size); - - _staging.copy_to(_index, ®ion, 1); - _staging.free(staging_offset); - - allocation.index_type = VK_INDEX_TYPE_UINT32; + unsigned size = u32_indices.size() * 4; + VirtualBuffer index = memory.build(VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT, + VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, + size); + instance.index_buffer = index.buffer; + instance.index_offset = index.offset; + instance.virtual_buffers.push_back(index); + + write_local_buffer(memory, u32_indices.data(), index, size); break; 
} case IndexType::None: - allocation.index_type = VK_INDEX_TYPE_NONE_KHR; + instance.index_type = VK_INDEX_TYPE_NONE_KHR; break; } // Register the allocation - return _allocations.insert(allocation); + return _instances.insert(instance); } - MeshAllocation &MeshRegistry::get(Mesh mesh) { return _allocations.get(mesh); } + MeshInstance &MeshRegistry::get(Mesh mesh) { return _instances.get(mesh); } - void MeshRegistry::destroy(Mesh mesh) { - MeshAllocation &allocation = _allocations.get(mesh); - for (unsigned offset : allocation.attribute_offsets) { - _vertex.free(offset); + void MeshRegistry::destroy(Mesh mesh, MemoryPool &memory) { + MeshInstance &instance = _instances.get(mesh); + for (VirtualBuffer &buffer : instance.virtual_buffers) { + memory.free(buffer); } - if (allocation.index_type != VK_INDEX_TYPE_NONE_KHR) { - _index.free(allocation.index_offset); - } - _allocations.remove(mesh); + _instances.remove(mesh); } - void MeshRegistry::destroy() { - _vertex.destroy(); - _index.destroy(); - _staging.destroy(); + void MeshRegistry::destroy(MemoryPool &memory) { + _instances.foreach ([&](MeshInstance &instance) { + for (VirtualBuffer &buffer : instance.virtual_buffers) { + memory.free(buffer); + } + }); + _instances.clear(); } } // namespace Dynamo::Graphics::Vulkan \ No newline at end of file diff --git a/src/Graphics/Vulkan/MeshRegistry.hpp b/src/Graphics/Vulkan/MeshRegistry.hpp index 9ead453..c93f5bb 100644 --- a/src/Graphics/Vulkan/MeshRegistry.hpp +++ b/src/Graphics/Vulkan/MeshRegistry.hpp @@ -5,16 +5,13 @@ #include #include -#include +#include #include namespace Dynamo::Graphics::Vulkan { - /** - * @brief Mesh GPU allocation instance. - * - */ - struct MeshAllocation { + struct MeshInstance { std::vector attribute_offsets; + std::vector virtual_buffers; std::vector buffers; VkBuffer index_buffer; unsigned index_offset; @@ -24,49 +21,24 @@ namespace Dynamo::Graphics::Vulkan { VkIndexType index_type; }; - /** - * @brief Mesh registry. - * - */ class MeshRegistry { VkDevice _device; - Buffer _vertex; - Buffer _index; - Buffer _staging; + VkCommandBuffer _command_buffer; + VkQueue _transfer_queue; + SparseArray _instances; - SparseArray _allocations; + void write_local_buffer(MemoryPool &memory, const void *src, VirtualBuffer &dst, unsigned size); public: MeshRegistry(VkDevice device, const PhysicalDevice &physical, VkCommandPool transfer_pool); MeshRegistry() = default; - /** - * @brief Get a mesh allocation. - * - * @param mesh - * @return MeshAllocation& - */ - MeshAllocation &get(Mesh mesh); + MeshInstance &get(Mesh mesh); - /** - * @brief Upload a mesh descriptor to VRAM. - * - * @param descriptor - * @return Mesh - */ - Mesh build(const MeshDescriptor &descriptor); + Mesh build(const MeshDescriptor &descriptor, MemoryPool &memory); - /** - * @brief Free all allocated buffers for a mesh. - * - * @param mesh - */ - void destroy(Mesh mesh); + void destroy(Mesh mesh, MemoryPool &memory); - /** - * @brief Destroy mesh allocation buffers. 
- * - */ - void destroy(); + void destroy(MemoryPool &memory); }; } // namespace Dynamo::Graphics::Vulkan \ No newline at end of file diff --git a/src/Graphics/Vulkan/PhysicalDevice.cpp b/src/Graphics/Vulkan/PhysicalDevice.cpp index 47a09e8..aa2f03e 100644 --- a/src/Graphics/Vulkan/PhysicalDevice.cpp +++ b/src/Graphics/Vulkan/PhysicalDevice.cpp @@ -62,7 +62,7 @@ namespace Dynamo::Graphics::Vulkan { } } - PhysicalDevice PhysicalDevice::select(VkInstance instance, VkSurfaceKHR surface) { + PhysicalDevice PhysicalDevice::select_best(VkInstance instance, VkSurfaceKHR surface) { unsigned count = 0; vkEnumeratePhysicalDevices(instance, &count, nullptr); std::vector handles(count); diff --git a/src/Graphics/Vulkan/PhysicalDevice.hpp b/src/Graphics/Vulkan/PhysicalDevice.hpp index 3193570..245b784 100644 --- a/src/Graphics/Vulkan/PhysicalDevice.hpp +++ b/src/Graphics/Vulkan/PhysicalDevice.hpp @@ -5,10 +5,7 @@ #include namespace Dynamo::Graphics::Vulkan { - /** - * @brief Vulkan Queue Family. - * - */ + struct QueueFamily { unsigned index = 0; unsigned count = 0; @@ -16,20 +13,12 @@ namespace Dynamo::Graphics::Vulkan { }; using QueueFamilyRef = std::reference_wrapper; - /** - * @brief Available swapchain options. - * - */ struct SwapchainOptions { VkSurfaceCapabilitiesKHR capabilities; std::vector formats; std::vector present_modes; }; - /** - * @brief Wrapper object for a Vulkan physical device and its properties. - * - */ struct PhysicalDevice { VkPhysicalDevice handle; VkSurfaceKHR surface; @@ -44,50 +33,17 @@ namespace Dynamo::Graphics::Vulkan { QueueFamily compute_queues; QueueFamily transfer_queues; - /** - * @brief Create PhysicalDevice object. - * - * @param handle - * @param surface - */ PhysicalDevice(VkPhysicalDevice handle, VkSurfaceKHR surface); PhysicalDevice() = default; - /** - * @brief Select the best available physical device. - * - * @param instance - * @param surface - * @return PhysicalDevice - */ - static PhysicalDevice select(VkInstance instance, VkSurfaceKHR surface); + static PhysicalDevice select_best(VkInstance instance, VkSurfaceKHR surface); - /** - * @brief Get the available swapchain configuration options. - * - * @return SwapchainOptions - */ SwapchainOptions get_swapchain_options() const; - /** - * @brief Get the unique set of queue families - * - * @return std::vector - */ std::vector unique_queue_families() const; - /** - * @brief Get the set of required extensions. - * - * @return std::vector - */ std::vector required_extensions() const; - /** - * @brief Compute the desirability "score". 
- * - * @return unsigned - */ unsigned score() const; }; } // namespace Dynamo::Graphics::Vulkan \ No newline at end of file diff --git a/src/Graphics/Vulkan/ShaderRegistry.cpp b/src/Graphics/Vulkan/ShaderRegistry.cpp index f549138..a5e08dd 100644 --- a/src/Graphics/Vulkan/ShaderRegistry.cpp +++ b/src/Graphics/Vulkan/ShaderRegistry.cpp @@ -136,20 +136,20 @@ namespace Dynamo::Graphics::Vulkan { // Add descriptor metadata to reflection DescriptorBinding descriptor_binding; descriptor_binding.name = refl_binding.name; + descriptor_binding.type = layout_binding.descriptorType; descriptor_binding.shared = shared_it != shared_uniforms.end(); - descriptor_binding.set = refl_binding.set; descriptor_binding.binding = refl_binding.binding; - descriptor_binding.descriptor_count = layout_binding.descriptorCount; + descriptor_binding.count = layout_binding.descriptorCount; descriptor_binding.size = refl_binding.block.size; descriptor_set.bindings.push_back(descriptor_binding); Log::info( "* Descriptor (name: {}, set: {}, binding: {}, size: {}, dim: {}, shared: {}, type: {}, stage: {})", descriptor_binding.name, - descriptor_binding.set, + refl_binding.set, descriptor_binding.binding, descriptor_binding.size, - descriptor_binding.descriptor_count, + descriptor_binding.count, descriptor_binding.shared, VkDescriptorType_string(layout_binding.descriptorType), VkShaderStageFlagBits_string(static_cast(layout_binding.stageFlags))); @@ -285,11 +285,13 @@ namespace Dynamo::Graphics::Vulkan { } void ShaderRegistry::destroy() { + // Destroy descriptor layouts for (const auto &[key, layout] : _descriptor_layouts) { vkDestroyDescriptorSetLayout(_device, layout, nullptr); } _descriptor_layouts.clear(); + // Destroy shader modules _modules.foreach ([&](ShaderModule &module) { vkDestroyShaderModule(_device, module.handle, nullptr); }); _modules.clear(); } diff --git a/src/Graphics/Vulkan/ShaderRegistry.hpp b/src/Graphics/Vulkan/ShaderRegistry.hpp index c58558b..3d56636 100644 --- a/src/Graphics/Vulkan/ShaderRegistry.hpp +++ b/src/Graphics/Vulkan/ShaderRegistry.hpp @@ -11,10 +11,6 @@ #include namespace Dynamo::Graphics::Vulkan { - /** - * @brief Descriptor set layout key. - * - */ struct DescriptorLayoutKey { std::vector bindings; @@ -54,42 +50,26 @@ namespace Dynamo::Graphics::Vulkan { }; }; - /** - * @brief Reflected descriptor binding. - * - */ struct DescriptorBinding { std::string name; - unsigned set; + VkDescriptorType type; unsigned binding; - unsigned descriptor_count; + unsigned count; unsigned size; bool shared; }; - /** - * @brief Descriptor set. - * - */ struct DescriptorSet { VkDescriptorSetLayout layout; std::vector bindings; }; - /** - * @brief Push constant. - * - */ struct PushConstant { std::string name; VkPushConstantRange range; bool shared; }; - /** - * @brief Shader module instance. - * - */ struct ShaderModule { VkShaderModule handle; std::vector bindings; @@ -98,53 +78,20 @@ namespace Dynamo::Graphics::Vulkan { std::vector push_constants; }; - /** - * @brief Shader registry. - * - */ class ShaderRegistry { VkDevice _device; SparseArray _modules; std::unordered_map _descriptor_layouts; - /** - * @brief Compile a shader source. - * - * @param name - * @param code - * @param stage - * @param optimized - * @return std::vector - */ std::vector compile(const std::string &name, const std::string &code, VkShaderStageFlagBits stage, bool optimized); - /** - * @brief Extract vertex inputs from the shader source. 
- * - * @param module - * @param reflection - */ void reflect_vertex_input(ShaderModule &module, SpvReflectShaderModule reflection); - /** - * @brief Extract descriptor sets from the shader source. - * - * @param module - * @param reflection - * @param shared_uniforms - */ void reflect_descriptor_sets(ShaderModule &module, SpvReflectShaderModule reflection, const std::vector &shared_uniforms); - /** - * @brief Extract push constants from the shader source. - * - * @param module - * @param reflection - * @param shared_uniforms - */ void reflect_push_constants(ShaderModule &module, SpvReflectShaderModule reflection, const std::vector &shared_uniforms); @@ -153,33 +100,12 @@ namespace Dynamo::Graphics::Vulkan { ShaderRegistry(VkDevice device); ShaderRegistry() = default; - /** - * @brief Get a shader module. - * - * @param shader - * @return ShaderModule& - */ const ShaderModule &get(Shader shader) const; - /** - * @brief Build a shader module from a descriptor. - * - * @param descriptor - * @return Shader - */ Shader build(const ShaderDescriptor &descriptor); - /** - * @brief Destroy a shader module. - * - * @param shader - */ void destroy(Shader shader); - /** - * @brief Destroy all existing shader modules. - * - */ void destroy(); }; } // namespace Dynamo::Graphics::Vulkan diff --git a/src/Graphics/Vulkan/Swapchain.cpp b/src/Graphics/Vulkan/Swapchain.cpp index 01ba99c..e483573 100644 --- a/src/Graphics/Vulkan/Swapchain.cpp +++ b/src/Graphics/Vulkan/Swapchain.cpp @@ -77,7 +77,7 @@ namespace Dynamo::Graphics::Vulkan { swapchain_info.oldSwapchain = previous.value().handle; } - VkResult_log("Create Swapchain", vkCreateSwapchainKHR(device, &swapchain_info, nullptr, &handle)); + VkResult_check("Create Swapchain", vkCreateSwapchainKHR(device, &swapchain_info, nullptr, &handle)); // Destroy the old swapchain if (previous.has_value()) { diff --git a/src/Graphics/Vulkan/Swapchain.hpp b/src/Graphics/Vulkan/Swapchain.hpp index 966c8ee..2ce159e 100644 --- a/src/Graphics/Vulkan/Swapchain.hpp +++ b/src/Graphics/Vulkan/Swapchain.hpp @@ -8,10 +8,6 @@ #include namespace Dynamo::Graphics::Vulkan { - /** - * @brief Wrapper object for a Vulkan swapchain and its properties. - * - */ struct Swapchain { VkDevice device; VkSwapchainKHR handle; @@ -23,25 +19,12 @@ namespace Dynamo::Graphics::Vulkan { std::vector images; std::vector views; - /** - * @brief Create a Vulkan swapchain. - * - * @param device - * @param physical - * @param display - * @param previous - * @return Swapchain - */ Swapchain(VkDevice device, const PhysicalDevice &physical, const Display &display, std::optional previous = {}); Swapchain() = default; - /** - * @brief Destroy the swapchain and its resources. 
- * - */ void destroy(); }; } // namespace Dynamo::Graphics::Vulkan \ No newline at end of file diff --git a/src/Graphics/Vulkan/TextureRegistry.cpp b/src/Graphics/Vulkan/TextureRegistry.cpp new file mode 100644 index 0000000..aa0ef91 --- /dev/null +++ b/src/Graphics/Vulkan/TextureRegistry.cpp @@ -0,0 +1,127 @@ +#include +#include + +namespace Dynamo::Graphics::Vulkan { + TextureRegistry::TextureRegistry(VkDevice device, const PhysicalDevice &physical, VkCommandPool transfer_pool) : + _device(device), _physical(&physical) { + VkCommandBuffer_allocate(_device, transfer_pool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, &_command_buffer, 1); + vkGetDeviceQueue(_device, physical.transfer_queues.index, 0, &_transfer_queue); + } + + Texture TextureRegistry::build(const TextureDescriptor &descriptor, MemoryPool &memory) { + TextureInstance instance; + + // Build sampler + SamplerSettings sampler_settings; + sampler_settings.u_address_mode = convert_texture_address_mode(descriptor.u_address_mode); + sampler_settings.v_address_mode = convert_texture_address_mode(descriptor.v_address_mode); + sampler_settings.w_address_mode = convert_texture_address_mode(descriptor.w_address_mode); + sampler_settings.mag_filter = convert_texture_filter(descriptor.mag_filter); + sampler_settings.min_filter = convert_texture_filter(descriptor.min_filter); + sampler_settings.border_color = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK; + + auto sampler_it = _samplers.find(sampler_settings); + if (sampler_it != _samplers.end()) { + instance.sampler = sampler_it->second; + } else { + instance.sampler = VkSampler_create(_device, + sampler_settings.u_address_mode, + sampler_settings.v_address_mode, + sampler_settings.w_address_mode, + sampler_settings.mag_filter, + sampler_settings.min_filter, + sampler_settings.border_color, + _physical->properties.limits.maxSamplerAnisotropy); + _samplers.emplace(sampler_settings, instance.sampler); + } + + // Build image + instance.image = memory.build(descriptor); + + // Copy texels to staging buffer + VirtualBuffer staging = memory.build(VK_BUFFER_USAGE_TRANSFER_SRC_BIT, + VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT, + descriptor.texels.size()); + std::memcpy(staging.mapped, descriptor.texels.data(), descriptor.texels.size()); + + VkImageSubresourceRange subresources; + subresources.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; + subresources.baseMipLevel = 0; + subresources.levelCount = 1; + subresources.baseArrayLayer = 0; + subresources.layerCount = 1; + + // Transition image to optimal layout for buffer copying + VkCommandBuffer_immediate_start(_command_buffer); + VkImage_transition_layout(instance.image.image, + _command_buffer, + VK_FORMAT_R8G8B8A8_SRGB, + VK_IMAGE_LAYOUT_UNDEFINED, + VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, + subresources); + + // Copy buffer to image + VkBufferImageCopy region = {}; + region.bufferOffset = 0; + region.bufferRowLength = 0; + region.bufferImageHeight = 0; + + region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; + region.imageSubresource.mipLevel = 0; + region.imageSubresource.baseArrayLayer = 0; + region.imageSubresource.layerCount = 1; + + region.imageOffset = {0, 0, 0}; + region.imageExtent = {descriptor.width, descriptor.height, 1}; + + vkCmdCopyBufferToImage(_command_buffer, + staging.buffer, + instance.image.image, + VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, + 1, + ®ion); + + // Transition back to shader read optimal layout + VkImage_transition_layout(instance.image.image, + _command_buffer, + VK_FORMAT_R8G8B8A8_SRGB, + 
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, + VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, + subresources); + VkCommandBuffer_immediate_end(_command_buffer, _transfer_queue); + + // Free the staging buffer + memory.free(staging); + + // Build image view + instance.view = VkImageView_create(_device, + instance.image.image, + convert_texture_format(descriptor.format), + VK_IMAGE_VIEW_TYPE_2D, + subresources); + + return _instances.insert(instance); + } + + const TextureInstance &TextureRegistry::get(Texture texture) const { return _instances.get(texture); } + + void TextureRegistry::destroy(Texture texture, MemoryPool &memory) { + const TextureInstance &instance = _instances.get(texture); + vkDestroyImageView(_device, instance.view, nullptr); + memory.free(instance.image); + _instances.remove(texture); + } + + void TextureRegistry::destroy(MemoryPool &memory) { + _instances.foreach ([&](TextureInstance &instance) { + vkDestroyImageView(_device, instance.view, nullptr); + memory.free(instance.image); + }); + _instances.clear(); + + for (const auto &[key, sampler] : _samplers) { + vkDestroySampler(_device, sampler, nullptr); + } + _samplers.clear(); + } +} // namespace Dynamo::Graphics::Vulkan \ No newline at end of file diff --git a/src/Graphics/Vulkan/TextureRegistry.hpp b/src/Graphics/Vulkan/TextureRegistry.hpp new file mode 100644 index 0000000..2478b61 --- /dev/null +++ b/src/Graphics/Vulkan/TextureRegistry.hpp @@ -0,0 +1,69 @@ +#pragma once + +#include + +#include + +#include +#include +#include +#include + +namespace Dynamo::Graphics::Vulkan { + struct SamplerSettings { + VkSamplerAddressMode u_address_mode; + VkSamplerAddressMode v_address_mode; + VkSamplerAddressMode w_address_mode; + VkFilter min_filter; + VkFilter mag_filter; + VkBorderColor border_color; + + bool operator==(const SamplerSettings &other) const { + return u_address_mode == other.u_address_mode && v_address_mode == other.v_address_mode && + w_address_mode == other.w_address_mode && min_filter == other.min_filter && + mag_filter == other.mag_filter && border_color == other.border_color; + } + + struct Hash { + inline size_t operator()(const SamplerSettings &settings) const { + size_t hash0 = std::hash{}(settings.u_address_mode); + size_t hash1 = std::hash{}(settings.v_address_mode); + size_t hash2 = std::hash{}(settings.w_address_mode); + size_t hash3 = std::hash{}(settings.min_filter); + size_t hash4 = std::hash{}(settings.mag_filter); + size_t hash5 = std::hash{}(settings.border_color); + + return hash0 ^ (hash1 << 1) ^ (hash2 << 2) ^ (hash3 << 3) ^ (hash4 << 4) ^ (hash5 << 5); + } + }; + }; + + struct TextureInstance { + VirtualImage image; + VkImageView view; + VkSampler sampler; + }; + + class TextureRegistry { + VkDevice _device; + const PhysicalDevice *_physical; + + VkQueue _transfer_queue; + VkCommandBuffer _command_buffer; + + std::unordered_map _samplers; + SparseArray _instances; + + public: + TextureRegistry(VkDevice device, const PhysicalDevice &physical, VkCommandPool transfer_pool); + TextureRegistry() = default; + + Texture build(const TextureDescriptor &descriptor, MemoryPool &memory); + + const TextureInstance &get(Texture texture) const; + + void destroy(Texture texture, MemoryPool &memory); + + void destroy(MemoryPool &memory); + }; +} // namespace Dynamo::Graphics::Vulkan \ No newline at end of file diff --git a/src/Graphics/Vulkan/UniformRegistry.cpp b/src/Graphics/Vulkan/UniformRegistry.cpp index 98b0cda..d272fb6 100644 --- a/src/Graphics/Vulkan/UniformRegistry.cpp +++ 
b/src/Graphics/Vulkan/UniformRegistry.cpp @@ -4,72 +4,86 @@ namespace Dynamo::Graphics::Vulkan { UniformRegistry::UniformRegistry(VkDevice device, const PhysicalDevice &physical, VkCommandPool transfer_pool) : _device(device) { - std::array sizes; - sizes[0].type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER; - sizes[0].descriptorCount = 1024; - + std::array sizes = { + VkDescriptorPoolSize{VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1024}, + VkDescriptorPoolSize{VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1024}, + }; _pool = VkDescriptorPool_create(device, sizes.data(), sizes.size(), 1024); - VkCommandBuffer transfer_buffer; - VkCommandBuffer_allocate(_device, transfer_pool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, &transfer_buffer, 1); - _uniform_buffer = Buffer(_device, - physical, - transfer_buffer, - VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, - VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT); - - // Minimum of 256 bytes for push constants - _push_constant_buffer = VirtualMemory(256); + // Limit of 128 bytes for push constants + _push_constant_buffer = VirtualMemory(128); } - DescriptorAllocation UniformRegistry::allocate(const DescriptorSet &set) { - DescriptorAllocation allocation; + VirtualBuffer UniformRegistry::allocate_uniform_buffer(VkDescriptorSet descriptor_set, + DescriptorBinding &binding, + MemoryPool &memory) { + // Allocate shared uniform binding once only + VirtualBuffer buffer; + unsigned size = binding.size * binding.count; + if (binding.shared) { + auto shared_it = _shared.find(binding.name); + if (shared_it != _shared.end()) { + buffer = shared_it->second.descriptor_buffer; + } else { + buffer = memory.build(VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, + VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT, + size); + + SharedVariable shared; + shared.descriptor_buffer = buffer; + _shared.emplace(binding.name, shared); + } + } else { + buffer = memory.build(VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, + VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT, + size); + } + // Write each binding array element + for (unsigned i = 0; i < binding.count; i++) { + VkDescriptorBufferInfo buffer_info; + buffer_info.buffer = buffer.buffer; + buffer_info.offset = buffer.offset + i * binding.size; + buffer_info.range = binding.size; + + VkWriteDescriptorSet write = {}; + write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; + write.descriptorType = binding.type; + write.dstSet = descriptor_set; + write.dstBinding = binding.binding; + write.dstArrayElement = i; + write.descriptorCount = 1; + write.pBufferInfo = &buffer_info; + vkUpdateDescriptorSets(_device, 1, &write, 0, nullptr); + } + + return buffer; + } + + DescriptorAllocation UniformRegistry::allocate(const DescriptorSet &set, MemoryPool &memory) { // TODO: Recycle descriptor sets that are not used // TODO: Need to allocate a new descriptor pool if this fails - VkDescriptorSet_allocate(_device, _pool, &set.layout, &allocation.set, 1); + DescriptorAllocation allocation; + VkDescriptorSet_allocate(_device, _pool, &set.layout, &allocation.descriptor_set, 1); - // Map descriptors to buffer + // Process uniform bindings for (DescriptorBinding binding : set.bindings) { - unsigned block_size = binding.size * binding.descriptor_count; - - // Allocate shared uniform once only - unsigned block_offset; - if (binding.shared) { - auto shared_it = _shared_offsets.find(binding.name); - if (shared_it != _shared_offsets.end()) { - block_offset = shared_it->second; - } else { - block_offset = 
_uniform_buffer.reserve(block_size); - _shared_offsets.emplace(binding.name, block_offset); - } - } else { - block_offset = _uniform_buffer.reserve(block_size); - } - - // Write each binding array element - for (unsigned i = 0; i < binding.descriptor_count; i++) { - VkDescriptorBufferInfo buffer_info; - buffer_info.buffer = _uniform_buffer.handle(); - buffer_info.offset = block_offset + i * binding.size; - buffer_info.range = binding.size; - - VkWriteDescriptorSet write = {}; - write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; - write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER; - write.dstSet = allocation.set; - write.dstBinding = binding.binding; - write.dstArrayElement = i; - write.descriptorCount = 1; - write.pBufferInfo = &buffer_info; - vkUpdateDescriptorSets(_device, 1, &write, 0, nullptr); - } - UniformVariable var; var.name = binding.name; - var.type = UniformVariableType::Descriptor; - var.block_offset = block_offset; - var.block_size = block_size; + var.type = UniformType::Descriptor; + var.descriptor.type = binding.type; + var.descriptor.set = allocation.descriptor_set; + var.descriptor.binding = binding.binding; + var.descriptor.size = binding.size; + var.descriptor.count = binding.count; + + // Handle each descriptor type + switch (binding.type) { + case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER: + var.descriptor.buffer = allocate_uniform_buffer(allocation.descriptor_set, binding, memory); + default: + break; + } allocation.uniforms.push_back(_variables.insert(var)); } @@ -79,26 +93,29 @@ namespace Dynamo::Graphics::Vulkan { PushConstantAllocation UniformRegistry::allocate(const PushConstant &push_constant) { UniformVariable var; var.name = push_constant.name; - var.type = UniformVariableType::PushConstant; - var.block_size = push_constant.range.size; + var.type = UniformType::PushConstant; + var.push_constant.size = push_constant.range.size; // Allocate shared uniform once only if (push_constant.shared) { - auto shared_it = _shared_offsets.find(push_constant.name); - if (shared_it != _shared_offsets.end()) { - var.block_offset = shared_it->second; + auto shared_it = _shared.find(push_constant.name); + if (shared_it != _shared.end()) { + var.push_constant.offset = shared_it->second.push_constant_offset; } else { - var.block_offset = _push_constant_buffer.reserve(var.block_size); - _shared_offsets.emplace(push_constant.name, var.block_offset); + var.push_constant.offset = _push_constant_buffer.reserve(var.push_constant.size); + + SharedVariable shared; + shared.push_constant_offset = var.push_constant.offset; + _shared.emplace(push_constant.name, shared); } } else { - var.block_offset = _push_constant_buffer.reserve(var.block_size); + var.push_constant.offset = _push_constant_buffer.reserve(var.push_constant.size); } PushConstantAllocation allocation; allocation.uniform = _variables.insert(var); allocation.range = push_constant.range; - allocation.block_offset = var.block_offset; + allocation.block_offset = var.push_constant.offset; return allocation; } @@ -109,35 +126,71 @@ namespace Dynamo::Graphics::Vulkan { return _push_constant_buffer.get_mapped(block_offset); } - void UniformRegistry::write(Uniform uniform, void *data) { + void UniformRegistry::write(Uniform uniform, void *data, unsigned index, unsigned count) { const UniformVariable &var = _variables.get(uniform); - void *ptr = nullptr; switch (var.type) { - case UniformVariableType::Descriptor: - ptr = _uniform_buffer.get_mapped(var.block_offset); + case UniformType::Descriptor: { + char *dst = 
static_cast(var.descriptor.buffer.mapped); + std::memcpy(dst + index * var.descriptor.size, data, var.descriptor.size * count); break; - case UniformVariableType::PushConstant: - ptr = _push_constant_buffer.get_mapped(var.block_offset); + } + case UniformType::PushConstant: { + char *dst = static_cast(_push_constant_buffer.get_mapped(var.push_constant.offset)); + std::memcpy(dst + index * var.push_constant.size, data, var.push_constant.size * count); break; } - std::memcpy(ptr, data, var.block_size); + } } - void UniformRegistry::free(Uniform uniform) { + void UniformRegistry::bind(Uniform uniform, const TextureInstance &texture, unsigned index) { + UniformVariable &var = _variables.get(uniform); + + VkDescriptorImageInfo image_info; + image_info.imageView = texture.view; + image_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL; + image_info.sampler = texture.sampler; + + VkWriteDescriptorSet write = {}; + write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; + write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER; + write.dstSet = var.descriptor.set; + write.dstBinding = var.descriptor.binding; + write.dstArrayElement = index; + write.descriptorCount = 1; + write.pImageInfo = &image_info; + + vkUpdateDescriptorSets(_device, 1, &write, 0, nullptr); + } + + void UniformRegistry::free(Uniform uniform, MemoryPool &memory) { const UniformVariable &var = _variables.get(uniform); switch (var.type) { - case UniformVariableType::Descriptor: - _uniform_buffer.free(var.block_offset); + case UniformType::Descriptor: + if (var.descriptor.type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) { + memory.free(var.descriptor.buffer); + } break; - case UniformVariableType::PushConstant: - _push_constant_buffer.free(var.block_offset); + case UniformType::PushConstant: + _push_constant_buffer.free(var.push_constant.offset); break; } _variables.remove(uniform); } - void UniformRegistry::destroy() { + void UniformRegistry::destroy(MemoryPool &memory) { vkDestroyDescriptorPool(_device, _pool, nullptr); - _uniform_buffer.destroy(); + _variables.foreach ([&](UniformVariable &var) { + switch (var.type) { + case UniformType::Descriptor: + if (var.descriptor.type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) { + memory.free(var.descriptor.buffer); + } + break; + case UniformType::PushConstant: + _push_constant_buffer.free(var.push_constant.offset); + break; + } + }); + _variables.clear(); } } // namespace Dynamo::Graphics::Vulkan \ No newline at end of file diff --git a/src/Graphics/Vulkan/UniformRegistry.hpp b/src/Graphics/Vulkan/UniformRegistry.hpp index b83c758..c80a90e 100644 --- a/src/Graphics/Vulkan/UniformRegistry.hpp +++ b/src/Graphics/Vulkan/UniformRegistry.hpp @@ -4,122 +4,91 @@ #include -#include +#include #include +#include #include +#include #include #include namespace Dynamo::Graphics::Vulkan { - /** - * @brief In Vulkan, shader variables can be from a descriptor or push constant. - * - * Renderer API should be able to access both types with the same API. - * - */ - enum class UniformVariableType { + // In Vulkan, uniform variables can be from a descriptor or push constant. + // Renderer API should be able to access both types with the same API. + enum class UniformType { Descriptor, PushConstant, }; - /** - * @brief Uniform variable block data. 
- * - */ + struct DescriptorData { + VirtualBuffer buffer; + VkDescriptorType type; + VkDescriptorSet set; + unsigned binding; + unsigned size; + unsigned count; + }; + + struct PushConstantData { + unsigned offset; + unsigned size; + }; + struct UniformVariable { std::string name; - UniformVariableType type; - unsigned block_offset; - unsigned block_size; + UniformType type; + union { + DescriptorData descriptor; + PushConstantData push_constant; + }; + }; + + // Shared variable allocation information + union SharedVariable { + VirtualBuffer descriptor_buffer; + unsigned push_constant_offset; }; - /** - * @brief Descriptor allocation result. - * - */ struct DescriptorAllocation { - VkDescriptorSet set; + VkDescriptorSet descriptor_set; std::vector uniforms; }; - /** - * @brief Push constant allocation result. - * - */ struct PushConstantAllocation { Uniform uniform; VkPushConstantRange range; unsigned block_offset; }; - /** - * @brief Uniform shader variable registry. - * - */ class UniformRegistry { VkDevice _device; VkDescriptorPool _pool; - Buffer _uniform_buffer; VirtualMemory _push_constant_buffer; - std::unordered_map _shared_offsets; + std::unordered_map _shared; SparseArray _variables; + VirtualBuffer + allocate_uniform_buffer(VkDescriptorSet descriptor_set, DescriptorBinding &binding, MemoryPool &memory); + public: UniformRegistry(VkDevice device, const PhysicalDevice &physical, VkCommandPool transfer_pool); UniformRegistry() = default; - /** - * @brief Reserve memory for uniforms from a descriptor set. - * - * @param set - * @return DescriptorAllocation - */ - DescriptorAllocation allocate(const DescriptorSet &set); - - /** - * @brief Reserve memory for uniforms from a push constant. - * - * @param push_constant - * @return PushConstantAllocation - */ + DescriptorAllocation allocate(const DescriptorSet &set, MemoryPool &memory); + PushConstantAllocation allocate(const PushConstant &push_constant); - /** - * @brief Get a uniform variable. - * - * @param uniform - * @return const UniformVariable& - */ const UniformVariable &get(Uniform uniform); - /** - * @brief Get the data pointer to a push constant variable - * - * @param block_offset - * @return void* - */ void *get_push_constant_data(unsigned block_offset); - /** - * @brief Write a value to a uniform variable. - * - * @param uniform - * @param data - */ - void write(Uniform uniform, void *data); - - /** - * @brief Free a uniform variable. - * - * @param uniform - */ - void free(Uniform uniform); - - /** - * @brief Destroy all uniform allocations. 
- * - */ - void destroy(); + void write(Uniform uniform, void *data, unsigned index, unsigned count); + + void bind(Uniform uniform, const TextureInstance &texture, unsigned index); + + void free(Uniform uniform, MemoryPool &memory); + + void destroy(MemoryPool &memory); }; } // namespace Dynamo::Graphics::Vulkan \ No newline at end of file diff --git a/src/Graphics/Vulkan/Utils.cpp b/src/Graphics/Vulkan/Utils.cpp index 20f1297..98334c9 100644 --- a/src/Graphics/Vulkan/Utils.cpp +++ b/src/Graphics/Vulkan/Utils.cpp @@ -477,7 +477,56 @@ namespace Dynamo::Graphics::Vulkan { } } - void VkResult_log(const std::string &op_message, VkResult result) { + const char *VkImageLayout_string(VkImageLayout layout) { + switch (layout) { + case VK_IMAGE_LAYOUT_UNDEFINED: + return "VK_IMAGE_LAYOUT_UNDEFINED"; + case VK_IMAGE_LAYOUT_GENERAL: + return "VK_IMAGE_LAYOUT_GENERAL"; + case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL: + return "VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL"; + case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL: + return "VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL"; + case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL: + return "VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL"; + case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL: + return "VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL"; + case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL: + return "VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL"; + case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL: + return "VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL"; + case VK_IMAGE_LAYOUT_PREINITIALIZED: + return "VK_IMAGE_LAYOUT_PREINITIALIZED"; + case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL: + return "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL"; + case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL: + return "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL"; + case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL: + return "VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL"; + case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL: + return "VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL"; + case VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL: + return "VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL"; + case VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL: + return "VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL"; + case VK_IMAGE_LAYOUT_PRESENT_SRC_KHR: + return "VK_IMAGE_LAYOUT_PRESENT_SRC_KHR"; + case VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR: + return "VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR"; + case VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT: + return "VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT"; + case VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR: + return "VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR"; + case VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL_KHR: + return "VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL_KHR"; + case VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR: + return "VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR"; + case VK_IMAGE_LAYOUT_MAX_ENUM: + return "VK_IMAGE_LAYOUT_MAX_ENUM"; + } + } + + void VkResult_check(const std::string &op_message, VkResult result) { if (result != VK_SUCCESS) { Log::error("Graphics::Vulkan {}: {}", op_message, VkResult_string(result)); } @@ -516,6 +565,41 @@ namespace Dynamo::Graphics::Vulkan { } } + VkFormat convert_texture_format(TextureFormat format) { + switch (format) { + case TextureFormat::F32_R_Norm: + return VK_FORMAT_R32_SFLOAT; + case TextureFormat::U8_RGB_Norm: + return VK_FORMAT_R8G8B8_UNORM; + case TextureFormat::U8_RGBA_Norm: + return VK_FORMAT_R8G8B8A8_UNORM; + } + } + + VkFilter convert_texture_filter(TextureFilter 
filter) { + switch (filter) { + case TextureFilter::Nearest: + return VK_FILTER_NEAREST; + case TextureFilter::Linear: + return VK_FILTER_LINEAR; + } + } + + VkSamplerAddressMode convert_texture_address_mode(TextureAddressMode address_mode) { + switch (address_mode) { + case TextureAddressMode::Repeat: + return VK_SAMPLER_ADDRESS_MODE_REPEAT; + case TextureAddressMode::RepeatMirror: + return VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT; + case TextureAddressMode::Clamp: + return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE; + case TextureAddressMode::ClampMirror: + return VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE; + case TextureAddressMode::ClampBorder: + return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER; + } + } + VkInstance VkInstance_create(const Display &display) { std::vector extensions = display.get_vulkan_extensions(); @@ -552,7 +636,7 @@ namespace Dynamo::Graphics::Vulkan { Log::info(""); VkInstance instance; - VkResult_log("Create Instance", vkCreateInstance(&instance_info, nullptr, &instance)); + VkResult_check("Create Instance", vkCreateInstance(&instance_info, nullptr, &instance)); return instance; } @@ -573,7 +657,7 @@ namespace Dynamo::Graphics::Vulkan { debugger_info.pfnUserCallback = &VkDebugUtilsMessengerEXT_message_callback; VkDebugUtilsMessengerEXT debugger; - VkResult_log("Create Debugger", vkCreateDebugUtilsMessengerEXT(instance, &debugger_info, nullptr, &debugger)); + VkResult_check("Create Debugger", vkCreateDebugUtilsMessengerEXT(instance, &debugger_info, nullptr, &debugger)); return debugger; } @@ -615,24 +699,24 @@ namespace Dynamo::Graphics::Vulkan { device_info.pNext = &descriptor_indexing; VkDevice device; - VkResult_log("Create Device", vkCreateDevice(physical.handle, &device_info, nullptr, &device)); + VkResult_check("Create Device", vkCreateDevice(physical.handle, &device_info, nullptr, &device)); return device; } - VkDeviceMemory VkDeviceMemory_allocate(VkDevice device, unsigned type_index, unsigned size) { + VkDeviceMemory VkDeviceMemory_allocate(VkDevice device, unsigned type_index, VkDeviceSize size) { VkMemoryAllocateInfo alloc_info = {}; alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO; alloc_info.memoryTypeIndex = type_index; alloc_info.allocationSize = size; VkDeviceMemory memory; - VkResult_log("Allocate Memory", vkAllocateMemory(device, &alloc_info, nullptr, &memory)); + VkResult_check("Allocate Memory", vkAllocateMemory(device, &alloc_info, nullptr, &memory)); return memory; } VkBuffer VkBuffer_create(VkDevice device, VkBufferUsageFlags usage, - unsigned size, + VkDeviceSize size, const QueueFamily *queue_families, unsigned queue_family_count) { std::vector family_indices; @@ -654,32 +738,88 @@ namespace Dynamo::Graphics::Vulkan { } VkBuffer buffer; - VkResult_log("Create Buffer", vkCreateBuffer(device, &buffer_info, nullptr, &buffer)); + VkResult_check("Create Buffer", vkCreateBuffer(device, &buffer_info, nullptr, &buffer)); return buffer; } - void VkBuffer_immediate_copy(VkBuffer src, - VkBuffer dst, - VkQueue queue, - VkCommandBuffer command_buffer, - VkBufferCopy *regions, - unsigned region_count) { - // Copy command - VkCommandBufferBeginInfo begin_info = {}; - begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO; - begin_info.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT; + VkImage VkImage_create(VkDevice device, + const VkExtent3D &extent, + VkFormat format, + VkImageLayout layout, + VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkSampleCountFlagBits samples, + unsigned mip_levels, + unsigned 
array_layers, + const QueueFamily *queue_families, + unsigned queue_family_count) { + std::vector family_indices; + for (unsigned i = 0; i < queue_family_count; i++) { + family_indices.push_back(queue_families[i].index); + } - vkBeginCommandBuffer(command_buffer, &begin_info); - vkCmdCopyBuffer(command_buffer, src, dst, region_count, regions); - vkEndCommandBuffer(command_buffer); + VkImageCreateInfo image_info = {}; + image_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO; + image_info.extent = extent; + image_info.format = format; + image_info.imageType = type; + image_info.usage = usage; + image_info.initialLayout = layout; + image_info.samples = samples; + image_info.mipLevels = mip_levels; + image_info.arrayLayers = array_layers; + image_info.queueFamilyIndexCount = family_indices.size(); + image_info.pQueueFamilyIndices = family_indices.data(); - // Submit the command to the transfer queue - VkSubmitInfo submit_info = {}; - submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO; - submit_info.commandBufferCount = 1; - submit_info.pCommandBuffers = &command_buffer; + if (family_indices.size() > 1) { + image_info.sharingMode = VK_SHARING_MODE_CONCURRENT; + } else { + image_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE; + } - vkQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE); + VkImage image; + VkResult_check("Create Image", vkCreateImage(device, &image_info, nullptr, &image)); + return image; + } + + void VkImage_transition_layout(VkImage image, + VkCommandBuffer command_buffer, + VkFormat format, + VkImageLayout prev, + VkImageLayout next, + const VkImageSubresourceRange &subresources) { + VkImageMemoryBarrier barrier = {}; + barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER; + barrier.image = image; + barrier.oldLayout = prev; + barrier.newLayout = next; + barrier.subresourceRange = subresources; + barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; + barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; + + VkPipelineStageFlags src_stage = 0; + VkPipelineStageFlags dst_stage = 0; + + if (prev == VK_IMAGE_LAYOUT_UNDEFINED && next == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL) { + barrier.srcAccessMask = 0; + barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT; + + src_stage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT; + dst_stage = VK_PIPELINE_STAGE_TRANSFER_BIT; + } else if (prev == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL && next == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL) { + barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT; + barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT; + + src_stage = VK_PIPELINE_STAGE_TRANSFER_BIT; + dst_stage = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; + } else { + Log::error("Graphics::Vulkan unsupported image layout transition: {} -> {}", + VkImageLayout_string(prev), + VkImageLayout_string(next)); + } + + vkCmdPipelineBarrier(command_buffer, src_stage, dst_stage, 0, 0, nullptr, 0, nullptr, 1, &barrier); } VkImageView VkImageView_create(VkDevice device, @@ -697,10 +837,44 @@ namespace Dynamo::Graphics::Vulkan { view_info.subresourceRange = subresources; VkImageView view; - VkResult_log("Create ImageView", vkCreateImageView(device, &view_info, nullptr, &view)); + VkResult_check("Create ImageView", vkCreateImageView(device, &view_info, nullptr, &view)); return view; } + VkSampler VkSampler_create(VkDevice device, + VkSamplerAddressMode address_mode_u, + VkSamplerAddressMode address_mode_v, + VkSamplerAddressMode address_mode_w, + VkFilter mag_filter, + VkFilter min_filter, + VkBorderColor border_color, + float max_anisotropy) { + VkSamplerCreateInfo sampler_info 
= {}; + sampler_info.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO; + sampler_info.addressModeU = address_mode_u; + sampler_info.addressModeV = address_mode_v; + sampler_info.addressModeW = address_mode_w; + sampler_info.magFilter = mag_filter; + sampler_info.minFilter = min_filter; + sampler_info.borderColor = border_color; + sampler_info.maxAnisotropy = max_anisotropy; + sampler_info.anisotropyEnable = VK_TRUE; + sampler_info.unnormalizedCoordinates = VK_FALSE; + + // TODO + sampler_info.compareEnable = VK_FALSE; + sampler_info.compareOp = VK_COMPARE_OP_ALWAYS; + + sampler_info.mipmapMode = VK_SAMPLER_MIPMAP_MODE_LINEAR; + sampler_info.mipLodBias = 0.0f; + sampler_info.minLod = 0.0f; + sampler_info.maxLod = 0.0f; + + VkSampler sampler; + VkResult_check("Create Sampler", vkCreateSampler(device, &sampler_info, nullptr, &sampler)); + return sampler; + } + VkDescriptorSetLayout VkDescriptorSetLayout_create(VkDevice device, const VkDescriptorSetLayoutBinding *bindings, unsigned binding_count) { @@ -710,8 +884,8 @@ namespace Dynamo::Graphics::Vulkan { layout_info.pBindings = bindings; VkDescriptorSetLayout vk_layout; - VkResult_log("Create Descriptor Set Layout", - vkCreateDescriptorSetLayout(device, &layout_info, nullptr, &vk_layout)); + VkResult_check("Create Descriptor Set Layout", + vkCreateDescriptorSetLayout(device, &layout_info, nullptr, &vk_layout)); return vk_layout; } @@ -728,7 +902,7 @@ namespace Dynamo::Graphics::Vulkan { layout_info.pPushConstantRanges = push_constant_ranges; VkPipelineLayout layout; - VkResult_log("Create Pipeline Layout", vkCreatePipelineLayout(device, &layout_info, nullptr, &layout)); + VkResult_check("Create Pipeline Layout", vkCreatePipelineLayout(device, &layout_info, nullptr, &layout)); return layout; } @@ -739,7 +913,7 @@ namespace Dynamo::Graphics::Vulkan { shader_info.codeSize = bytecode.size() * sizeof(uint32_t); VkShaderModule shader; - VkResult_log("Create Shader Module", vkCreateShaderModule(device, &shader_info, nullptr, &shader)); + VkResult_check("Create Shader Module", vkCreateShaderModule(device, &shader_info, nullptr, &shader)); return shader; } @@ -759,7 +933,7 @@ namespace Dynamo::Graphics::Vulkan { framebuffer_info.layers = layer_count; VkFramebuffer framebuffer; - VkResult_log("Create Framebuffer", vkCreateFramebuffer(device, &framebuffer_info, nullptr, &framebuffer)); + VkResult_check("Create Framebuffer", vkCreateFramebuffer(device, &framebuffer_info, nullptr, &framebuffer)); return framebuffer; } @@ -770,7 +944,7 @@ namespace Dynamo::Graphics::Vulkan { pool_info.queueFamilyIndex = family.index; VkCommandPool pool; - VkResult_log("Create Command Pool", vkCreateCommandPool(device, &pool_info, nullptr, &pool)); + VkResult_check("Create Command Pool", vkCreateCommandPool(device, &pool_info, nullptr, &pool)); return pool; } @@ -785,7 +959,7 @@ namespace Dynamo::Graphics::Vulkan { alloc_info.level = level; alloc_info.commandBufferCount = count; - VkResult_log("Allocate Command Buffers", vkAllocateCommandBuffers(device, &alloc_info, dst)); + VkResult_check("Allocate Command Buffers", vkAllocateCommandBuffers(device, &alloc_info, dst)); } VkDescriptorPool @@ -798,7 +972,7 @@ namespace Dynamo::Graphics::Vulkan { pool_info.flags = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT; VkDescriptorPool pool; - VkResult_log("Create Descriptor Pool", vkCreateDescriptorPool(device, &pool_info, nullptr, &pool)); + VkResult_check("Create Descriptor Pool", vkCreateDescriptorPool(device, &pool_info, nullptr, &pool)); return pool; } @@ -813,7 +987,27 @@ 
namespace Dynamo::Graphics::Vulkan { alloc_info.descriptorSetCount = count; alloc_info.pSetLayouts = layouts; - VkResult_log("Allocate Descriptor Sets", vkAllocateDescriptorSets(device, &alloc_info, dst)); + VkResult_check("Allocate Descriptor Sets", vkAllocateDescriptorSets(device, &alloc_info, dst)); + } + + void VkCommandBuffer_immediate_start(VkCommandBuffer command_buffer) { + VkCommandBufferBeginInfo begin_info = {}; + begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO; + begin_info.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT; + + vkBeginCommandBuffer(command_buffer, &begin_info); + } + + void VkCommandBuffer_immediate_end(VkCommandBuffer command_buffer, VkQueue queue) { + // Submit the command to the queue + VkSubmitInfo submit_info = {}; + submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO; + submit_info.commandBufferCount = 1; + submit_info.pCommandBuffers = &command_buffer; + + vkEndCommandBuffer(command_buffer); + vkQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE); + vkQueueWaitIdle(queue); } VkFence VkFence_create(VkDevice device) { @@ -822,7 +1016,7 @@ namespace Dynamo::Graphics::Vulkan { fence_info.flags = VK_FENCE_CREATE_SIGNALED_BIT; VkFence fence; - VkResult_log("Create Fence", vkCreateFence(device, &fence_info, nullptr, &fence)); + VkResult_check("Create Fence", vkCreateFence(device, &fence_info, nullptr, &fence)); return fence; } @@ -831,7 +1025,7 @@ namespace Dynamo::Graphics::Vulkan { semaphore_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO; VkSemaphore semaphore; - VkResult_log("Create Semaphore", vkCreateSemaphore(device, &semaphore_info, nullptr, &semaphore)); + VkResult_check("Create Semaphore", vkCreateSemaphore(device, &semaphore_info, nullptr, &semaphore)); return semaphore; } } // namespace Dynamo::Graphics::Vulkan \ No newline at end of file diff --git a/src/Graphics/Vulkan/Utils.hpp b/src/Graphics/Vulkan/Utils.hpp index dd67b55..fb02f89 100644 --- a/src/Graphics/Vulkan/Utils.hpp +++ b/src/Graphics/Vulkan/Utils.hpp @@ -6,160 +6,71 @@ #include #include +#include #include #include namespace Dynamo::Graphics::Vulkan { - /** - * @brief Compute the size of VkFormat - * - * @param format - * @return unsigned - */ unsigned VkFormat_size(VkFormat format); - /** - * @brief Convert VkPhysicalDeviceType to string. - * - * @param type - * @return const char* - */ const char *VkPhysicalDeviceType_string(VkPhysicalDeviceType type); - /** - * @brief Convert VkShaderStageFlagBits to string. - * - * @param stage - * @return const char* - */ const char *VkShaderStageFlagBits_string(VkShaderStageFlagBits stage); - /** - * @brief Convert VkDescriptorType to string. - * - * @param type - * @return const char* - */ const char *VkDescriptorType_string(VkDescriptorType type); - /** - * @brief Convert VkResult to string. - * - * @param result - * @return const char* - */ const char *VkResult_string(VkResult result); - /** - * @brief Log the result of a Vulkan operation. - * - * @param op_message - * @param result - */ - void VkResult_log(const std::string &op_message, VkResult result); - - /** - * @brief Convert Fill to VkPolygonMode. 
- * - * @param fill - * @return VkPolygonMode - */ + const char *VkImageLayout_string(VkImageLayout layout); + + void VkResult_check(const std::string &op_message, VkResult result); + VkPolygonMode convert_fill(Fill fill); - /** - * @brief Convert Cull to VkCullModeFlags - * - * @param cull - * @return VkCullModeFlags - */ VkCullModeFlags convert_cull(Cull cull); - /** - * @brief Convert Topology to VkPrimitiveTopology - * - * @param topology - * @return VkPrimitiveTopology - */ VkPrimitiveTopology convert_topology(Topology topology); - /** - * @brief Create a Vulkan instance. - * - * @param display - * @return VkInstance - */ + VkFormat convert_texture_format(TextureFormat format); + + VkFilter convert_texture_filter(TextureFilter filter); + + VkSamplerAddressMode convert_texture_address_mode(TextureAddressMode address_mode); + VkInstance VkInstance_create(const Display &display); - /** - * @brief Create a Vulkan debug messenger. - * - * @param instance - * @return VkDebugUtilsMessengerEXT - */ VkDebugUtilsMessengerEXT VkDebugUtilsMessengerEXT_create(VkInstance instance); - /** - * @brief Create the Vulkan logical device. - * - * @param physical - * @return VkDevice - */ VkDevice VkDevice_create(const PhysicalDevice &physical); - /** - * @brief Allocate Vulkan device memory. - * - * @param device - * @param type_index - * @param size - * @return VkDeviceMemory - */ - VkDeviceMemory VkDeviceMemory_allocate(VkDevice device, unsigned type_index, unsigned size); - - /** - * @brief Create a Vulkan buffer. - * - * @param device - * @param usage - * @param size - * @param queue_families - * @param queue_family_count - * @return VkBuffer - */ + VkDeviceMemory VkDeviceMemory_allocate(VkDevice device, unsigned type_index, VkDeviceSize size); + VkBuffer VkBuffer_create(VkDevice device, VkBufferUsageFlags usage, - unsigned size, + VkDeviceSize size, const QueueFamily *queue_families, unsigned queue_family_count); - /** - * @brief Submit a command to copy the contents of a Vulkan buffer to another buffer. - * - * @param src - * @param dst - * @param queue - * @param command_buffer - * @param regions - * @param region_count - */ - void VkBuffer_immediate_copy(VkBuffer src, - VkBuffer dst, - VkQueue queue, - VkCommandBuffer command_buffer, - VkBufferCopy *regions, - unsigned region_count); - - /** - * @brief Create a Vulkan image view. - * - * @param device - * @param image - * @param format - * @param type - * @param subresources - * @param swizzle - * @return VkImageView - */ + VkImage VkImage_create(VkDevice device, + const VkExtent3D &extent, + VkFormat format, + VkImageLayout layout, + VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkSampleCountFlagBits samples, + unsigned mip_levels, + unsigned array_layers, + const QueueFamily *queue_families, + unsigned queue_family_count); + + void VkImage_transition_layout(VkImage image, + VkCommandBuffer command_buffer, + VkFormat format, + VkImageLayout prev, + VkImageLayout next, + const VkImageSubresourceRange &subresources); + VkImageView VkImageView_create(VkDevice device, VkImage image, VkFormat format, @@ -167,53 +78,26 @@ namespace Dynamo::Graphics::Vulkan { const VkImageSubresourceRange &subresources, const VkComponentMapping &swizzle = {}); - /** - * @brief Create a Vulkan descriptor set layout. 
- * - * @param device - * @param bindings - * @param binding_count - * @return VkDescriptorSetLayout - */ + VkSampler VkSampler_create(VkDevice device, + VkSamplerAddressMode address_mode_u, + VkSamplerAddressMode address_mode_v, + VkSamplerAddressMode address_mode_w, + VkFilter mag_filter, + VkFilter min_filter, + VkBorderColor border_color, + float max_anisotropy); + VkDescriptorSetLayout VkDescriptorSetLayout_create(VkDevice device, const VkDescriptorSetLayoutBinding *bindings, unsigned binding_count); - /** - * @brief Create a Vulkan pipeline layout. - * - * @param device - * @param descriptor_layouts - * @param descriptor_layout_count - * @param push_constant_ranges - * @param push_constant_range_count - * @return VkPipelineLayout - */ VkPipelineLayout VkPipelineLayout_create(VkDevice device, VkDescriptorSetLayout *descriptor_layouts, unsigned descriptor_layout_count, VkPushConstantRange *push_constant_ranges, unsigned push_constant_range_count); - /** - * @brief Create a Vulkan shader module. - * - * @param device - * @param bytecode - * @return VkShaderModule - */ VkShaderModule VkShaderModule_create(VkDevice device, const std::vector &bytecode); - /** - * @brief Create a Vulkan framebuffer. - * - * @param device - * @param renderpass - * @param extent - * @param views - * @param view_count - * @param layer_count - * @return VkFramebuffer - */ VkFramebuffer VkFramebuffer_create(VkDevice device, VkRenderPass renderpass, const VkExtent2D &extent, @@ -221,70 +105,28 @@ namespace Dynamo::Graphics::Vulkan { unsigned view_count, unsigned layer_count); - /** - * @brief Create a Vulkan command pool. - * - * @param device - * @param family - * @return VkCommandPool - */ VkCommandPool VkCommandPool_create(VkDevice device, QueueFamily family); - /** - * @brief Allocate Vulkan command buffers from a pool. - * - * @param device - * @param pool - * @param level - * @param dst - * @param count - */ void VkCommandBuffer_allocate(VkDevice device, VkCommandPool pool, VkCommandBufferLevel level, VkCommandBuffer *dst, unsigned count); - /** - * @brief Create a Vulkan descriptor pool. - * - * @param device - * @param sizes - * @param size_count - * @param max_sets - * @return VkDescriptorPool - */ VkDescriptorPool VkDescriptorPool_create(VkDevice device, VkDescriptorPoolSize *sizes, unsigned size_count, unsigned max_sets); - /** - * @brief Allocate Vulkan descriptor sets from a pool. - * - * @param device - * @param pool - * @param layouts - * @param dst - * @param count - */ void VkDescriptorSet_allocate(VkDevice device, VkDescriptorPool pool, const VkDescriptorSetLayout *layouts, VkDescriptorSet *dst, unsigned count); - /** - * @brief Create a Vulkan fence. - * - * @param device - * @return VkFence - */ + void VkCommandBuffer_immediate_start(VkCommandBuffer command_buffer); + + void VkCommandBuffer_immediate_end(VkCommandBuffer command_buffer, VkQueue queue); + VkFence VkFence_create(VkDevice device); - /** - * @brief Create a Vulkan semaphore. 
-     *
-     * @param device
-     * @return VkSemaphore
-     */
     VkSemaphore VkSemaphore_create(VkDevice device);
 } // namespace Dynamo::Graphics::Vulkan
diff --git a/src/Utils/Allocator.cpp b/src/Utils/Allocator.cpp
index 82b87b2..82a2e3c 100644
--- a/src/Utils/Allocator.cpp
+++ b/src/Utils/Allocator.cpp
@@ -45,6 +45,7 @@ namespace Dynamo {
     }
 
     std::optional<unsigned> Allocator::reserve(unsigned size, unsigned alignment) {
+        DYN_ASSERT(size > 0);
         for (auto it = _free.begin(); it != _free.end(); it++) {
             Block &block = *it;
 
@@ -74,7 +75,7 @@ namespace Dynamo {
         DYN_ASSERT(_used.count(offset) == 0);
 
         // Return the offset to the allocation and track it
-        _used[offset] = size;
+        _used.emplace(offset, size);
         return offset;
     }
 }
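
Editor's note: the sketch below is not part of the patch. It illustrates how the new texture-binding pieces introduced in this diff are expected to interact, using only the interfaces it adds (TextureRegistry, UniformRegistry, MemoryPool). The include paths, the descriptor-set layout (binding 0 = combined image sampler, binding 1 = uniform buffer, matching the declaration order of DescriptorSet::bindings), and the uniform contents are assumptions for illustration only.

#include <Graphics/Vulkan/MemoryPool.hpp>       // assumed include paths, mirroring the src/ layout
#include <Graphics/Vulkan/TextureRegistry.hpp>
#include <Graphics/Vulkan/UniformRegistry.hpp>

namespace Dynamo::Graphics::Vulkan {
    // Hypothetical helper: upload a texture and wire it into a freshly allocated
    // descriptor set. Every identifier not defined in this patch is an assumption.
    void bind_albedo_example(TextureRegistry &textures,
                             UniformRegistry &uniforms,
                             MemoryPool &memory,
                             const DescriptorSet &set,
                             const TextureDescriptor &descriptor) {
        // Upload texels through a staging buffer, create the view, reuse a cached sampler.
        Texture texture = textures.build(descriptor, memory);

        // Allocate the descriptor set; uniform-buffer bindings receive backing memory here.
        DescriptorAllocation allocation = uniforms.allocate(set, memory);

        // Point the combined-image-sampler binding at the texture (array element 0).
        uniforms.bind(allocation.uniforms[0], textures.get(texture), 0);

        // Write one element into the uniform-buffer binding.
        float exposure = 1.0f; // placeholder uniform value
        uniforms.write(allocation.uniforms[1], &exposure, 0, 1);

        // Teardown: the registries no longer own device memory, so the pool is passed back in.
        for (Uniform uniform : allocation.uniforms) {
            uniforms.free(uniform, memory);
        }
        textures.destroy(texture, memory);
    }
} // namespace Dynamo::Graphics::Vulkan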