More code cleanup
This commit is contained in:
223
openVulkanoCpp/Vulkan/Resources/ResourceManager.cpp
Normal file
223
openVulkanoCpp/Vulkan/Resources/ResourceManager.cpp
Normal file
@@ -0,0 +1,223 @@
|
||||
/*
|
||||
* This Source Code Form is subject to the terms of the Mozilla Public
|
||||
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
* file, You can obtain one at https://mozilla.org/MPL/2.0/.
|
||||
*/
|
||||
|
||||
#include "ResourceManager.hpp"
|
||||
#include "../Context.hpp"
|
||||
#include "../Scene/VulkanShader.hpp"
|
||||
#include "../Scene/VulkanGeometry.hpp"
|
||||
#include "../Scene/VulkanNode.hpp"
|
||||
|
||||
namespace openVulkanoCpp::Vulkan
|
||||
{
|
||||
// Default constructor; all real setup is deferred to Init().
ResourceManager::ResourceManager() = default;
|
||||
|
||||
// Tears down Vulkan resources if Init() was ever called.
// `device` doubles as the "initialized" flag: it is set in Init() and
// cleared in Close(), so a double Close() is avoided here.
ResourceManager::~ResourceManager() noexcept
{
    if (device)
    {
        ResourceManager::Close();
    }
}
|
||||
|
||||
// Sets up per-frame transfer resources: one command pool, one primary
// command buffer and one signal semaphore per in-flight buffer, plus the
// deferred-free lists and the transfer queue handle.
void ResourceManager::Init(Context* context, int buffers)
{
    this->context = context;
    this->device = context->device->device;
    this->buffers = buffers;

    // Needed later to place per-node uniform slots on legal offsets.
    uniformBufferAlignment = context->device->properties.limits.minUniformBufferOffsetAlignment;

    const auto transferQueueIndex = context->device->queueIndices.transfer;

    cmdPools = new vk::CommandPool[buffers];
    cmdBuffers = new vk::CommandBuffer[buffers];
    semaphores = new vk::Semaphore[buffers];
    for (int frame = 0; frame < buffers; ++frame)
    {
        cmdPools[frame] = device.createCommandPool({ {}, transferQueueIndex });
        cmdBuffers[frame] = device.allocateCommandBuffers({ cmdPools[frame], vk::CommandBufferLevel::ePrimary, 1 }).front();
        semaphores[frame] = device.createSemaphore({});
    }
    toFree.resize(buffers);

    transferQueue = device.getQueue(transferQueueIndex, 0);
}
|
||||
|
||||
void ResourceManager::Close()
|
||||
{
|
||||
transferQueue.waitIdle();
|
||||
for (int i = 0; i < buffers; i++)
|
||||
{
|
||||
device.freeCommandBuffers(cmdPools[i], 1, &cmdBuffers[i]);
|
||||
device.destroyCommandPool(cmdPools[0]);
|
||||
}
|
||||
for (auto shader : shaders)
|
||||
{
|
||||
shader->Close();
|
||||
}
|
||||
cmdBuffers = nullptr;
|
||||
cmdPools = nullptr;
|
||||
device = nullptr;
|
||||
}
|
||||
|
||||
// Begins recording upload work for the given frame slot.
// NOTE(review): frameId is used directly as an index into arrays sized by
// `buffers` — callers presumably pass a value already wrapped to
// [0, buffers); confirm, otherwise this reads past the arrays.
void ResourceManager::StartFrame(uint64_t frameId)
{
    currentBuffer = frameId;
    // Release staging buffers that were queued the last time this slot was in flight.
    FreeBuffers();
    // Reset the whole pool rather than the single command buffer, then restart recording.
    device.resetCommandPool(cmdPools[currentBuffer], {});
    cmdBuffers[currentBuffer].begin({ vk::CommandBufferUsageFlagBits::eOneTimeSubmit });
}
|
||||
|
||||
// Ends recording and submits the frame's transfer work.
// Returns the semaphore that will be signaled when the uploads finish, so
// the render submission can wait on it.
vk::Semaphore ResourceManager::EndFrame()
{
    cmdBuffers[currentBuffer].end();
    // 0 wait semaphores / no wait stages, 1 command buffer, 1 signal semaphore.
    vk::SubmitInfo si = { 0, nullptr, nullptr, 1, &cmdBuffers[currentBuffer], 1, &semaphores[currentBuffer] };
    // NOTE(review): the vk::Result returned by this submit overload is
    // ignored — a failed submission would go unnoticed. Consider checking it.
    transferQueue.submit(1, &si, vk::Fence());
    return semaphores[currentBuffer];
}
|
||||
|
||||
void ResourceManager::Resize()
|
||||
{
|
||||
for (auto shader : shaders)
|
||||
{
|
||||
shader->Resize();
|
||||
}
|
||||
}
|
||||
|
||||
// Lazily creates the GPU-side geometry (device-local vertex + index buffers)
// the first time a geometry is encountered. Safe to call concurrently.
void ResourceManager::PrepareGeometry(Scene::Geometry* geometry)
{
    const std::unique_lock lock(mutex);
    if (geometry->renderGeo)
    {
        return; // already prepared
    }

    const vk::DeviceSize vertexBytes = sizeof(Vertex) * geometry->GetVertexCount();
    const vk::DeviceSize indexBytes = Utils::EnumAsInt(geometry->indexType) * geometry->GetIndexCount();
    ManagedBuffer* vertexBuffer = CreateDeviceOnlyBufferWithData(vertexBytes, vk::BufferUsageFlagBits::eVertexBuffer, geometry->GetVertices());
    ManagedBuffer* indexBuffer = CreateDeviceOnlyBufferWithData(indexBytes, vk::BufferUsageFlagBits::eIndexBuffer, geometry->GetIndices());

    VulkanGeometry* vkGeometry = new VulkanGeometry();
    vkGeometry->Init(geometry, vertexBuffer->buffer, indexBuffer->buffer);
    geometry->renderGeo = vkGeometry;
}
|
||||
|
||||
// Lazily creates the Vulkan shader backing a material's shader the first
// time it is encountered. Safe to call concurrently.
void ResourceManager::PrepareMaterial(Scene::Material* material)
{
    const std::unique_lock lock(mutex);
    auto shader = material->shader;
    if (shader->renderShader)
    {
        return; // already prepared
    }
    shader->renderShader = CreateShader(shader);
}
|
||||
|
||||
// Lazily creates the per-node GPU resources (uniform buffer + VulkanNode
// wrapper). Nodes that can update get one uniform slot per swap-chain image
// in host-visible, persistently mapped memory; static nodes get a single
// device-local buffer uploaded once.
void ResourceManager::PrepareNode(Scene::Node* node)
{
    const std::unique_lock lock(mutex);
    if (!node->renderNode)
    {
        UniformBuffer* uBuffer = new UniformBuffer();
        ManagedBuffer* buffer;
        VulkanNode* vkNode;
        // Each per-image slot must start on a minUniformBufferOffsetAlignment boundary.
        const vk::DeviceSize allocSize = Utils::Align(sizeof(glm::mat4x4), uniformBufferAlignment);
        if (node->GetUpdateFrequency() != Scene::UpdateFrequency::Never)
        {
            // Dynamic node: one slot per swap-chain image, kept mapped for CPU writes.
            vkNode = new VulkanNodeDynamic();
            uint32_t imgs = context->swapChain.GetImageCount();
            buffer = CreateBuffer(imgs * allocSize, vk::BufferUsageFlagBits::eUniformBuffer, vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostVisible);
            buffer->Map();
        }
        else
        {
            // Static node: upload the world matrix once into device-local memory.
            vkNode = new VulkanNode();
            buffer = CreateDeviceOnlyBufferWithData(sizeof(glm::mat4), vk::BufferUsageFlagBits::eUniformBuffer, &node->worldMat);
        }
        uBuffer->Init(buffer, allocSize, &context->pipeline.descriptorSetLayout, context->pipeline.pipelineLayout);
        vkNode->Init(node, uBuffer);
        node->renderNode = vkNode;
    }
}
|
||||
|
||||
// Unregisters a shader so Close()/Resize() no longer touch it.
// NOTE(review): this does not Close() or delete the shader — the caller
// keeps ownership; confirm that is the intended contract.
void ResourceManager::RemoveShader(VulkanShader* shader)
{
    Utils::Remove(shaders, shader);
}
|
||||
|
||||
// Defers the release of a buffer until the current frame slot comes around
// again (see StartFrame/FreeBuffers), i.e. once the GPU is done with it.
void ResourceManager::FreeBuffer(ManagedBuffer* buffer)
{
    toFree[currentBuffer].push_back(buffer);
}
|
||||
|
||||
// Actually releases a deferred buffer. If it is the last user of its memory
// region (per IsLast()), the vk::Buffer is destroyed and the allocation's
// usage counter is rolled back; otherwise the buffer is parked for reuse.
// NOTE(review): the ManagedBuffer object itself is never deleted on the
// IsLast() path — looks like a leak; verify against ManagedBuffer ownership.
// NOTE(review): `used -= buffer->size` subtracts the aligned requested size,
// but CreateBuffer added memoryRequirements.size, which can differ — confirm
// the bookkeeping stays consistent.
void ResourceManager::DoFreeBuffer(ManagedBuffer* buffer)
{
    if (buffer->IsLast())
    {
        device.destroyBuffer(buffer->buffer);
        buffer->allocation->used -= buffer->size;
    }
    else
    {
        recycleBuffers.push_back(buffer);
    }
}
|
||||
|
||||
void ResourceManager::FreeBuffers()
|
||||
{
|
||||
for (auto& i : toFree[currentBuffer])
|
||||
{
|
||||
DoFreeBuffer(i);
|
||||
}
|
||||
toFree[currentBuffer].clear();
|
||||
}
|
||||
|
||||
// Creates a device-local buffer and schedules an upload of `data` into it
// via a host-visible staging buffer. The staging buffer is queued for
// release once this frame's transfer submission has completed.
ManagedBuffer* ResourceManager::CreateDeviceOnlyBufferWithData(vk::DeviceSize size, vk::BufferUsageFlagBits usage, void* data)
{
    ManagedBuffer* const deviceBuffer = CreateBuffer(size, usage | vk::BufferUsageFlagBits::eTransferDst, vk::MemoryPropertyFlagBits::eDeviceLocal);
    ManagedBuffer* const staging = CreateBuffer(size, vk::BufferUsageFlagBits::eTransferSrc, vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostVisible);

    // Fill staging, record the GPU-side copy, then defer staging's release.
    staging->Copy(data, size, 0);
    RecordCopy(staging->buffer, deviceBuffer->buffer, size);
    FreeBuffer(staging);

    return deviceBuffer;
}
|
||||
|
||||
// Creates a vk::Buffer sub-allocated from one of the cached memory blocks.
// The buffer is bound at the next properly aligned offset inside the block
// and the block's usage counter is advanced past it.
ManagedBuffer* ResourceManager::CreateBuffer(vk::DeviceSize size, const vk::BufferUsageFlags& usage, const vk::MemoryPropertyFlags& properties)
{
    size = Utils::Align(size, 16);
    const vk::BufferCreateInfo bufferCreateInfo = { {}, size, usage, vk::SharingMode::eExclusive };
    vk::Buffer buffer = device.createBuffer(bufferCreateInfo);
    const vk::MemoryRequirements memoryRequirements = device.getBufferMemoryRequirements(buffer);
    uint32_t memtype = context->device->GetMemoryType(memoryRequirements.memoryTypeBits, properties);
    if (memoryRequirements.size != size) Logger::DATA->warn("Memory Requirement Size ({0}) != Size ({1})", memoryRequirements.size, size);
    // Reserve alignment slack so the aligned offset below still fits in the block.
    MemoryAllocation* allocation = GetFreeMemoryAllocation(memoryRequirements.size + memoryRequirements.alignment, memtype);
    // BUG FIX: the bind offset must be a multiple of memoryRequirements.alignment
    // (Vulkan valid usage for vkBindBufferMemory); previously allocation->used
    // was used unmodified, which is only 16-byte aligned at best.
    const uint32_t offset = static_cast<uint32_t>(Utils::Align(allocation->used, memoryRequirements.alignment));
    device.bindBufferMemory(buffer, allocation->memory, offset);
    // Advance past the padding *and* the driver-reported size.
    allocation->used = offset + memoryRequirements.size;
    return new ManagedBuffer{ allocation, offset, size, buffer, usage, properties, nullptr };
}
|
||||
|
||||
// Allocates a new vk::DeviceMemory block of `size` bytes from memory type
// `type` and wraps it in a MemoryAllocation bookkeeping object. When
// addToCache is set, the block joins the pool searched by
// GetFreeMemoryAllocation().
MemoryAllocation* ResourceManager::CreateMemoryAllocation(size_t size, uint32_t type, bool addToCache)
{
    MemoryAllocation* const newAllocation = new MemoryAllocation(size, type, device);
    newAllocation->memory = device.allocateMemory(vk::MemoryAllocateInfo{ size, type });
    if (addToCache)
    {
        allocations.push_back(newAllocation);
    }
    return newAllocation;
}
|
||||
|
||||
// First-fit search for a cached memory block of the requested type with at
// least `size` bytes free. When nothing fits and createIfAllFull is set, a
// new 256 MiB block is allocated and cached. The winner (if any) is also
// remembered in lastAllocation.
MemoryAllocation* ResourceManager::GetFreeMemoryAllocation(size_t size, uint32_t type, bool createIfAllFull)
{
    MemoryAllocation* found = nullptr;
    for (MemoryAllocation* candidate : allocations)
    {
        const bool typeMatches = candidate->type == type;
        if (typeMatches && candidate->FreeSpace() >= size)
        {
            found = candidate;
            break;
        }
    }
    if (found == nullptr && createIfAllFull)
    {
        found = CreateMemoryAllocation(256 * 1024 * 1024, type, true);
    }
    if (found != nullptr)
    {
        lastAllocation = found;
    }
    return found;
}
|
||||
|
||||
// Builds the Vulkan-side shader for a scene shader, registers it so
// Close()/Resize() reach it, and returns it to the caller.
VulkanShader* ResourceManager::CreateShader(Scene::Shader* shader)
{
    auto* const vkShader = new VulkanShader();
    vkShader->Init(context, shader, this);
    shaders.push_back(vkShader);
    return vkShader;
}
|
||||
}
|
||||
Reference in New Issue
Block a user