// OpenVulkano/openVulkanoCpp/Vulkan/Resources/MemoryPool.cpp
/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/.
*/
#include "MemoryPool.hpp"
#include "ManagedBuffer.hpp"
#include "Vulkan/Device.hpp"
#include "Base/Logger.hpp"
#include "Math/ByteSize.hpp"
namespace OpenVulkano::Vulkan
{
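// Registry of every live memory pool, so the static ReleaseBuffer() can route
// a released buffer back to a pool.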
namespace
{
std::vector<MemoryPool*> MEM_POOLS;
}
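
// Hands a released buffer over to a registered pool; if no pool claims it
// (e.g. all pools have been destroyed), the ManagedBuffer is deleted directly.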
void MemoryPool::ReleaseBuffer(OpenVulkano::Vulkan::ManagedBuffer* buffer)
{
    for (MemoryPool* memPool : MEM_POOLS)
    {
        if (memPool->FreeBuffer(buffer)) return;
    }
    delete buffer;
}
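
// Registers the pool and sets up the free callback that routes released
// buffers back into this pool.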
MemoryPool::MemoryPool()
{
    freeFunction = [this](ManagedBuffer* buffer) { this->FreeBuffer(buffer); };
    MEM_POOLS.push_back(this);
    Logger::RENDER->info("Created gpu memory pool");
}
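
// Removes the pool from the global registry.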
MemoryPool::~MemoryPool()
{
    Utils::Remove(MEM_POOLS, this);
    Logger::RENDER->info("Destroyed gpu memory pool");
}
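
// Binds the pool to a device and creates one deferred-free list per frame buffer.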
void MemoryPool::Init(Device* dev, int bufferCount)
{
    device = dev;
    toFree = decltype(toFree)(bufferCount);
}
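
// Begins a new frame: flushes the buffers that were queued for release the
// last time this frame slot was in flight.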
void MemoryPool::StartFrame(uint64_t bufferId)
{
    currentBuffer = bufferId;
    for (ManagedBuffer* buffer : toFree[currentBuffer])
    {
        DoFree(buffer);
    }
    toFree[currentBuffer].clear();
}
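
// Actually releases a queued buffer: if it reports IsLast(), the Vulkan buffer
// is destroyed and its space reclaimed from the owning allocation; otherwise
// the buffer is kept for recycling.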
void MemoryPool::DoFree(ManagedBuffer* buffer)
{
    if (buffer->IsLast())
    {
        device->device.destroyBuffer(buffer->buffer);
        buffer->allocation->used -= buffer->size;
        // Nothing references the wrapper object anymore, so release it too.
        delete buffer;
    }
    else
    {
        // Keep the buffer around so CreateSharedMemoryBuffer() can recycle it.
        recycleBuffers.push_back(buffer);
    }
}
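
// Queues a buffer for deferred release in the current frame slot. Always
// returns true, so with multiple pools the first registered pool claims every
// buffer handed to ReleaseBuffer().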
bool MemoryPool::FreeBuffer(ManagedBuffer* buffer)
{
    if (buffer) toFree[currentBuffer].push_back(buffer);
    return true;
}
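
// Allocates a new chunk of device memory of the given size and memory type,
// optionally caching it for sub-allocation.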
MemoryAllocation* MemoryPool::CreateMemoryAllocation(size_t size, uint32_t type, bool addToCache)
{
    MemoryAllocation* alloc = new MemoryAllocation(size, type, device->device);
    const vk::MemoryAllocateInfo allocInfo = { size, type };
    alloc->memory = device->device.allocateMemory(allocInfo);
    if (addToCache) allocations.emplace_back(alloc);
    return alloc;
}
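
// Finds a cached allocation of the requested memory type with enough aligned
// free space; creates a new chunk if none fits and createIfAllFull is set.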
MemoryAllocation* MemoryPool::GetFreeMemoryAllocation(size_t size, vk::DeviceSize alignment, uint32_t type, bool createIfAllFull)
{
    MemoryAllocation* alloc = nullptr;
    for (auto& allocation : allocations)
    {
        if (allocation->type == type && allocation->FreeSpace(alignment) >= size)
        {
            alloc = allocation.get();
            break;
        }
    }
    if (!alloc && createIfAllFull)
    {
        // Allocate a fresh 64 MiB chunk, or a larger one if the request itself
        // exceeds the default chunk size.
        alloc = CreateMemoryAllocation(std::max<size_t>(size, 64_MiB), type, true);
    }
    return alloc;
}
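
// Creates a vk::Buffer and binds it to pooled memory at the next suitably
// aligned offset (simple bump allocation).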
MemoryPool::ManagedBufferPtr MemoryPool::CreateBuffer(vk::DeviceSize size, const vk::BufferUsageFlags& usage, const vk::MemoryPropertyFlags& properties)
{
    size = Utils::Align(size, 16);
    vk::BufferCreateInfo bufferCreateInfo = { {}, size, usage, vk::SharingMode::eExclusive };
    vk::Buffer buffer = device->device.createBuffer(bufferCreateInfo);
    const vk::MemoryRequirements memoryRequirements = device->device.getBufferMemoryRequirements(buffer);
    uint32_t memtype = device->GetMemoryType(memoryRequirements.memoryTypeBits, properties);
    if (memoryRequirements.size != size)
    {
        // Recreate the buffer with the size the implementation actually requires.
        Logger::DATA->debug("Memory Requirement Size ({0}) != Size ({1})", memoryRequirements.size, size);
        size = memoryRequirements.size;
        device->device.destroy(buffer);
        bufferCreateInfo.size = size;
        buffer = device->device.createBuffer(bufferCreateInfo);
    }
    MemoryAllocation* allocation = GetFreeMemoryAllocation(size, memoryRequirements.alignment, memtype);
    // Bump-allocate: bind at the next aligned offset and advance the
    // allocation's used marker past this buffer.
    uint32_t offset = Utils::Align(allocation->used, memoryRequirements.alignment);
    device->device.bindBufferMemory(buffer, allocation->memory, offset);
    allocation->used = offset + size;
    return ManagedBufferPtr{ new ManagedBuffer({allocation, offset, size, buffer, usage, properties, nullptr}) };
}
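
// Returns a host-coherent vertex buffer, reusing a recycled buffer of the
// exact same size when one is available.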
MemoryPool::ManagedBufferPtr MemoryPool::CreateSharedMemoryBuffer(size_t size)
{
    if (!recycleBuffers.empty())
    {
        for (ManagedBuffer* buff : recycleBuffers)
        {
            if (buff->size == size)
            {
                Logger::DATA->info("Recycle Buffer");
                Utils::Remove(recycleBuffers, buff);
                return ManagedBufferPtr{ buff };
            }
        }
    }
    return CreateBuffer(size, vk::BufferUsageFlagBits::eVertexBuffer, vk::MemoryPropertyFlagBits::eHostCoherent/* | vk::MemoryPropertyFlagBits::eDeviceLocal*/);
}
}