Merge buffer creation into BufferManager.
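
BufferManager now owns a fixed pool of Buffer slots allocated up front in Init(); unused slots are chained into a free list by storing the next free index inside the slot itself (via Recast). Commit_() pops the head of that list instead of growing an eastl::vector, Allocate() wraps StorageBuffer creation, and callers such as LightManager go through RenderResourceManager::CreateStorageBuffer(), which allocates and registers the bindless descriptor write in one call.

A minimal standalone sketch of the free-list scheme, using illustrative names and a separate link field where the engine overlays the link onto the Buffer storage itself:

    #include <cassert>
    #include <cstdint>
    #include <vector>

    constexpr uint32_t kInvalid = ~0u;

    struct Slot
    {
        uint32_t next = kInvalid; // free-list link while the slot is unused
        bool     live = false;
    };

    struct Pool
    {
        std::vector<Slot> slots;
        uint32_t          freeHead = kInvalid;

        void Init(uint32_t capacity) // assumes capacity >= 1
        {
            slots.assign(capacity, Slot{});
            freeHead = 0;
            // Chaining free heads: slot i points at slot i + 1, the last slot terminates the list.
            for (uint32_t i = 0; i + 1 < capacity; ++i)
                slots[i].next = i + 1;
            slots[capacity - 1].next = kInvalid;
        }

        uint32_t Allocate()
        {
            assert(freeHead != kInvalid && "Out of buffers");
            const uint32_t index = freeHead;
            freeHead = slots[index].next; // pop the head in O(1)
            slots[index].live = true;
            return index;
        }

        void Release(uint32_t index)
        {
            slots[index].live = false;
            slots[index].next = freeHead; // push the slot back onto the list
            freeHead = index;
        }
    };

    int main()
    {
        Pool pool;
        pool.Init(4);
        const uint32_t a = pool.Allocate(); // 0
        const uint32_t b = pool.Allocate(); // 1
        pool.Release(a);
        const uint32_t c = pool.Allocate();
        assert(c == a); // released slots are reused first
        (void)b; (void)c;
    }
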
parent 466e4a4093
commit 564f6cc205
@@ -239,12 +239,11 @@ LightManager::Update()
     const u16 requiredBufferCapacity = eastl::min(Cast<u16>(m_Lights.capacity()), MAX_LIGHTS);
     if ((m_GpuBufferCapacity_ & CAPACITY_MASK) < requiredBufferCapacity)
     {
-        StorageBuffer newBuffer;
-        newBuffer.Init(m_ResourceManager->m_Device, requiredBufferCapacity * sizeof m_Lights[0], true, "Light Buffer");
         m_GpuBufferCapacity_ = requiredBufferCapacity | UPDATE_REQUIRED_BIT;
 
         m_ResourceManager->Release(m_MetaInfo.m_LightBuffer);
-        m_MetaInfo.m_LightBuffer = m_ResourceManager->Commit(&newBuffer);
+        m_MetaInfo.m_LightBuffer =
+            m_ResourceManager->CreateStorageBuffer(requiredBufferCapacity * sizeof m_Lights[0], "Light Buffer");
     }
     if (m_GpuBufferCapacity_ & UPDATE_REQUIRED_BIT)
     {
@@ -96,20 +96,39 @@ void
 BufferManager::Init(const u32 maxCapacity)
 {
     m_MaxCapacity = maxCapacity;
-    m_FreeHead = GpuResourceHandle::INVALID_HANDLE;
+    m_FreeHead = 0;
+    m_Buffers = new Buffer[maxCapacity];
+
+    // Chaining Freeheads
+    Buffer *pIter = m_Buffers;
+    for (u32 i = 1; i < m_MaxCapacity; ++i)
+    {
+        *Recast<u32 *>(pIter) = i;
+        ++pIter;
+    }
+    *Recast<u32 *>(pIter) = GpuResourceHandle::INVALID_HANDLE;
 }
 
 BufferHandle
-BufferManager::Commit(StorageBuffer *buffer)
+BufferManager::Allocate(const Device *device, const u32 bufferSize, const cstr name)
+{
+    StorageBuffer sb;
+    sb.Init(device, bufferSize, true, true, name);
+    return Commit_(&sb);
+}
+
+BufferHandle
+BufferManager::Commit_(StorageBuffer *buffer)
 {
     ERROR_IF(!buffer || !buffer->IsValid() || !buffer->IsOwned(), "Buffer must be valid and owned for commital")
         THEN_ABORT(-1);
 
-    if (m_FreeHead != GpuResourceHandle::INVALID_HANDLE)
-    {
-        const u32 index = m_FreeHead;
+    ERROR_IF(m_FreeHead == GpuResourceHandle::INVALID_HANDLE, "Out of buffers")
+        THEN_ABORT(-1);
+
+    const u32 index = m_FreeHead;
 
-        StorageBuffer *allocatedBuffer = &m_Buffers[index];
+    Buffer *allocatedBuffer = &m_Buffers[index];
 
-        assert(!allocatedBuffer->IsValid());
-        m_FreeHead = *Recast<u32 *>(allocatedBuffer);
+    assert(!allocatedBuffer->IsValid());
+    m_FreeHead = *Recast<u32 *>(allocatedBuffer);
@@ -124,29 +143,12 @@ BufferManager::Commit(StorageBuffer *buffer)
     return {index};
 }
 
-    const u32 index = Cast<u32>(m_Buffers.size());
-    if (index < m_MaxCapacity)
-    {
-        StorageBuffer *allocatedBuffer = &m_Buffers.push_back();
-
-        // Ensure it is copyable.
-        static_assert(std::is_trivially_copyable_v<StorageBuffer>);
-        *allocatedBuffer = *buffer;
-
-        buffer->m_Size_ &= ~StorageBuffer::OWNED_BIT;
-
-        return {index};
-    }
-
-    ERROR("Out of Buffers") THEN_ABORT(-1);
-}
-
 StorageBuffer *
 BufferManager::Fetch(const BufferHandle handle)
 {
     assert(!handle.IsInvalid());
 
-    return &m_Buffers[handle.m_Index];
+    return Recast<StorageBuffer *>(&m_Buffers[handle.m_Index]);
 }
 
 void
@@ -154,7 +156,7 @@ BufferManager::Release(const Device *device, const BufferHandle handle)
 {
     assert(!handle.IsInvalid());
 
-    StorageBuffer *allocatedBuffer = &m_Buffers[handle.m_Index];
+    Buffer *allocatedBuffer = &m_Buffers[handle.m_Index];
     allocatedBuffer->Destroy(device);
 
     assert(!allocatedBuffer->IsValid());
@@ -166,10 +168,19 @@ BufferManager::Release(const Device *device, const BufferHandle handle)
 void
 BufferManager::Destroy(const Device *device)
 {
-    for (auto &buffer : m_Buffers)
+    if (!m_Buffers)
     {
-        buffer.Destroy(device);
+        WARN("Double Deletion");
+        return;
     }
+
+    Buffer *pBegin = m_Buffers;
+    const Buffer *pEnd = m_Buffers + m_MaxCapacity;
+    while (pBegin != pEnd)
+    {
+        (pBegin++)->Destroy(device);
+    }
+    delete[] Take(m_Buffers);
 }
 
 StorageTextureHandle
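On the RenderResourceManager side, the old public Commit() becomes the private Commit_() (still used directly for pre-built buffers such as the unified geometry buffer), and the public entry point is now CreateStorageBuffer(), which calls BufferManager::Allocate() and then records the bindless descriptor write, as the next hunks show. An illustrative call-site sketch against the API added in this commit; the caller and the Light type are hypothetical, and the sizes/names are made up:

    // Hypothetical caller; mirrors the LightManager::Update() change above.
    void UploadLights(RenderResourceManager *rm, const Light *lights, const u32 count)
    {
        // Allocation, ownership and descriptor binding all happen inside the manager.
        const BufferHandle handle = rm->CreateStorageBuffer(Cast<u32>(count * sizeof(Light)), "Light Buffer");

        // Callers only deal in handles from here on.
        rm->Write(handle, 0, count * sizeof(Light), lights);

        // Release(BufferHandle) destroys the GPU buffer and returns the slot to the free list.
        rm->Release(handle);
    }
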
@@ -405,9 +416,39 @@ RenderResourceManager::WriteInfo::WriteInfo(vk::BufferView info)
 }
 
 BufferHandle
-RenderResourceManager::Commit(StorageBuffer *storageBuffer)
+RenderResourceManager::Commit_(StorageBuffer *storageBuffer)
 {
-    const BufferHandle handle = m_BufferManager.Commit(storageBuffer);
+    const BufferHandle handle = m_BufferManager.Commit_(storageBuffer);
 
+    m_WriteInfos.emplace_back(vk::DescriptorBufferInfo{
+        .buffer = storageBuffer->m_Buffer,
+        .offset = 0,
+        .range = storageBuffer->GetSize(),
+    });
+
+    m_Writes.push_back({
+        .dstSet = m_DescriptorSet,
+        .dstBinding = BUFFER_BINDING_INDEX,
+        .dstArrayElement = handle.m_Index,
+        .descriptorCount = 1,
+        .descriptorType = vk::DescriptorType::eStorageBuffer,
+        .pBufferInfo = &m_WriteInfos.back().uBufferInfo,
+    });
+
+    m_WriteOwner.emplace_back(HandleType::eBuffer, handle.m_Index);
+
+#if !defined(ASTER_NDEBUG)
+    ++m_CommitedBufferCount;
+#endif
+
+    return handle;
+}
+
+BufferHandle
+RenderResourceManager::CreateStorageBuffer(const u32 bufferSize, const cstr name)
+{
+    auto handle = m_BufferManager.Allocate(m_Device, bufferSize, name);
+    const auto storageBuffer = m_BufferManager.Fetch(handle);
+
     m_WriteInfos.emplace_back(vk::DescriptorBufferInfo{
         .buffer = storageBuffer->m_Buffer,
@@ -748,7 +789,7 @@ RenderResourceManager::RenderResourceManager(Device *device, u16 maxSize, bool u
     m_Device->SetName(m_DescriptorSet, "Bindless Set");
 
     // NOTE: This needs to be synced with the destructor manually.
-    assert(Commit(m_Geometry.m_BackingBuffer.get()).m_Index == UNIFIED_GEOMETRY_DATA_HANDLE_INDEX); // Making an assumption to avoid extra bindings.
+    assert(Commit_(m_Geometry.m_BackingBuffer.get()).m_Index == UNIFIED_GEOMETRY_DATA_HANDLE_INDEX); // Making an assumption to avoid extra bindings.
 }
 
 RenderResourceManager::~RenderResourceManager()
@@ -75,12 +75,13 @@ struct TextureManager
 
 struct BufferManager
 {
-    eastl::vector<StorageBuffer> m_Buffers;
+    Buffer* m_Buffers;
     u32 m_MaxCapacity;
     u32 m_FreeHead;
 
     void Init(u32 maxCapacity);
-    BufferHandle Commit(StorageBuffer *buffer);
+    BufferHandle Allocate(const Device *device, u32 bufferSize, cstr name);
+    BufferHandle Commit_(StorageBuffer *buffer);
     StorageBuffer *Fetch(BufferHandle handle);
     void Release(const Device *device, BufferHandle handle);
     void Destroy(const Device *device);
@@ -178,6 +179,7 @@ struct RenderResourceManager
     SamplerManager m_SamplerManager;
 
     void EraseWrites(u32 handleIndex, HandleType handleType);
+    BufferHandle Commit_(StorageBuffer *storageBuffer); // Commit to GPU and take Ownership
 
 public:
     Device *m_Device;
@@ -198,7 +200,7 @@ struct RenderResourceManager
 
     bool m_UseBufferAddr;
 
-    BufferHandle Commit(StorageBuffer *storageBuffer); // Commit to GPU and take Ownership
+    BufferHandle CreateStorageBuffer(u32 bufferSize, cstr name); // Allocate a new buffer and commit to GPU.
     void Write(BufferHandle handle, usize offset, usize size, const void *data); // Write to buffer
     void Release(BufferHandle handle); // Release and Destroy
     void Release(StorageBuffer *storageBuffer, BufferHandle handle); // Release and Return