fix(vulkan): defer resource frees until frame fence

This commit is contained in:
Kelsi 2026-03-14 03:25:52 -07:00
parent 6a7071fd64
commit c1b66f73c5
4 changed files with 118 additions and 49 deletions

View file

@@ -57,6 +57,15 @@ public:
void pollUploadBatches(); // Check completed async uploads, free staging buffers
void waitAllUploads(); // Block until all in-flight uploads complete
// Defer resource destruction until it is safe with multiple frames in flight.
//
// This queues work to run after the fence for the *current frame slot* has
// signaled the next time we enter beginFrame() for that slot (i.e. after
// MAX_FRAMES_IN_FLIGHT submissions). Use this for resources that may still
// be referenced by command buffers submitted in the previous frame(s),
// such as descriptor sets and buffers freed during streaming/unload.
void deferAfterFrameFence(std::function<void()>&& fn);
// Accessors
VkInstance getInstance() const { return instance; }
VkPhysicalDevice getPhysicalDevice() const { return physicalDevice; }
@@ -173,6 +182,9 @@ private:
};
std::vector<InFlightBatch> inFlightBatches_;
void runDeferredCleanup(uint32_t frameIndex);
std::vector<std::function<void()>> deferredCleanup_[MAX_FRAMES_IN_FLIGHT];
// Depth buffer (shared across all framebuffers)
VkImage depthImage = VK_NULL_HANDLE;
VkImageView depthImageView = VK_NULL_HANDLE;

View file

@@ -990,42 +990,64 @@ void TerrainRenderer::clear() {
}
chunks.clear();
renderedChunks = 0;
if (materialDescPool) {
vkResetDescriptorPool(vkCtx->getDevice(), materialDescPool, 0);
}
}
void TerrainRenderer::destroyChunkGPU(TerrainChunkGPU& chunk) {
    // Release all GPU resources owned by a terrain chunk.
    //
    // Nothing is destroyed immediately: the buffers, descriptor set, and alpha
    // textures may still be referenced by command buffers submitted in earlier
    // frames. The handles are captured by value, the chunk's fields are nulled
    // right away (so the rest of the renderer treats the chunk as gone), and
    // the real destruction runs via VkContext::deferAfterFrameFence once this
    // frame slot's fence proves the GPU can no longer touch the resources.
    if (!vkCtx) return;
    VkDevice device = vkCtx->getDevice();
    VmaAllocator allocator = vkCtx->getAllocator();

    ::VkBuffer vertexBuffer = chunk.vertexBuffer;
    VmaAllocation vertexAlloc = chunk.vertexAlloc;
    ::VkBuffer indexBuffer = chunk.indexBuffer;
    VmaAllocation indexAlloc = chunk.indexAlloc;
    ::VkBuffer paramsUBO = chunk.paramsUBO;
    VmaAllocation paramsAlloc = chunk.paramsAlloc;
    VkDescriptorPool pool = materialDescPool;
    VkDescriptorSet materialSet = chunk.materialSet;

    // Transfer ownership of the alpha textures out of the chunk's unique_ptrs.
    // VkTexture::~VkTexture is a no-op; destroy() must be called explicitly,
    // so the released raw pointers are destroyed and deleted in the callback.
    std::vector<VkTexture*> alphaTextures;
    alphaTextures.reserve(chunk.ownedAlphaTextures.size());
    for (auto& tex : chunk.ownedAlphaTextures) {
        if (tex) alphaTextures.push_back(tex.release());
    }

    // Null the chunk immediately so a second destroyChunkGPU() call (or any
    // later use of the chunk) cannot double-free the captured handles.
    chunk.vertexBuffer = VK_NULL_HANDLE;
    chunk.vertexAlloc = VK_NULL_HANDLE;
    chunk.indexBuffer = VK_NULL_HANDLE;
    chunk.indexAlloc = VK_NULL_HANDLE;
    chunk.paramsUBO = VK_NULL_HANDLE;
    chunk.paramsAlloc = VK_NULL_HANDLE;
    chunk.materialSet = VK_NULL_HANDLE;
    chunk.ownedAlphaTextures.clear();

    // NOTE(review): vkFreeDescriptorSets here assumes materialDescPool is
    // neither reset nor destroyed before this deferred callback runs —
    // confirm TerrainRenderer::clear()/shutdown ordering guarantees that.
    vkCtx->deferAfterFrameFence([device, allocator, vertexBuffer, vertexAlloc, indexBuffer, indexAlloc,
                                 paramsUBO, paramsAlloc, pool, materialSet, alphaTextures]() {
        if (vertexBuffer) {
            AllocatedBuffer ab{}; ab.buffer = vertexBuffer; ab.allocation = vertexAlloc;
            destroyBuffer(allocator, ab);
        }
        if (indexBuffer) {
            AllocatedBuffer ab{}; ab.buffer = indexBuffer; ab.allocation = indexAlloc;
            destroyBuffer(allocator, ab);
        }
        if (paramsUBO) {
            AllocatedBuffer ab{}; ab.buffer = paramsUBO; ab.allocation = paramsAlloc;
            destroyBuffer(allocator, ab);
        }
        // Return the material descriptor set to the pool so new chunks can
        // reuse it. vkFreeDescriptorSets needs a non-const lvalue array.
        if (materialSet && pool) {
            VkDescriptorSet set = materialSet;
            vkFreeDescriptorSets(device, pool, 1, &set);
        }
        for (VkTexture* tex : alphaTextures) {
            if (!tex) continue;
            tex->destroy(device, allocator);
            delete tex;
        }
    });
}
int TerrainRenderer::getTriangleCount() const {

View file

@@ -55,6 +55,11 @@ void VkContext::shutdown() {
vkDeviceWaitIdle(device);
}
// With the device idle, it is safe to run any deferred per-frame cleanup.
for (uint32_t fi = 0; fi < MAX_FRAMES_IN_FLIGHT; fi++) {
runDeferredCleanup(fi);
}
LOG_WARNING("VkContext::shutdown - destroyImGuiResources...");
destroyImGuiResources();
@@ -103,6 +108,19 @@ void VkContext::shutdown() {
LOG_WARNING("Vulkan context shutdown complete");
}
void VkContext::deferAfterFrameFence(std::function<void()>&& fn) {
    // Queue the callback on the current frame slot; runDeferredCleanup()
    // invokes it once this slot's fence has signaled again (see beginFrame).
    auto& slotQueue = deferredCleanup_[currentFrame];
    slotQueue.emplace_back(std::move(fn));
}
void VkContext::runDeferredCleanup(uint32_t frameIndex) {
    // Execute and discard every cleanup callback queued for this frame slot.
    //
    // The queue is swapped into a local before iterating: a callback may
    // itself call deferAfterFrameFence(), which pushes into
    // deferredCleanup_[currentFrame]. Iterating that same vector while it is
    // being appended to (and possibly reallocated) would invalidate the loop's
    // iterators. Work re-deferred from inside a callback lands in the now
    // empty slot queue and runs the next time this slot's fence signals.
    std::vector<std::function<void()>> pending;
    pending.swap(deferredCleanup_[frameIndex]);
    for (auto& fn : pending) {
        if (fn) fn();
    }
}
bool VkContext::createInstance(SDL_Window* window) {
// Get required SDL extensions
unsigned int sdlExtCount = 0;
@@ -1349,6 +1367,9 @@ VkCommandBuffer VkContext::beginFrame(uint32_t& imageIndex) {
return VK_NULL_HANDLE;
}
// Any work queued for this frame slot is now guaranteed to be unused by the GPU.
runDeferredCleanup(currentFrame);
// Acquire next swapchain image
VkResult result = vkAcquireNextImageKHR(device, swapchain, UINT64_MAX,
frame.imageAvailableSemaphore, VK_NULL_HANDLE, &imageIndex);

View file

@@ -1029,10 +1029,6 @@ void WaterRenderer::clear() {
destroyWaterMesh(surface);
}
surfaces.clear();
if (vkCtx && materialDescPool) {
vkResetDescriptorPool(vkCtx->getDevice(), materialDescPool, 0);
}
}
// ==============================================================
@@ -1358,27 +1354,45 @@ void WaterRenderer::createWaterMesh(WaterSurface& surface) {
void WaterRenderer::destroyWaterMesh(WaterSurface& surface) {
    // Release the GPU resources of one water surface.
    //
    // Mirrors TerrainRenderer::destroyChunkGPU: nothing is destroyed
    // immediately because in-flight command buffers from earlier frames may
    // still reference these objects. Handles are captured by value, the
    // surface's fields are nulled right away, and the real destruction is
    // deferred via VkContext::deferAfterFrameFence until this frame slot's
    // fence guarantees the GPU is done with them.
    if (!vkCtx) return;
    VkDevice device = vkCtx->getDevice();
    VmaAllocator allocator = vkCtx->getAllocator();

    ::VkBuffer vertexBuffer = surface.vertexBuffer;
    VmaAllocation vertexAlloc = surface.vertexAlloc;
    ::VkBuffer indexBuffer = surface.indexBuffer;
    VmaAllocation indexAlloc = surface.indexAlloc;
    ::VkBuffer materialUBO = surface.materialUBO;
    VmaAllocation materialAlloc = surface.materialAlloc;
    VkDescriptorPool pool = materialDescPool;
    VkDescriptorSet materialSet = surface.materialSet;

    // Null the surface immediately so a repeated destroyWaterMesh() call
    // cannot double-free the captured handles.
    surface.vertexBuffer = VK_NULL_HANDLE;
    surface.vertexAlloc = VK_NULL_HANDLE;
    surface.indexBuffer = VK_NULL_HANDLE;
    surface.indexAlloc = VK_NULL_HANDLE;
    surface.materialUBO = VK_NULL_HANDLE;
    surface.materialAlloc = VK_NULL_HANDLE;
    surface.materialSet = VK_NULL_HANDLE;

    // NOTE(review): the deferred vkFreeDescriptorSets assumes materialDescPool
    // is not reset/destroyed before the callback runs — verify against
    // WaterRenderer::clear()/shutdown ordering.
    vkCtx->deferAfterFrameFence([device, allocator, vertexBuffer, vertexAlloc, indexBuffer, indexAlloc,
                                 materialUBO, materialAlloc, pool, materialSet]() {
        if (vertexBuffer) {
            AllocatedBuffer ab{}; ab.buffer = vertexBuffer; ab.allocation = vertexAlloc;
            destroyBuffer(allocator, ab);
        }
        if (indexBuffer) {
            AllocatedBuffer ab{}; ab.buffer = indexBuffer; ab.allocation = indexAlloc;
            destroyBuffer(allocator, ab);
        }
        if (materialUBO) {
            AllocatedBuffer ab{}; ab.buffer = materialUBO; ab.allocation = materialAlloc;
            destroyBuffer(allocator, ab);
        }
        // Return the material descriptor set to the pool for reuse.
        // vkFreeDescriptorSets needs a non-const lvalue array.
        if (materialSet && pool) {
            VkDescriptorSet set = materialSet;
            vkFreeDescriptorSets(device, pool, 1, &set);
        }
    });
}
// ==============================================================