Hash: 00e48c13
Author:
Date: 2022-07-20T17:35:01
Vulkan: Destroy DescriptorPoolHelper when it's unused.

DynamicDescriptorPool keeps an array of DescriptorPoolHelpers. Right now we only grow this array of pools; we never shrink the pool count. This is partly because we never used to release descriptorSets. In the past few CLs, however, we started releasing invalid descriptorSets when a texture/buffer gets deleted or re-specified, which means we can now end up with a pool that holds no valid descriptorSets. This CL adds the ability to actually release a pool once all of its descriptorSets have been released, thus reducing the pool count when a lot of textures have been deleted.

Bug: b/235523746
Change-Id: I2d5047269154cc8ece8305408f08f2ad7c9dd8a6
Reviewed-on: https://chromium-review.googlesource.com/c/angle/angle/+/3780845
Reviewed-by: Yuxin Hu <yuxinhu@google.com>
Reviewed-by: Shahbaz Youssefi <syoussefi@chromium.org>
Commit-Queue: Charlie Lao <cclao@google.com>
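The pool-release mechanism described in the commit message could look roughly like the minimal sketch below. This is an illustrative example only, not ANGLE's actual implementation: the names validDescriptorSetCount, canBeDestroyed, and checkAndDestroyUnusedPool, as well as the simple reference-counting scheme, are assumptions used to show the idea of dropping a DescriptorPoolHelper once none of its descriptorSets are valid anymore.

#include <cstddef>
#include <memory>
#include <vector>

struct DescriptorPoolHelper
{
    // Hypothetical count of descriptor sets still alive in this pool.
    size_t validDescriptorSetCount = 0;

    bool canBeDestroyed() const { return validDescriptorSetCount == 0; }
    void destroy() { /* vkDestroyDescriptorPool(device, pool, nullptr) would go here */ }
};

class DynamicDescriptorPool
{
  public:
    // Called after descriptor sets are released (e.g., when a texture or buffer is
    // deleted or re-specified): sweep the pool array and drop any helper that no
    // longer owns valid descriptor sets, so the pool count can shrink again.
    void checkAndDestroyUnusedPool()
    {
        for (auto it = mDescriptorPools.begin(); it != mDescriptorPools.end();)
        {
            if ((*it)->canBeDestroyed())
            {
                (*it)->destroy();
                it = mDescriptorPools.erase(it);
            }
            else
            {
                ++it;
            }
        }
    }

  private:
    std::vector<std::unique_ptr<DescriptorPoolHelper>> mDescriptorPools;
};

The key point is that the eager release of invalid descriptorSets introduced in the preceding CLs is what makes a zero-count pool possible in the first place; shrinking is then a simple sweep over the pool array. The actual file shown below (Suballocation.cpp) is one of the files touched by this change.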
//
// Copyright 2022 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// Suballocation.cpp:
// Implements class methods for BufferBlock and Suballocation and other related classes
//
// #include "libANGLE/renderer/vulkan/vk_utils.h"
#include "libANGLE/renderer/vulkan/Suballocation.h"
#include "libANGLE/Context.h"
#include "libANGLE/renderer/vulkan/RendererVk.h"
#include "libANGLE/renderer/vulkan/vk_mem_alloc_wrapper.h"
namespace rx
{
namespace vk
{
// BufferBlock implementation.
BufferBlock::BufferBlock() : mMemoryPropertyFlags(0), mSize(0), mMappedMemory(nullptr) {}
BufferBlock::BufferBlock(BufferBlock &&other)
: mVirtualBlock(std::move(other.mVirtualBlock)),
mBuffer(std::move(other.mBuffer)),
mDeviceMemory(std::move(other.mDeviceMemory)),
mMemoryPropertyFlags(other.mMemoryPropertyFlags),
mSize(other.mSize),
mMappedMemory(other.mMappedMemory),
mSerial(other.mSerial),
mCountRemainsEmpty(0)
{}
BufferBlock &BufferBlock::operator=(BufferBlock &&other)
{
std::swap(mVirtualBlock, other.mVirtualBlock);
std::swap(mBuffer, other.mBuffer);
std::swap(mDeviceMemory, other.mDeviceMemory);
std::swap(mMemoryPropertyFlags, other.mMemoryPropertyFlags);
std::swap(mSize, other.mSize);
std::swap(mMappedMemory, other.mMappedMemory);
std::swap(mSerial, other.mSerial);
std::swap(mCountRemainsEmpty, other.mCountRemainsEmpty);
return *this;
}
BufferBlock::~BufferBlock()
{
ASSERT(!mVirtualBlock.valid());
ASSERT(!mBuffer.valid());
ASSERT(!mDeviceMemory.valid());
ASSERT(mDescriptorSetCacheManager.empty());
}
void BufferBlock::destroy(RendererVk *renderer)
{
VkDevice device = renderer->getDevice();
mDescriptorSetCacheManager.destroyKeys(renderer);
if (mMappedMemory)
{
unmap(device);
}
mVirtualBlock.destroy(device);
mBuffer.destroy(device);
mDeviceMemory.destroy(device);
}
angle::Result BufferBlock::init(Context *context,
Buffer &buffer,
vma::VirtualBlockCreateFlags flags,
DeviceMemory &deviceMemory,
VkMemoryPropertyFlags memoryPropertyFlags,
VkDeviceSize size)
{
RendererVk *renderer = context->getRenderer();
ASSERT(!mVirtualBlock.valid());
ASSERT(!mBuffer.valid());
ASSERT(!mDeviceMemory.valid());
mVirtualBlockMutex.init(renderer->isAsyncCommandQueueEnabled());
ANGLE_VK_TRY(context, mVirtualBlock.init(renderer->getDevice(), flags, size));
mBuffer = std::move(buffer);
mDeviceMemory = std::move(deviceMemory);
mMemoryPropertyFlags = memoryPropertyFlags;
mSize = size;
mMappedMemory = nullptr;
mSerial = renderer->getResourceSerialFactory().generateBufferSerial();
return angle::Result::Continue;
}
void BufferBlock::initWithoutVirtualBlock(Context *context,
Buffer &buffer,
DeviceMemory &deviceMemory,
VkMemoryPropertyFlags memoryPropertyFlags,
VkDeviceSize size)
{
RendererVk *renderer = context->getRenderer();
ASSERT(!mVirtualBlock.valid());
ASSERT(!mBuffer.valid());
ASSERT(!mDeviceMemory.valid());
mBuffer = std::move(buffer);
mDeviceMemory = std::move(deviceMemory);
mMemoryPropertyFlags = memoryPropertyFlags;
mSize = size;
mMappedMemory = nullptr;
mSerial = renderer->getResourceSerialFactory().generateBufferSerial();
}
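// Map the block's entire device memory range for CPU access; this presumes the memory
// was allocated with host-visible properties.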
VkResult BufferBlock::map(const VkDevice device)
{
ASSERT(mMappedMemory == nullptr);
return mDeviceMemory.map(device, 0, mSize, 0, &mMappedMemory);
}
void BufferBlock::unmap(const VkDevice device)
{
mDeviceMemory.unmap(device);
mMappedMemory = nullptr;
}
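// The virtual block can be reached from multiple threads when the async command queue is
// enabled, so accesses go through the conditional mutex initialized in init().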
void BufferBlock::free(VmaVirtualAllocation allocation, VkDeviceSize offset)
{
std::lock_guard<ConditionalMutex> lock(mVirtualBlockMutex);
mVirtualBlock.free(allocation, offset);
}
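// Bumps and returns the count of how many times this block has been observed empty;
// callers (e.g., the buffer pool's pruning logic) can use it to decide when a
// persistently empty block should be freed.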
int32_t BufferBlock::getAndIncrementEmptyCounter()
{
return ++mCountRemainsEmpty;
}
void BufferBlock::calculateStats(vma::StatInfo *pStatInfo) const
{
std::lock_guard<ConditionalMutex> lock(mVirtualBlockMutex);
mVirtualBlock.calculateStats(pStatInfo);
}
// BufferSuballocation implementation.
VkResult BufferSuballocation::map(Context *context)
{
return mBufferBlock->map(context->getDevice());
}
// SharedBufferSuballocationGarbage implementation.
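// Destroys the garbage only if the GPU has finished with it, i.e. its lifetime serial is
// no longer in use at |completedSerial|; returns true when the garbage was destroyed.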
bool SharedBufferSuballocationGarbage::destroyIfComplete(RendererVk *renderer,
Serial completedSerial)
{
if (mLifetime.isCurrentlyInUse(completedSerial))
{
return false;
}
mBuffer.destroy(renderer->getDevice());
mSuballocation.destroy(renderer);
mLifetime.release();
return true;
}
} // namespace vk
} // namespace rx