Support flattening multiple CommandAllocators
This introduces CommandIterator::AcquireCommandBlocks, which accepts a vector of CommandAllocators and flattens them into a single iterable sequence of commands. To support this, CommandAllocator is made movable.

Bug: dawn:809
Change-Id: I3984c243e4bd74568eccba1a8a58ec26324c8ffa
Reviewed-on: https://dawn-review.googlesource.com/c/dawn/+/63822
Commit-Queue: Ken Rockot <rockot@google.com>
Reviewed-by: Austin Eng <enga@chromium.org>
This commit is contained in:
parent 5528d0edd2
commit 4646de90be
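For reference, a minimal usage sketch of the new API, modeled on the AcquireCommandBlocks test added below. CommandAllocator, CommandIterator, and AcquireCommandBlocks are the names introduced or touched by this change; the elided command recording and the per-thread framing are illustrative only.

    using namespace dawn_native;

    // Record commands into several independent allocators (for example, one per thread).
    std::vector<CommandAllocator> allocators(2);
    // ... allocators[i].Allocate<SomeCommand>(SomeCommandId) for each recorded command ...

    // CommandAllocator is now movable, so the whole vector can be handed off at once.
    CommandIterator iterator;
    iterator.AcquireCommandBlocks(std::move(allocators));
    // iterator now walks the commands of allocators[0] followed by those of allocators[1].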
CommandAllocator.cpp

@@ -20,6 +20,7 @@
 #include <algorithm>
 #include <climits>
 #include <cstdlib>
+#include <utility>
 
 namespace dawn_native {
 
@@ -43,22 +44,32 @@ namespace dawn_native {
 
     CommandIterator& CommandIterator::operator=(CommandIterator&& other) {
         ASSERT(IsEmpty());
-        mBlocks = std::move(other.mBlocks);
-        other.Reset();
+        if (!other.IsEmpty()) {
+            mBlocks = std::move(other.mBlocks);
+            other.Reset();
+        }
         Reset();
         return *this;
     }
 
-    CommandIterator::CommandIterator(CommandAllocator&& allocator)
+    CommandIterator::CommandIterator(CommandAllocator allocator)
         : mBlocks(allocator.AcquireBlocks()) {
         Reset();
     }
 
-    CommandIterator& CommandIterator::operator=(CommandAllocator&& allocator) {
+    void CommandIterator::AcquireCommandBlocks(std::vector<CommandAllocator> allocators) {
         ASSERT(IsEmpty());
-        mBlocks = allocator.AcquireBlocks();
+        mBlocks.clear();
+        for (CommandAllocator& allocator : allocators) {
+            CommandBlocks blocks = allocator.AcquireBlocks();
+            if (!blocks.empty()) {
+                mBlocks.reserve(mBlocks.size() + blocks.size());
+                for (BlockDef& block : blocks) {
+                    mBlocks.push_back(std::move(block));
+                }
+            }
+        }
         Reset();
-        return *this;
     }
 
     bool CommandIterator::NextCommandIdInNewBlock(uint32_t* commandId) {
@@ -92,7 +103,7 @@ namespace dawn_native {
            return;
        }
 
-        for (auto& block : mBlocks) {
+        for (BlockDef& block : mBlocks) {
            free(block.block);
        }
        mBlocks.clear();
@@ -114,13 +125,49 @@ namespace dawn_native {
    // - Better block allocation, maybe have Dawn API to say command buffer is going to have size
    //   close to another
 
-    CommandAllocator::CommandAllocator()
-        : mCurrentPtr(reinterpret_cast<uint8_t*>(&mDummyEnum[0])),
-          mEndPtr(reinterpret_cast<uint8_t*>(&mDummyEnum[1])) {
+    CommandAllocator::CommandAllocator() {
+        ResetPointers();
     }
 
     CommandAllocator::~CommandAllocator() {
-        ASSERT(mBlocks.empty());
+        Reset();
+    }
+
+    CommandAllocator::CommandAllocator(CommandAllocator&& other)
+        : mBlocks(std::move(other.mBlocks)), mLastAllocationSize(other.mLastAllocationSize) {
+        other.mBlocks.clear();
+        if (!other.IsEmpty()) {
+            mCurrentPtr = other.mCurrentPtr;
+            mEndPtr = other.mEndPtr;
+        } else {
+            ResetPointers();
+        }
+        other.Reset();
+    }
+
+    CommandAllocator& CommandAllocator::operator=(CommandAllocator&& other) {
+        Reset();
+        if (!other.IsEmpty()) {
+            std::swap(mBlocks, other.mBlocks);
+            mLastAllocationSize = other.mLastAllocationSize;
+            mCurrentPtr = other.mCurrentPtr;
+            mEndPtr = other.mEndPtr;
+        }
+        other.Reset();
+        return *this;
+    }
+
+    void CommandAllocator::Reset() {
+        for (BlockDef& block : mBlocks) {
+            free(block.block);
+        }
+        mBlocks.clear();
+        mLastAllocationSize = kDefaultBaseAllocationSize;
+        ResetPointers();
+    }
+
+    bool CommandAllocator::IsEmpty() const {
+        return mCurrentPtr == reinterpret_cast<const uint8_t*>(&mDummyEnum[0]);
     }
 
     CommandBlocks&& CommandAllocator::AcquireBlocks() {
@@ -173,4 +220,9 @@ namespace dawn_native {
         return true;
     }
 
+    void CommandAllocator::ResetPointers() {
+        mCurrentPtr = reinterpret_cast<uint8_t*>(&mDummyEnum[0]);
+        mEndPtr = reinterpret_cast<uint8_t*>(&mDummyEnum[1]);
+    }
+
 }  // namespace dawn_native
CommandAllocator.h

@@ -75,8 +75,10 @@ namespace dawn_native {
        CommandIterator(CommandIterator&& other);
        CommandIterator& operator=(CommandIterator&& other);
 
-        CommandIterator(CommandAllocator&& allocator);
-        CommandIterator& operator=(CommandAllocator&& allocator);
+        // Shorthand constructor for acquiring CommandBlocks from a single CommandAllocator.
+        explicit CommandIterator(CommandAllocator allocator);
+
+        void AcquireCommandBlocks(std::vector<CommandAllocator> allocators);
 
        template <typename E>
        bool NextCommandId(E* commandId) {
@@ -149,6 +151,15 @@ namespace dawn_native {
        CommandAllocator();
        ~CommandAllocator();
 
+        // NOTE: A moved-from CommandAllocator is reset to its initial empty state.
+        CommandAllocator(CommandAllocator&&);
+        CommandAllocator& operator=(CommandAllocator&&);
+
+        // Frees all blocks held by the allocator and restores it to its initial empty state.
+        void Reset();
+
+        bool IsEmpty() const;
+
        template <typename T, typename E>
        T* Allocate(E commandId) {
            static_assert(sizeof(E) == sizeof(uint32_t), "");
@@ -186,6 +197,9 @@ namespace dawn_native {
        static constexpr size_t kWorstCaseAdditionalSize =
            sizeof(uint32_t) + kMaxSupportedAlignment + alignof(uint32_t) + sizeof(uint32_t);
 
+        // The default value of mLastAllocationSize.
+        static constexpr size_t kDefaultBaseAllocationSize = 2048;
+
        friend CommandIterator;
        CommandBlocks&& AcquireBlocks();
 
@@ -237,19 +251,21 @@ namespace dawn_native {
 
        bool GetNewBlock(size_t minimumSize);
 
+        void ResetPointers();
+
        CommandBlocks mBlocks;
-        size_t mLastAllocationSize = 2048;
+        size_t mLastAllocationSize = kDefaultBaseAllocationSize;
 
+        // Data used for the block range at initialization so that the first call to Allocate sees
+        // there is not enough space and calls GetNewBlock. This avoids having to special case the
+        // initialization in Allocate.
+        uint32_t mDummyEnum[1] = {0};
+
        // Pointers to the current range of allocation in the block. Guaranteed to allow for at
        // least one uint32_t if not nullptr, so that the special kEndOfBlock command id can always
        // be written. Nullptr iff the blocks were moved out.
        uint8_t* mCurrentPtr = nullptr;
        uint8_t* mEndPtr = nullptr;
-
-        // Data used for the block range at initialization so that the first call to Allocate sees
-        // there is not enough space and calls GetNewBlock. This avoids having to special case the
-        // initialization in Allocate.
-        uint32_t mDummyEnum[1] = {0};
    };
 
 }  // namespace dawn_native
EncodingContext.cpp

@@ -48,7 +48,7 @@ namespace dawn_native {
 
    void EncodingContext::MoveToIterator() {
        if (!mWasMovedToIterator) {
-            mIterator = std::move(mAllocator);
+            mIterator = CommandIterator(std::move(mAllocator));
            mWasMovedToIterator = true;
        }
    }
CommandAllocatorTests.cpp

@@ -428,7 +428,7 @@ TEST(CommandAllocator, AllocateDefaultInitializes) {
    iterator.MakeEmptyAsDataWasDestroyed();
 }
 
-// Test that the allcator correctly defaults initalizes data for AllocateData
+// Test that the allocator correctly default-initalizes data for AllocateData
 TEST(CommandAllocator, AllocateDataDefaultInitializes) {
    CommandAllocator allocator;
 
@@ -447,3 +447,57 @@ TEST(CommandAllocator, AllocateDataDefaultInitializes) {
    CommandIterator iterator(std::move(allocator));
    iterator.MakeEmptyAsDataWasDestroyed();
 }
+
+// Tests flattening of multiple CommandAllocators into a single CommandIterator using
+// AcquireCommandBlocks.
+TEST(CommandAllocator, AcquireCommandBlocks) {
+    constexpr size_t kNumAllocators = 2;
+    constexpr size_t kNumCommandsPerAllocator = 2;
+    const uint64_t pipelines[kNumAllocators][kNumCommandsPerAllocator] = {
+        {0xDEADBEEFBEEFDEAD, 0xC0FFEEF00DC0FFEE},
+        {0x1337C0DE1337C0DE, 0xCAFEFACEFACECAFE},
+    };
+    const uint32_t attachmentPoints[kNumAllocators][kNumCommandsPerAllocator] = {{1, 2}, {3, 4}};
+    const uint32_t firsts[kNumAllocators][kNumCommandsPerAllocator] = {{42, 43}, {5, 6}};
+    const uint32_t counts[kNumAllocators][kNumCommandsPerAllocator] = {{16, 32}, {4, 8}};
+
+    std::vector<CommandAllocator> allocators(kNumAllocators);
+    for (size_t j = 0; j < kNumAllocators; ++j) {
+        CommandAllocator& allocator = allocators[j];
+        for (size_t i = 0; i < kNumCommandsPerAllocator; ++i) {
+            CommandPipeline* pipeline = allocator.Allocate<CommandPipeline>(CommandType::Pipeline);
+            pipeline->pipeline = pipelines[j][i];
+            pipeline->attachmentPoint = attachmentPoints[j][i];
+
+            CommandDraw* draw = allocator.Allocate<CommandDraw>(CommandType::Draw);
+            draw->first = firsts[j][i];
+            draw->count = counts[j][i];
+        }
+    }
+
+    CommandIterator iterator;
+    iterator.AcquireCommandBlocks(std::move(allocators));
+    for (size_t j = 0; j < kNumAllocators; ++j) {
+        for (size_t i = 0; i < kNumCommandsPerAllocator; ++i) {
+            CommandType type;
+            bool hasNext = iterator.NextCommandId(&type);
+            ASSERT_TRUE(hasNext);
+            ASSERT_EQ(type, CommandType::Pipeline);
+
+            CommandPipeline* pipeline = iterator.NextCommand<CommandPipeline>();
+            ASSERT_EQ(pipeline->pipeline, pipelines[j][i]);
+            ASSERT_EQ(pipeline->attachmentPoint, attachmentPoints[j][i]);
+
+            hasNext = iterator.NextCommandId(&type);
+            ASSERT_TRUE(hasNext);
+            ASSERT_EQ(type, CommandType::Draw);
+
+            CommandDraw* draw = iterator.NextCommand<CommandDraw>();
+            ASSERT_EQ(draw->first, firsts[j][i]);
+            ASSERT_EQ(draw->count, counts[j][i]);
+        }
+    }
+    CommandType type;
+    ASSERT_FALSE(iterator.NextCommandId(&type));
+    iterator.MakeEmptyAsDataWasDestroyed();
+}