Add Dawn perf test harness
This patch adds a perf test harness for Dawn and a simple test of buffer upload performance. The test harness is based on ANGLE's perf tests. Because perf tests are parameterized to support multiple test variants, this patch also adds DawnTestWithParams and ParamGenerator to support instantiating tests with additional parameters.

Bug: dawn:208
Change-Id: I60df730e9f9f21a4c29fc21ea1a8315e4fff1aa6
Reviewed-on: https://dawn-review.googlesource.com/c/dawn/+/10340
Reviewed-by: Austin Eng <enga@chromium.org>
Commit-Queue: Austin Eng <enga@chromium.org>
commit ca0eac314b (parent 650859b420), committed by Commit Bot service account
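The sketch below (plain C++, independent of Dawn) illustrates the cartesian-product expansion that the new parameterization performs when DAWN_INSTANTIATE_PERF_TEST_SUITE_P is given one brace-list per parameter dimension. ParamGenerator itself is not part of this diff, and the printed names are inferred from the operator<< overload in BufferUploadPerf.cpp below, so treat them as illustrative rather than the exact gtest-generated names.

// Illustrative only: shows the 4 backends x 2 upload methods = 8 variants
// that the parameterized instantiation below expands to.
#include <iostream>
#include <string>
#include <vector>

int main() {
    std::vector<std::string> backends = {"D3D12", "Metal", "OpenGL", "Vulkan"};
    std::vector<std::string> methods = {"SetSubData", "CreateBufferMapped"};

    // One test instance per (backend, method) pair.
    for (const auto& backend : backends) {
        for (const auto& method : methods) {
            std::cout << "BufferUploadPerf.Run/" << backend << "_" << method << "\n";
        }
    }
}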
src/tests/perf_tests/BufferUploadPerf.cpp (new file, 122 lines)
@@ -0,0 +1,122 @@
// Copyright 2019 The Dawn Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "tests/perf_tests/DawnPerfTest.h"

#include "tests/ParamGenerator.h"
#include "utils/DawnHelpers.h"

#include <cstring>  // memcpy
#include <vector>

namespace {

    constexpr unsigned int kNumIterations = 50;
    constexpr uint32_t kBufferSize = 1024 * 1024;

    enum class UploadMethod {
        SetSubData,
        CreateBufferMapped,
    };

    struct BufferUploadParams : DawnTestParam {
        BufferUploadParams(const DawnTestParam& param, UploadMethod uploadMethod)
            : DawnTestParam(param), uploadMethod(uploadMethod) {
        }

        UploadMethod uploadMethod;
    };

    // Appends the upload method to the parameterized test name, e.g. "_SetSubData".
    std::ostream& operator<<(std::ostream& ostream, const BufferUploadParams& param) {
        ostream << static_cast<const DawnTestParam&>(param);

        switch (param.uploadMethod) {
            case UploadMethod::SetSubData:
                ostream << "_SetSubData";
                break;
            case UploadMethod::CreateBufferMapped:
                ostream << "_CreateBufferMapped";
                break;
        }
        return ostream;
    }

}  // namespace

// Test uploading |kBufferSize| bytes of data |kNumIterations| times.
class BufferUploadPerf : public DawnPerfTestWithParams<BufferUploadParams> {
  public:
    BufferUploadPerf() : DawnPerfTestWithParams(kNumIterations), data(kBufferSize) {
    }
    ~BufferUploadPerf() override = default;

    void SetUp() override;

  private:
    void Step() override;

    dawn::Buffer dst;
    std::vector<uint8_t> data;
};

void BufferUploadPerf::SetUp() {
    DawnPerfTestWithParams<BufferUploadParams>::SetUp();

    dawn::BufferDescriptor desc = {};
    desc.size = kBufferSize;
    desc.usage = dawn::BufferUsage::CopyDst;

    dst = device.CreateBuffer(&desc);
}

void BufferUploadPerf::Step() {
    switch (GetParam().uploadMethod) {
        case UploadMethod::SetSubData: {
            for (unsigned int i = 0; i < kNumIterations; ++i) {
                dst.SetSubData(0, kBufferSize, data.data());
            }
            // Make sure all the SetSubData uploads are flushed.
            queue.Submit(0, nullptr);
        } break;

        case UploadMethod::CreateBufferMapped: {
            dawn::BufferDescriptor desc = {};
            desc.size = kBufferSize;
            desc.usage = dawn::BufferUsage::CopySrc | dawn::BufferUsage::MapWrite;

            dawn::CommandEncoder encoder = device.CreateCommandEncoder();

            for (unsigned int i = 0; i < kNumIterations; ++i) {
                auto result = device.CreateBufferMapped(&desc);
                memcpy(result.data, data.data(), kBufferSize);
                result.buffer.Unmap();
                encoder.CopyBufferToBuffer(result.buffer, 0, dst, 0, kBufferSize);
            }

            dawn::CommandBuffer commands = encoder.Finish();
            queue.Submit(1, &commands);
        } break;
    }

    // Wait for the GPU in this batch of iterations. If we don't wait, we can't
    // properly compute the number of steps to run during calibration. The wait
    // time gets amortized over the kNumIterations.
    WaitForGPU();
}

TEST_P(BufferUploadPerf, Run) {
    RunTest();
}

DAWN_INSTANTIATE_PERF_TEST_SUITE_P(BufferUploadPerf,
                                   {D3D12Backend, MetalBackend, OpenGLBackend, VulkanBackend},
                                   {UploadMethod::SetSubData, UploadMethod::CreateBufferMapped});
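A note on the iteration count above: each Step() performs kNumIterations (50) uploads followed by a single WaitForGPU(), so the fixed cost of the GPU wait is amortized across the 50 uploads. PrintResults() in DawnPerfTest.cpp divides the measured per-step time by iterationsPerStep; for example (illustrative numbers), if one step takes 5 ms of wall time, the reported wall_time is 5 ms / 50 = 100 us per upload.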
src/tests/perf_tests/DawnPerfTest.cpp (new file, 220 lines)
@@ -0,0 +1,220 @@
// Copyright 2019 The Dawn Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "tests/perf_tests/DawnPerfTest.h"

#include "utils/Timer.h"

#include <cstdio>   // printf, fflush
#include <cstdlib>  // strtoul
#include <cstring>  // strcmp, strstr, strchr
#include <iostream>
#include <limits>

namespace {

    DawnPerfTestEnvironment* gTestEnv = nullptr;

    constexpr double kMicroSecondsPerSecond = 1e6;
    constexpr double kNanoSecondsPerSecond = 1e9;

}  // namespace

void InitDawnPerfTestEnvironment(int argc, char** argv) {
    gTestEnv = new DawnPerfTestEnvironment(argc, argv);
    DawnTestEnvironment::SetEnvironment(gTestEnv);
    testing::AddGlobalTestEnvironment(gTestEnv);
}

DawnPerfTestEnvironment::DawnPerfTestEnvironment(int argc, char** argv)
    : DawnTestEnvironment(argc, argv) {
    for (int i = 1; i < argc; ++i) {
        if (strcmp("--calibration", argv[i]) == 0) {
            mIsCalibrating = true;
            continue;
        }

        // Prefix match: the flag is passed as "--override-steps=<n>", so an
        // exact strcmp against "--override-steps" could never match it.
        if (strstr(argv[i], "--override-steps") == argv[i]) {
            const char* value = strchr(argv[i], '=');
            if (value != nullptr) {
                mOverrideStepsToRun = strtoul(value + 1, nullptr, 0);
            }
            continue;
        }

        if (strcmp("-h", argv[i]) == 0 || strcmp("--help", argv[i]) == 0) {
            std::cout << "Additional flags:\n"
                      << "  --calibration: Only run calibration. Calibration allows the perf test"
                         " runner script to save some time.\n"
                      << "  --override-steps=x: Run a fixed number of steps and skip calibration.\n"
                      << std::endl;
            continue;
        }
    }
}

DawnPerfTestEnvironment::~DawnPerfTestEnvironment() = default;

void DawnPerfTestEnvironment::SetUp() {
    DawnTestEnvironment::SetUp();
}

bool DawnPerfTestEnvironment::IsCalibrating() const {
    return mIsCalibrating;
}

unsigned int DawnPerfTestEnvironment::OverrideStepsToRun() const {
    return mOverrideStepsToRun;
}

DawnPerfTestBase::DawnPerfTestBase(DawnTestBase* test, unsigned int iterationsPerStep)
    : mTest(test), mIterationsPerStep(iterationsPerStep), mTimer(utils::CreateTimer()) {
}

DawnPerfTestBase::~DawnPerfTestBase() = default;

void DawnPerfTestBase::AbortTest() {
    mRunning = false;
}

// Inserts a fence into the queue, then blocks (yielding with WaitABit) until
// the GPU signals that all previously submitted work has completed.
void DawnPerfTestBase::WaitForGPU() {
    dawn::FenceDescriptor desc = {};
    desc.initialValue = 0;

    dawn::Fence fence = mTest->queue.CreateFence(&desc);
    mTest->queue.Signal(fence, 1);

    bool done = false;
    fence.OnCompletion(1,
                       [](DawnFenceCompletionStatus status, void* userdata) {
                           ASSERT_EQ(status, DAWN_FENCE_COMPLETION_STATUS_SUCCESS);
                           *reinterpret_cast<bool*>(userdata) = true;
                       },
                       &done);

    while (!done) {
        mTest->WaitABit();
    }
}

void DawnPerfTestBase::RunTest() {
    if (gTestEnv->OverrideStepsToRun() == 0) {
        // Run to compute the approximate number of steps to perform.
        mStepsToRun = std::numeric_limits<unsigned int>::max();

        // Do a warmup run, then a calibration run. Only the second run's
        // timing is used to compute the step count.
        DoRunLoop(kCalibrationRunTimeSeconds);
        DoRunLoop(kCalibrationRunTimeSeconds);

        // Scale the measured step count so that one run lasts approximately
        // |kCalibrationRunTimeSeconds|.
        double scale = kCalibrationRunTimeSeconds / mTimer->GetElapsedTime();
        mStepsToRun = static_cast<unsigned int>(static_cast<double>(mNumStepsPerformed) * scale);

        // Calibration allows the perf test runner script to save some time.
        if (gTestEnv->IsCalibrating()) {
            PrintResult("steps", mStepsToRun, "count", false);
            return;
        }
    } else {
        mStepsToRun = gTestEnv->OverrideStepsToRun();
    }

    // Do another warmup run. Seems to consistently improve results.
    DoRunLoop(kMaximumRunTimeSeconds);

    for (unsigned int trial = 0; trial < kNumTrials; ++trial) {
        DoRunLoop(kMaximumRunTimeSeconds);
        PrintResults();
    }
}

void DawnPerfTestBase::DoRunLoop(double maxRunTime) {
    mNumStepsPerformed = 0;
    mRunning = true;
    mTimer->Start();

    // This loop can be canceled by calling AbortTest().
    while (mRunning) {
        Step();
        if (mRunning) {
            ++mNumStepsPerformed;
            if (mTimer->GetElapsedTime() > maxRunTime) {
                mRunning = false;
            } else if (mNumStepsPerformed >= mStepsToRun) {
                mRunning = false;
            }
        }
    }

    mTimer->Stop();
}

void DawnPerfTestBase::PrintResults() {
    double elapsedTimeSeconds[2] = {
        mTimer->GetElapsedTime(),
        mGPUTimeNs * 1e-9,
    };

    const char* clockNames[2] = {
        "wall_time",
        "gpu_time",
    };

    // If the measured gpu time is non-zero, print that too.
    unsigned int clocksToOutput = mGPUTimeNs > 0 ? 2 : 1;

    for (unsigned int i = 0; i < clocksToOutput; ++i) {
        double secondsPerStep = elapsedTimeSeconds[i] / static_cast<double>(mNumStepsPerformed);
        double secondsPerIteration = secondsPerStep / static_cast<double>(mIterationsPerStep);

        // Give the result a different name (us vs. ns) to ensure separate
        // graphs if a test transitions between the two units.
        if (secondsPerIteration > 1e-3) {
            double microSecondsPerIteration = secondsPerIteration * kMicroSecondsPerSecond;
            PrintResult(clockNames[i], microSecondsPerIteration, "us", true);
        } else {
            double nanoSecPerIteration = secondsPerIteration * kNanoSecondsPerSecond;
            PrintResult(clockNames[i], nanoSecPerIteration, "ns", true);
        }
    }
}

void DawnPerfTestBase::PrintResult(const std::string& trace,
                                   double value,
                                   const std::string& units,
                                   bool important) const {
    const ::testing::TestInfo* const testInfo =
        ::testing::UnitTest::GetInstance()->current_test_info();

    const char* testName = testInfo->name();
    const char* testSuite = testInfo->test_suite_name();

    // The results are printed according to the format specified at
    // [chromium]//build/scripts/slave/performance_log_processor.py
    fflush(stdout);
    printf("%sRESULT %s%s: %s= %f %s\n", important ? "*" : "", testSuite, testName, trace.c_str(),
           value, units.c_str());
    fflush(stdout);
}

void DawnPerfTestBase::PrintResult(const std::string& trace,
                                   unsigned int value,
                                   const std::string& units,
                                   bool important) const {
    const ::testing::TestInfo* const testInfo =
        ::testing::UnitTest::GetInstance()->current_test_info();

    const char* testName = testInfo->name();
    const char* testSuite = testInfo->test_suite_name();

    // The results are printed according to the format specified at
    // [chromium]//build/scripts/slave/performance_log_processor.py
    // Suite name prints before test name, matching the double overload above.
    fflush(stdout);
    printf("%sRESULT %s%s: %s= %u %s\n", important ? "*" : "", testSuite, testName, trace.c_str(),
           value, units.c_str());
    fflush(stdout);
}
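Given the printf format above, each trial emits one line per clock. An illustrative line (the value is invented; note that the "%s%s" format concatenates the suite and test names directly):

*RESULT BufferUploadPerfRun/D3D12_SetSubData: wall_time= 103.500000 us

The leading '*' marks the result as important, and gpu_time lines only appear once mGPUTimeNs is actually measured (see the TODO in DawnPerfTest.h).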
src/tests/perf_tests/DawnPerfTest.h (new file, 113 lines)
@@ -0,0 +1,113 @@
// Copyright 2019 The Dawn Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifndef TESTS_PERFTESTS_DAWNPERFTEST_H_
#define TESTS_PERFTESTS_DAWNPERFTEST_H_

#include "tests/DawnTest.h"

namespace utils {
    class Timer;
}

void InitDawnPerfTestEnvironment(int argc, char** argv);

class DawnPerfTestEnvironment : public DawnTestEnvironment {
  public:
    DawnPerfTestEnvironment(int argc, char** argv);
    ~DawnPerfTestEnvironment();

    void SetUp() override;

    bool IsCalibrating() const;
    unsigned int OverrideStepsToRun() const;

  private:
    // Only run calibration, which allows the perf test runner to save time.
    bool mIsCalibrating = false;

    // If non-zero, overrides the number of steps to run.
    unsigned int mOverrideStepsToRun = 0;
};

// Dawn perf tests call Step() on a derived class to measure its execution
// time. First, a calibration run determines the number of times to call
// Step() so that one run lasts approximately |kCalibrationRunTimeSeconds|.
// Then, Step() is called for the computed number of times, or until
// |kMaximumRunTimeSeconds| is exceeded. |kNumTrials| trials are performed and
// the results and averages per iteration** are printed.
//
// The results are printed according to the format specified at
// [chromium]//build/scripts/slave/performance_log_processor.py
//
// ** The number of iterations a test performs per step should be passed to
// the constructor of DawnPerfTestBase. The reported times are the total time
// divided by (numSteps * iterationsPerStep).
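// Example with illustrative numbers: if the calibration run completes 500
// steps in 1.25 seconds, RunTest() scales the step count to
//   mStepsToRun = 500 * (kCalibrationRunTimeSeconds / 1.25) = 400
// so that each subsequent trial performs roughly one second of work.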
class DawnPerfTestBase {
    static constexpr double kCalibrationRunTimeSeconds = 1.0;
    static constexpr double kMaximumRunTimeSeconds = 10.0;
    static constexpr unsigned int kNumTrials = 3;

  public:
    DawnPerfTestBase(DawnTestBase* test, unsigned int iterationsPerStep);
    virtual ~DawnPerfTestBase();

  protected:
    // Call if the test step was aborted and the test should stop running.
    void AbortTest();

    void WaitForGPU();

    void RunTest();
    void PrintResult(const std::string& trace,
                     double value,
                     const std::string& units,
                     bool important) const;
    void PrintResult(const std::string& trace,
                     unsigned int value,
                     const std::string& units,
                     bool important) const;

  private:
    void DoRunLoop(double maxRunTime);
    void PrintResults();

    virtual void Step() = 0;

    DawnTestBase* mTest;
    bool mRunning = false;
    unsigned int mIterationsPerStep;
    unsigned int mStepsToRun = 0;
    unsigned int mNumStepsPerformed = 0;
    uint64_t mGPUTimeNs = 0;  // TODO(enga): Measure GPU time with timing queries.
    std::unique_ptr<utils::Timer> mTimer;
};

template <typename Params = DawnTestParam>
class DawnPerfTestWithParams : public DawnTestWithParams<Params>, public DawnPerfTestBase {
  protected:
    DawnPerfTestWithParams(unsigned int iterationsPerStep)
        : DawnTestWithParams<Params>(), DawnPerfTestBase(this, iterationsPerStep) {
    }
    ~DawnPerfTestWithParams() override = default;
};

using DawnPerfTest = DawnPerfTestWithParams<>;

#define DAWN_INSTANTIATE_PERF_TEST_SUITE_P(testName, ...)                                        \
    INSTANTIATE_TEST_SUITE_P(                                                                    \
        , testName, ::testing::ValuesIn(MakeParamGenerator<testName::ParamType>(__VA_ARGS__)),   \
        testing::PrintToStringParamName())

#endif  // TESTS_PERFTESTS_DAWNPERFTEST_H_
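For orientation, here is a minimal sketch of a new perf test built on this harness. The test name and its body are hypothetical (not part of this patch), and it assumes MakeParamGenerator accepts a single backend list for the default DawnTestParam case; ParamGenerator itself is not shown in this diff.

// Hypothetical example of using the harness, under the assumptions above.
#include "tests/perf_tests/DawnPerfTest.h"

#include "tests/ParamGenerator.h"

class EmptySubmitPerf : public DawnPerfTest {
  public:
    EmptySubmitPerf() : DawnPerfTest(/*iterationsPerStep=*/1) {
    }

  private:
    void Step() override {
        queue.Submit(0, nullptr);  // time an empty queue submission
        WaitForGPU();
    }
};

TEST_P(EmptySubmitPerf, Run) {
    RunTest();
}

DAWN_INSTANTIATE_PERF_TEST_SUITE_P(EmptySubmitPerf,
                                   {D3D12Backend, MetalBackend, OpenGLBackend, VulkanBackend});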