mirror of https://github.com/encounter/SDL.git
Fix bug in and add test coverage for the SDLTest_GenerateRunSeed helper; improve the test harness by printing repro steps for failed tests; improve negative tests for SDL_GetError/SDL_SetError
This commit is contained in:
parent b677d1d883
commit 7a36070a95
@@ -67,12 +67,13 @@ SDLTest_GenerateRunSeed(const int length)
     seed = (char *)SDL_malloc((length + 1) * sizeof(char));
     if (seed == NULL) {
         SDLTest_LogError("SDL_malloc for run seed output buffer failed.");
+        SDL_Error(SDL_ENOMEM);
         return NULL;
     }

     /* Generate a random string of alphanumeric characters */
     SDLTest_RandomInitTime(&randomContext);
-    for (counter = 0; counter < length - 1; ++counter) {
+    for (counter = 0; counter < length; counter++) {
         unsigned int number = SDLTest_Random(&randomContext);
         char ch = (char) (number % (91 - 48)) + 48;
         if (ch >= 58 && ch <= 64) {
@@ -80,7 +81,7 @@ SDLTest_GenerateRunSeed(const int length)
         }
         seed[counter] = ch;
     }
-    seed[counter] = '\0';
+    seed[length] = '\0';

     return seed;
 }
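The two hunks above fix an off-by-one in the seed generator: the loop previously produced only length - 1 characters and wrote the terminator at seed[counter], so asking for an N-character seed returned N-1. A minimal sketch of the expected post-fix behavior (not part of the commit; it reuses the extern declaration the new testautomation case below relies on, and assumes linking against the harness code that defines the helper):

#include <stdio.h>
#include "SDL.h"

/* Hypothetical standalone check: the helper is internal to the harness,
 * so declare it the same way the new test case does. */
extern char *SDLTest_GenerateRunSeed(const int length);

int main(void)
{
    char *seed = SDLTest_GenerateRunSeed(16);   /* request 16 characters */
    if (seed != NULL) {
        /* With the loop running 'length' times and the terminator written
         * at seed[length], the string length is now exactly the requested 16. */
        printf("seed='%s' len=%d\n", seed, (int)SDL_strlen(seed));
        SDL_free(seed);
    }
    return 0;
}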
@@ -141,7 +142,8 @@ SDLTest_GenerateExecKey(char *runSeed, char *suiteName, char *testName, int iter
     entireStringLength = runSeedLength + suiteNameLength + testNameLength + iterationStringLength + 1;
     buffer = (char *)SDL_malloc(entireStringLength);
     if (buffer == NULL) {
-        SDLTest_LogError("SDL_malloc failed to allocate buffer for execKey generation.");
+        SDLTest_LogError("Failed to allocate buffer for execKey generation.");
+        SDL_Error(SDL_ENOMEM);
         return 0;
     }
     SDL_snprintf(buffer, entireStringLength, "%s%s%s%d", runSeed, suiteName, testName, iteration);
@@ -347,7 +349,7 @@ float GetClock()
 }

 /**
- * \brief Execute a test suite using the given run seend and execution key.
+ * \brief Execute a test suite using the given run seed and execution key.
  *
  * The filter string is matched to the suite name (full comparison) to select a single suite,
  * or if no suite matches, it is matched to the test names (full comparison) to select a single test.
@@ -362,6 +364,8 @@ float GetClock()
  */
 int SDLTest_RunSuites(SDLTest_TestSuiteReference *testSuites[], const char *userRunSeed, Uint64 userExecKey, const char *filter, int testIterations)
 {
+    int totalNumberOfTests = 0;
+    int failedNumberOfTests = 0;
     int suiteCounter;
     int testCounter;
     int iterationCounter;
@@ -392,6 +396,7 @@ int SDLTest_RunSuites(SDLTest_TestSuiteReference *testSuites[], const char *user
     Uint32 testSkippedCount = 0;
     Uint32 countSum = 0;
     char *logFormat = (char *)SDLTest_LogSummaryFormat;
+    SDLTest_TestCaseReference **failedTests;

     /* Sanitize test iterations */
     if (testIterations < 1) {
@@ -421,6 +426,27 @@ int SDLTest_RunSuites(SDLTest_TestSuiteReference *testSuites[], const char *user
     /* Log run with fuzzer parameters */
     SDLTest_Log("::::: Test Run /w seed '%s' started\n", runSeed);

+    /* Count the total number of tests */
+    suiteCounter = 0;
+    while (testSuites[suiteCounter]) {
+        testSuite = (SDLTest_TestSuiteReference *)testSuites[suiteCounter];
+        suiteCounter++;
+        testCounter = 0;
+        while (testSuite->testCases[testCounter])
+        {
+            testCounter++;
+            totalNumberOfTests++;
+        }
+    }
+
+    /* Pre-allocate an array for tracking failed tests (potentially all test cases) */
+    failedTests = (SDLTest_TestCaseReference **)SDL_malloc(totalNumberOfTests * sizeof(SDLTest_TestCaseReference *));
+    if (failedTests == NULL) {
+        SDLTest_LogError("Unable to allocate cache for failed tests");
+        SDL_Error(SDL_ENOMEM);
+        return -1;
+    }
+
     /* Initialize filtering */
     if (filter != NULL && filter[0] != '\0') {
         /* Loop over all suites to check if we have a filter match */
@@ -580,6 +606,11 @@ int SDLTest_RunSuites(SDLTest_TestSuiteReference *testSuites[], const char *user
                     break;
             }

+            /* Collect failed test case references for repro-step display */
+            if (testResult == TEST_RESULT_FAILED) {
+                failedTests[failedNumberOfTests] = testCase;
+                failedNumberOfTests++;
+            }
         }
     }

@@ -630,6 +661,15 @@ int SDLTest_RunSuites(SDLTest_TestSuiteReference *testSuites[], const char *user
         SDLTest_LogError((char *)SDLTest_FinalResultFormat, "Run /w seed", runSeed, "Failed");
     }

+    /* Print repro steps for failed tests */
+    if (failedNumberOfTests > 0) {
+        SDLTest_Log("Harness input to repro failures:");
+        for (testCounter = 0; testCounter < failedNumberOfTests; testCounter++) {
+            SDLTest_Log(" --seed %s --filter %s", runSeed, failedTests[testCounter]->name);
+        }
+    }
+    SDL_free(failedTests);
+
     SDLTest_Log("Exit code: %d", runResult);
     return runResult;
 }

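To make the commit's intent concrete: when a run contains failures, the block above emits one line per failed test, built from the " --seed %s --filter %s" format string. An illustrative tail of a failing run (the seed value and test name are hypothetical placeholders, and SDLTest_Log may add its own log prefix):

Harness input to repro failures:
 --seed 7P1R4K2C --filter platform_testSetErrorInvalidInput

Each such line pairs the run seed with the failed test's name, so the exact failing case can be fed back into the harness and rerun deterministically.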
@@ -283,6 +283,7 @@ int platform_testDefaultInit(void *arg)
  */
 int platform_testGetSetClearError(void *arg)
 {
+    int result;
     const char *testError = "Testing";
     char *lastError;
     int len;
@@ -301,8 +302,9 @@ int platform_testGetSetClearError(void *arg)
             "SDL_GetError(): no message expected, len: %i", len);
     }

-    SDL_SetError("%s", testError);
+    result = SDL_SetError("%s", testError);
     SDLTest_AssertPass("SDL_SetError()");
+    SDLTest_AssertCheck(result == -1, "SDL_SetError: expected -1, got: %i", result);
     lastError = (char *)SDL_GetError();
     SDLTest_AssertCheck(lastError != NULL,
         "SDL_GetError() != NULL");
@@ -333,12 +335,14 @@ int platform_testGetSetClearError(void *arg)
  */
 int platform_testSetErrorEmptyInput(void *arg)
 {
+    int result;
     const char *testError = "";
     char *lastError;
     int len;

-    SDL_SetError("%s", testError);
+    result = SDL_SetError("%s", testError);
     SDLTest_AssertPass("SDL_SetError()");
+    SDLTest_AssertCheck(result == -1, "SDL_SetError: expected -1, got: %i", result);
     lastError = (char *)SDL_GetError();
     SDLTest_AssertCheck(lastError != NULL,
         "SDL_GetError() != NULL");
@@ -369,7 +373,8 @@ int platform_testSetErrorEmptyInput(void *arg)
  */
 int platform_testSetErrorInvalidInput(void *arg)
 {
-    const char *testError = NULL;
+    int result;
+    const char *invalidError = NULL;
     const char *probeError = "Testing";
     char *lastError;
     int len;
@@ -379,8 +384,9 @@ int platform_testSetErrorInvalidInput(void *arg)
     SDLTest_AssertPass("SDL_ClearError()");

     /* Check for no-op */
-    SDL_SetError(testError);
+    result = SDL_SetError(invalidError);
     SDLTest_AssertPass("SDL_SetError()");
+    SDLTest_AssertCheck(result == -1, "SDL_SetError: expected -1, got: %i", result);
     lastError = (char *)SDL_GetError();
     SDLTest_AssertCheck(lastError != NULL,
         "SDL_GetError() != NULL");
@@ -397,12 +403,14 @@ int platform_testSetErrorInvalidInput(void *arg)
     }

     /* Set */
-    SDL_SetError(probeError);
+    result = SDL_SetError(probeError);
     SDLTest_AssertPass("SDL_SetError()");
+    SDLTest_AssertCheck(result == -1, "SDL_SetError: expected -1, got: %i", result);

     /* Check for no-op */
-    SDL_SetError(testError);
+    result = SDL_SetError(invalidError);
     SDLTest_AssertPass("SDL_SetError()");
+    SDLTest_AssertCheck(result == -1, "SDL_SetError: expected -1, got: %i", result);
     lastError = (char *)SDL_GetError();
     SDLTest_AssertCheck(lastError != NULL,
         "SDL_GetError() != NULL");
@@ -419,6 +427,30 @@ int platform_testSetErrorInvalidInput(void *arg)
             lastError);
     }

+    /* Reset */
+    SDL_ClearError();
+    SDLTest_AssertPass("SDL_ClearError()");
+
+    /* Set and check */
+    result = SDL_SetError(probeError);
+    SDLTest_AssertPass("SDL_SetError()");
+    SDLTest_AssertCheck(result == -1, "SDL_SetError: expected -1, got: %i", result);
+    lastError = (char *)SDL_GetError();
+    SDLTest_AssertCheck(lastError != NULL,
+        "SDL_GetError() != NULL");
+    if (lastError != NULL)
+    {
+        len = SDL_strlen(lastError);
+        SDLTest_AssertCheck(len == SDL_strlen(probeError),
+            "SDL_GetError(): expected message len %i, was len: %i",
+            SDL_strlen(probeError),
+            len);
+        SDLTest_AssertCheck(SDL_strcmp(lastError, probeError) == 0,
+            "SDL_GetError(): expected message '%s', was message: '%s'",
+            probeError,
+            lastError);
+    }
+
     /* Clean up */
     SDL_ClearError();
     SDLTest_AssertPass("SDL_ClearError()");

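The new assertions in these platform tests encode the SDL error API's return convention: SDL_SetError() stores the message (or leaves the previous one untouched for NULL input) and always returns -1, while SDL_GetError() never returns NULL. A minimal sketch of that contract outside the harness (not part of the commit; assumes an SDL2 development setup, and the error API needs no SDL_Init):

#include <stdio.h>
#include "SDL.h"

int main(void)
{
    /* SDL_SetError() always returns -1 so callers can write 'return SDL_SetError(...)'. */
    int rc = SDL_SetError("%s", "Testing");
    printf("rc=%d message='%s'\n", rc, SDL_GetError());   /* rc=-1 message='Testing' */

    /* SDL_ClearError() resets the message; SDL_GetError() then yields "". */
    SDL_ClearError();
    printf("after clear: '%s'\n", SDL_GetError());
    return 0;
}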
@@ -21,6 +21,39 @@

 /* Test case functions */

+/* Forward declarations for internal harness functions */
+extern char *SDLTest_GenerateRunSeed(const int length);
+
+/**
+ * @brief Calls to SDLTest_GenerateRunSeed()
+ */
+int
+sdltest_generateRunSeed(void *arg)
+{
+    char* result;
+    int i, l;
+
+    for (i = 1; i <= 10; i += 3) {
+        result = SDLTest_GenerateRunSeed((const int)i);
+        SDLTest_AssertPass("Call to SDLTest_GenerateRunSeed()");
+        SDLTest_AssertCheck(result != NULL, "Verify returned value is not NULL");
+        if (result != NULL) {
+            l = SDL_strlen(result);
+            SDLTest_AssertCheck(l == i, "Verify length of returned value is %d, got: %d", i, l);
+            SDL_free(result);
+        }
+    }
+
+    /* Negative cases */
+    for (i = -2; i <= 0; i++) {
+        result = SDLTest_GenerateRunSeed((const int)i);
+        SDLTest_AssertPass("Call to SDLTest_GenerateRunSeed()");
+        SDLTest_AssertCheck(result == NULL, "Verify returned value is NULL");
+    }
+
+    return TEST_COMPLETED;
+}
+
 /**
  * @brief Calls to SDLTest_GetFuzzerInvocationCount()
  */
@@ -1263,11 +1296,14 @@ static const SDLTest_TestCaseReference sdltestTest13 =
 static const SDLTest_TestCaseReference sdltestTest14 =
     { (SDLTest_TestCaseFp)sdltest_randomAsciiStringOfSize, "sdltest_randomAsciiStringOfSize", "Calls to fixed size ASCII string generator", TEST_ENABLED };

+static const SDLTest_TestCaseReference sdltestTest15 =
+    { (SDLTest_TestCaseFp)sdltest_generateRunSeed, "sdltest_generateRunSeed", "Checks internal harness function SDLTest_GenerateRunSeed", TEST_ENABLED };
+
 /* Sequence of SDL_test test cases */
 static const SDLTest_TestCaseReference *sdltestTests[] = {
     &sdltestTest1, &sdltestTest2, &sdltestTest3, &sdltestTest4, &sdltestTest5, &sdltestTest6,
     &sdltestTest7, &sdltestTest8, &sdltestTest9, &sdltestTest10, &sdltestTest11, &sdltestTest12,
-    &sdltestTest13, &sdltestTest14, NULL
+    &sdltestTest13, &sdltestTest14, &sdltestTest15, NULL
 };

 /* SDL_test test suite (global) */
