@@ -240,7 +240,7 @@ SDLTest_RunTest(SDLTest_TestSuiteReference *testSuite, const SDLTest_TestCaseRef
         return TEST_RESULT_SETUP_FAILURE;
     }
 
-    if (!testCase->enabled && forceTestRun == SDL_FALSE)
+    if (!testCase->enabled && forceTestRun == SDL_FALSE)
     {
         SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Test", testCase->name, "Skipped (Disabled)");
         return TEST_RESULT_SKIPPED;
@@ -391,7 +391,7 @@ int SDLTest_RunSuites(SDLTest_TestSuiteReference *testSuites[], const char *user
     char *suiteFilterName = NULL;
     int testFilter = 0;
     char *testFilterName = NULL;
-    SDL_bool forceTestRun = SDL_FALSE;
+    SDL_bool forceTestRun = SDL_FALSE;
     int testResult = 0;
     int runResult = 0;
     int totalTestFailedCount = 0;
@@ -431,7 +431,7 @@ int SDLTest_RunSuites(SDLTest_TestSuiteReference *testSuites[], const char *user
     /* Log run with fuzzer parameters */
    SDLTest_Log("::::: Test Run /w seed '%s' started\n", runSeed);
 
-    /* Count the total number of tests */
+    /* Count the total number of tests */
     suiteCounter = 0;
     while (testSuites[suiteCounter]) {
         testSuite = testSuites[suiteCounter];
@@ -440,17 +440,17 @@ int SDLTest_RunSuites(SDLTest_TestSuiteReference *testSuites[], const char *user
         while (testSuite->testCases[testCounter])
         {
             testCounter++;
-            totalNumberOfTests++;
-        }
-    }
-
-    /* Pre-allocate an array for tracking failed tests (potentially all test cases) */
-    failedTests = (const SDLTest_TestCaseReference **)SDL_malloc(totalNumberOfTests * sizeof(SDLTest_TestCaseReference *));
-    if (failedTests == NULL) {
-        SDLTest_LogError("Unable to allocate cache for failed tests");
-        SDL_Error(SDL_ENOMEM);
+            totalNumberOfTests++;
+        }
+    }
+
+    /* Pre-allocate an array for tracking failed tests (potentially all test cases) */
+    failedTests = (const SDLTest_TestCaseReference **)SDL_malloc(totalNumberOfTests * sizeof(SDLTest_TestCaseReference *));
+    if (failedTests == NULL) {
+        SDLTest_LogError("Unable to allocate cache for failed tests");
+        SDL_Error(SDL_ENOMEM);
         return -1;
-    }
+    }
 
     /* Initialize filtering */
     if (filter != NULL && filter[0] != '\0') {
@@ -542,7 +542,7 @@ int SDLTest_RunSuites(SDLTest_TestSuiteReference *testSuites[], const char *user
             /* Override 'disabled' flag if we specified a test filter (i.e. force run for debugging) */
             if (testFilter == 1 && !testCase->enabled) {
                 SDLTest_Log("Force run of disabled test since test filter was set");
-                forceTestRun = SDL_TRUE;
+                forceTestRun = SDL_TRUE;
             }
 
             /* Take time - test start */
@@ -571,7 +571,7 @@ int SDLTest_RunSuites(SDLTest_TestSuiteReference *testSuites[], const char *user
             }
 
             SDLTest_Log("Test Iteration %i: execKey %" SDL_PRIu64, iterationCounter, execKey);
-            testResult = SDLTest_RunTest(testSuite, testCase, execKey, forceTestRun);
+            testResult = SDLTest_RunTest(testSuite, testCase, execKey, forceTestRun);
 
             if (testResult == TEST_RESULT_PASSED) {
                 testPassedCount++;
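
For orientation only, not part of the patch: a minimal sketch of how a suite array is typically handed to SDLTest_RunSuites, whose filter handling and forceTestRun flag the hunks above touch. The suite and test names ("Dummy", dummy_test1) are illustrative placeholders.

/* Sketch only; dummy_test1 / "Dummy" are assumed names, not part of this diff. */
#include "SDL_test.h"

static int dummy_test1(void *arg)
{
    (void)arg;
    SDLTest_AssertPass("dummy_test1 executed");
    return TEST_COMPLETED;
}

/* One enabled test case; a TEST_DISABLED case would only run when a
   suite,test filter names it (the forceTestRun path in the hunks above). */
static const SDLTest_TestCaseReference dummyTestCase = {
    dummy_test1, "dummy_test1", "Always passes", TEST_ENABLED
};

static const SDLTest_TestCaseReference *dummyTests[] = { &dummyTestCase, NULL };

static SDLTest_TestSuiteReference dummySuite = { "Dummy", NULL, dummyTests, NULL };

static SDLTest_TestSuiteReference *suites[] = { &dummySuite, NULL };

int main(int argc, char *argv[])
{
    (void)argc;
    (void)argv;
    /* NULL seed and filter plus execKey 0 let the harness pick its own
       fuzzer seed and run every enabled test once; the return value is
       the number of failed tests. */
    return SDLTest_RunSuites(suites, NULL, 0, NULL, 1);
}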