@@ -19,33 +19,25 @@
   3. This notice may not be removed or altered from any source distribution.
 */

 #include <SDL3/SDL_test.h>
+#include "SDL_test_internal.h"

 #include <stdlib.h> /* Needed for exit() */

-/* Enable to have color in logs */
-#if 1
-#define COLOR_RED "\033[0;31m"
-#define COLOR_GREEN "\033[0;32m"
-#define COLOR_YELLOW "\033[0;93m"
-#define COLOR_BLUE "\033[0;94m"
-#define COLOR_END "\033[0m"
-#else
-#define COLOR_RED ""
-#define COLOR_GREEN ""
-#define COLOR_BLUE ""
-#define COLOR_YELLOW ""
-#define COLOR_END ""
-#endif
-
 /* Invalid test name/description message format */
 #define SDLTEST_INVALID_NAME_FORMAT "(Invalid)"

-/* Log summary message format */
-#define SDLTEST_LOG_SUMMARY_FORMAT "%s Summary: Total=%d " COLOR_GREEN "Passed=%d" COLOR_END " " COLOR_RED "Failed=%d" COLOR_END " " COLOR_BLUE "Skipped=%d" COLOR_END
-#define SDLTEST_LOG_SUMMARY_FORMAT_OK "%s Summary: Total=%d " COLOR_GREEN "Passed=%d" COLOR_END " " COLOR_GREEN "Failed=%d" COLOR_END " " COLOR_BLUE "Skipped=%d" COLOR_END
+static void SDLTest_LogSummary(bool success, const char *name, int total, int passed, int failed, int skipped)
+{
+    SDLTest_LogMessage(success ? SDL_LOG_PRIORITY_INFO : SDL_LOG_PRIORITY_ERROR,
+                       "%s Summary: Total=%d " "%s" "Passed=%d" "%s" " " "%s" "Failed=%d" "%s" " " "%s" "Skipped=%d" "%s",
+                       name, total, COLOR_GREEN, passed, COLOR_END, success ? COLOR_GREEN : COLOR_RED, failed, COLOR_END, COLOR_BLUE, skipped, COLOR_END);
+}

-/* Final result message format */
-#define SDLTEST_FINAL_RESULT_FORMAT COLOR_YELLOW ">>> %s '%s':" COLOR_END " %s\n"
+static void SDLTest_LogFinalResult(bool success, const char *stage, const char *name, const char *color_message, const char *message)
+{
+    SDL_LogPriority priority = success ? SDL_LOG_PRIORITY_INFO : SDL_LOG_PRIORITY_ERROR;
+    SDLTest_LogMessage(priority, "%s>>> %s '%s':" "%s" " " "%s" "%s" "%s", COLOR_YELLOW, stage, name, COLOR_END, color_message ? color_message : "", message, color_message ? COLOR_END : "");
+}

 struct SDLTest_TestSuiteRunner {
     struct
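[Note, not part of the patch] The two static helpers introduced above take over from the removed format-string macros: every summary and final-result line now goes through SDLTest_LogMessage() with an explicit SDL_LogPriority, and the color codes are passed through "%s" arguments instead of being concatenated into the format string at compile time. The short self-contained sketch below illustrates that difference using plain printf(); the concrete color values and the idea that they may be empty at runtime are assumptions about what the new "SDL_test_internal.h" include provides, not something shown in this diff.

    /* Illustration only, plain C, no SDL required. */
    #include <stdio.h>

    #define OLD_COLOR_GREEN "\033[0;32m" /* old style: a string-literal macro */
    #define OLD_COLOR_END   "\033[0m"

    int main(void)
    {
        /* Old approach: colors are pasted into the format at compile time,
         * so the escape codes are emitted unconditionally. */
        printf(OLD_COLOR_GREEN "Passed" OLD_COLOR_END "\n");

        /* New approach: colors are ordinary runtime strings passed through
         * "%s", so they can be swapped for "" when color output is not wanted. */
        const char *color_green = "\033[0;32m"; /* could just as well be "" */
        const char *color_end = "\033[0m";
        printf("%s" "Passed" "%s" "\n", color_green, color_end);
        return 0;
    }
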
@@ -242,7 +234,7 @@ static int SDLTest_RunTest(SDLTest_TestSuiteReference *testSuite, const SDLTest_
     }

     if (!testCase->enabled && forceTestRun == false) {
-        SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Test", testCase->name, "Skipped (Disabled)");
+        SDLTest_LogFinalResult(true, "Test", testCase->name, NULL, "Skipped (Disabled)");
         return TEST_RESULT_SKIPPED;
     }

@@ -259,7 +251,7 @@ static int SDLTest_RunTest(SDLTest_TestSuiteReference *testSuite, const SDLTest_
     if (testSuite->testSetUp) {
         testSuite->testSetUp(&data);
         if (SDLTest_AssertSummaryToTestResult() == TEST_RESULT_FAILED) {
-            SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Suite Setup", testSuite->name, COLOR_RED "Failed" COLOR_END);
+            SDLTest_LogFinalResult(false, "Suite Setup", testSuite->name, COLOR_RED, "Failed");
             return TEST_RESULT_SETUP_FAILURE;
         }
     }
@@ -301,13 +293,13 @@ static int SDLTest_RunTest(SDLTest_TestSuiteReference *testSuite, const SDLTest_
     /* Final log based on test execution result */
     if (testCaseResult == TEST_SKIPPED) {
         /* Test was programmatically skipped */
-        SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Test", testCase->name, COLOR_BLUE "Skipped (Programmatically)" COLOR_END);
+        SDLTest_LogFinalResult(true, "Test", testCase->name, COLOR_BLUE, "Skipped (Programmatically)");
     } else if (testCaseResult == TEST_STARTED) {
         /* Test did not return a TEST_COMPLETED value; assume it failed */
-        SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Test", testCase->name, COLOR_RED "Failed (test started, but did not return TEST_COMPLETED)" COLOR_END);
+ SDLTest_LogFinalResult(false, "Test", testCase->name, COLOR_RED, "Skipped (test started, but did not return TEST_COMPLETED)");
     } else if (testCaseResult == TEST_ABORTED) {
         /* Test was aborted early; assume it failed */
-        SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Test", testCase->name, COLOR_RED "Failed (Aborted)" COLOR_END);
+        SDLTest_LogFinalResult(false, "Test", testCase->name, COLOR_RED, "Failed (Aborted)");
     } else {
         SDLTest_LogAssertSummary();
     }
@@ -572,9 +564,11 @@ int SDLTest_ExecuteTestSuiteRunner(SDLTest_TestSuiteRunner *runner)
         if (suiteFilter == 1 && suiteFilterName && testSuite->name &&
             SDL_strcasecmp(suiteFilterName, testSuite->name) != 0) {
             /* Skip suite */
-            SDLTest_Log("===== Test Suite %i: '%s' " COLOR_BLUE "skipped" COLOR_END "\n",
+            SDLTest_Log("===== Test Suite %i: '%s' " "%s" "skipped" "%s" "\n",
                         suiteCounter,
-                        currentSuiteName);
+                        currentSuiteName,
+                        COLOR_BLUE,
+                        COLOR_END);
         } else {

             int nbTestCases = 0;
@@ -634,10 +628,12 @@ int SDLTest_ExecuteTestSuiteRunner(SDLTest_TestSuiteRunner *runner)
             if (testFilter == 1 && testFilterName && testCase->name &&
                 SDL_strcasecmp(testFilterName, testCase->name) != 0) {
                 /* Skip test */
-                SDLTest_Log("===== Test Case %i.%i: '%s' " COLOR_BLUE "skipped" COLOR_END "\n",
+                SDLTest_Log("===== Test Case %i.%i: '%s' " "%s" "skipped" "%s" "\n",
                             suiteCounter,
                             testCounter,
-                            currentTestName);
+                            currentTestName,
+                            COLOR_BLUE,
+                            COLOR_END);
             } else {
                 /* Override 'disabled' flag if we specified a test filter (i.e. force run for debugging) */
                 if (testFilter == 1 && !testCase->enabled) {
@@ -649,10 +645,12 @@ int SDLTest_ExecuteTestSuiteRunner(SDLTest_TestSuiteRunner *runner)
                 testStartSeconds = GetClock();

                 /* Log test started */
-                SDLTest_Log(COLOR_YELLOW "----- Test Case %i.%i: '%s' started" COLOR_END,
+                SDLTest_Log("%s" "----- Test Case %i.%i: '%s' started" "%s",
+                            COLOR_YELLOW,
                             suiteCounter,
                             testCounter,
-                            currentTestName);
+                            currentTestName,
+                            COLOR_END);
                 if (testCase->description && testCase->description[0] != '\0') {
                     SDLTest_Log("Test Description: '%s'",
                                 (testCase->description) ? testCase->description : SDLTEST_INVALID_NAME_FORMAT);
@@ -703,13 +701,13 @@ int SDLTest_ExecuteTestSuiteRunner(SDLTest_TestSuiteRunner *runner)
                 /* Log final test result */
                 switch (testResult) {
                 case TEST_RESULT_PASSED:
-                    SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Test", currentTestName, COLOR_GREEN "Passed" COLOR_END);
+                    SDLTest_LogFinalResult(true, "Test", currentTestName, COLOR_GREEN, "Passed");
                     break;
                 case TEST_RESULT_FAILED:
-                    SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Test", currentTestName, COLOR_RED "Failed" COLOR_END);
+                    SDLTest_LogFinalResult(false, "Test", currentTestName, COLOR_RED, "Failed");
                     break;
                 case TEST_RESULT_NO_ASSERT:
-                    SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Test", currentTestName, COLOR_BLUE "No Asserts" COLOR_END);
+                    SDLTest_LogFinalResult(false, "Test", currentTestName, COLOR_BLUE, "No Asserts");
                     break;
                 }

@@ -734,11 +732,11 @@ int SDLTest_ExecuteTestSuiteRunner(SDLTest_TestSuiteRunner *runner)
         /* Log summary and final Suite result */
         countSum = testPassedCount + testFailedCount + testSkippedCount;
         if (testFailedCount == 0) {
-            SDLTest_Log(SDLTEST_LOG_SUMMARY_FORMAT_OK, "Suite", countSum, testPassedCount, testFailedCount, testSkippedCount);
-            SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Suite", currentSuiteName, COLOR_GREEN "Passed" COLOR_END);
+            SDLTest_LogSummary(true, "Suite", countSum, testPassedCount, testFailedCount, testSkippedCount);
+            SDLTest_LogFinalResult(true, "Suite", currentSuiteName, COLOR_GREEN, "Passed");
         } else {
-            SDLTest_LogError(SDLTEST_LOG_SUMMARY_FORMAT, "Suite", countSum, testPassedCount, testFailedCount, testSkippedCount);
-            SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Suite", currentSuiteName, COLOR_RED "Failed" COLOR_END);
+            SDLTest_LogSummary(false, "Suite", countSum, testPassedCount, testFailedCount, testSkippedCount);
+            SDLTest_LogFinalResult(false, "Suite", currentSuiteName, COLOR_RED, "Failed");
         }

         SDL_free(arrayTestCases);
@@ -761,19 +759,19 @@ int SDLTest_ExecuteTestSuiteRunner(SDLTest_TestSuiteRunner *runner)
     countSum = totalTestPassedCount + totalTestFailedCount + totalTestSkippedCount;
     if (totalTestFailedCount == 0) {
         runResult = 0;
-        SDLTest_Log(SDLTEST_LOG_SUMMARY_FORMAT_OK, "Run", countSum, totalTestPassedCount, totalTestFailedCount, totalTestSkippedCount);
-        SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Run /w seed", runSeed, COLOR_GREEN "Passed" COLOR_END);
+        SDLTest_LogSummary(true, "Run", countSum, totalTestPassedCount, totalTestFailedCount, totalTestSkippedCount);
+        SDLTest_LogFinalResult(true, "Run /w seed", runSeed, COLOR_GREEN, "Passed");
     } else {
         runResult = 1;
-        SDLTest_LogError(SDLTEST_LOG_SUMMARY_FORMAT, "Run", countSum, totalTestPassedCount, totalTestFailedCount, totalTestSkippedCount);
-        SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Run /w seed", runSeed, COLOR_RED "Failed" COLOR_END);
+        SDLTest_LogSummary(false, "Run", countSum, totalTestPassedCount, totalTestFailedCount, totalTestSkippedCount);
+        SDLTest_LogFinalResult(false, "Run /w seed", runSeed, COLOR_RED, "Failed");
     }

     /* Print repro steps for failed tests */
     if (failedNumberOfTests > 0) {
         SDLTest_Log("Harness input to repro failures:");
         for (testCounter = 0; testCounter < failedNumberOfTests; testCounter++) {
-            SDLTest_Log(COLOR_RED " --seed %s --filter %s" COLOR_END, runSeed, failedTests[testCounter]->name);
+            SDLTest_Log("%s" " --seed %s --filter %s" "%s", COLOR_RED, runSeed, failedTests[testCounter]->name, COLOR_END);
         }
     }
     SDL_free((void *)failedTests);
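
[Note, not part of the patch] The call sites above now hand COLOR_RED/COLOR_BLUE/COLOR_YELLOW/COLOR_END to SDLTest_Log()/SDLTest_LogMessage() as "%s" arguments, which only pays off if those strings can differ at runtime. This diff does not show how "SDL_test_internal.h" defines them; the sketch below is one hypothetical way such a runtime color switch can work (ANSI codes only when stderr is a terminal). It is POSIX-only and is not taken from SDL.

    /* Hypothetical sketch, not SDL code: pick color strings once at startup. */
    #include <stdio.h>
    #include <unistd.h> /* isatty(), fileno(); POSIX only, for illustration */

    static const char *color_red = "";
    static const char *color_end = "";

    static void init_colors(void)
    {
        if (isatty(fileno(stderr))) { /* only a terminal understands ANSI codes */
            color_red = "\033[0;31m";
            color_end = "\033[0m";
        }
    }

    int main(void)
    {
        init_colors();
        /* Same "%s" pattern the harness uses for its repro lines;
         * "SEED" and "testName" are placeholder values. */
        fprintf(stderr, "%s --seed %s --filter %s%s\n", color_red, "SEED", "testName", color_end);
        return 0;
    }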