commit 6d3454e153
parent 3efe0fed79

If any assert in the SetUp function fails, that test will be skipped.

11 changed files with 377 additions and 293 deletions
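The mechanism, in brief: the assert library keeps a per-environment failed-assert counter, the runner loads each suite's _CountFailedAsserts() entry point alongside SetUp and TearDown, and right after SetUp it treats a non-zero count as a fixture failure, returning result code 3 ("skipped") without running the test body. A condensed sketch of that flow, not part of the commit itself; the names follow the diff below, and the TEST_RESULT_SKIPPED constant is illustrative (the code uses the bare value 3):

    int
    RunTest(TestCase *testItem)
    {
        /* Illustrative name; the commit itself uses the bare value 3. */
        enum { TEST_RESULT_SKIPPED = 3 };

        testItem->initTestEnvironment();        /* resets the failed-assert counter */

        if(testItem->testSetUp) {
            testItem->testSetUp(0x0);           /* fixture setup; may call AssertTrue() etc. */
        }

        /* _CountFailedAsserts() is loaded from the suite library; a non-zero
           count at this point can only have come from asserts made in SetUp. */
        if(testItem->countFailedAsserts() != 0) {
            return TEST_RESULT_SKIPPED;         /* loggers report result code 3 as "skipped" */
        }

        testItem->testCase(0x0);                /* run the actual test body */

        if(testItem->testTearDown) {
            testItem->testTearDown(0x0);
        }

        return testItem->quitTestEnvironment(); /* 0 = passed, non-zero = failed */
    }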
@@ -36,7 +36,7 @@ int _testAssertsFailed;
 int _testAssertsPassed;
 
 void
-_InitTestEnvironment() // InitTestEnvironment
+_InitTestEnvironment()
 {
     _testReturnValue = 0;
     _testAssertsFailed = 0;
@@ -56,8 +56,13 @@ _QuitTestEnvironment()
     return _testReturnValue;
 }
 
+int
+_CountFailedAsserts() {
+    return _testAssertsFailed;
+}
+
 void
-AssertEquals(const int expected, const int actual, char *message, ...)
+AssertEquals(int expected, int actual, char *message, ...)
 {
     va_list args;
     char buf[256];
@@ -69,13 +69,19 @@ void _InitTestEnvironment();
  */
 int _QuitTestEnvironment();
 
+/*!
+ * Can be used to query the number of failed asserts
+ * \return Returns the failed assert count.
+ */
+int _CountFailedAsserts();
+
 /*!
  * Assert function. Tests if the expected value equals the actual value, then
  * the test assert succeeds, otherwise it fails and warns about it.
  *
  * \param expected Value user expects to have
  * \param actual The actual value of tested variable
- * \param message Message that will be printed if assert fails
+ * \param message Message that will be printed
  */
 void AssertEquals(const int expected, const int actual, char *message, ...);
 
@@ -85,18 +91,22 @@ void AssertEquals(const int expected, const int actual, char *message, ...);
  * assert passes, otherwise it fails.
  *
  * \param condition Condition which will be evaluated
- * \param message Message that will be printed if assert fails
+ * \param message Message that will be printed
  */
 void AssertTrue(int condition, char *message, ...);
 
 /*!
-\todo add markup
-*/
+ * Assert function which will always fail
+ *
+ * \param message Message that will be printed
+ */
 void AssertFail(char *message, ...);
 
 /*!
-\todo add markup
-*/
+ * Assert function which will always pass
+ *
+ * \param message Message that will be printed
+ */
 void AssertPass(char *message, ...);
 
 #endif
@@ -30,7 +30,7 @@
  */
 typedef void (*RunStartedFp)(int parameterCount, char *runnerParameters[], time_t eventTime, void *data);
 typedef void (*RunEndedFp)(int testCount, int suiteCount, int testPassCount, int testFailCount,
-                           time_t endTime, double totalRuntime);
+                           int testSkippedCount, time_t endTime, double totalRuntime);
 
 typedef void (*SuiteStartedFp)(const char *suiteName, time_t eventTime);
 typedef void (*SuiteEndedFp)(int testsPassed, int testsFailed, int testsSkipped,
@@ -54,13 +54,14 @@ PlainRunStarted(int parameterCount, char *runnerParameters[], time_t eventTime,
 
 void
 PlainRunEnded(int testCount, int suiteCount, int testPassCount, int testFailCount,
-              time_t endTime, double totalRuntime)
+              int testSkippedCount, time_t endTime, double totalRuntime)
 {
     Output(indentLevel, "Ran %d tests in %0.5f seconds from %d suites.",
            testCount, totalRuntime, suiteCount);
 
     Output(indentLevel, "%d tests passed", testPassCount);
     Output(indentLevel, "%d tests failed", testFailCount);
+    Output(indentLevel, "%d tests skipped", testSkippedCount);
 }
 
 void
@@ -91,6 +92,9 @@ PlainTestEnded(const char *testName, const char *suiteName,
     if(testResult) {
         if(testResult == 2) {
             Output(--indentLevel, "%s: failed -> no assert", testName);
+        }
+        else if(testResult == 3) {
+            Output(--indentLevel, "%s: skipped", testName);
         } else {
             Output(--indentLevel, "%s: failed", testName);
         }
@@ -104,7 +108,7 @@ PlainAssert(const char *assertName, int assertResult, const char *assertMessage,
             time_t eventTime)
 {
     const char *result = (assertResult) ? "passed" : "failed";
-    Output(indentLevel, "%s: %s; %s", assertName, result, assertMessage);
+    Output(indentLevel, "%s: %s - %s", assertName, result, assertMessage);
 }
 
 void
@@ -112,7 +116,7 @@ PlainAssertWithValues(const char *assertName, int assertResult, const char *asse
                       int actualValue, int expected, time_t eventTime)
 {
     const char *result = (assertResult) ? "passed" : "failed";
-    Output(indentLevel, "%s %s (expected %d, actualValue &d): %s",
+    Output(indentLevel, "%s: %s (expected %d, actualValue &d) - %s",
            assertName, result, expected, actualValue, assertMessage);
 }
 
@@ -26,7 +26,7 @@ void PlainRunStarted(int parameterCount, char *runnerParameters[], time_t eventT
  * \param totalRuntime How long the execution took
  */
 void PlainRunEnded(int testCount, int suiteCount, int testPassCount, int testFailCount,
-                   time_t endTime, double totalRuntime);
+                   int testSkippedCount, time_t endTime, double totalRuntime);
 
 /*!
  * Prints the data about the test suite that'll be executed next
@@ -46,6 +46,8 @@ typedef int (*QuitTestInvironmentFp)(void);
 typedef void (*TestCaseSetUpFp)(void *arg);
 //!< Function pointer to a test case tear down function
 typedef void (*TestCaseTearDownFp)(void *arg);
+//!< Function pointer to a function which returns the failed assert count
+typedef int (*CountFailedAssertsFp)(void);
 
 
 //!< Flag for executing tests in-process
@@ -115,6 +117,8 @@ typedef struct TestCaseItem {
     TestCaseTearDownFp testTearDown;
     QuitTestInvironmentFp quitTestEnvironment;
 
+    CountFailedAssertsFp countFailedAsserts;
+
     struct TestCaseItem *next;
 } TestCase;
 
@@ -126,6 +130,7 @@ QuitTestInvironmentFp LoadQuitTestInvironmentFunction(void *suite);
 TestCaseReference **QueryTestCaseReferences(void *library);
 TestCaseSetUpFp LoadTestSetUpFunction(void *suite);
 TestCaseTearDownFp LoadTestTearDownFunction(void *suite);
+CountFailedAssertsFp LoadCountFailedAssertsFunction(void *suite);
 
 
 /*! Pointers to selected logger implementation */
@@ -141,143 +146,6 @@ AssertSummaryFp AssertSummary = NULL;
 LogFp Log = NULL;
 
 
-/*!
- * Goes through the previously loaded test suites and
- * loads test cases from them. Test cases are filtered
- * during the process. Function will only return the
- * test cases which aren't filtered out.
- *
- * \param suites previously loaded test suites
- *
- * \return Test cases that survived filtering process.
- */
-TestCase *
-LoadTestCases(TestSuiteReference *suites)
-{
-    TestCase *testCases = NULL;
-
-    TestSuiteReference *suiteReference = NULL;
-    for(suiteReference = suites; suiteReference; suiteReference = suiteReference->next) {
-        TestCaseReference **tests = QueryTestCaseReferences(suiteReference->library);
-
-        TestCaseReference *testReference = NULL;
-        int counter = 0;
-        for(testReference = tests[counter]; testReference; testReference = tests[++counter]) {
-
-            void *suite = suiteReference->library;
-
-            // Load test case functions
-            InitTestInvironmentFp initTestEnvironment = LoadInitTestInvironmentFunction(suiteReference->library);
-            QuitTestInvironmentFp quitTestEnvironment = LoadQuitTestInvironmentFunction(suiteReference->library);
-
-            TestCaseSetUpFp testSetUp = LoadTestSetUpFunction(suiteReference->library);
-            TestCaseTearDownFp testTearDown = LoadTestTearDownFunction(suiteReference->library);
-
-            TestCaseFp testCase = LoadTestCaseFunction(suiteReference->library, testReference->name);
-
-            // Do the filtering
-            if(FilterTestCase(testReference)) {
-                TestCase *item = SDL_malloc(sizeof(TestCase));
-                memset(item, 0, sizeof(TestCase));
-
-                item->initTestEnvironment = initTestEnvironment;
-                item->quitTestEnvironment = quitTestEnvironment;
-
-                item->testSetUp = testSetUp;
-                item->testTearDown = testTearDown;
-
-                item->testCase = testCase;
-
-                // copy suite name
-                int length = SDL_strlen(suiteReference->name) + 1;
-                item->suiteName = SDL_malloc(length);
-                strncpy(item->suiteName, suiteReference->name, length);
-
-                // copy test name
-                length = SDL_strlen(testReference->name) + 1;
-                item->testName = SDL_malloc(length);
-                strncpy(item->testName, testReference->name, length);
-
-                // copy test description
-                length = SDL_strlen(testReference->description) + 1;
-                item->description = SDL_malloc(length);
-                strncpy(item->description, testReference->description, length);
-
-                item->requirements = testReference->requirements;
-                item->timeout = testReference->timeout;
-
-                // prepend the list
-                item->next = testCases;
-                testCases = item;
-
-                //printf("Added test: %s\n", testReference->name);
-            }
-        }
-    }
-
-    return testCases;
-}
-
-
-/*!
- * Unloads the given TestCases. Frees all the resources
- * allocated for test cases.
- *
- * \param testCases Test cases to be deallocated
- */
-void
-UnloadTestCases(TestCase *testCases)
-{
-    TestCase *ref = testCases;
-    while(ref) {
-        SDL_free(ref->testName);
-        SDL_free(ref->suiteName);
-        SDL_free(ref->description);
-
-        TestCase *temp = ref->next;
-        SDL_free(ref);
-        ref = temp;
-    }
-
-    testCases = NULL;
-}
-
-
-/*!
- * Filters a test case based on its properties in TestCaseReference and user
- * preference.
- *
- * \return Non-zero means test will be added to execution list, zero means opposite
- */
-int
-FilterTestCase(TestCaseReference *testReference)
-{
-    int retVal = 1;
-
-    if(testReference->enabled == TEST_DISABLED) {
-        retVal = 0;
-    }
-
-    if(only_selected_test) {
-        if(SDL_strncmp(testReference->name, selected_test_name, NAME_BUFFER_SIZE) == 0) {
-            retVal = 1;
-        } else {
-            retVal = 0;
-        }
-    }
-
-    if(only_tests_with_string) {
-        if(strstr(testReference->name, testcase_name_substring) != NULL) {
-            retVal = 1;
-        } else {
-            retVal = 0;
-        }
-    }
-
-    return retVal;
-}
-
 /*!
  * Scans the tests/ directory and returns the names
  * of the dynamic libraries implementing the test suites.
@@ -298,10 +166,9 @@ ScanForTestSuites(char *directoryName, char *extension)
 {
     typedef struct dirent Entry;
     DIR *directory = opendir(directoryName);
-
     TestSuiteReference *suites = NULL;
 
     Entry *entry = NULL;
 
     if(!directory) {
         fprintf(stderr, "Failed to open test suite directory: %s\n", directoryName);
         perror("Error message");
@@ -323,8 +190,11 @@ ScanForTestSuites(char *directoryName, char *extension)
         if(ok && SDL_strcmp(ext, extension) == 0) {
             // create test suite reference
             TestSuiteReference *reference = (TestSuiteReference *) SDL_malloc(sizeof(TestSuiteReference));
+            if(reference == NULL) {
+                fprintf(stderr, "Allocating TestSuiteReference failed\n");
+            }
+
             memset(reference, 0, sizeof(TestSuiteReference));
 
             const int dirSize = SDL_strlen(directoryName);
             const int extSize = SDL_strlen(ext);
@@ -427,6 +297,147 @@ UnloadTestSuites(TestSuiteReference *suites)
 }
 
 
+/*!
+ * Goes through the previously loaded test suites and
+ * loads test cases from them. Test cases are filtered
+ * during the process. Function will only return the
+ * test cases which aren't filtered out.
+ *
+ * \param suites previously loaded test suites
+ *
+ * \return Test cases that survived filtering process.
+ */
+TestCase *
+LoadTestCases(TestSuiteReference *suites)
+{
+    TestCase *testCases = NULL;
+
+    TestSuiteReference *suiteReference = NULL;
+    for(suiteReference = suites; suiteReference; suiteReference = suiteReference->next) {
+        TestCaseReference **tests = QueryTestCaseReferences(suiteReference->library);
+
+        TestCaseReference *testReference = NULL;
+        int counter = 0;
+        for(testReference = tests[counter]; testReference; testReference = tests[++counter]) {
+
+            void *suite = suiteReference->library;
+
+            // Load test case functions
+            InitTestInvironmentFp initTestEnvironment = LoadInitTestInvironmentFunction(suiteReference->library);
+            QuitTestInvironmentFp quitTestEnvironment = LoadQuitTestInvironmentFunction(suiteReference->library);
+
+            TestCaseSetUpFp testSetUp = LoadTestSetUpFunction(suiteReference->library);
+            TestCaseTearDownFp testTearDown = LoadTestTearDownFunction(suiteReference->library);
+
+            TestCaseFp testCase = LoadTestCaseFunction(suiteReference->library, testReference->name);
+
+            CountFailedAssertsFp countFailedAsserts = LoadCountFailedAssertsFunction(suiteReference->library);
+
+            // Do the filtering
+            if(FilterTestCase(testReference)) {
+                TestCase *item = SDL_malloc(sizeof(TestCase));
+                memset(item, 0, sizeof(TestCase));
+
+                item->initTestEnvironment = initTestEnvironment;
+                item->quitTestEnvironment = quitTestEnvironment;
+
+                item->testSetUp = testSetUp;
+                item->testTearDown = testTearDown;
+
+                item->testCase = testCase;
+
+                item->countFailedAsserts = countFailedAsserts;
+
+                // copy suite name
+                int length = SDL_strlen(suiteReference->name) + 1;
+                item->suiteName = SDL_malloc(length);
+                strncpy(item->suiteName, suiteReference->name, length);
+
+                // copy test name
+                length = SDL_strlen(testReference->name) + 1;
+                item->testName = SDL_malloc(length);
+                strncpy(item->testName, testReference->name, length);
+
+                // copy test description
+                length = SDL_strlen(testReference->description) + 1;
+                item->description = SDL_malloc(length);
+                strncpy(item->description, testReference->description, length);
+
+                item->requirements = testReference->requirements;
+                item->timeout = testReference->timeout;
+
+                // prepend the list
+                item->next = testCases;
+                testCases = item;
+
+                //printf("Added test: %s\n", testReference->name);
+            }
+        }
+    }
+
+    return testCases;
+}
+
+
+/*!
+ * Unloads the given TestCases. Frees all the resources
+ * allocated for test cases.
+ *
+ * \param testCases Test cases to be deallocated
+ */
+void
+UnloadTestCases(TestCase *testCases)
+{
+    TestCase *ref = testCases;
+    while(ref) {
+        SDL_free(ref->testName);
+        SDL_free(ref->suiteName);
+        SDL_free(ref->description);
+
+        TestCase *temp = ref->next;
+        SDL_free(ref);
+        ref = temp;
+    }
+
+    testCases = NULL;
+}
+
+
+/*!
+ * Filters a test case based on its properties in TestCaseReference and user
+ * preference.
+ *
+ * \return Non-zero means test will be added to execution list, zero means opposite
+ */
+int
+FilterTestCase(TestCaseReference *testReference)
+{
+    int retVal = 1;
+
+    if(testReference->enabled == TEST_DISABLED) {
+        retVal = 0;
+    }
+
+    if(only_selected_test) {
+        if(SDL_strncmp(testReference->name, selected_test_name, NAME_BUFFER_SIZE) == 0) {
+            retVal = 1;
+        } else {
+            retVal = 0;
+        }
+    }
+
+    if(only_tests_with_string) {
+        if(strstr(testReference->name, testcase_name_substring) != NULL) {
+            retVal = 1;
+        } else {
+            retVal = 0;
+        }
+    }
+
+    return retVal;
+}
+
+
 /*!
  * Loads the test case references from the given test suite.
 
@@ -554,6 +565,81 @@ LoadQuitTestInvironmentFunction(void *suite) {
     return testEnvQuit;
 }
 
+/*!
+ * Loads function that returns failed assert count in the current
+ * test environment
+ *
+ * \param suite Used test suite
+ *
+ * \return Function pointer to _CountFailedAsserts function
+ */
+CountFailedAssertsFp
+LoadCountFailedAssertsFunction(void *suite) {
+    CountFailedAssertsFp countFailedAssert = (CountFailedAssertsFp) SDL_LoadFunction(suite, "_CountFailedAsserts");
+    if(countFailedAssert == NULL) {
+        fprintf(stderr, "Loading _CountFailedAsserts function failed, countFailedAssert == NULL\n");
+        fprintf(stderr, "%s\n", SDL_GetError());
+    }
+
+    return countFailedAssert;
+}
+
+
+/*
+ * Execute the test
+ *
+ * \param testItem Test to be executed
+ */
+int
+RunTest(TestCase *testItem) {
+    testItem->initTestEnvironment();
+
+    if(testItem->testSetUp) {
+        testItem->testSetUp(0x0);
+    }
+
+    int cntFailedAsserts = testItem->countFailedAsserts();
+    if(cntFailedAsserts != 0) {
+        return 3;
+    }
+
+    testItem->testCase(0x0);
+
+    if(testItem->testTearDown) {
+        testItem->testTearDown(0x0);
+    }
+
+    return testItem->quitTestEnvironment();
+}
+
+/*!
+ * Executes a test case. Loads the test, executes it and
+ * returns the tests return value to the caller.
+ *
+ * \param testItem The test case that will be executed
+ * \return The return value of the test. Zero means success, non-zero failure.
+ */
+int
+ExecuteTest(TestCase *testItem) {
+    int retVal = 1;
+
+    if(execute_inproc) {
+        retVal = RunTest(testItem);
+    } else {
+        int childpid = fork();
+        if(childpid == 0) {
+            exit(RunTest(testItem));
+        } else {
+            int stat_lock = -1;
+            int child = wait(&stat_lock);
+
+            retVal = HandleChildProcessReturnValue(stat_lock);
+        }
+    }
+
+    return retVal;
+}
+
+
 /*!
  * If using out-of-proc execution of tests. This function
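Taken with PlainTestEnded() above and XMLTestEnded() below, the new RunTest() establishes the runner's per-test result convention. A summary sketch; the enum and its names are illustrative, only the numeric values appear in the code:

    enum {
        TEST_PASSED    = 0, /* quitTestEnvironment() returned zero                */
        TEST_FAILED    = 1, /* any other non-zero return is reported as a failure */
        TEST_NO_ASSERT = 2, /* test ran without making a single assert            */
        TEST_SKIPPED   = 3  /* an assert failed already in SetUp                  */
    };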
@@ -584,56 +670,58 @@ HandleChildProcessReturnValue(int stat_lock)
 
 
 /*!
- * Executes a test case. Loads the test, executes it and
- * returns the tests return value to the caller.
+ * Sets up the logger.
  *
- * \param testItem The test case that will be executed
- * \return The return value of the test. Zero means success, non-zero failure.
+ * \return Some special data that will be passed to StartRun() logger call
  */
-int
-ExecuteTest(TestCase *testItem) {
-    int retVal = 1;
-    if(execute_inproc) {
-        testItem->initTestEnvironment();
-
-        if(testItem->testSetUp) {
-            testItem->testSetUp(0x0);
-        }
-
-        testItem->testCase(0x0);
-
-        if(testItem->testTearDown) {
-            testItem->testTearDown(0x0);
-        }
-
-        retVal = testItem->quitTestEnvironment();
-    } else {
-        int childpid = fork();
-        if(childpid == 0) {
-            testItem->initTestEnvironment();
-
-            if(testItem->testSetUp) {
-                testItem->testSetUp(0x0);
-            }
-
-            testItem->testCase(0x0);
-
-            // note: if test case is is aborted by some signal
-            // then TearDown function won't be called
-            if(testItem->testTearDown) {
-                testItem->testTearDown(0x0);
-            }
-
-            exit(testItem->quitTestEnvironment());
-        } else {
-            int stat_lock = -1;
-            int child = wait(&stat_lock);
-
-            retVal = HandleChildProcessReturnValue(stat_lock);
-        }
-    }
-
-    return retVal;
-}
+void *
+SetUpLogger()
+{
+    void *loggerData = NULL;
+    if(xml_enabled) {
+        RunStarted = XMLRunStarted;
+        RunEnded = XMLRunEnded;
+
+        SuiteStarted = XMLSuiteStarted;
+        SuiteEnded = XMLSuiteEnded;
+
+        TestStarted = XMLTestStarted;
+        TestEnded = XMLTestEnded;
+
+        Assert = XMLAssert;
+        AssertWithValues = XMLAssertWithValues;
+        AssertSummary = XMLAssertSummary;
+
+        Log = XMLLog;
+
+        char *sheet = NULL;
+        if(xsl_enabled) {
+            sheet = "style.xsl"; // default style sheet;
+        }
+
+        if(custom_xsl_enabled) {
+            sheet = xsl_stylesheet_name;
+        }
+
+        loggerData = sheet;
+    } else {
+        RunStarted = PlainRunStarted;
+        RunEnded = PlainRunEnded;
+
+        SuiteStarted = PlainSuiteStarted;
+        SuiteEnded = PlainSuiteEnded;
+
+        TestStarted = PlainTestStarted;
+        TestEnded = PlainTestEnded;
+
+        Assert = PlainAssert;
+        AssertWithValues = PlainAssertWithValues;
+        AssertSummary = PlainAssertSummary;
+
+        Log = PlainLog;
+    }
+
+    return loggerData;
+}
@@ -771,7 +859,7 @@ main(int argc, char *argv[])
 
     // print: Testing against SDL version fuu (rev: bar) if verbose == true
 
-    int totalTestfailureCount = 0, totalTestPassCount = 0;
+    int totalTestFailureCount = 0, totalTestPassCount = 0, totalTestSkipCount = 0;
     int testFailureCount = 0, testPassCount = 0, testSkipCount = 0;
     char *testSuiteName = NULL;
     int suiteCounter = 0;
@@ -782,49 +870,7 @@ main(int argc, char *argv[])
     char *extension = "dylib";
 #endif
 
-    void *loggerData = NULL;
-    if(xml_enabled) {
-        RunStarted = XMLRunStarted;
-        RunEnded = XMLRunEnded;
-
-        SuiteStarted = XMLSuiteStarted;
-        SuiteEnded = XMLSuiteEnded;
-
-        TestStarted = XMLTestStarted;
-        TestEnded = XMLTestEnded;
-
-        Assert = XMLAssert;
-        AssertWithValues = XMLAssertWithValues;
-        AssertSummary = XMLAssertSummary;
-
-        Log = XMLLog;
-
-        char *sheet = NULL;
-        if(xsl_enabled) {
-            sheet = "style.xsl"; // default style sheet;
-        }
-
-        if(custom_xsl_enabled) {
-            sheet = xsl_stylesheet_name;
-        }
-
-        loggerData = sheet;
-    } else {
-        RunStarted = PlainRunStarted;
-        RunEnded = PlainRunEnded;
-
-        SuiteStarted = PlainSuiteStarted;
-        SuiteEnded = PlainSuiteEnded;
-
-        TestStarted = PlainTestStarted;
-        TestEnded = PlainTestEnded;
-
-        Assert = PlainAssert;
-        AssertWithValues = PlainAssertWithValues;
-        AssertSummary = PlainAssertSummary;
-
-        Log = PlainLog;
-    }
+    void *loggerData = SetUpLogger();
 
     const Uint32 startTicks = SDL_GetTicks();
 
@@ -845,9 +891,7 @@ main(int argc, char *argv[])
 
     RunStarted(argc, argv, time(0), loggerData);
-
-
     char *currentSuiteName = NULL;
 
     int suiteStartTime = SDL_GetTicks();
 
     TestCase *testItem = NULL;
@@ -856,7 +900,7 @@ main(int argc, char *argv[])
             currentSuiteName = testItem->suiteName;
             SuiteStarted(currentSuiteName, time(0));
 
-            testFailureCount = testPassCount = 0;
+            testFailureCount = testPassCount = testSkipCount = 0;
 
             suiteCounter++;
         }
@@ -871,7 +915,7 @@ main(int argc, char *argv[])
             currentSuiteName = testItem->suiteName;
             SuiteStarted(currentSuiteName, time(0));
 
-            testFailureCount = testPassCount = 0;
+            testFailureCount = testPassCount = testSkipCount = 0;
 
             suiteCounter++;
         }
@@ -882,8 +926,12 @@ main(int argc, char *argv[])
         const Uint32 testTimeStart = SDL_GetTicks();
 
         int retVal = ExecuteTest(testItem);
-        if(retVal) {
-            totalTestfailureCount++;
+        if(retVal == 3) {
+            testSkipCount++;
+            totalTestSkipCount++;
+        }
+        else if(retVal) {
+            totalTestFailureCount++;
             testFailureCount++;
         } else {
             totalTestPassCount++;
@@ -906,8 +954,8 @@ main(int argc, char *argv[])
     const Uint32 endTicks = SDL_GetTicks();
     const double totalRunTime = (endTicks - startTicks) / 1000.0f;
 
-    RunEnded(totalTestPassCount + totalTestfailureCount, suiteCounter,
-             totalTestPassCount, totalTestfailureCount, time(0), totalRunTime);
+    RunEnded(totalTestPassCount + totalTestFailureCount, suiteCounter,
+             totalTestPassCount, totalTestFailureCount, totalTestSkipCount, time(0), totalRunTime);
 
-    return (totalTestfailureCount ? 1 : 0);
+    return (totalTestFailureCount ? 1 : 0);
 }
@@ -104,6 +104,7 @@ $(document).ready(function() {
     /* Color the tests based on the result */
     $("span.testResult[result='passed']").addClass('passed');
    $("span.testResult[result='failed']").addClass('failed');
+    $("span.testResult[result='skipped']").addClass('skipped');
 
     /* Color the asserts based on the result */
     $("span.assertResult[result='pass']").addClass('passed');
@@ -157,6 +158,10 @@ div, h1 {
     color: red;
 }
 
+.skipped {
+    color: gray;
+}
+
 </style>
 
 </head>
@@ -56,6 +56,9 @@ TestCaseReference **QueryTestSuite() {
  * SetUp function can be used to create a test fixture for test cases.
  * The function will be called right before executing the test case.
  *
+ * Note: If any assert in the function fails then the test will be skipped.
+ * In practice, the entire suite will be skipped if assert failure happens.
+ *
  * Note: this function is optional.
  *
  * \param arg parameters given to test. Usually NULL
@@ -32,11 +32,39 @@ TestCaseReference **QueryTestSuite() {
     return (TestCaseReference **)testSuite;
 }
 
+/* Function prototypes */
+SDL_Surface *_CreateTestSurface();
+
+
+/* Create test fixture */
+
+static SDL_Surface *testsur = NULL;
+
+
+void
+SetUp(void *arg)
+{
+    int ret = SDL_Init(SDL_INIT_VIDEO);
+    AssertTrue(ret == 0, "SDL_Init(SDL_INIT_VIDEO)");
+
+    testsur = _CreateTestSurface();
+    AssertTrue(testsur != NULL, "SDL_Init(SDL_INIT_VIDEO)");
+}
+
+void
+TearDown(void *arg)
+{
+    SDL_FreeSurface( testsur );
+
+    SDL_Quit();
+}
+
 /* Helper functions for the test cases */
 
 #define TEST_SURFACE_WIDTH 80
 #define TEST_SURFACE_HEIGHT 60
 
 
 /*!
  * Creates test surface
  */
@@ -66,7 +94,7 @@ _CreateTestSurface()
 /**
  * @brief Tests a blend mode.
  */
-int _testBlitBlendMode(SDL_Surface *testsur, SDL_Surface *face, int mode)
+void _testBlitBlendMode(SDL_Surface *testsur, SDL_Surface *face, int mode)
 {
     int ret;
     int i, j, ni, nj;
@@ -102,8 +130,6 @@ int _testBlitBlendMode(SDL_Surface *testsur, SDL_Surface *face, int mode)
             ret = SDL_BlitSurface( face, NULL, testsur, &rect );
             AssertTrue(ret != 0, "SDL_BlitSurface"); }
     }
-
-    return 0;
 }
 
 /* Test case functions */
@@ -115,11 +141,6 @@ void surface_testLoad(void *arg)
     int ret;
     SDL_Surface *face, *rface;
 
-    ret = SDL_Init(SDL_INIT_VIDEO);
-    AssertTrue(ret == 0, "SDL_Init(SDL_INIT_VIDEO)");
-
-    SDL_Surface *testsur = _CreateTestSurface();
-
     /* Clear surface. */
     ret = SDL_FillRect( testsur, NULL,
                         SDL_MapRGB( testsur->format, 0, 0, 0 ) );
@@ -151,10 +172,6 @@ void surface_testLoad(void *arg)
     /* Clean up. */
     SDL_FreeSurface( rface );
     SDL_FreeSurface( face );
-
-    SDL_FreeSurface( testsur );
-
-    SDL_Quit();
 }
 
 
@@ -163,14 +180,8 @@ void surface_testLoad(void *arg)
  */
 void surface_testLoadFailure(void *arg)
 {
-    int ret = SDL_Init(SDL_INIT_VIDEO);
-    AssertTrue(ret == 0, "SDL_Init(SDL_INIT_VIDEO)");
-
     SDL_Surface *face = SDL_LoadBMP("nonexistant.bmp");
 
     AssertTrue(face == NULL, "SDL_CreateLoadBmp");
-
-    SDL_Quit();
 }
 
 
@@ -184,11 +195,6 @@ void surface_testBlit(void *arg)
     SDL_Surface *face;
     int i, j, ni, nj;
 
-    ret = SDL_Init(SDL_INIT_VIDEO);
-    AssertTrue(ret == 0, "SDL_Init(SDL_INIT_VIDEO)");
-
-    SDL_Surface *testsur = _CreateTestSurface();
-
     /* Clear surface. */
     ret = SDL_FillRect( testsur, NULL,
                         SDL_MapRGB( testsur->format, 0, 0, 0 ) );
@@ -292,9 +298,6 @@ void surface_testBlit(void *arg)
 
     /* Clean up. */
     SDL_FreeSurface( face );
-    SDL_FreeSurface( testsur );
-
-    SDL_Quit();
 }
 
 /**
@@ -308,11 +311,6 @@ void surface_testBlitBlend(void *arg)
     int i, j, ni, nj;
     int mode;
 
-    ret = SDL_Init(SDL_INIT_VIDEO);
-    AssertTrue(ret == 0, "SDL_Init(SDL_INIT_VIDEO)");
-
-    SDL_Surface *testsur = _CreateTestSurface();
-
     /* Clear surface. */
     ret = SDL_FillRect( testsur, NULL,
                         SDL_MapRGB( testsur->format, 0, 0, 0 ) );
@@ -415,7 +413,4 @@ void surface_testBlitBlend(void *arg)
 
     /* Clean up. */
     SDL_FreeSurface( face );
-    SDL_FreeSurface( testsur );
-
-    SDL_Quit();
 }
@@ -38,6 +38,7 @@ const char *numSuitesElementName = "numSuites";
 const char *numTestElementName = "numTests";
 const char *numPassedTestsElementName = "numPassedTests";
 const char *numFailedTestsElementName = "numFailedTests";
+const char *numSkippedTestsElementName = "numSkippedTests";
 const char *endTimeElementName = "endTime";
 const char *totalRuntimeElementName = "totalRuntime";
 const char *suiteElementName = "suite";
@@ -145,7 +146,7 @@ XMLRunStarted(int parameterCount, char *runnerParameters[], time_t eventTime,
 
 void
 XMLRunEnded(int testCount, int suiteCount, int testPassCount, int testFailCount,
-            time_t endTime, double totalRuntime)
+            int testSkippedCount, time_t endTime, double totalRuntime)
 {
     // log suite count
     char *output = XMLOpenElement(numSuitesElementName);
@@ -187,7 +188,17 @@ XMLRunEnded(int testCount, int suiteCount, int testPassCount, int testFailCount,
     output = XMLCloseElement(numFailedTestsElementName);
     XMLOutputter(--indentLevel, YES, output);
 
-    // log end timte
+    // log skipped test count
+    output = XMLOpenElement(numSkippedTestsElementName);
+    XMLOutputter(indentLevel++, NO, output);
+
+    output = XMLAddContent(IntToString(testSkippedCount));
+    XMLOutputter(indentLevel, NO, output);
+
+    output = XMLCloseElement(numSkippedTestsElementName);
+    XMLOutputter(--indentLevel, YES, output);
+
+    // log end tite
     output = XMLOpenElement(endTimeElementName);
     XMLOutputter(indentLevel++, NO, output);
 
@@ -342,6 +353,9 @@ XMLTestEnded(const char *testName, const char *suiteName,
     if(testResult) {
         if(testResult == 2) {
             output = XMLAddContent("failed. No assert");
+        }
+        else if(testResult == 3) {
+            output = XMLAddContent("skipped");
         } else {
             output = XMLAddContent("failed");
         }
@@ -24,7 +24,7 @@ void XMLRunStarted(int parameterCount, char *runnerParameters[], time_t eventTim
  * \param totalRuntime How long the execution took
  */
 void XMLRunEnded(int testCount, int suiteCount, int testPassCount, int testFailCount,
-                 time_t endTime, double totalRuntime);
+                 int testSkippedCount, time_t endTime, double totalRuntime);
 
 /*!
  * Prints the data about the test suite that'll be executed next in XML