Fixes in harness and fuzzer test lib components; improve harness driver; add rect test suite

Andreas Schiffler 2012-12-15 21:50:17 -08:00
parent a6185d6ad9
commit adefd543ce
5 changed files with 1743 additions and 38 deletions


@@ -38,7 +38,7 @@
 /**
  *Counter for fuzzer invocations
  */
-static int fuzzerInvocationCounter;
+static int fuzzerInvocationCounter = 0;

 /**
  * Context for shared random number generator
@@ -54,7 +54,9 @@ SDLTest_FuzzerInit(Uint64 execKey)
 {
     Uint32 a = (execKey >> 32) & 0x00000000FFFFFFFF;
     Uint32 b = execKey & 0x00000000FFFFFFFF;
+    SDL_memset((void *)&rndContext, 0, sizeof(SDLTest_RandomContext));
     SDLTest_RandomInit(&rndContext, a, b);
+    fuzzerInvocationCounter = 0;
 }

 int
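The SDL_memset of the shared random context and the reset of fuzzerInvocationCounter give SDLTest_FuzzerInit a clean starting state on every call, so re-initializing with the same exec key replays the same fuzzer sequence. A minimal sketch of that property using the public fuzzer API; the key value, the helper name, and the SDL_test.h include are illustrative assumptions, not taken from this commit:

#include "SDL_test.h"   /* assumed umbrella header for the SDL test library */

/* Sketch: re-initializing with the same exec key must replay the same
 * fuzzer value and restart the invocation counter from zero. */
static int check_fuzzer_replay(void)
{
    Uint64 execKey = 0x1234567887654321ULL;   /* illustrative key */
    Sint32 first, replayed;
    int countAfterOne;

    SDLTest_FuzzerInit(execKey);
    first = SDLTest_RandomSint32();
    countAfterOne = SDLTest_GetFuzzerInvocationCount();   /* expected: 1 */

    SDLTest_FuzzerInit(execKey);               /* re-seeds context, resets counter */
    replayed = SDLTest_RandomSint32();

    return (first == replayed) && (countAfterOne == 1);
}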


@@ -222,6 +222,7 @@ SDLTest_RunTest(SDLTest_TestSuiteReference *testSuite, SDLTest_TestCaseReference
 {
     SDL_TimerID timer = 0;
     int testResult = 0;
+    int fuzzerCount;

     if (testSuite==NULL || testCase==NULL || testSuite->name==NULL || testCase->name==NULL)
     {
@@ -268,7 +269,10 @@ SDLTest_RunTest(SDLTest_TestSuiteReference *testSuite, SDLTest_TestCaseReference
     }

     // Report on asserts and fuzzer usage
-    SDLTest_Log("Fuzzer invocations: %d", SDLTest_GetFuzzerInvocationCount());
+    fuzzerCount = SDLTest_GetFuzzerInvocationCount();
+    if (fuzzerCount > 0) {
+        SDLTest_Log("Fuzzer invocations: %d", fuzzerCount);
+    }
     SDLTest_LogAssertSummary();

     return testResult;
@@ -361,6 +365,8 @@ SDLTest_RunSuites(SDLTest_TestSuiteReference *testSuites[], char *userRunSeed, U
             SDLTest_LogError("Generating a random seed failed");
             return 2;
         }
+    } else {
+        runSeed = userRunSeed;
     }

     // Reset per-run counters
@@ -372,7 +378,7 @@ SDLTest_RunSuites(SDLTest_TestSuiteReference *testSuites[], char *userRunSeed, U
     runStartSeconds = GetClock();

     // Log run with fuzzer parameters
-    SDLTest_Log("::::: Test Run '%s' started\n", runSeed);
+    SDLTest_Log("::::: Test Run /w seed '%s' started\n", runSeed);

     // Loop over all suites
     suiteCounter = 0;
@@ -390,7 +396,7 @@ SDLTest_RunSuites(SDLTest_TestSuiteReference *testSuites[], char *userRunSeed, U
         // Log suite started
         currentSuiteName = (char *)((testSuite->name) ? testSuite->name : SDLTest_InvalidNameFormat);
-        SDLTest_Log("===== Test Suite %i: %s started\n",
+        SDLTest_Log("===== Test Suite %i: '%s' started\n",
             suiteCounter,
             currentSuiteName);
@@ -406,11 +412,14 @@ SDLTest_RunSuites(SDLTest_TestSuiteReference *testSuites[], char *userRunSeed, U
             // Log test started
             currentTestName = (char *)((testCase->name) ? testCase->name : SDLTest_InvalidNameFormat);
-            SDLTest_Log("----- Test Case %i: %s started",
+            SDLTest_Log("----- Test Case %i.%i: '%s' started",
+                suiteCounter,
                 testCounter,
                 currentTestName);
-            SDLTest_Log("Test Description: %s",
+            if (testCase->description != NULL && strlen(testCase->description)>0) {
+                SDLTest_Log("Test Description: '%s'",
                 (testCase->description) ? testCase->description : SDLTest_InvalidNameFormat);
+            }

             // Loop over all iterations
             iterationCounter = 0;
@@ -424,7 +433,7 @@ SDLTest_RunSuites(SDLTest_TestSuiteReference *testSuites[], char *userRunSeed, U
                     execKey = SDLTest_GenerateExecKey(runSeed, testSuite->name, testCase->name, iterationCounter);
                 }
-                SDLTest_Log("Test Iteration %i: execKey %d", iterationCounter, execKey);
+                SDLTest_Log("Test Iteration %i: execKey %llu", iterationCounter, execKey);
                 testResult = SDLTest_RunTest(testSuite, testCase, execKey);

                 if (testResult == TEST_RESULT_PASSED) {
@@ -442,10 +451,14 @@ SDLTest_RunSuites(SDLTest_TestSuiteReference *testSuites[], char *userRunSeed, U
             // Take time - test end
             testEndSeconds = GetClock();

-            SDLTest_Log("Test Case %s ended", currentTestName);
+            if (testIterations > 1) {
+                // Log test runtime
+                SDLTest_Log("Runtime of %i iterations: %.1f sec", testIterations, testEndSeconds - testStartSeconds);
+                SDLTest_Log("Test runtime: %.5f sec", (testEndSeconds - testStartSeconds) / (float)testIterations);
+            } else {
             // Log test runtime
             SDLTest_Log("Test runtime: %.1f sec", testEndSeconds - testStartSeconds);
+            }

             // Log final test result
             switch (testResult) {
@@ -493,14 +506,15 @@ SDLTest_RunSuites(SDLTest_TestSuiteReference *testSuites[], char *userRunSeed, U
     {
         runResult = 0;
         SDLTest_Log(logFormat, "Run", countSum, totalTestPassedCount, totalTestFailedCount, totalTestSkippedCount);
-        SDLTest_Log((char *)SDLTest_FinalResultFormat, "Run", runSeed, "Passed");
+        SDLTest_Log((char *)SDLTest_FinalResultFormat, "Run /w seed", runSeed, "Passed");
     }
     else
     {
         runResult = 1;
         SDLTest_LogError(logFormat, "Run", countSum, totalTestPassedCount, totalTestFailedCount, totalTestSkippedCount);
-        SDLTest_LogError((char *)SDLTest_FinalResultFormat, "Run /w seed", runSeed, "Failed");
+        SDLTest_LogError((char *)SDLTest_FinalResultFormat, "Run /w seed", runSeed, "Failed");
     }
+
+    SDLTest_Log("Exit code: %d", runResult);

     return runResult;
 }
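Two of the logging changes above are behavioral rather than purely cosmetic: execution keys are 64-bit values, so they are now printed with %llu instead of %d, and when a test case runs for more than one iteration the harness reports both the total runtime and the per-iteration average. For example (illustrative numbers, not output from this commit), 5 iterations completing in 2.5 seconds would be logged as "Runtime of 5 iterations: 2.5 sec" and "Test runtime: 0.50000 sec".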


@@ -34,6 +34,9 @@ int
 main(int argc, char *argv[])
 {
     int result;
+    int testIterations = 1;
+    Uint64 userExecKey = 0;
+    char *userRunSeed = NULL;
     int i;

     /* Initialize test framework */
@@ -52,27 +55,33 @@ main(int argc, char *argv[])
         consumed = SDLTest_CommonArg(state, i);
         if (consumed == 0) {
             consumed = -1;
-            /* Parse additional parameters
-            if (SDL_strcasecmp(argv[i], "--BLAH") == 0) {
+            if (SDL_strcasecmp(argv[i], "--iterations") == 0) {
                 if (argv[i + 1]) {
-                    if (SDL_strcasecmp(argv[i + 1], "BLUB") == 0) {
-                        blah = blub;
+                    testIterations = SDL_atoi(argv[i + 1]);
+                    if (testIterations < 1) testIterations = 1;
                     consumed = 2;
                 }
             }
-            } else if (SDL_strcasecmp(argv[i], "--BINGO") == 0) {
-                bingo = SDL_TRUE;
-                consumed = 1;
+            else if (SDL_strcasecmp(argv[i], "--execKey") == 0) {
+                if (argv[i + 1]) {
+                    SDL_sscanf(argv[i + 1], "%llu", &userExecKey);
+                    consumed = 2;
+                }
+            }
+            else if (SDL_strcasecmp(argv[i], "--seed") == 0) {
+                if (argv[i + 1]) {
+                    userRunSeed = SDL_strdup(argv[i + 1]);
+                    consumed = 2;
+                }
             }
-            */
         }
         if (consumed < 0) {
             fprintf(stderr,
-                    "Usage: %s %s [--BLAH BLUB --BINGO]\n",
+                    "Usage: %s %s [--iterations #] [--execKey #] [--seed string]\n",
                     argv[0], SDLTest_CommonUsage(state));
             quit(1);
         }

         i += consumed;
     }
@@ -89,9 +98,12 @@ main(int argc, char *argv[])
     }

     /* Call Harness */
-    // TODO: pass custom parameters
-    result = SDLTest_RunSuites(testSuites, NULL, 0, 1);
-    //int SDLTest_RunSuites(SDLTest_TestSuiteReference *testSuites, char *userRunSeed, Uint64 userExecKey, int testIterations);
+    result = SDLTest_RunSuites(testSuites, userRunSeed, userExecKey, testIterations);
+
+    /* Clean up */
+    if (userRunSeed != NULL) {
+        SDL_free(userRunSeed);
+    }

     /* Shutdown everything */
     quit(result);
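A usage sketch for the new options (the binary name, seed string, and key value are placeholders; only the flag names come from the code above): ./testautomation --iterations 3 --seed SOMESEED repeats every test case three times under a fixed run seed, and ./testautomation --execKey 12345 forces a specific execution key so a single fuzzed iteration can be replayed deterministically.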

test/tests/testrect.c (new file, 1677 lines added): diff suppressed because it is too large.
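Since the 1677-line rect suite itself is suppressed in this view, here is a hedged sketch of the style such test cases use in the SDL test library; the function name, rectangle bounds, and overlap reasoning are invented for illustration and are not copied from testrect.c:

#include "SDL.h"
#include "SDL_test.h"

/* Hypothetical rect-style test case: fuzz the position of one rectangle so it
 * always overlaps a fixed reference rect, then assert SDL_HasIntersection agrees. */
int rect_testHasIntersectionSketch(void *arg)
{
    SDL_Rect refRect = { 0, 0, 32, 32 };
    SDL_Rect rect;
    SDL_bool intersects;
    (void)arg;

    /* x,y in [-16,16] with w=h=32 guarantees an overlap with refRect */
    rect.x = SDLTest_RandomIntegerInRange(-16, 16);
    rect.y = SDLTest_RandomIntegerInRange(-16, 16);
    rect.w = 32;
    rect.h = 32;

    intersects = SDL_HasIntersection(&refRect, &rect);
    SDLTest_AssertCheck(intersects == SDL_TRUE,
        "SDL_HasIntersection(A,B) should be TRUE for overlapping rects, got %i",
        (int)intersects);

    return TEST_COMPLETED;
}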


@@ -14,7 +14,7 @@
 //extern SDLTest_TestSuiteReference eventsTestSuite;
 //extern SDLTest_TestSuiteReference keyboardTestSuite;
 extern SDLTest_TestSuiteReference platformTestSuite;
-//extern SDLTest_TestSuiteReference rectTestSuite;
+extern SDLTest_TestSuiteReference rectTestSuite;
 //extern SDLTest_TestSuiteReference renderTestSuite;
 //extern SDLTest_TestSuiteReference rwopsTestSuite;
 //extern SDLTest_TestSuiteReference surfaceTestSuite;
@@ -28,7 +28,7 @@ SDLTest_TestSuiteReference *testSuites[] = {
     // &eventsTestSuite,
     // &keyboardTestSuite,
     &platformTestSuite,
-    // &rectTestSuite,
+    &rectTestSuite,
     // &renderTestSuite,
     // &rwopsTestSuite,
     // &surfaceTestSuite,
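Declaring rectTestSuite extern and listing &rectTestSuite here only links if testrect.c defines that symbol. As a sketch of what such a definition conventionally looks like in the SDL test library, reusing the hypothetical test case from the sketch above; the field layout of SDLTest_TestCaseReference and SDLTest_TestSuiteReference and all names shown are assumptions for illustration, not copied from the suppressed testrect.c diff:

/* Sketch: a NULL-terminated list of test case references plus the exported
 * suite reference that the suites header declares extern. */
static const SDLTest_TestCaseReference rectTest1 = {
    (SDLTest_TestCaseFp)rect_testHasIntersectionSketch,
    "rect_testHasIntersectionSketch",
    "Checks SDL_HasIntersection for overlapping rectangles",
    TEST_ENABLED
};

static const SDLTest_TestCaseReference *rectTests[] = {
    &rectTest1, NULL
};

SDLTest_TestSuiteReference rectTestSuite = {
    "Rect",       /* suite name */
    NULL,         /* no per-test setup */
    rectTests,    /* test case list */
    NULL          /* no per-test teardown */
};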