Diffstat (limited to 'UnitTestFrameworkPkg/Library/UnitTestResultReportLib/UnitTestResultReportLib.c')
-rw-r--r--  UnitTestFrameworkPkg/Library/UnitTestResultReportLib/UnitTestResultReportLib.c | 115
1 file changed, 59 insertions(+), 56 deletions(-)
diff --git a/UnitTestFrameworkPkg/Library/UnitTestResultReportLib/UnitTestResultReportLib.c b/UnitTestFrameworkPkg/Library/UnitTestResultReportLib/UnitTestResultReportLib.c
index 7f7443a233..d088b927a6 100644
--- a/UnitTestFrameworkPkg/Library/UnitTestResultReportLib/UnitTestResultReportLib.c
+++ b/UnitTestFrameworkPkg/Library/UnitTestResultReportLib/UnitTestResultReportLib.c
@@ -23,36 +23,36 @@ ReportOutput (
);
struct _UNIT_TEST_STATUS_STRING {
- UNIT_TEST_STATUS Status;
- CHAR8 *String;
+ UNIT_TEST_STATUS Status;
+ CHAR8 *String;
};
struct _UNIT_TEST_FAILURE_TYPE_STRING {
- FAILURE_TYPE Type;
- CHAR8 *String;
+ FAILURE_TYPE Type;
+ CHAR8 *String;
};
struct _UNIT_TEST_STATUS_STRING mStatusStrings[] = {
- { UNIT_TEST_PASSED, "PASSED"},
- { UNIT_TEST_ERROR_PREREQUISITE_NOT_MET, "NOT RUN - PREREQUISITE FAILED"},
- { UNIT_TEST_ERROR_TEST_FAILED, "FAILED"},
- { UNIT_TEST_RUNNING, "RUNNING"},
- { UNIT_TEST_PENDING, "PENDING"},
- { 0, "**UNKNOWN**"}
+ { UNIT_TEST_PASSED, "PASSED" },
+ { UNIT_TEST_ERROR_PREREQUISITE_NOT_MET, "NOT RUN - PREREQUISITE FAILED" },
+ { UNIT_TEST_ERROR_TEST_FAILED, "FAILED" },
+ { UNIT_TEST_RUNNING, "RUNNING" },
+ { UNIT_TEST_PENDING, "PENDING" },
+ { 0, "**UNKNOWN**" }
};
-struct _UNIT_TEST_FAILURE_TYPE_STRING mFailureTypeStrings[] = {
- { FAILURETYPE_NOFAILURE, "NO FAILURE"},
- { FAILURETYPE_OTHER, "OTHER FAILURE"},
- { FAILURETYPE_ASSERTTRUE, "ASSERT_TRUE FAILURE"},
- { FAILURETYPE_ASSERTFALSE, "ASSERT_FALSE FAILURE"},
- { FAILURETYPE_ASSERTEQUAL, "ASSERT_EQUAL FAILURE"},
- { FAILURETYPE_ASSERTNOTEQUAL, "ASSERT_NOTEQUAL FAILURE"},
- { FAILURETYPE_ASSERTNOTEFIERROR, "ASSERT_NOTEFIERROR FAILURE"},
- { FAILURETYPE_ASSERTSTATUSEQUAL, "ASSERT_STATUSEQUAL FAILURE"},
- { FAILURETYPE_ASSERTNOTNULL, "ASSERT_NOTNULL FAILURE"},
- { FAILURETYPE_EXPECTASSERT, "EXPECT_ASSERT FAILURE"},
- { 0, "*UNKNOWN* Failure"}
+struct _UNIT_TEST_FAILURE_TYPE_STRING mFailureTypeStrings[] = {
+ { FAILURETYPE_NOFAILURE, "NO FAILURE" },
+ { FAILURETYPE_OTHER, "OTHER FAILURE" },
+ { FAILURETYPE_ASSERTTRUE, "ASSERT_TRUE FAILURE" },
+ { FAILURETYPE_ASSERTFALSE, "ASSERT_FALSE FAILURE" },
+ { FAILURETYPE_ASSERTEQUAL, "ASSERT_EQUAL FAILURE" },
+ { FAILURETYPE_ASSERTNOTEQUAL, "ASSERT_NOTEQUAL FAILURE" },
+ { FAILURETYPE_ASSERTNOTEFIERROR, "ASSERT_NOTEFIERROR FAILURE" },
+ { FAILURETYPE_ASSERTSTATUSEQUAL, "ASSERT_STATUSEQUAL FAILURE" },
+ { FAILURETYPE_ASSERTNOTNULL, "ASSERT_NOTNULL FAILURE" },
+ { FAILURETYPE_EXPECTASSERT, "EXPECT_ASSERT FAILURE" },
+ { 0, "*UNKNOWN* Failure" }
};
//
@@ -60,7 +60,7 @@ struct _UNIT_TEST_FAILURE_TYPE_STRING mFailureTypeStrings[] = {
//
STATIC
-CONST CHAR8*
+CONST CHAR8 *
GetStringForUnitTestStatus (
IN UNIT_TEST_STATUS Status
)
@@ -75,6 +75,7 @@ GetStringForUnitTestStatus (
return mStatusStrings[Index].String;
}
}
+
//
// Return last entry if no match found.
//
@@ -82,7 +83,7 @@ GetStringForUnitTestStatus (
}
STATIC
-CONST CHAR8*
+CONST CHAR8 *
GetStringForFailureType (
IN FAILURE_TYPE Failure
)
@@ -97,10 +98,11 @@ GetStringForFailureType (
return mFailureTypeStrings[Index].String;
}
}
+
//
// Return last entry if no match found.
//
- DEBUG((DEBUG_INFO, "%a Failure Type does not have string defined 0x%X\n", __FUNCTION__, (UINT32)Failure));
+ DEBUG ((DEBUG_INFO, "%a Failure Type does not have string defined 0x%X\n", __FUNCTION__, (UINT32)Failure));
return mFailureTypeStrings[Index].String;
}
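
For context, GetStringForUnitTestStatus and GetStringForFailureType (reformatted in the hunks above) follow the same pattern: scan the table linearly and fall back to the sentinel last entry when nothing matches. A minimal standalone sketch of that pattern in plain C, with illustrative types and values rather than the edk2 definitions:

#include <stdio.h>

/* Illustrative stand-ins for UNIT_TEST_STATUS and the table entry
   type; these are not the edk2 definitions. */
typedef unsigned int  TEST_STATUS;

struct STATUS_STRING {
  TEST_STATUS  Status;
  const char   *String;
};

#define ARRAY_SIZE(a)  (sizeof (a) / sizeof ((a)[0]))

static const struct STATUS_STRING  mStrings[] = {
  { 1, "PASSED"      },
  { 2, "FAILED"      },
  { 0, "**UNKNOWN**" }  /* sentinel: returned when no entry matches */
};

static const char *
GetStringForStatus (
  TEST_STATUS  Status
  )
{
  size_t  Index;

  /* Scan every entry except the trailing sentinel. */
  for (Index = 0; Index < ARRAY_SIZE (mStrings) - 1; Index++) {
    if (mStrings[Index].Status == Status) {
      return mStrings[Index].String;
    }
  }

  /* No match found; return the sentinel last entry. */
  return mStrings[ARRAY_SIZE (mStrings) - 1].String;
}

int
main (void)
{
  printf ("%s\n", GetStringForStatus (2));   /* prints FAILED */
  printf ("%s\n", GetStringForStatus (99));  /* prints **UNKNOWN** */
  return 0;
}
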
@@ -128,7 +130,7 @@ OutputUnitTestFrameworkReport (
Passed = 0;
Failed = 0;
NotRun = 0;
- Suite = NULL;
+ Suite = NULL;
Framework = (UNIT_TEST_FRAMEWORK *)FrameworkHandle;
if (Framework == NULL) {
@@ -139,16 +141,16 @@ OutputUnitTestFrameworkReport (
ReportPrint ("------------- UNIT TEST FRAMEWORK RESULTS ---------------\n");
ReportPrint ("---------------------------------------------------------\n");
- //print the version and time
+ // print the version and time
//
// Iterate all suites
//
- for (Suite = (UNIT_TEST_SUITE_LIST_ENTRY*)GetFirstNode(&Framework->TestSuiteList);
- (LIST_ENTRY*)Suite != &Framework->TestSuiteList;
- Suite = (UNIT_TEST_SUITE_LIST_ENTRY*)GetNextNode(&Framework->TestSuiteList, (LIST_ENTRY*)Suite)) {
-
- Test = NULL;
+ for (Suite = (UNIT_TEST_SUITE_LIST_ENTRY *)GetFirstNode (&Framework->TestSuiteList);
+ (LIST_ENTRY *)Suite != &Framework->TestSuiteList;
+ Suite = (UNIT_TEST_SUITE_LIST_ENTRY *)GetNextNode (&Framework->TestSuiteList, (LIST_ENTRY *)Suite))
+ {
+ Test = NULL;
SPassed = 0;
SFailed = 0;
SNotRun = 0;
@@ -161,10 +163,10 @@ OutputUnitTestFrameworkReport (
//
// Iterate all tests within the suite
//
- for (Test = (UNIT_TEST_LIST_ENTRY*)GetFirstNode(&(Suite->UTS.TestCaseList));
- (LIST_ENTRY*)Test != &(Suite->UTS.TestCaseList);
- Test = (UNIT_TEST_LIST_ENTRY*)GetNextNode(&(Suite->UTS.TestCaseList), (LIST_ENTRY*)Test)) {
-
+ for (Test = (UNIT_TEST_LIST_ENTRY *)GetFirstNode (&(Suite->UTS.TestCaseList));
+ (LIST_ENTRY *)Test != &(Suite->UTS.TestCaseList);
+ Test = (UNIT_TEST_LIST_ENTRY *)GetNextNode (&(Suite->UTS.TestCaseList), (LIST_ENTRY *)Test))
+ {
ReportPrint ("*********************************************************\n");
ReportPrint (" CLASS NAME: %a\n", Test->UT.Name);
ReportPrint (" TEST: %a\n", Test->UT.Description);
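
The two for loops reformatted above walk BaseLib's circular doubly-linked lists, where the list head is a sentinel node and iteration stops when the cursor wraps back around to it. A plain-C analogue of that traversal, assuming the link member sits first in the containing struct so the cast is valid (a simplification, not the MdePkg implementation):

#include <stdio.h>

/* Plain-C analogue of MdePkg's LIST_ENTRY; illustrative only. */
typedef struct _LIST_ENTRY {
  struct _LIST_ENTRY  *ForwardLink;
  struct _LIST_ENTRY  *BackLink;
} LIST_ENTRY;

typedef struct {
  LIST_ENTRY  Entry;   /* first member, so the cast below is valid */
  const char  *Name;
} TEST_NODE;

static void
InsertTail (
  LIST_ENTRY  *Head,
  LIST_ENTRY  *Node
  )
{
  Node->ForwardLink           = Head;
  Node->BackLink              = Head->BackLink;
  Head->BackLink->ForwardLink = Node;
  Head->BackLink              = Node;
}

int
main (void)
{
  LIST_ENTRY  Head = { &Head, &Head };  /* empty list: head points at itself */
  TEST_NODE   A    = { { NULL, NULL }, "TestA" };
  TEST_NODE   B    = { { NULL, NULL }, "TestB" };
  TEST_NODE   *Node;

  InsertTail (&Head, &A.Entry);
  InsertTail (&Head, &B.Entry);

  /* Same shape as the suite/test loops in the diff: start at the first
     node and stop when the cursor wraps back around to the list head. */
  for (Node = (TEST_NODE *)Head.ForwardLink;
       (LIST_ENTRY *)Node != &Head;
       Node = (TEST_NODE *)Node->Entry.ForwardLink)
  {
    printf ("%s\n", Node->Name);
  }

  return 0;
}
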
@@ -178,41 +180,42 @@ OutputUnitTestFrameworkReport (
}
switch (Test->UT.Result) {
- case UNIT_TEST_PASSED:
- SPassed++;
- break;
- case UNIT_TEST_ERROR_TEST_FAILED:
- SFailed++;
- break;
- case UNIT_TEST_PENDING: // Fall through...
- case UNIT_TEST_RUNNING: // Fall through...
- case UNIT_TEST_ERROR_PREREQUISITE_NOT_MET:
- SNotRun++;
- break;
- default:
- break;
+ case UNIT_TEST_PASSED:
+ SPassed++;
+ break;
+ case UNIT_TEST_ERROR_TEST_FAILED:
+ SFailed++;
+ break;
+ case UNIT_TEST_PENDING: // Fall through...
+ case UNIT_TEST_RUNNING: // Fall through...
+ case UNIT_TEST_ERROR_PREREQUISITE_NOT_MET:
+ SNotRun++;
+ break;
+ default:
+ break;
}
+
ReportPrint ("**********************************************************\n");
- } //End Test iteration
+ } // End Test iteration
ReportPrint ("+++++++++++++++++++++++++++++++++++++++++++++++++++++++++\n");
ReportPrint ("Suite Stats\n");
ReportPrint (" Passed: %d (%d%%)\n", SPassed, (SPassed * 100)/(SPassed+SFailed+SNotRun));
ReportPrint (" Failed: %d (%d%%)\n", SFailed, (SFailed * 100) / (SPassed + SFailed + SNotRun));
ReportPrint (" Not Run: %d (%d%%)\n", SNotRun, (SNotRun * 100) / (SPassed + SFailed + SNotRun));
- ReportPrint ("+++++++++++++++++++++++++++++++++++++++++++++++++++++++++\n" );
+ ReportPrint ("+++++++++++++++++++++++++++++++++++++++++++++++++++++++++\n");
- Passed += SPassed; //add to global counters
- Failed += SFailed; //add to global counters
- NotRun += SNotRun; //add to global counters
- }//End Suite iteration
+ Passed += SPassed; // add to global counters
+ Failed += SFailed; // add to global counters
+ NotRun += SNotRun; // add to global counters
+ }// End Suite iteration
ReportPrint ("=========================================================\n");
ReportPrint ("Total Stats\n");
ReportPrint (" Passed: %d (%d%%)\n", Passed, (Passed * 100) / (Passed + Failed + NotRun));
ReportPrint (" Failed: %d (%d%%)\n", Failed, (Failed * 100) / (Passed + Failed + NotRun));
ReportPrint (" Not Run: %d (%d%%)\n", NotRun, (NotRun * 100) / (Passed + Failed + NotRun));
- ReportPrint ("=========================================================\n" );
+ ReportPrint ("=========================================================\n");
return EFI_SUCCESS;
}
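
One caveat the report math carries over unchanged: each percentage divides by SPassed + SFailed + SNotRun (or the global equivalents), which is zero for a suite containing no test cases. A guard along these lines would avoid the division fault; SafePercent is a hypothetical helper, not part of the edk2 library:

/* Hypothetical helper, not part of the edk2 library: integer percentage
   that tolerates an empty suite (Total == 0). */
static unsigned int
SafePercent (
  unsigned int  Part,
  unsigned int  Total
  )
{
  return (Total == 0) ? 0 : ((Part * 100) / Total);
}

/* Usage, mirroring the suite stats above:
     ReportPrint ("  Passed:  %d (%d%%)\n",
                  SPassed, SafePercent (SPassed, SPassed + SFailed + SNotRun));
*/
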