Implement better output. Running tally of tests. Automatic test discovery.

2024-06-18 11:04:08 -04:00
parent 9af61b7c34
commit f6fcf30f28
2 changed files with 38 additions and 17 deletions

View File

@@ -19,10 +19,12 @@ namespace jtest {
const std::function<void()>& callback;
const std::string file; // <- & is not needed here -maxine
int line;
bool passed;
};
// Globals for test tracking
std::vector<testdef> testlist;
int rantests;
int passedtests;
int failedtests;
@@ -42,21 +44,27 @@ namespace jtest {
try {
callback();
} catch(const std::exception& e) {
rantests++;
failedtests++;
jlog::log({
{.colorCode = jlog::ansi_escape_codes::FG_WHITE, .content ="JTEST"},
{.content = std::format("{}:{}", file, line)},
{.colorCode = jlog::ansi_escape_codes::FG_RED, .content = testname},
{.content = "Failed:", .delimiter = ""}
{.content = "Failed:", .delimiter = ""},
{.content = std::format("{}/{}", rantests, testlist.size())},
});
failedtests++;
return false;
}
jlog::log({
{.content = std::format("{}:{}", file, line)},
{.colorCode = jlog::ansi_escape_codes::FG_GREEN, .content = testname},
{.content = "Passed:", .delimiter = ""}
});
rantests++;
passedtests++;
jlog::log({
{.colorCode = jlog::ansi_escape_codes::FG_WHITE, .content ="JTEST"},
{.content = std::format("{}:{}", file, line)},
{.colorCode = jlog::ansi_escape_codes::FG_GREEN, .content = testname},
{.content = "Passed:", .delimiter = ""},
{.content = std::format("{}/{}", rantests, testlist.size())},
});
return true;
}
@@ -64,15 +72,29 @@ namespace jtest {
// We can also possibly do more tracing and allow other fancy features.
// -maxine
void run_tests() {
for (int i = 1; const testdef& td : testlist)
//int i;
//for (int i = 1; const testdef& td : testlist)
for (testdef& td : testlist)
{
test(td.testname, td.callback, td.file, td.line);
USINFO("Tests ran: " + std::to_string(i) + "/" + std::to_string(testlist.size()));
USINFO("Tests passed: " + std::to_string(passedtests));
USINFO("Tests failed: " + std::to_string(failedtests));
i++;
td.passed = test(td.testname, td.callback, td.file, td.line);
//i++;
}
jlog::log({
//{.content = std::format("{}:{}", file, line)},
{.colorCode = jlog::ansi_escape_codes::FG_WHITE, .content = "JTEST"},
{.content = std::format("Tests Ran: [{}/{}] Failed: [{}/{}] Passed: [{}/{}]", rantests, testlist.size(), failedtests, rantests, passedtests, rantests), .delimiter = ""},
});
//USINFO(std::format("Tests Ran: [{}/{}] Failed: [{}/{}] Passed: [{}/{}]", rantests, testlist.size(), failedtests, rantests, passedtests, rantests))
if (passedtests == rantests)
{
USINFO("All tests passed congratulations! Do you wanna cookie?");
jlog::log({
//{.content = std::format("{}:{}", file, line)},
{.colorCode = jlog::ansi_escape_codes::FG_WHITE, .content = "JTEST"},
{.colorCode = jlog::ansi_escape_codes::FG_GREEN, .content = "All tests passed congratulations! Do you wanna cookie?", .delimiter = ""},
});
}
}
}

View File

@@ -6,11 +6,8 @@
#include "include/jtest/jtest.hpp"
// TODO: Look into a different mechanism more similar to gtest wherein we have a TEST macro that declares and "registers" a test, and then inside our main block we call RUN_ALL_TESTS(argc, argv);
// TODO: Keep a running tally of total checks, checks passed, and checks failed
// TODO: Provide introspection insofar as which assertion check failed.
// TODO: Provide alternate checks (Google Test has specific assertions for handling floats, for example) (Are these actually necessary??)
// TODO: Automatic test discovery
// TODO: Implement log-file-specification-capability in jlog so we can log to test_results.txt specifically.
// TODO: Provide benchmarking on test running-time
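The TEST-macro / RUN_ALL_TESTS idea in the TODO above is the usual gtest-style self-registration trick. A minimal, hypothetical sketch of that pattern follows; jtest_sketch, registrar, and JTEST_SKETCH_TEST are invented names for illustration and are not part of jtest's actual API.

// Hypothetical sketch only: jtest_sketch, registrar, and JTEST_SKETCH_TEST are
// invented names for illustration and are not part of jtest's actual API.
#include <functional>
#include <string>
#include <utility>
#include <vector>

namespace jtest_sketch {
    struct testdef {
        std::string testname;
        std::function<void()> callback;
        std::string file;
        int line;
        bool passed = false;
    };

    // A function-local static avoids initialization-order problems between
    // translation units that register tests.
    inline std::vector<testdef>& registry() {
        static std::vector<testdef> tests;
        return tests;
    }

    // Constructing one of these at namespace scope records a test at load time.
    struct registrar {
        registrar(std::string name, std::function<void()> cb, std::string file, int line) {
            registry().push_back({std::move(name), std::move(cb), std::move(file), line, false});
        }
    };
}

// Declares the test function, registers it, then opens its definition.
#define JTEST_SKETCH_TEST(name)                                                     \
    static void name();                                                             \
    static ::jtest_sketch::registrar name##_reg{#name, &name, __FILE__, __LINE__};  \
    static void name()

JTEST_SKETCH_TEST(addition_works) {
    // the real framework would call something like jtest::check(2 + 2 == 4) here
}

int main() {
    // Stand-in for a RUN_ALL_TESTS-style entry point: walk the registry and run each body.
    for (auto& td : jtest_sketch::registry()) {
        td.callback();
    }
}

Because the registrar objects are constructed before main() runs, linking a test file is enough to make its tests discoverable; nothing has to be listed by hand.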
@@ -23,12 +20,14 @@ int main(int argc, char** argv)
});
TEST("Test2", [] {
jtest::check(2+2 == 5);
//jtest::check(2+2 == 5);
jtest::check(2+2 == 4);
});
TEST("Test3", []
{
jtest::check(6+9 == 69);
//jtest::check(2+2 == 4);
});
/*