Nice structural refactor part 1
This commit is contained in:
@@ -11,81 +11,33 @@
|
||||
|
||||
namespace jtest {
|
||||
|
||||
// Can't we just store a struct in a global vector per test?
|
||||
// -maxine
|
||||
/// Structure to store test meta-data, as tests are initially registered, and ran later.
|
||||
/// Structure to store test meta-data, as tests are initially registered, and ran later.
struct testdef
{
    std::string testname;            // Human-readable name passed to the TEST macro.
    std::function<void()> callback;  // The test body to invoke.
    // BUG FIX: `file` was declared twice (duplicate member), which is a compile error.
    std::string file;                // Source file the test was registered from.
    int line;                        // Source line the test was registered from.
    bool passed;                     // Result, filled in after the test has run.
};
|
||||
|
||||
// Globals for test tracking
|
||||
std::vector<testdef> testlist;
|
||||
int rantests;
|
||||
int passedtests;
|
||||
int failedtests;
|
||||
|
||||
std::vector<jlog::token> log_test_format(const std::string& testname, const std::string& file, int line, bool passed)
|
||||
{
|
||||
std::vector<jlog::token> wtokens;
|
||||
/// Generates and returns a sequence of logger tokens pre-formatted to the test data.
|
||||
std::vector<jlog::token> log_test_format(const std::string& testname, const std::string& file, int line, bool passed);
|
||||
|
||||
auto head = jlog::token{.colorCode = jlog::ansi_escape_codes::FG_WHITE, .content ="JTEST"};
|
||||
/// Generates and returns a sequence of logger tokens for the test summary printed at the end of testing.
|
||||
std::vector<jlog::token> log_test_tracking_format();
|
||||
|
||||
auto filedata = jlog::token{.content = std::format("{}:{}", file, line)};
|
||||
/// Registers a test internally.
|
||||
void definetest(const std::string& testname, const std::function<void()>& callback, const std::string& file, int line);
|
||||
|
||||
std::vector<jlog::token> teststate;
|
||||
if (passed)
|
||||
{
|
||||
teststate.push_back(jlog::token{.colorCode = jlog::ansi_escape_codes::FG_GREEN, .content = testname});
|
||||
teststate.push_back(jlog::token{.content = "Passed:", .delimiter = ""});
|
||||
} else
|
||||
{
|
||||
teststate.push_back(jlog::token{.colorCode = jlog::ansi_escape_codes::FG_RED, .content = testname});
|
||||
teststate.push_back(jlog::token{.content = "Failed:", .delimiter = ""});
|
||||
}
|
||||
|
||||
auto raninfo = jlog::token{.content = std::format("{}/{}", rantests, testlist.size())};
|
||||
// TODO: implement streaming a custom failure message with << operator on check statements
|
||||
|
||||
wtokens.push_back(head);
|
||||
wtokens.push_back(filedata);
|
||||
wtokens.insert(wtokens.end(), teststate.begin(), teststate.end());
|
||||
wtokens.push_back(raninfo);
|
||||
|
||||
return wtokens;
|
||||
}
|
||||
|
||||
/// Generates and returns a sequence of logger tokens for the test summary printed at the end of testing.
std::vector<jlog::token> log_test_tracking_format()
{
    const auto summary = std::format("Tests Ran: [{}/{}] Failed: [{}/{}] Passed: [{}/{}]",
                                     rantests, testlist.size(),
                                     failedtests, rantests,
                                     passedtests, rantests);
    return {
        jlog::token{.colorCode = jlog::ansi_escape_codes::FG_WHITE, .content = "JTEST"},
        jlog::token{.content = summary, .delimiter = ""},
    };
}
|
||||
|
||||
void definetest(const std::string& testname, const std::function<void()>& callback, const std::string& file, int line)
|
||||
{
|
||||
testlist.push_back(testdef(testname, callback, file, line));
|
||||
}
|
||||
|
||||
// TODO: Implement check variants
|
||||
// TODO: implement streaming a custom failure message with << operator
|
||||
// i.e. : check(my_cond) << "The condition is not true!"
|
||||
|
||||
/// Raises an exception if the given condition is false, otherwise returns true.
bool check(bool condition) {
    if (condition)
        return condition;
    throw std::runtime_error("Test check failed!!");
}
|
||||
/// Raises an exception if the given condition is false, otherwise returns true.
|
||||
bool check(bool condition);
|
||||
|
||||
/// Raises an exception of the given values evaluate to not-equal, otherwise returns true.
|
||||
template <typename T>
|
||||
bool check_eq(T a, T b) {
|
||||
if (a != b)
|
||||
@@ -93,98 +45,25 @@ namespace jtest {
|
||||
return true;
|
||||
}
|
||||
|
||||
/// Raises an exception if the given floats are not equal, up to the given epsilon. Otherwise returns true.
/// @param epsilon The accuracy required to pass the test.
bool check_float_eq(float a, float b, float epsilon = 1e-3f) {
    const float delta = std::abs(a - b);
    if (delta > epsilon)
        throw std::runtime_error("Test check failed!!");
    return true;
}
|
||||
/// Raises an exception if the given floats are not equal, up to the given epsilon. Otherwise returns true.
|
||||
/// @param epsilon The accuracy required to pass the test.
|
||||
bool check_float_eq(float a, float b, float epsilon = 1e-3f);
|
||||
|
||||
/// Raises an exception if the given doubles are not equal, up to the given epsilon. Otherwise returns true.
/// @param epsilon The accuracy required to pass the test.
bool check_double_eq(double a, double b, double epsilon = 1e-3f) {
    const double delta = std::abs(a - b);
    if (delta > epsilon)
        throw std::runtime_error("Test check failed!!");
    return true;
}
|
||||
/// Raises an exception if the given doubles are not equal, up to the given epsilon. Otherwise returns true.
|
||||
/// @param epsilon The accuracy required to pass the test.
|
||||
bool check_double_eq(double a, double b, double epsilon = 1e-3f);
|
||||
|
||||
/// Raises an exception if the given strings are not equal, otherwise returns true.
bool check_string_eq(const std::string& a, const std::string& b) {
    // BUG FIX: the original condition was inverted (`if (a == b) throw`), so the
    // check FAILED on equal strings and PASSED on unequal ones.
    if (a != b)
        throw std::runtime_error("Test check failed!!");
    return true;
}
|
||||
/// Raises an exception if the given strings are not equal, otherwise returns true.
|
||||
bool check_string_eq(const std::string& a, const std::string& b);
|
||||
|
||||
bool test(const std::string& testname, const std::function<void()>& callback, const std::string& file, int line)
|
||||
{
|
||||
bool passed = true;
|
||||
|
||||
try
|
||||
{
|
||||
callback();
|
||||
} catch(const std::exception& e)
|
||||
{
|
||||
passed = false;
|
||||
}
|
||||
|
||||
rantests++;
|
||||
|
||||
if (passed)
|
||||
{
|
||||
passedtests++;
|
||||
} else
|
||||
{
|
||||
failedtests++;
|
||||
}
|
||||
|
||||
jlog::log(log_test_format(testname, file, line, passed));
|
||||
|
||||
return passed;
|
||||
/*
|
||||
try {
|
||||
callback();
|
||||
} catch(const std::exception& e) {
|
||||
rantests++;
|
||||
failedtests++;
|
||||
jlog::log(log_test_format(testname, file, line, false));
|
||||
return false;
|
||||
}
|
||||
|
||||
rantests++;
|
||||
passedtests++;
|
||||
jlog::log(log_test_format(testname, file, line, true));
|
||||
return true;
|
||||
*/
|
||||
}
|
||||
|
||||
// Storing a global vector with all the tests should allow us to loop through all the tests
|
||||
// We can also possibly do more tracing and allow other fancy features.
|
||||
// -maxine
|
||||
void run_tests() {
|
||||
//int i;
|
||||
//for (int i = 1; const testdef& td : testlist)
|
||||
for (testdef& td : testlist)
|
||||
{
|
||||
td.passed = test(td.testname, td.callback, td.file, td.line);
|
||||
//i++;
|
||||
}
|
||||
|
||||
jlog::log(log_test_tracking_format());
|
||||
|
||||
/*
|
||||
if (passedtests == rantests)
|
||||
{
|
||||
//USINFO("All tests passed congratulations! Do you wanna cookie?");
|
||||
jlog::log({
|
||||
//{.content = std::format("{}:{}", file, line)},
|
||||
{.colorCode = jlog::ansi_escape_codes::FG_WHITE, .content = "JTEST"},
|
||||
{.colorCode = jlog::ansi_escape_codes::FG_GREEN, .content = "All tests passed congratulations! Do you wanna cookie?", .delimiter = ""},
|
||||
});
|
||||
}
|
||||
*/
|
||||
}
|
||||
/// Runs a given test, generates a report, and returns the test result as a boolean.
|
||||
bool test(const std::string& testname, const std::function<void()>& callback, const std::string& file, int line);
|
||||
|
||||
/// Runs all tests that have been registered, and generates a final summary report for the testing suite.
|
||||
void run_tests();
|
||||
}
|
||||
|
||||
//#define TEST(a, b) jtest::test(a, b, __FILE__, __LINE__);
|
||||
// Same definition as before essentially, but points to a different function which adds the test to the global vector.
|
||||
// -maxine
|
||||
/// TEST macro to be used by API consumers. Automatically grabs reflection data such as current file, line, function name, etc.
|
||||
#define TEST(a, b) jtest::definetest(a, b, __FILE__, __LINE__);
|
||||
|
||||
/// TODO: Implement check macros
|
||||
|
@@ -1,7 +1,116 @@
|
||||
//
|
||||
// Created by dawsh on 6/16/24.
|
||||
//
|
||||
|
||||
|
||||
#include <jtest/jtest.hpp>
|
||||
namespace jtest {
|
||||
|
||||
// Globals for test tracking
|
||||
std::vector<testdef> testlist;
|
||||
int rantests;
|
||||
int passedtests;
|
||||
int failedtests;
|
||||
|
||||
/// Generates and returns a sequence of logger tokens pre-formatted to the test data.
std::vector<jlog::token> log_test_format(const std::string &testname, const std::string &file, int line,
                                         bool passed) {
    // Pick color and label according to the outcome.
    const auto outcome_color = passed ? jlog::ansi_escape_codes::FG_GREEN
                                      : jlog::ansi_escape_codes::FG_RED;
    const std::string outcome_label = passed ? "Passed:" : "Failed:";

    // Token order: header, file:line, colored test name, outcome label, ran/total counter.
    std::vector<jlog::token> wtokens;
    wtokens.push_back(jlog::token{.colorCode = jlog::ansi_escape_codes::FG_WHITE, .content ="JTEST"});
    wtokens.push_back(jlog::token{.content = std::format("{}:{}", file, line)});
    wtokens.push_back(jlog::token{.colorCode = outcome_color, .content = testname});
    wtokens.push_back(jlog::token{.content = outcome_label, .delimiter = ""});
    wtokens.push_back(jlog::token{.content = std::format("{}/{}", rantests, testlist.size())});

    return wtokens;
}
|
||||
|
||||
/// Generates and returns a sequence of logger tokens for the test summary printed at the end of testing.
std::vector<jlog::token> log_test_tracking_format() {
    const std::string body = std::format("Tests Ran: [{}/{}] Failed: [{}/{}] Passed: [{}/{}]",
                                         rantests, testlist.size(),
                                         failedtests, rantests,
                                         passedtests, rantests);
    auto head = jlog::token{.colorCode = jlog::ansi_escape_codes::FG_WHITE, .content = "JTEST"};
    auto tracking = jlog::token{.content = body, .delimiter = ""};
    return {head, tracking};
}
|
||||
|
||||
/// Registers a test internally, to be executed later by run_tests().
void definetest(const std::string &testname, const std::function<void()> &callback, const std::string &file,
                int line) {
    // Append to the global registry; `passed` is filled in when the test runs.
    testlist.push_back(testdef{testname, callback, file, line});
}
|
||||
|
||||
/// Raises an exception if the given condition is false, otherwise returns true.
bool check(bool condition) {
    if (!condition) {
        throw std::runtime_error("Test check failed!!");
    }
    return true;
}
|
||||
|
||||
/// Raises an exception if the given floats are not equal, up to the given epsilon. Otherwise returns true.
bool check_float_eq(float a, float b, float epsilon) {
    // Keep the `> epsilon` comparison so NaN inputs behave exactly as before.
    const float delta = std::abs(a - b);
    if (delta > epsilon)
        throw std::runtime_error ("Test check failed!!");
    return true;
}
|
||||
|
||||
/// Raises an exception if the given doubles are not equal, up to the given epsilon. Otherwise returns true.
bool check_double_eq(double a, double b, double epsilon) {
    // Keep the `> epsilon` comparison so NaN inputs behave exactly as before.
    const double delta = std::abs(a - b);
    if (delta > epsilon)
        throw std::runtime_error ("Test check failed!!");
    return true;
}
|
||||
|
||||
/// Raises an exception if the given strings are not equal, otherwise returns true.
bool check_string_eq(const std::string &a, const std::string &b) {
    // BUG FIX: the original condition was inverted (`if (a == b) throw`), so the
    // check FAILED on equal strings and PASSED on unequal ones.
    if (a != b)
        throw std::runtime_error("Test check failed!!");
    return true;
}
|
||||
|
||||
bool test(const std::string &testname, const std::function<void()> &callback, const std::string &file, int line) {
|
||||
bool passed = true;
|
||||
|
||||
try { callback(); }
|
||||
catch(const std::exception& e)
|
||||
{ passed = false; }
|
||||
|
||||
rantests++;
|
||||
|
||||
if (passed) { passedtests++; }
|
||||
else { failedtests++; }
|
||||
|
||||
jlog::log(log_test_format(testname, file, line, passed));
|
||||
|
||||
return passed;
|
||||
|
||||
}
|
||||
|
||||
void run_tests() {
|
||||
|
||||
for (testdef& td : testlist)
|
||||
{
|
||||
td.passed = test(td.testname, td.callback, td.file, td.line);
|
||||
|
||||
}
|
||||
|
||||
jlog::log(log_test_tracking_format());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
Reference in New Issue
Block a user