6 Commits

Author SHA1 Message Date
Redacted
8c9bb12834 Update CMakeLists.txt 2024-06-26 23:28:53 -04:00
875166c6b6 Nice structural refactor part 1 2024-06-26 12:28:55 -04:00
eea8e5547f Implement check_string_eq 2024-06-26 12:16:14 -04:00
20ed600b89 Implement check variants v1 2024-06-26 12:13:19 -04:00
75eb8f52d9 Merge remote-tracking branch 'origin/main' 2024-06-25 11:05:37 -04:00
b8df31dd50 Wrote log formatters for jtest. Cleaned up test running code. 2024-06-25 11:05:14 -04:00
4 changed files with 171 additions and 94 deletions

View File

@@ -32,7 +32,7 @@ CPMAddPackage(
CPMAddPackage(
NAME jlog
URL https://git.redacted.cc/josh/jlog/archive/Prerelease-5.zip
URL https://git.redacted.cc/josh/jlog/archive/Prerelease-9.zip
)

View File

@@ -6,103 +6,64 @@
#include <vector>
#include <jlog/jlog.hpp>
// TODO: Move implementation to jtest::detail
// have this file primarily expose the macros intended for users
namespace jtest {
// Requirements
//
// Can't we just store a struct in a global vector per test?
// -maxine
/// Structure to store test metadata as tests are initially registered, to be run later.
struct testdef
{
std::string testname;
std::function<void()> callback;
std::string file; // <- & is not needed here -maxine
std::string file;
int line;
bool passed;
};
// Globals for test tracking
std::vector<testdef> testlist;
int rantests;
int passedtests;
int failedtests;
void definetest(const std::string& testname, const std::function<void()>& callback, const std::string& file, int line)
{
testlist.push_back(testdef(testname, callback, file, line));
}
/// Generates and returns a sequence of logger tokens pre-formatted with the test data.
std::vector<jlog::token> log_test_format(const std::string& testname, const std::string& file, int line, bool passed);
// TODO: Implement check variants
// TODO: implement streaming a custom failure message with << operator
// i.e. : check(my_cond) << "The condition is not true!"
bool check(bool condition) {
if (!condition)
/// Generates and returns a sequence of logger tokens for the test summary printed at the end of testing.
std::vector<jlog::token> log_test_tracking_format();
/// Registers a test internally.
void definetest(const std::string& testname, const std::function<void()>& callback, const std::string& file, int line);
// TODO: implement streaming a custom failure message with << operator on check statements
/// Raises an exception if the given condition is false, otherwise returns true.
bool check(bool condition);
/// Raises an exception if the given values compare unequal, otherwise returns true.
template <typename T>
bool check_eq(T a, T b) {
if (a != b)
throw std::runtime_error("Test check failed!!");
return condition;
}
bool test(const std::string& testname, const std::function<void()>& callback, const std::string& file, int line)
{
try {
callback();
} catch(const std::exception& e) {
rantests++;
failedtests++;
jlog::log({
{.colorCode = jlog::ansi_escape_codes::FG_WHITE, .content ="JTEST"},
{.content = std::format("{}:{}", file, line)},
{.colorCode = jlog::ansi_escape_codes::FG_RED, .content = testname},
{.content = "Failed:", .delimiter = ""},
{.content = std::format("{}/{}", rantests, testlist.size())},
});
return false;
}
rantests++;
passedtests++;
jlog::log({
{.colorCode = jlog::ansi_escape_codes::FG_WHITE, .content ="JTEST"},
{.content = std::format("{}:{}", file, line)},
{.colorCode = jlog::ansi_escape_codes::FG_GREEN, .content = testname},
{.content = "Passed:", .delimiter = ""},
{.content = std::format("{}/{}", rantests, testlist.size())},
});
return true;
}
// Storing a global vector with all the tests should allow us to loop through all the tests
// We can also possibly do more tracing and allow other fancy features.
// -maxine
void run_tests() {
//int i;
//for (int i = 1; const testdef& td : testlist)
for (testdef& td : testlist)
{
td.passed = test(td.testname, td.callback, td.file, td.line);
//i++;
}
/// Raises an exception if the given floats are not equal within the given epsilon, otherwise returns true.
/// @param epsilon The maximum absolute difference allowed for the values to be considered equal.
bool check_float_eq(float a, float b, float epsilon = 1e-3f);
jlog::log({
//{.content = std::format("{}:{}", file, line)},
{.colorCode = jlog::ansi_escape_codes::FG_WHITE, .content = "JTEST"},
{.content = std::format("Tests Ran: [{}/{}] Failed: [{}/{}] Passed: [{}/{}]", rantests, testlist.size(), failedtests, rantests, passedtests, rantests), .delimiter = ""},
});
//USINFO(std::format("Tests Ran: [{}/{}] Failed: [{}/{}] Passed: [{}/{}]", rantests, testlist.size(), failedtests, rantests, passedtests, rantests))
if (passedtests == rantests)
{
USINFO("All tests passed congratulations! Do you wanna cookie?");
jlog::log({
//{.content = std::format("{}:{}", file, line)},
{.colorCode = jlog::ansi_escape_codes::FG_WHITE, .content = "JTEST"},
{.colorCode = jlog::ansi_escape_codes::FG_GREEN, .content = "All tests passed congratulations! Do you wanna cookie?", .delimiter = ""},
});
}
}
/// Raises an exception if the given doubles are not equal within the given epsilon, otherwise returns true.
/// @param epsilon The maximum absolute difference allowed for the values to be considered equal.
bool check_double_eq(double a, double b, double epsilon = 1e-3);
/// Raises an exception if the given strings are not equal, otherwise returns true.
bool check_string_eq(const std::string& a, const std::string& b);
/// Runs a given test, generates a report, and returns the test result as a boolean.
bool test(const std::string& testname, const std::function<void()>& callback, const std::string& file, int line);
/// Runs all tests that have been registered, and generates a final summary report for the testing suite.
void run_tests();
}
//#define TEST(a, b) jtest::test(a, b, __FILE__, __LINE__);
// Same definition as before essentially, but points to a different function which adds the test to the global vector.
// -maxine
/// TEST macro to be used by API consumers. Automatically captures source location data such as the current file and line.
#define TEST(a, b) jtest::definetest(a, b, __FILE__, __LINE__);
/// TODO: Implement check macros
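
For reference, this is how the declarations above fit together from a consumer's point of view. A minimal sketch using only the API declared in this header, with made-up test names and values:

#include <jtest/jtest.hpp>

int main()
{
    // Registration only records the test; nothing runs until run_tests().
    TEST("Math::Addition", []{ jtest::check_eq(2 + 2, 4); });
    TEST("Math::Epsilon",  []{ jtest::check_float_eq(0.1f + 0.2f, 0.3f); });
    TEST("Text::Equality", []{ jtest::check_string_eq("aa", "aa"); });

    // Runs every registered test, logging one line per test plus the final summary.
    jtest::run_tests();
}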

View File

@@ -1,20 +1,23 @@
//
// Created by dawsh on 6/16/24.
//
#include <cassert>
#include "include/jtest/jtest.hpp"
// Josh's Test Library
// A no-frills, straightforward unit testing module in and for Modern C++.
// Created by Joshua O'Leary @ Redacted Software, June 2024
// This work is dedicated to the public domain.
// Contact: josh@redacted.cc, git.redacted.cc/josh
// TODO: Provide introspection into which assertion check failed.
// TODO: Provide alternate checks (google test has specific assertions for handling floats, for example) (Are these actually necessary??)
// TODO: Implement log-file selection in jlog so we can log to test_results.txt specifically.
// TODO: Provide benchmarking on test running-time
#include <jtest/jtest.hpp>
void TestA() { jtest::check(std::string("Bruh") == "Bruh"); } // wrap one side in std::string: two string literals would compare by pointer, not content
void TestB() { jtest::check(6*6 == 36); }
void TestC() { jtest::check(6+9 == 69); } // 6+9 is 15, so this check fails
int main(int argc, char** argv)
{
TEST("Test1", []{
jtest::check(2+2 == 4);
});
@@ -31,11 +34,15 @@ int main(int argc, char** argv)
});
/*
TEST("LMAO");
TEST("KEKERINO")
TEST(":)")
TEST("Test4", []
{
assert(false);
});
*/
// The "TestGroup::" prefix is only part of the name; grouping doesn't actually do anything yet
TEST("TestGroup::A", TestA);
TEST("TestGroup::B", TestB);
TEST("TestGroup::C", TestC);
jtest::run_tests();
}
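
The introspection and check-macro TODOs above point at the same mechanism: a macro can capture the expression text and source location that a plain function cannot. A minimal sketch using only the standard library; CHECK and check_at are hypothetical names, not part of this diff:

#include <format>
#include <stdexcept>

namespace jtest {
    // Throws with the failing expression and its source location baked into the message.
    inline bool check_at(bool condition, const char* expr, const char* file, int line) {
        if (!condition)
            throw std::runtime_error(std::format("Check `{}` failed at {}:{}", expr, file, line));
        return true;
    }
}

// #cond stringizes the expression; __FILE__ and __LINE__ pin down where it lives.
#define CHECK(cond) jtest::check_at((cond), #cond, __FILE__, __LINE__)

// e.g. CHECK(6 * 6 == 36); a failure report would then name this exact expression and line.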

View File

@@ -1,7 +1,116 @@
//
// Created by dawsh on 6/16/24.
//
#include <jtest/jtest.hpp>
#include <cmath> // std::abs overloads for the float/double checks
#include <stdexcept> // std::runtime_error
namespace jtest {
// Globals for test tracking
std::vector<testdef> testlist;
int rantests;
int passedtests;
int failedtests;
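// Builds one report line as jlog tokens: the JTEST tag, the file:line of the test, a color-coded pass/fail marker, and the running count.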
std::vector<jlog::token> log_test_format(const std::string &testname, const std::string &file, int line,
bool passed) {
std::vector<jlog::token> wtokens;
auto head = jlog::token{.colorCode = jlog::ansi_escape_codes::FG_WHITE, .content ="JTEST"};
auto filedata = jlog::token{.content = std::format("{}:{}", file, line)};
std::vector<jlog::token> teststate;
if (passed)
{
teststate.push_back(jlog::token{.colorCode = jlog::ansi_escape_codes::FG_GREEN, .content = testname});
teststate.push_back(jlog::token{.content = "Passed:", .delimiter = ""});
} else
{
teststate.push_back(jlog::token{.colorCode = jlog::ansi_escape_codes::FG_RED, .content = testname});
teststate.push_back(jlog::token{.content = "Failed:", .delimiter = ""});
}
auto raninfo = jlog::token{.content = std::format("{}/{}", rantests, testlist.size())};
wtokens.push_back(head);
wtokens.push_back(filedata);
wtokens.insert(wtokens.end(), teststate.begin(), teststate.end());
wtokens.push_back(raninfo);
return wtokens;
}
std::vector<jlog::token> log_test_tracking_format() {
auto head = jlog::token{.colorCode = jlog::ansi_escape_codes::FG_WHITE, .content = "JTEST"};
auto tracking = jlog::token{.content = std::format("Tests Ran: [{}/{}] Failed: [{}/{}] Passed: [{}/{}]",
rantests,
testlist.size(),
failedtests,
rantests,
passedtests,
rantests),
.delimiter = ""};
return {head, tracking};
}
void definetest(const std::string &testname, const std::function<void()> &callback, const std::string &file,
int line) {
testlist.push_back(testdef(testname, callback, file, line));
}
bool check(bool condition) {
if (!condition)
throw std::runtime_error("Test check failed!!");
return condition;
}
bool check_float_eq(float a, float b, float epsilon) {
if (std::abs(a - b) > epsilon) {
throw std::runtime_error ("Test check failed!!");
}
return true;
}
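// For instance (hypothetical calls, illustrating the epsilon behaviour; not part of this change):
//   check_float_eq(0.1f + 0.2f, 0.3f); // passes: the rounding error is far below the 1e-3 default
//   check_float_eq(1.0f, 1.01f);       // throws: the difference of 1e-2 exceeds the default epsilon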
bool check_double_eq(double a, double b, double epsilon) {
if (std::abs(a - b) > epsilon) {
throw std::runtime_error ("Test check failed!!");
}
return true;
}
bool check_string_eq(const std::string &a, const std::string &b) {
if (a != b) // std::string's operator!= compares character content, which is what we want here.
throw std::runtime_error("Test check failed!!");
return true;
}
bool test(const std::string &testname, const std::function<void()> &callback, const std::string &file, int line) {
bool passed = true;
try { callback(); }
catch(const std::exception& e)
{ passed = false; }
rantests++;
if (passed) { passedtests++; }
else { failedtests++; }
jlog::log(log_test_format(testname, file, line, passed));
return passed;
}
void run_tests() {
for (testdef& td : testlist)
{
td.passed = test(td.testname, td.callback, td.file, td.line);
}
jlog::log(log_test_tracking_format());
}
}
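
The streaming-message TODO in the header, check(my_cond) << "The condition is not true!", can be built by deferring the throw from check() itself to the destructor of a small returned object, so the rest of the expression gets a chance to append context first. A minimal sketch of that idea, assuming check() is free to change its return type; check_result is a hypothetical name, not part of this diff:

#include <sstream>
#include <stdexcept>

struct check_result {
    bool ok;
    std::ostringstream message;

    // Streamed values are collected only when the check has failed.
    template <typename T>
    check_result& operator<<(const T& value) {
        if (!ok) message << value;
        return *this;
    }

    // The temporary is destroyed at the end of the full expression, after
    // every operator<< has run; only then do we throw.
    ~check_result() noexcept(false) {
        if (!ok) throw std::runtime_error("Test check failed! " + message.str());
    }
};

check_result check(bool condition) { return {condition, {}}; }

// Usage: check(my_cond) << "The condition is not true!";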