Compare commits
13 Commits
Prerelease
...
Prerelease
Author | SHA1 | Date | |
---|---|---|---|
|
03795a2f3a | ||
5e8c85feea | |||
4ea05e09b7 | |||
b57cc68b6f | |||
|
8c9bb12834 | ||
875166c6b6 | |||
eea8e5547f | |||
20ed600b89 | |||
75eb8f52d9 | |||
b8df31dd50 | |||
21cc5313f2 | |||
ab57cd5774 | |||
e04b742954 |
@@ -32,7 +32,7 @@ CPMAddPackage(
|
||||
|
||||
CPMAddPackage(
|
||||
NAME jlog
|
||||
URL https://git.redacted.cc/josh/jlog/archive/Prerelease-5.zip
|
||||
URL https://git.redacted.cc/josh/jlog/archive/Prerelease-12.zip
|
||||
)
|
||||
|
||||
|
||||
|
@@ -6,103 +6,68 @@
|
||||
#include <vector>
|
||||
#include <jlog/jlog.hpp>
|
||||
|
||||
//#define NDEBUG for disabling assert, but tests still end up passing.
|
||||
|
||||
// TODO: Move implementation to jtest::detail
|
||||
// have this file primarily expose the macros intended for users
|
||||
|
||||
namespace jtest {
|
||||
|
||||
// Requirements
|
||||
//
|
||||
|
||||
// Can't we just store a struct in a global vector per test?
|
||||
// -maxine
|
||||
/// Structure to store test meta-data, as tests are initially registered, and ran later.
|
||||
struct testdef
|
||||
{
|
||||
const std::string& testname;
|
||||
const std::function<void()>& callback;
|
||||
const std::string file; // <- & is not needed here -maxine
|
||||
std::string testname;
|
||||
std::function<void()> callback;
|
||||
std::string file;
|
||||
int line;
|
||||
bool passed;
|
||||
};
|
||||
|
||||
// Globals for test tracking
|
||||
std::vector<testdef> testlist;
|
||||
int rantests;
|
||||
int passedtests;
|
||||
int failedtests;
|
||||
/// jlog log wrapper for jtest
|
||||
void log(std::vector<jlog::token> tokens);
|
||||
|
||||
void definetest(const std::string& testname, const std::function<void()>& callback, const std::string& file, int line)
|
||||
{
|
||||
testlist.push_back(testdef(testname, callback, file, line));
|
||||
}
|
||||
/// Generates and returns a sequence of logger tokens pre-formatted to the test data.
|
||||
std::vector<jlog::token> log_test_format(const std::string& testname, const std::string& file, int line, bool passed);
|
||||
|
||||
// TODO: Implement check variants
|
||||
// TODO: implement streaming a custom failure message with << operator
|
||||
// i.e. : check(my_cond) << "The condition is not true!"
|
||||
bool check(bool condition) {
|
||||
if (!condition)
|
||||
/// Generates and returns a sequence of logger tokens for the test summary printed at the end of testing.
|
||||
std::vector<jlog::token> log_test_tracking_format();
|
||||
|
||||
/// Registers a test internally.
|
||||
void definetest(const std::string& testname, const std::function<void()>& callback, const std::string& file, int line);
|
||||
|
||||
|
||||
// TODO: implement streaming a custom failure message with << operator on check statements
|
||||
|
||||
/// Raises an exception if the given condition is false, otherwise returns true.
|
||||
bool check(bool condition);
|
||||
|
||||
/// Raises an exception if the given values evaluate to not-equal, otherwise returns true.
|
||||
template <typename T>
|
||||
bool check_eq(T a, T b) {
|
||||
if (a != b)
|
||||
throw std::runtime_error("Test check failed!!");
|
||||
return condition;
|
||||
}
|
||||
|
||||
bool test(const std::string& testname, const std::function<void()>& callback, const std::string& file, int line)
|
||||
{
|
||||
try {
|
||||
callback();
|
||||
} catch(const std::exception& e) {
|
||||
rantests++;
|
||||
failedtests++;
|
||||
jlog::log({
|
||||
{.colorCode = jlog::ansi_escape_codes::FG_WHITE, .content ="JTEST"},
|
||||
{.content = std::format("{}:{}", file, line)},
|
||||
{.colorCode = jlog::ansi_escape_codes::FG_RED, .content = testname},
|
||||
{.content = "Failed:", .delimiter = ""},
|
||||
{.content = std::format("{}/{}", rantests, testlist.size())},
|
||||
});
|
||||
return false;
|
||||
}
|
||||
|
||||
rantests++;
|
||||
passedtests++;
|
||||
jlog::log({
|
||||
{.colorCode = jlog::ansi_escape_codes::FG_WHITE, .content ="JTEST"},
|
||||
{.content = std::format("{}:{}", file, line)},
|
||||
{.colorCode = jlog::ansi_escape_codes::FG_GREEN, .content = testname},
|
||||
{.content = "Passed:", .delimiter = ""},
|
||||
{.content = std::format("{}/{}", rantests, testlist.size())},
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
// Storing a global vector with all the tests should allow us to loop through all the tests
|
||||
// We can also possibly do more tracing and allow other fancy features.
|
||||
// -maxine
|
||||
void run_tests() {
|
||||
//int i;
|
||||
//for (int i = 1; const testdef& td : testlist)
|
||||
for (testdef& td : testlist)
|
||||
{
|
||||
td.passed = test(td.testname, td.callback, td.file, td.line);
|
||||
//i++;
|
||||
}
|
||||
/// Raises an exception if the given floats are not equal, up to the given epsilon. Otherwise returns true.
|
||||
/// @param epsilon The accuracy required to pass the test.
|
||||
bool check_float_eq(float a, float b, float epsilon = 1e-3f);
|
||||
|
||||
jlog::log({
|
||||
//{.content = std::format("{}:{}", file, line)},
|
||||
{.colorCode = jlog::ansi_escape_codes::FG_WHITE, .content = "JTEST"},
|
||||
{.content = std::format("Tests Ran: [{}/{}] Failed: [{}/{}] Passed: [{}/{}]", rantests, testlist.size(), failedtests, rantests, passedtests, rantests), .delimiter = ""},
|
||||
});
|
||||
//USINFO(std::format("Tests Ran: [{}/{}] Failed: [{}/{}] Passed: [{}/{}]", rantests, testlist.size(), failedtests, rantests, passedtests, rantests))
|
||||
if (passedtests == rantests)
|
||||
{
|
||||
USINFO("All tests passed congratulations! Do you wanna cookie?");
|
||||
jlog::log({
|
||||
//{.content = std::format("{}:{}", file, line)},
|
||||
{.colorCode = jlog::ansi_escape_codes::FG_WHITE, .content = "JTEST"},
|
||||
{.colorCode = jlog::ansi_escape_codes::FG_GREEN, .content = "All tests passed congratulations! Do you wanna cookie?", .delimiter = ""},
|
||||
});
|
||||
}
|
||||
}
|
||||
/// Raises an exception if the given doubles are not equal, up to the given epsilon. Otherwise returns true.
|
||||
/// @param epsilon The accuracy required to pass the test.
|
||||
bool check_double_eq(double a, double b, double epsilon = 1e-3f);
|
||||
|
||||
/// Raises an exception if the given strings are not equal, otherwise returns true.
|
||||
bool check_string_eq(const std::string& a, const std::string& b);
|
||||
|
||||
/// Runs a given test, generates a report, and returns the test result as a boolean.
|
||||
bool test(const std::string& testname, const std::function<void()>& callback, const std::string& file, int line);
|
||||
|
||||
/// Runs all tests that have been registered, and generates a final summary report for the testing suite.
|
||||
void run_tests();
|
||||
}
|
||||
|
||||
//#define TEST(a, b) jtest::test(a, b, __FILE__, __LINE__);
|
||||
// Same definition as before essentially, but points to a different function which adds the test to the global vector.
|
||||
// -maxine
|
||||
/// TEST macro to be used by API consumers. Automatically grabs reflection data such as current file, line, function name, etc.
|
||||
#define TEST(a, b) jtest::definetest(a, b, __FILE__, __LINE__);
|
||||
|
||||
/// TODO: Implement check macros
|
||||
|
45
main.cpp
45
main.cpp
@@ -1,20 +1,24 @@
|
||||
//
|
||||
// Created by dawsh on 6/16/24.
|
||||
//
|
||||
|
||||
#include <cassert>
|
||||
|
||||
#include "include/jtest/jtest.hpp"
|
||||
// Josh's Test Library
|
||||
// A no-frills, straightforward unit testing module in and for Modern C++.
|
||||
// Created by Joshua O'Leary @ Redacted Software, June 2024
|
||||
// This work is dedicated to the public domain.
|
||||
// Contact: josh@redacted.cc, git.redacted.cc/josh
|
||||
|
||||
// TODO: Provide introspection insofar as which assertion check failed.
|
||||
// TODO: Provide alternate checks (google test has specific assertions for handling floats, for example) (Are these actually necessary??)
|
||||
// TODO: Implement log-file-specification-capability in jlog so we can log to test_results.txt specifically.
|
||||
// TODO: Provide benchmarking on test running-time
|
||||
|
||||
#include <jtest/jtest.hpp>
|
||||
#include <cassert>
|
||||
|
||||
void TestA() { jtest::check("Bruh" == "Bruh"); }
|
||||
void TestB() { jtest::check(6*6 == 36); }
|
||||
void TestC() { jtest::check(6+9 == 69); }
|
||||
|
||||
int main(int argc, char** argv)
|
||||
{
|
||||
|
||||
|
||||
TEST("Test1", []{
|
||||
jtest::check(2+2 == 4);
|
||||
});
|
||||
@@ -30,12 +34,25 @@ int main(int argc, char** argv)
|
||||
//jtest::check(2+2 == 4);
|
||||
});
|
||||
|
||||
/*
|
||||
TEST("LMAO");
|
||||
TEST("KEKERINO")
|
||||
TEST(":)")
|
||||
*/
|
||||
|
||||
// Doesn't actually do anything yet
|
||||
TEST("Test4", []
|
||||
{
|
||||
//assert(69 == 9);//, "FUCKING COCK"); stil figuring out
|
||||
});
|
||||
|
||||
TEST("Test5", []
|
||||
{
|
||||
throw std::runtime_error("HOLY SHIT");
|
||||
});
|
||||
|
||||
TEST("Test6", []
|
||||
{
|
||||
throw std::exception();
|
||||
});
|
||||
|
||||
TEST("TestGroup::A", TestA);
|
||||
TEST("TestGroup::B", TestB);
|
||||
TEST("TestGroup::C", TestC);
|
||||
|
||||
jtest::run_tests();
|
||||
}
|
@@ -1,7 +1,120 @@
|
||||
//
|
||||
// Created by dawsh on 6/16/24.
|
||||
//
|
||||
|
||||
|
||||
#include <jtest/jtest.hpp>
|
||||
namespace jtest {
|
||||
|
||||
// Globals for test tracking
|
||||
std::vector<testdef> testlist;
|
||||
int rantests;
|
||||
int passedtests;
|
||||
int failedtests;
|
||||
|
||||
/// jlog wrapper for jtest: prefixes every message with a white "JTEST" tag
/// and forwards it to jlog, which also writes "test_results.log".
void log(std::vector<jlog::token> tokens)
{
    // Assemble the final token list: JTEST banner first, caller tokens after.
    std::vector<jlog::token> out;
    out.reserve(tokens.size() + 1);
    out.push_back(jlog::token{.colorCode = jlog::ansi_escape_codes::FG_WHITE, .content = "JTEST"});
    for (const auto& t : tokens)
        out.push_back(t);

    jlog::log(out, "test_results.log");
}
|
||||
|
||||
/// Builds the token sequence for a single test's report line:
/// "<file>:<line> <testname> Passed|Failed: <ran>/<total>".
std::vector<jlog::token> log_test_format(const std::string &testname, const std::string &file, int line,
                                         bool passed) {
    std::vector<jlog::token> out;

    // Source location where the test was registered.
    out.push_back(jlog::token{.content = std::format("{}:{}", file, line)});

    // Test name colored by outcome, followed by the verdict label.
    if (passed) {
        out.push_back(jlog::token{.colorCode = jlog::ansi_escape_codes::FG_GREEN, .content = testname});
        out.push_back(jlog::token{.content = "Passed:", .delimiter = ""});
    } else {
        out.push_back(jlog::token{.colorCode = jlog::ansi_escape_codes::FG_RED, .content = testname});
        out.push_back(jlog::token{.content = "Failed:", .delimiter = ""});
    }

    // Progress counter: tests run so far out of the registered total.
    out.push_back(jlog::token{.content = std::format("{}/{}", rantests, testlist.size())});

    return out;
}
|
||||
|
||||
/// Builds the token sequence for the end-of-run summary:
/// totals of ran, failed, and passed tests.
std::vector<jlog::token> log_test_tracking_format() {
    return {
        jlog::token{.content = std::format("Tests Ran: [{}/{}]", rantests, testlist.size()), .delimiter = ""},
        jlog::token{.content = std::format("Failed: [{}/{}]", failedtests, rantests), .delimiter = ""},
        jlog::token{.content = std::format("Passed: [{}/{}]", passedtests, rantests), .delimiter = ""},
    };
}
|
||||
|
||||
/// Registers a test internally; it is executed later by run_tests().
/// @param testname Human-readable test identifier.
/// @param callback The test body to invoke.
/// @param file Source file of the TEST() expansion.
/// @param line Source line of the TEST() expansion.
void definetest(const std::string &testname, const std::function<void()> &callback, const std::string &file,
                int line) {
    // Brace-init so the trailing `passed` member gets a definite value:
    // the original `testdef(...)` relied on C++20 aggregate paren-init and
    // left `passed` indeterminate until run_tests() assigned it.
    testlist.push_back(testdef{testname, callback, file, line, false});
}
|
||||
|
||||
/// Raises std::runtime_error if the given condition is false;
/// otherwise returns the (true) condition.
bool check(bool condition) {
    if (condition)
        return condition;
    throw std::runtime_error("Test check failed!!");
}
|
||||
|
||||
/// Raises std::runtime_error if |a - b| exceeds epsilon; otherwise returns true.
/// @param epsilon The accuracy required to pass the check.
bool check_float_eq(float a, float b, float epsilon) {
    const bool within = std::abs(a - b) <= epsilon;
    if (!within)
        throw std::runtime_error("Test check failed!!");
    return true;
}
|
||||
|
||||
/// Raises std::runtime_error if |a - b| exceeds epsilon; otherwise returns true.
/// @param epsilon The accuracy required to pass the check.
bool check_double_eq(double a, double b, double epsilon) {
    const bool within = std::abs(a - b) <= epsilon;
    if (!within)
        throw std::runtime_error("Test check failed!!");
    return true;
}
|
||||
|
||||
/// Raises std::runtime_error if the given strings are not equal;
/// otherwise returns true.
bool check_string_eq(const std::string &a, const std::string &b) {
    // BUG FIX: the original condition was inverted (`a == b`), so it threw
    // when the strings WERE equal — every correct comparison failed and
    // every mismatch silently passed.
    if (a != b)
        throw std::runtime_error("Test check failed!!");
    return true;
}
|
||||
|
||||
bool test(const std::string &testname, const std::function<void()> &callback, const std::string &file, int line) {
|
||||
bool passed = true;
|
||||
|
||||
try { callback(); }
|
||||
catch(const std::exception& e)
|
||||
{ passed = false; }
|
||||
catch(...) // <- Basically covers all exception cases. GTest does something similar
|
||||
{ passed = false; }
|
||||
|
||||
rantests++;
|
||||
|
||||
if (passed) { passedtests++; }
|
||||
else { failedtests++; }
|
||||
|
||||
jtest::log(log_test_format(testname, file, line, passed));
|
||||
|
||||
return passed;
|
||||
|
||||
}
|
||||
|
||||
void run_tests() {
|
||||
|
||||
for (testdef& td : testlist)
|
||||
{
|
||||
td.passed = test(td.testname, td.callback, td.file, td.line);
|
||||
}
|
||||
|
||||
jtest::log(log_test_tracking_format());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
Reference in New Issue
Block a user