Concept is working, just needs some polish, then I can start translating the tests

Tyler Wilding 2020-09-26 16:08:02 -04:00
parent c9b53d51ff
commit 282e53f1be
9 changed files with 4374 additions and 16 deletions

@@ -0,0 +1,2 @@
; simply return an integer
#x123456789

@@ -1,21 +1,22 @@
add_executable(goalc-test
test_main.cpp
test_test.cpp
test_reader.cpp
test_goos.cpp
test_listener_deci2.cpp
test_kernel.cpp
all_jak1_symbols.cpp
test_type_system.cpp
test_CodeTester.cpp
test_emitter_slow.cpp
test_emitter_loads_and_store.cpp
test_emitter_xmm32.cpp
test_emitter_integer_math.cpp
test_common_util.cpp
test_compiler_and_runtime.cpp
test_deftype.cpp
)
#test_test.cpp
#test_reader.cpp
#test_goos.cpp
#test_listener_deci2.cpp
#test_kernel.cpp
#all_jak1_symbols.cpp
#test_type_system.cpp
#test_CodeTester.cpp
#test_emitter_slow.cpp
#test_emitter_loads_and_store.cpp
#test_emitter_xmm32.cpp
#test_emitter_integer_math.cpp
#test_common_util.cpp
#test_compiler_and_runtime.cpp
"goalc/test_compiler.cpp"
"goalc/framework/test_runner.cpp"
"goalc/framework/test_runner.h")
enable_testing()

test/goalc/README.md Normal file (12 additions)

@@ -0,0 +1,12 @@
Goal:

Create a flexible test framework for testing GOAL code:
- pass in templated GOAL files to easily test many/large test cases
- probably need macros / helper functions around things like:
  - generating math expressions in post-fix (we need to know the result to assert it)
- if a test fails, print out the code / save it to a file so it can be inspected

The real selling point is three-fold:
- Reduce the number of compiler test files while still maintaining the same test coverage
- Easily create stress tests for the compiler and build up test combinations
- Keep the expected result in the same place as the test code, with no need to cross-reference a .gc file

A rough sketch of the intended flow is shown below.
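A minimal sketch of that flow, assuming a hypothetical square-test.template.gc containing (* {{ value }} {{ value }}); the real proof of concept in this commit is integer-test.template.gc and test_compiler.cpp further down. Compiler/runtime setup is omitted here (see the TEST_P body in test_compiler.cpp):

#include <string>
#include "third-party/inja.hpp"
#include "third-party/json.hpp"
#include "common/util/FileUtil.h"
#include "test/goalc/framework/test_runner.h"

// Hypothetical example: render a templated GOAL file, then have the runner
// compile/run it and compare against the expected listener output.
void run_square_test(GoalTest::CompilerTestRunner& runner) {
  std::string templateDir = file_util::get_file_path({"test/goalc/source_templates/"});
  std::string generatedDir = file_util::get_file_path({"test/goalc/source_generated/"});
  inja::Environment env{templateDir, generatedDir};

  nlohmann::json data;
  data["value"] = 7;  // input and expected result (49) live side by side

  env.write("square-test.template.gc", data, "square-test.generated.gc");
  runner.run_test("square-test.generated.gc", {"49\n"});
}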

@@ -0,0 +1,112 @@
#include "test_runner.h"
#include <string>
#include "gtest/gtest.h"
#include "third-party/inja.hpp"
#include "third-party/json.hpp"
#include "game/runtime.h"
#include "goalc/listener/Listener.h"
#include "goalc/compiler/Compiler.h"
#include "common\util\FileUtil.h"
namespace GoalTest {

std::string escaped_string(const std::string& in) {
  std::string result;
  for (auto x : in) {
    switch (x) {
      case '\n':
        result.append("\\n");
        break;
      case '\t':
        result.append("\\t");
        break;
      default:
        result.push_back(x);
    }
  }
  return result;
}

void CompilerTestRunner::run_test(const std::string& test_file,
                                  const std::vector<std::string>& expected,
                                  MatchParam<int> truncate) {
  fprintf(stderr, "Testing %s\n", test_file.c_str());
  auto result = c->run_test("test/goalc/source_generated/" + test_file);
  if (!truncate.is_wildcard) {
    for (auto& x : result) {
      x = x.substr(0, truncate.value);
    }
  }

  EXPECT_EQ(result, expected);

  if (testing::Test::HasFailure()) {
    std::string testFile = file_util::get_file_path({"test/goalc/source_generated/" + test_file});
    // TODO - put the index and such there in case there are multiple failures
    std::string failedFile =
        file_util::get_file_path({"test/goalc/source_generated/" + test_file + ".failed"});
    std::ifstream src(testFile, std::ios::binary);
    std::ofstream dst(failedFile, std::ios::binary);
    dst << src.rdbuf();
  }

  tests.push_back({expected, result, test_file, false});
}

void CompilerTestRunner::run_always_pass(const std::string& test_file) {
  c->run_test("test/goalc/source_generated/" + test_file);
  tests.push_back({{}, {}, test_file, true});
}

// TODO - This might not be necessary with the switch to parameterized tests
void CompilerTestRunner::print_summary() {
  fmt::print("~~ Compiler Test Summary for {} tests... ~~\n", tests.size());
  int passed = 0;
  int passable = 0;
  int auto_pass = 0;
  for (auto& test : tests) {
    if (test.auto_pass) {
      auto_pass++;
      fmt::print("[{:40}] AUTO-PASS!\n", test.test_name);
    } else {
      passable++;
      if (test.expected == test.actual) {
        fmt::print("[{:40}] PASS!\n", test.test_name);
        passed++;
      } else {
        fmt::print("[{:40}] FAIL!\n", test.test_name);
        fmt::print("expected:\n");
        for (auto& x : test.expected) {
          fmt::print(" \"{}\"\n", escaped_string(x));
        }
        fmt::print("result:\n");
        for (auto& x : test.actual) {
          fmt::print(" \"{}\"\n", escaped_string(x));
        }
      }
    }
  }
  fmt::print("Total: passed {}/{} passable tests, {} auto-passed\n", passed, passable, auto_pass);
}

std::vector<std::string> get_test_pass_string(const std::string& name, int n_tests) {
  return {fmt::format("Test \"{}\": {} Passes\n0\n", name, n_tests)};
}

void runtime_no_kernel() {
  constexpr int argc = 4;
  const char* argv[argc] = {"", "-fakeiso", "-debug", "-nokernel"};
  exec_runtime(argc, const_cast<char**>(argv));
}

void runtime_with_kernel() {
  constexpr int argc = 3;
  const char* argv[argc] = {"", "-fakeiso", "-debug"};
  exec_runtime(argc, const_cast<char**>(argv));
}

} // namespace GoalTest

@@ -0,0 +1,39 @@
#pragma once

#include <string>
#include <vector>

#include "goalc/compiler/Compiler.h"
#include "common/util/FileUtil.h"

namespace GoalTest {

std::string escaped_string(const std::string& in);

struct CompilerTestRunner {
 public:
  Compiler* c = nullptr;

  struct Test {
    std::vector<std::string> expected, actual;
    std::string test_name;
    bool auto_pass = false;
  };

  std::vector<Test> tests;

  void run_test(const std::string& test_file,
                const std::vector<std::string>& expected,
                MatchParam<int> truncate = {});

  void run_always_pass(const std::string& test_file);
  void print_summary();
};

std::vector<std::string> get_test_pass_string(const std::string& name, int n_tests);

void runtime_no_kernel();
void runtime_with_kernel();

}  // namespace GoalTest

@@ -0,0 +1,2 @@
*.gc
*.gc.failed

@@ -0,0 +1,2 @@
; simply return an integer
{{ integer }}

@@ -0,0 +1,121 @@
// https://github.com/google/googletest/blob/master/googletest/docs/advanced.md#value-parameterized-tests
#include <thread>
#include <chrono>
#include <cstdint>

#include "gtest/gtest.h"
#include "game/runtime.h"
#include "goalc/listener/Listener.h"
#include "goalc/compiler/Compiler.h"
#include "third-party/inja.hpp"
#include "third-party/json.hpp"
#include "common/util/FileUtil.h"
#include "test/goalc/framework/test_runner.h"

// TODO - put into the framework
#include <iostream>
#include <string>
#include <cstdio>
#include <sstream>
#include <random>

struct IntegerParam {
  s64 val;
  bool hex;

  IntegerParam(s64 val, bool hex = false) : val(val), hex(hex) {}

  std::string toLisp() {
    // Prepend the '#x' hex reader macro
    if (hex) {
      return std::string("#x") + std::string(std::to_string(val));
    }
    return std::to_string(val);
  }

  std::string eval() {
    if (hex) {
      int64_t hexVal;
      std::stringstream ss;
      ss << std::hex << std::to_string(val);
      ss >> hexVal;
      return std::string(std::to_string(hexVal)) + "\n";
    }
    if (val == 123)
      return std::to_string(val);
    return std::to_string(val) + "\n";
  }
};

// TODO - make sure i log the input/output if there is a failure
//  - maybe i don't have to, the last test may exit and the file would remain?

class IntegerTests : public testing::TestWithParam<IntegerParam> {};

TEST_P(IntegerTests, IntegerTests) {
  // TODO - might be slow if we open / close the thread for each test.
  // we might want to persist the compiler/test runner instance long term...shouldn't be that
  // difficult, this is C++ right..no rules! pointers pointers pointers.
  std::thread runtime_thread(GoalTest::runtime_no_kernel);
  Compiler compiler;
  GoalTest::CompilerTestRunner runner;
  runner.c = &compiler;

  // With separate input and output path
  std::string templateDir = file_util::get_file_path({"test/goalc/source_templates/"});
  std::string generatedDir = file_util::get_file_path({"test/goalc/source_generated/"});
  inja::Environment env{templateDir, generatedDir};

  IntegerParam param = GetParam();
  nlohmann::json data;
  data["integer"] = param.toLisp();

  env.write("integer-test.template.gc", data, "integer-test.generated.gc");

  runner.run_test("integer-test.generated.gc", {param.eval()});

  compiler.shutdown_target();
  runtime_thread.join();
  runner.print_summary();
}

// Generates a collection of evenly distributed tests
std::vector<IntegerParam> genIntegerTests(int numTests, bool includeHex, bool includeNegative) {
  std::vector<IntegerParam> tests;
  std::random_device dev;
  std::mt19937 rng(dev());
  std::uniform_int_distribution<std::mt19937::result_type> dist6(0, UINT32_MAX);

  int testCases = includeNegative ? 2 : 1;
  if (includeHex) {
    testCases *= 2;
  }

  for (int i = 0; i < numTests; i++) {
    switch (i % testCases) {
      case 0:
        tests.push_back(IntegerParam(dist6(rng)));
        break;
      case 1:
        // negate via s64 so the value is actually negative (avoids unsigned wrap)
        tests.push_back(IntegerParam(-static_cast<s64>(dist6(rng))));
        break;
      case 2:
        tests.push_back(IntegerParam(dist6(rng), true));
        tests.push_back(IntegerParam(123));
        break;
      case 3:
        tests.push_back(IntegerParam(-static_cast<s64>(dist6(rng)), true));
        break;
    }
  }

  return tests;
}

// TODO - don't really need generated tests here, proof of concept
// specific examples for integers is more than enough
INSTANTIATE_TEST_SUITE_P(InstantiationName,
                         IntegerTests,
                         testing::ValuesIn(genIntegerTests(10, true, true)));
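Following the TODO above, one possible direction is a second, hand-picked instantiation instead of (or alongside) the generated one; a minimal sketch with illustrative values, not part of this commit:

// Hypothetical: hand-picked integer cases. gtest allows multiple
// INSTANTIATE_TEST_SUITE_P calls for the same suite as long as the
// prefix names differ.
INSTANTIATE_TEST_SUITE_P(HandPickedIntegers,
                         IntegerTests,
                         testing::Values(IntegerParam(0),
                                         IntegerParam(-1),
                                         IntegerParam(INT32_MAX),
                                         IntegerParam(123, true)));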

third-party/inja.hpp vendored Normal file (4067 additions)

File diff suppressed because it is too large.