#include "TestRunnerModule.h"
|
|
#include <grove/JsonDataNode.h>
|
|
#include <spdlog/spdlog.h>
|
|
#include <filesystem>
|
|
#include <fstream>
|
|
#include <chrono>
|
|
#include <dlfcn.h>
|
|
|
|
namespace fs = std::filesystem;
|
|
|
|

namespace celuna {

TestRunnerModule::TestRunnerModule() = default;
TestRunnerModule::~TestRunnerModule() = default;

void TestRunnerModule::setConfiguration(const grove::IDataNode& config,
                                        grove::IIO* io,
                                        grove::ITaskScheduler* scheduler) {
    m_io = io;
    m_scheduler = scheduler;
    m_config = std::make_unique<grove::JsonDataNode>("config");

    m_testDirectory = config.getString("testDirectory", "tests/integration");
    m_globalTimeoutMs = config.getInt("globalTimeoutMs", 300000);
    m_stopOnFirstFailure = config.getBool("stopOnFirstFailure", false);
    m_verboseOutput = config.getBool("verboseOutput", true);
    m_jsonOutputPath = config.getString("jsonOutputPath", "test-results.json");

    spdlog::info("[TestRunner] Configuration loaded:");
    spdlog::info("  Test directory: {}", m_testDirectory);
    spdlog::info("  Global timeout: {}ms", m_globalTimeoutMs);
    spdlog::info("  Stop on first failure: {}", m_stopOnFirstFailure);

    discoverTests();
}
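
// Example configuration node (illustrative; the key names match the lookups
// above, and the values shown are the defaults used when a key is absent):
//   {
//     "testDirectory": "tests/integration",
//     "globalTimeoutMs": 300000,
//     "stopOnFirstFailure": false,
//     "verboseOutput": true,
//     "jsonOutputPath": "test-results.json"
//   }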

const grove::IDataNode& TestRunnerModule::getConfiguration() {
    return *m_config;
}

void TestRunnerModule::discoverTests() {
    m_testPaths.clear();

    fs::path testDir(m_testDirectory);
    if (!fs::exists(testDir)) {
        spdlog::warn("[TestRunner] Test directory not found: {}", testDir.string());
        return;
    }

    for (const auto& entry : fs::directory_iterator(testDir)) {
        if (entry.is_regular_file()) {
            std::string filename = entry.path().filename().string();
            if (filename.find("IT_") == 0 && entry.path().extension() == ".so") {
                m_testPaths.push_back(entry.path().string());
            }
        }
    }

    std::sort(m_testPaths.begin(), m_testPaths.end());

    spdlog::info("[TestRunner] Discovered {} test(s)", m_testPaths.size());
    for (const auto& path : m_testPaths) {
        spdlog::info("  - {}", fs::path(path).filename().string());
    }
}
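
// Discovery relies purely on a filename convention: shared objects named
// "IT_*.so" in the configured test directory (e.g. "IT_01_Startup.so", a
// hypothetical name). Sorting the paths makes execution order deterministic.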

testing::TestResult TestRunnerModule::runTest(const std::string& testPath) {
    testing::TestResult result;
    auto startTime = std::chrono::steady_clock::now();

    // Load the test module
    void* handle = dlopen(testPath.c_str(), RTLD_LAZY);
    if (!handle) {
        result.passed = false;
        result.testName = fs::path(testPath).stem().string();
        result.message = std::string("Failed to load module: ") + dlerror();
        spdlog::error("[TestRunner] {}", result.message);
        return result;
    }

    // Get createModule function
    using CreateModuleFn = grove::IModule* (*)();
    auto createModule = reinterpret_cast<CreateModuleFn>(dlsym(handle, "createModule"));
    if (!createModule) {
        result.passed = false;
        result.testName = fs::path(testPath).stem().string();
        result.message = "createModule symbol not found";
        dlclose(handle);
        return result;
    }

    // Create test instance
    auto* module = createModule();
    auto* testModule = dynamic_cast<testing::ITestModule*>(module);
    if (!testModule) {
        result.passed = false;
        result.testName = fs::path(testPath).stem().string();
        result.message = "Module does not implement ITestModule";
        delete module;
        dlclose(handle);
        return result;
    }

    // Configure test module
    grove::JsonDataNode config("test_config");
    config.setInt("timeoutMs", 10000);
    testModule->setConfiguration(config, m_io, m_scheduler);

    // Execute test
    try {
        result = testModule->execute();
    } catch (const std::exception& e) {
        result.passed = false;
        result.testName = testModule->getTestName();
        result.message = std::string("Exception: ") + e.what();
    }

    auto endTime = std::chrono::steady_clock::now();
    result.durationMs = std::chrono::duration_cast<std::chrono::milliseconds>(
        endTime - startTime).count();

    // Cleanup
    using DestroyModuleFn = void (*)(grove::IModule*);
    auto destroyModule = reinterpret_cast<DestroyModuleFn>(dlsym(handle, "destroyModule"));
    if (destroyModule) {
        destroyModule(module);
    } else {
        delete module;
    }
    dlclose(handle);

    return result;
}
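
// Each test .so is expected to export the same C ABI that this module itself
// exports at the bottom of this file:
//
//   extern "C" grove::IModule* createModule();       // required
//   extern "C" void destroyModule(grove::IModule*);  // optional (falls back to delete)
//
// Preferring destroyModule when present frees the instance inside the library
// that allocated it, avoiding mixed allocators across shared-object boundaries.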

void TestRunnerModule::process(const grove::IDataNode& input) {
    if (m_executed) {
        return;  // Tests already run
    }

    m_executed = true;

    spdlog::info("========================================");
    spdlog::info("  AISSIA Integration Tests");
    spdlog::info("  Running {} test(s)...", m_testPaths.size());
    spdlog::info("========================================");

    auto globalStart = std::chrono::steady_clock::now();

    for (size_t i = 0; i < m_testPaths.size(); ++i) {
        const auto& testPath = m_testPaths[i];
        std::string testName = fs::path(testPath).stem().string();

        if (m_verboseOutput) {
            spdlog::info("[{}/{}] {}...", i + 1, m_testPaths.size(), testName);
        }

        auto result = runTest(testPath);
        m_results.push_back(result);

        std::string status = result.passed ? "✅ PASS" : "❌ FAIL";
        spdlog::info("[{}/{}] {}... {} ({:.1f}s)",
                     i + 1, m_testPaths.size(), testName, status,
                     result.durationMs / 1000.0);

        if (m_verboseOutput && !result.message.empty()) {
            spdlog::info("  {}", result.message);
        }

        if (!result.passed && m_stopOnFirstFailure) {
            spdlog::warn("[TestRunner] Stopping on first failure");
            break;
        }

        // Check global timeout
        auto elapsed = std::chrono::duration_cast<std::chrono::milliseconds>(
            std::chrono::steady_clock::now() - globalStart).count();
        if (elapsed > m_globalTimeoutMs) {
            spdlog::error("[TestRunner] Global timeout exceeded ({}ms)", m_globalTimeoutMs);
            break;
        }
    }

    generateReport();

    if (!m_jsonOutputPath.empty()) {
        generateJsonReport(m_jsonOutputPath);
    }

    // Determine exit code
    int failedCount = 0;
    for (const auto& result : m_results) {
        if (!result.passed) {
            failedCount++;
        }
    }

    int exitCode = failedCount == 0 ? 0 : 1;
    spdlog::info("Exit code: {}", exitCode);

    // Exit the application
    std::exit(exitCode);
}
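
// Note: std::exit() terminates the process without unwinding the stack, so
// destructors of objects still alive in the host application never run. For a
// dedicated test binary this is presumably acceptable, since the process is
// finished once the report has been written.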

void TestRunnerModule::generateReport() {
    spdlog::info("========================================");

    int passed = 0;
    int failed = 0;
    int totalDuration = 0;

    for (const auto& result : m_results) {
        if (result.passed) {
            passed++;
        } else {
            failed++;
        }
        totalDuration += result.durationMs;
    }

    int total = passed + failed;
    double successRate = total > 0 ? (100.0 * passed) / total : 0.0;

    spdlog::info("Results: {}/{} passed ({:.1f}%)", passed, total, successRate);
    spdlog::info("Total time: {:.1f}s", totalDuration / 1000.0);

    if (failed > 0) {
        spdlog::info("Failed tests:");
        for (const auto& result : m_results) {
            if (!result.passed) {
                spdlog::info("  - {}: {}", result.testName, result.message);
            }
        }
    }

    spdlog::info("========================================");
}
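
// Example console summary (illustrative names and numbers, matching the
// format strings above):
//   Results: 3/4 passed (75.0%)
//   Total time: 12.3s
//   Failed tests:
//     - IT_Example: Exception: timeout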

void TestRunnerModule::generateJsonReport(const std::string& outputPath) {
    nlohmann::json report;

    // Summary
    int passed = 0;
    int failed = 0;
    int totalDuration = 0;

    for (const auto& result : m_results) {
        if (result.passed) {
            passed++;
        } else {
            failed++;
        }
        totalDuration += result.durationMs;
    }

    int total = passed + failed;
    double successRate = total > 0 ? (100.0 * passed) / total : 0.0;

    report["summary"] = {
        {"total", total},
        {"passed", passed},
        {"failed", failed},
        {"skipped", 0},
        {"successRate", successRate},
        {"totalDurationMs", totalDuration}
    };

    // Individual tests
    nlohmann::json tests = nlohmann::json::array();
    for (const auto& result : m_results) {
        nlohmann::json testJson = {
            {"name", result.testName},
            {"passed", result.passed},
            {"message", result.message},
            {"durationMs", result.durationMs},
            {"details", result.details}
        };
        tests.push_back(testJson);
    }
    report["tests"] = tests;

    // Metadata
    auto now = std::chrono::system_clock::now();
    auto timestamp = std::chrono::system_clock::to_time_t(now);
    char buf[100];
    std::strftime(buf, sizeof(buf), "%Y-%m-%dT%H:%M:%SZ", std::gmtime(&timestamp));

    report["timestamp"] = buf;
    report["environment"] = {
        {"platform", "linux"},
        {"testDirectory", m_testDirectory}
    };

    // Write to file
    std::ofstream file(outputPath);
    if (file.is_open()) {
        file << report.dump(2);
        file.close();
        spdlog::info("[TestRunner] JSON report written to: {}", outputPath);
    } else {
        spdlog::error("[TestRunner] Failed to write JSON report to: {}", outputPath);
    }
}
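
// Example report (illustrative values; the shape follows the code above, and
// "details" depends on what the test stores in TestResult::details):
//   {
//     "summary": { "total": 2, "passed": 1, "failed": 1, "skipped": 0,
//                  "successRate": 50.0, "totalDurationMs": 1234 },
//     "tests": [ { "name": "IT_Example", "passed": true, "message": "",
//                  "durationMs": 610, "details": {} } ],
//     "timestamp": "2025-01-01T00:00:00Z",
//     "environment": { "platform": "linux", "testDirectory": "tests/integration" }
//   }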

std::unique_ptr<grove::IDataNode> TestRunnerModule::getHealthStatus() {
    auto status = std::make_unique<grove::JsonDataNode>("health");
    status->setString("status", "healthy");
    status->setInt("testsRun", m_results.size());
    return status;
}

void TestRunnerModule::shutdown() {
    spdlog::info("[TestRunner] Shutdown");
}

std::unique_ptr<grove::IDataNode> TestRunnerModule::getState() {
    auto state = std::make_unique<grove::JsonDataNode>("state");
    state->setBool("executed", m_executed);
    return state;
}

void TestRunnerModule::setState(const grove::IDataNode& state) {
    m_executed = state.getBool("executed", false);
}

} // namespace celuna

// Factory functions
extern "C" {

grove::IModule* createModule() {
    return new celuna::TestRunnerModule();
}

void destroyModule(grove::IModule* module) {
    delete module;
}

}