/**
 * @file AIModuleTests.cpp
 * @brief Integration tests for AIModule (10 integration tests, TI_AI_001..TI_AI_010)
 */
#include <catch2/catch_test_macros.hpp>
#include "mocks/MockIO.hpp"
#include "utils/TimeSimulator.hpp"
#include "utils/TestHelpers.hpp"
#include "modules/AIModule.h"
#include <grove/JsonDataNode.h>
using namespace aissia;
using namespace aissia::tests;
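
// Message topics exercised below (all routed through MockIO):
//   inbound:  "ai:query", "voice:transcription", "llm:response", "llm:error",
//             "scheduler:hyperfocus_alert", "scheduler:break_reminder"
//   outbound: "llm:request", "ai:suggestion"
// The `json` alias used throughout is assumed to come from TestHelpers.hpp
// (presumably nlohmann::json, given the merge_patch() and initializer-list usage).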
// ============================================================================
// Test Fixture
// ============================================================================
class AITestFixture {
public:
    MockIO io;
    TimeSimulator time;
    AIModule module;

    void configure(const json& config = json::object()) {
        json fullConfig = {
            {"system_prompt", "Tu es un assistant personnel intelligent."},
            {"max_iterations", 10}
        };
        fullConfig.merge_patch(config);
        grove::JsonDataNode configNode("config", fullConfig);
        module.setConfiguration(configNode, &io, nullptr);
    }

    void process() {
        grove::JsonDataNode input("input", time.createInput());
        module.process(input);
    }
};
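
// The tests below rely on the following MockIO surface (a reconstructed
// sketch based on the call sites in this file; the authoritative declaration
// is in mocks/MockIO.hpp and its actual signatures may differ):
//
//   void injectMessage(const std::string& topic, const json& payload);
//   bool wasPublished(const std::string& topic) const;
//   json getLastPublished(const std::string& topic) const;
//   void clearPublished();
//
// injectMessage() queues an inbound message that AIModule consumes on the
// next process() call; the other three inspect what the module published.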
// ============================================================================
// TI_AI_001: Query Sends LLM Request
// ============================================================================
TEST_CASE("TI_AI_001_QuerySendsLLMRequest", "[ai][integration]") {
AITestFixture f;
f.configure();
// Send query
f.io.injectMessage("ai:query", {{"query", "Quelle heure est-il?"}});
f.process();
// Verify LLM request published
REQUIRE(f.io.wasPublished("llm:request"));
auto msg = f.io.getLastPublished("llm:request");
REQUIRE(msg["query"] == "Quelle heure est-il?");
}
// ============================================================================
// TI_AI_002: Voice Transcription Triggers Query
// ============================================================================
TEST_CASE("TI_AI_002_VoiceTranscriptionTriggersQuery", "[ai][integration]") {
AITestFixture f;
f.configure();
// Send voice transcription
f.io.injectMessage("voice:transcription", {
{"text", "Aide-moi avec mon code"},
{"confidence", 0.95}
});
f.process();
// Verify LLM request
REQUIRE(f.io.wasPublished("llm:request"));
auto msg = f.io.getLastPublished("llm:request");
REQUIRE(msg["query"] == "Aide-moi avec mon code");
}
// ============================================================================
// TI_AI_003: LLM Response Handled
// ============================================================================
TEST_CASE("TI_AI_003_LLMResponseHandled", "[ai][integration]") {
AITestFixture f;
f.configure();
// Send query to set awaiting state
f.io.injectMessage("ai:query", {{"query", "Test"}});
f.process();
REQUIRE(f.module.isIdle() == false);
// Receive response
f.io.injectMessage("llm:response", {
{"text", "Voici la reponse"},
{"tokens", 100},
{"conversationId", "default"}
});
f.process();
// Verify no longer awaiting
REQUIRE(f.module.isIdle() == true);
}
// ============================================================================
// TI_AI_004: LLM Error Handled
// ============================================================================
TEST_CASE("TI_AI_004_LLMErrorHandled", "[ai][integration]") {
AITestFixture f;
f.configure();
// Send query
f.io.injectMessage("ai:query", {{"query", "Test"}});
f.process();
REQUIRE(f.module.isIdle() == false);
// Receive error
f.io.injectMessage("llm:error", {
{"message", "API rate limit exceeded"},
{"conversationId", "default"}
});
f.process();
// Should no longer be awaiting
REQUIRE(f.module.isIdle() == true);
}
// ============================================================================
// TI_AI_005: Hyperfocus Alert Generates Suggestion
// ============================================================================
TEST_CASE("TI_AI_005_HyperfocusAlertGeneratesSuggestion", "[ai][integration]") {
AITestFixture f;
f.configure();
// Receive hyperfocus alert
f.io.injectMessage("scheduler:hyperfocus_alert", {
{"sessionMinutes", 130},
{"task", "coding"}
});
f.process();
// Verify LLM request published
REQUIRE(f.io.wasPublished("llm:request"));
auto req = f.io.getLastPublished("llm:request");
std::string convId = req["conversationId"];
// Simulate LLM response
f.io.injectMessage("llm:response", {
{"text", "Time to take a break!"},
{"conversationId", convId}
});
f.process();
// Verify suggestion published
REQUIRE(f.io.wasPublished("ai:suggestion"));
auto msg = f.io.getLastPublished("ai:suggestion");
REQUIRE(msg.contains("message"));
}
// ============================================================================
// TI_AI_006: Break Reminder Generates Suggestion
// ============================================================================
TEST_CASE("TI_AI_006_BreakReminderGeneratesSuggestion", "[ai][integration]") {
AITestFixture f;
f.configure();
// Receive break reminder
f.io.injectMessage("scheduler:break_reminder", {
{"workMinutes", 45}
});
f.process();
// Verify LLM request published
REQUIRE(f.io.wasPublished("llm:request"));
auto req = f.io.getLastPublished("llm:request");
std::string convId = req["conversationId"];
// Simulate LLM response
f.io.injectMessage("llm:response", {
{"text", "Take a short break now!"},
{"conversationId", convId}
});
f.process();
// Verify suggestion
REQUIRE(f.io.wasPublished("ai:suggestion"));
}
// ============================================================================
// TI_AI_007: System Prompt In Request
// ============================================================================
TEST_CASE("TI_AI_007_SystemPromptInRequest", "[ai][integration]") {
AITestFixture f;
f.configure({{"system_prompt", "Custom prompt here"}});
f.io.injectMessage("ai:query", {{"query", "Test"}});
f.process();
REQUIRE(f.io.wasPublished("llm:request"));
auto msg = f.io.getLastPublished("llm:request");
REQUIRE(msg["systemPrompt"] == "Custom prompt here");
}
// ============================================================================
// TI_AI_008: Conversation ID Tracking
// ============================================================================
TEST_CASE("TI_AI_008_ConversationIdTracking", "[ai][integration]") {
AITestFixture f;
f.configure();
// First query
f.io.injectMessage("ai:query", {{"query", "Question 1"}});
f.process();
auto msg1 = f.io.getLastPublished("llm:request");
std::string convId = msg1["conversationId"];
REQUIRE(!convId.empty());
// Simulate response
f.io.injectMessage("llm:response", {{"text", "Response"}, {"conversationId", convId}});
f.process();
f.io.clearPublished();
// Second query should use same conversation
f.io.injectMessage("ai:query", {{"query", "Question 2"}});
f.process();
auto msg2 = f.io.getLastPublished("llm:request");
REQUIRE(msg2["conversationId"] == convId);
}
// ============================================================================
// TI_AI_009: Token Counting Accumulates
// ============================================================================
TEST_CASE("TI_AI_009_TokenCountingAccumulates", "[ai][integration]") {
AITestFixture f;
f.configure();
// Query 1
f.io.injectMessage("ai:query", {{"query", "Q1"}});
f.process();
f.io.injectMessage("llm:response", {{"text", "R1"}, {"tokens", 50}});
f.process();
// Query 2
f.io.injectMessage("ai:query", {{"query", "Q2"}});
f.process();
f.io.injectMessage("llm:response", {{"text", "R2"}, {"tokens", 75}});
f.process();
// Verify total
auto state = f.module.getState();
// TODO: Verify totalTokens == 125
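    // A possible concrete check once the state schema is settled (the
    // accessor below is hypothetical; AIModule's real state API may differ):
    //   REQUIRE(state->toJson()["totalTokens"] == 50 + 75);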
    SUCCEED(); // Placeholder
}
// ============================================================================
// TI_AI_010: State Serialization
// ============================================================================
TEST_CASE("TI_AI_010_StateSerialization", "[ai][integration]") {
AITestFixture f;
f.configure();
// Build state
f.io.injectMessage("ai:query", {{"query", "Test"}});
f.process();
f.io.injectMessage("llm:response", {{"text", "Response"}, {"tokens", 100}});
f.process();
// Get state
auto state = f.module.getState();
REQUIRE(state != nullptr);
// Restore
AIModule module2;
grove::JsonDataNode configNode2("config", json::object());
module2.setConfiguration(configNode2, &f.io, nullptr);
module2.setState(*state);
auto state2 = module2.getState();
REQUIRE(state2 != nullptr);
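    // A possible round-trip assertion once state contents are comparable
    // (toJson() is a hypothetical accessor, not a confirmed grove API):
    //   REQUIRE(state2->toJson() == state->toJson());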
    SUCCEED(); // Placeholder
}