aissia/src/modules/AIModule.cpp
StillHammer 26a5d3438b refactor: Services architecture for GroveEngine compliance
- Create 4 infrastructure services (LLM, Storage, Platform, Voice)
- Refactor all modules to pure business logic (no HTTP/SQLite/Win32)
- Add bundled SQLite amalgamation for MinGW compatibility
- Make OpenSSL optional in CMake configuration
- Fix topic naming convention (colon format)
- Add succession documentation

Build status: CMake config needs SQLite C language fix (documented)

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-11-26 11:57:53 +08:00

203 lines
7.1 KiB
C++

#include "AIModule.h"
#include <grove/JsonDataNode.h>
namespace aissia {
// Constructor: attaches (or creates) the module logger and an empty config node.
// All real wiring happens later in setConfiguration().
AIModule::AIModule() {
    // Reuse a previously registered logger if one exists; otherwise create one.
    auto existing = spdlog::get("AIModule");
    m_logger = existing ? existing : spdlog::stdout_color_mt("AIModule");
    m_config = std::make_unique<grove::JsonDataNode>("config");
}
/// Configures the module from a data node and subscribes to its topics.
/// @param configNode  source of "max_iterations" and "system_prompt" settings
/// @param io          message bus used for subscribe/publish (may be null)
/// @param scheduler   unused by this module (accepted for interface compatibility)
void AIModule::setConfiguration(const grove::IDataNode& configNode,
                                grove::IIO* io,
                                grove::ITaskScheduler* scheduler) {
    (void)scheduler;  // this module performs no scheduled work
    m_io = io;

    m_maxIterations = configNode.getInt("max_iterations", 10);
    m_systemPrompt = configNode.getString("system_prompt",
        "Tu es AISSIA, un assistant personnel specialise dans la gestion du temps et de l'attention. "
        "Tu aides l'utilisateur a rester productif tout en evitant l'hyperfocus excessif. "
        "Tu es bienveillant mais ferme quand necessaire pour encourager les pauses.");

    // Fix: retain the effective settings so getConfiguration() reflects them.
    // Previously m_config was recreated empty and never populated, so callers
    // of getConfiguration() always saw an empty node.
    m_config = std::make_unique<grove::JsonDataNode>("config");
    m_config->setInt("max_iterations", m_maxIterations);
    m_config->setString("system_prompt", m_systemPrompt);

    // Subscribe to relevant topics
    if (m_io) {
        grove::SubscriptionConfig subConfig;
        m_io->subscribe("ai:query", subConfig);
        m_io->subscribe("voice:transcription", subConfig);
        m_io->subscribe("scheduler:hyperfocus_alert", subConfig);
        m_io->subscribe("scheduler:break_reminder", subConfig);
        m_io->subscribe("llm:response", subConfig);
        m_io->subscribe("llm:error", subConfig);
    }

    m_logger->info("AIModule configure (v2 - sans infrastructure)");
}
// Returns the module's stored configuration node (owned by this module;
// the reference stays valid until the next setConfiguration() call).
const grove::IDataNode& AIModule::getConfiguration() {
return *m_config;
}
// Tick entry point: drains pending bus messages. The input node is unused;
// all work for this module arrives through subscribed topics.
void AIModule::process(const grove::IDataNode& input) {
processMessages();
}
void AIModule::processMessages() {
if (!m_io) return;
while (m_io->hasMessages() > 0) {
auto msg = m_io->pullMessage();
if (msg.topic == "ai:query" && msg.data) {
std::string query = msg.data->getString("query", "");
if (!query.empty()) {
sendQuery(query);
}
}
else if (msg.topic == "voice:transcription" && msg.data) {
std::string text = msg.data->getString("text", "");
if (!text.empty()) {
sendQuery(text);
}
}
else if (msg.topic == "llm:response" && msg.data) {
handleLLMResponse(*msg.data);
}
else if (msg.topic == "llm:error" && msg.data) {
handleLLMError(*msg.data);
}
else if (msg.topic == "scheduler:hyperfocus_alert" && msg.data) {
handleHyperfocusAlert(*msg.data);
}
else if (msg.topic == "scheduler:break_reminder" && msg.data) {
handleBreakReminder(*msg.data);
}
}
}
// Publishes a query to the LLM service and marks the module as awaiting a reply.
// Only the first 50 characters are logged to keep log lines short.
void AIModule::sendQuery(const std::string& query) {
    if (!m_io) {
        return;
    }
    m_awaitingResponse = true;
    m_logger->info("Sending query: {}", query.substr(0, 50));

    auto payload = std::make_unique<grove::JsonDataNode>("request");
    payload->setString("query", query);
    payload->setString("conversationId", m_currentConversationId);
    payload->setString("systemPrompt", m_systemPrompt);
    payload->setInt("maxIterations", m_maxIterations);
    m_io->publish("llm:request", std::move(payload));

    m_totalQueries++;
}
// Handles a completed LLM reply: updates counters and republishes the text
// on "ai:response" for downstream consumers (voice, notifications).
void AIModule::handleLLMResponse(const grove::IDataNode& data) {
    // Drop replies that belong to a different conversation.
    if (data.getString("conversationId", "default") != m_currentConversationId) {
        return;
    }
    m_awaitingResponse = false;

    const std::string text = data.getString("text", "");
    const int tokenCount = data.getInt("tokens", 0);
    const int iterationCount = data.getInt("iterations", 1);
    m_totalTokens += tokenCount;
    m_logger->info("Response received: {} chars, {} tokens, {} iterations",
                   text.size(), tokenCount, iterationCount);

    if (m_io) {
        auto out = std::make_unique<grove::JsonDataNode>("response");
        out->setString("text", text);
        out->setInt("tokens", tokenCount);
        m_io->publish("ai:response", std::move(out));
    }
}
// Handles an LLM failure: logs it and republishes the message on "ai:error".
void AIModule::handleLLMError(const grove::IDataNode& data) {
    // Drop errors that belong to a different conversation.
    if (data.getString("conversationId", "default") != m_currentConversationId) {
        return;
    }
    m_awaitingResponse = false;

    const std::string errorText = data.getString("message", "Unknown error");
    m_logger->error("LLM error: {}", errorText);

    if (m_io) {
        auto out = std::make_unique<grove::JsonDataNode>("error");
        out->setString("message", errorText);
        m_io->publish("ai:error", std::move(out));
    }
}
// Reacts to a scheduler hyperfocus alert by asking the LLM (in French) for a
// kind-but-firm intervention encouraging the user to take a break.
void AIModule::handleHyperfocusAlert(const grove::IDataNode& data) {
    const int minutes = data.getInt("duration_minutes", 120);
    const std::string task = data.getString("task", "");

    std::string prompt;
    prompt += "L'utilisateur est en hyperfocus depuis ";
    prompt += std::to_string(minutes);
    prompt += " minutes sur '";
    prompt += task;
    prompt += "'. Genere une intervention bienveillante mais ferme "
              "pour l'encourager a faire une pause.";
    sendQuery(prompt);
}
// Reacts to a scheduled break reminder by asking the LLM (in French) for a
// gentle, encouraging nudge to pause for the configured duration.
void AIModule::handleBreakReminder(const grove::IDataNode& data) {
    const int pauseMinutes = data.getInt("break_duration", 10);
    const std::string prompt =
        "Rappelle gentiment a l'utilisateur qu'il est temps de faire une pause de "
        + std::to_string(pauseMinutes) + " minutes. Sois encourageant.";
    sendQuery(prompt);
}
// Publishes a proactive suggestion on "ai:suggestion" with a display duration.
void AIModule::publishSuggestion(const std::string& message, int duration) {
    if (!m_io) {
        return;
    }
    auto node = std::make_unique<grove::JsonDataNode>("suggestion");
    node->setInt("duration", duration);
    node->setString("message", message);
    m_io->publish("ai:suggestion", std::move(node));
}
// Builds a health snapshot: readiness, counters, and pending-response flag.
std::unique_ptr<grove::IDataNode> AIModule::getHealthStatus() {
    auto health = std::make_unique<grove::JsonDataNode>("status");
    health->setString("status", "ready");
    health->setBool("awaitingResponse", m_awaitingResponse);
    health->setInt("totalQueries", m_totalQueries);
    health->setInt("totalTokens", m_totalTokens);
    return health;
}
// Shutdown hook: logs final usage counters. No resources to release here —
// the IO pointer is non-owning and smart pointers clean up in the destructor.
void AIModule::shutdown() {
m_logger->info("AIModule arrete. Queries: {}, Tokens: {}", m_totalQueries, m_totalTokens);
}
// Serializes the module's persistent state (counters + conversation id)
// for later restoration via setState().
std::unique_ptr<grove::IDataNode> AIModule::getState() {
    auto snapshot = std::make_unique<grove::JsonDataNode>("state");
    snapshot->setString("conversationId", m_currentConversationId);
    snapshot->setInt("totalTokens", m_totalTokens);
    snapshot->setInt("totalQueries", m_totalQueries);
    return snapshot;
}
// Restores state previously produced by getState(); missing keys fall back
// to zero counters and the "default" conversation.
void AIModule::setState(const grove::IDataNode& state) {
    m_currentConversationId = state.getString("conversationId", "default");
    m_totalQueries = state.getInt("totalQueries", 0);
    m_totalTokens = state.getInt("totalTokens", 0);
    m_logger->info("Etat restore: queries={}, tokens={}", m_totalQueries, m_totalTokens);
}
} // namespace aissia
// C ABI factory functions so GroveEngine can load this module dynamically.
// Raw new/delete is intentional here: ownership crosses the shared-library
// boundary, so both allocation and deallocation must happen on this side.
extern "C" {
// Allocates a new module instance; caller must release it via destroyModule().
grove::IModule* createModule() {
return new aissia::AIModule();
}
// Destroys an instance previously returned by createModule().
void destroyModule(grove::IModule* module) {
delete module;
}
}