feat: Implement Phase 1 complete - All 6 core modules

## New Modules
- StorageModule: SQLite persistence for sessions, app usage, conversations
- MonitoringModule: Cross-platform window tracking (Win32/X11)
- AIModule: Multi-provider LLM integration with agentic tool loop
- VoiceModule: TTS/STT coordination with speak queue

## Shared Libraries
- AissiaLLM: ILLMProvider abstraction (Claude + OpenAI providers)
- AissiaPlatform: IWindowTracker abstraction (Win32 + X11)
- AissiaAudio: ITTSEngine (SAPI/espeak) + ISTTEngine (Whisper API)
- HttpClient: Header-only HTTP client with OpenSSL

## Configuration
- Added JSON configs for all modules (storage, monitoring, ai, voice)
- Multi-provider LLM config with Claude and OpenAI support

## Dependencies
- SQLite3, OpenSSL, cpp-httplib (FetchContent)

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
StillHammer 2025-11-26 00:42:18 +08:00
parent 0dfb5f1535
commit bc3b6cbaba
35 changed files with 3591 additions and 9 deletions

View File

@ -13,6 +13,25 @@ set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
# NOTE(review): FORCE stomps a user-provided cache value; acceptable for a
# vendored subproject toggle, but confirm users never need GroveEngine tests.
set(GROVE_BUILD_TESTS OFF CACHE BOOL "Disable GroveEngine tests" FORCE)
add_subdirectory(external/GroveEngine)
# ============================================================================
# Dependencies
# ============================================================================
# SQLite3 (provides the imported target SQLite::SQLite3)
find_package(SQLite3 REQUIRED)
# OpenSSL for HTTPS
find_package(OpenSSL REQUIRED)
# cpp-httplib (header-only HTTP client) — fetched at configure time and
# consumed via ${httplib_SOURCE_DIR} include paths below.
include(FetchContent)
FetchContent_Declare(
httplib
GIT_REPOSITORY https://github.com/yhirose/cpp-httplib.git
GIT_TAG v0.14.1
)
FetchContent_MakeAvailable(httplib)
# ============================================================================
# Main Executable
# ============================================================================
@ -59,10 +78,125 @@ set_target_properties(NotificationModule PROPERTIES
LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/modules
)
# Future modules (uncomment once implemented):
# add_library(AIAssistantModule SHARED src/modules/AIAssistantModule.cpp)
# add_library(LanguageLearningModule SHARED src/modules/LanguageLearningModule.cpp)
# add_library(DataModule SHARED src/modules/DataModule.cpp)
# ============================================================================
# Shared Libraries (linked into modules)
# ============================================================================

# LLM providers (Claude/OpenAI) behind the ILLMProvider abstraction.
add_library(AissiaLLM STATIC
    src/shared/llm/LLMProviderFactory.cpp
    src/shared/llm/ClaudeProvider.cpp
    src/shared/llm/OpenAIProvider.cpp
    src/shared/llm/ToolRegistry.cpp
)
target_include_directories(AissiaLLM PRIVATE
    ${CMAKE_CURRENT_SOURCE_DIR}/src
    ${httplib_SOURCE_DIR}
)
target_link_libraries(AissiaLLM PRIVATE
    GroveEngine::impl
    spdlog::spdlog
    OpenSSL::SSL
    OpenSSL::Crypto
)
# Compile cpp-httplib with TLS so the HTTPS API endpoints are reachable.
target_compile_definitions(AissiaLLM PRIVATE CPPHTTPLIB_OPENSSL_SUPPORT)

# Foreground-window tracking (Win32 / X11) behind IWindowTracker.
add_library(AissiaPlatform STATIC
    src/shared/platform/WindowTrackerFactory.cpp
)
target_include_directories(AissiaPlatform PRIVATE
    ${CMAKE_CURRENT_SOURCE_DIR}/src
)
target_link_libraries(AissiaPlatform PRIVATE
    spdlog::spdlog
)
if(WIN32)
    # psapi: process-name lookup APIs used by the Win32 tracker.
    target_link_libraries(AissiaPlatform PRIVATE psapi)
endif()

# TTS/STT engines (SAPI/espeak + Whisper API) behind ITTSEngine/ISTTEngine.
add_library(AissiaAudio STATIC
    src/shared/audio/TTSEngineFactory.cpp
    src/shared/audio/STTEngineFactory.cpp
)
target_include_directories(AissiaAudio PRIVATE
    ${CMAKE_CURRENT_SOURCE_DIR}/src
    ${httplib_SOURCE_DIR}
)
target_link_libraries(AissiaAudio PRIVATE
    spdlog::spdlog
    OpenSSL::SSL
    OpenSSL::Crypto
)
target_compile_definitions(AissiaAudio PRIVATE CPPHTTPLIB_OPENSSL_SUPPORT)
if(WIN32)
    # sapi: Windows TTS; ole32: COM runtime required by SAPI.
    target_link_libraries(AissiaAudio PRIVATE sapi ole32)
endif()

# ============================================================================
# New Modules
# ============================================================================

# aissia_add_module(<name> [extra link libraries...])
#
# Declares one hot-reloadable module: a SHARED library built from
# src/modules/<name>.cpp, linked PRIVATE against the engine and logging
# plus any extra libraries passed after the name, named lib<name> and
# emitted into ${CMAKE_BINARY_DIR}/modules (the hot-reload search path).
#
# Example: aissia_add_module(StorageModule SQLite::SQLite3)
function(aissia_add_module name)
    add_library(${name} SHARED src/modules/${name}.cpp)
    target_include_directories(${name} PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/src)
    target_link_libraries(${name} PRIVATE
        GroveEngine::impl
        spdlog::spdlog
        ${ARGN}
    )
    set_target_properties(${name} PROPERTIES
        PREFIX "lib"
        LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/modules
    )
endfunction()

aissia_add_module(StorageModule SQLite::SQLite3)    # SQLite persistence
aissia_add_module(MonitoringModule AissiaPlatform)  # window tracking
aissia_add_module(AIModule AissiaLLM)               # LLM integration
aissia_add_module(VoiceModule AissiaAudio)          # TTS/STT
# ============================================================================
# Copy config files to build directory
@ -76,10 +210,13 @@ file(COPY ${CMAKE_CURRENT_SOURCE_DIR}/config/
# Quick rebuild of modules only (for hot-reload workflow)
# NOTE(review): the two DEPENDS lines below look like diff-merge residue —
# CMake accepts repeated DEPENDS keywords (the lists accumulate), so this
# works, but the first line is fully contained in the second and should be
# deleted once confirmed.
add_custom_target(modules
DEPENDS SchedulerModule NotificationModule
DEPENDS SchedulerModule NotificationModule StorageModule MonitoringModule AIModule VoiceModule
COMMENT "Building hot-reloadable modules only"
)
# Create the runtime data directory (StorageModule's default DB path).
file(MAKE_DIRECTORY ${CMAKE_BINARY_DIR}/data)
# Run Aissia
add_custom_target(run
COMMAND $<TARGET_FILE:aissia>

22
config/ai.json Normal file
View File

@ -0,0 +1,22 @@
{
"provider": "claude",
"max_iterations": 10,
"config_path": "./config/ai.json",
"providers": {
"claude": {
"api_key_env": "ANTHROPIC_API_KEY",
"model": "claude-sonnet-4-20250514",
"max_tokens": 4096,
"base_url": "https://api.anthropic.com"
},
"openai": {
"api_key_env": "OPENAI_API_KEY",
"model": "gpt-4o",
"max_tokens": 4096,
"base_url": "https://api.openai.com"
}
},
"system_prompt": "Tu es AISSIA, un assistant personnel specialise dans la gestion du temps et de l'attention. Tu aides l'utilisateur a rester productif tout en evitant l'hyperfocus excessif. Tu es bienveillant mais ferme quand necessaire pour encourager les pauses. Reponds toujours en francais."
}

28
config/monitoring.json Normal file
View File

@ -0,0 +1,28 @@
{
"poll_interval_ms": 1000,
"idle_threshold_seconds": 300,
"enabled": true,
"productive_apps": [
"Code",
"code",
"CLion",
"clion",
"Visual Studio",
"devenv",
"rider",
"idea",
"pycharm",
"nvim",
"vim",
"emacs"
],
"distracting_apps": [
"Discord",
"discord",
"Steam",
"steam",
"YouTube",
"Netflix",
"Twitch"
]
}

5
config/storage.json Normal file
View File

@ -0,0 +1,5 @@
{
"database_path": "./data/aissia.db",
"journal_mode": "WAL",
"busy_timeout_ms": 5000
}

14
config/voice.json Normal file
View File

@ -0,0 +1,14 @@
{
"tts": {
"enabled": true,
"engine": "auto",
"rate": 0,
"volume": 80
},
"stt": {
"enabled": true,
"api_key_env": "OPENAI_API_KEY",
"model": "whisper-1",
"language": "fr"
}
}

View File

@ -106,12 +106,12 @@ int main(int argc, char* argv[]) {
// List of modules to load, in order (name, config file).
std::vector<std::pair<std::string, std::string>> moduleList = {
{"StorageModule", "storage.json"}, // must load first (persistence backend)
{"SchedulerModule", "scheduler.json"},
{"NotificationModule", "notification.json"},
// Future modules:
// {"AIAssistantModule", "ai_assistant.json"},
// {"LanguageLearningModule", "language.json"},
// {"DataModule", "data.json"},
{"MonitoringModule", "monitoring.json"},
{"AIModule", "ai.json"},
{"VoiceModule", "voice.json"},
};
// Charger les modules

306
src/modules/AIModule.cpp Normal file
View File

@ -0,0 +1,306 @@
#include "AIModule.h"
#include "../shared/llm/LLMProviderFactory.hpp"
#include <grove/JsonDataNode.h>
#include <fstream>
namespace aissia {
// Construct the module: reuse the registered "AIModule" logger when one
// exists, otherwise create a colored stdout logger under that name.
AIModule::AIModule() {
    if (auto existing = spdlog::get("AIModule")) {
        m_logger = existing;
    } else {
        m_logger = spdlog::stdout_color_mt("AIModule");
    }
    m_conversationHistory = nlohmann::json::array();
    m_config = std::make_unique<grove::JsonDataNode>("config");
}
// Configure the module: read basic settings from configNode, build the LLM
// provider from the full JSON file on disk, subscribe to input topics, and
// register the built-in tools. `scheduler` is currently unused.
void AIModule::setConfiguration(const grove::IDataNode& configNode,
grove::IIO* io,
grove::ITaskScheduler* scheduler) {
m_io = io;
// NOTE(review): m_config is replaced by an EMPTY node here, so
// getConfiguration() never reflects configNode's values — confirm whether
// the incoming config should be copied instead.
m_config = std::make_unique<grove::JsonDataNode>("config");
m_providerName = configNode.getString("provider", "claude");
m_maxIterations = configNode.getInt("max_iterations", 10);
// Default system prompt (French); runtime string, intentionally untouched.
m_systemPrompt = configNode.getString("system_prompt",
"Tu es AISSIA, un assistant personnel specialise dans la gestion du temps et de l'attention. "
"Tu aides l'utilisateur a rester productif tout en evitant l'hyperfocus excessif. "
"Tu es bienveillant mais ferme quand necessaire pour encourager les pauses.");
// The provider factory needs the complete JSON document (providers map,
// API-key env names, models), so re-read the file rather than configNode.
std::string configPath = configNode.getString("config_path", "./config/ai.json");
try {
std::ifstream file(configPath);
if (file.is_open()) {
nlohmann::json fullConfig;
file >> fullConfig;
m_provider = LLMProviderFactory::create(fullConfig);
m_logger->info("AIModule configure: provider={}, model={}",
m_providerName, m_provider->getModel());
} else {
// Non-fatal: the module stays loaded, but handleQuery() will keep
// publishing "LLM provider not initialized" until a provider exists.
m_logger->warn("Config file not found: {}, using defaults", configPath);
}
} catch (const std::exception& e) {
m_logger->error("Failed to initialize LLM provider: {}", e.what());
}
// Subscribe to the topics consumed by processMessages().
if (m_io) {
grove::SubscriptionConfig subConfig;
m_io->subscribe("ai:query", subConfig);
m_io->subscribe("voice:transcription", subConfig);
m_io->subscribe("scheduler:hyperfocus_alert", subConfig);
m_io->subscribe("scheduler:break_reminder", subConfig);
}
registerDefaultTools();
}
// Returns the module's configuration node (currently an empty placeholder).
const grove::IDataNode& AIModule::getConfiguration() {
return *m_config;
}
// Engine tick: drain and handle pending messages; `input` is unused here.
void AIModule::process(const grove::IDataNode& input) {
processMessages();
}
// Drain the IIO mailbox and dispatch each message by topic. Queries and
// voice transcriptions both feed handleQuery(); scheduler alerts are
// translated into LLM prompts by their dedicated handlers.
void AIModule::processMessages() {
    if (!m_io) return;
    while (m_io->hasMessages() > 0) {
        auto msg = m_io->pullMessage();
        if (!msg.data) continue;  // nothing actionable without a payload
        const std::string& topic = msg.topic;
        if (topic == "ai:query") {
            const std::string query = msg.data->getString("query", "");
            if (!query.empty()) handleQuery(query);
        } else if (topic == "voice:transcription") {
            const std::string text = msg.data->getString("text", "");
            if (!text.empty()) handleQuery(text);
        } else if (topic == "scheduler:hyperfocus_alert") {
            handleHyperfocusAlert(*msg.data);
        } else if (topic == "scheduler:break_reminder") {
            handleBreakReminder(*msg.data);
        }
    }
}
// Run one user query through the agentic loop and publish the outcome on
// "ai:response" or "ai:error". Blocking: the loop performs synchronous
// provider calls, so m_isProcessing guards isIdle() during the whole run.
void AIModule::handleQuery(const std::string& query) {
if (!m_provider) {
publishError("LLM provider not initialized");
return;
}
m_isProcessing = true;
m_logger->info("Processing query: {}", query.substr(0, 50));
try {
auto result = agenticLoop(query);
if (result.contains("response")) {
publishResponse(result["response"].get<std::string>());
m_totalQueries++;
} else if (result.contains("error")) {
publishError(result["error"].get<std::string>());
}
} catch (const std::exception& e) {
publishError(e.what());
}
m_isProcessing = false;
}
// Turn a hyperfocus alert into an LLM-generated intervention. The prompt is
// a runtime string in French (matching the system prompt); left untouched.
void AIModule::handleHyperfocusAlert(const grove::IDataNode& data) {
int minutes = data.getInt("duration_minutes", 120);
std::string task = data.getString("task", "");
std::string query = "L'utilisateur est en hyperfocus depuis " + std::to_string(minutes) +
" minutes sur '" + task + "'. Genere une intervention bienveillante mais ferme "
"pour l'encourager a faire une pause.";
handleQuery(query);
}
// Turn a break reminder into a gentle LLM-generated nudge (French prompt).
void AIModule::handleBreakReminder(const grove::IDataNode& data) {
int breakDuration = data.getInt("break_duration", 10);
std::string query = "Rappelle gentiment a l'utilisateur qu'il est temps de faire une pause de " +
std::to_string(breakDuration) + " minutes. Sois encourageant.";
handleQuery(query);
}
// Core agentic loop: send the running conversation to the provider, execute
// any tools it requests, feed the results back, and repeat until the model
// ends its turn or m_maxIterations is exhausted.
//
// Returns a JSON object with either
//   {"response", "iterations", "tokens"} on success, or
//   {"error": "max_iterations_reached"} when the budget runs out.
nlohmann::json AIModule::agenticLoop(const std::string& userQuery) {
nlohmann::json messages = nlohmann::json::array();
messages.push_back({{"role", "user"}, {"content", userQuery}});
nlohmann::json tools = m_toolRegistry.getToolDefinitions();
for (int iteration = 0; iteration < m_maxIterations; iteration++) {
m_logger->debug("Agentic loop iteration {}", iteration + 1);
auto response = m_provider->chat(m_systemPrompt, messages, tools);
m_totalTokens += response.input_tokens + response.output_tokens;
if (response.is_end_turn) {
// Record the finished exchange in the long-lived history that
// getState()/setState() carry across hot-reloads.
m_conversationHistory.push_back({{"role", "user"}, {"content", userQuery}});
m_conversationHistory.push_back({{"role", "assistant"}, {"content", response.text}});
return {
{"response", response.text},
{"iterations", iteration + 1},
{"tokens", response.input_tokens + response.output_tokens}
};
}
// Execute every tool call the model requested, in order.
if (!response.tool_calls.empty()) {
std::vector<ToolResult> results;
for (const auto& call : response.tool_calls) {
m_logger->debug("Executing tool: {}", call.name);
nlohmann::json result = m_toolRegistry.execute(call.name, call.input);
results.push_back({call.id, result.dump(), false});
}
// Append assistant message and tool results
m_provider->appendAssistantMessage(messages, response);
auto toolResultsMsg = m_provider->formatToolResults(results);
// formatToolResults may yield a single message object or an array of
// messages depending on the provider's wire format — handle both.
if (toolResultsMsg.is_array()) {
for (const auto& msg : toolResultsMsg) {
messages.push_back(msg);
}
} else {
messages.push_back(toolResultsMsg);
}
}
// NOTE(review): if the provider neither ends the turn nor requests
// tools, the identical messages are re-sent next iteration — confirm
// this cannot silently burn the whole iteration (and token) budget.
}
return {{"error", "max_iterations_reached"}};
}
// Register the built-in tools exposed to the LLM:
// - get_current_time : local wall-clock time as "YYYY-MM-DD HH:MM:SS"
// - suggest_break    : publishes an "ai:suggestion" event with the message
void AIModule::registerDefaultTools() {
// Tool: get_current_time
m_toolRegistry.registerTool(
"get_current_time",
"Obtient l'heure actuelle",
{{"type", "object"}, {"properties", nlohmann::json::object()}},
[](const nlohmann::json& input) -> nlohmann::json {
std::time_t now = std::time(nullptr);
// NOTE(review): std::localtime returns a pointer to shared static
// storage and is not thread-safe — fine if tools run on a single
// thread; confirm, or switch to localtime_r/localtime_s.
std::tm* tm = std::localtime(&now);
char buffer[64];
std::strftime(buffer, sizeof(buffer), "%Y-%m-%d %H:%M:%S", tm);
return {{"time", buffer}};
}
);
// Tool: suggest_break
m_toolRegistry.registerTool(
"suggest_break",
"Suggere une pause a l'utilisateur avec un message personnalise",
{
{"type", "object"},
{"properties", {
{"message", {{"type", "string"}, {"description", "Message de suggestion"}}},
// NOTE(review): "Duree suggere" looks like a typo ("Duree suggeree");
// left untouched because this string is part of the tool schema
// actually sent to the API.
{"duration_minutes", {{"type", "integer"}, {"description", "Duree suggere"}}}
}},
{"required", nlohmann::json::array({"message"})}
},
[this](const nlohmann::json& input) -> nlohmann::json {
std::string message = input.value("message", "Prends une pause!");
int duration = input.value("duration_minutes", 10);
// Publish suggestion
if (m_io) {
auto event = std::make_unique<grove::JsonDataNode>("suggestion");
event->setString("message", message);
event->setInt("duration", duration);
m_io->publish("ai:suggestion", std::move(event));
}
return {{"status", "suggestion_sent"}, {"message", message}};
}
);
m_logger->info("Registered {} default tools", m_toolRegistry.size());
}
void AIModule::publishResponse(const std::string& response) {
if (!m_io) return;
auto event = std::make_unique<grove::JsonDataNode>("response");
event->setString("text", response);
event->setString("provider", m_providerName);
m_io->publish("ai:response", std::move(event));
m_logger->info("AI response: {}", response.substr(0, 100));
}
void AIModule::publishError(const std::string& error) {
if (!m_io) return;
auto event = std::make_unique<grove::JsonDataNode>("error");
event->setString("message", error);
m_io->publish("ai:error", std::move(event));
m_logger->error("AI error: {}", error);
}
std::unique_ptr<grove::IDataNode> AIModule::getHealthStatus() {
auto status = std::make_unique<grove::JsonDataNode>("status");
status->setString("status", m_provider ? "ready" : "not_initialized");
status->setString("provider", m_providerName);
status->setInt("totalQueries", m_totalQueries);
status->setInt("totalTokens", m_totalTokens);
status->setBool("isProcessing", m_isProcessing);
return status;
}
// Log usage totals at teardown; the module holds no OS resources to free.
void AIModule::shutdown() {
    m_logger->info("AIModule arrete. Queries: {}, Tokens: {}", m_totalQueries, m_totalTokens);
}

// Snapshot counters and the serialized conversation for hot-reload.
std::unique_ptr<grove::IDataNode> AIModule::getState() {
    auto snapshot = std::make_unique<grove::JsonDataNode>("state");
    snapshot->setString("provider", m_providerName);
    snapshot->setInt("totalQueries", m_totalQueries);
    snapshot->setInt("totalTokens", m_totalTokens);
    snapshot->setString("conversationHistory", m_conversationHistory.dump());
    return snapshot;
}

// Restore counters and history; unparsable history resets to an empty array.
void AIModule::setState(const grove::IDataNode& state) {
    m_totalQueries = state.getInt("totalQueries", 0);
    m_totalTokens = state.getInt("totalTokens", 0);
    const std::string serialized = state.getString("conversationHistory", "[]");
    try {
        m_conversationHistory = nlohmann::json::parse(serialized);
    } catch (...) {
        m_conversationHistory = nlohmann::json::array();
    }
    m_logger->info("Etat restore: queries={}, tokens={}", m_totalQueries, m_totalTokens);
}
} // namespace aissia
// C-linkage factory functions resolved by the engine's hot-reload loader.
extern "C" {
grove::IModule* createModule() {
return new aissia::AIModule();
}
void destroyModule(grove::IModule* module) {
delete module;
}
}

89
src/modules/AIModule.h Normal file
View File

@ -0,0 +1,89 @@
#pragma once
#include <grove/IModule.h>
#include <grove/JsonDataNode.h>
#include "../shared/llm/ILLMProvider.hpp"
#include "../shared/llm/ToolRegistry.hpp"
#include <spdlog/spdlog.h>
#include <spdlog/sinks/stdout_color_sinks.h>
#include <memory>
#include <string>
namespace aissia {
/**
 * @brief AI Assistant Module - agentic LLM integration
 *
 * Features:
 * - Agentic loop with tool execution
 * - Multi-provider support (Claude, OpenAI)
 * - Proactive interventions
 * - Conversation-context management
 *
 * Publishes on:
 * - "ai:response"   : final LLM answer
 * - "ai:thinking"   : LLM currently reasoning
 * - "ai:suggestion" : proactive suggestion
 * - "ai:error"      : API error
 *
 * Subscribes to:
 * - "ai:query"                  : user request
 * - "voice:transcription"       : transcribed text (STT)
 * - "scheduler:hyperfocus_alert": generate an intervention
 * - "scheduler:break_reminder"  : generate a break suggestion
 */
class AIModule : public grove::IModule {
public:
AIModule();
~AIModule() override = default;
// IModule interface
void process(const grove::IDataNode& input) override;
void setConfiguration(const grove::IDataNode& configNode, grove::IIO* io,
grove::ITaskScheduler* scheduler) override;
const grove::IDataNode& getConfiguration() override;
std::unique_ptr<grove::IDataNode> getHealthStatus() override;
void shutdown() override;
std::unique_ptr<grove::IDataNode> getState() override;
void setState(const grove::IDataNode& state) override;
std::string getType() const override { return "AIModule"; }
// Idle whenever no query is in flight (safe hot-reload point).
bool isIdle() const override { return !m_isProcessing; }
int getVersion() const override { return 1; }
private:
// Configuration
std::string m_providerName = "claude"; // active provider key ("claude"/"openai")
std::string m_systemPrompt;            // system prompt sent on every chat call
int m_maxIterations = 10;              // agentic-loop budget per query
// State
std::unique_ptr<ILLMProvider> m_provider; // null until setConfiguration succeeds
ToolRegistry m_toolRegistry;              // tools exposed to the model
nlohmann::json m_conversationHistory;     // carried across hot-reloads via get/setState
int m_totalQueries = 0;
int m_totalTokens = 0;
bool m_isProcessing = false;
// Services
grove::IIO* m_io = nullptr;
std::unique_ptr<grove::JsonDataNode> m_config;
std::shared_ptr<spdlog::logger> m_logger;
// Helpers
void processMessages();
void handleQuery(const std::string& query);
void handleHyperfocusAlert(const grove::IDataNode& data);
void handleBreakReminder(const grove::IDataNode& data);
nlohmann::json agenticLoop(const std::string& userQuery);
void registerDefaultTools();
void publishResponse(const std::string& response);
void publishError(const std::string& error);
};
} // namespace aissia
// C-linkage entry points for the dynamic module loader.
extern "C" {
grove::IModule* createModule();
void destroyModule(grove::IModule* module);
}

View File

@ -0,0 +1,222 @@
#include "MonitoringModule.h"
#include <grove/JsonDataNode.h>
#include <algorithm>
namespace aissia {
// Construct the module: prefer an already-registered logger, otherwise
// create a colored stdout logger named after the module.
MonitoringModule::MonitoringModule() {
    if (auto existing = spdlog::get("MonitoringModule")) {
        m_logger = existing;
    } else {
        m_logger = spdlog::stdout_color_mt("MonitoringModule");
    }
    m_config = std::make_unique<grove::JsonDataNode>("config");
}
// Configure polling cadence, idle threshold, and the productive/distracting
// app-name lists, then instantiate the platform-specific window tracker.
// `scheduler` is currently unused.
void MonitoringModule::setConfiguration(const grove::IDataNode& configNode,
grove::IIO* io,
grove::ITaskScheduler* scheduler) {
m_io = io;
// NOTE(review): m_config becomes an empty node, so getConfiguration()
// does not mirror configNode — confirm intended.
m_config = std::make_unique<grove::JsonDataNode>("config");
m_pollIntervalMs = configNode.getInt("poll_interval_ms", 1000);
m_idleThresholdSeconds = configNode.getInt("idle_threshold_seconds", 300);
m_enabled = configNode.getBool("enabled", true);
// Load productive apps list
m_productiveApps.clear();
auto* prodNode = configNode.getChildReadOnly("productive_apps");
if (prodNode) {
for (const auto& name : prodNode->getChildNames()) {
m_productiveApps.insert(prodNode->getString(name, ""));
}
}
// Fallback used only when the config provides no list — presumably meant
// to stay in sync with config/monitoring.json; verify.
if (m_productiveApps.empty()) {
m_productiveApps = {"Code", "code", "CLion", "clion", "Visual Studio",
"devenv", "rider", "idea", "pycharm"};
}
// Load distracting apps list
m_distractingApps.clear();
auto* distNode = configNode.getChildReadOnly("distracting_apps");
if (distNode) {
for (const auto& name : distNode->getChildNames()) {
m_distractingApps.insert(distNode->getString(name, ""));
}
}
// NOTE(review): this fallback lists browsers (firefox/chrome) that
// config/monitoring.json does not — confirm which set is intended.
if (m_distractingApps.empty()) {
m_distractingApps = {"Discord", "discord", "Steam", "steam",
"firefox", "chrome", "YouTube"};
}
// Create window tracker
m_tracker = WindowTrackerFactory::create();
m_logger->info("MonitoringModule configure: poll={}ms, idle={}s, platform={}",
m_pollIntervalMs, m_idleThresholdSeconds,
m_tracker ? m_tracker->getPlatformName() : "none");
}
// Returns the module's configuration node (currently an empty placeholder).
const grove::IDataNode& MonitoringModule::getConfiguration() {
return *m_config;
}
// Engine tick: rate-limit polling to poll_interval_ms using the engine
// clock carried in `input`, then sample the foreground app and idle state.
void MonitoringModule::process(const grove::IDataNode& input) {
if (!m_enabled || !m_tracker || !m_tracker->isAvailable()) return;
// NOTE(review): the double from getDouble() is narrowed to float here and
// in m_lastPollTime; past ~2^23 seconds of engine time, float resolution
// could make the interval comparison unreliable — consider double.
float currentTime = input.getDouble("gameTime", 0.0);
// Poll based on interval
float pollIntervalSec = m_pollIntervalMs / 1000.0f;
if (currentTime - m_lastPollTime < pollIntervalSec) return;
m_lastPollTime = currentTime;
checkCurrentApp(currentTime);
checkIdleState(currentTime);
}
// Detect foreground-app changes. On a change: accumulate time spent in the
// previous app, update the productivity counters, and publish one
// "monitoring:app_changed" event describing the app we just left.
void MonitoringModule::checkCurrentApp(float currentTime) {
std::string newApp = m_tracker->getCurrentAppName();
std::string newTitle = m_tracker->getCurrentWindowTitle();
if (newApp != m_currentApp) {
// App changed
int duration = static_cast<int>(currentTime - m_appStartTime);
// Skip the very first transition (no previous app) and sub-second stays.
if (!m_currentApp.empty() && duration > 0) {
m_appDurations[m_currentApp] += duration;
// Update productivity counters
if (isProductiveApp(m_currentApp)) {
m_totalProductiveSeconds += duration;
} else if (isDistractingApp(m_currentApp)) {
m_totalDistractingSeconds += duration;
}
publishAppChanged(m_currentApp, newApp, duration);
}
m_currentApp = newApp;
m_currentWindowTitle = newTitle;
m_appStartTime = currentTime;
m_logger->debug("App: {} - {}", m_currentApp, m_currentWindowTitle.substr(0, 50));
}
}
// Track idle-state transitions, publishing idle_detected /
// activity_resumed only on the edges (not on every poll).
void MonitoringModule::checkIdleState(float currentTime) {
bool wasIdle = m_isIdle;
m_isIdle = m_tracker->isUserIdle(m_idleThresholdSeconds);
if (m_isIdle && !wasIdle) {
m_logger->info("Utilisateur inactif ({}s)", m_idleThresholdSeconds);
if (m_io) {
auto event = std::make_unique<grove::JsonDataNode>("idle");
event->setString("type", "idle_detected");
event->setInt("idleSeconds", m_tracker->getIdleTimeSeconds());
m_io->publish("monitoring:idle_detected", std::move(event));
}
}
else if (!m_isIdle && wasIdle) {
m_logger->info("Activite reprise");
if (m_io) {
auto event = std::make_unique<grove::JsonDataNode>("active");
event->setString("type", "activity_resumed");
m_io->publish("monitoring:activity_resumed", std::move(event));
}
}
}
// True when `appName` exactly matches a configured productive app, or
// case-insensitively contains one of the configured names as a substring.
bool MonitoringModule::isProductiveApp(const std::string& appName) const {
    if (m_productiveApps.count(appName) != 0) {
        return true;
    }
    auto lowered = [](std::string s) {
        std::transform(s.begin(), s.end(), s.begin(), ::tolower);
        return s;
    };
    const std::string haystack = lowered(appName);
    return std::any_of(m_productiveApps.begin(), m_productiveApps.end(),
                       [&](const std::string& entry) {
                           return haystack.find(lowered(entry)) != std::string::npos;
                       });
}

// Same matching rules as isProductiveApp, against the distracting list.
bool MonitoringModule::isDistractingApp(const std::string& appName) const {
    if (m_distractingApps.count(appName) != 0) {
        return true;
    }
    auto lowered = [](std::string s) {
        std::transform(s.begin(), s.end(), s.begin(), ::tolower);
        return s;
    };
    const std::string haystack = lowered(appName);
    return std::any_of(m_distractingApps.begin(), m_distractingApps.end(),
                       [&](const std::string& entry) {
                           return haystack.find(lowered(entry)) != std::string::npos;
                       });
}
// Broadcast a foreground-app change: which app was left, which gained
// focus, how long the old app was used, and how it was classified.
void MonitoringModule::publishAppChanged(const std::string& oldApp, const std::string& newApp, int duration) {
    if (m_io == nullptr) {
        return;
    }
    auto node = std::make_unique<grove::JsonDataNode>("app_changed");
    node->setString("oldApp", oldApp);
    node->setString("newApp", newApp);
    node->setInt("duration", duration);
    node->setBool("wasProductive", isProductiveApp(oldApp));
    node->setBool("wasDistracting", isDistractingApp(oldApp));
    m_io->publish("monitoring:app_changed", std::move(node));
}
// Health snapshot: enabled flag, current app, idle state, totals, platform.
std::unique_ptr<grove::IDataNode> MonitoringModule::getHealthStatus() {
    auto health = std::make_unique<grove::JsonDataNode>("status");
    health->setString("status", m_enabled ? "running" : "disabled");
    health->setString("currentApp", m_currentApp);
    health->setBool("isIdle", m_isIdle);
    health->setInt("totalProductiveSeconds", m_totalProductiveSeconds);
    health->setInt("totalDistractingSeconds", m_totalDistractingSeconds);
    health->setString("platform", m_tracker ? m_tracker->getPlatformName() : "none");
    return health;
}

// Log accumulated productivity totals at teardown.
void MonitoringModule::shutdown() {
    m_logger->info("MonitoringModule arrete. Productif: {}s, Distrait: {}s",
                   m_totalProductiveSeconds, m_totalDistractingSeconds);
}

// Serialize hot-reload state: current app, focus start time, idle flag,
// and the running productivity totals (per-app map is not carried over).
std::unique_ptr<grove::IDataNode> MonitoringModule::getState() {
    auto snapshot = std::make_unique<grove::JsonDataNode>("state");
    snapshot->setString("currentApp", m_currentApp);
    snapshot->setDouble("appStartTime", m_appStartTime);
    snapshot->setBool("isIdle", m_isIdle);
    snapshot->setInt("totalProductiveSeconds", m_totalProductiveSeconds);
    snapshot->setInt("totalDistractingSeconds", m_totalDistractingSeconds);
    return snapshot;
}

// Restore the state saved by getState().
void MonitoringModule::setState(const grove::IDataNode& state) {
    m_currentApp = state.getString("currentApp", "");
    m_appStartTime = state.getDouble("appStartTime", 0.0);
    m_isIdle = state.getBool("isIdle", false);
    m_totalProductiveSeconds = state.getInt("totalProductiveSeconds", 0);
    m_totalDistractingSeconds = state.getInt("totalDistractingSeconds", 0);
    m_logger->info("Etat restore: app={}, productif={}s", m_currentApp, m_totalProductiveSeconds);
}
} // namespace aissia
// C-linkage factory functions resolved by the engine's hot-reload loader.
extern "C" {
grove::IModule* createModule() {
return new aissia::MonitoringModule();
}
void destroyModule(grove::IModule* module) {
delete module;
}
}

View File

@ -0,0 +1,91 @@
#pragma once
#include <grove/IModule.h>
#include <grove/JsonDataNode.h>
#include "../shared/platform/IWindowTracker.hpp"
#include <spdlog/spdlog.h>
#include <spdlog/sinks/stdout_color_sinks.h>
#include <memory>
#include <string>
#include <vector>
#include <map>
#include <set>
namespace aissia {
/**
 * @brief Monitoring Module - foreground application tracking
 *
 * Features:
 * - Foreground-application detection
 * - Productive / distracting classification
 * - User idle detection
 * - Per-application statistics
 *
 * Publishes on:
 * - "monitoring:app_changed"        : foreground app changed
 * - "monitoring:idle_detected"      : user went idle
 * - "monitoring:activity_resumed"   : activity resumed
 * - "monitoring:productivity_update": stats update
 *
 * Subscribes to:
 * - "scheduler:task_started"  : associate tracking with a task
 * - "scheduler:task_completed": stop task tracking
 */
class MonitoringModule : public grove::IModule {
public:
MonitoringModule();
~MonitoringModule() override = default;
// IModule interface
void process(const grove::IDataNode& input) override;
void setConfiguration(const grove::IDataNode& configNode, grove::IIO* io,
grove::ITaskScheduler* scheduler) override;
const grove::IDataNode& getConfiguration() override;
std::unique_ptr<grove::IDataNode> getHealthStatus() override;
void shutdown() override;
std::unique_ptr<grove::IDataNode> getState() override;
void setState(const grove::IDataNode& state) override;
std::string getType() const override { return "MonitoringModule"; }
// Always reports idle: polling holds no in-flight work between ticks.
bool isIdle() const override { return true; }
int getVersion() const override { return 1; }
private:
// Configuration
int m_pollIntervalMs = 1000;      // foreground-app sampling period
int m_idleThresholdSeconds = 300; // inactivity before idle_detected fires
std::set<std::string> m_productiveApps;
std::set<std::string> m_distractingApps;
bool m_enabled = true;
// State
std::string m_currentApp;         // last sampled foreground app name
std::string m_currentWindowTitle;
float m_appStartTime = 0.0f;      // engine time when m_currentApp gained focus
bool m_isIdle = false;
std::map<std::string, int> m_appDurations; // seconds per app
int m_totalProductiveSeconds = 0;
int m_totalDistractingSeconds = 0;
// Services
grove::IIO* m_io = nullptr;
std::unique_ptr<IWindowTracker> m_tracker; // platform backend (Win32/X11)
std::unique_ptr<grove::JsonDataNode> m_config;
std::shared_ptr<spdlog::logger> m_logger;
float m_lastPollTime = 0.0f;
// Helpers
void checkCurrentApp(float currentTime);
void checkIdleState(float currentTime);
bool isProductiveApp(const std::string& appName) const;
bool isDistractingApp(const std::string& appName) const;
void publishAppChanged(const std::string& oldApp, const std::string& newApp, int duration);
};
} // namespace aissia
// C-linkage entry points for the dynamic module loader.
extern "C" {
grove::IModule* createModule();
void destroyModule(grove::IModule* module);
}

View File

@ -0,0 +1,273 @@
#include "StorageModule.h"
#include <grove/JsonDataNode.h>
#include <sqlite3.h>
#include <filesystem>
#include <ctime>
namespace fs = std::filesystem;
namespace aissia {
// Construct the module: reuse the registered "StorageModule" logger when
// one exists, otherwise create a colored stdout logger under that name.
StorageModule::StorageModule() {
    if (auto existing = spdlog::get("StorageModule")) {
        m_logger = existing;
    } else {
        m_logger = spdlog::stdout_color_mt("StorageModule");
    }
    m_config = std::make_unique<grove::JsonDataNode>("config");
}

// Release the SQLite handle even when shutdown() was never called.
StorageModule::~StorageModule() {
    closeDatabase();
}
// Configure the database path and SQLite options, create the data directory
// if needed, then open the database and create the schema.
// `scheduler` is currently unused.
//
// NOTE(review): no m_io->subscribe() calls appear here, yet
// processMessages() pulls "scheduler:task_completed" and
// "monitoring:app_changed" — confirm subscriptions happen elsewhere.
void StorageModule::setConfiguration(const grove::IDataNode& configNode,
grove::IIO* io,
grove::ITaskScheduler* scheduler) {
m_io = io;
// NOTE(review): m_config is reset to an empty node, so getConfiguration()
// does not expose the applied settings — confirm intended.
m_config = std::make_unique<grove::JsonDataNode>("config");
m_dbPath = configNode.getString("database_path", "./data/aissia.db");
m_journalMode = configNode.getString("journal_mode", "WAL");
m_busyTimeoutMs = configNode.getInt("busy_timeout_ms", 5000);
// Ensure data directory exists
fs::path dbPath(m_dbPath);
if (dbPath.has_parent_path()) {
fs::create_directories(dbPath.parent_path());
}
if (openDatabase()) {
initializeSchema();
m_logger->info("StorageModule configure: db={}, journal={}", m_dbPath, m_journalMode);
} else {
m_logger->error("Echec ouverture base de donnees: {}", m_dbPath);
}
}
// Returns the module's configuration node (currently an empty placeholder).
const grove::IDataNode& StorageModule::getConfiguration() {
return *m_config;
}
// Engine tick: persist pending events only while the DB is open.
void StorageModule::process(const grove::IDataNode& input) {
if (!m_isConnected) return;
processMessages();
}
// Drain the IIO mailbox and persist the events this module stores:
// - "scheduler:task_completed": one finished work session
// - "monitoring:app_changed"  : time spent in the app that just lost focus
void StorageModule::processMessages() {
    if (!m_io) return;
    while (m_io->hasMessages() > 0) {
        auto msg = m_io->pullMessage();
        if (msg.topic == "scheduler:task_completed" && msg.data) {
            std::string taskName = msg.data->getString("taskName", "unknown");
            int duration = msg.data->getInt("duration", 0);
            bool hyperfocus = msg.data->getBool("hyperfocus", false);
            saveWorkSession(taskName, duration, hyperfocus);
        }
        else if (msg.topic == "monitoring:app_changed" && msg.data) {
            // BUG FIX: MonitoringModule::publishAppChanged() sets the
            // finished app under "oldApp" and its classification under
            // "wasProductive"; this reader previously looked up "appName"
            // and "productive", which the publisher never sets — every row
            // was stored with an empty name and productive=false. Read the
            // publisher's keys first, keeping the old names as fallback
            // for any other producer.
            std::string appName = msg.data->getString("oldApp", "");
            if (appName.empty()) {
                appName = msg.data->getString("appName", "");
            }
            int duration = msg.data->getInt("duration", 0);
            bool productive = msg.data->getBool("wasProductive",
                                                msg.data->getBool("productive", false));
            saveAppUsage(m_lastSessionId, appName, duration, productive);
        }
    }
}
bool StorageModule::openDatabase() {
int rc = sqlite3_open(m_dbPath.c_str(), &m_db);
if (rc != SQLITE_OK) {
m_logger->error("SQLite open error: {}", sqlite3_errmsg(m_db));
return false;
}
// Set pragmas
std::string pragmas = "PRAGMA journal_mode=" + m_journalMode + ";"
"PRAGMA busy_timeout=" + std::to_string(m_busyTimeoutMs) + ";"
"PRAGMA foreign_keys=ON;";
executeSQL(pragmas);
m_isConnected = true;
return true;
}
// Closes the SQLite handle (if open) and marks the module disconnected.
// Safe to call multiple times; subsequent calls are no-ops.
void StorageModule::closeDatabase() {
    if (m_db) {
        sqlite3_close(m_db);
        m_db = nullptr;
        m_isConnected = false;
    }
}
// Creates all tables and indexes used by this module if they do not yet
// exist. Idempotent: safe to run on every startup.
// Tables: work_sessions (focus sessions), app_usage (per-app time, FK to
// session), conversations (AI chat log), daily_metrics (one row per day).
bool StorageModule::initializeSchema() {
    const char* schema = R"SQL(
        CREATE TABLE IF NOT EXISTS work_sessions (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            task_name TEXT,
            start_time INTEGER,
            end_time INTEGER,
            duration_minutes INTEGER,
            hyperfocus_detected BOOLEAN DEFAULT 0,
            created_at INTEGER DEFAULT (strftime('%s', 'now'))
        );
        CREATE TABLE IF NOT EXISTS app_usage (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            session_id INTEGER,
            app_name TEXT,
            duration_seconds INTEGER,
            is_productive BOOLEAN,
            created_at INTEGER DEFAULT (strftime('%s', 'now')),
            FOREIGN KEY (session_id) REFERENCES work_sessions(id)
        );
        CREATE TABLE IF NOT EXISTS conversations (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            role TEXT,
            content TEXT,
            provider TEXT,
            model TEXT,
            tokens_used INTEGER,
            created_at INTEGER DEFAULT (strftime('%s', 'now'))
        );
        CREATE TABLE IF NOT EXISTS daily_metrics (
            date TEXT PRIMARY KEY,
            total_focus_minutes INTEGER DEFAULT 0,
            total_breaks INTEGER DEFAULT 0,
            hyperfocus_count INTEGER DEFAULT 0,
            updated_at INTEGER DEFAULT (strftime('%s', 'now'))
        );
        CREATE INDEX IF NOT EXISTS idx_sessions_date ON work_sessions(created_at);
        CREATE INDEX IF NOT EXISTS idx_app_usage_session ON app_usage(session_id);
        CREATE INDEX IF NOT EXISTS idx_conversations_date ON conversations(created_at);
    )SQL";
    return executeSQL(schema);
}
bool StorageModule::executeSQL(const std::string& sql) {
char* errMsg = nullptr;
int rc = sqlite3_exec(m_db, sql.c_str(), nullptr, nullptr, &errMsg);
if (rc != SQLITE_OK) {
m_logger->error("SQL error: {}", errMsg ? errMsg : "unknown");
sqlite3_free(errMsg);
return false;
}
m_totalQueries++;
return true;
}
// Persists a completed work session. The start time is reconstructed from
// the duration. Updates m_lastSessionId with the new rowid on success.
//
// Fix: taskName is now single-quote-escaped before being interpolated into
// the SQL string — previously any apostrophe in a task name broke the
// statement (and opened an SQL-injection hole). This mirrors the escaping
// already done in saveConversation().
bool StorageModule::saveWorkSession(const std::string& taskName, int durationMinutes, bool hyperfocusDetected) {
    if (!m_isConnected) return false;

    // Double every single quote ('' is the SQL escape for ').
    std::string safeName = taskName;
    for (size_t pos = 0; (pos = safeName.find('\'', pos)) != std::string::npos; pos += 2) {
        safeName.replace(pos, 1, "''");
    }

    std::time_t now = std::time(nullptr);
    std::time_t startTime = now - (durationMinutes * 60);
    std::string sql = "INSERT INTO work_sessions (task_name, start_time, end_time, duration_minutes, hyperfocus_detected) "
                      "VALUES ('" + safeName + "', " + std::to_string(startTime) + ", " +
                      std::to_string(now) + ", " + std::to_string(durationMinutes) + ", " +
                      (hyperfocusDetected ? "1" : "0") + ");";
    if (executeSQL(sql)) {
        m_lastSessionId = static_cast<int>(sqlite3_last_insert_rowid(m_db));
        m_logger->debug("Session sauvegardee: {} ({}min)", taskName, durationMinutes);
        return true;
    }
    return false;
}
// Logs time spent in an application, attached to the given session row.
//
// Fix: appName is now single-quote-escaped before interpolation — window
// titles / app names are external input and an apostrophe previously broke
// the statement (SQL injection / syntax error), unlike saveConversation()
// which already escaped its content.
bool StorageModule::saveAppUsage(int sessionId, const std::string& appName, int durationSeconds, bool productive) {
    if (!m_isConnected) return false;

    // Double every single quote ('' is the SQL escape for ').
    std::string safeApp = appName;
    for (size_t pos = 0; (pos = safeApp.find('\'', pos)) != std::string::npos; pos += 2) {
        safeApp.replace(pos, 1, "''");
    }

    std::string sql = "INSERT INTO app_usage (session_id, app_name, duration_seconds, is_productive) "
                      "VALUES (" + std::to_string(sessionId) + ", '" + safeApp + "', " +
                      std::to_string(durationSeconds) + ", " + (productive ? "1" : "0") + ");";
    return executeSQL(sql);
}
// Persists one message of an AI conversation along with its provenance
// (provider, model) and token count.
//
// Fix: every string field is now escaped, not just content — role, provider
// and model were previously interpolated raw, so a stray apostrophe in any
// of them broke the statement.
bool StorageModule::saveConversation(const std::string& role, const std::string& content,
                                     const std::string& provider, const std::string& model, int tokensUsed) {
    if (!m_isConnected) return false;
    // Double every single quote ('' is the SQL escape for ').
    auto escape = [](std::string s) {
        for (size_t pos = 0; (pos = s.find('\'', pos)) != std::string::npos; pos += 2) {
            s.replace(pos, 1, "''");
        }
        return s;
    };
    std::string sql = "INSERT INTO conversations (role, content, provider, model, tokens_used) "
                      "VALUES ('" + escape(role) + "', '" + escape(content) + "', '" + escape(provider) + "', '" +
                      escape(model) + "', " + std::to_string(tokensUsed) + ");";
    return executeSQL(sql);
}
// Accumulates today's metrics: inserts a fresh row for the day or adds the
// deltas onto the existing one (upsert keyed by the YYYY-MM-DD date).
bool StorageModule::updateDailyMetrics(int focusMinutes, int breaks, int hyperfocusCount) {
    if (!m_isConnected) return false;

    // Format today's date as YYYY-MM-DD (local time).
    // NOTE(review): std::localtime is not thread-safe — fine if this module
    // is only driven from a single thread; confirm.
    std::time_t now = std::time(nullptr);
    char today[11];
    std::strftime(today, sizeof(today), "%Y-%m-%d", std::localtime(&now));

    std::string query;
    query += "INSERT INTO daily_metrics (date, total_focus_minutes, total_breaks, hyperfocus_count) ";
    query += "VALUES ('" + std::string(today) + "', " + std::to_string(focusMinutes) + ", ";
    query += std::to_string(breaks) + ", " + std::to_string(hyperfocusCount) + ") ";
    query += "ON CONFLICT(date) DO UPDATE SET ";
    query += "total_focus_minutes = total_focus_minutes + " + std::to_string(focusMinutes) + ", ";
    query += "total_breaks = total_breaks + " + std::to_string(breaks) + ", ";
    query += "hyperfocus_count = hyperfocus_count + " + std::to_string(hyperfocusCount) + ", ";
    query += "updated_at = strftime('%s', 'now');";
    return executeSQL(query);
}
// Builds a health report: connection state, database path and counters.
std::unique_ptr<grove::IDataNode> StorageModule::getHealthStatus() {
    auto report = std::make_unique<grove::JsonDataNode>("status");
    report->setString("status", m_isConnected ? "connected" : "disconnected");
    report->setString("database", m_dbPath);
    report->setInt("totalQueries", m_totalQueries);
    report->setInt("lastSessionId", m_lastSessionId);
    return report;
}
// Closes the database connection and logs final statistics.
void StorageModule::shutdown() {
    closeDatabase();
    m_logger->info("StorageModule arrete. Total queries: {}", m_totalQueries);
}
// Captures a state snapshot used across hot-reloads (see setState).
std::unique_ptr<grove::IDataNode> StorageModule::getState() {
    auto snapshot = std::make_unique<grove::JsonDataNode>("state");
    snapshot->setString("dbPath", m_dbPath);
    snapshot->setBool("isConnected", m_isConnected);
    snapshot->setInt("totalQueries", m_totalQueries);
    snapshot->setInt("lastSessionId", m_lastSessionId);
    return snapshot;
}
// Restores counters from a previous snapshot. The database connection itself
// is not restored here — it is re-opened via setConfiguration().
void StorageModule::setState(const grove::IDataNode& state) {
    m_totalQueries = state.getInt("totalQueries", 0);
    m_lastSessionId = state.getInt("lastSessionId", 0);
    m_logger->info("Etat restore: queries={}, lastSession={}", m_totalQueries, m_lastSessionId);
}
} // namespace aissia
// C-linkage factory entry points resolved at runtime by the GroveEngine
// module loader to create/destroy this module.
extern "C" {
grove::IModule* createModule() {
    return new aissia::StorageModule();
}
void destroyModule(grove::IModule* module) {
    delete module;
}
}

View File

@ -0,0 +1,90 @@
#pragma once
#include <grove/IModule.h>
#include <grove/JsonDataNode.h>
#include <spdlog/spdlog.h>
#include <spdlog/sinks/stdout_color_sinks.h>
#include <memory>
#include <string>
struct sqlite3;
namespace aissia {
/**
* @brief Storage Module - SQLite persistence locale
*
* Fonctionnalites:
* - Persistance des sessions de travail
* - Stockage des conversations IA
* - Metriques journalieres
* - Historique d'utilisation des apps
*
* Publie sur:
* - "storage:ready" : DB initialisee
* - "storage:error" : Erreur DB
* - "storage:query_result" : Resultat de requete
*
* Souscrit a:
* - "storage:save_session" : Sauvegarder session
* - "storage:save_conversation" : Sauvegarder conversation
* - "scheduler:task_completed" : Logger completion tache
* - "monitoring:app_changed" : Logger changement app
*/
class StorageModule : public grove::IModule {
public:
    StorageModule();
    ~StorageModule() override;
    // IModule interface
    void process(const grove::IDataNode& input) override;
    void setConfiguration(const grove::IDataNode& configNode, grove::IIO* io,
                          grove::ITaskScheduler* scheduler) override;
    const grove::IDataNode& getConfiguration() override;
    std::unique_ptr<grove::IDataNode> getHealthStatus() override;
    void shutdown() override;
    std::unique_ptr<grove::IDataNode> getState() override;
    void setState(const grove::IDataNode& state) override;
    std::string getType() const override { return "StorageModule"; }
    bool isIdle() const override { return true; }
    int getVersion() const override { return 1; }
    // Public API for other modules
    // Persist a completed work session; updates the last-session id on success.
    bool saveWorkSession(const std::string& taskName, int durationMinutes, bool hyperfocusDetected);
    // Log time spent in an application, attached to a session row.
    bool saveAppUsage(int sessionId, const std::string& appName, int durationSeconds, bool productive);
    // Persist one AI conversation message with provenance and token count.
    bool saveConversation(const std::string& role, const std::string& content,
                          const std::string& provider, const std::string& model, int tokensUsed);
    // Accumulate today's focus metrics (upsert keyed by date).
    bool updateDailyMetrics(int focusMinutes, int breaks, int hyperfocusCount);
private:
    // Configuration
    std::string m_dbPath = "./data/aissia.db";  // SQLite file location
    std::string m_journalMode = "WAL";          // PRAGMA journal_mode value
    int m_busyTimeoutMs = 5000;                 // PRAGMA busy_timeout (ms)
    // State
    sqlite3* m_db = nullptr;   // open connection handle (null when closed)
    bool m_isConnected = false;
    int m_totalQueries = 0;    // successfully executed statements
    int m_lastSessionId = 0;   // rowid of the most recent work session
    // Services
    grove::IIO* m_io = nullptr;
    std::unique_ptr<grove::JsonDataNode> m_config;
    std::shared_ptr<spdlog::logger> m_logger;
    // Helpers
    bool openDatabase();
    void closeDatabase();
    bool initializeSchema();
    bool executeSQL(const std::string& sql);
    void processMessages();
};
} // namespace aissia
// C-linkage factory declarations used by the module loader (dlsym /
// GetProcAddress); definitions live in StorageModule.cpp.
extern "C" {
grove::IModule* createModule();
void destroyModule(grove::IModule* module);
}

209
src/modules/VoiceModule.cpp Normal file
View File

@ -0,0 +1,209 @@
#include "VoiceModule.h"
#include <grove/JsonDataNode.h>
#include <cstdlib>
namespace aissia {
// Sets up (or reuses) the module logger and creates an empty config node.
VoiceModule::VoiceModule() {
    m_logger = spdlog::get("VoiceModule");
    if (!m_logger) {
        m_logger = spdlog::stdout_color_mt("VoiceModule");
    }
    m_config = std::make_unique<grove::JsonDataNode>("config");
}
// Applies module configuration: reads TTS/STT settings, instantiates the
// platform engines via their factories, and subscribes to the voice topics.
void VoiceModule::setConfiguration(const grove::IDataNode& configNode,
                                    grove::IIO* io,
                                    grove::ITaskScheduler* scheduler) {
    m_io = io;
    m_config = std::make_unique<grove::JsonDataNode>("config");

    // --- TTS settings ---
    if (auto* ttsConfig = configNode.getChildReadOnly("tts")) {
        m_ttsEnabled = ttsConfig->getBool("enabled", true);
        m_ttsRate = ttsConfig->getInt("rate", 0);
        m_ttsVolume = ttsConfig->getInt("volume", 80);
    }

    // --- STT settings (API key comes from an environment variable) ---
    std::string whisperKey;
    if (auto* sttConfig = configNode.getChildReadOnly("stt")) {
        m_sttEnabled = sttConfig->getBool("enabled", true);
        m_language = sttConfig->getString("language", "fr");
        std::string keyVar = sttConfig->getString("api_key_env", "OPENAI_API_KEY");
        if (const char* value = std::getenv(keyVar.c_str())) {
            whisperKey = value;
        }
    }

    // Instantiate engines through the platform factories.
    m_ttsEngine = TTSEngineFactory::create();
    if (m_ttsEngine && m_ttsEngine->isAvailable()) {
        m_ttsEngine->setRate(m_ttsRate);
        m_ttsEngine->setVolume(m_ttsVolume);
    }
    m_sttEngine = STTEngineFactory::create(whisperKey);
    if (m_sttEngine) {
        m_sttEngine->setLanguage(m_language);
    }

    // Message-bus subscriptions.
    if (m_io) {
        grove::SubscriptionConfig options;
        m_io->subscribe("voice:speak", options);
        m_io->subscribe("voice:listen", options);
        m_io->subscribe("ai:response", options);
        m_io->subscribe("ai:suggestion", options);
        m_io->subscribe("notification:speak", options);
    }

    m_logger->info("VoiceModule configure: TTS={} ({}), STT={} ({})",
                   m_ttsEnabled, m_ttsEngine ? m_ttsEngine->getEngineName() : "none",
                   m_sttEnabled, m_sttEngine ? m_sttEngine->getEngineName() : "none");
}
// Returns the module's configuration node.
// NOTE(review): m_config is re-created empty in setConfiguration() and never
// populated — presumably intentional placeholder; confirm.
const grove::IDataNode& VoiceModule::getConfiguration() {
    return *m_config;
}
// Per-tick entry point: dispatch inbox messages, then advance the TTS queue.
void VoiceModule::process(const grove::IDataNode& input) {
    processMessages();
    processSpeakQueue();
}
// Drains the IIO inbox and dispatches each message by topic.
// NOTE(review): "voice:listen" is subscribed in setConfiguration() but has
// no handler here — STT requests are currently dropped; confirm whether this
// is pending later work.
void VoiceModule::processMessages() {
    if (!m_io) return;
    while (m_io->hasMessages() > 0) {
        auto msg = m_io->pullMessage();
        // Direct TTS request (may carry a "priority" flag).
        if (msg.topic == "voice:speak" && msg.data) {
            handleSpeakRequest(*msg.data);
        }
        // AI conversation reply to read aloud (normal queueing).
        else if (msg.topic == "ai:response" && msg.data) {
            handleAIResponse(*msg.data);
        }
        // Proactive AI suggestion (spoken with priority).
        else if (msg.topic == "ai:suggestion" && msg.data) {
            handleSuggestion(*msg.data);
        }
        // Plain notification text, queued as-is.
        else if (msg.topic == "notification:speak" && msg.data) {
            std::string text = msg.data->getString("message", "");
            if (!text.empty()) {
                m_speakQueue.push(text);
            }
        }
    }
}
// Starts the next queued utterance, but only once the engine has finished
// the current one (one utterance in flight at a time).
void VoiceModule::processSpeakQueue() {
    if (!m_ttsEnabled || !m_ttsEngine || m_speakQueue.empty()) return;
    if (m_ttsEngine->isSpeaking()) return;  // wait for current speech to end
    std::string next = m_speakQueue.front();
    m_speakQueue.pop();
    speak(next);
}
// Hands text to the TTS engine (asynchronously) and announces the start of
// speech on the bus with a truncated preview.
void VoiceModule::speak(const std::string& text) {
    if (!m_ttsEngine || !m_ttsEnabled) return;
    if (m_io) {
        auto notice = std::make_unique<grove::JsonDataNode>("event");
        notice->setString("text", text.substr(0, 100));  // preview only
        m_io->publish("voice:speaking_started", std::move(notice));
    }
    m_ttsEngine->speak(text, true);  // asynchronous playback
    m_totalSpoken++;
    m_logger->debug("Speaking: {}", text.substr(0, 50));
}
// Handles a "voice:speak" message. A priority request preempts everything:
// the backlog is flushed and any in-progress speech is cut off before the
// new text is queued.
void VoiceModule::handleSpeakRequest(const grove::IDataNode& data) {
    const std::string text = data.getString("text", "");
    if (text.empty()) return;
    if (data.getBool("priority", false)) {
        std::queue<std::string>().swap(m_speakQueue);  // flush backlog
        if (m_ttsEngine) m_ttsEngine->stop();
    }
    m_speakQueue.push(text);
}
// Queues an AI reply for normal (non-priority) playback.
void VoiceModule::handleAIResponse(const grove::IDataNode& data) {
    if (!m_ttsEnabled) return;
    const std::string reply = data.getString("text", "");
    if (!reply.empty()) {
        m_speakQueue.push(reply);
    }
}
// Suggestions are urgent: interrupt current speech, drop the backlog, and
// speak the suggestion next.
void VoiceModule::handleSuggestion(const grove::IDataNode& data) {
    if (!m_ttsEnabled) return;
    const std::string suggestion = data.getString("message", "");
    if (suggestion.empty()) return;
    if (m_ttsEngine) m_ttsEngine->stop();
    std::queue<std::string>().swap(m_speakQueue);  // flush backlog
    m_speakQueue.push(suggestion);
}
// Reports engine availability plus queue/usage statistics.
std::unique_ptr<grove::IDataNode> VoiceModule::getHealthStatus() {
    auto health = std::make_unique<grove::JsonDataNode>("status");
    health->setString("status", "running");
    health->setBool("ttsEnabled", m_ttsEnabled);
    health->setBool("sttEnabled", m_sttEnabled);
    health->setString("ttsEngine", m_ttsEngine ? m_ttsEngine->getEngineName() : "none");
    health->setString("sttEngine", m_sttEngine ? m_sttEngine->getEngineName() : "none");
    health->setInt("queueSize", static_cast<int>(m_speakQueue.size()));
    health->setInt("totalSpoken", m_totalSpoken);
    return health;
}
// Interrupts any in-progress speech and logs final statistics.
void VoiceModule::shutdown() {
    if (m_ttsEngine) {
        m_ttsEngine->stop();
    }
    m_logger->info("VoiceModule arrete. Total spoken: {}", m_totalSpoken);
}
// Snapshots usage counters (the queue content itself is not serialized,
// only its current size).
std::unique_ptr<grove::IDataNode> VoiceModule::getState() {
    auto snapshot = std::make_unique<grove::JsonDataNode>("state");
    snapshot->setInt("totalSpoken", m_totalSpoken);
    snapshot->setInt("totalTranscribed", m_totalTranscribed);
    snapshot->setInt("queueSize", static_cast<int>(m_speakQueue.size()));
    return snapshot;
}
// Restores counters from a snapshot. Note: pending queue entries are not
// restored (getState only saved the queue size, not its content).
void VoiceModule::setState(const grove::IDataNode& state) {
    m_totalSpoken = state.getInt("totalSpoken", 0);
    m_totalTranscribed = state.getInt("totalTranscribed", 0);
    m_logger->info("Etat restore: spoken={}, transcribed={}", m_totalSpoken, m_totalTranscribed);
}
} // namespace aissia
// C-linkage factory entry points resolved at runtime by the GroveEngine
// module loader to create/destroy this module.
extern "C" {
grove::IModule* createModule() {
    return new aissia::VoiceModule();
}
void destroyModule(grove::IModule* module) {
    delete module;
}
}

89
src/modules/VoiceModule.h Normal file
View File

@ -0,0 +1,89 @@
#pragma once
#include <grove/IModule.h>
#include <grove/JsonDataNode.h>
#include "../shared/audio/ITTSEngine.hpp"
#include "../shared/audio/ISTTEngine.hpp"
#include <spdlog/spdlog.h>
#include <spdlog/sinks/stdout_color_sinks.h>
#include <memory>
#include <string>
#include <queue>
namespace aissia {
/**
* @brief Voice Module - TTS and STT coordination
*
* Fonctionnalites:
* - Text-to-Speech via SAPI (Windows) ou espeak (Linux)
* - Speech-to-Text via OpenAI Whisper API
* - File d'attente de messages a parler
* - Integration avec les autres modules
*
* Publie sur:
* - "voice:transcription" : Texte transcrit (STT)
* - "voice:speaking_started" : TTS commence
* - "voice:speaking_ended" : TTS termine
*
* Souscrit a:
* - "voice:speak" : Demande TTS
* - "voice:listen" : Demande STT
* - "ai:response" : Reponse IA a lire
* - "notification:speak" : Notification a lire
* - "ai:suggestion" : Suggestion a lire
*/
class VoiceModule : public grove::IModule {
public:
    VoiceModule();
    ~VoiceModule() override = default;
    // IModule interface
    void process(const grove::IDataNode& input) override;
    void setConfiguration(const grove::IDataNode& configNode, grove::IIO* io,
                          grove::ITaskScheduler* scheduler) override;
    const grove::IDataNode& getConfiguration() override;
    std::unique_ptr<grove::IDataNode> getHealthStatus() override;
    void shutdown() override;
    std::unique_ptr<grove::IDataNode> getState() override;
    void setState(const grove::IDataNode& state) override;
    std::string getType() const override { return "VoiceModule"; }
    // Idle when nothing is waiting to be spoken.
    bool isIdle() const override { return m_speakQueue.empty(); }
    int getVersion() const override { return 1; }
private:
    // Configuration
    bool m_ttsEnabled = true;
    bool m_sttEnabled = true;
    int m_ttsRate = 0;              // engine rate, -10..10 (0 = normal)
    int m_ttsVolume = 80;           // volume, 0..100
    std::string m_language = "fr";  // STT language (ISO 639-1)
    // State
    std::unique_ptr<ITTSEngine> m_ttsEngine;  // platform TTS backend
    std::unique_ptr<ISTTEngine> m_sttEngine;  // STT backend (Whisper API or stub)
    std::queue<std::string> m_speakQueue;     // pending utterances, FIFO
    int m_totalSpoken = 0;
    int m_totalTranscribed = 0;
    // Services
    grove::IIO* m_io = nullptr;
    std::unique_ptr<grove::JsonDataNode> m_config;
    std::shared_ptr<spdlog::logger> m_logger;
    // Helpers
    void processMessages();
    void processSpeakQueue();
    void speak(const std::string& text);
    void handleSpeakRequest(const grove::IDataNode& data);
    void handleAIResponse(const grove::IDataNode& data);
    void handleSuggestion(const grove::IDataNode& data);
};
} // namespace aissia
// C-linkage factory declarations used by the module loader (dlsym /
// GetProcAddress); definitions live in VoiceModule.cpp.
extern "C" {
grove::IModule* createModule();
void destroyModule(grove::IModule* module);
}

View File

@ -0,0 +1,134 @@
#pragma once
#ifdef __linux__
#include "ITTSEngine.hpp"

#include <spdlog/spdlog.h>
#include <spdlog/sinks/stdout_color_sinks.h>  // spdlog::stdout_color_mt

#include <algorithm>  // std::max / std::min
#include <atomic>
#include <cstdio>
#include <cstdlib>
#include <memory>
#include <thread>
namespace aissia {
/**
 * @brief Linux espeak-ng Text-to-Speech engine
 *
 * Shells out to the espeak-ng (or espeak) command line tool. Degrades
 * gracefully to unavailable when neither binary is installed.
 *
 * Fixes in this revision:
 *  - escapeForShell() escapes backslashes (previously a '\' before a '"'
 *    could break out of the quoted argument — shell injection);
 *  - isSpeaking() polls for a live espeak process, because an asynchronous
 *    speak() could never clear m_speaking, leaving the engine reporting
 *    "speaking" forever and stalling callers that pace a queue on it.
 */
class EspeakTTSEngine : public ITTSEngine {
public:
    EspeakTTSEngine() {
        m_logger = spdlog::get("EspeakTTS");
        if (!m_logger) {
            m_logger = spdlog::stdout_color_mt("EspeakTTS");
        }
        // Prefer espeak-ng; fall back to classic espeak.
        m_available = (system("which espeak-ng > /dev/null 2>&1") == 0);
        if (!m_available) {
            m_available = (system("which espeak > /dev/null 2>&1") == 0);
            if (m_available) {
                m_command = "espeak";
            }
        }
        if (m_available) {
            m_logger->info("espeak TTS initialized ({})", m_command);
        } else {
            m_logger->warn("espeak not available. Install with: sudo apt install espeak-ng");
        }
    }

    ~EspeakTTSEngine() override {
        stop();
    }

    /// Speak @p text; when @p async, espeak runs in the background and this
    /// returns immediately.
    void speak(const std::string& text, bool async = true) override {
        if (!m_available) return;
        stop(); // Stop any current speech

        std::string cmd = m_command;
        cmd += " -s " + std::to_string(m_rate);
        cmd += " -a " + std::to_string(m_volume);
        cmd += " -v " + m_voice;
        cmd += " \"" + escapeForShell(text) + "\"";

        if (async) {
            cmd += " &";
            m_speaking = true;
            system(cmd.c_str());
        } else {
            m_speaking = true;
            system(cmd.c_str());
            m_speaking = false;
        }
        m_logger->debug("Speaking: {}", text.substr(0, 50));
    }

    /// Kill any running espeak process.
    /// NOTE(review): pkill matches system-wide, so this also kills espeak
    /// instances started by other applications — confirm acceptable.
    void stop() override {
        if (m_speaking) {
            system("pkill -9 espeak 2>/dev/null");
            system("pkill -9 espeak-ng 2>/dev/null");
            m_speaking = false;
        }
    }

    void setRate(int rate) override {
        // espeak rate is words per minute, default ~175; map -10..10 to 80..400.
        m_rate = 175 + (rate * 20);
        m_rate = std::max(80, std::min(400, m_rate));
    }

    void setVolume(int volume) override {
        // espeak volume is 0-200, default 100.
        m_volume = volume * 2;
        m_volume = std::max(0, std::min(200, m_volume));
    }

    /// True while an asynchronous utterance is still playing. Polls for a
    /// live espeak process (async speak() cannot observe completion itself)
    /// and clears the flag once it exits.
    bool isSpeaking() const override {
        if (!m_speaking) return false;
        bool running = (system("pgrep -x espeak-ng > /dev/null 2>&1") == 0) ||
                       (system("pgrep -x espeak > /dev/null 2>&1") == 0);
        if (!running) {
            m_speaking = false;
        }
        return running;
    }

    bool isAvailable() const override {
        return m_available;
    }

    std::string getEngineName() const override {
        return "espeak";
    }

    void setVoice(const std::string& voice) {
        m_voice = voice;
    }

private:
    std::string m_command = "espeak-ng";
    std::string m_voice = "fr"; // Default French voice
    int m_rate = 175;           // words per minute
    int m_volume = 100;         // espeak amplitude, 0-200
    bool m_available = false;
    // mutable: cleared from const isSpeaking() once the process exits.
    mutable std::atomic<bool> m_speaking{false};
    std::shared_ptr<spdlog::logger> m_logger;

    /// Escape text for safe interpolation inside a double-quoted sh argument.
    /// Backslash is escaped FIRST so it cannot neutralize the escaping of a
    /// following special character.
    std::string escapeForShell(const std::string& text) {
        std::string result;
        result.reserve(text.size());
        for (char c : text) {
            switch (c) {
                case '\\': result += "\\\\"; break;
                case '"':  result += "\\\""; break;
                case '`':  result += "\\`";  break;
                case '$':  result += "\\$";  break;
                default:   result += c;      break;
            }
        }
        return result;
    }
};
} // namespace aissia
#endif // __linux__

View File

@ -0,0 +1,64 @@
#pragma once
#include <string>
#include <memory>
#include <vector>
#include <functional>
namespace aissia {
/**
* @brief Callback for transcription results
*/
using TranscriptionCallback = std::function<void(const std::string& text)>;
/**
* @brief Interface for Speech-to-Text engines
*
* Implementations:
* - WhisperAPIEngine: OpenAI Whisper API
*/
class ISTTEngine {
public:
    virtual ~ISTTEngine() = default;
    /**
     * @brief Transcribe audio data
     * @param audioData PCM audio samples (float, 16kHz, mono)
     * @return Transcribed text (empty string on failure)
     */
    virtual std::string transcribe(const std::vector<float>& audioData) = 0;
    /**
     * @brief Transcribe audio file
     * @param filePath Path to audio file (wav, mp3, etc.)
     * @return Transcribed text (empty string on failure)
     */
    virtual std::string transcribeFile(const std::string& filePath) = 0;
    /**
     * @brief Set language for transcription
     * @param language ISO 639-1 code (e.g., "fr", "en")
     */
    virtual void setLanguage(const std::string& language) = 0;
    /**
     * @brief Check if engine is available
     */
    virtual bool isAvailable() const = 0;
    /**
     * @brief Get engine name
     */
    virtual std::string getEngineName() const = 0;
};
/**
 * @brief Factory to create STT engine
 *
 * Never returns nullptr: falls back to a logging stub when no real engine
 * can be constructed (e.g. missing API key).
 */
class STTEngineFactory {
public:
    static std::unique_ptr<ISTTEngine> create(const std::string& apiKey);
};
} // namespace aissia

View File

@ -0,0 +1,67 @@
#pragma once
#include <string>
#include <memory>
namespace aissia {
/**
* @brief Interface for Text-to-Speech engines
*
* Implementations:
* - SAPITTSEngine: Windows SAPI
* - EspeakTTSEngine: Linux espeak-ng
*/
class ITTSEngine {
public:
    virtual ~ITTSEngine() = default;
    /**
     * @brief Speak text
     * @param text Text to speak
     * @param async If true, return immediately (default)
     */
    virtual void speak(const std::string& text, bool async = true) = 0;
    /**
     * @brief Stop current speech
     */
    virtual void stop() = 0;
    /**
     * @brief Set speech rate
     * @param rate -10 to 10, 0 is normal
     */
    virtual void setRate(int rate) = 0;
    /**
     * @brief Set volume
     * @param volume 0 to 100
     */
    virtual void setVolume(int volume) = 0;
    /**
     * @brief Check if currently speaking
     */
    virtual bool isSpeaking() const = 0;
    /**
     * @brief Check if engine is available
     */
    virtual bool isAvailable() const = 0;
    /**
     * @brief Get engine name (e.g. "sapi", "espeak", "stub")
     */
    virtual std::string getEngineName() const = 0;
};
/**
 * @brief Factory to create appropriate TTS engine
 *
 * Selects the platform backend (SAPI on Windows, espeak on Linux). Never
 * returns nullptr: a logging stub is returned when no engine is usable.
 */
class TTSEngineFactory {
public:
    static std::unique_ptr<ITTSEngine> create();
};
} // namespace aissia

View File

@ -0,0 +1,117 @@
#pragma once
#ifdef _WIN32
#include "ITTSEngine.hpp"

#include <spdlog/spdlog.h>
#include <spdlog/sinks/stdout_color_sinks.h>  // spdlog::stdout_color_mt

#include <Windows.h>
#include <sapi.h>
#include <sphelper.h>

#include <algorithm>  // std::max / std::min
#include <memory>
#include <string>
namespace aissia {
/**
 * @brief Windows SAPI Text-to-Speech engine
 *
 * Owns a COM ISpVoice instance; COM is initialized in the constructor and
 * released in the destructor.
 */
class SAPITTSEngine : public ITTSEngine {
public:
    SAPITTSEngine() {
        m_logger = spdlog::get("SAPITTS");
        if (!m_logger) {
            m_logger = spdlog::stdout_color_mt("SAPITTS");
        }
        // Initialize COM
        HRESULT hr = CoInitializeEx(nullptr, COINIT_MULTITHREADED);
        // RPC_E_CHANGED_MODE means COM is already initialized in a different
        // mode on this thread — still usable, so not treated as failure.
        if (FAILED(hr) && hr != RPC_E_CHANGED_MODE) {
            m_logger->error("Failed to initialize COM: {}", hr);
            return;
        }
        m_comInitialized = true;
        // Create voice
        hr = CoCreateInstance(CLSID_SpVoice, nullptr, CLSCTX_ALL,
                              IID_ISpVoice, (void**)&m_voice);
        if (FAILED(hr)) {
            m_logger->error("Failed to create SAPI voice: {}", hr);
            return;
        }
        m_available = true;
        m_logger->info("SAPI TTS initialized");
    }
    ~SAPITTSEngine() override {
        if (m_voice) {
            m_voice->Release();
            m_voice = nullptr;
        }
        if (m_comInitialized) {
            CoUninitialize();
        }
    }
    // Speaks UTF-8 text; any in-progress speech is purged first.
    void speak(const std::string& text, bool async = true) override {
        if (!m_voice) return;
        // Convert to wide string
        int size = MultiByteToWideChar(CP_UTF8, 0, text.c_str(), -1, nullptr, 0);
        std::wstring wtext(size, 0);
        MultiByteToWideChar(CP_UTF8, 0, text.c_str(), -1, &wtext[0], size);
        DWORD flags = async ? SPF_ASYNC : SPF_DEFAULT;
        flags |= SPF_PURGEBEFORESPEAK; // Stop current speech first
        m_voice->Speak(wtext.c_str(), flags, nullptr);
        m_logger->debug("Speaking: {}", text.substr(0, 50));
    }
    // Purges the speech queue without speaking anything new.
    void stop() override {
        if (m_voice) {
            m_voice->Speak(nullptr, SPF_PURGEBEFORESPEAK, nullptr);
        }
    }
    void setRate(int rate) override {
        if (m_voice) {
            // SAPI rate is -10 to 10
            rate = std::max(-10, std::min(10, rate));
            m_voice->SetRate(rate);
        }
    }
    void setVolume(int volume) override {
        if (m_voice) {
            // SAPI volume is 0 to 100
            volume = std::max(0, std::min(100, volume));
            m_voice->SetVolume(static_cast<USHORT>(volume));
        }
    }
    // Queries SAPI for the live speaking state (no cached flag needed).
    bool isSpeaking() const override {
        if (!m_voice) return false;
        SPVOICESTATUS status;
        m_voice->GetStatus(&status, nullptr);
        return status.dwRunningState == SPRS_IS_SPEAKING;
    }
    bool isAvailable() const override {
        return m_available;
    }
    std::string getEngineName() const override {
        return "sapi";
    }
private:
    ISpVoice* m_voice = nullptr;    // COM voice (null when creation failed)
    bool m_comInitialized = false;  // whether CoUninitialize is owed
    bool m_available = false;
    std::shared_ptr<spdlog::logger> m_logger;
};
} // namespace aissia
#endif // _WIN32

View File

@ -0,0 +1,41 @@
#include "ISTTEngine.hpp"
#include "WhisperAPIEngine.hpp"

#include <spdlog/spdlog.h>
#include <spdlog/sinks/stdout_color_sinks.h>  // spdlog::stdout_color_mt
namespace aissia {
// Fallback STT used when no real engine can be constructed (e.g. the API key
// is missing). Logs what it would have done and returns empty transcriptions.
class StubSTTEngine : public ISTTEngine {
public:
    std::string transcribe(const std::vector<float>& audioData) override {
        spdlog::info("[STT Stub] Would transcribe {} samples", audioData.size());
        return "";
    }

    std::string transcribeFile(const std::string& filePath) override {
        spdlog::info("[STT Stub] Would transcribe file: {}", filePath);
        return "";
    }

    void setLanguage(const std::string&) override {}  // no-op

    bool isAvailable() const override { return false; }

    std::string getEngineName() const override { return "stub"; }
};
// Builds the best available STT engine: the Whisper API engine when an API
// key is provided and it reports itself available, otherwise a logging stub.
std::unique_ptr<ISTTEngine> STTEngineFactory::create(const std::string& apiKey) {
    auto logger = spdlog::get("STTFactory");
    if (!logger) {
        logger = spdlog::stdout_color_mt("STTFactory");
    }

    if (!apiKey.empty()) {
        auto whisper = std::make_unique<WhisperAPIEngine>(apiKey);
        if (whisper->isAvailable()) {
            logger->info("Using Whisper API STT engine");
            return whisper;
        }
    }

    logger->warn("No STT engine available (API key missing), using stub");
    return std::make_unique<StubSTTEngine>();
}
} // namespace aissia

View File

@ -0,0 +1,50 @@
#include "ITTSEngine.hpp"

#include <spdlog/spdlog.h>
#include <spdlog/sinks/stdout_color_sinks.h>  // spdlog::stdout_color_mt

#ifdef _WIN32
#include "SAPITTSEngine.hpp"
#elif defined(__linux__)
#include "EspeakTTSEngine.hpp"
#endif
namespace aissia {
// Fallback TTS for platforms without a working engine: logs what it would
// have spoken and otherwise does nothing.
class StubTTSEngine : public ITTSEngine {
public:
    void speak(const std::string& text, bool async) override {
        spdlog::info("[TTS Stub] Would speak: {}", text.substr(0, 50));
    }

    void stop() override {}             // nothing to stop
    void setRate(int) override {}       // no-op
    void setVolume(int) override {}     // no-op

    bool isSpeaking() const override { return false; }
    bool isAvailable() const override { return false; }
    std::string getEngineName() const override { return "stub"; }
};
// Builds the platform TTS engine (SAPI on Windows, espeak on Linux) and
// falls back to the logging stub when nothing usable is found.
std::unique_ptr<ITTSEngine> TTSEngineFactory::create() {
    auto logger = spdlog::get("TTSFactory");
    if (!logger) {
        logger = spdlog::stdout_color_mt("TTSFactory");
    }
#ifdef _WIN32
    {
        auto sapi = std::make_unique<SAPITTSEngine>();
        if (sapi->isAvailable()) {
            logger->info("Using SAPI TTS engine");
            return sapi;
        }
    }
#elif defined(__linux__)
    {
        auto espeak = std::make_unique<EspeakTTSEngine>();
        if (espeak->isAvailable()) {
            logger->info("Using espeak TTS engine");
            return espeak;
        }
    }
#endif
    logger->warn("No TTS engine available, using stub");
    return std::make_unique<StubTTSEngine>();
}
} // namespace aissia

View File

@ -0,0 +1,156 @@
#pragma once
#include "ISTTEngine.hpp"
#include "../http/HttpClient.hpp"

#include <spdlog/spdlog.h>
#include <spdlog/sinks/stdout_color_sinks.h>  // spdlog::stdout_color_mt

#include <algorithm>   // std::max / std::min (sample clamping)
#include <cstdint>     // int16_t / int32_t in the WAV writer
#include <cstdio>      // std::remove
#include <ctime>       // std::time
#include <filesystem>  // portable temp directory
#include <fstream>
#include <memory>
namespace aissia {
/**
 * @brief OpenAI Whisper API Speech-to-Text engine
 *
 * Raw samples are serialized to a temporary WAV file, then uploaded to the
 * /v1/audio/transcriptions endpoint as multipart form data; the response
 * body (response_format=text) is returned as the transcription.
 *
 * Fix: the temporary file path is now built from the platform temp directory
 * (std::filesystem::temp_directory_path) instead of a hardcoded "/tmp/",
 * which does not exist on Windows.
 */
class WhisperAPIEngine : public ISTTEngine {
public:
    explicit WhisperAPIEngine(const std::string& apiKey)
        : m_apiKey(apiKey) {
        m_logger = spdlog::get("WhisperAPI");
        if (!m_logger) {
            m_logger = spdlog::stdout_color_mt("WhisperAPI");
        }
        // 60 s timeout: audio upload plus server-side transcription is slow.
        m_client = std::make_unique<HttpClient>("https://api.openai.com", 60);
        m_client->setBearerToken(m_apiKey);
        m_available = !m_apiKey.empty();
        if (m_available) {
            m_logger->info("Whisper API STT initialized");
        } else {
            m_logger->warn("Whisper API not available (no API key)");
        }
    }

    /// Transcribe raw PCM samples (expected 16 kHz mono float in [-1, 1]).
    /// Returns the transcription, or "" on any failure.
    std::string transcribe(const std::vector<float>& audioData) override {
        if (!m_available || audioData.empty()) return "";
        // Build a temp path in the platform temp directory; the timestamp
        // keeps names unique enough for this single-instance use.
        namespace fs = std::filesystem;
        const fs::path tempFile = fs::temp_directory_path() /
            ("aissia_audio_" + std::to_string(std::time(nullptr)) + ".wav");
        const std::string tempPath = tempFile.string();
        if (!writeWavFile(tempPath, audioData)) {
            m_logger->error("Failed to write temp audio file");
            return "";
        }
        std::string result = transcribeFile(tempPath);
        std::remove(tempPath.c_str());  // best-effort cleanup
        return result;
    }

    /// Transcribe an audio file on disk (wav, mp3, ...) via multipart upload.
    std::string transcribeFile(const std::string& filePath) override {
        if (!m_available) return "";
        // Read the whole file into memory for the multipart body.
        std::ifstream file(filePath, std::ios::binary);
        if (!file.is_open()) {
            m_logger->error("Failed to open audio file: {}", filePath);
            return "";
        }
        std::vector<char> fileData((std::istreambuf_iterator<char>(file)),
                                   std::istreambuf_iterator<char>());
        file.close();
        // Fields per the OpenAI transcription API; response_format=text
        // makes the raw response body the plain transcription.
        httplib::MultipartFormDataItems items = {
            {"file", std::string(fileData.begin(), fileData.end()),
             filePath.substr(filePath.find_last_of("/\\") + 1), "audio/wav"},
            {"model", "whisper-1", "", ""},
            {"language", m_language, "", ""},
            {"response_format", "text", "", ""}
        };
        auto response = m_client->postMultipart("/v1/audio/transcriptions", items);
        if (!response.success) {
            m_logger->error("Whisper API error: {}", response.error);
            return "";
        }
        m_logger->debug("Transcription: {}", response.body.substr(0, 100));
        return response.body;
    }

    void setLanguage(const std::string& language) override {
        m_language = language;
    }

    bool isAvailable() const override {
        return m_available;
    }

    std::string getEngineName() const override {
        return "whisper-api";
    }

private:
    std::string m_apiKey;
    std::string m_language = "fr";  // ISO 639-1 code sent to the API
    bool m_available = false;
    std::unique_ptr<HttpClient> m_client;
    std::shared_ptr<spdlog::logger> m_logger;

    /// Write a 16-bit mono 16 kHz PCM WAV file from float samples
    /// (each sample is clamped to [-1, 1] before conversion).
    bool writeWavFile(const std::string& path, const std::vector<float>& samples) {
        std::ofstream file(path, std::ios::binary);
        if (!file.is_open()) return false;
        const int sampleRate = 16000;
        const int bitsPerSample = 16;
        const int numChannels = 1;
        const int byteRate = sampleRate * numChannels * bitsPerSample / 8;
        const int blockAlign = numChannels * bitsPerSample / 8;
        const int dataSize = static_cast<int>(samples.size() * sizeof(int16_t));
        // RIFF header
        file.write("RIFF", 4);
        int32_t fileSize = 36 + dataSize;
        file.write(reinterpret_cast<const char*>(&fileSize), 4);
        file.write("WAVE", 4);
        // fmt chunk (16-byte PCM format header)
        file.write("fmt ", 4);
        int32_t fmtSize = 16;
        file.write(reinterpret_cast<const char*>(&fmtSize), 4);
        int16_t audioFormat = 1; // PCM
        file.write(reinterpret_cast<const char*>(&audioFormat), 2);
        int16_t channels = numChannels;
        file.write(reinterpret_cast<const char*>(&channels), 2);
        int32_t sr = sampleRate;
        file.write(reinterpret_cast<const char*>(&sr), 4);
        int32_t br = byteRate;
        file.write(reinterpret_cast<const char*>(&br), 4);
        int16_t ba = blockAlign;
        file.write(reinterpret_cast<const char*>(&ba), 2);
        int16_t bps = bitsPerSample;
        file.write(reinterpret_cast<const char*>(&bps), 2);
        // data chunk
        file.write("data", 4);
        int32_t ds = dataSize;
        file.write(reinterpret_cast<const char*>(&ds), 4);
        // Clamp and convert each float sample to signed 16-bit PCM.
        for (float sample : samples) {
            int16_t s = static_cast<int16_t>(std::max(-1.0f, std::min(1.0f, sample)) * 32767.0f);
            file.write(reinterpret_cast<const char*>(&s), 2);
        }
        file.close();
        return true;
    }
};
} // namespace aissia

View File

@ -0,0 +1,201 @@
#pragma once
/**
* @brief Simple HTTP Client wrapper around cpp-httplib
*
* Header-only wrapper for making HTTP requests to LLM APIs.
* Requires cpp-httplib and OpenSSL for HTTPS support.
*/
#define CPPHTTPLIB_OPENSSL_SUPPORT
#include <httplib.h>
#include <nlohmann/json.hpp>
#include <string>
#include <optional>
#include <spdlog/spdlog.h>
namespace aissia {
using json = nlohmann::json;
// Lightweight result of a single HTTP call.
struct HttpResponse {
    int status = 0;        // HTTP status code (0 when the request never completed)
    std::string body;      // raw response body
    bool success = false;  // true for 2xx responses
    std::string error;     // transport or HTTP error description when !success
};
class HttpClient {
public:
explicit HttpClient(const std::string& baseUrl, int timeoutSeconds = 30)
: m_baseUrl(baseUrl), m_timeoutSeconds(timeoutSeconds) {
m_logger = spdlog::get("HttpClient");
if (!m_logger) {
m_logger = spdlog::stdout_color_mt("HttpClient");
}
// Parse URL to extract host and determine HTTPS
if (baseUrl.find("https://") == 0) {
m_host = baseUrl.substr(8);
m_useSSL = true;
} else if (baseUrl.find("http://") == 0) {
m_host = baseUrl.substr(7);
m_useSSL = false;
} else {
m_host = baseUrl;
m_useSSL = false;
}
// Remove trailing path
auto slashPos = m_host.find('/');
if (slashPos != std::string::npos) {
m_host = m_host.substr(0, slashPos);
}
}
    // Sets (or overwrites) a header sent with every subsequent request.
    void setHeader(const std::string& key, const std::string& value) {
        m_headers[key] = value;
    }
void setBearerToken(const std::string& token) {
m_headers["Authorization"] = "Bearer " + token;
}
HttpResponse post(const std::string& path, const json& body) {
HttpResponse response;
try {
std::unique_ptr<httplib::Client> client;
if (m_useSSL) {
client = std::make_unique<httplib::Client>(m_host);
client->enable_server_certificate_verification(true);
} else {
client = std::make_unique<httplib::Client>(m_host);
}
client->set_connection_timeout(m_timeoutSeconds);
client->set_read_timeout(m_timeoutSeconds);
client->set_write_timeout(m_timeoutSeconds);
httplib::Headers headers;
headers.emplace("Content-Type", "application/json");
for (const auto& [key, value] : m_headers) {
headers.emplace(key, value);
}
std::string bodyStr = body.dump();
m_logger->debug("POST {} ({} bytes)", path, bodyStr.size());
auto result = client->Post(path, headers, bodyStr, "application/json");
if (result) {
response.status = result->status;
response.body = result->body;
response.success = (result->status >= 200 && result->status < 300);
if (!response.success) {
response.error = "HTTP " + std::to_string(result->status);
m_logger->warn("HTTP {} for {}: {}", result->status, path,
result->body.substr(0, 200));
}
} else {
response.error = httplib::to_string(result.error());
m_logger->error("HTTP request failed: {}", response.error);
}
} catch (const std::exception& e) {
response.error = e.what();
m_logger->error("HTTP exception: {}", e.what());
}
return response;
}
HttpResponse postMultipart(const std::string& path,
const httplib::MultipartFormDataItems& items) {
HttpResponse response;
try {
std::unique_ptr<httplib::Client> client;
if (m_useSSL) {
client = std::make_unique<httplib::Client>(m_host);
client->enable_server_certificate_verification(true);
} else {
client = std::make_unique<httplib::Client>(m_host);
}
client->set_connection_timeout(m_timeoutSeconds);
client->set_read_timeout(m_timeoutSeconds);
httplib::Headers headers;
for (const auto& [key, value] : m_headers) {
headers.emplace(key, value);
}
auto result = client->Post(path, headers, items);
if (result) {
response.status = result->status;
response.body = result->body;
response.success = (result->status >= 200 && result->status < 300);
} else {
response.error = httplib::to_string(result.error());
}
} catch (const std::exception& e) {
response.error = e.what();
}
return response;
}
HttpResponse get(const std::string& path) {
HttpResponse response;
try {
std::unique_ptr<httplib::Client> client;
if (m_useSSL) {
client = std::make_unique<httplib::Client>(m_host);
} else {
client = std::make_unique<httplib::Client>(m_host);
}
client->set_connection_timeout(m_timeoutSeconds);
client->set_read_timeout(m_timeoutSeconds);
httplib::Headers headers;
for (const auto& [key, value] : m_headers) {
headers.emplace(key, value);
}
auto result = client->Get(path, headers);
if (result) {
response.status = result->status;
response.body = result->body;
response.success = (result->status >= 200 && result->status < 300);
} else {
response.error = httplib::to_string(result.error());
}
} catch (const std::exception& e) {
response.error = e.what();
}
return response;
}
private:
std::string m_baseUrl;
std::string m_host;
bool m_useSSL = false;
int m_timeoutSeconds;
std::map<std::string, std::string> m_headers;
std::shared_ptr<spdlog::logger> m_logger;
};
} // namespace aissia

View File

@ -0,0 +1,180 @@
#include "ClaudeProvider.hpp"
#include <spdlog/sinks/stdout_color_sinks.h>
#include <cstdlib>
namespace aissia {
ClaudeProvider::ClaudeProvider(const json& config) {
    m_logger = spdlog::get("ClaudeProvider");
    if (!m_logger) {
        m_logger = spdlog::stdout_color_mt("ClaudeProvider");
    }

    // The key itself never lives in config files -- config only names the
    // environment variable that holds it.
    const std::string envName = config.value("api_key_env", "ANTHROPIC_API_KEY");
    if (const char* key = std::getenv(envName.c_str())) {
        m_apiKey = key;
    } else {
        m_logger->error("API key not found in environment: {}", envName);
        throw std::runtime_error("Missing API key: " + envName);
    }

    m_model = config.value("model", "claude-sonnet-4-20250514");
    m_maxTokens = config.value("max_tokens", 4096);
    m_baseUrl = config.value("base_url", "https://api.anthropic.com");

    // 60 s timeout: LLM completions routinely take longer than default HTTP timeouts.
    m_client = std::make_unique<HttpClient>(m_baseUrl, 60);
    m_client->setHeader("x-api-key", m_apiKey);
    m_client->setHeader("anthropic-version", "2023-06-01");
    m_client->setHeader("Content-Type", "application/json");

    m_logger->info("ClaudeProvider initialized: model={}", m_model);
}
LLMResponse ClaudeProvider::chat(const std::string& systemPrompt,
                                 const json& messages,
                                 const json& tools) {
    // Claude takes the system prompt as a dedicated top-level field,
    // not as a message with role "system".
    json request;
    request["model"] = m_model;
    request["max_tokens"] = m_maxTokens;
    request["system"] = systemPrompt;
    request["messages"] = messages;
    if (!tools.empty()) {
        request["tools"] = convertTools(tools);
    }

    m_logger->debug("Sending request to Claude: {} messages", messages.size());
    auto response = m_client->post("/v1/messages", request);

    if (!response.success) {
        m_logger->error("Claude API error: {}", response.error);
        LLMResponse failure;
        failure.text = "Error: " + response.error;
        failure.is_end_turn = true;  // surface the error as a final answer
        return failure;
    }

    try {
        return parseResponse(json::parse(response.body));
    } catch (const json::exception& e) {
        m_logger->error("Failed to parse Claude response: {}", e.what());
        LLMResponse failure;
        failure.text = "Parse error: " + std::string(e.what());
        failure.is_end_turn = true;
        return failure;
    }
}
json ClaudeProvider::convertTools(const json& tools) {
    // The registry format already matches Claude's schema
    // (name / description / input_schema); copy field by field so missing
    // keys degrade to empty values instead of throwing.
    json out = json::array();
    for (const auto& tool : tools) {
        out.push_back({
            {"name", tool.value("name", "")},
            {"description", tool.value("description", "")},
            {"input_schema", tool.value("input_schema", json::object())}
        });
    }
    return out;
}
LLMResponse ClaudeProvider::parseResponse(const json& response) {
    LLMResponse result;
    result.stop_reason = response.value("stop_reason", "");
    result.model = response.value("model", m_model);

    // Token accounting (input_tokens / output_tokens per the Anthropic API).
    if (response.contains("usage")) {
        result.input_tokens = response["usage"].value("input_tokens", 0);
        result.output_tokens = response["usage"].value("output_tokens", 0);
    }

    // Content is an array of typed blocks: "text" and "tool_use".
    if (response.contains("content") && response["content"].is_array()) {
        for (const auto& block : response["content"]) {
            std::string type = block.value("type", "");
            if (type == "text") {
                result.text += block.value("text", "");
            }
            else if (type == "tool_use") {
                ToolCall call;
                call.id = block.value("id", "");
                call.name = block.value("name", "");
                call.input = block.value("input", json::object());
                result.tool_calls.push_back(call);
            }
        }
    }

    // FIX: previously is_end_turn was true only for stop_reason == "end_turn",
    // so a "max_tokens"/"stop_sequence" stop with no tool calls yielded a
    // response that was neither final nor actionable, stalling an agentic
    // loop. Treat any response without tool calls as final -- this mirrors
    // OpenAIProvider::parseResponse.
    result.is_end_turn = (result.stop_reason == "end_turn") || result.tool_calls.empty();

    m_logger->debug("Claude response: text={} chars, tools={}, stop={}",
                   result.text.size(), result.tool_calls.size(), result.stop_reason);
    return result;
}
json ClaudeProvider::formatToolResults(const std::vector<ToolResult>& results) {
    // Claude expects exactly one user message whose content is a list of
    // tool_result blocks, each referencing the originating tool_use id.
    json blocks = json::array();
    for (const auto& r : results) {
        json block = {
            {"type", "tool_result"},
            {"tool_use_id", r.tool_call_id},
            {"content", r.content}
        };
        if (r.is_error) {
            block["is_error"] = true;  // only present on failures, per API convention
        }
        blocks.push_back(block);
    }
    return {
        {"role", "user"},
        {"content", blocks}
    };
}
void ClaudeProvider::appendAssistantMessage(json& messages, const LLMResponse& response) {
    // The assistant turn must echo both its text and its tool_use blocks so
    // the follow-up tool_result blocks can reference their ids.
    messages.push_back({
        {"role", "assistant"},
        {"content", buildAssistantContent(response)}
    });
}
json ClaudeProvider::buildAssistantContent(const LLMResponse& response) {
    json blocks = json::array();

    // Text first, then tool_use blocks -- the order Claude emitted them in.
    if (!response.text.empty()) {
        blocks.push_back({
            {"type", "text"},
            {"text", response.text}
        });
    }
    for (const auto& call : response.tool_calls) {
        blocks.push_back({
            {"type", "tool_use"},
            {"id", call.id},
            {"name", call.name},
            {"input", call.input}
        });
    }
    return blocks;
}
} // namespace aissia

View File

@ -0,0 +1,53 @@
#pragma once
#include "ILLMProvider.hpp"
#include "../http/HttpClient.hpp"
#include <spdlog/spdlog.h>
#include <memory>
namespace aissia {
/**
* @brief Anthropic Claude API provider
*
* Supports Claude models with native tool use.
* Response format uses tool_use/tool_result content blocks.
*/
class ClaudeProvider : public ILLMProvider {
public:
    /// @param config Provider config: api_key_env, model, max_tokens, base_url.
    /// @throws std::runtime_error if the configured API key env variable is unset.
    explicit ClaudeProvider(const json& config);
    ~ClaudeProvider() override = default;
    /// One chat turn against /v1/messages; returns text and/or tool calls.
    LLMResponse chat(const std::string& systemPrompt,
                     const json& messages,
                     const json& tools) override;
    /// Claude format: a single user message containing tool_result blocks.
    json formatToolResults(const std::vector<ToolResult>& results) override;
    void appendAssistantMessage(json& messages, const LLMResponse& response) override;
    std::string getProviderName() const override { return "claude"; }
    void setModel(const std::string& model) override { m_model = model; }
    void setMaxTokens(int maxTokens) override { m_maxTokens = maxTokens; }
    std::string getModel() const override { return m_model; }
private:
    std::string m_apiKey;                     // Read from the environment at construction
    std::string m_model = "claude-sonnet-4-20250514";
    std::string m_baseUrl = "https://api.anthropic.com";
    int m_maxTokens = 4096;
    std::unique_ptr<HttpClient> m_client;     // Configured with a 60 s timeout
    std::shared_ptr<spdlog::logger> m_logger;
    // Convert registry tools to Claude's name/description/input_schema format
    json convertTools(const json& tools);
    // Parse a /v1/messages response body into an LLMResponse
    LLMResponse parseResponse(const json& response);
    // Build the content block array for an assistant history message
    json buildAssistantContent(const LLMResponse& response);
};
} // namespace aissia

View File

@ -0,0 +1,109 @@
#pragma once
#include <nlohmann/json.hpp>
#include <string>
#include <vector>
#include <memory>
namespace aissia {
using json = nlohmann::json;
/**
* @brief Represents a tool call requested by the LLM
*/
struct ToolCall {
    std::string id;          // Unique identifier for this call (echoed back via ToolResult)
    std::string name;        // Tool name, as registered in the ToolRegistry
    json input;              // Arguments passed to the tool (JSON object)
};
/**
* @brief Result of executing a tool
*/
struct ToolResult {
    std::string tool_call_id; // Reference to the ToolCall.id this result answers
    std::string content;      // Result payload (usually a serialized JSON string)
    bool is_error = false;    // True if the tool failed; providers surface this to the model
};
/**
* @brief Response from an LLM provider
*/
struct LLMResponse {
    std::string text;                 // Text response, if any (may accompany tool calls)
    std::vector<ToolCall> tool_calls; // Tool calls requested by the model
    bool is_end_turn = false;         // True if this is a final response (nothing to execute)
    int input_tokens = 0;             // Tokens consumed by the prompt
    int output_tokens = 0;            // Tokens generated by the model
    std::string stop_reason;          // Provider-specific reason generation stopped
    std::string model;                // Model that generated this response
};
/**
* @brief Abstract interface for LLM providers
*
* Implementations:
* - ClaudeProvider: Anthropic Claude API
* - OpenAIProvider: OpenAI GPT API
* - OllamaProvider: Local Ollama models
*
* All providers support tool use for agentic workflows.
*/
class ILLMProvider {
public:
    virtual ~ILLMProvider() = default;
    /**
     * @brief Send a chat completion request
     *
     * @param systemPrompt System instructions for the assistant
     * @param messages Conversation history (role/content pairs)
     * @param tools Available tools in provider-agnostic format
     *              (name / description / input_schema objects)
     * @return LLMResponse with text and/or tool calls
     */
    virtual LLMResponse chat(const std::string& systemPrompt,
                             const json& messages,
                             const json& tools) = 0;
    /**
     * @brief Format tool results for the next request
     *
     * Different providers have different formats for returning tool results.
     * Claude uses tool_result blocks, OpenAI uses tool role messages.
     *
     * NOTE(review): the return shape differs per provider -- ClaudeProvider
     * returns a single message object, OpenAIProvider returns an ARRAY of
     * messages. Callers must check for the array case and append elements
     * individually; verify all call sites handle both shapes.
     *
     * @param results Vector of tool results to format
     * @return JSON message(s) to append to conversation
     */
    virtual json formatToolResults(const std::vector<ToolResult>& results) = 0;
    /**
     * @brief Append assistant response to message history
     *
     * Must echo tool-call metadata so a later formatToolResults() message
     * can reference the call ids.
     *
     * @param messages Message array to modify
     * @param response Response to append
     */
    virtual void appendAssistantMessage(json& messages, const LLMResponse& response) = 0;
    /**
     * @brief Get provider name for logging (e.g. "claude", "openai")
     */
    virtual std::string getProviderName() const = 0;
    /**
     * @brief Set the model to use for subsequent chat() calls
     */
    virtual void setModel(const std::string& model) = 0;
    /**
     * @brief Set maximum tokens for response
     */
    virtual void setMaxTokens(int maxTokens) = 0;
    /**
     * @brief Get current model name
     */
    virtual std::string getModel() const = 0;
};
} // namespace aissia

View File

@ -0,0 +1,46 @@
#include "LLMProviderFactory.hpp"

#include "ClaudeProvider.hpp"
#include "OpenAIProvider.hpp"

#include <spdlog/spdlog.h>
#include <spdlog/sinks/stdout_color_sinks.h>  // spdlog::stdout_color_mt

#include <memory>
#include <stdexcept>
namespace aissia {
std::unique_ptr<ILLMProvider> LLMProviderFactory::create(const nlohmann::json& config) {
    // "provider" selects the active entry inside the "providers" map.
    const std::string name = config.value("provider", "claude");
    const bool hasConfig = config.contains("providers")
                        && config["providers"].contains(name);
    if (!hasConfig) {
        throw std::runtime_error("Provider configuration not found: " + name);
    }
    return createProvider(name, config["providers"][name]);
}
std::unique_ptr<ILLMProvider> LLMProviderFactory::createProvider(
    const std::string& providerName,
    const nlohmann::json& providerConfig) {

    auto logger = spdlog::get("LLMFactory");
    if (!logger) {
        logger = spdlog::stdout_color_mt("LLMFactory");
    }
    logger->info("Creating LLM provider: {}", providerName);

    if (providerName == "claude") {
        return std::make_unique<ClaudeProvider>(providerConfig);
    }
    if (providerName == "openai") {
        return std::make_unique<OpenAIProvider>(providerConfig);
    }

    // Planned but not yet implemented: "ollama" (local models),
    // "deepseek", "gemini".
    throw std::runtime_error("Unknown LLM provider: " + providerName);
}
} // namespace aissia

View File

@ -0,0 +1,56 @@
#pragma once
#include "ILLMProvider.hpp"
#include <nlohmann/json.hpp>
#include <memory>
#include <string>
namespace aissia {
/**
* @brief Factory for creating LLM providers
*
* Supported providers:
* - "claude": Anthropic Claude (requires ANTHROPIC_API_KEY)
* - "openai": OpenAI GPT (requires OPENAI_API_KEY)
* - "ollama": Local Ollama (no API key required)
*
* Configuration format:
* {
* "provider": "claude",
* "providers": {
* "claude": {
* "api_key_env": "ANTHROPIC_API_KEY",
* "model": "claude-sonnet-4-20250514",
* "max_tokens": 4096,
* "base_url": "https://api.anthropic.com"
* },
* "openai": { ... },
* "ollama": { ... }
* }
* }
*/
class LLMProviderFactory {
public:
    /**
     * @brief Create a provider from configuration
     *
     * Reads config["provider"] (default "claude") and looks up the matching
     * entry under config["providers"].
     *
     * @param config Full configuration object
     * @return Unique pointer to the provider
     * @throws std::runtime_error if provider unknown or config invalid
     */
    static std::unique_ptr<ILLMProvider> create(const nlohmann::json& config);
    /**
     * @brief Create a specific provider by name
     *
     * @param providerName Provider identifier (claude, openai, ollama)
     * @param providerConfig Provider-specific configuration
     * @return Unique pointer to the provider
     * @throws std::runtime_error for an unknown provider name, or if the
     *         provider's constructor rejects the config (e.g. missing API key)
     */
    static std::unique_ptr<ILLMProvider> createProvider(
        const std::string& providerName,
        const nlohmann::json& providerConfig);
};
} // namespace aissia

View File

@ -0,0 +1,201 @@
#include "OpenAIProvider.hpp"
#include <spdlog/sinks/stdout_color_sinks.h>
#include <cstdlib>
namespace aissia {
OpenAIProvider::OpenAIProvider(const json& config) {
    m_logger = spdlog::get("OpenAIProvider");
    if (!m_logger) {
        m_logger = spdlog::stdout_color_mt("OpenAIProvider");
    }

    // Config names the environment variable holding the key; the key itself
    // never appears in config files.
    const std::string envName = config.value("api_key_env", "OPENAI_API_KEY");
    if (const char* key = std::getenv(envName.c_str())) {
        m_apiKey = key;
    } else {
        m_logger->error("API key not found in environment: {}", envName);
        throw std::runtime_error("Missing API key: " + envName);
    }

    m_model = config.value("model", "gpt-4o");
    m_maxTokens = config.value("max_tokens", 4096);
    m_baseUrl = config.value("base_url", "https://api.openai.com");

    // 60 s timeout: completions routinely exceed default HTTP timeouts.
    m_client = std::make_unique<HttpClient>(m_baseUrl, 60);
    m_client->setBearerToken(m_apiKey);
    m_client->setHeader("Content-Type", "application/json");

    m_logger->info("OpenAIProvider initialized: model={}", m_model);
}
LLMResponse OpenAIProvider::chat(const std::string& systemPrompt,
                                 const json& messages,
                                 const json& tools) {
    // OpenAI has no dedicated system field: prepend a "system" role message.
    json allMessages = json::array();
    allMessages.push_back({
        {"role", "system"},
        {"content", systemPrompt}
    });
    for (const auto& msg : messages) {
        allMessages.push_back(msg);
    }

    json request;
    request["model"] = m_model;
    request["max_tokens"] = m_maxTokens;
    request["messages"] = allMessages;
    if (!tools.empty()) {
        request["tools"] = convertTools(tools);
    }

    m_logger->debug("Sending request to OpenAI: {} messages", allMessages.size());
    auto response = m_client->post("/v1/chat/completions", request);

    if (!response.success) {
        m_logger->error("OpenAI API error: {}", response.error);
        LLMResponse failure;
        failure.text = "Error: " + response.error;
        failure.is_end_turn = true;  // surface the error as a final answer
        return failure;
    }

    try {
        json parsed = json::parse(response.body);
        m_lastRawResponse = parsed;
        return parseResponse(parsed);
    } catch (const json::exception& e) {
        m_logger->error("Failed to parse OpenAI response: {}", e.what());
        LLMResponse failure;
        failure.text = "Parse error: " + std::string(e.what());
        failure.is_end_turn = true;
        return failure;
    }
}
json OpenAIProvider::convertTools(const json& tools) {
    // OpenAI wraps each tool in {"type":"function","function":{...}} and
    // names the schema field "parameters" instead of "input_schema".
    json out = json::array();
    for (const auto& tool : tools) {
        out.push_back({
            {"type", "function"},
            {"function", {
                {"name", tool.value("name", "")},
                {"description", tool.value("description", "")},
                {"parameters", tool.value("input_schema", json::object())}
            }}
        });
    }
    return out;
}
LLMResponse OpenAIProvider::parseResponse(const json& response) {
    LLMResponse result;
    result.model = response.value("model", m_model);

    // Token accounting uses OpenAI's prompt/completion naming.
    if (response.contains("usage")) {
        result.input_tokens = response["usage"].value("prompt_tokens", 0);
        result.output_tokens = response["usage"].value("completion_tokens", 0);
    }

    // Only the first choice is consumed.
    if (response.contains("choices") && !response["choices"].empty()) {
        const auto& choice = response["choices"][0];
        result.stop_reason = choice.value("finish_reason", "");

        if (choice.contains("message")) {
            const auto& message = choice["message"];

            // Content is null for pure tool-call turns.
            if (message.contains("content") && !message["content"].is_null()) {
                result.text = message["content"].get<std::string>();
            }

            if (message.contains("tool_calls") && message["tool_calls"].is_array()) {
                for (const auto& entry : message["tool_calls"]) {
                    ToolCall call;
                    call.id = entry.value("id", "");
                    if (entry.contains("function")) {
                        call.name = entry["function"].value("name", "");
                        // Arguments arrive as a JSON-encoded string; fall back
                        // to an empty object if the model emitted invalid JSON.
                        std::string rawArgs = entry["function"].value("arguments", "{}");
                        try {
                            call.input = json::parse(rawArgs);
                        } catch (...) {
                            call.input = json::object();
                        }
                    }
                    result.tool_calls.push_back(call);
                }
            }
        }
    }

    // Final turn when no tools were requested or the model stopped normally.
    result.is_end_turn = result.tool_calls.empty() ||
                         result.stop_reason == "stop";

    m_logger->debug("OpenAI response: text={} chars, tools={}, stop={}",
                   result.text.size(), result.tool_calls.size(), result.stop_reason);
    return result;
}
json OpenAIProvider::formatToolResults(const std::vector<ToolResult>& results) {
    // OpenAI format: separate messages with role "tool"
    //
    // NOTE(review): unlike ClaudeProvider::formatToolResults (which returns a
    // single message object), this returns a JSON *array* of messages. A
    // caller that does messages.push_back(provider->formatToolResults(...))
    // without checking for the array case will nest an array inside the
    // history and produce an invalid request -- verify the agentic loop
    // appends each element individually for this provider.
    json messages = json::array();
    for (const auto& result : results) {
        messages.push_back({
            {"role", "tool"},
            {"tool_call_id", result.tool_call_id},
            {"content", result.content}
        });
    }
    // Return array of messages (caller should append each)
    return messages;
}
void OpenAIProvider::appendAssistantMessage(json& messages, const LLMResponse& response) {
    json assistant;
    assistant["role"] = "assistant";

    // OpenAI expects an explicit null content when the turn is tool-calls only.
    if (response.text.empty()) {
        assistant["content"] = nullptr;
    } else {
        assistant["content"] = response.text;
    }

    // Echo the tool calls so later role:"tool" messages can reference their ids.
    if (!response.tool_calls.empty()) {
        json calls = json::array();
        for (const auto& call : response.tool_calls) {
            calls.push_back({
                {"id", call.id},
                {"type", "function"},
                {"function", {
                    {"name", call.name},
                    {"arguments", call.input.dump()}
                }}
            });
        }
        assistant["tool_calls"] = calls;
    }

    messages.push_back(assistant);
}
} // namespace aissia

View File

@ -0,0 +1,53 @@
#pragma once
#include "ILLMProvider.hpp"
#include "../http/HttpClient.hpp"
#include <spdlog/spdlog.h>
#include <memory>
namespace aissia {
/**
* @brief OpenAI GPT API provider
*
* Supports GPT models with function calling.
* Response format uses tool_calls array.
*/
class OpenAIProvider : public ILLMProvider {
public:
    /// @param config Provider config: api_key_env, model, max_tokens, base_url.
    /// @throws std::runtime_error if the configured API key env variable is unset.
    explicit OpenAIProvider(const json& config);
    ~OpenAIProvider() override = default;
    /// One chat turn against /v1/chat/completions.
    LLMResponse chat(const std::string& systemPrompt,
                     const json& messages,
                     const json& tools) override;
    /// OpenAI format: returns an ARRAY of role:"tool" messages (one per result).
    json formatToolResults(const std::vector<ToolResult>& results) override;
    void appendAssistantMessage(json& messages, const LLMResponse& response) override;
    std::string getProviderName() const override { return "openai"; }
    void setModel(const std::string& model) override { m_model = model; }
    void setMaxTokens(int maxTokens) override { m_maxTokens = maxTokens; }
    std::string getModel() const override { return m_model; }
private:
    std::string m_apiKey;                     // Read from the environment at construction
    std::string m_model = "gpt-4o";
    std::string m_baseUrl = "https://api.openai.com";
    int m_maxTokens = 4096;
    std::unique_ptr<HttpClient> m_client;     // Configured with a 60 s timeout
    std::shared_ptr<spdlog::logger> m_logger;
    // Convert registry tools to OpenAI's {"type":"function",...} format
    json convertTools(const json& tools);
    // Parse a /v1/chat/completions response body into an LLMResponse
    LLMResponse parseResponse(const json& response);
    // Raw JSON of the last successfully parsed response. Written in chat()
    // but currently never read back -- candidate for removal.
    json m_lastRawResponse;
};
} // namespace aissia

View File

@ -0,0 +1,64 @@
#include "ToolRegistry.hpp"
#include <spdlog/spdlog.h>
namespace aissia {
void ToolRegistry::registerTool(const std::string& name,
const std::string& description,
const json& inputSchema,
ToolHandler handler) {
ToolDefinition tool;
tool.name = name;
tool.description = description;
tool.input_schema = inputSchema;
tool.handler = handler;
registerTool(tool);
}
void ToolRegistry::registerTool(const ToolDefinition& tool) {
    // Re-registering a name replaces the previous definition.
    m_tools.insert_or_assign(tool.name, tool);
    spdlog::debug("Registered tool: {}", tool.name);
}
json ToolRegistry::execute(const std::string& name, const json& input) {
    const auto it = m_tools.find(name);
    if (it == m_tools.end()) {
        // Unknown tool: report an error object back to the LLM rather
        // than throwing, so the agentic loop can recover.
        spdlog::warn("Unknown tool called: {}", name);
        return {
            {"error", "unknown_tool"},
            {"message", "Tool not found: " + name}
        };
    }
    try {
        // Truncate the logged input so huge payloads do not flood the log.
        spdlog::debug("Executing tool: {} with input: {}", name, input.dump().substr(0, 100));
        return it->second.handler(input);
    } catch (const std::exception& e) {
        // Handler failures are likewise converted to error objects.
        spdlog::error("Tool {} threw exception: {}", name, e.what());
        return {
            {"error", "execution_failed"},
            {"message", e.what()}
        };
    }
}
json ToolRegistry::getToolDefinitions() const {
    // Provider-agnostic listing; each LLM provider converts this into
    // its own tool schema.
    json defs = json::array();
    for (const auto& entry : m_tools) {
        const ToolDefinition& tool = entry.second;
        defs.push_back({
            {"name", tool.name},
            {"description", tool.description},
            {"input_schema", tool.input_schema}
        });
    }
    return defs;
}
bool ToolRegistry::hasTool(const std::string& name) const {
    return m_tools.count(name) != 0;
}
} // namespace aissia

View File

@ -0,0 +1,88 @@
#pragma once
#include <nlohmann/json.hpp>
#include <string>
#include <map>
#include <functional>
#include <memory>
namespace aissia {
using json = nlohmann::json;
/**
* @brief Function signature for tool handlers
*
* @param input Tool input arguments
* @return JSON result to send back to LLM
*/
using ToolHandler = std::function<json(const json& input)>;
/**
* @brief Tool definition with metadata and handler
*/
struct ToolDefinition {
    std::string name;        // Unique identifier (also the map key in ToolRegistry)
    std::string description; // What the tool does, phrased for the LLM
    json input_schema;       // JSON Schema describing the handler's arguments
    ToolHandler handler;     // Callback invoked by ToolRegistry::execute
};
/**
* @brief Registry for tools available to the LLM
*
* Tools are functions that the LLM can call to interact with the system.
* Each tool has:
* - name: Unique identifier
* - description: What the tool does (for LLM)
* - input_schema: JSON Schema for parameters
* - handler: Function to execute
*/
class ToolRegistry {
public:
    ToolRegistry() = default;
    /**
     * @brief Register a new tool
     *
     * Re-registering an existing name replaces the previous definition.
     */
    void registerTool(const std::string& name,
                      const std::string& description,
                      const json& inputSchema,
                      ToolHandler handler);
    /**
     * @brief Register a tool from a definition struct
     */
    void registerTool(const ToolDefinition& tool);
    /**
     * @brief Execute a tool by name
     *
     * Unknown tools and handler exceptions are converted into JSON error
     * objects ({"error": ..., "message": ...}) rather than thrown, so the
     * result can always be handed back to the LLM.
     *
     * @param name Tool name
     * @param input Tool arguments
     * @return JSON result
     */
    json execute(const std::string& name, const json& input);
    /**
     * @brief Get tool definitions for LLM
     *
     * @return Array of tool definitions in provider-agnostic format
     */
    json getToolDefinitions() const;
    /**
     * @brief Check if a tool exists
     */
    bool hasTool(const std::string& name) const;
    /**
     * @brief Get number of registered tools
     */
    size_t size() const { return m_tools.size(); }
private:
    // NOTE(review): no internal locking -- confine a registry to one thread
    // or guard it externally; verify against actual usage.
    std::map<std::string, ToolDefinition> m_tools;
};
} // namespace aissia

View File

@ -0,0 +1,66 @@
#pragma once
#include <string>
#include <memory>
namespace aissia {
/**
* @brief Interface for window tracking across platforms
*
* Implementations:
* - Win32WindowTracker: Windows (GetForegroundWindow)
* - X11WindowTracker: Linux with X11
* - StubWindowTracker: Fallback for unsupported platforms
*/
class IWindowTracker {
public:
    virtual ~IWindowTracker() = default;
    /**
     * @brief Get the name of the currently focused application
     *
     * Implementations in this codebase return an empty string when the
     * focused window/process cannot be determined.
     *
     * @return Application/process name (e.g. "Code", "firefox")
     */
    virtual std::string getCurrentAppName() = 0;
    /**
     * @brief Get the title of the currently focused window
     * @return Window title string (empty if unavailable)
     */
    virtual std::string getCurrentWindowTitle() = 0;
    /**
     * @brief Check if user has been idle (no input) for given threshold
     * @param thresholdSeconds Seconds of inactivity to consider idle
     * @return true if user is idle
     */
    virtual bool isUserIdle(int thresholdSeconds) = 0;
    /**
     * @brief Get idle time in seconds
     * @return Seconds since last user input
     */
    virtual int getIdleTimeSeconds() = 0;
    /**
     * @brief Check if this tracker is functional on current platform
     * @return true if tracking is available
     */
    virtual bool isAvailable() const = 0;
    /**
     * @brief Get platform name
     * @return Platform identifier (e.g., "win32", "x11", "stub")
     */
    virtual std::string getPlatformName() const = 0;
};
/**
* @brief Factory to create appropriate tracker for current platform
*/
class WindowTrackerFactory {
public:
    /// Creates the best tracker for the current platform. Never returns
    /// null: when no native tracker works it returns a stub whose
    /// isAvailable() is false.
    static std::unique_ptr<IWindowTracker> create();
};
} // namespace aissia

View File

@ -0,0 +1,90 @@
#pragma once
#ifdef _WIN32
#include "IWindowTracker.hpp"
#include <Windows.h>
#include <Psapi.h>
#include <string>
namespace aissia {
/**
* @brief Windows implementation of window tracking
*
* Uses Win32 APIs:
* - GetForegroundWindow(): Get active window handle
* - GetWindowText(): Get window title
* - GetWindowThreadProcessId() + GetModuleBaseName(): Get process name
* - GetLastInputInfo(): Track user idle time
*/
class Win32WindowTracker : public IWindowTracker {
public:
Win32WindowTracker() = default;
~Win32WindowTracker() override = default;
std::string getCurrentAppName() override {
HWND hwnd = GetForegroundWindow();
if (!hwnd) return "";
DWORD processId = 0;
GetWindowThreadProcessId(hwnd, &processId);
if (processId == 0) return "";
HANDLE hProcess = OpenProcess(PROCESS_QUERY_INFORMATION | PROCESS_VM_READ,
FALSE, processId);
if (!hProcess) return "";
char processName[MAX_PATH] = {0};
if (GetModuleBaseNameA(hProcess, NULL, processName, sizeof(processName)) > 0) {
CloseHandle(hProcess);
// Remove .exe extension
std::string name(processName);
auto pos = name.rfind(".exe");
if (pos != std::string::npos) {
name = name.substr(0, pos);
}
return name;
}
CloseHandle(hProcess);
return "";
}
std::string getCurrentWindowTitle() override {
HWND hwnd = GetForegroundWindow();
if (!hwnd) return "";
char title[256] = {0};
GetWindowTextA(hwnd, title, sizeof(title));
return std::string(title);
}
bool isUserIdle(int thresholdSeconds) override {
return getIdleTimeSeconds() >= thresholdSeconds;
}
int getIdleTimeSeconds() override {
LASTINPUTINFO lii;
lii.cbSize = sizeof(LASTINPUTINFO);
if (GetLastInputInfo(&lii)) {
DWORD tickCount = GetTickCount();
DWORD idleTime = tickCount - lii.dwTime;
return static_cast<int>(idleTime / 1000);
}
return 0;
}
bool isAvailable() const override {
return true; // Always available on Windows
}
std::string getPlatformName() const override {
return "win32";
}
};
} // namespace aissia
#endif // _WIN32

View File

@ -0,0 +1,47 @@
#include "IWindowTracker.hpp"

#include <spdlog/spdlog.h>
#include <spdlog/sinks/stdout_color_sinks.h>  // spdlog::stdout_color_mt

#include <memory>

#ifdef _WIN32
#include "Win32WindowTracker.hpp"
#elif defined(__linux__)
#include "X11WindowTracker.hpp"
#endif
namespace aissia {
// Stub implementation for unsupported platforms
// Stub tracker for unsupported platforms: reports "unknown"/empty values and
// never idle. isAvailable() returns false so callers can detect that real
// window tracking is absent.
class StubWindowTracker : public IWindowTracker {
public:
    std::string getCurrentAppName() override { return "unknown"; }
    std::string getCurrentWindowTitle() override { return ""; }
    bool isUserIdle(int thresholdSeconds) override { return false; }
    int getIdleTimeSeconds() override { return 0; }
    bool isAvailable() const override { return false; }
    std::string getPlatformName() const override { return "stub"; }
};
std::unique_ptr<IWindowTracker> WindowTrackerFactory::create() {
    auto logger = spdlog::get("WindowTracker");
    if (!logger) {
        logger = spdlog::stdout_color_mt("WindowTracker");
    }

#ifdef _WIN32
    auto native = std::make_unique<Win32WindowTracker>();
    if (native->isAvailable()) {
        logger->info("Using Win32 window tracker");
        return native;
    }
#elif defined(__linux__)
    auto native = std::make_unique<X11WindowTracker>();
    if (native->isAvailable()) {
        logger->info("Using X11 window tracker");
        return native;
    }
#endif

    // Unsupported platform or missing tooling (e.g. no xdotool on Linux):
    // degrade to a stub so the caller never receives a null tracker.
    logger->warn("No window tracker available, using stub");
    return std::make_unique<StubWindowTracker>();
}
} // namespace aissia

View File

@ -0,0 +1,124 @@
#pragma once
#ifdef __linux__
#include "IWindowTracker.hpp"
#include <string>
#include <cstdio>
#include <memory>
#include <array>
#include <fstream>
#include <chrono>
namespace aissia {
/**
* @brief Linux X11 implementation of window tracking
*
* Uses xdotool and /proc for tracking (avoids X11 library dependency).
* Falls back gracefully if tools are not available.
*/
class X11WindowTracker : public IWindowTracker {
public:
    X11WindowTracker() {
        // Check if xdotool is available
        // (tracking shells out to xdotool instead of linking against libX11).
        m_available = (system("which xdotool > /dev/null 2>&1") == 0);
    }
    ~X11WindowTracker() override = default;
    /// Process name of the focused window, read from /proc/<pid>/comm.
    /// Returns "" when xdotool is missing or no active window is reported.
    /// NOTE(review): each call spawns a shell via popen -- acceptable for
    /// low-frequency polling; confirm the monitoring interval is coarse.
    std::string getCurrentAppName() override {
        if (!m_available) return "";
        // Get PID of active window
        std::string pid = executeCommand("xdotool getactivewindow getwindowpid 2>/dev/null");
        if (pid.empty()) return "";
        // Trim newline
        while (!pid.empty() && (pid.back() == '\n' || pid.back() == '\r')) {
            pid.pop_back();
        }
        // Read process name from /proc
        std::string procPath = "/proc/" + pid + "/comm";
        std::ifstream procFile(procPath);
        if (procFile.is_open()) {
            std::string name;
            std::getline(procFile, name);
            return name;
        }
        return "";
    }
    /// Title of the focused window via xdotool; "" if unavailable.
    std::string getCurrentWindowTitle() override {
        if (!m_available) return "";
        std::string title = executeCommand("xdotool getactivewindow getwindowname 2>/dev/null");
        // Trim newline
        while (!title.empty() && (title.back() == '\n' || title.back() == '\r')) {
            title.pop_back();
        }
        return title;
    }
    bool isUserIdle(int thresholdSeconds) override {
        return getIdleTimeSeconds() >= thresholdSeconds;
    }
    /// Idle seconds from xprintidle when installed; otherwise approximated
    /// as time since the focused app last changed (does NOT see keyboard or
    /// mouse activity inside the same app, so the fallback over-reports idle).
    int getIdleTimeSeconds() override {
        // Try xprintidle if available
        std::string result = executeCommand("xprintidle 2>/dev/null");
        if (!result.empty()) {
            try {
                // xprintidle returns milliseconds
                long ms = std::stol(result);
                return static_cast<int>(ms / 1000);
            } catch (...) {}
        }
        // Fallback: track time since last process change
        auto now = std::chrono::steady_clock::now();
        std::string currentApp = getCurrentAppName();
        if (currentApp != m_lastApp) {
            m_lastApp = currentApp;
            m_lastActivityTime = now;
        }
        auto elapsed = std::chrono::duration_cast<std::chrono::seconds>(
            now - m_lastActivityTime).count();
        return static_cast<int>(elapsed);
    }
    bool isAvailable() const override {
        return m_available;
    }
    std::string getPlatformName() const override {
        return "x11";
    }
private:
    bool m_available = false;              // True only if xdotool was found at startup
    std::string m_lastApp;                 // Last focused app seen by the idle fallback
    std::chrono::steady_clock::time_point m_lastActivityTime = std::chrono::steady_clock::now();
    /// Run a shell command and capture its stdout (popen; stderr not captured).
    /// Returns "" if the pipe could not be opened.
    std::string executeCommand(const std::string& cmd) {
        std::array<char, 256> buffer;
        std::string result;
        std::unique_ptr<FILE, decltype(&pclose)> pipe(popen(cmd.c_str(), "r"), pclose);
        if (!pipe) return "";
        while (fgets(buffer.data(), buffer.size(), pipe.get()) != nullptr) {
            result += buffer.data();
        }
        return result;
    }
};
} // namespace aissia
#endif // __linux__