feat: Implement MCP client and internal tools for agentic LLM

Add complete tool calling infrastructure for Claude Code-like functionality:

Internal Tools (via GroveEngine IIO):
- Scheduler: get_current_task, list_tasks, start_task, complete_task, start_break
- Monitoring: get_focus_stats, get_current_app
- Storage: save_note, query_notes, get_session_history
- Voice: speak

MCP Client (for external servers):
- StdioTransport for fork/exec JSON-RPC communication
- MCPClient for multi-server orchestration
- Support for filesystem, brave-search, fetch servers

Architecture:
- IOBridge for sync request/response over async IIO pub/sub
- Tool handlers added to all modules (SchedulerModule, MonitoringModule, StorageModule, VoiceModule)
- LLMService unifies internal tools + MCP tools in ToolRegistry

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
StillHammer 2025-11-26 16:50:30 +08:00
parent 9c1e168d4f
commit 059709cd0d
28 changed files with 2572 additions and 7 deletions


@ -73,6 +73,20 @@ if(OPENSSL_FOUND)
target_compile_definitions(AissiaLLM PRIVATE CPPHTTPLIB_OPENSSL_SUPPORT)
endif()
# Tools Library (Internal tools + MCP client)
add_library(AissiaTools STATIC
src/shared/tools/InternalTools.cpp
src/shared/mcp/StdioTransport.cpp
src/shared/mcp/MCPClient.cpp
)
target_include_directories(AissiaTools PUBLIC
${CMAKE_CURRENT_SOURCE_DIR}/src
)
target_link_libraries(AissiaTools PUBLIC
GroveEngine::impl
spdlog::spdlog
)
# Platform Library (window tracking)
add_library(AissiaPlatform STATIC
src/shared/platform/WindowTrackerFactory.cpp
@ -95,6 +109,7 @@ add_library(AissiaAudio STATIC
target_include_directories(AissiaAudio PUBLIC
${CMAKE_CURRENT_SOURCE_DIR}/src
${httplib_SOURCE_DIR}
${nlohmann_json_SOURCE_DIR}/include
)
target_link_libraries(AissiaAudio PUBLIC
spdlog::spdlog
@ -123,6 +138,7 @@ target_link_libraries(AissiaServices PUBLIC
GroveEngine::impl
spdlog::spdlog
AissiaLLM
AissiaTools
AissiaPlatform
AissiaAudio
SQLite::SQLite3

config/mcp.json (new file)

@ -0,0 +1,31 @@
{
"servers": {
"filesystem": {
"command": "npx",
"args": ["-y", "@modelcontextprotocol/server-filesystem", "/home"],
"enabled": false,
"_comment": "Enable to allow file read/write operations"
},
"brave-search": {
"command": "npx",
"args": ["-y", "@modelcontextprotocol/server-brave-search"],
"env": {
"BRAVE_API_KEY": "${BRAVE_API_KEY}"
},
"enabled": false,
"_comment": "Enable for web search (requires BRAVE_API_KEY)"
},
"fetch": {
"command": "npx",
"args": ["-y", "@modelcontextprotocol/server-fetch"],
"enabled": false,
"_comment": "Enable for HTTP fetch operations"
},
"memory": {
"command": "npx",
"args": ["-y", "@modelcontextprotocol/server-memory"],
"enabled": false,
"_comment": "Enable for persistent memory/knowledge graph"
}
}
}


@ -0,0 +1,338 @@
# MCP + Voice Agent Integration Plan
## Goal
Turn AISSIA into a "voice-driven Claude Code": an assistant that can use tools (internal + MCP) to accomplish tasks, controlled by voice.
## Target Architecture
```
┌─────────────────────────────────────────────────────────────────────────────┐
│ AISSIA - Agent Vocal │
├─────────────────────────────────────────────────────────────────────────────┤
│ │
│ ┌──────────┐ ┌──────────┐ ┌─────────────────────────────────────┐ │
│ │ Micro │───►│ STT │───►│ LLMService │ │
│ └──────────┘ │ (Whisper)│ │ (Agentic Loop) │ │
│ └──────────┘ │ │ │
│ │ ┌─────────────────────────────┐ │ │
│ │ │ ToolRegistry │ │ │
│ │ │ │ │ │
│ │ │ ┌───────────────────────┐ │ │ │
│ │ │ │ Internal Tools │ │ │ │
│ │ │ │ (via GroveEngine) │ │ │ │
│ │ │ │ │ │ │ │
│ │ │ │ • get_schedule │ │ │ │
│ │ │ │ • start_break │ │ │ │
│ │ │ │ • get_focus_stats │ │ │ │
│ │ │ │ • save_note │ │ │ │
│ │ │ │ • query_history │ │ │ │
│ │ │ └───────────────────────┘ │ │ │
│ │ │ │ │ │
│ │ │ ┌───────────────────────┐ │ │ │
│ │ │ │ MCP Tools │ │ │ │
│ │ │ │ (via MCPClient) │ │ │ │
│ │ │ │ │ │ │ │
│ │ │ │ • read_file │ │ │ │
│ │ │ │ • write_file │ │ │ │
│ │ │ │ • list_directory │ │ │ │
│ │ │ │ • web_search │ │ │ │
│ │ │ │ • fetch_url │ │ │ │
│ │ │ └───────────────────────┘ │ │ │
│ │ └─────────────────────────────┘ │ │
│ │ │ │
│ └──────────────────┬──────────────────┘ │
│ │ │
│ ▼ │
│ ┌──────────┐ ┌──────────┐ ┌─────────────────────────────────────┐ │
│ │ Speaker │◄───│ TTS │◄───│ Response Text │ │
│ └──────────┘ │ (espeak) │ └─────────────────────────────────────┘ │
│ └──────────┘ │
│ │
└─────────────────────────────────────────────────────────────────────────────┘
INTERNAL COMMUNICATION (GroveEngine IIO)
┌──────────────┐ ┌──────────────┐ ┌──────────────┐ ┌──────────────┐
│ Scheduler │ │ Monitoring │ │ Storage │ │ Voice │
│ Module │ │ Module │ │ Module │ │ Module │
├──────────────┤ ├──────────────┤ ├──────────────┤ ├──────────────┤
│ │ │ │ │ │ │ │
│ • tasks │ │ • app usage │ │ • sessions │ │ • TTS queue │
│ • breaks │ │ • focus time │ │ • notes │ │ • STT events │
│ • hyperfocus │ │ • idle │ │ • history │ │ │
│ │ │ │ │ │ │ │
└──────┬───────┘ └──────┬───────┘ └──────┬───────┘ └──────┬───────┘
│ │ │ │
└────────────────────┴────────────────────┴────────────────────┘
IIO Pub/Sub
┌───────────────┴───────────────┐
│ LLMService │
│  (listens to tool:* topics)   │
└───────────────────────────────┘
```
## Implementation Phases
### Phase 1: Internal Tools via IIO
**Goal**: GroveEngine modules expose their functionality as LLM tools.
**Files to create/modify**:
- `src/shared/tools/InternalTools.hpp` - Internal tool definitions
- `src/shared/tools/InternalTools.cpp` - Implementation
- `src/services/LLMService.cpp` - Tool registration
**Tools to implement**:
| Tool | Source Module | Description |
|------|---------------|-------------|
| `get_current_task` | SchedulerModule | Returns the task currently in progress |
| `list_tasks` | SchedulerModule | Lists all scheduled tasks |
| `start_task` | SchedulerModule | Starts a task by ID |
| `complete_task` | SchedulerModule | Marks a task as completed |
| `start_break` | SchedulerModule | Triggers a break |
| `get_focus_stats` | MonitoringModule | Focus stats (time, apps) |
| `get_current_app` | MonitoringModule | Currently active app |
| `save_note` | StorageModule | Saves a note |
| `query_notes` | StorageModule | Searches the notes |
| `get_session_history` | StorageModule | Session history |
| `speak` | VoiceModule | Makes the assistant speak |
**Implementation pattern**:
```cpp
// The tool sends a request over IIO and waits for the response
json InternalTools::executeGetCurrentTask(const json& input) {
    // 1. Build the request
    auto request = std::make_unique<grove::JsonDataNode>("request");
    request->setString("action", "get_current_task");

    // 2. Publish and wait for the response (sync via promise/future)
    auto response = m_ioBridge->requestAndWait("scheduler:query", std::move(request), 1000);

    // 3. Convert to JSON for the LLM
    return {
        {"task_id", response->getString("task_id", "")},
        {"task_name", response->getString("task_name", "")},
        {"duration_minutes", response->getInt("duration_minutes", 0)}
    };
}
```
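On the module side, the matching handler echoes the `correlation_id` back on a `*:response` topic so the bridge can pair request and reply. A condensed sketch of the handler added to SchedulerModule later in this commit:
```cpp
// Condensed from SchedulerModule::handleToolQuery further down in this commit.
void SchedulerModule::handleToolQuery(const grove::IDataNode& request) {
    auto response = std::make_unique<grove::JsonDataNode>("response");
    // Echo the correlation ID so the bridge can match this reply to its request
    response->setString("correlation_id", request.getString("correlation_id", ""));

    if (request.getString("action", "") == "get_current_task") {
        response->setString("task_id", m_currentTaskId);
    } else {
        response->setString("error", "unknown_action");
    }
    m_io->publish("scheduler:response", std::move(response));
}
```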
### Phase 2: MCP Client
**Goal**: Connect to external MCP servers (filesystem, brave-search, etc.)
**Files to create**:
- `src/shared/mcp/MCPClient.hpp` - MCP client interface
- `src/shared/mcp/MCPClient.cpp` - Implementation
- `src/shared/mcp/MCPTransport.hpp` - Stdio/SSE transport
- `src/shared/mcp/MCPTypes.hpp` - MCP types (Tool, Resource, etc.)
**Simplified MCP protocol**:
```
Client Server (ex: filesystem)
│ │
│──── initialize ─────────────────►│
│◄─── initialized (capabilities) ──│
│ │
│──── tools/list ─────────────────►│
│◄─── tools (read_file, etc.) ─────│
│ │
│──── tools/call ─────────────────►│
│ {name: "read_file", │
│ arguments: {path: "..."}} │
│◄─── result ──────────────────────│
│ {content: "..."} │
│ │
```
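On the wire, each arrow above is one newline-delimited JSON-RPC message. A sketch of the opening `initialize` request built with nlohmann::json; the field names mirror `MCPClient::initializeServer` later in this commit, the variable name is illustrative:
```cpp
#include <nlohmann/json.hpp>
using json = nlohmann::json;

// First message the client writes to the server's stdin (one line, '\n'-terminated)
json initialize = {
    {"jsonrpc", "2.0"},
    {"id", 1},
    {"method", "initialize"},
    {"params", {
        {"protocolVersion", "2024-11-05"},
        {"capabilities", {{"roots", {{"listChanged", true}}}}},
        {"clientInfo", {{"name", "AISSIA"}, {"version", "1.0.0"}}}
    }}
};
// The server answers on stdout with {"jsonrpc":"2.0","id":1,"result":{...}}.
// The client then sends the "notifications/initialized" notification (no id)
// before issuing "tools/list" and "tools/call" requests.
```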
**Stdio transport**:
```cpp
class StdioTransport : public IMCPTransport {
public:
    StdioTransport(const std::string& command, const std::vector<std::string>& args) {
        // Fork + exec the MCP server
        // Redirect stdin/stdout for JSON-RPC
    }

    json sendRequest(const std::string& method, const json& params) {
        json request = {
            {"jsonrpc", "2.0"},
            {"id", m_nextId++},
            {"method", method},
            {"params", params}
        };
        // Write to the process's stdin
        write(request.dump() + "\n");
        // Read the response from stdout
        return json::parse(readLine());
    }
};
```
**MCP configuration**:
```json
// config/mcp.json
{
    "servers": {
        "filesystem": {
            "command": "npx",
            "args": ["-y", "@modelcontextprotocol/server-filesystem", "/home/user/documents"],
            "enabled": true
        },
        "brave-search": {
            "command": "npx",
            "args": ["-y", "@modelcontextprotocol/server-brave-search"],
            "env": {
                "BRAVE_API_KEY": "${BRAVE_API_KEY}"
            },
            "enabled": true
        },
        "fetch": {
            "command": "npx",
            "args": ["-y", "@modelcontextprotocol/server-fetch"],
            "enabled": true
        }
    }
}
```
### Phase 3: Unified ToolRegistry
**Goal**: The ToolRegistry aggregates internal and MCP tools transparently.
**LLMService changes**:
```cpp
void LLMService::initializeTools() {
    // 1. Internal tools (via IIO)
    m_internalTools = std::make_unique<InternalTools>(m_io);
    for (const auto& tool : m_internalTools->getTools()) {
        m_toolRegistry.registerTool(tool);
    }

    // 2. MCP tools (via MCPClient)
    m_mcpClient = std::make_unique<MCPClient>();
    m_mcpClient->loadConfig("config/mcp.json");
    m_mcpClient->connectAll();

    for (const auto& tool : m_mcpClient->listAllTools()) {
        m_toolRegistry.registerTool(
            tool.name,
            tool.description,
            tool.inputSchema,
            [this, name = tool.name](const json& input) {
                return m_mcpClient->callTool(name, input);
            }
        );
    }

    m_logger->info("Tools loaded: {} internal, {} MCP",
                   m_internalTools->size(), m_mcpClient->toolCount());
}
```
### Phase 4: Tests
**Unit tests**:
- `tests/test_internal_tools.cpp` - Mock the IIO, verify tool execution
- `tests/test_mcp_client.cpp` - Mock server, verify the protocol
- `tests/test_tool_registry.cpp` - Verify aggregation
**Integration test**:
```cpp
// tests/integration/test_agentic_loop.cpp
TEST_CASE("Agentic loop with tools") {
    LLMService service;
    service.initialize(io);
    service.loadConfig("config/llm.json");

    // Simulate a query that requires tools
    auto result = service.query("Quelle tâche je fais en ce moment ?");

    // Verify that the get_current_task tool was called
    REQUIRE(result.contains("task"));
}
```
## Files to Create
```
src/
├── shared/
│   ├── tools/
│   │   ├── InternalTools.hpp      # Internal tools (IIO)
│   │   ├── InternalTools.cpp
│   │   └── IOBridge.hpp           # Sync request/response over IIO
│   └── mcp/
│       ├── MCPClient.hpp          # Main MCP client
│       ├── MCPClient.cpp
│       ├── MCPTransport.hpp       # Transport interface
│       ├── StdioTransport.hpp     # Stdio transport (fork/exec)
│       ├── StdioTransport.cpp
│       └── MCPTypes.hpp           # Tool, Resource, etc.
├── services/
│   └── LLMService.cpp             # Modify to initialize tools
└── modules/
    ├── SchedulerModule.cpp        # Add tool request handlers
    ├── MonitoringModule.cpp
    └── StorageModule.cpp
config/
└── mcp.json                       # MCP server configuration
tests/
├── test_internal_tools.cpp
├── test_mcp_client.cpp
└── integration/
    └── test_agentic_loop.cpp
```
## Implementation Order
1. **IOBridge** - Synchronous request/response over IIO
2. **InternalTools** - Tool definitions and implementation (see the sketch after this list)
3. **Module handlers** - Add tool request handling to each module
4. **Internal tools test** - Verify it works
5. **MCPTypes** - Core MCP types
6. **StdioTransport** - Fork/exec + JSON-RPC communication
7. **MCPClient** - Server orchestration
8. **MCP test** - Verify against a real server (filesystem)
9. **Unification** - Merge into LLMService
10. **Integration test** - End-to-end
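For step 2, a single registration might look roughly like this. This is a sketch: the `InternalTools::registerAll` helper and the exact JSON Schema wording are hypothetical, while `registerTool()` and `IOBridge::request()` follow the signatures introduced elsewhere in this commit.
```cpp
// Hypothetical sketch of registering one internal tool (start_break).
void InternalTools::registerAll(ToolRegistry& registry) {
    registry.registerTool(
        "start_break",
        "Start a break of the given duration",
        {   // JSON Schema describing the tool input (wording illustrative)
            {"type", "object"},
            {"properties", {
                {"duration_minutes", {{"type", "integer"}}},
                {"reason", {{"type", "string"}}}
            }}
        },
        [this](const nlohmann::json& input) -> nlohmann::json {
            // Blocking request over IIO; SchedulerModule replies on scheduler:response
            return m_ioBridge->request("scheduler:command", {
                {"action", "start_break"},
                {"duration_minutes", input.value("duration_minutes", 5)},
                {"reason", input.value("reason", "voice_request")}
            }, 2000);
        }
    );
}
```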
## Time Estimate
| Phase | Estimated time |
|-------|----------------|
| Phase 1: Internal Tools | 2-3h |
| Phase 2: MCP Client | 2-3h |
| Phase 3: Unification | 1h |
| Phase 4: Tests | 1-2h |
| **Total** | **6-9h** |
## Dependencies
- `nlohmann/json` - Already present
- `spdlog` - Already present
- Node.js/npx - For the MCP servers (optional; we could also implement our own servers in C++)
## Risks and Mitigations
| Risk | Mitigation |
|------|------------|
| Fork/exec is tricky on Windows | Use `_popen` or a cross-platform process library |
| Slow tools time out | Per-tool configurable timeout, 30s default |
| MCP server crashes | Automatic reconnect, graceful fallback |
| Too many tools confuse the LLM | Group by namespace, cap at ~20 tools |


@ -19,6 +19,21 @@
namespace fs = std::filesystem;
// Helper to clone IDataNode (workaround for missing clone() method)
std::unique_ptr<grove::IDataNode> cloneDataNode(grove::IDataNode* node) {
if (!node) return nullptr;
// Cast to JsonDataNode to access the JSON data
auto* jsonNode = dynamic_cast<grove::JsonDataNode*>(node);
if (jsonNode) {
return std::make_unique<grove::JsonDataNode>(
jsonNode->getName(),
jsonNode->getJsonData()
);
}
// Fallback: create empty node
return std::make_unique<grove::JsonDataNode>("data");
}
// Global flag for clean shutdown
static volatile bool g_running = true;
@ -121,7 +136,7 @@ public:
if (prefix == "llm" || prefix == "storage" || prefix == "platform" || prefix == "voice") {
for (auto& [name, io] : m_serviceIOs) {
if (io && msg.data) {
io->publish(msg.topic, msg.data->clone());
io->publish(msg.topic, cloneDataNode(msg.data.get()));
}
}
}
@ -129,7 +144,7 @@ public:
// Route to modules (broadcast)
for (auto& [name, io] : m_moduleIOs) {
if (io && msg.data) {
io->publish(msg.topic, msg.data->clone());
io->publish(msg.topic, cloneDataNode(msg.data.get()));
}
}
}


@ -1,4 +1,5 @@
#include "AIModule.h"
#include <grove/IIO.h>
#include <grove/JsonDataNode.h>
namespace aissia {


@ -1,4 +1,5 @@
#include "MonitoringModule.h"
#include <grove/IIO.h>
#include <grove/JsonDataNode.h>
#include <algorithm>
@ -22,7 +23,7 @@ void MonitoringModule::setConfiguration(const grove::IDataNode& configNode,
// Load productive apps list
m_productiveApps.clear();
auto* prodNode = configNode.getChildReadOnly("productive_apps");
auto* prodNode = const_cast<grove::IDataNode&>(configNode).getChildReadOnly("productive_apps");
if (prodNode) {
for (const auto& name : prodNode->getChildNames()) {
m_productiveApps.insert(prodNode->getString(name, ""));
@ -36,7 +37,7 @@ void MonitoringModule::setConfiguration(const grove::IDataNode& configNode,
// Load distracting apps list
m_distractingApps.clear();
auto* distNode = configNode.getChildReadOnly("distracting_apps");
auto* distNode = const_cast<grove::IDataNode&>(configNode).getChildReadOnly("distracting_apps");
if (distNode) {
for (const auto& name : distNode->getChildNames()) {
m_distractingApps.insert(distNode->getString(name, ""));
@ -54,6 +55,8 @@ void MonitoringModule::setConfiguration(const grove::IDataNode& configNode,
m_io->subscribe("platform:window_changed", subConfig);
m_io->subscribe("platform:idle_detected", subConfig);
m_io->subscribe("platform:activity_resumed", subConfig);
// Tool request handlers
m_io->subscribe("monitoring:query", subConfig);
}
m_logger->info("MonitoringModule configure (v2 - sans infrastructure)");
@ -86,9 +89,49 @@ void MonitoringModule::processMessages() {
else if (msg.topic == "platform:activity_resumed" && msg.data) {
handleActivityResumed(*msg.data);
}
// Tool query handlers
else if (msg.topic == "monitoring:query" && msg.data) {
handleToolQuery(*msg.data);
}
}
}
void MonitoringModule::handleToolQuery(const grove::IDataNode& request) {
std::string correlationId = request.getString("correlation_id", "");
std::string action = request.getString("action", "");
auto response = std::make_unique<grove::JsonDataNode>("response");
response->setString("correlation_id", correlationId);
if (action == "get_focus_stats") {
std::string period = request.getString("period", "today");
// For now, we only track "today" stats
int productiveMinutes = m_totalProductiveSeconds / 60;
int distractingMinutes = m_totalDistractingSeconds / 60;
int totalMinutes = productiveMinutes + distractingMinutes;
int focusScore = totalMinutes > 0 ? (productiveMinutes * 100 / totalMinutes) : 0;
response->setInt("productive_minutes", productiveMinutes);
response->setInt("distracting_minutes", distractingMinutes);
response->setInt("total_minutes", totalMinutes);
response->setInt("focus_score", focusScore);
response->setString("period", period);
response->setBool("is_idle", m_isIdle);
response->setString("current_app", m_currentApp);
}
else if (action == "get_current_app") {
response->setString("app_name", m_currentApp);
response->setBool("is_productive", isProductiveApp(m_currentApp));
response->setBool("is_distracting", isDistractingApp(m_currentApp));
response->setBool("is_idle", m_isIdle);
}
else {
response->setString("error", "unknown_action");
}
m_io->publish("monitoring:response", std::move(response));
}
void MonitoringModule::handleWindowChanged(const grove::IDataNode& data) {
std::string oldApp = data.getString("oldApp", "");
std::string newApp = data.getString("newApp", "");


@ -72,6 +72,9 @@ private:
void handleActivityResumed(const grove::IDataNode& data);
bool isProductiveApp(const std::string& appName) const;
bool isDistractingApp(const std::string& appName) const;
// Tool handlers
void handleToolQuery(const grove::IDataNode& request);
};
} // namespace aissia


@ -1,4 +1,5 @@
#include "SchedulerModule.h"
#include <grove/IIO.h>
#include <grove/JsonDataNode.h>
namespace aissia {
@ -29,6 +30,9 @@ void SchedulerModule::setConfiguration(const grove::IDataNode& configNode,
m_io->subscribe("user:task_switch", subConfig);
m_io->subscribe("monitoring:idle_detected", subConfig);
m_io->subscribe("monitoring:activity_resumed", subConfig);
// Tool request handlers
m_io->subscribe("scheduler:query", subConfig);
m_io->subscribe("scheduler:command", subConfig);
}
m_logger->info("SchedulerModule configure: hyperfocus={}min, break_interval={}min",
@ -84,9 +88,109 @@ void SchedulerModule::processMessages() {
// User returned - resume tracking
m_logger->debug("User active, resuming session tracking");
}
// Tool query handlers
else if (msg.topic == "scheduler:query" && msg.data) {
handleToolQuery(*msg.data);
}
else if (msg.topic == "scheduler:command" && msg.data) {
handleToolCommand(*msg.data);
}
}
}
void SchedulerModule::handleToolQuery(const grove::IDataNode& request) {
std::string correlationId = request.getString("correlation_id", "");
std::string action = request.getString("action", "");
auto response = std::make_unique<grove::JsonDataNode>("response");
response->setString("correlation_id", correlationId);
if (action == "get_current_task") {
Task* task = findTask(m_currentTaskId);
response->setString("task_id", m_currentTaskId);
response->setString("task_name", task ? task->name : (m_currentTaskId.empty() ? "Aucune tache active" : m_currentTaskId));
float sessionMinutes = m_currentTaskId.empty() ? 0 : (m_lastActivityTime - m_sessionStartTime) / 60.0f;
response->setInt("duration_minutes", static_cast<int>(sessionMinutes));
response->setString("started_at", ""); // TODO: format timestamp
}
else if (action == "list_tasks") {
bool includeCompleted = request.getBool("include_completed", false);
// Build task list as JSON-like structure
int count = 0;
for (const auto& task : m_tasks) {
if (!includeCompleted && task.completed) continue;
auto taskNode = std::make_unique<grove::JsonDataNode>("task_" + std::to_string(count));
taskNode->setString("id", task.id);
taskNode->setString("name", task.name);
taskNode->setInt("estimated_minutes", task.estimatedMinutes);
taskNode->setInt("actual_minutes", task.actualMinutes);
taskNode->setBool("completed", task.completed);
response->setChild("task_" + std::to_string(count), std::move(taskNode));
count++;
}
response->setInt("task_count", count);
response->setString("current_task", m_currentTaskId);
}
else {
response->setString("error", "unknown_action");
}
m_io->publish("scheduler:response", std::move(response));
}
void SchedulerModule::handleToolCommand(const grove::IDataNode& request) {
std::string correlationId = request.getString("correlation_id", "");
std::string action = request.getString("action", "");
auto response = std::make_unique<grove::JsonDataNode>("response");
response->setString("correlation_id", correlationId);
if (action == "start_task") {
std::string taskId = request.getString("task_id", "");
if (taskId.empty()) {
response->setString("error", "missing_task_id");
} else {
startTask(taskId);
response->setBool("success", true);
response->setString("task_id", taskId);
}
}
else if (action == "complete_task") {
std::string taskId = request.getString("task_id", "");
if (taskId.empty()) {
// Complete current task
completeCurrentTask();
} else {
// Complete specific task
Task* task = findTask(taskId);
if (task) {
task->completed = true;
}
}
response->setBool("success", true);
}
else if (action == "start_break") {
int duration = request.getInt("duration_minutes", m_breakDurationMinutes);
std::string reason = request.getString("reason", "break");
m_lastBreakTime = m_lastActivityTime;
m_logger->info("Pause demarree: {} minutes ({})", duration, reason);
// Publish break started event
auto event = std::make_unique<grove::JsonDataNode>("break");
event->setInt("duration", duration);
event->setString("reason", reason);
m_io->publish("scheduler:break_started", std::move(event));
response->setBool("success", true);
response->setInt("duration", duration);
}
else {
response->setString("error", "unknown_action");
}
m_io->publish("scheduler:response", std::move(response));
}
void SchedulerModule::checkHyperfocus(float currentTime) {
if (m_currentTaskId.empty()) return;


@ -81,11 +81,16 @@ private:
std::shared_ptr<spdlog::logger> m_logger;
// Helpers
void processMessages();
void checkHyperfocus(float currentTime);
void checkBreakReminder(float currentTime);
void startTask(const std::string& taskId);
void completeCurrentTask();
Task* findTask(const std::string& taskId);
// Tool handlers
void handleToolQuery(const grove::IDataNode& request);
void handleToolCommand(const grove::IDataNode& request);
};
} // namespace aissia


@ -1,4 +1,5 @@
#include "StorageModule.h"
#include <grove/IIO.h>
#include <grove/JsonDataNode.h>
namespace aissia {
@ -24,6 +25,9 @@ void StorageModule::setConfiguration(const grove::IDataNode& configNode,
m_io->subscribe("monitoring:app_changed", subConfig);
m_io->subscribe("storage:session_saved", subConfig);
m_io->subscribe("storage:error", subConfig);
// Tool request handlers
m_io->subscribe("storage:query", subConfig);
m_io->subscribe("storage:command", subConfig);
}
m_logger->info("StorageModule configure (v2 - sans infrastructure)");
@ -55,9 +59,107 @@ void StorageModule::processMessages() {
else if (msg.topic == "storage:error" && msg.data) {
handleStorageError(*msg.data);
}
// Tool handlers
else if (msg.topic == "storage:query" && msg.data) {
handleToolQuery(*msg.data);
}
else if (msg.topic == "storage:command" && msg.data) {
handleToolCommand(*msg.data);
}
}
}
void StorageModule::handleToolQuery(const grove::IDataNode& request) {
std::string correlationId = request.getString("correlation_id", "");
std::string action = request.getString("action", "");
auto response = std::make_unique<grove::JsonDataNode>("response");
response->setString("correlation_id", correlationId);
if (action == "query_notes") {
std::string query = request.getString("query", "");
std::string tag = request.getString("tag", "");
int limit = request.getInt("limit", 10);
// Search in-memory notes
int count = 0;
for (const auto& note : m_notes) {
if (count >= limit) break;
bool matches = query.empty() ||
note.content.find(query) != std::string::npos;
if (!tag.empty()) {
bool hasTag = false;
for (const auto& t : note.tags) {
if (t == tag) { hasTag = true; break; }
}
matches = matches && hasTag;
}
if (matches) {
auto noteNode = std::make_unique<grove::JsonDataNode>("note_" + std::to_string(count));
noteNode->setString("id", note.id);
noteNode->setString("content", note.content);
noteNode->setString("timestamp", note.timestamp);
response->setChild("note_" + std::to_string(count), std::move(noteNode));
count++;
}
}
response->setInt("count", count);
}
else if (action == "get_session_history") {
int days = request.getInt("days", 7);
response->setInt("last_session_id", m_lastSessionId);
response->setInt("total_saved", m_totalSaved);
response->setString("note", "Full history requires StorageService query");
}
else {
response->setString("error", "unknown_action");
}
m_io->publish("storage:response", std::move(response));
}
void StorageModule::handleToolCommand(const grove::IDataNode& request) {
std::string correlationId = request.getString("correlation_id", "");
std::string action = request.getString("action", "");
auto response = std::make_unique<grove::JsonDataNode>("response");
response->setString("correlation_id", correlationId);
if (action == "save_note") {
std::string content = request.getString("content", "");
if (content.empty()) {
response->setString("error", "missing_content");
} else {
// Create note in memory
Note note;
note.id = "note_" + std::to_string(m_notes.size());
note.content = content;
note.timestamp = "now"; // TODO: proper timestamp
m_notes.push_back(note);
m_logger->info("Note saved: {}", note.id);
// Also publish to StorageService for persistence
if (m_io) {
auto saveReq = std::make_unique<grove::JsonDataNode>("save_note");
saveReq->setString("content", content);
m_io->publish("storage:save_note", std::move(saveReq));
}
response->setBool("success", true);
response->setString("note_id", note.id);
}
}
else {
response->setString("error", "unknown_action");
}
m_io->publish("storage:response", std::move(response));
}
void StorageModule::handleTaskCompleted(const grove::IDataNode& data) {
std::string taskName = data.getString("taskName", "unknown");
int duration = data.getInt("duration", 0);


@ -7,6 +7,7 @@
#include <spdlog/sinks/stdout_color_sinks.h>
#include <memory>
#include <string>
#include <vector>
namespace aissia {
@ -47,10 +48,19 @@ public:
int getVersion() const override { return 2; }
private:
// Note structure for in-memory storage
struct Note {
std::string id;
std::string content;
std::string timestamp;
std::vector<std::string> tags;
};
// State
int m_lastSessionId = 0;
int m_pendingSaves = 0;
int m_totalSaved = 0;
std::vector<Note> m_notes;
// Services
grove::IIO* m_io = nullptr;
@ -63,6 +73,10 @@ private:
void handleAppChanged(const grove::IDataNode& data);
void handleSessionSaved(const grove::IDataNode& data);
void handleStorageError(const grove::IDataNode& data);
// Tool handlers
void handleToolQuery(const grove::IDataNode& request);
void handleToolCommand(const grove::IDataNode& request);
};
} // namespace aissia


@ -1,4 +1,5 @@
#include "VoiceModule.h"
#include <grove/IIO.h>
#include <grove/JsonDataNode.h>
namespace aissia {
@ -18,13 +19,13 @@ void VoiceModule::setConfiguration(const grove::IDataNode& configNode,
m_config = std::make_unique<grove::JsonDataNode>("config");
// TTS config
auto* ttsNode = configNode.getChildReadOnly("tts");
auto* ttsNode = const_cast<grove::IDataNode&>(configNode).getChildReadOnly("tts");
if (ttsNode) {
m_ttsEnabled = ttsNode->getBool("enabled", true);
}
// STT config
auto* sttNode = configNode.getChildReadOnly("stt");
auto* sttNode = const_cast<grove::IDataNode&>(configNode).getChildReadOnly("stt");
if (sttNode) {
m_sttEnabled = sttNode->getBool("enabled", true);
m_language = sttNode->getString("language", "fr");
@ -39,6 +40,8 @@ void VoiceModule::setConfiguration(const grove::IDataNode& configNode,
m_io->subscribe("voice:speaking_started", subConfig);
m_io->subscribe("voice:speaking_ended", subConfig);
m_io->subscribe("voice:transcription", subConfig);
// Tool request handlers
m_io->subscribe("voice:command", subConfig);
}
m_logger->info("VoiceModule configure (v2 - sans infrastructure): TTS={}, STT={}",
@ -77,6 +80,25 @@ void VoiceModule::processMessages() {
else if (msg.topic == "voice:transcription" && msg.data) {
handleTranscription(*msg.data);
}
// Tool command handler
else if (msg.topic == "voice:command" && msg.data) {
handleToolCommand(*msg.data);
}
}
}
void VoiceModule::handleToolCommand(const grove::IDataNode& request) {
std::string action = request.getString("action", "");
if (action == "speak") {
std::string text = request.getString("text", "");
std::string priority = request.getString("priority", "normal");
if (!text.empty()) {
bool isPriority = (priority == "high");
requestSpeak(text, isPriority);
m_logger->info("Tool speak: {} (priority={})", text.substr(0, 50), priority);
}
}
}


@ -72,6 +72,9 @@ private:
void handleSpeakingStarted(const grove::IDataNode& data);
void handleSpeakingEnded(const grove::IDataNode& data);
void handleTranscription(const grove::IDataNode& data);
// Tool handlers
void handleToolCommand(const grove::IDataNode& request);
};
} // namespace aissia


@ -53,9 +53,16 @@ bool LLMService::loadConfig(const std::string& configPath) {
m_providerName = config.value("provider", "claude");
m_maxIterations = config.value("max_iterations", 10);
m_defaultSystemPrompt = config.value("system_prompt",
"Tu es AISSIA, un assistant personnel intelligent.");
"Tu es AISSIA, un assistant personnel intelligent. "
"Tu peux utiliser des tools pour accomplir des taches: "
"gerer le planning, verifier le focus, sauvegarder des notes, "
"lire des fichiers, faire des recherches web, etc.");
m_logger->info("LLM provider loaded: {} ({})", m_providerName, m_provider->getModel());
// Initialize tools after provider is ready
initializeTools();
return true;
} catch (const std::exception& e) {
@ -64,6 +71,58 @@ bool LLMService::loadConfig(const std::string& configPath) {
}
}
void LLMService::initializeTools() {
m_logger->info("Initializing tools...");
// 1. Internal tools (via GroveEngine IIO)
if (m_io) {
m_internalTools = std::make_unique<InternalTools>(m_io);
for (const auto& tool : m_internalTools->getTools()) {
m_toolRegistry.registerTool(tool);
}
m_logger->info("Registered {} internal tools", m_internalTools->size());
}
// 2. MCP tools (via external servers)
m_mcpClient = std::make_unique<mcp::MCPClient>();
if (loadMCPConfig("config/mcp.json")) {
int connected = m_mcpClient->connectAll();
if (connected > 0) {
for (const auto& tool : m_mcpClient->listAllTools()) {
// Convert MCP tool to our ToolDefinition format
m_toolRegistry.registerTool(
tool.name,
tool.description,
tool.inputSchema,
[this, toolName = tool.name](const nlohmann::json& input) -> nlohmann::json {
auto result = m_mcpClient->callTool(toolName, input);
// Convert MCP result to simple JSON
if (result.isError) {
return {{"error", true}, {"content", result.content}};
}
// Extract text content
std::string text;
for (const auto& content : result.content) {
if (content.contains("text")) {
text += content["text"].get<std::string>();
}
}
return {{"content", text}};
}
);
}
m_logger->info("Registered {} MCP tools from {} servers",
m_mcpClient->toolCount(), connected);
}
}
m_logger->info("Total tools available: {}", m_toolRegistry.size());
}
bool LLMService::loadMCPConfig(const std::string& configPath) {
return m_mcpClient->loadConfig(configPath);
}
void LLMService::registerTool(const std::string& name, const std::string& description,
const nlohmann::json& schema,
std::function<nlohmann::json(const nlohmann::json&)> handler) {


@ -3,6 +3,8 @@
#include "IService.hpp"
#include "../shared/llm/ILLMProvider.hpp"
#include "../shared/llm/ToolRegistry.hpp"
#include "../shared/tools/InternalTools.hpp"
#include "../shared/mcp/MCPClient.hpp"
#include <grove/IIO.h>
#include <grove/JsonDataNode.h>
@ -51,6 +53,12 @@ public:
const nlohmann::json& schema,
std::function<nlohmann::json(const nlohmann::json&)> handler);
/// Load and initialize all tools (internal + MCP)
void initializeTools();
/// Load MCP server configurations
bool loadMCPConfig(const std::string& configPath);
private:
struct Request {
std::string query;
@ -76,6 +84,8 @@ private:
// State
std::unique_ptr<ILLMProvider> m_provider;
ToolRegistry m_toolRegistry;
std::unique_ptr<InternalTools> m_internalTools;
std::unique_ptr<mcp::MCPClient> m_mcpClient;
std::map<std::string, nlohmann::json> m_conversations; // conversationId -> history
// Threading


@ -4,6 +4,7 @@
#include "ITTSEngine.hpp"
#include <spdlog/spdlog.h>
#include <spdlog/sinks/stdout_color_sinks.h>
#include <cstdlib>
#include <cstdio>
#include <memory>


@ -1,5 +1,6 @@
#include "ITTSEngine.hpp"
#include <spdlog/spdlog.h>
#include <spdlog/sinks/stdout_color_sinks.h>
#ifdef _WIN32
#include "SAPITTSEngine.hpp"


@ -12,7 +12,9 @@
#include <nlohmann/json.hpp>
#include <string>
#include <optional>
#include <map>
#include <spdlog/spdlog.h>
#include <spdlog/sinks/stdout_color_sinks.h>
namespace aissia {


@ -0,0 +1,302 @@
#include "MCPClient.hpp"
#include <spdlog/sinks/stdout_color_sinks.h>
#include <fstream>
namespace aissia::mcp {
MCPClient::MCPClient() {
m_logger = spdlog::get("MCPClient");
if (!m_logger) {
m_logger = spdlog::stdout_color_mt("MCPClient");
}
}
MCPClient::~MCPClient() {
disconnectAll();
}
bool MCPClient::loadConfig(const std::string& configPath) {
try {
std::ifstream file(configPath);
if (!file.is_open()) {
m_logger->warn("MCP config not found: {}", configPath);
return false;
}
json config;
file >> config;
if (!config.contains("servers")) {
m_logger->warn("No 'servers' key in MCP config");
return false;
}
m_configs.clear();
for (auto& [name, serverConfig] : config["servers"].items()) {
auto cfg = MCPServerConfig::fromJson(name, serverConfig);
if (cfg.enabled && !cfg.command.empty()) {
m_configs[name] = cfg;
m_logger->info("Loaded MCP server config: {}", name);
}
}
m_logger->info("Loaded {} MCP server configs", m_configs.size());
return true;
} catch (const std::exception& e) {
m_logger->error("Failed to load MCP config: {}", e.what());
return false;
}
}
int MCPClient::connectAll() {
int connected = 0;
for (const auto& [name, config] : m_configs) {
if (connect(name)) {
connected++;
}
}
m_logger->info("Connected to {}/{} MCP servers", connected, m_configs.size());
return connected;
}
bool MCPClient::connect(const std::string& serverName) {
auto configIt = m_configs.find(serverName);
if (configIt == m_configs.end()) {
m_logger->error("Unknown MCP server: {}", serverName);
return false;
}
if (m_servers.count(serverName) > 0 && m_servers[serverName].initialized) {
m_logger->debug("Server {} already connected", serverName);
return true;
}
m_logger->info("Connecting to MCP server: {}", serverName);
ConnectedServer server;
server.config = configIt->second;
server.transport = std::make_unique<StdioTransport>(server.config);
if (!server.transport->start()) {
m_logger->error("Failed to start MCP server: {}", serverName);
return false;
}
if (!initializeServer(server)) {
m_logger->error("Failed to initialize MCP server: {}", serverName);
server.transport->stop();
return false;
}
if (!discoverTools(server)) {
m_logger->warn("Failed to discover tools for: {}", serverName);
// Continue anyway, server might not have tools
}
m_servers[serverName] = std::move(server);
m_logger->info("Connected to MCP server: {} ({} tools)",
serverName, m_servers[serverName].tools.size());
return true;
}
void MCPClient::disconnect(const std::string& serverName) {
auto it = m_servers.find(serverName);
if (it == m_servers.end()) {
return;
}
if (it->second.transport) {
it->second.transport->stop();
}
m_servers.erase(it);
m_logger->info("Disconnected from MCP server: {}", serverName);
}
void MCPClient::disconnectAll() {
for (auto& [name, server] : m_servers) {
if (server.transport) {
server.transport->stop();
}
}
m_servers.clear();
}
bool MCPClient::initializeServer(ConnectedServer& server) {
// Send initialize request
JsonRpcRequest initReq;
initReq.method = "initialize";
initReq.params = {
{"protocolVersion", "2024-11-05"},
{"capabilities", {
{"roots", {{"listChanged", true}}}
}},
{"clientInfo", {
{"name", "AISSIA"},
{"version", "1.0.0"}
}}
};
auto response = server.transport->sendRequest(initReq, 10000);
if (response.isError()) {
m_logger->error("Initialize failed: {}",
response.error->dump());
return false;
}
if (response.result.has_value()) {
server.info = MCPServerInfo::fromJson(*response.result);
m_logger->debug("Server info: {} v{}",
server.info.name, server.info.version);
}
// Send initialized notification
server.transport->sendNotification("notifications/initialized", json::object());
server.initialized = true;
return true;
}
bool MCPClient::discoverTools(ConnectedServer& server) {
if (!server.info.capabilities.hasTools) {
m_logger->debug("Server {} does not support tools", server.config.name);
return true;
}
JsonRpcRequest listReq;
listReq.method = "tools/list";
listReq.params = json::object();
auto response = server.transport->sendRequest(listReq, 10000);
if (response.isError()) {
m_logger->error("tools/list failed: {}",
response.error->dump());
return false;
}
server.tools.clear();
if (response.result.has_value() && response.result->contains("tools")) {
for (const auto& toolJson : (*response.result)["tools"]) {
MCPTool tool = MCPTool::fromJson(toolJson);
server.tools.push_back(tool);
m_logger->debug("Discovered tool: {}:{}", server.config.name, tool.name);
}
}
return true;
}
std::vector<MCPTool> MCPClient::listAllTools() {
std::vector<MCPTool> allTools;
for (const auto& [serverName, server] : m_servers) {
for (const auto& tool : server.tools) {
MCPTool prefixedTool = tool;
prefixedTool.name = serverName + ":" + tool.name;
allTools.push_back(prefixedTool);
}
}
return allTools;
}
MCPToolResult MCPClient::callTool(const std::string& toolName, const json& arguments) {
auto [serverName, actualToolName] = parseToolName(toolName);
auto it = m_servers.find(serverName);
if (it == m_servers.end()) {
MCPToolResult error;
error.isError = true;
error.content = {{{"type", "text"}, {"text", "Server not connected: " + serverName}}};
return error;
}
JsonRpcRequest callReq;
callReq.method = "tools/call";
callReq.params = {
{"name", actualToolName},
{"arguments", arguments}
};
m_logger->debug("Calling tool {}:{} with args: {}",
serverName, actualToolName, arguments.dump().substr(0, 100));
auto response = it->second.transport->sendRequest(callReq, 30000);
MCPToolResult result;
if (response.isError()) {
result.isError = true;
std::string errorMsg = response.error.has_value() ?
response.error->dump() : "Unknown error";
result.content = {{{"type", "text"}, {"text", errorMsg}}};
m_logger->error("Tool call failed: {}", errorMsg);
return result;
}
if (response.result.has_value()) {
auto& res = *response.result;
if (res.contains("content")) {
for (const auto& contentItem : res["content"]) {
result.content.push_back(contentItem);
}
}
result.isError = res.value("isError", false);
}
m_logger->debug("Tool {} returned {} content items",
toolName, result.content.size());
return result;
}
size_t MCPClient::toolCount() const {
size_t count = 0;
for (const auto& [_, server] : m_servers) {
count += server.tools.size();
}
return count;
}
bool MCPClient::isConnected(const std::string& serverName) const {
auto it = m_servers.find(serverName);
return it != m_servers.end() && it->second.initialized;
}
std::vector<std::string> MCPClient::getConnectedServers() const {
std::vector<std::string> names;
for (const auto& [name, _] : m_servers) {
names.push_back(name);
}
return names;
}
std::pair<std::string, std::string> MCPClient::parseToolName(const std::string& toolName) {
size_t colonPos = toolName.find(':');
if (colonPos == std::string::npos) {
// No prefix, try to find tool in any server
for (const auto& [serverName, server] : m_servers) {
for (const auto& tool : server.tools) {
if (tool.name == toolName) {
return {serverName, toolName};
}
}
}
return {"", toolName};
}
return {
toolName.substr(0, colonPos),
toolName.substr(colonPos + 1)
};
}
} // namespace aissia::mcp


@ -0,0 +1,110 @@
#pragma once
#include "MCPTypes.hpp"
#include "MCPTransport.hpp"
#include "StdioTransport.hpp"
#include <spdlog/spdlog.h>
#include <nlohmann/json.hpp>
#include <memory>
#include <map>
#include <vector>
#include <string>
namespace aissia::mcp {
using json = nlohmann::json;
/**
* @brief Client for managing multiple MCP servers
*
* Handles:
* - Loading server configurations
* - Starting/stopping servers
* - Initializing MCP protocol
* - Discovering and calling tools
*/
class MCPClient {
public:
MCPClient();
~MCPClient();
/**
* @brief Load server configurations from JSON file
* @param configPath Path to mcp.json config file
* @return true if config loaded successfully
*/
bool loadConfig(const std::string& configPath);
/**
* @brief Connect to all enabled servers
* @return Number of servers successfully connected
*/
int connectAll();
/**
* @brief Connect to a specific server by name
* @return true if connected successfully
*/
bool connect(const std::string& serverName);
/**
* @brief Disconnect from a specific server
*/
void disconnect(const std::string& serverName);
/**
* @brief Disconnect from all servers
*/
void disconnectAll();
/**
* @brief List all available tools across all connected servers
* @return Vector of tools with server prefix (e.g., "filesystem:read_file")
*/
std::vector<MCPTool> listAllTools();
/**
* @brief Call a tool by name
* @param toolName Full tool name (e.g., "filesystem:read_file")
* @param arguments Tool arguments
* @return Tool result
*/
MCPToolResult callTool(const std::string& toolName, const json& arguments);
/**
* @brief Get total number of available tools
*/
size_t toolCount() const;
/**
* @brief Check if a server is connected
*/
bool isConnected(const std::string& serverName) const;
/**
* @brief Get list of connected server names
*/
std::vector<std::string> getConnectedServers() const;
private:
struct ConnectedServer {
MCPServerConfig config;
std::unique_ptr<IMCPTransport> transport;
MCPServerInfo info;
std::vector<MCPTool> tools;
bool initialized = false;
};
std::shared_ptr<spdlog::logger> m_logger;
std::map<std::string, MCPServerConfig> m_configs;
std::map<std::string, ConnectedServer> m_servers;
// MCP protocol methods
bool initializeServer(ConnectedServer& server);
bool discoverTools(ConnectedServer& server);
std::pair<std::string, std::string> parseToolName(const std::string& toolName);
};
} // namespace aissia::mcp


@ -0,0 +1,51 @@
#pragma once
#include "MCPTypes.hpp"
#include <nlohmann/json.hpp>
#include <string>
#include <memory>
namespace aissia::mcp {
using json = nlohmann::json;
/**
* @brief Abstract transport interface for MCP communication
*/
class IMCPTransport {
public:
virtual ~IMCPTransport() = default;
/**
* @brief Start the transport (connect/spawn process)
* @return true if successful
*/
virtual bool start() = 0;
/**
* @brief Stop the transport
*/
virtual void stop() = 0;
/**
* @brief Check if transport is running
*/
virtual bool isRunning() const = 0;
/**
* @brief Send a JSON-RPC request and wait for response
* @param request The request to send
* @param timeoutMs Timeout in milliseconds
* @return The response
*/
virtual JsonRpcResponse sendRequest(const JsonRpcRequest& request, int timeoutMs = 30000) = 0;
/**
* @brief Send a notification (no response expected)
* @param method Method name
* @param params Parameters
*/
virtual void sendNotification(const std::string& method, const json& params) = 0;
};
} // namespace aissia::mcp

src/shared/mcp/MCPTypes.hpp (new file)

@ -0,0 +1,186 @@
#pragma once
#include <nlohmann/json.hpp>
#include <string>
#include <vector>
#include <optional>
namespace aissia::mcp {
using json = nlohmann::json;
/**
* @brief MCP Tool definition
*/
struct MCPTool {
std::string name;
std::string description;
json inputSchema;
json toJson() const {
return {
{"name", name},
{"description", description},
{"inputSchema", inputSchema}
};
}
static MCPTool fromJson(const json& j) {
MCPTool tool;
tool.name = j.value("name", "");
tool.description = j.value("description", "");
tool.inputSchema = j.value("inputSchema", json::object());
return tool;
}
};
/**
* @brief MCP Resource definition
*/
struct MCPResource {
std::string uri;
std::string name;
std::string description;
std::string mimeType;
static MCPResource fromJson(const json& j) {
MCPResource res;
res.uri = j.value("uri", "");
res.name = j.value("name", "");
res.description = j.value("description", "");
res.mimeType = j.value("mimeType", "");
return res;
}
};
/**
* @brief Result of a tool call
*/
struct MCPToolResult {
std::vector<json> content; // Array of content blocks
bool isError = false;
json toJson() const {
return {
{"content", content},
{"isError", isError}
};
}
};
/**
* @brief MCP Server capabilities
*/
struct MCPCapabilities {
bool hasTools = false;
bool hasResources = false;
bool hasPrompts = false;
static MCPCapabilities fromJson(const json& j) {
MCPCapabilities caps;
if (j.contains("tools")) caps.hasTools = true;
if (j.contains("resources")) caps.hasResources = true;
if (j.contains("prompts")) caps.hasPrompts = true;
return caps;
}
};
/**
* @brief MCP Server info after initialization
*/
struct MCPServerInfo {
std::string name;
std::string version;
MCPCapabilities capabilities;
static MCPServerInfo fromJson(const json& j) {
MCPServerInfo info;
info.name = j.value("name", "unknown");
info.version = j.value("version", "0.0.0");
if (j.contains("capabilities")) {
info.capabilities = MCPCapabilities::fromJson(j["capabilities"]);
}
return info;
}
};
/**
* @brief JSON-RPC request
*/
struct JsonRpcRequest {
std::string jsonrpc = "2.0";
int id;
std::string method;
json params;
json toJson() const {
json j = {
{"jsonrpc", jsonrpc},
{"id", id},
{"method", method}
};
if (!params.is_null()) {
j["params"] = params;
}
return j;
}
};
/**
* @brief JSON-RPC response
*/
struct JsonRpcResponse {
std::string jsonrpc;
int id;
std::optional<json> result;
std::optional<json> error;
bool isError() const { return error.has_value(); }
static JsonRpcResponse fromJson(const json& j) {
JsonRpcResponse resp;
resp.jsonrpc = j.value("jsonrpc", "2.0");
resp.id = j.value("id", 0);
if (j.contains("result")) {
resp.result = j["result"];
}
if (j.contains("error")) {
resp.error = j["error"];
}
return resp;
}
};
/**
* @brief MCP Server configuration
*/
struct MCPServerConfig {
std::string name;
std::string command;
std::vector<std::string> args;
std::map<std::string, std::string> env;
bool enabled = true;
static MCPServerConfig fromJson(const std::string& name, const json& j) {
MCPServerConfig config;
config.name = name;
config.command = j.value("command", "");
config.enabled = j.value("enabled", true);
if (j.contains("args") && j["args"].is_array()) {
for (const auto& arg : j["args"]) {
config.args.push_back(arg.get<std::string>());
}
}
if (j.contains("env") && j["env"].is_object()) {
for (auto& [key, value] : j["env"].items()) {
config.env[key] = value.get<std::string>();
}
}
return config;
}
};
} // namespace aissia::mcp


@ -0,0 +1,396 @@
#include "StdioTransport.hpp"
#include <spdlog/sinks/stdout_color_sinks.h>
#include <sstream>
#include <cstdlib>
namespace aissia::mcp {
StdioTransport::StdioTransport(const MCPServerConfig& config) : m_config(config) {
m_logger = spdlog::get("MCP:" + config.name);
if (!m_logger) {
m_logger = spdlog::stdout_color_mt("MCP:" + config.name);
}
}
StdioTransport::~StdioTransport() {
stop();
}
bool StdioTransport::start() {
if (m_running) {
return true;
}
m_logger->info("Starting MCP server: {} {}", m_config.command,
[&]() {
std::string args;
for (const auto& a : m_config.args) args += a + " ";
return args;
}());
if (!spawnProcess()) {
m_logger->error("Failed to spawn MCP server process");
return false;
}
m_running = true;
// Start reader thread
m_readerThread = std::thread(&StdioTransport::readerLoop, this);
m_logger->info("MCP server started successfully");
return true;
}
void StdioTransport::stop() {
if (!m_running) {
return;
}
m_running = false;
// Close write pipe to signal EOF to child
#ifdef _WIN32
if (m_stdinWrite) {
CloseHandle(m_stdinWrite);
m_stdinWrite = nullptr;
}
#else
if (m_stdinFd >= 0) {
close(m_stdinFd);
m_stdinFd = -1;
}
#endif
// Wait for reader thread
if (m_readerThread.joinable()) {
m_readerThread.join();
}
// Terminate process
#ifdef _WIN32
if (m_processHandle) {
TerminateProcess(m_processHandle, 0);
CloseHandle(m_processHandle);
m_processHandle = nullptr;
}
if (m_stdoutRead) {
CloseHandle(m_stdoutRead);
m_stdoutRead = nullptr;
}
#else
if (m_pid > 0) {
kill(m_pid, SIGTERM);
waitpid(m_pid, nullptr, 0);
m_pid = -1;
}
if (m_stdoutFd >= 0) {
close(m_stdoutFd);
m_stdoutFd = -1;
}
#endif
m_logger->info("MCP server stopped");
}
bool StdioTransport::isRunning() const {
return m_running;
}
bool StdioTransport::spawnProcess() {
#ifdef _WIN32
// Windows implementation
SECURITY_ATTRIBUTES sa;
sa.nLength = sizeof(SECURITY_ATTRIBUTES);
sa.bInheritHandle = TRUE;
sa.lpSecurityDescriptor = nullptr;
HANDLE stdinRead, stdoutWrite;
// Create pipes
if (!CreatePipe(&stdinRead, &m_stdinWrite, &sa, 0)) {
return false;
}
if (!CreatePipe(&m_stdoutRead, &stdoutWrite, &sa, 0)) {
CloseHandle(stdinRead);
CloseHandle(m_stdinWrite);
return false;
}
// Don't inherit our end of the pipes
SetHandleInformation(m_stdinWrite, HANDLE_FLAG_INHERIT, 0);
SetHandleInformation(m_stdoutRead, HANDLE_FLAG_INHERIT, 0);
// Build command line
std::string cmdLine = m_config.command;
for (const auto& arg : m_config.args) {
cmdLine += " " + arg;
}
// Setup environment
std::string envBlock;
for (const auto& [key, value] : m_config.env) {
std::string resolvedValue = value;
// Resolve ${VAR} references
if (value.find("${") != std::string::npos) {
size_t start = value.find("${");
size_t end = value.find("}");
if (end > start) {
std::string envVar = value.substr(start + 2, end - start - 2);
const char* envVal = std::getenv(envVar.c_str());
if (envVal) {
resolvedValue = envVal;
}
}
}
envBlock += key + "=" + resolvedValue + '\0';
}
envBlock += '\0';
STARTUPINFOA si = {};
si.cb = sizeof(si);
si.hStdInput = stdinRead;
si.hStdOutput = stdoutWrite;
si.hStdError = GetStdHandle(STD_ERROR_HANDLE);
si.dwFlags |= STARTF_USESTDHANDLES;
PROCESS_INFORMATION pi = {};
BOOL success = CreateProcessA(
nullptr,
const_cast<char*>(cmdLine.c_str()),
nullptr, nullptr,
TRUE,
0,
envBlock.empty() ? nullptr : const_cast<char*>(envBlock.c_str()),
nullptr,
&si, &pi
);
CloseHandle(stdinRead);
CloseHandle(stdoutWrite);
if (!success) {
CloseHandle(m_stdinWrite);
CloseHandle(m_stdoutRead);
return false;
}
m_processHandle = pi.hProcess;
CloseHandle(pi.hThread);
return true;
#else
// Unix implementation
int stdinPipe[2];
int stdoutPipe[2];
if (pipe(stdinPipe) < 0 || pipe(stdoutPipe) < 0) {
return false;
}
m_pid = fork();
if (m_pid < 0) {
close(stdinPipe[0]);
close(stdinPipe[1]);
close(stdoutPipe[0]);
close(stdoutPipe[1]);
return false;
}
if (m_pid == 0) {
// Child process
close(stdinPipe[1]); // Close write end of stdin
close(stdoutPipe[0]); // Close read end of stdout
dup2(stdinPipe[0], STDIN_FILENO);
dup2(stdoutPipe[1], STDOUT_FILENO);
close(stdinPipe[0]);
close(stdoutPipe[1]);
// Set environment variables
for (const auto& [key, value] : m_config.env) {
std::string resolvedValue = value;
if (value.find("${") != std::string::npos) {
size_t start = value.find("${");
size_t end = value.find("}");
if (end > start) {
std::string envVar = value.substr(start + 2, end - start - 2);
const char* envVal = std::getenv(envVar.c_str());
if (envVal) {
resolvedValue = envVal;
}
}
}
setenv(key.c_str(), resolvedValue.c_str(), 1);
}
// Build argv
std::vector<char*> argv;
argv.push_back(const_cast<char*>(m_config.command.c_str()));
for (const auto& arg : m_config.args) {
argv.push_back(const_cast<char*>(arg.c_str()));
}
argv.push_back(nullptr);
execvp(m_config.command.c_str(), argv.data());
_exit(1);
}
// Parent process
close(stdinPipe[0]); // Close read end of stdin
close(stdoutPipe[1]); // Close write end of stdout
m_stdinFd = stdinPipe[1];
m_stdoutFd = stdoutPipe[0];
return true;
#endif
}
void StdioTransport::readerLoop() {
m_logger->debug("Reader thread started");
while (m_running) {
std::string line = readLine();
if (line.empty()) {
if (m_running) {
m_logger->warn("EOF from MCP server");
}
break;
}
try {
json message = json::parse(line);
handleMessage(message);
} catch (const json::exception& e) {
m_logger->error("Failed to parse MCP message: {}", e.what());
}
}
m_logger->debug("Reader thread stopped");
}
void StdioTransport::handleMessage(const json& message) {
// Check if this is a response
if (message.contains("id") && (message.contains("result") || message.contains("error"))) {
int id = message["id"].get<int>();
std::lock_guard<std::mutex> lock(m_mutex);
auto it = m_pendingRequests.find(id);
if (it != m_pendingRequests.end()) {
auto pending = it->second;
{
std::lock_guard<std::mutex> reqLock(pending->mutex);
pending->response = JsonRpcResponse::fromJson(message);
pending->hasResponse = true;
}
pending->cv.notify_one();
}
}
// TODO: Handle notifications from server
}
JsonRpcResponse StdioTransport::sendRequest(const JsonRpcRequest& request, int timeoutMs) {
if (!m_running) {
JsonRpcResponse error;
error.error = json{{"code", -1}, {"message", "Transport not running"}};
return error;
}
// Create a mutable copy with assigned ID
JsonRpcRequest req = request;
req.id = m_nextRequestId++;
// Create pending request
auto pending = std::make_shared<PendingRequest>();
{
std::lock_guard<std::mutex> lock(m_mutex);
m_pendingRequests[req.id] = pending;
}
// Send request
std::string line = req.toJson().dump();
m_logger->debug("Sending: {}", line.substr(0, 200));
writeLine(line);
// Wait for response
std::unique_lock<std::mutex> lock(pending->mutex);
bool received = pending->cv.wait_for(lock, std::chrono::milliseconds(timeoutMs), [&] {
return pending->hasResponse;
});
// Cleanup
{
std::lock_guard<std::mutex> globalLock(m_mutex);
m_pendingRequests.erase(req.id);
}
if (!received) {
m_logger->warn("Request {} timed out", req.method);
JsonRpcResponse error;
error.id = req.id;
error.error = json{{"code", -2}, {"message", "Request timed out"}};
return error;
}
return pending->response;
}
void StdioTransport::sendNotification(const std::string& method, const json& params) {
if (!m_running) return;
json notification = {
{"jsonrpc", "2.0"},
{"method", method}
};
if (!params.is_null()) {
notification["params"] = params;
}
writeLine(notification.dump());
}
void StdioTransport::writeLine(const std::string& line) {
std::string data = line + "\n";
#ifdef _WIN32
DWORD written;
WriteFile(m_stdinWrite, data.c_str(), static_cast<DWORD>(data.size()), &written, nullptr);
#else
ssize_t result = write(m_stdinFd, data.c_str(), data.size());
(void)result; // Ignore return value for now
#endif
}
std::string StdioTransport::readLine() {
std::string line;
char c;
#ifdef _WIN32
DWORD bytesRead;
while (m_running && ReadFile(m_stdoutRead, &c, 1, &bytesRead, nullptr) && bytesRead > 0) {
if (c == '\n') break;
if (c != '\r') line += c;
}
#else
while (m_running) {
ssize_t n = read(m_stdoutFd, &c, 1);
if (n <= 0) break;
if (c == '\n') break;
if (c != '\r') line += c;
}
#endif
if (!line.empty()) {
m_logger->debug("Received: {}", line.substr(0, 200));
}
return line;
}
} // namespace aissia::mcp


@ -0,0 +1,80 @@
#pragma once
#include "MCPTransport.hpp"
#include <spdlog/spdlog.h>
#include <thread>
#include <mutex>
#include <condition_variable>
#include <queue>
#include <atomic>
#include <unordered_map>
#ifdef _WIN32
#include <windows.h>
#else
#include <unistd.h>
#include <sys/wait.h>
#include <signal.h>
#endif
namespace aissia::mcp {
/**
* @brief Stdio transport for MCP servers
*
* Spawns a child process and communicates via stdin/stdout using JSON-RPC.
*/
class StdioTransport : public IMCPTransport {
public:
StdioTransport(const MCPServerConfig& config);
~StdioTransport() override;
bool start() override;
void stop() override;
bool isRunning() const override;
JsonRpcResponse sendRequest(const JsonRpcRequest& request, int timeoutMs = 30000) override;
void sendNotification(const std::string& method, const json& params) override;
private:
MCPServerConfig m_config;
std::shared_ptr<spdlog::logger> m_logger;
std::atomic<bool> m_running{false};
std::atomic<int> m_nextRequestId{1};
// Process handles
#ifdef _WIN32
HANDLE m_processHandle = nullptr;
HANDLE m_stdinWrite = nullptr;
HANDLE m_stdoutRead = nullptr;
#else
pid_t m_pid = -1;
int m_stdinFd = -1;
int m_stdoutFd = -1;
#endif
// Reader thread
std::thread m_readerThread;
std::mutex m_mutex;
std::condition_variable m_responseCV;
// Pending requests
struct PendingRequest {
std::mutex mutex;
std::condition_variable cv;
JsonRpcResponse response;
bool hasResponse = false;
};
std::unordered_map<int, std::shared_ptr<PendingRequest>> m_pendingRequests;
// Methods
bool spawnProcess();
void readerLoop();
void writeLine(const std::string& line);
std::string readLine();
void handleMessage(const json& message);
};
} // namespace aissia::mcp


@ -1,5 +1,6 @@
#include "IWindowTracker.hpp"
#include <spdlog/spdlog.h>
#include <spdlog/sinks/stdout_color_sinks.h>
#ifdef _WIN32
#include "Win32WindowTracker.hpp"

View File

@ -0,0 +1,187 @@
#pragma once
#include <grove/IIO.h>
#include <grove/JsonDataNode.h>
#include <nlohmann/json.hpp>
#include <spdlog/spdlog.h>
#include <spdlog/sinks/stdout_color_sinks.h>
#include <mutex>
#include <condition_variable>
#include <queue>
#include <chrono>
#include <atomic>
#include <unordered_map>
namespace aissia {
using json = nlohmann::json;
/**
* @brief Synchronous request/response bridge over IIO pub/sub
*
* Allows tools to make blocking requests to modules and wait for responses.
* Each request gets a unique correlation ID to match responses.
*
* Usage:
*   auto response = bridge.request("scheduler:query", {{"action", "get_current_task"}}, 1000);
*/
class IOBridge {
public:
explicit IOBridge(grove::IIO* io) : m_io(io) {
m_logger = spdlog::get("IOBridge");
if (!m_logger) {
m_logger = spdlog::stdout_color_mt("IOBridge");
}
}
/**
* @brief Send a request and wait for response
*
* @param topic Topic to publish request to
* @param request Request data (will add correlation_id)
* @param timeoutMs Timeout in milliseconds
* @return Response data or error JSON
*/
json request(const std::string& topic, const json& request, int timeoutMs = 5000) {
std::string correlationId = generateCorrelationId();
// Create response promise and register it under the global lock
auto pending = std::make_shared<PendingRequest>();
{
std::lock_guard<std::mutex> lock(m_mutex);
m_pendingRequests[correlationId] = pending;
}
// Build request with correlation ID
auto requestNode = std::make_unique<grove::JsonDataNode>("request");
requestNode->setString("correlation_id", correlationId);
// Copy scalar fields from the input request (array/object values are not forwarded by this bridge)
for (auto& [key, value] : request.items()) {
if (value.is_string()) {
requestNode->setString(key, value.get<std::string>());
} else if (value.is_number_integer()) {
requestNode->setInt(key, value.get<int>());
} else if (value.is_number_float()) {
requestNode->setDouble(key, value.get<double>());
} else if (value.is_boolean()) {
requestNode->setBool(key, value.get<bool>());
}
}
// Publish request
m_io->publish(topic, std::move(requestNode));
m_logger->debug("Request sent to {} with correlation_id={}", topic, correlationId);
// Wait for response on the PendingRequest captured above
std::unique_lock<std::mutex> lock(pending->mutex);
bool received = pending->cv.wait_for(lock, std::chrono::milliseconds(timeoutMs), [&] {
return pending->hasResponse;
});
// Cleanup
{
std::lock_guard<std::mutex> globalLock(m_mutex);
m_pendingRequests.erase(correlationId);
}
if (!received) {
m_logger->warn("Request to {} timed out after {}ms", topic, timeoutMs);
return {{"error", "timeout"}, {"message", "Request timed out"}};
}
return pending->response;
}
/**
* @brief Process incoming response messages
*
* Call this from the service's process loop to handle responses
* that come back from modules.
*
* @param topic The topic the message was received on
* @param data The message data
*/
void handleResponse(const std::string& topic, const grove::IDataNode& data) {
std::string correlationId = data.getString("correlation_id", "");
if (correlationId.empty()) {
return; // Not a response to our request
}
std::shared_ptr<PendingRequest> pending;
{
std::lock_guard<std::mutex> lock(m_mutex);
auto it = m_pendingRequests.find(correlationId);
if (it == m_pendingRequests.end()) {
m_logger->debug("Received response for unknown correlation_id={}", correlationId);
return;
}
pending = it->second;
}
// Convert IDataNode to JSON
json response;
// Extract common fields - this is a simplified conversion
// For complex nested data, we'd need to traverse the tree
auto* jsonNode = dynamic_cast<const grove::JsonDataNode*>(&data);
if (jsonNode) {
response = jsonNode->getJsonData();
} else {
// Fallback: extract known field types
response["correlation_id"] = correlationId;
// Add more fields as needed based on what modules return
}
// Signal the waiting thread
{
std::lock_guard<std::mutex> lock(pending->mutex);
pending->response = response;
pending->hasResponse = true;
}
pending->cv.notify_one();
m_logger->debug("Response received for correlation_id={}", correlationId);
}
/**
* @brief Subscribe to response topics
*
* Call this during initialization to listen for responses
*/
void subscribeToResponses() {
if (!m_io) return;
grove::SubscriptionConfig config;
// Subscribe to all response topics
m_io->subscribe("scheduler:response", config);
m_io->subscribe("monitoring:response", config);
m_io->subscribe("storage:response", config);
m_io->subscribe("voice:response", config);
m_io->subscribe("tool:response", config);
}
private:
struct PendingRequest {
std::mutex mutex;
std::condition_variable cv;
json response;
bool hasResponse = false;
};
grove::IIO* m_io;
std::shared_ptr<spdlog::logger> m_logger;
std::mutex m_mutex;
std::unordered_map<std::string, std::shared_ptr<PendingRequest>> m_pendingRequests;
std::atomic<uint64_t> m_requestCounter{0};
std::string generateCorrelationId() {
auto now = std::chrono::steady_clock::now().time_since_epoch().count();
return "req_" + std::to_string(now) + "_" + std::to_string(m_requestCounter++);
}
};
} // namespace aissia
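The bridge only works if modules echo the `correlation_id` back on their `<module>:response` topic. Below is a sketch of the module side of that contract; the handler signature and the reply fields are illustrative, not the actual SchedulerModule code, and only API calls already used in this commit (`getString`, `setString`, `setInt`, `publish`) appear:

```cpp
// Sketch of a module-side handler for "scheduler:query" (illustrative only).
void onSchedulerQuery(grove::IIO* io, const grove::IDataNode& request) {
    const std::string correlationId = request.getString("correlation_id", "");
    const std::string action = request.getString("action", "");

    auto reply = std::make_unique<grove::JsonDataNode>("response");
    reply->setString("correlation_id", correlationId);   // must be echoed back

    if (action == "get_current_task") {
        reply->setString("task_id", "t-42");              // example values
        reply->setString("task_name", "Write MCP integration notes");
        reply->setInt("duration_minutes", 25);
    }

    // IOBridge::subscribeToResponses() listens on these ":response" topics.
    io->publish("scheduler:response", std::move(reply));
}
```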

View File

@ -0,0 +1,412 @@
#include "InternalTools.hpp"
#include <spdlog/sinks/stdout_color_sinks.h>
namespace aissia {
InternalTools::InternalTools(grove::IIO* io) : m_io(io) {
m_logger = spdlog::get("InternalTools");
if (!m_logger) {
m_logger = spdlog::stdout_color_mt("InternalTools");
}
m_bridge = std::make_unique<IOBridge>(io);
m_bridge->subscribeToResponses();
registerTools();
m_logger->info("InternalTools initialized with {} tools", m_tools.size());
}
void InternalTools::registerTools() {
// ========================================================================
// SCHEDULER TOOLS
// ========================================================================
m_tools.push_back({
"get_current_task",
"Get the task currently being worked on. Returns task name, duration, and status.",
{
{"type", "object"},
{"properties", json::object()},
{"required", json::array()}
},
[this](const json& input) { return getCurrentTask(input); }
});
m_tools.push_back({
"list_tasks",
"List all planned tasks for today. Returns task names, estimated durations, and completion status.",
{
{"type", "object"},
{"properties", {
{"include_completed", {
{"type", "boolean"},
{"description", "Whether to include completed tasks"},
{"default", false}
}}
}},
{"required", json::array()}
},
[this](const json& input) { return listTasks(input); }
});
m_tools.push_back({
"start_task",
"Start working on a specific task by its ID or name.",
{
{"type", "object"},
{"properties", {
{"task_id", {
{"type", "string"},
{"description", "Task ID or name to start"}
}}
}},
{"required", {"task_id"}}
},
[this](const json& input) { return startTask(input); }
});
m_tools.push_back({
"complete_task",
"Mark the current task or a specific task as completed.",
{
{"type", "object"},
{"properties", {
{"task_id", {
{"type", "string"},
{"description", "Task ID to complete. If not provided, completes current task."}
}}
}},
{"required", json::array()}
},
[this](const json& input) { return completeTask(input); }
});
m_tools.push_back({
"start_break",
"Start a break period. The user will be reminded when break is over.",
{
{"type", "object"},
{"properties", {
{"duration_minutes", {
{"type", "integer"},
{"description", "Break duration in minutes"},
{"default", 10}
}},
{"reason", {
{"type", "string"},
{"description", "Reason for break (coffee, stretch, etc.)"}
}}
}},
{"required", json::array()}
},
[this](const json& input) { return startBreak(input); }
});
// ========================================================================
// MONITORING TOOLS
// ========================================================================
m_tools.push_back({
"get_focus_stats",
"Get focus statistics: time spent on productive vs distracting apps, current session duration.",
{
{"type", "object"},
{"properties", {
{"period", {
{"type", "string"},
{"enum", {"today", "week", "month"}},
{"description", "Time period for stats"},
{"default", "today"}
}}
}},
{"required", json::array()}
},
[this](const json& input) { return getFocusStats(input); }
});
m_tools.push_back({
"get_current_app",
"Get the currently active application name and window title.",
{
{"type", "object"},
{"properties", json::object()},
{"required", json::array()}
},
[this](const json& input) { return getCurrentApp(input); }
});
// ========================================================================
// STORAGE TOOLS
// ========================================================================
m_tools.push_back({
"save_note",
"Save a note or reminder for the user. Notes are searchable and timestamped.",
{
{"type", "object"},
{"properties", {
{"content", {
{"type", "string"},
{"description", "Note content to save"}
}},
{"tags", {
{"type", "array"},
{"items", {{"type", "string"}}},
{"description", "Optional tags for categorization"}
}}
}},
{"required", {"content"}}
},
[this](const json& input) { return saveNote(input); }
});
m_tools.push_back({
"query_notes",
"Search through saved notes by keyword or tag.",
{
{"type", "object"},
{"properties", {
{"query", {
{"type", "string"},
{"description", "Search query"}
}},
{"tag", {
{"type", "string"},
{"description", "Filter by tag"}
}},
{"limit", {
{"type", "integer"},
{"description", "Maximum number of notes to return"},
{"default", 10}
}}
}},
{"required", json::array()}
},
[this](const json& input) { return queryNotes(input); }
});
m_tools.push_back({
"get_session_history",
"Get history of work sessions with durations and focus scores.",
{
{"type", "object"},
{"properties", {
{"days", {
{"type", "integer"},
{"description", "Number of days to look back"},
{"default", 7}
}}
}},
{"required", json::array()}
},
[this](const json& input) { return getSessionHistory(input); }
});
// ========================================================================
// VOICE TOOLS
// ========================================================================
m_tools.push_back({
"speak",
"Make the assistant speak a message out loud using text-to-speech.",
{
{"type", "object"},
{"properties", {
{"message", {
{"type", "string"},
{"description", "Message to speak"}
}},
{"priority", {
{"type", "string"},
{"enum", {"low", "normal", "high"}},
{"description", "Speech priority"},
{"default", "normal"}
}}
}},
{"required", {"message"}}
},
[this](const json& input) { return speak(input); }
});
}
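// For reference, the "start_task" entry above serializes to this JSON Schema:
//   {
//     "type": "object",
//     "properties": {
//       "task_id": { "type": "string", "description": "Task ID or name to start" }
//     },
//     "required": ["task_id"]
//   }
// How ToolRegistry/LLMService wraps these schemas for a given provider
// (Anthropic tools, OpenAI functions, ...) is outside this file.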
std::vector<ToolDefinition> InternalTools::getTools() const {
return m_tools;
}
void InternalTools::processResponses() {
if (!m_io) return;
while (m_io->hasMessages() > 0) {
auto msg = m_io->pullMessage();
// Check if this is a response message
if (msg.topic.find(":response") != std::string::npos && msg.data) {
m_bridge->handleResponse(msg.topic, *msg.data);
}
}
}
// ============================================================================
// TOOL IMPLEMENTATIONS
// ============================================================================
json InternalTools::getCurrentTask(const json& input) {
auto response = m_bridge->request("scheduler:query", {
{"action", "get_current_task"}
});
if (response.contains("error")) {
return response;
}
return {
{"task_id", response.value("task_id", "")},
{"task_name", response.value("task_name", "No active task")},
{"duration_minutes", response.value("duration_minutes", 0)},
{"started_at", response.value("started_at", "")}
};
}
json InternalTools::listTasks(const json& input) {
bool includeCompleted = input.value("include_completed", false);
auto response = m_bridge->request("scheduler:query", {
{"action", "list_tasks"},
{"include_completed", includeCompleted}
});
return response;
}
json InternalTools::startTask(const json& input) {
std::string taskId = input.value("task_id", "");
if (taskId.empty()) {
return {{"error", "missing_parameter"}, {"message", "task_id is required"}};
}
auto response = m_bridge->request("scheduler:command", {
{"action", "start_task"},
{"task_id", taskId}
});
return response;
}
json InternalTools::completeTask(const json& input) {
std::string taskId = input.value("task_id", "");
auto response = m_bridge->request("scheduler:command", {
{"action", "complete_task"},
{"task_id", taskId} // Empty = complete current task
});
return response;
}
json InternalTools::startBreak(const json& input) {
int duration = input.value("duration_minutes", 10);
std::string reason = input.value("reason", "break");
auto response = m_bridge->request("scheduler:command", {
{"action", "start_break"},
{"duration_minutes", duration},
{"reason", reason}
});
if (response.contains("error")) {
return response;
}
return {
{"success", true},
{"message", "Break started for " + std::to_string(duration) + " minutes"},
{"reason", reason}
};
}
json InternalTools::getFocusStats(const json& input) {
std::string period = input.value("period", "today");
auto response = m_bridge->request("monitoring:query", {
{"action", "get_focus_stats"},
{"period", period}
});
return response;
}
json InternalTools::getCurrentApp(const json& input) {
auto response = m_bridge->request("monitoring:query", {
{"action", "get_current_app"}
});
return response;
}
json InternalTools::saveNote(const json& input) {
std::string content = input.value("content", "");
if (content.empty()) {
return {{"error", "missing_parameter"}, {"message", "content is required"}};
}
json tags = input.value("tags", json::array());
auto response = m_bridge->request("storage:command", {
{"action", "save_note"},
{"content", content},
{"tags", tags}
});
if (response.contains("error")) {
return response;
}
return {
{"success", true},
{"message", "Note saved"},
{"note_id", response.value("note_id", "")}
};
}
json InternalTools::queryNotes(const json& input) {
std::string query = input.value("query", "");
std::string tag = input.value("tag", "");
int limit = input.value("limit", 10);
auto response = m_bridge->request("storage:query", {
{"action", "query_notes"},
{"query", query},
{"tag", tag},
{"limit", limit}
});
return response;
}
json InternalTools::getSessionHistory(const json& input) {
int days = input.value("days", 7);
auto response = m_bridge->request("storage:query", {
{"action", "get_session_history"},
{"days", days}
});
return response;
}
json InternalTools::speak(const json& input) {
std::string message = input.value("message", "");
if (message.empty()) {
return {{"error", "missing_parameter"}, {"message", "message is required"}};
}
std::string priority = input.value("priority", "normal");
// For speak, we don't wait for response - it's fire and forget
auto requestNode = std::make_unique<grove::JsonDataNode>("request");
requestNode->setString("action", "speak");
requestNode->setString("text", message);
requestNode->setString("priority", priority);
m_io->publish("voice:command", std::move(requestNode));
return {
{"success", true},
{"message", "Speech queued"}
};
}
} // namespace aissia
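A sketch of how a service could wire these tools in. Only the InternalTools API used here comes from the code above; the registration call `registerTool` is a hypothetical name, since ToolRegistry.hpp is not part of this diff:

```cpp
#include "InternalTools.hpp"
#include <atomic>

// `registry` stands in for the ToolRegistry declared in ToolRegistry.hpp;
// its registration method name is assumed here.
void wireInternalTools(grove::IIO* io, aissia::ToolRegistry& registry,
                       std::atomic<bool>& running) {
    aissia::InternalTools tools(io);

    for (const auto& def : tools.getTools()) {
        registry.registerTool(def);   // hypothetical method name
    }

    // Pump module replies back into the IOBridge so blocking tool calls
    // (getCurrentTask, saveNote, ...) can complete. This belongs in the
    // service's existing process loop.
    while (running) {
        tools.processResponses();
    }
}
```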

View File

@ -0,0 +1,70 @@
#pragma once
#include "IOBridge.hpp"
#include "../llm/ToolRegistry.hpp"
#include <grove/IIO.h>
#include <nlohmann/json.hpp>
#include <spdlog/spdlog.h>
#include <vector>
#include <memory>
namespace aissia {
using json = nlohmann::json;
/**
* @brief Internal tools that communicate with GroveEngine modules via IIO
*
* These tools allow the LLM to interact with:
* - SchedulerModule: tasks, breaks, hyperfocus
* - MonitoringModule: app usage, focus stats
* - StorageModule: notes, history
* - VoiceModule: TTS control
*/
class InternalTools {
public:
explicit InternalTools(grove::IIO* io);
/**
* @brief Get all tool definitions for registration
*/
std::vector<ToolDefinition> getTools() const;
/**
* @brief Process incoming responses from modules
*
* Call this in the service's process loop
*/
void processResponses();
/**
* @brief Get number of registered tools
*/
size_t size() const { return m_tools.size(); }
private:
grove::IIO* m_io;
std::unique_ptr<IOBridge> m_bridge;
std::shared_ptr<spdlog::logger> m_logger;
std::vector<ToolDefinition> m_tools;
// Tool implementations
json getCurrentTask(const json& input);
json listTasks(const json& input);
json startTask(const json& input);
json completeTask(const json& input);
json startBreak(const json& input);
json getFocusStats(const json& input);
json getCurrentApp(const json& input);
json saveNote(const json& input);
json queryNotes(const json& input);
json getSessionHistory(const json& input);
json speak(const json& input);
// Helper to create tool definitions
void registerTools();
};
} // namespace aissia