aissia/src/services/LLMService.hpp
StillHammer d17ee5fbdc feat: AISSIA rename and codebase updates
- Renamed project from Celuna to AISSIA
- Updated all documentation and configuration files
- Codebase improvements and fixes

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-03 18:37:13 +07:00

141 lines
4.1 KiB
C++

#pragma once

// Project headers
#include "IService.hpp"
#include "../shared/llm/ILLMProvider.hpp"
#include "../shared/llm/ToolRegistry.hpp"
#include "../shared/tools/InternalTools.hpp"
#include "../shared/tools/FileSystemTools.hpp"
#include "../shared/mcp/MCPClient.hpp"

// Third-party
#include <grove/IIO.h>
#include <grove/JsonDataNode.h>
#include <spdlog/spdlog.h>

// Standard library
#include <atomic>
#include <condition_variable>
#include <functional>  // std::function — tool handler callbacks (registerTool)
#include <map>         // std::map — conversation history (m_conversations)
#include <memory>
#include <mutex>
#include <queue>
#include <string>
#include <thread>
namespace celuna {
/**
 * @brief LLM Service - Async HTTP calls to LLM providers
 *
 * Handles all LLM API calls in a background worker thread. Requests are
 * queued via the IIO message bus and drained by the worker; completed
 * responses are published back on the bus.
 *
 * Subscribes to:
 * - "llm:request" : { query, systemPrompt?, tools?, conversationId? }
 *
 * Publishes:
 * - "llm:response" : { text, conversationId, tokens, iterations }
 * - "llm:error" : { message, conversationId }
 * - "llm:thinking" : { conversationId } (during agentic loop)
 *
 * Threading: the request and response queues are each guarded by their own
 * mutex; the worker thread is woken through m_requestCV and stopped via
 * m_running.
 *
 * NOTE(review): this header still lives in namespace `celuna` despite the
 * AISSIA rename — confirm whether the namespace is intentionally legacy.
 */
class LLMService : public IService {
public:
    LLMService();
    ~LLMService() override;

    // The service owns a worker thread, mutexes and a condition variable,
    // so copy/move were already implicitly unavailable (user-declared dtor
    // suppresses moves; mutex/atomic members suppress copies). Delete them
    // explicitly so the intent is visible and diagnostics are clearer.
    LLMService(const LLMService&) = delete;
    LLMService& operator=(const LLMService&) = delete;
    LLMService(LLMService&&) = delete;
    LLMService& operator=(LLMService&&) = delete;

    /// @brief Set up IIO subscriptions and start the service.
    /// @param io Message bus (non-owning; stored in m_io).
    bool initialize(grove::IIO* io) override;

    /// @brief Main-thread tick: pump incoming messages and publish results.
    void process() override;

    /// @brief Stop the worker thread and release resources.
    void shutdown() override;

    std::string getName() const override { return "LLMService"; }

    /// Healthy once a provider has been loaded.
    /// NOTE(review): reads m_provider without synchronization — assumed to be
    /// called only after single-threaded init; confirm against callers.
    bool isHealthy() const override { return m_provider != nullptr; }

    /// Load provider from config file
    /// @param configPath Path to the provider configuration file.
    /// @return true on success.
    bool loadConfig(const std::string& configPath);

    /// Register a tool that can be called by the LLM
    /// @param name        Tool name exposed to the model.
    /// @param description Human-readable tool description.
    /// @param schema      JSON schema of the tool's parameters.
    /// @param handler     Callback invoked with the tool's arguments; returns
    ///                    the tool result as JSON.
    void registerTool(const std::string& name, const std::string& description,
                      const nlohmann::json& schema,
                      std::function<nlohmann::json(const nlohmann::json&)> handler);

    /// Load and initialize all tools (internal + MCP)
    void initializeTools();

    /// Load MCP server configurations
    /// @param configPath Path to the MCP configuration file.
    /// @return true on success.
    bool loadMCPConfig(const std::string& configPath);

    /**
     * @brief Synchronous response structure for MCP Server mode
     */
    struct SyncResponse {
        std::string text;       ///< Final assistant text.
        int tokens = 0;         ///< Token count reported by the provider.
        int iterations = 0;     ///< Agentic-loop iterations performed.
    };

    /**
     * @brief Send message synchronously (blocking, for MCP Server mode)
     *
     * @param message User message
     * @param conversationId Conversation ID (optional)
     * @param systemPrompt Custom system prompt (optional)
     * @return Sync response with text, tokens, iterations
     */
    SyncResponse sendMessageSync(
        const std::string& message,
        const std::string& conversationId = "",
        const std::string& systemPrompt = ""
    );

private:
    /// A queued LLM request, as decoded from an "llm:request" message.
    struct Request {
        std::string query;
        std::string systemPrompt;
        std::string conversationId;
        nlohmann::json tools;
        int maxIterations = 10;   ///< Cap on agentic-loop rounds.
    };

    /// Result of processing one Request; published as "llm:response"/"llm:error".
    struct Response {
        std::string text;
        std::string conversationId;
        int tokens = 0;
        int iterations = 0;
        bool isError = false;     ///< When true, publish on "llm:error".
    };

    // Configuration
    std::string m_providerName = "claude";
    std::string m_defaultSystemPrompt;
    int m_maxIterations = 10;

    // State
    std::unique_ptr<ILLMProvider> m_provider;
    ToolRegistry m_toolRegistry;
    std::unique_ptr<InternalTools> m_internalTools;
    std::unique_ptr<mcp::MCPClient> m_mcpClient;
    std::map<std::string, nlohmann::json> m_conversations; // conversationId -> history

    // Threading
    std::thread m_workerThread;
    std::atomic<bool> m_running{false};
    std::queue<Request> m_requestQueue;    // guarded by m_requestMutex
    std::queue<Response> m_responseQueue;  // guarded by m_responseMutex
    std::mutex m_requestMutex;
    std::mutex m_responseMutex;
    std::condition_variable m_requestCV;   // wakes the worker on new requests

    // Services (non-owning)
    grove::IIO* m_io = nullptr;
    std::shared_ptr<spdlog::logger> m_logger;

    // Worker thread
    void workerLoop();
    Response processRequest(const Request& request);
    nlohmann::json agenticLoop(const std::string& query, const std::string& systemPrompt,
                               nlohmann::json& messages, const nlohmann::json& tools,
                               int maxIterations);

    // Message handling
    void processIncomingMessages();
    void publishResponses();
};
} // namespace celuna