#pragma once

#include "ILLMProvider.hpp"

#include <memory>
#include <string>
#include <nlohmann/json.hpp>

namespace aissia {

/**
 * @brief Factory for creating LLM providers
 *
 * Supported providers:
 * - "claude": Anthropic Claude (requires ANTHROPIC_API_KEY)
 * - "openai": OpenAI GPT (requires OPENAI_API_KEY)
 * - "ollama": Local Ollama (no API key required)
 *
 * Configuration format:
 * {
 *   "provider": "claude",
 *   "providers": {
 *     "claude": {
 *       "api_key_env": "ANTHROPIC_API_KEY",
 *       "model": "claude-sonnet-4-20250514",
 *       "max_tokens": 4096,
 *       "base_url": "https://api.anthropic.com"
 *     },
 *     "openai": { ... },
 *     "ollama": { ... }
 *   }
 * }
 */
class LLMProviderFactory {
public:
    /**
     * @brief Create a provider from configuration
     *
     * @param config Full configuration object
     * @return Unique pointer to the provider
     * @throws std::runtime_error if the provider is unknown or the config is invalid
     */
    static std::unique_ptr<ILLMProvider> create(const nlohmann::json& config);

    /**
     * @brief Create a specific provider by name
     *
     * @param providerName Provider identifier (claude, openai, ollama)
     * @param providerConfig Provider-specific configuration
     * @return Unique pointer to the provider
     */
    static std::unique_ptr<ILLMProvider> createProvider(
        const std::string& providerName,
        const nlohmann::json& providerConfig);
};

} // namespace aissia
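
// Example usage (a minimal sketch, not part of this header): it assumes a JSON
// configuration in the format documented above, loaded with nlohmann::json from
// a file whose path ("config/llm.json") is purely illustrative. Only the two
// factory functions declared here are used; any methods later called on the
// returned provider come from ILLMProvider.hpp, not from this file.
//
//   #include <fstream>
//
//   std::ifstream file("config/llm.json");              // hypothetical config path
//   nlohmann::json config = nlohmann::json::parse(file);
//
//   // Construct the provider named in config["provider"]; throws
//   // std::runtime_error if the name is unknown or the config is invalid.
//   auto provider = aissia::LLMProviderFactory::create(config);
//
//   // Or construct a specific provider directly from its sub-configuration:
//   auto ollama = aissia::LLMProviderFactory::createProvider(
//       "ollama", config["providers"]["ollama"]);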