using Managing.Application.Abstractions.Services;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;

namespace Managing.Api.Controllers;

/// <summary>
/// Controller for LLM (Large Language Model) operations with MCP tool calling support.
/// Provides endpoints for chat interactions with automatic provider selection and
/// BYOK (Bring Your Own Key) support.
/// </summary>
[ApiController]
[Authorize]
[Route("[controller]")]
[Produces("application/json")]
public class LlmController : BaseController
{
    // NOTE(review): generic type arguments in this file appear to have been stripped
    // by extraction ("Task>", "new List()", bare "ILogger"); they are reconstructed
    // here from context — verify the exact type names against the Abstractions project.
    private readonly ILlmService _llmService;
    private readonly IMcpService _mcpService;
    private readonly ILogger<LlmController> _logger;

    public LlmController(
        ILlmService llmService,
        IMcpService mcpService,
        IUserService userService,
        ILogger<LlmController> logger)
        : base(userService)
    {
        _llmService = llmService;
        _mcpService = mcpService;
        _logger = logger;
    }

    /// <summary>
    /// Sends a chat message to an LLM with automatic provider selection and MCP tool
    /// calling support. Supports both auto mode (backend selects provider) and BYOK
    /// (user provides API key).
    /// </summary>
    /// <param name="request">The chat request with messages and optional provider/API key.</param>
    /// <returns>The LLM response, with tool calls executed and folded back in if applicable.</returns>
    [HttpPost]
    [Route("Chat")]
    public async Task<ActionResult<LlmChatResponse>> Chat([FromBody] LlmChatRequest request)
    {
        if (request == null)
        {
            return BadRequest("Chat request is required");
        }

        if (request.Messages == null || !request.Messages.Any())
        {
            return BadRequest("At least one message is required");
        }

        try
        {
            var user = await GetUser();

            // Advertise every available MCP tool to the LLM for this request.
            var availableTools = await _mcpService.GetAvailableToolsAsync();
            request.Tools = availableTools.ToList();

            // Clarify up front that tools are optional and the LLM may answer directly.
            EnsureSystemMessage(request);

            // First round-trip: the LLM either answers or asks for tool calls.
            var response = await _llmService.ChatAsync(user, request);

            if (response.RequiresToolExecution && response.ToolCalls?.Any() == true)
            {
                _logger.LogInformation("LLM requested {Count} tool calls for user {UserId}",
                    response.ToolCalls.Count, user.Id);

                // Execute all requested tool calls; a failed tool becomes an error
                // message the LLM can see, rather than failing the whole request.
                var toolResults = new List<LlmMessage>();
                foreach (var toolCall in response.ToolCalls)
                {
                    try
                    {
                        var toolResult = await _mcpService.ExecuteToolAsync(user, toolCall.Name, toolCall.Arguments);
                        toolResults.Add(new LlmMessage
                        {
                            Role = "tool",
                            Content = System.Text.Json.JsonSerializer.Serialize(toolResult),
                            ToolCallId = toolCall.Id
                        });
                        _logger.LogInformation("Successfully executed tool {ToolName} for user {UserId}",
                            toolCall.Name, user.Id);
                    }
                    catch (Exception ex)
                    {
                        _logger.LogError(ex, "Error executing tool {ToolName} for user {UserId}",
                            toolCall.Name, user.Id);
                        toolResults.Add(new LlmMessage
                        {
                            Role = "tool",
                            Content = $"Error executing tool: {ex.Message}",
                            ToolCallId = toolCall.Id
                        });
                    }
                }

                // Replay the assistant's tool-call turn plus the tool results so the
                // follow-up request carries the full conversation.
                request.Messages.Add(new LlmMessage
                {
                    Role = "assistant",
                    Content = response.Content,
                    ToolCalls = response.ToolCalls
                });
                request.Messages.AddRange(toolResults);

                // Second round-trip: final answer incorporating the tool output.
                var finalResponse = await _llmService.ChatAsync(user, request);
                return Ok(finalResponse);
            }

            return Ok(response);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Error processing chat request for user");
            // NOTE(review): returning ex.Message exposes implementation detail to
            // clients; consider a generic message once callers no longer rely on it.
            return StatusCode(500, $"Error processing chat request: {ex.Message}");
        }
    }

    /// <summary>
    /// Prepends a default system message when the caller did not supply one, telling
    /// the LLM that tools are optional and direct answers are preferred for general
    /// questions.
    /// </summary>
    /// <param name="request">The chat request whose message list may be amended in place.</param>
    private static void EnsureSystemMessage(LlmChatRequest request)
    {
        if (request.Messages.Any(m => m.Role == "system"))
        {
            return;
        }

        request.Messages.Insert(0, new LlmMessage
        {
            Role = "system",
            Content =
                "You are a helpful AI assistant with expertise in quantitative finance, algorithmic trading, and financial mathematics. " +
                "You can answer questions directly using your knowledge. " +
                "Tools are available for specific operations (backtesting, agent management, market data retrieval, etc.) but are optional. " +
                "Use tools only when they are needed for the specific task. " +
                "For general questions, explanations, calculations, or discussions, respond directly without using tools."
        });
    }

    /// <summary>
    /// Gets the list of available LLM providers configured on the backend.
    /// </summary>
    /// <returns>List of provider names.</returns>
    [HttpGet]
    [Route("Providers")]
    public async Task<ActionResult<IEnumerable<string>>> GetProviders()
    {
        try
        {
            var providers = await _llmService.GetAvailableProvidersAsync();
            return Ok(providers);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Error getting available providers");
            return StatusCode(500, $"Error getting available providers: {ex.Message}");
        }
    }

    /// <summary>
    /// Gets the list of available MCP tools that the LLM can call.
    /// </summary>
    /// <returns>List of MCP tools with their descriptions and parameters.</returns>
    [HttpGet]
    [Route("Tools")]
    // NOTE(review): element type name reconstructed from stripped generics — confirm
    // the DTO returned by IMcpService.GetAvailableToolsAsync.
    public async Task<ActionResult<IEnumerable<McpTool>>> GetTools()
    {
        try
        {
            var tools = await _mcpService.GetAvailableToolsAsync();
            return Ok(tools);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Error getting available tools");
            return StatusCode(500, $"Error getting available tools: {ex.Message}");
        }
    }
}