Implement LLM provider configuration and update user settings
- Added a new endpoint in UserController to update a user's default LLM provider.
- Introduced the LlmProvider enum to manage the available LLM options: Auto, Gemini, OpenAI, and Claude (a brief sketch of these types follows this list).
- Updated the User and UserEntity models to include a DefaultLlmProvider property.
- Enhanced the database context and migrations to support the new LLM provider configuration.
- Integrated the LLM services into the application bootstrap for dependency injection.
- Updated the TypeScript API client with methods for managing LLM providers and chat requests.
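For context, here is a minimal sketch of the provider configuration described above. Only the names LlmProvider, Auto, Gemini, OpenAI, Claude, and DefaultLlmProvider come from this commit message; the member order, values, and default are assumptions, since those files are not part of the diff shown below.

// Sketch only, not the committed source; values and default are assumed.
public enum LlmProvider
{
    Auto,      // backend selects a configured provider
    Gemini,
    OpenAI,
    Claude
}

public class User
{
    // ...existing user properties...

    // Provider used when a chat request does not specify one (assumed default).
    public LlmProvider DefaultLlmProvider { get; set; } = LlmProvider.Auto;
}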
src/Managing.Api/Controllers/LlmController.cs (new file, 162 lines)
@@ -0,0 +1,162 @@
using Managing.Application.Abstractions.Services;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;

namespace Managing.Api.Controllers;

/// <summary>
/// Controller for LLM (Large Language Model) operations with MCP tool calling support.
/// Provides endpoints for chat interactions with automatic provider selection and BYOK (Bring Your Own Key) support.
/// </summary>
[ApiController]
[Authorize]
[Route("[controller]")]
[Produces("application/json")]
public class LlmController : BaseController
{
    private readonly ILlmService _llmService;
    private readonly IMcpService _mcpService;
    private readonly ILogger<LlmController> _logger;

    public LlmController(
        ILlmService llmService,
        IMcpService mcpService,
        IUserService userService,
        ILogger<LlmController> logger) : base(userService)
    {
        _llmService = llmService;
        _mcpService = mcpService;
        _logger = logger;
    }

    /// <summary>
    /// Sends a chat message to an LLM with automatic provider selection and MCP tool calling support.
    /// Supports both auto mode (backend selects provider) and BYOK (user provides API key).
    /// </summary>
    /// <param name="request">The chat request with messages and optional provider/API key</param>
    /// <returns>The LLM response with tool calls if applicable</returns>
    [HttpPost]
    [Route("Chat")]
    public async Task<ActionResult<LlmChatResponse>> Chat([FromBody] LlmChatRequest request)
    {
        if (request == null)
        {
            return BadRequest("Chat request is required");
        }

        if (request.Messages == null || !request.Messages.Any())
        {
            return BadRequest("At least one message is required");
        }

        try
        {
            var user = await GetUser();

            // Get available MCP tools
            var availableTools = await _mcpService.GetAvailableToolsAsync();
            request.Tools = availableTools.ToList();

            // Send chat request to LLM
            var response = await _llmService.ChatAsync(user, request);

            // If LLM wants to call tools, execute them and get final response
            if (response.RequiresToolExecution && response.ToolCalls?.Any() == true)
            {
                _logger.LogInformation("LLM requested {Count} tool calls for user {UserId}",
                    response.ToolCalls.Count, user.Id);

                // Execute all tool calls
                var toolResults = new List<LlmMessage>();
                foreach (var toolCall in response.ToolCalls)
                {
                    try
                    {
                        var toolResult = await _mcpService.ExecuteToolAsync(user, toolCall.Name, toolCall.Arguments);
                        toolResults.Add(new LlmMessage
                        {
                            Role = "tool",
                            Content = System.Text.Json.JsonSerializer.Serialize(toolResult),
                            ToolCallId = toolCall.Id
                        });
                        _logger.LogInformation("Successfully executed tool {ToolName} for user {UserId}",
                            toolCall.Name, user.Id);
                    }
                    catch (Exception ex)
                    {
                        _logger.LogError(ex, "Error executing tool {ToolName} for user {UserId}",
                            toolCall.Name, user.Id);
                        toolResults.Add(new LlmMessage
                        {
                            Role = "tool",
                            Content = $"Error executing tool: {ex.Message}",
                            ToolCallId = toolCall.Id
                        });
                    }
                }

                // Add assistant message with tool calls
                request.Messages.Add(new LlmMessage
                {
                    Role = "assistant",
                    Content = response.Content,
                    ToolCalls = response.ToolCalls
                });

                // Add tool results
                request.Messages.AddRange(toolResults);

                // Get final response from LLM
                var finalResponse = await _llmService.ChatAsync(user, request);
                return Ok(finalResponse);
            }

            return Ok(response);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Error processing chat request for user");
            return StatusCode(500, $"Error processing chat request: {ex.Message}");
        }
    }

    /// <summary>
    /// Gets the list of available LLM providers configured on the backend.
    /// </summary>
    /// <returns>List of provider names</returns>
    [HttpGet]
    [Route("Providers")]
    public async Task<ActionResult<IEnumerable<string>>> GetProviders()
    {
        try
        {
            var providers = await _llmService.GetAvailableProvidersAsync();
            return Ok(providers);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Error getting available providers");
            return StatusCode(500, $"Error getting available providers: {ex.Message}");
        }
    }

    /// <summary>
    /// Gets the list of available MCP tools that the LLM can call.
    /// </summary>
    /// <returns>List of MCP tools with their descriptions and parameters</returns>
    [HttpGet]
    [Route("Tools")]
    public async Task<ActionResult<IEnumerable<McpToolDefinition>>> GetTools()
    {
        try
        {
            var tools = await _mcpService.GetAvailableToolsAsync();
            return Ok(tools);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Error getting available tools");
            return StatusCode(500, $"Error getting available tools: {ex.Message}");
        }
    }
}
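For illustration, here is a minimal client-side sketch of calling the new Chat endpoint in auto mode. The route (Llm/Chat) and the Messages/Role/Content names come from the controller above; the host, the bearer token, and the anonymous JSON payload shape are assumptions and not part of this commit.

using System;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Net.Http.Json;

// Sketch only: POST /Llm/Chat without specifying a provider or API key (backend picks one).
var client = new HttpClient { BaseAddress = new Uri("https://localhost:5001/") };  // assumed host
client.DefaultRequestHeaders.Authorization =
    new AuthenticationHeaderValue("Bearer", "<access token>");                     // [Authorize] requires an authenticated call

var response = await client.PostAsJsonAsync("Llm/Chat", new
{
    messages = new[]
    {
        new { role = "user", content = "Which tools can you call for me?" }
    }
});

response.EnsureSuccessStatusCode();
Console.WriteLine(await response.Content.ReadAsStringAsync());                     // LlmChatResponse as JSON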