Implement LLM provider configuration and update user settings
- Added functionality to update the default LLM provider for users via a new endpoint in UserController.
- Introduced LlmProvider enum to manage available LLM options: Auto, Gemini, OpenAI, and Claude.
- Updated User and UserEntity models to include DefaultLlmProvider property.
- Enhanced database context and migrations to support the new LLM provider configuration.
- Integrated LLM services into the application bootstrap for dependency injection.
- Updated TypeScript API client to include methods for managing LLM providers and chat requests.
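For reference, the new endpoints can be exercised end to end; a minimal C# sketch, assuming a locally hosted instance and a valid bearer token (the base URL, token, and prompt are illustrative, not part of this commit):

```csharp
using System.Net.Http.Headers;
using System.Net.Http.Json;

// Hypothetical base address and token; adjust for your deployment.
var client = new HttpClient { BaseAddress = new Uri("https://localhost:5001/") };
client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", "<jwt>");

// Persist a default provider for the current user (PUT User/default-llm-provider).
var put = await client.PutAsJsonAsync("User/default-llm-provider", "Claude");
put.EnsureSuccessStatusCode();

// Chat with auto-selection: no Provider or ApiKey set, so the backend picks one.
var chat = await client.PostAsJsonAsync("Llm/Chat", new
{
    messages = new[] { new { role = "user", content = "Show my top backtests by score" } }
});
Console.WriteLine(await chat.Content.ReadAsStringAsync());
```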
src/Managing.Api/Controllers/LlmController.cs (new file, 162 lines)
@@ -0,0 +1,162 @@
using Managing.Application.Abstractions.Services;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;

namespace Managing.Api.Controllers;

/// <summary>
/// Controller for LLM (Large Language Model) operations with MCP tool calling support.
/// Provides endpoints for chat interactions with automatic provider selection and BYOK (Bring Your Own Key) support.
/// </summary>
[ApiController]
[Authorize]
[Route("[controller]")]
[Produces("application/json")]
public class LlmController : BaseController
{
    private readonly ILlmService _llmService;
    private readonly IMcpService _mcpService;
    private readonly ILogger<LlmController> _logger;

    public LlmController(
        ILlmService llmService,
        IMcpService mcpService,
        IUserService userService,
        ILogger<LlmController> logger) : base(userService)
    {
        _llmService = llmService;
        _mcpService = mcpService;
        _logger = logger;
    }

    /// <summary>
    /// Sends a chat message to an LLM with automatic provider selection and MCP tool calling support.
    /// Supports both auto mode (backend selects provider) and BYOK (user provides API key).
    /// </summary>
    /// <param name="request">The chat request with messages and optional provider/API key</param>
    /// <returns>The LLM response with tool calls if applicable</returns>
    [HttpPost]
    [Route("Chat")]
    public async Task<ActionResult<LlmChatResponse>> Chat([FromBody] LlmChatRequest request)
    {
        if (request == null)
        {
            return BadRequest("Chat request is required");
        }

        if (request.Messages == null || !request.Messages.Any())
        {
            return BadRequest("At least one message is required");
        }

        try
        {
            var user = await GetUser();

            // Get available MCP tools
            var availableTools = await _mcpService.GetAvailableToolsAsync();
            request.Tools = availableTools.ToList();

            // Send chat request to LLM
            var response = await _llmService.ChatAsync(user, request);

            // If LLM wants to call tools, execute them and get final response
            if (response.RequiresToolExecution && response.ToolCalls?.Any() == true)
            {
                _logger.LogInformation("LLM requested {Count} tool calls for user {UserId}",
                    response.ToolCalls.Count, user.Id);

                // Execute all tool calls
                var toolResults = new List<LlmMessage>();
                foreach (var toolCall in response.ToolCalls)
                {
                    try
                    {
                        var toolResult = await _mcpService.ExecuteToolAsync(user, toolCall.Name, toolCall.Arguments);
                        toolResults.Add(new LlmMessage
                        {
                            Role = "tool",
                            Content = System.Text.Json.JsonSerializer.Serialize(toolResult),
                            ToolCallId = toolCall.Id
                        });
                        _logger.LogInformation("Successfully executed tool {ToolName} for user {UserId}",
                            toolCall.Name, user.Id);
                    }
                    catch (Exception ex)
                    {
                        _logger.LogError(ex, "Error executing tool {ToolName} for user {UserId}",
                            toolCall.Name, user.Id);
                        toolResults.Add(new LlmMessage
                        {
                            Role = "tool",
                            Content = $"Error executing tool: {ex.Message}",
                            ToolCallId = toolCall.Id
                        });
                    }
                }

                // Add assistant message with tool calls
                request.Messages.Add(new LlmMessage
                {
                    Role = "assistant",
                    Content = response.Content,
                    ToolCalls = response.ToolCalls
                });

                // Add tool results
                request.Messages.AddRange(toolResults);

                // Get final response from LLM
                var finalResponse = await _llmService.ChatAsync(user, request);
                return Ok(finalResponse);
            }

            return Ok(response);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Error processing chat request for user");
            return StatusCode(500, $"Error processing chat request: {ex.Message}");
        }
    }

    /// <summary>
    /// Gets the list of available LLM providers configured on the backend.
    /// </summary>
    /// <returns>List of provider names</returns>
    [HttpGet]
    [Route("Providers")]
    public async Task<ActionResult<IEnumerable<string>>> GetProviders()
    {
        try
        {
            var providers = await _llmService.GetAvailableProvidersAsync();
            return Ok(providers);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Error getting available providers");
            return StatusCode(500, $"Error getting available providers: {ex.Message}");
        }
    }

    /// <summary>
    /// Gets the list of available MCP tools that the LLM can call.
    /// </summary>
    /// <returns>List of MCP tools with their descriptions and parameters</returns>
    [HttpGet]
    [Route("Tools")]
    public async Task<ActionResult<IEnumerable<McpToolDefinition>>> GetTools()
    {
        try
        {
            var tools = await _mcpService.GetAvailableToolsAsync();
            return Ok(tools);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Error getting available tools");
            return StatusCode(500, $"Error getting available tools: {ex.Message}");
        }
    }
}
@@ -7,6 +7,7 @@ using Managing.Domain.Users;
using MediatR;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using static Managing.Common.Enums;

namespace Managing.Api.Controllers;

@@ -115,6 +116,31 @@ public class UserController : BaseController
        return Ok(updatedUser);
    }

    /// <summary>
    /// Updates the default LLM provider for the current user.
    /// </summary>
    /// <param name="defaultLlmProvider">The new default LLM provider to set (e.g., "Auto", "Gemini", "OpenAI", "Claude").</param>
    /// <returns>The updated user with the new default LLM provider.</returns>
    [Authorize]
    [HttpPut("default-llm-provider")]
    public async Task<ActionResult<User>> UpdateDefaultLlmProvider([FromBody] string defaultLlmProvider)
    {
        if (string.IsNullOrWhiteSpace(defaultLlmProvider))
        {
            return BadRequest("Default LLM provider cannot be null or empty.");
        }

        // Parse string to enum (case-insensitive)
        if (!Enum.TryParse<LlmProvider>(defaultLlmProvider, ignoreCase: true, out var providerEnum))
        {
            return BadRequest($"Invalid LLM provider '{defaultLlmProvider}'. Valid providers are: Auto, Gemini, OpenAI, Claude");
        }

        var user = await GetUser();
        var updatedUser = await _userService.UpdateDefaultLlmProvider(user, providerEnum);
        return Ok(updatedUser);
    }

    /// <summary>
    /// Tests the Telegram channel configuration by sending a test message.
    /// </summary>
@@ -9,8 +9,6 @@
      }
    }
  },

  "InfluxDb": {
    "Organization": "managing-org"
  },
@@ -28,6 +26,17 @@
  "Flagsmith": {
    "ApiUrl": "https://flag.kaigen.ai/api/v1/"
  },
  "Llm": {
    "Gemini": {
      "DefaultModel": "gemini-2.0-flash"
    },
    "OpenAI": {
      "DefaultModel": "gpt-4o"
    },
    "Claude": {
      "DefaultModel": "claude-haiku-4-5-20251001"
    }
  },
  "N8n": {
  },
  "Sentry": {
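Only the DefaultModel values are committed here. LlmService (further down) also reads Llm:{Provider}:ApiKey and registers a provider only when its key is present, so the keys presumably arrive via user secrets or environment variables rather than this file, e.g. (illustrative values, using the standard ASP.NET Core `__` hierarchy separator):

```
Llm__Gemini__ApiKey=<key>
Llm__OpenAI__ApiKey=<key>
Llm__Claude__ApiKey=<key>
```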
@@ -0,0 +1,93 @@
using Managing.Domain.Users;

namespace Managing.Application.Abstractions.Services;

/// <summary>
/// Service for interacting with LLM providers
/// </summary>
public interface ILlmService
{
    /// <summary>
    /// Sends a chat message to the LLM and gets a response with tool calling support
    /// </summary>
    /// <param name="user">The user context</param>
    /// <param name="request">The chat request</param>
    /// <returns>The chat response</returns>
    Task<LlmChatResponse> ChatAsync(User user, LlmChatRequest request);

    /// <summary>
    /// Gets the list of available LLM providers
    /// </summary>
    /// <returns>List of provider names</returns>
    Task<IEnumerable<string>> GetAvailableProvidersAsync();
}

/// <summary>
/// Request model for LLM chat
/// </summary>
public class LlmChatRequest
{
    public List<LlmMessage> Messages { get; set; } = new();
    public string? Provider { get; set; } // null for auto-selection
    public string? ApiKey { get; set; } // BYOK (Bring Your Own Key)
    public bool Stream { get; set; } = false;
    public double Temperature { get; set; } = 0.7;
    public int MaxTokens { get; set; } = 4096;
    public List<McpToolDefinition>? Tools { get; set; } // Available MCP tools
}

/// <summary>
/// Response model for LLM chat
/// </summary>
public class LlmChatResponse
{
    public string Content { get; set; } = string.Empty;
    public string Provider { get; set; } = string.Empty;
    public string Model { get; set; } = string.Empty;
    public List<LlmToolCall>? ToolCalls { get; set; }
    public LlmUsage? Usage { get; set; }
    public bool RequiresToolExecution { get; set; }
}

/// <summary>
/// Represents a message in the conversation
/// </summary>
public class LlmMessage
{
    public string Role { get; set; } = string.Empty; // "user", "assistant", "system", "tool"
    public string Content { get; set; } = string.Empty;
    public List<LlmToolCall>? ToolCalls { get; set; }
    public string? ToolCallId { get; set; } // For tool response messages
}

/// <summary>
/// Represents a tool call from the LLM
/// </summary>
public class LlmToolCall
{
    public string Id { get; set; } = string.Empty;
    public string Name { get; set; } = string.Empty;
    public Dictionary<string, object> Arguments { get; set; } = new();
}

/// <summary>
/// Usage statistics for the LLM request
/// </summary>
public class LlmUsage
{
    public int PromptTokens { get; set; }
    public int CompletionTokens { get; set; }
    public int TotalTokens { get; set; }
}

/// <summary>
/// Configuration for an LLM provider
/// </summary>
public class LlmProviderConfig
{
    public string Name { get; set; } = string.Empty;
    public string ApiKey { get; set; } = string.Empty;
    public string BaseUrl { get; set; } = string.Empty;
    public string DefaultModel { get; set; } = string.Empty;
    public bool Enabled { get; set; } = true;
}
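As a quick orientation to these models, a minimal sketch of a tool-calling round trip against an ILlmService implementation (llmService, mcpService, and user are placeholders resolved from DI and the request context; the message content is illustrative):

```csharp
var request = new LlmChatRequest
{
    Messages = new List<LlmMessage>
    {
        new() { Role = "system", Content = "You are a trading assistant." },
        new() { Role = "user", Content = "List my best backtests." }
    },
    // Advertise the MCP tools so the model can request them.
    Tools = (await mcpService.GetAvailableToolsAsync()).ToList()
};

var response = await llmService.ChatAsync(user, request);
if (response.RequiresToolExecution)
{
    // The caller executes response.ToolCalls and feeds the results back as
    // Role = "tool" messages before asking for a final answer, exactly as
    // LlmController.Chat does above.
}
```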
@@ -0,0 +1,45 @@
using Managing.Domain.Users;

namespace Managing.Application.Abstractions.Services;

/// <summary>
/// Service for executing Model Context Protocol (MCP) tools
/// </summary>
public interface IMcpService
{
    /// <summary>
    /// Executes an MCP tool with the given parameters
    /// </summary>
    /// <param name="user">The user context for the tool execution</param>
    /// <param name="toolName">The name of the tool to execute</param>
    /// <param name="parameters">The parameters for the tool as a dictionary</param>
    /// <returns>The result of the tool execution</returns>
    Task<object> ExecuteToolAsync(User user, string toolName, Dictionary<string, object>? parameters = null);

    /// <summary>
    /// Gets the list of available tools with their descriptions
    /// </summary>
    /// <returns>List of available tools with metadata</returns>
    Task<IEnumerable<McpToolDefinition>> GetAvailableToolsAsync();
}

/// <summary>
/// Represents an MCP tool definition
/// </summary>
public class McpToolDefinition
{
    public string Name { get; set; } = string.Empty;
    public string Description { get; set; } = string.Empty;
    public Dictionary<string, McpParameterDefinition> Parameters { get; set; } = new();
}

/// <summary>
/// Represents a parameter definition for an MCP tool
/// </summary>
public class McpParameterDefinition
{
    public string Type { get; set; } = string.Empty;
    public string Description { get; set; } = string.Empty;
    public bool Required { get; set; }
    public object? DefaultValue { get; set; }
}
@@ -12,6 +12,7 @@ public interface IUserService
    Task<User> UpdateAgentName(User user, string agentName);
    Task<User> UpdateAvatarUrl(User user, string avatarUrl);
    Task<User> UpdateTelegramChannel(User user, string telegramChannel);
    Task<User> UpdateDefaultLlmProvider(User user, LlmProvider defaultLlmProvider);
    Task<User> UpdateUserSettings(User user, UserSettingsDto settings);
    Task<User> GetUserByName(string name);
    Task<User> GetUserByAgentName(string agentName);
src/Managing.Application/LLM/LlmService.cs (new file, 210 lines)
@@ -0,0 +1,210 @@
using Managing.Application.Abstractions.Services;
using Managing.Application.LLM.Providers;
using Managing.Domain.Users;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
using static Managing.Common.Enums;

namespace Managing.Application.LLM;

/// <summary>
/// Service for interacting with LLM providers with auto-selection and BYOK support
/// </summary>
public class LlmService : ILlmService
{
    private readonly IConfiguration _configuration;
    private readonly ILogger<LlmService> _logger;
    private readonly Dictionary<string, ILlmProvider> _providers;

    public LlmService(
        IConfiguration configuration,
        ILogger<LlmService> logger,
        IHttpClientFactory httpClientFactory)
    {
        _configuration = configuration;
        _logger = logger;
        _providers = new Dictionary<string, ILlmProvider>(StringComparer.OrdinalIgnoreCase);

        // Initialize providers
        InitializeProviders(httpClientFactory);
    }

    private void InitializeProviders(IHttpClientFactory httpClientFactory)
    {
        // Gemini Provider
        var geminiApiKey = _configuration["Llm:Gemini:ApiKey"];
        var geminiModel = _configuration["Llm:Gemini:DefaultModel"];
        if (!string.IsNullOrWhiteSpace(geminiApiKey))
        {
            var providerKey = ConvertLlmProviderToString(LlmProvider.Gemini);
            _providers[providerKey] = new GeminiProvider(geminiApiKey, geminiModel, httpClientFactory, _logger);
            _logger.LogInformation("Gemini provider initialized with model: {Model}", geminiModel ?? "default");
        }

        // OpenAI Provider
        var openaiApiKey = _configuration["Llm:OpenAI:ApiKey"];
        var openaiModel = _configuration["Llm:OpenAI:DefaultModel"];
        if (!string.IsNullOrWhiteSpace(openaiApiKey))
        {
            var providerKey = ConvertLlmProviderToString(LlmProvider.OpenAI);
            _providers[providerKey] = new OpenAiProvider(openaiApiKey, openaiModel, httpClientFactory, _logger);
            _logger.LogInformation("OpenAI provider initialized with model: {Model}", openaiModel ?? "default");
        }

        // Claude Provider
        var claudeApiKey = _configuration["Llm:Claude:ApiKey"];
        var claudeModel = _configuration["Llm:Claude:DefaultModel"];
        if (!string.IsNullOrWhiteSpace(claudeApiKey))
        {
            var providerKey = ConvertLlmProviderToString(LlmProvider.Claude);
            _providers[providerKey] = new ClaudeProvider(claudeApiKey, claudeModel, httpClientFactory, _logger);
            _logger.LogInformation("Claude provider initialized with model: {Model}", claudeModel ?? "default");
        }

        if (_providers.Count == 0)
        {
            _logger.LogWarning("No LLM providers configured. Please add API keys to configuration.");
        }
    }

    public async Task<LlmChatResponse> ChatAsync(User user, LlmChatRequest request)
    {
        ILlmProvider provider;

        // BYOK: If user provides their own API key
        if (!string.IsNullOrWhiteSpace(request.ApiKey))
        {
            var requestedProvider = ParseProviderString(request.Provider) ?? LlmProvider.Claude; // Default to Claude for BYOK
            var providerName = ConvertLlmProviderToString(requestedProvider);
            provider = CreateProviderWithCustomKey(requestedProvider, request.ApiKey);
            _logger.LogInformation("Using BYOK for provider: {Provider} for user: {UserId}", providerName, user.Id);
        }
        // Auto mode: Select provider automatically (use user's default if set, otherwise fallback to system default)
        else if (string.IsNullOrWhiteSpace(request.Provider) ||
                 ParseProviderString(request.Provider) == LlmProvider.Auto)
        {
            // Check if user has a default provider preference (and it's not Auto)
            if (user.DefaultLlmProvider.HasValue &&
                user.DefaultLlmProvider.Value != LlmProvider.Auto)
            {
                var providerName = ConvertLlmProviderToString(user.DefaultLlmProvider.Value);
                if (_providers.TryGetValue(providerName, out var userPreferredProvider))
                {
                    provider = userPreferredProvider;
                    _logger.LogInformation("Using user's default provider: {Provider} for user: {UserId}", provider.Name, user.Id);
                }
                else
                {
                    provider = SelectProvider();
                    _logger.LogInformation("Auto-selected provider: {Provider} for user: {UserId} (user default {UserDefault} not available)",
                        provider.Name, user.Id, user.DefaultLlmProvider.Value);
                }
            }
            else
            {
                provider = SelectProvider();
                _logger.LogInformation("Auto-selected provider: {Provider} for user: {UserId} (user default: {UserDefault})",
                    provider.Name, user.Id, user.DefaultLlmProvider?.ToString() ?? "not set");
            }
        }
        // Explicit provider selection
        else
        {
            var requestedProvider = ParseProviderString(request.Provider);
            if (requestedProvider == null || requestedProvider == LlmProvider.Auto)
            {
                throw new InvalidOperationException($"Invalid provider '{request.Provider}'. Valid providers are: {string.Join(", ", Enum.GetNames<LlmProvider>())}");
            }

            var providerName = ConvertLlmProviderToString(requestedProvider.Value);
            if (!_providers.TryGetValue(providerName, out provider!))
            {
                throw new InvalidOperationException($"Provider '{request.Provider}' is not available or not configured.");
            }
            _logger.LogInformation("Using specified provider: {Provider} for user: {UserId}", providerName, user.Id);
        }

        try
        {
            var response = await provider.ChatAsync(request);
            return response;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Error calling LLM provider {Provider} for user {UserId}", provider.Name, user.Id);
            throw;
        }
    }

    public Task<IEnumerable<string>> GetAvailableProvidersAsync()
    {
        return Task.FromResult(_providers.Keys.AsEnumerable());
    }

    private ILlmProvider SelectProvider()
    {
        // Priority: OpenAI > Claude > Gemini
        var openaiKey = ConvertLlmProviderToString(LlmProvider.OpenAI);
        if (_providers.TryGetValue(openaiKey, out var openai))
            return openai;

        var claudeKey = ConvertLlmProviderToString(LlmProvider.Claude);
        if (_providers.TryGetValue(claudeKey, out var claude))
            return claude;

        var geminiKey = ConvertLlmProviderToString(LlmProvider.Gemini);
        if (_providers.TryGetValue(geminiKey, out var gemini))
            return gemini;

        throw new InvalidOperationException("No LLM providers are configured. Please add API keys to configuration.");
    }

    private ILlmProvider CreateProviderWithCustomKey(LlmProvider provider, string apiKey)
    {
        // This is a temporary instance with user's API key
        // Get default models from configuration
        var geminiModel = _configuration["Llm:Gemini:DefaultModel"];
        var openaiModel = _configuration["Llm:OpenAI:DefaultModel"];
        var claudeModel = _configuration["Llm:Claude:DefaultModel"];

        return provider switch
        {
            LlmProvider.Gemini => new GeminiProvider(apiKey, geminiModel, null!, _logger),
            LlmProvider.OpenAI => new OpenAiProvider(apiKey, openaiModel, null!, _logger),
            LlmProvider.Claude => new ClaudeProvider(apiKey, claudeModel, null!, _logger),
            _ => throw new InvalidOperationException($"Cannot create provider with custom key for: {provider}. Only Gemini, OpenAI, and Claude are supported for BYOK.")
        };
    }

    private string ConvertLlmProviderToString(LlmProvider provider)
    {
        return provider switch
        {
            LlmProvider.Auto => "auto",
            LlmProvider.Gemini => "gemini",
            LlmProvider.OpenAI => "openai",
            LlmProvider.Claude => "claude",
            _ => throw new ArgumentException($"Unknown LlmProvider enum value: {provider}")
        };
    }

    private LlmProvider? ParseProviderString(string? providerString)
    {
        if (string.IsNullOrWhiteSpace(providerString))
            return null;

        // Try parsing as enum (case-insensitive)
        if (Enum.TryParse<LlmProvider>(providerString, ignoreCase: true, out var parsedProvider))
            return parsedProvider;

        // Fallback to lowercase string matching for backward compatibility
        return providerString.ToLowerInvariant() switch
        {
            "auto" => LlmProvider.Auto,
            "gemini" => LlmProvider.Gemini,
            "openai" => LlmProvider.OpenAI,
            "claude" => LlmProvider.Claude,
            _ => null
        };
    }
}
src/Managing.Application/LLM/McpService.cs (new file, 236 lines)
@@ -0,0 +1,236 @@
using Managing.Application.Abstractions.Services;
using Managing.Domain.Users;
using Managing.Mcp.Tools;
using Microsoft.Extensions.Logging;
using static Managing.Common.Enums;

namespace Managing.Application.LLM;

/// <summary>
/// Service for executing Model Context Protocol (MCP) tools
/// </summary>
public class McpService : IMcpService
{
    private readonly BacktestTools _backtestTools;
    private readonly ILogger<McpService> _logger;

    public McpService(BacktestTools backtestTools, ILogger<McpService> logger)
    {
        _backtestTools = backtestTools;
        _logger = logger;
    }

    public async Task<object> ExecuteToolAsync(User user, string toolName, Dictionary<string, object>? parameters = null)
    {
        _logger.LogInformation("Executing MCP tool: {ToolName} for user: {UserId}", toolName, user.Id);

        try
        {
            return toolName.ToLowerInvariant() switch
            {
                "get_backtests_paginated" => await ExecuteGetBacktestsPaginated(user, parameters),
                _ => throw new InvalidOperationException($"Unknown tool: {toolName}")
            };
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Error executing MCP tool {ToolName} for user {UserId}", toolName, user.Id);
            throw;
        }
    }

    public Task<IEnumerable<McpToolDefinition>> GetAvailableToolsAsync()
    {
        var tools = new List<McpToolDefinition>
        {
            new McpToolDefinition
            {
                Name = "get_backtests_paginated",
                Description = "Retrieves paginated backtests with filtering and sorting capabilities. Supports filters for score, winrate, drawdown, tickers, indicators, duration, and trading type.",
                Parameters = new Dictionary<string, McpParameterDefinition>
                {
                    ["page"] = new McpParameterDefinition
                    {
                        Type = "integer",
                        Description = "Page number (defaults to 1)",
                        Required = false,
                        DefaultValue = 1
                    },
                    ["pageSize"] = new McpParameterDefinition
                    {
                        Type = "integer",
                        Description = "Number of items per page (defaults to 50, max 100)",
                        Required = false,
                        DefaultValue = 50
                    },
                    ["sortBy"] = new McpParameterDefinition
                    {
                        Type = "string",
                        Description = "Field to sort by (Score, WinRate, GrowthPercentage, MaxDrawdown, SharpeRatio, FinalPnl, StartDate, EndDate, PositionCount)",
                        Required = false,
                        DefaultValue = "Score"
                    },
                    ["sortOrder"] = new McpParameterDefinition
                    {
                        Type = "string",
                        Description = "Sort order - 'asc' or 'desc' (defaults to 'desc')",
                        Required = false,
                        DefaultValue = "desc"
                    },
                    ["scoreMin"] = new McpParameterDefinition
                    {
                        Type = "number",
                        Description = "Minimum score filter (0-100)",
                        Required = false
                    },
                    ["scoreMax"] = new McpParameterDefinition
                    {
                        Type = "number",
                        Description = "Maximum score filter (0-100)",
                        Required = false
                    },
                    ["winrateMin"] = new McpParameterDefinition
                    {
                        Type = "integer",
                        Description = "Minimum winrate filter (0-100)",
                        Required = false
                    },
                    ["winrateMax"] = new McpParameterDefinition
                    {
                        Type = "integer",
                        Description = "Maximum winrate filter (0-100)",
                        Required = false
                    },
                    ["maxDrawdownMax"] = new McpParameterDefinition
                    {
                        Type = "number",
                        Description = "Maximum drawdown filter",
                        Required = false
                    },
                    ["tickers"] = new McpParameterDefinition
                    {
                        Type = "string",
                        Description = "Comma-separated list of tickers to filter by (e.g., 'BTC,ETH,SOL')",
                        Required = false
                    },
                    ["indicators"] = new McpParameterDefinition
                    {
                        Type = "string",
                        Description = "Comma-separated list of indicators to filter by",
                        Required = false
                    },
                    ["durationMinDays"] = new McpParameterDefinition
                    {
                        Type = "number",
                        Description = "Minimum duration in days",
                        Required = false
                    },
                    ["durationMaxDays"] = new McpParameterDefinition
                    {
                        Type = "number",
                        Description = "Maximum duration in days",
                        Required = false
                    },
                    ["name"] = new McpParameterDefinition
                    {
                        Type = "string",
                        Description = "Filter by name (contains search)",
                        Required = false
                    },
                    ["tradingType"] = new McpParameterDefinition
                    {
                        Type = "string",
                        Description = "Trading type filter (Spot, Futures, BacktestSpot, BacktestFutures, Paper)",
                        Required = false
                    }
                }
            }
        };

        return Task.FromResult<IEnumerable<McpToolDefinition>>(tools);
    }

    private async Task<object> ExecuteGetBacktestsPaginated(User user, Dictionary<string, object>? parameters)
    {
        var page = GetParameterValue<int>(parameters, "page", 1);
        var pageSize = GetParameterValue<int>(parameters, "pageSize", 50);
        var sortByString = GetParameterValue<string>(parameters, "sortBy", "Score");
        var sortOrder = GetParameterValue<string>(parameters, "sortOrder", "desc");
        var scoreMin = GetParameterValue<double?>(parameters, "scoreMin", null);
        var scoreMax = GetParameterValue<double?>(parameters, "scoreMax", null);
        var winrateMin = GetParameterValue<int?>(parameters, "winrateMin", null);
        var winrateMax = GetParameterValue<int?>(parameters, "winrateMax", null);
        var maxDrawdownMax = GetParameterValue<decimal?>(parameters, "maxDrawdownMax", null);
        var tickers = GetParameterValue<string?>(parameters, "tickers", null);
        var indicators = GetParameterValue<string?>(parameters, "indicators", null);
        var durationMinDays = GetParameterValue<double?>(parameters, "durationMinDays", null);
        var durationMaxDays = GetParameterValue<double?>(parameters, "durationMaxDays", null);
        var name = GetParameterValue<string?>(parameters, "name", null);
        var tradingTypeString = GetParameterValue<string?>(parameters, "tradingType", null);

        // Parse sortBy enum
        if (!Enum.TryParse<BacktestSortableColumn>(sortByString, true, out var sortBy))
        {
            sortBy = BacktestSortableColumn.Score;
        }

        // Parse tradingType enum
        TradingType? tradingType = null;
        if (!string.IsNullOrWhiteSpace(tradingTypeString) &&
            Enum.TryParse<TradingType>(tradingTypeString, true, out var parsedTradingType))
        {
            tradingType = parsedTradingType;
        }

        return await _backtestTools.GetBacktestsPaginated(
            user,
            page,
            pageSize,
            sortBy,
            sortOrder,
            scoreMin,
            scoreMax,
            winrateMin,
            winrateMax,
            maxDrawdownMax,
            tickers,
            indicators,
            durationMinDays,
            durationMaxDays,
            name,
            tradingType);
    }

    private T GetParameterValue<T>(Dictionary<string, object>? parameters, string key, T defaultValue)
    {
        if (parameters == null || !parameters.ContainsKey(key))
        {
            return defaultValue;
        }

        try
        {
            var value = parameters[key];
            if (value == null)
            {
                return defaultValue;
            }

            // Handle nullable types
            var targetType = typeof(T);
            var underlyingType = Nullable.GetUnderlyingType(targetType);

            if (underlyingType != null)
            {
                // It's a nullable type
                return (T)Convert.ChangeType(value, underlyingType);
            }

            return (T)Convert.ChangeType(value, targetType);
        }
        catch
        {
            return defaultValue;
        }
    }
}
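For reference, a hypothetical direct invocation of the single registered tool (user and mcpService are placeholders). Note that GetParameterValue relies on Convert.ChangeType, which requires IConvertible values: plain CLR primitives convert as expected, while JsonElement values deserialized from an LLM response would hit the catch block and fall back to defaults unless converted first.

```csharp
// Direct call with plain primitives, so Convert.ChangeType can coerce them.
var result = await mcpService.ExecuteToolAsync(user, "get_backtests_paginated",
    new Dictionary<string, object>
    {
        ["page"] = 1,
        ["pageSize"] = 20,
        ["sortBy"] = "WinRate",
        ["tickers"] = "BTC,ETH"
    });
```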
src/Managing.Application/LLM/Providers/ClaudeProvider.cs (new file, 165 lines)
@@ -0,0 +1,165 @@
using System.Net.Http.Json;
using System.Text.Json;
using System.Text.Json.Serialization;
using Managing.Application.Abstractions.Services;
using Microsoft.Extensions.Logging;

namespace Managing.Application.LLM.Providers;

/// <summary>
/// Anthropic Claude API provider
/// </summary>
public class ClaudeProvider : ILlmProvider
{
    private readonly string _apiKey;
    private readonly string _defaultModel;
    private readonly HttpClient _httpClient;
    private readonly ILogger _logger;
    private const string BaseUrl = "https://api.anthropic.com/v1";
    private const string FallbackModel = "claude-3-5-sonnet-20241022";
    private const string AnthropicVersion = "2023-06-01";

    public string Name => "claude";

    public ClaudeProvider(string apiKey, string? defaultModel, IHttpClientFactory? httpClientFactory, ILogger logger)
    {
        _apiKey = apiKey;
        _defaultModel = defaultModel ?? FallbackModel;
        _httpClient = httpClientFactory?.CreateClient() ?? new HttpClient();
        _httpClient.DefaultRequestHeaders.Add("x-api-key", _apiKey);
        _httpClient.DefaultRequestHeaders.Add("anthropic-version", AnthropicVersion);
        _logger = logger;
    }

    public async Task<LlmChatResponse> ChatAsync(LlmChatRequest request)
    {
        var url = $"{BaseUrl}/messages";

        // Extract system message
        var systemMessage = request.Messages.FirstOrDefault(m => m.Role == "system")?.Content ?? "";
        var messages = request.Messages.Where(m => m.Role != "system").ToList();

        var claudeRequest = new
        {
            model = _defaultModel,
            max_tokens = request.MaxTokens,
            temperature = request.Temperature,
            system = !string.IsNullOrWhiteSpace(systemMessage) ? systemMessage : null,
            messages = messages.Select(m => new
            {
                role = m.Role == "assistant" ? "assistant" : "user",
                content = m.Content
            }).ToArray(),
            tools = request.Tools?.Any() == true ? request.Tools.Select(t => new
            {
                name = t.Name,
                description = t.Description,
                input_schema = new
                {
                    type = "object",
                    properties = t.Parameters.ToDictionary(
                        p => p.Key,
                        p => new
                        {
                            type = p.Value.Type,
                            description = p.Value.Description
                        }
                    ),
                    required = t.Parameters.Where(p => p.Value.Required).Select(p => p.Key).ToArray()
                }
            }).ToArray() : null
        };

        var jsonOptions = new JsonSerializerOptions
        {
            PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
            DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
        };

        var response = await _httpClient.PostAsJsonAsync(url, claudeRequest, jsonOptions);

        if (!response.IsSuccessStatusCode)
        {
            var errorContent = await response.Content.ReadAsStringAsync();
            _logger.LogError("Claude API error: {StatusCode} - {Error}", response.StatusCode, errorContent);
            throw new HttpRequestException($"Claude API error: {response.StatusCode} - {errorContent}");
        }

        var claudeResponse = await response.Content.ReadFromJsonAsync<ClaudeResponse>(jsonOptions);
        return ConvertFromClaudeResponse(claudeResponse!);
    }

    private LlmChatResponse ConvertFromClaudeResponse(ClaudeResponse response)
    {
        var textContent = response.Content?.FirstOrDefault(c => c.Type == "text");
        var toolUseContents = response.Content?.Where(c => c.Type == "tool_use").ToList();

        var llmResponse = new LlmChatResponse
        {
            Content = textContent?.Text ?? "",
            Provider = Name,
            Model = response.Model ?? _defaultModel,
            Usage = response.Usage != null ? new LlmUsage
            {
                PromptTokens = response.Usage.InputTokens,
                CompletionTokens = response.Usage.OutputTokens,
                TotalTokens = response.Usage.InputTokens + response.Usage.OutputTokens
            } : null
        };

        if (toolUseContents?.Any() == true)
        {
            llmResponse.ToolCalls = toolUseContents.Select(tc => new LlmToolCall
            {
                Id = tc.Id ?? Guid.NewGuid().ToString(),
                Name = tc.Name ?? "",
                Arguments = tc.Input ?? new Dictionary<string, object>()
            }).ToList();
            llmResponse.RequiresToolExecution = true;
        }

        return llmResponse;
    }

    private class ClaudeResponse
    {
        [JsonPropertyName("id")]
        public string? Id { get; set; }

        [JsonPropertyName("model")]
        public string? Model { get; set; }

        [JsonPropertyName("content")]
        public List<ClaudeContent>? Content { get; set; }

        [JsonPropertyName("usage")]
        public ClaudeUsage? Usage { get; set; }
    }

    private class ClaudeContent
    {
        [JsonPropertyName("type")]
        public string Type { get; set; } = "";

        [JsonPropertyName("text")]
        public string? Text { get; set; }

        [JsonPropertyName("id")]
        public string? Id { get; set; }

        [JsonPropertyName("name")]
        public string? Name { get; set; }

        [JsonPropertyName("input")]
        public Dictionary<string, object>? Input { get; set; }
    }

    private class ClaudeUsage
    {
        [JsonPropertyName("input_tokens")]
        public int InputTokens { get; set; }

        [JsonPropertyName("output_tokens")]
        public int OutputTokens { get; set; }
    }
}
src/Managing.Application/LLM/Providers/GeminiProvider.cs (new file, 210 lines)
@@ -0,0 +1,210 @@
using System.Net.Http.Json;
using System.Text.Json;
using System.Text.Json.Serialization;
using Managing.Application.Abstractions.Services;
using Microsoft.Extensions.Logging;

namespace Managing.Application.LLM.Providers;

/// <summary>
/// Google Gemini API provider
/// </summary>
public class GeminiProvider : ILlmProvider
{
    private readonly string _apiKey;
    private readonly string _defaultModel;
    private readonly HttpClient _httpClient;
    private readonly ILogger _logger;
    private const string BaseUrl = "https://generativelanguage.googleapis.com/v1beta";
    private const string FallbackModel = "gemini-2.0-flash-exp";

    public string Name => "gemini";

    public GeminiProvider(string apiKey, string? defaultModel, IHttpClientFactory? httpClientFactory, ILogger logger)
    {
        _apiKey = apiKey;
        _defaultModel = defaultModel ?? FallbackModel;
        _httpClient = httpClientFactory?.CreateClient() ?? new HttpClient();
        _logger = logger;
    }

    public async Task<LlmChatResponse> ChatAsync(LlmChatRequest request)
    {
        var model = _defaultModel;
        var url = $"{BaseUrl}/models/{model}:generateContent?key={_apiKey}";

        var geminiRequest = ConvertToGeminiRequest(request);
        var jsonOptions = new JsonSerializerOptions
        {
            PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
            DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
        };

        var response = await _httpClient.PostAsJsonAsync(url, geminiRequest, jsonOptions);

        if (!response.IsSuccessStatusCode)
        {
            var errorContent = await response.Content.ReadAsStringAsync();
            _logger.LogError("Gemini API error: {StatusCode} - {Error}", response.StatusCode, errorContent);
            throw new HttpRequestException($"Gemini API error: {response.StatusCode} - {errorContent}");
        }

        var geminiResponse = await response.Content.ReadFromJsonAsync<GeminiResponse>(jsonOptions);
        return ConvertFromGeminiResponse(geminiResponse!);
    }

    private object ConvertToGeminiRequest(LlmChatRequest request)
    {
        var contents = request.Messages
            .Where(m => m.Role != "system") // Gemini doesn't support system messages in the same way
            .Select(m => new
            {
                role = m.Role == "assistant" ? "model" : "user",
                parts = new[]
                {
                    new { text = m.Content }
                }
            }).ToList();

        // Add system message as first user message if present
        var systemMessage = request.Messages.FirstOrDefault(m => m.Role == "system");
        if (systemMessage != null && !string.IsNullOrWhiteSpace(systemMessage.Content))
        {
            contents.Insert(0, new
            {
                role = "user",
                parts = new[]
                {
                    new { text = $"System instructions: {systemMessage.Content}" }
                }
            });
        }

        var geminiRequest = new
        {
            contents,
            generationConfig = new
            {
                temperature = request.Temperature,
                maxOutputTokens = request.MaxTokens
            },
            tools = request.Tools?.Any() == true
                ? new[]
                {
                    new
                    {
                        functionDeclarations = request.Tools.Select(t => new
                        {
                            name = t.Name,
                            description = t.Description,
                            parameters = new
                            {
                                type = "object",
                                properties = t.Parameters.ToDictionary(
                                    p => p.Key,
                                    p => new
                                    {
                                        type = p.Value.Type,
                                        description = p.Value.Description
                                    }
                                ),
                                required = t.Parameters.Where(p => p.Value.Required).Select(p => p.Key).ToArray()
                            }
                        }).ToArray()
                    }
                }
                : null
        };

        return geminiRequest;
    }

    private LlmChatResponse ConvertFromGeminiResponse(GeminiResponse response)
    {
        var candidate = response.Candidates?.FirstOrDefault();
        if (candidate == null)
        {
            return new LlmChatResponse
            {
                Content = "",
                Provider = Name,
                Model = _defaultModel
            };
        }

        var content = candidate.Content;
        var textPart = content?.Parts?.FirstOrDefault(p => !string.IsNullOrWhiteSpace(p.Text));
        var functionCallParts = content?.Parts?.Where(p => p.FunctionCall != null).ToList();

        var llmResponse = new LlmChatResponse
        {
            Content = textPart?.Text ?? "",
            Provider = Name,
            Model = _defaultModel,
            Usage = response.UsageMetadata != null
                ? new LlmUsage
                {
                    PromptTokens = response.UsageMetadata.PromptTokenCount,
                    CompletionTokens = response.UsageMetadata.CandidatesTokenCount,
                    TotalTokens = response.UsageMetadata.TotalTokenCount
                }
                : null
        };

        // Handle function calls (tool calls)
        if (functionCallParts?.Any() == true)
        {
            llmResponse.ToolCalls = functionCallParts.Select((fc, idx) => new LlmToolCall
            {
                Id = $"call_{idx}",
                Name = fc.FunctionCall!.Name,
                Arguments = fc.FunctionCall.Args ?? new Dictionary<string, object>()
            }).ToList();
            llmResponse.RequiresToolExecution = true;
        }

        return llmResponse;
    }

    // Gemini API response models
    private class GeminiResponse
    {
        [JsonPropertyName("candidates")] public List<GeminiCandidate>? Candidates { get; set; }

        [JsonPropertyName("usageMetadata")] public GeminiUsageMetadata? UsageMetadata { get; set; }
    }

    private class GeminiCandidate
    {
        [JsonPropertyName("content")] public GeminiContent? Content { get; set; }
    }

    private class GeminiContent
    {
        [JsonPropertyName("parts")] public List<GeminiPart>? Parts { get; set; }
    }

    private class GeminiPart
    {
        [JsonPropertyName("text")] public string? Text { get; set; }

        [JsonPropertyName("functionCall")] public GeminiFunctionCall? FunctionCall { get; set; }
    }

    private class GeminiFunctionCall
    {
        [JsonPropertyName("name")] public string Name { get; set; } = "";

        [JsonPropertyName("args")] public Dictionary<string, object>? Args { get; set; }
    }

    private class GeminiUsageMetadata
    {
        [JsonPropertyName("promptTokenCount")] public int PromptTokenCount { get; set; }

        [JsonPropertyName("candidatesTokenCount")]
        public int CandidatesTokenCount { get; set; }

        [JsonPropertyName("totalTokenCount")] public int TotalTokenCount { get; set; }
    }
}
src/Managing.Application/LLM/Providers/ILlmProvider.cs (new file, 21 lines)
@@ -0,0 +1,21 @@
using Managing.Application.Abstractions.Services;

namespace Managing.Application.LLM.Providers;

/// <summary>
/// Interface for LLM provider implementations
/// </summary>
public interface ILlmProvider
{
    /// <summary>
    /// Gets the name of the provider (e.g., "gemini", "openai", "claude")
    /// </summary>
    string Name { get; }

    /// <summary>
    /// Sends a chat request to the provider
    /// </summary>
    /// <param name="request">The chat request</param>
    /// <returns>The chat response</returns>
    Task<LlmChatResponse> ChatAsync(LlmChatRequest request);
}
src/Managing.Application/LLM/Providers/OpenAiProvider.cs (new file, 199 lines)
@@ -0,0 +1,199 @@
using System.Net.Http.Json;
using System.Text.Json;
using System.Text.Json.Serialization;
using Managing.Application.Abstractions.Services;
using Microsoft.Extensions.Logging;

namespace Managing.Application.LLM.Providers;

/// <summary>
/// OpenAI API provider
/// </summary>
public class OpenAiProvider : ILlmProvider
{
    private readonly string _apiKey;
    private readonly string _defaultModel;
    private readonly HttpClient _httpClient;
    private readonly ILogger _logger;
    private const string BaseUrl = "https://api.openai.com/v1";
    private const string FallbackModel = "gpt-4o";

    public string Name => "openai";

    public OpenAiProvider(string apiKey, string? defaultModel, IHttpClientFactory? httpClientFactory, ILogger logger)
    {
        _apiKey = apiKey;
        _defaultModel = defaultModel ?? FallbackModel;
        _httpClient = httpClientFactory?.CreateClient() ?? new HttpClient();
        _httpClient.DefaultRequestHeaders.Add("Authorization", $"Bearer {_apiKey}");
        _logger = logger;
    }

    public async Task<LlmChatResponse> ChatAsync(LlmChatRequest request)
    {
        var url = $"{BaseUrl}/chat/completions";

        var openAiRequest = new
        {
            model = _defaultModel,
            messages = request.Messages.Select(m => new
            {
                role = m.Role,
                content = m.Content,
                tool_calls = m.ToolCalls?.Select(tc => new
                {
                    id = tc.Id,
                    type = "function",
                    function = new
                    {
                        name = tc.Name,
                        arguments = JsonSerializer.Serialize(tc.Arguments)
                    }
                }),
                tool_call_id = m.ToolCallId
            }).ToArray(),
            temperature = request.Temperature,
            max_tokens = request.MaxTokens,
            tools = request.Tools?.Any() == true ? request.Tools.Select(t => new
            {
                type = "function",
                function = new
                {
                    name = t.Name,
                    description = t.Description,
                    parameters = new
                    {
                        type = "object",
                        properties = t.Parameters.ToDictionary(
                            p => p.Key,
                            p => new
                            {
                                type = p.Value.Type,
                                description = p.Value.Description
                            }
                        ),
                        required = t.Parameters.Where(p => p.Value.Required).Select(p => p.Key).ToArray()
                    }
                }
            }).ToArray() : null
        };

        var jsonOptions = new JsonSerializerOptions
        {
            PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
            DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
        };

        var response = await _httpClient.PostAsJsonAsync(url, openAiRequest, jsonOptions);

        if (!response.IsSuccessStatusCode)
        {
            var errorContent = await response.Content.ReadAsStringAsync();
            _logger.LogError("OpenAI API error: {StatusCode} - {Error}", response.StatusCode, errorContent);
            throw new HttpRequestException($"OpenAI API error: {response.StatusCode} - {errorContent}");
        }

        var openAiResponse = await response.Content.ReadFromJsonAsync<OpenAiResponse>(jsonOptions);
        return ConvertFromOpenAiResponse(openAiResponse!);
    }

    private LlmChatResponse ConvertFromOpenAiResponse(OpenAiResponse response)
    {
        var choice = response.Choices?.FirstOrDefault();
        if (choice == null)
        {
            return new LlmChatResponse
            {
                Content = "",
                Provider = Name,
                Model = response.Model ?? _defaultModel
            };
        }

        var llmResponse = new LlmChatResponse
        {
            Content = choice.Message?.Content ?? "",
            Provider = Name,
            Model = response.Model ?? _defaultModel,
            Usage = response.Usage != null ? new LlmUsage
            {
                PromptTokens = response.Usage.PromptTokens,
                CompletionTokens = response.Usage.CompletionTokens,
                TotalTokens = response.Usage.TotalTokens
            } : null
        };

        if (choice.Message?.ToolCalls?.Any() == true)
        {
            llmResponse.ToolCalls = choice.Message.ToolCalls.Select(tc => new LlmToolCall
            {
                Id = tc.Id,
                Name = tc.Function.Name,
                Arguments = JsonSerializer.Deserialize<Dictionary<string, object>>(tc.Function.Arguments) ?? new()
            }).ToList();
            llmResponse.RequiresToolExecution = true;
        }

        return llmResponse;
    }

    private class OpenAiResponse
    {
        [JsonPropertyName("id")]
        public string? Id { get; set; }

        [JsonPropertyName("model")]
        public string? Model { get; set; }

        [JsonPropertyName("choices")]
        public List<OpenAiChoice>? Choices { get; set; }

        [JsonPropertyName("usage")]
        public OpenAiUsage? Usage { get; set; }
    }

    private class OpenAiChoice
    {
        [JsonPropertyName("message")]
        public OpenAiMessage? Message { get; set; }
    }

    private class OpenAiMessage
    {
        [JsonPropertyName("content")]
        public string? Content { get; set; }

        [JsonPropertyName("tool_calls")]
        public List<OpenAiToolCall>? ToolCalls { get; set; }
    }

    private class OpenAiToolCall
    {
        [JsonPropertyName("id")]
        public string Id { get; set; } = "";

        [JsonPropertyName("function")]
        public OpenAiFunction Function { get; set; } = new();
    }

    private class OpenAiFunction
    {
        [JsonPropertyName("name")]
        public string Name { get; set; } = "";

        [JsonPropertyName("arguments")]
        public string Arguments { get; set; } = "{}";
    }

    private class OpenAiUsage
    {
        [JsonPropertyName("prompt_tokens")]
        public int PromptTokens { get; set; }

        [JsonPropertyName("completion_tokens")]
        public int CompletionTokens { get; set; }

        [JsonPropertyName("total_tokens")]
        public int TotalTokens { get; set; }
    }
}
@@ -36,6 +36,7 @@
    <ProjectReference Include="..\Managing.Common\Managing.Common.csproj"/>
    <ProjectReference Include="..\Managing.Domain\Managing.Domain.csproj"/>
    <ProjectReference Include="..\Managing.Infrastructure.Database\Managing.Infrastructure.Databases.csproj"/>
    <ProjectReference Include="..\Managing.Mcp\Managing.Mcp.csproj"/>
  </ItemGroup>

</Project>
@@ -339,6 +339,22 @@ public class UserService : IUserService
        return user;
    }

    public async Task<User> UpdateDefaultLlmProvider(User user, LlmProvider defaultLlmProvider)
    {
        user = await GetUserByName(user.Name);
        if (user.DefaultLlmProvider == defaultLlmProvider)
            return user;

        // Update the default LLM provider on the provided user object
        user.DefaultLlmProvider = defaultLlmProvider;
        await _userRepository.SaveOrUpdateUserAsync(user);

        _logger.LogInformation("Updated default LLM provider to {Provider} for user {UserId}",
            defaultLlmProvider, user.Id);

        return user;
    }

    public async Task<User> UpdateUserSettings(User user, UserSettingsDto settings)
    {
        user = await GetUserByName(user.Name);
@@ -425,6 +425,11 @@ public static class ApiBootstrap
        // Admin services
        services.AddSingleton<IAdminConfigurationService, AdminConfigurationService>();

        // LLM and MCP services
        services.AddScoped<ILlmService, Managing.Application.LLM.LlmService>();
        services.AddScoped<IMcpService, Managing.Application.LLM.McpService>();
        services.AddScoped<Managing.Mcp.Tools.BacktestTools>();

        return services;
    }
@@ -31,6 +31,7 @@
    <ProjectReference Include="..\Managing.Infrastructure.Messengers\Managing.Infrastructure.Messengers.csproj"/>
    <ProjectReference Include="..\Managing.Infrastructure.Storage\Managing.Infrastructure.Storage.csproj"/>
    <ProjectReference Include="..\Managing.Infrastructure.Web3\Managing.Infrastructure.Evm.csproj"/>
    <ProjectReference Include="..\Managing.Mcp\Managing.Mcp.csproj"/>
  </ItemGroup>

</Project>
@@ -126,6 +126,14 @@ public static class Enums
        None
    }

    public enum LlmProvider
    {
        Auto,
        Gemini,
        OpenAI,
        Claude
    }

    public enum TradeDirection
    {
        None,
@@ -40,4 +40,7 @@ public class User
    [Id(17)] public decimal? SignalAgreementThreshold { get; set; }
    [Id(18)] public bool? AllowSignalTrendOverride { get; set; }
    [Id(19)] public TradingExchanges? DefaultExchange { get; set; }

    // User Settings - LLM Configuration
    [Id(21)] public LlmProvider? DefaultLlmProvider { get; set; } = LlmProvider.Auto; // Default LLM provider
}
src/Managing.Infrastructure.Database/Migrations/20260103140520_AddDefaultLlmProviderToUsers.Designer.cs (generated, new file, 1797 lines; diff suppressed because it is too large)
@@ -0,0 +1,38 @@
using Microsoft.EntityFrameworkCore.Migrations;

#nullable disable

namespace Managing.Infrastructure.Databases.Migrations
{
    /// <inheritdoc />
    public partial class AddDefaultLlmProviderToUsers : Migration
    {
        /// <inheritdoc />
        protected override void Up(MigrationBuilder migrationBuilder)
        {
            // Add column with default value
            migrationBuilder.AddColumn<string>(
                name: "DefaultLlmProvider",
                table: "Users",
                type: "character varying(50)",
                maxLength: 50,
                nullable: true,
                defaultValue: "auto");

            // Update existing NULL values to default
            migrationBuilder.Sql(@"
                UPDATE ""Users""
                SET ""DefaultLlmProvider"" = 'auto'
                WHERE ""DefaultLlmProvider"" IS NULL;
            ");
        }

        /// <inheritdoc />
        protected override void Down(MigrationBuilder migrationBuilder)
        {
            migrationBuilder.DropColumn(
                name: "DefaultLlmProvider",
                table: "Users");
        }
    }
}
src/Managing.Infrastructure.Database/Migrations/20260103141211_ConvertDefaultLlmProviderToEnum.Designer.cs (generated, new file, 1796 lines; diff suppressed because it is too large)
@@ -0,0 +1,57 @@
using Microsoft.EntityFrameworkCore.Migrations;

#nullable disable

namespace Managing.Infrastructure.Databases.Migrations
{
    /// <inheritdoc />
    public partial class ConvertDefaultLlmProviderToEnum : Migration
    {
        /// <inheritdoc />
        protected override void Up(MigrationBuilder migrationBuilder)
        {
            // Update existing "auto" values to "Auto" (enum format)
            migrationBuilder.Sql(@"
                UPDATE ""Users""
                SET ""DefaultLlmProvider"" = 'Auto'
                WHERE ""DefaultLlmProvider"" = 'auto' OR ""DefaultLlmProvider"" IS NULL;
            ");

            // Alter column to use enum format (stored as text, default "Auto")
            migrationBuilder.AlterColumn<string>(
                name: "DefaultLlmProvider",
                table: "Users",
                type: "text",
                nullable: true,
                defaultValueSql: "'Auto'",
                oldClrType: typeof(string),
                oldType: "character varying(50)",
                oldMaxLength: 50,
                oldNullable: true,
                oldDefaultValue: "auto");
        }

        /// <inheritdoc />
        protected override void Down(MigrationBuilder migrationBuilder)
        {
            // Revert "Auto" values back to "auto" (lowercase)
            migrationBuilder.Sql(@"
                UPDATE ""Users""
                SET ""DefaultLlmProvider"" = 'auto'
                WHERE ""DefaultLlmProvider"" = 'Auto';
            ");

            migrationBuilder.AlterColumn<string>(
                name: "DefaultLlmProvider",
                table: "Users",
                type: "character varying(50)",
                maxLength: 50,
                nullable: true,
                defaultValue: "auto",
                oldClrType: typeof(string),
                oldType: "text",
                oldNullable: true,
                oldDefaultValueSql: "'Auto'");
        }
    }
}
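The casing change is not cosmetic: with HasConversion<string>() the stored text is parsed back into LlmProvider on read, and Enum.Parse is case-sensitive unless asked otherwise, which is presumably why 'auto' rows are normalized to 'Auto' here. A small illustration:

// "Auto" matches the member name exactly and round-trips cleanly.
var ok = Enum.Parse<LlmProvider>("Auto");            // LlmProvider.Auto

// "auto" matches no member name, so this throws ArgumentException.
// var bad = Enum.Parse<LlmProvider>("auto");

// Lowercase input only parses when ignoreCase is requested explicitly.
var lenient = Enum.Parse<LlmProvider>("auto", ignoreCase: true);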
@@ -1441,6 +1441,11 @@ namespace Managing.Infrastructure.Databases.Migrations
                    b.Property<string>("DefaultExchange")
                        .HasColumnType("text");

                    b.Property<string>("DefaultLlmProvider")
                        .ValueGeneratedOnAdd()
                        .HasColumnType("text")
                        .HasDefaultValueSql("'Auto'");

                    b.Property<bool>("EnableAutoswap")
                        .HasColumnType("boolean");
@@ -34,6 +34,9 @@ public class UserEntity
    [Column(TypeName = "decimal(5,4)")] public decimal? SignalAgreementThreshold { get; set; } = 0.5m; // Default: 50% agreement required
    public bool? AllowSignalTrendOverride { get; set; } = true; // Default: Allow signal strategies to override trends
    public TradingExchanges? DefaultExchange { get; set; } = TradingExchanges.GmxV2; // Default exchange

    // User Settings - LLM Configuration
    public LlmProvider? DefaultLlmProvider { get; set; } = LlmProvider.Auto; // Default LLM provider

    // Navigation properties
    public virtual ICollection<AccountEntity> Accounts { get; set; } = new List<AccountEntity>();
@@ -105,6 +105,9 @@ public class ManagingDbContext : DbContext
                .HasConversion<string>(); // Store enum as string
            entity.Property(e => e.DefaultExchange)
                .HasConversion<string>(); // Store enum as string
            entity.Property(e => e.DefaultLlmProvider)
                .HasConversion<string>() // Store enum as string
                .HasDefaultValueSql("'Auto'"); // Default LLM provider

            // Create indexes for performance
            entity.HasIndex(e => e.Name).IsUnique();
@@ -146,6 +146,7 @@ public static class PostgreSqlMappers
            SignalAgreementThreshold = entity.SignalAgreementThreshold,
            AllowSignalTrendOverride = entity.AllowSignalTrendOverride,
            DefaultExchange = entity.DefaultExchange,
            DefaultLlmProvider = entity.DefaultLlmProvider,
            Accounts = entity.Accounts?.Select(MapAccountWithoutUser).ToList() ?? new List<Account>()
        };
    }

@@ -193,7 +194,8 @@ public static class PostgreSqlMappers
            TrendStrongAgreementThreshold = user.TrendStrongAgreementThreshold,
            SignalAgreementThreshold = user.SignalAgreementThreshold,
            AllowSignalTrendOverride = user.AllowSignalTrendOverride,
            DefaultExchange = user.DefaultExchange
            DefaultExchange = user.DefaultExchange,
            DefaultLlmProvider = user.DefaultLlmProvider
        };
    }
@@ -269,6 +269,7 @@ public class PostgreSqlUserRepository : BaseRepositoryWithLogging, IUserReposito
            existingUser.SignalAgreementThreshold = user.SignalAgreementThreshold;
            existingUser.AllowSignalTrendOverride = user.AllowSignalTrendOverride;
            existingUser.DefaultExchange = user.DefaultExchange;
            existingUser.DefaultLlmProvider = user.DefaultLlmProvider;

            _context.Users.Update(existingUser);
20
src/Managing.Mcp/Managing.Mcp.csproj
Normal file
@@ -0,0 +1,20 @@
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <TargetFramework>net8.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="8.0.2"/>
    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="8.0.3"/>
  </ItemGroup>

  <ItemGroup>
    <ProjectReference Include="..\Managing.Application.Abstractions\Managing.Application.Abstractions.csproj"/>
    <ProjectReference Include="..\Managing.Common\Managing.Common.csproj"/>
    <ProjectReference Include="..\Managing.Domain\Managing.Domain.csproj"/>
  </ItemGroup>

</Project>
137
src/Managing.Mcp/Tools/BacktestTools.cs
Normal file
@@ -0,0 +1,137 @@
using Managing.Application.Abstractions.Services;
using Managing.Application.Abstractions.Shared;
using Managing.Domain.Users;
using Microsoft.Extensions.Logging;
using static Managing.Common.Enums;

namespace Managing.Mcp.Tools;

/// <summary>
/// MCP tools for backtest operations
/// </summary>
public class BacktestTools
{
    private readonly IBacktester _backtester;
    private readonly ILogger<BacktestTools> _logger;

    public BacktestTools(IBacktester backtester, ILogger<BacktestTools> logger)
    {
        _backtester = backtester;
        _logger = logger;
    }

    /// <summary>
    /// Retrieves paginated backtests for a user with filtering and sorting capabilities
    /// </summary>
    /// <param name="user">The user requesting the backtests</param>
    /// <param name="page">Page number (defaults to 1)</param>
    /// <param name="pageSize">Number of items per page (defaults to 50, max 100)</param>
    /// <param name="sortBy">Field to sort by (Score, WinRate, GrowthPercentage, etc.)</param>
    /// <param name="sortOrder">Sort order - "asc" or "desc" (defaults to "desc")</param>
    /// <param name="scoreMin">Minimum score filter (0-100)</param>
    /// <param name="scoreMax">Maximum score filter (0-100)</param>
    /// <param name="winrateMin">Minimum winrate filter (0-100)</param>
    /// <param name="winrateMax">Maximum winrate filter (0-100)</param>
    /// <param name="maxDrawdownMax">Maximum drawdown filter</param>
    /// <param name="tickers">Comma-separated list of tickers to filter by</param>
    /// <param name="indicators">Comma-separated list of indicators to filter by</param>
    /// <param name="durationMinDays">Minimum duration in days</param>
    /// <param name="durationMaxDays">Maximum duration in days</param>
    /// <param name="name">Name contains filter</param>
    /// <param name="tradingType">Trading type filter (Spot, Futures, etc.)</param>
    /// <returns>Paginated backtest results with metadata</returns>
    public async Task<object> GetBacktestsPaginated(
        User user,
        int page = 1,
        int pageSize = 50,
        BacktestSortableColumn sortBy = BacktestSortableColumn.Score,
        string sortOrder = "desc",
        double? scoreMin = null,
        double? scoreMax = null,
        int? winrateMin = null,
        int? winrateMax = null,
        decimal? maxDrawdownMax = null,
        string? tickers = null,
        string? indicators = null,
        double? durationMinDays = null,
        double? durationMaxDays = null,
        string? name = null,
        TradingType? tradingType = null)
    {
        try
        {
            // Validate inputs
            if (page < 1) page = 1;
            if (pageSize < 1 || pageSize > 100) pageSize = 50;
            if (sortOrder != "asc" && sortOrder != "desc") sortOrder = "desc";

            // Parse multi-selects if provided
            var tickerList = string.IsNullOrWhiteSpace(tickers)
                ? Array.Empty<string>()
                : tickers.Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
            var indicatorList = string.IsNullOrWhiteSpace(indicators)
                ? Array.Empty<string>()
                : indicators.Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);

            var filter = new BacktestsFilter
            {
                NameContains = string.IsNullOrWhiteSpace(name) ? null : name.Trim(),
                ScoreMin = scoreMin,
                ScoreMax = scoreMax,
                WinrateMin = winrateMin,
                WinrateMax = winrateMax,
                MaxDrawdownMax = maxDrawdownMax,
                Tickers = tickerList,
                Indicators = indicatorList,
                DurationMin = durationMinDays.HasValue ? TimeSpan.FromDays(durationMinDays.Value) : null,
                DurationMax = durationMaxDays.HasValue ? TimeSpan.FromDays(durationMaxDays.Value) : null,
                TradingType = tradingType
            };

            var (backtests, totalCount) = await _backtester.GetBacktestsByUserPaginatedAsync(
                user,
                page,
                pageSize,
                sortBy,
                sortOrder,
                filter);

            var totalPages = (int)Math.Ceiling(totalCount / (double)pageSize);

            return new
            {
                Backtests = backtests.Select(b => new
                {
                    b.Id,
                    b.Config,
                    b.FinalPnl,
                    b.WinRate,
                    b.GrowthPercentage,
                    b.HodlPercentage,
                    b.StartDate,
                    b.EndDate,
                    b.MaxDrawdown,
                    b.Fees,
                    b.SharpeRatio,
                    b.Score,
                    b.ScoreMessage,
                    b.InitialBalance,
                    b.NetPnl,
                    b.PositionCount,
                    TradingType = b.Config.TradingType
                }),
                TotalCount = totalCount,
                CurrentPage = page,
                PageSize = pageSize,
                TotalPages = totalPages,
                HasNextPage = page < totalPages,
                HasPreviousPage = page > 1
            };
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Error getting paginated backtests for user {UserId}", user.Id);
            throw new InvalidOperationException($"Failed to retrieve backtests: {ex.Message}", ex);
        }
    }
}
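For orientation, a minimal sketch of calling this tool directly (assumptions: backtester and loggerFactory come from the host's DI container, and user is the authenticated caller resolved elsewhere):

var tools = new BacktestTools(backtester, loggerFactory.CreateLogger<BacktestTools>());

// Top ten backtests for BTC or ETH scoring at least 70, best score first.
var result = await tools.GetBacktestsPaginated(
    user,
    page: 1,
    pageSize: 10,
    scoreMin: 70,
    tickers: "BTC,ETH");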
@@ -61,6 +61,7 @@ export interface User {
  signalAgreementThreshold?: number | null;
  allowSignalTrendOverride?: boolean | null;
  defaultExchange?: TradingExchanges | null;
  defaultLlmProvider?: LlmProvider | null;
}

export enum Confidence {
@@ -70,6 +71,13 @@ export enum Confidence {
  None = "None",
}

export enum LlmProvider {
  Auto = "Auto",
  Gemini = "Gemini",
  OpenAI = "OpenAI",
  Claude = "Claude",
}

export interface Balance {
  tokenImage?: string | null;
  tokenName?: string | null;
@@ -1435,6 +1443,57 @@ export interface JobStatusTypeSummary {
  count?: number;
}

export interface LlmChatResponse {
  content?: string;
  provider?: string;
  model?: string;
  toolCalls?: LlmToolCall[] | null;
  usage?: LlmUsage | null;
  requiresToolExecution?: boolean;
}

export interface LlmToolCall {
  id?: string;
  name?: string;
  arguments?: { [key: string]: any; };
}

export interface LlmUsage {
  promptTokens?: number;
  completionTokens?: number;
  totalTokens?: number;
}

export interface LlmChatRequest {
  messages?: LlmMessage[];
  provider?: string | null;
  apiKey?: string | null;
  stream?: boolean;
  temperature?: number;
  maxTokens?: number;
  tools?: McpToolDefinition[] | null;
}

export interface LlmMessage {
  role?: string;
  content?: string;
  toolCalls?: LlmToolCall[] | null;
  toolCallId?: string | null;
}

export interface McpToolDefinition {
  name?: string;
  description?: string;
  parameters?: { [key: string]: McpParameterDefinition; };
}

export interface McpParameterDefinition {
  type?: string;
  description?: string;
  required?: boolean;
  defaultValue?: any | null;
}

export interface ScenarioViewModel {
  name: string;
  indicators: IndicatorViewModel[];
@@ -1,4 +1,5 @@
import { Auth } from '../pages/authPage/auth'
import AiChatButton from '../components/organism/AiChatButton'

import MyRoutes from './routes'

@@ -6,6 +7,7 @@ const App = () => {
  return (
    <Auth>
      <MyRoutes />
      <AiChatButton />
    </Auth>
  )
}
224
src/Managing.WebApp/src/components/organism/AiChat.tsx
Normal file
@@ -0,0 +1,224 @@
import { useState, useRef, useEffect } from 'react'
import { LlmClient } from '../../generated/ManagingApi'
import { LlmMessage, LlmChatResponse } from '../../generated/ManagingApiTypes'
import { AiChatService } from '../../services/aiChatService'
import useApiUrlStore from '../../app/store/apiStore'

interface Message {
  role: 'user' | 'assistant' | 'system'
  content: string
  timestamp: Date
}

interface AiChatProps {
  onClose?: () => void
}

function AiChat({ onClose }: AiChatProps): JSX.Element {
  const [messages, setMessages] = useState<Message[]>([
    {
      role: 'system',
      content: 'You are a helpful AI assistant for the Managing trading platform. You can help users query their backtests, analyze trading strategies, and provide insights.',
      timestamp: new Date()
    }
  ])
  const [input, setInput] = useState('')
  const [isLoading, setIsLoading] = useState(false)
  const [provider, setProvider] = useState<string>('auto')
  const [availableProviders, setAvailableProviders] = useState<string[]>([])
  const messagesEndRef = useRef<HTMLDivElement>(null)
  const { apiUrl, userToken } = useApiUrlStore()

  useEffect(() => {
    scrollToBottom()
  }, [messages])

  useEffect(() => {
    loadProviders()
  }, [])

  const scrollToBottom = () => {
    messagesEndRef.current?.scrollIntoView({ behavior: 'smooth' })
  }

  const loadProviders = async () => {
    try {
      const llmClient = new LlmClient({}, apiUrl)
      const service = new AiChatService(llmClient)
      const providers = await service.getProviders()
      setAvailableProviders(['auto', ...providers])
    } catch (error) {
      console.error('Failed to load providers:', error)
    }
  }

  const sendMessage = async () => {
    if (!input.trim() || isLoading) return

    const userMessage: Message = {
      role: 'user',
      content: input,
      timestamp: new Date()
    }

    setMessages(prev => [...prev, userMessage])
    setInput('')
    setIsLoading(true)

    try {
      const llmClient = new LlmClient({}, apiUrl)
      const service = new AiChatService(llmClient)

      // Convert messages to LlmMessage format
      const llmMessages: LlmMessage[] = messages
        .filter(m => m.role !== 'system' || messages.indexOf(m) === 0) // Include only first system message
        .map(m => ({
          role: m.role,
          content: m.content,
          toolCalls: undefined,
          toolCallId: undefined
        }))

      // Add the new user message
      llmMessages.push({
        role: 'user',
        content: input,
        toolCalls: undefined,
        toolCallId: undefined
      })

      const response: LlmChatResponse = await service.sendMessage(
        llmMessages,
        provider === 'auto' ? undefined : provider
      )

      const assistantMessage: Message = {
        role: 'assistant',
        content: response.content || 'No response from AI',
        timestamp: new Date()
      }

      setMessages(prev => [...prev, assistantMessage])
    } catch (error: any) {
      console.error('Error sending message:', error)
      const errorMessage: Message = {
        role: 'assistant',
        content: `Error: ${error?.message || 'Failed to get response from AI'}`,
        timestamp: new Date()
      }
      setMessages(prev => [...prev, errorMessage])
    } finally {
      setIsLoading(false)
    }
  }

  const handleKeyPress = (e: React.KeyboardEvent<HTMLTextAreaElement>) => {
    if (e.key === 'Enter' && !e.shiftKey) {
      e.preventDefault()
      sendMessage()
    }
  }

  return (
    <div className="flex flex-col h-full bg-base-100">
      {/* Header */}
      <div className="flex items-center justify-between p-4 border-b border-base-300">
        <div className="flex items-center gap-3">
          <div className="w-8 h-8 bg-primary rounded-full flex items-center justify-center">
            <svg className="w-5 h-5 text-primary-content" fill="none" stroke="currentColor" viewBox="0 0 24 24">
              <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M9.663 17h4.673M12 3v1m6.364 1.636l-.707.707M21 12h-1M4 12H3m3.343-5.657l-.707-.707m2.828 9.9a5 5 0 117.072 0l-.548.547A3.374 3.374 0 0014 18.469V19a2 2 0 11-4 0v-.531c0-.895-.356-1.754-.988-2.386l-.548-.547z" />
            </svg>
          </div>
          <div>
            <h2 className="font-bold text-lg">AI Assistant</h2>
            <p className="text-sm text-base-content/60">Powered by MCP</p>
          </div>
        </div>
        <div className="flex items-center gap-2">
          {/* Provider Selection */}
          <select
            value={provider}
            onChange={(e) => setProvider(e.target.value)}
            className="select select-sm select-bordered"
            disabled={isLoading}
          >
            {availableProviders.map(p => (
              <option key={p} value={p}>
                {p === 'auto' ? 'Auto (Backend Selects)' : p.charAt(0).toUpperCase() + p.slice(1)}
              </option>
            ))}
          </select>
          {onClose && (
            <button onClick={onClose} className="btn btn-sm btn-ghost btn-circle">
              <svg className="w-5 h-5" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M6 18L18 6M6 6l12 12" />
              </svg>
            </button>
          )}
        </div>
      </div>

      {/* Messages */}
      <div className="flex-1 overflow-y-auto p-4 space-y-4">
        {messages.filter(m => m.role !== 'system').map((message, index) => (
          <div
            key={index}
            className={`flex ${message.role === 'user' ? 'justify-end' : 'justify-start'}`}
          >
            <div
              className={`max-w-[80%] p-3 rounded-lg ${
                message.role === 'user'
                  ? 'bg-primary text-primary-content'
                  : 'bg-base-200 text-base-content'
              }`}
            >
              <p className="whitespace-pre-wrap break-words">{message.content}</p>
              <p className="text-xs opacity-60 mt-1">
                {message.timestamp.toLocaleTimeString()}
              </p>
            </div>
          </div>
        ))}
        {isLoading && (
          <div className="flex justify-start">
            <div className="bg-base-200 p-3 rounded-lg">
              <div className="flex gap-1">
                <span className="loading loading-dots loading-sm"></span>
              </div>
            </div>
          </div>
        )}
        <div ref={messagesEndRef} />
      </div>

      {/* Input */}
      <div className="p-4 border-t border-base-300">
        <div className="flex gap-2">
          <textarea
            value={input}
            onChange={(e) => setInput(e.target.value)}
            onKeyPress={handleKeyPress}
            placeholder="Ask me anything about your backtests..."
            className="textarea textarea-bordered flex-1 resize-none"
            rows={2}
            disabled={isLoading}
          />
          <button
            onClick={sendMessage}
            disabled={isLoading || !input.trim()}
            className="btn btn-primary"
          >
            <svg className="w-5 h-5" fill="none" stroke="currentColor" viewBox="0 0 24 24">
              <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 19l9 2-9-18-9 18 9-2zm0 0v-8" />
            </svg>
          </button>
        </div>
        <p className="text-xs text-base-content/60 mt-2">
          Press Enter to send, Shift+Enter for new line
        </p>
      </div>
    </div>
  )
}

export default AiChat
32
src/Managing.WebApp/src/components/organism/AiChatButton.tsx
Normal file
@@ -0,0 +1,32 @@
import { useState } from 'react'
import AiChat from './AiChat'

function AiChatButton(): JSX.Element {
  const [isOpen, setIsOpen] = useState(false)

  return (
    <>
      {/* Floating Chat Button */}
      {!isOpen && (
        <button
          onClick={() => setIsOpen(true)}
          className="fixed bottom-6 right-6 btn btn-circle btn-primary btn-lg shadow-lg z-50 hover:scale-110 transition-transform"
          aria-label="Open AI Chat"
        >
          <svg className="w-6 h-6" fill="none" stroke="currentColor" viewBox="0 0 24 24">
            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M8 10h.01M12 10h.01M16 10h.01M9 16H5a2 2 0 01-2-2V6a2 2 0 012-2h14a2 2 0 012 2v8a2 2 0 01-2 2h-5l-5 5v-5z" />
          </svg>
        </button>
      )}

      {/* Chat Window */}
      {isOpen && (
        <div className="fixed bottom-6 right-6 w-[400px] h-[600px] bg-base-100 rounded-lg shadow-2xl z-50 border border-base-300 flex flex-col overflow-hidden">
          <AiChat onClose={() => setIsOpen(false)} />
        </div>
      )}
    </>
  )
}

export default AiChatButton
@@ -2899,6 +2899,127 @@ export class JobClient extends AuthorizedApiBase {
    }
}

export class LlmClient extends AuthorizedApiBase {
    private http: { fetch(url: RequestInfo, init?: RequestInit): Promise<Response> };
    private baseUrl: string;
    protected jsonParseReviver: ((key: string, value: any) => any) | undefined = undefined;

    constructor(configuration: IConfig, baseUrl?: string, http?: { fetch(url: RequestInfo, init?: RequestInit): Promise<Response> }) {
        super(configuration);
        this.http = http ? http : window as any;
        this.baseUrl = baseUrl ?? "http://localhost:5000";
    }

    llm_Chat(request: LlmChatRequest): Promise<LlmChatResponse> {
        let url_ = this.baseUrl + "/Llm/Chat";
        url_ = url_.replace(/[?&]$/, "");

        const content_ = JSON.stringify(request);

        let options_: RequestInit = {
            body: content_,
            method: "POST",
            headers: {
                "Content-Type": "application/json",
                "Accept": "application/json"
            }
        };

        return this.transformOptions(options_).then(transformedOptions_ => {
            return this.http.fetch(url_, transformedOptions_);
        }).then((_response: Response) => {
            return this.processLlm_Chat(_response);
        });
    }

    protected processLlm_Chat(response: Response): Promise<LlmChatResponse> {
        const status = response.status;
        let _headers: any = {}; if (response.headers && response.headers.forEach) { response.headers.forEach((v: any, k: any) => _headers[k] = v); };
        if (status === 200) {
            return response.text().then((_responseText) => {
                let result200: any = null;
                result200 = _responseText === "" ? null : JSON.parse(_responseText, this.jsonParseReviver) as LlmChatResponse;
                return result200;
            });
        } else if (status !== 200 && status !== 204) {
            return response.text().then((_responseText) => {
                return throwException("An unexpected server error occurred.", status, _responseText, _headers);
            });
        }
        return Promise.resolve<LlmChatResponse>(null as any);
    }

    llm_GetProviders(): Promise<string[]> {
        let url_ = this.baseUrl + "/Llm/Providers";
        url_ = url_.replace(/[?&]$/, "");

        let options_: RequestInit = {
            method: "GET",
            headers: {
                "Accept": "application/json"
            }
        };

        return this.transformOptions(options_).then(transformedOptions_ => {
            return this.http.fetch(url_, transformedOptions_);
        }).then((_response: Response) => {
            return this.processLlm_GetProviders(_response);
        });
    }

    protected processLlm_GetProviders(response: Response): Promise<string[]> {
        const status = response.status;
        let _headers: any = {}; if (response.headers && response.headers.forEach) { response.headers.forEach((v: any, k: any) => _headers[k] = v); };
        if (status === 200) {
            return response.text().then((_responseText) => {
                let result200: any = null;
                result200 = _responseText === "" ? null : JSON.parse(_responseText, this.jsonParseReviver) as string[];
                return result200;
            });
        } else if (status !== 200 && status !== 204) {
            return response.text().then((_responseText) => {
                return throwException("An unexpected server error occurred.", status, _responseText, _headers);
            });
        }
        return Promise.resolve<string[]>(null as any);
    }

    llm_GetTools(): Promise<McpToolDefinition[]> {
        let url_ = this.baseUrl + "/Llm/Tools";
        url_ = url_.replace(/[?&]$/, "");

        let options_: RequestInit = {
            method: "GET",
            headers: {
                "Accept": "application/json"
            }
        };

        return this.transformOptions(options_).then(transformedOptions_ => {
            return this.http.fetch(url_, transformedOptions_);
        }).then((_response: Response) => {
            return this.processLlm_GetTools(_response);
        });
    }

    protected processLlm_GetTools(response: Response): Promise<McpToolDefinition[]> {
        const status = response.status;
        let _headers: any = {}; if (response.headers && response.headers.forEach) { response.headers.forEach((v: any, k: any) => _headers[k] = v); };
        if (status === 200) {
            return response.text().then((_responseText) => {
                let result200: any = null;
                result200 = _responseText === "" ? null : JSON.parse(_responseText, this.jsonParseReviver) as McpToolDefinition[];
                return result200;
            });
        } else if (status !== 200 && status !== 204) {
            return response.text().then((_responseText) => {
                return throwException("An unexpected server error occurred.", status, _responseText, _headers);
            });
        }
        return Promise.resolve<McpToolDefinition[]>(null as any);
    }
}

export class MoneyManagementClient extends AuthorizedApiBase {
    private http: { fetch(url: RequestInfo, init?: RequestInit): Promise<Response> };
    private baseUrl: string;
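The generated client ultimately issues a plain JSON POST, so the same endpoint can be exercised from any HTTP client. A minimal C# sketch (assumptions: the API listens on the generated default http://localhost:5000 and token holds a valid bearer token):

using System.Net.Http.Headers;
using System.Net.Http.Json;

using var http = new HttpClient { BaseAddress = new Uri("http://localhost:5000") };
http.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", token);

// POST /Llm/Chat with one user message; "auto" lets the backend pick the provider.
var reply = await http.PostAsJsonAsync("/Llm/Chat", new
{
    messages = new[] { new { role = "user", content = "Summarize my best backtest." } },
    provider = "auto"
});
reply.EnsureSuccessStatusCode();
Console.WriteLine(await reply.Content.ReadAsStringAsync());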
@@ -4388,6 +4509,45 @@ export class UserClient extends AuthorizedApiBase {
        return Promise.resolve<User>(null as any);
    }

    user_UpdateDefaultLlmProvider(defaultLlmProvider: string): Promise<User> {
        let url_ = this.baseUrl + "/User/default-llm-provider";
        url_ = url_.replace(/[?&]$/, "");

        const content_ = JSON.stringify(defaultLlmProvider);

        let options_: RequestInit = {
            body: content_,
            method: "PUT",
            headers: {
                "Content-Type": "application/json",
                "Accept": "application/json"
            }
        };

        return this.transformOptions(options_).then(transformedOptions_ => {
            return this.http.fetch(url_, transformedOptions_);
        }).then((_response: Response) => {
            return this.processUser_UpdateDefaultLlmProvider(_response);
        });
    }

    protected processUser_UpdateDefaultLlmProvider(response: Response): Promise<User> {
        const status = response.status;
        let _headers: any = {}; if (response.headers && response.headers.forEach) { response.headers.forEach((v: any, k: any) => _headers[k] = v); };
        if (status === 200) {
            return response.text().then((_responseText) => {
                let result200: any = null;
                result200 = _responseText === "" ? null : JSON.parse(_responseText, this.jsonParseReviver) as User;
                return result200;
            });
        } else if (status !== 200 && status !== 204) {
            return response.text().then((_responseText) => {
                return throwException("An unexpected server error occurred.", status, _responseText, _headers);
            });
        }
        return Promise.resolve<User>(null as any);
    }

    user_TestTelegramChannel(): Promise<string> {
        let url_ = this.baseUrl + "/User/telegram-channel/test";
        url_ = url_.replace(/[?&]$/, "");
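Note that the body here is JSON.stringify of a bare string, so the endpoint expects a JSON string literal rather than a wrapper object. Equivalent in C#, reusing the http client from the sketch above:

// PUT /User/default-llm-provider with the body "Claude" (a quoted JSON string).
var res = await http.PutAsJsonAsync("/User/default-llm-provider", "Claude");
res.EnsureSuccessStatusCode();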
@@ -4690,6 +4850,7 @@ export interface User {
  signalAgreementThreshold?: number | null;
  allowSignalTrendOverride?: boolean | null;
  defaultExchange?: TradingExchanges | null;
  defaultLlmProvider?: LlmProvider | null;
}

export enum Confidence {
@@ -4699,6 +4860,13 @@ export enum Confidence {
  None = "None",
}

export enum LlmProvider {
  Auto = "Auto",
  Gemini = "Gemini",
  OpenAI = "OpenAI",
  Claude = "Claude",
}

export interface Balance {
  tokenImage?: string | null;
  tokenName?: string | null;
@@ -6064,6 +6232,57 @@ export interface JobStatusTypeSummary {
  count?: number;
}

export interface LlmChatResponse {
  content?: string;
  provider?: string;
  model?: string;
  toolCalls?: LlmToolCall[] | null;
  usage?: LlmUsage | null;
  requiresToolExecution?: boolean;
}

export interface LlmToolCall {
  id?: string;
  name?: string;
  arguments?: { [key: string]: any; };
}

export interface LlmUsage {
  promptTokens?: number;
  completionTokens?: number;
  totalTokens?: number;
}

export interface LlmChatRequest {
  messages?: LlmMessage[];
  provider?: string | null;
  apiKey?: string | null;
  stream?: boolean;
  temperature?: number;
  maxTokens?: number;
  tools?: McpToolDefinition[] | null;
}

export interface LlmMessage {
  role?: string;
  content?: string;
  toolCalls?: LlmToolCall[] | null;
  toolCallId?: string | null;
}

export interface McpToolDefinition {
  name?: string;
  description?: string;
  parameters?: { [key: string]: McpParameterDefinition; };
}

export interface McpParameterDefinition {
  type?: string;
  description?: string;
  required?: boolean;
  defaultValue?: any | null;
}

export interface ScenarioViewModel {
  name: string;
  indicators: IndicatorViewModel[];
43
src/Managing.WebApp/src/services/aiChatService.ts
Normal file
@@ -0,0 +1,43 @@
import { LlmClient } from '../generated/ManagingApi'
import { LlmChatRequest, LlmChatResponse, LlmMessage } from '../generated/ManagingApiTypes'

export class AiChatService {
  private llmClient: LlmClient

  constructor(llmClient: LlmClient) {
    this.llmClient = llmClient
  }

  /**
   * Send a chat message to the AI with MCP tool calling support
   */
  async sendMessage(messages: LlmMessage[], provider?: string, apiKey?: string): Promise<LlmChatResponse> {
    const request: LlmChatRequest = {
      messages,
      provider: provider || 'auto',
      apiKey: apiKey,
      stream: false,
      temperature: 0.7,
      maxTokens: 4096,
      tools: undefined // Will be populated by backend
    }

    return await this.llmClient.llm_Chat(request)
  }

  /**
   * Get available LLM providers
   */
  async getProviders(): Promise<string[]> {
    return await this.llmClient.llm_GetProviders()
  }

  /**
   * Get available MCP tools
   */
  async getTools(): Promise<any[]> {
    return await this.llmClient.llm_GetTools()
  }
}

export default AiChatService
@@ -72,6 +72,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Managing.Domain.Tests", "Ma
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Managing.AppHost", "Managing.AppHost\Managing.AppHost.csproj", "{4712128B-F222-47C4-A347-AFF4E5BA02AE}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Managing.Mcp", "Managing.Mcp\Managing.Mcp.csproj", "{601B1A5B-568A-4238-AB93-78390FC52D91}"
EndProject
Global
    GlobalSection(SolutionConfigurationPlatforms) = preSolution
        Debug|Any CPU = Debug|Any CPU
@@ -256,6 +258,14 @@ Global
        {4712128B-F222-47C4-A347-AFF4E5BA02AE}.Release|Any CPU.Build.0 = Release|Any CPU
        {4712128B-F222-47C4-A347-AFF4E5BA02AE}.Release|x64.ActiveCfg = Release|Any CPU
        {4712128B-F222-47C4-A347-AFF4E5BA02AE}.Release|x64.Build.0 = Release|Any CPU
        {601B1A5B-568A-4238-AB93-78390FC52D91}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
        {601B1A5B-568A-4238-AB93-78390FC52D91}.Debug|Any CPU.Build.0 = Debug|Any CPU
        {601B1A5B-568A-4238-AB93-78390FC52D91}.Debug|x64.ActiveCfg = Debug|Any CPU
        {601B1A5B-568A-4238-AB93-78390FC52D91}.Debug|x64.Build.0 = Debug|Any CPU
        {601B1A5B-568A-4238-AB93-78390FC52D91}.Release|Any CPU.ActiveCfg = Release|Any CPU
        {601B1A5B-568A-4238-AB93-78390FC52D91}.Release|Any CPU.Build.0 = Release|Any CPU
        {601B1A5B-568A-4238-AB93-78390FC52D91}.Release|x64.ActiveCfg = Release|Any CPU
        {601B1A5B-568A-4238-AB93-78390FC52D91}.Release|x64.Build.0 = Release|Any CPU
    EndGlobalSection
    GlobalSection(SolutionProperties) = preSolution
        HideSolutionNode = FALSE
@@ -281,6 +291,7 @@ Global
        {B7D66A73-CA3A-4DE5-8E88-59D50C4018A6} = {A1296069-2816-43D4-882C-516BCB718D03}
        {55B059EF-F128-453F-B678-0FF00F1D2E95} = {8F2ECEA7-5BCA-45DF-B6E3-88AADD7AFD45}
        {3F835B88-4720-49C2-A4A5-FED2C860C4C4} = {8F2ECEA7-5BCA-45DF-B6E3-88AADD7AFD45}
        {601B1A5B-568A-4238-AB93-78390FC52D91} = {A1296069-2816-43D4-882C-516BCB718D03}
    EndGlobalSection
    GlobalSection(ExtensibilityGlobals) = postSolution
        SolutionGuid = {BD7CA081-CE52-4824-9777-C0562E54F3EA}