diff --git a/src/Managing.Api/Controllers/LlmController.cs b/src/Managing.Api/Controllers/LlmController.cs
index 8fa26dde..748a63e4 100644
--- a/src/Managing.Api/Controllers/LlmController.cs
+++ b/src/Managing.Api/Controllers/LlmController.cs
@@ -408,6 +408,13 @@ public class LlmController : BaseController
             // Add tool results to conversation history
             chatRequest.Messages.AddRange(toolResults);
 
+            // Add a system reminder to prevent redundant tool calls
+            chatRequest.Messages.Add(new LlmMessage
+            {
+                Role = "system",
+                Content = "You now have all the data from the tool calls above. Analyze this data and provide your final answer to the user. DO NOT call the same tools again with the same arguments."
+            });
+
             // Add delay after tool calls before next LLM call to avoid rate limits
             await Task.Delay(DelayAfterToolCallsMs);
 
@@ -417,7 +424,7 @@ public class LlmController : BaseController
-        // If we hit max iterations, return the last response (even if it has tool calls)
+        // If we hit max iterations, force a final text-only response
         if (finalResponse == null)
         {
-            logger.LogWarning("Reached max iterations ({MaxIterations}) for user {UserId}. Returning last response.",
+            logger.LogWarning("Reached max iterations ({MaxIterations}) for user {UserId}. Forcing final response without tools.",
                 maxIterations, user.Id);
 
             await SendProgressUpdate(connectionId, hubContext, logger, new LlmProgressUpdate
@@ -428,6 +435,16 @@ public class LlmController : BaseController
                 MaxIterations = maxIterations
             });
 
+            // Remove tools to force a text-only response
+            chatRequest.Tools = null;
+
+            // Add explicit instruction for final answer
+            chatRequest.Messages.Add(new LlmMessage
+            {
+                Role = "system",
+                Content = "This is your FINAL iteration. You MUST provide a complete answer to the user's question based on the data you've already retrieved. DO NOT request any tool calls. Summarize and analyze the information you have."
+            });
+
             finalResponse = await llmService.ChatAsync(user, chatRequest);
         }
 
@@ -604,6 +621,13 @@ public class LlmController : BaseController
             // Add tool results to conversation history
             request.Messages.AddRange(toolResults);
 
+            // Add a system reminder to prevent redundant tool calls
+            request.Messages.Add(new LlmMessage
+            {
+                Role = "system",
+                Content = "You now have all the data from the tool calls above. Analyze this data and provide your final answer to the user. DO NOT call the same tools again with the same arguments."
+            });
+
             // Add delay after tool calls before next LLM call to avoid rate limits
             await Task.Delay(DelayAfterToolCallsMs);
 
@@ -613,8 +637,19 @@ public class LlmController : BaseController
-        // If we hit max iterations, return the last response (even if it has tool calls)
+        // If we hit max iterations, force a final text-only response
         if (finalResponse == null)
        {
-            _logger.LogWarning("Reached max iterations ({MaxIterations}) for user {UserId}. Returning last response.",
+            _logger.LogWarning("Reached max iterations ({MaxIterations}) for user {UserId}. Forcing final response without tools.",
                 maxIterations, user.Id);
+
+            // Remove tools to force a text-only response
+            request.Tools = null;
+
+            // Add explicit instruction for final answer
+            request.Messages.Add(new LlmMessage
+            {
+                Role = "system",
+                Content = "This is your FINAL iteration. You MUST provide a complete answer to the user's question based on the data you've already retrieved. DO NOT request any tool calls. Summarize and analyze the information you have."
+            });
+
             // Get one more response to return something meaningful
             finalResponse = await _llmService.ChatAsync(user, request);
         }
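
Note: both endpoints above now share the same give-up sequence (drop the tool definitions, pin a final system instruction, make one last call). A minimal sketch of that pattern in isolation follows. The helper name and the ILlmService/LlmChatRequest shapes are hypothetical; LlmMessage, Tools, Role/Content, and ChatAsync are taken from the hunks above.

    // Hypothetical helper mirroring the fallback added in both endpoints.
    // Only LlmMessage, Tools, and ChatAsync come from the diff; the service
    // and request types are assumed shapes, not the project's real signatures.
    private static async Task<LlmChatResponse> ForceFinalAnswerAsync(
        ILlmService llmService, User user, LlmChatRequest request)
    {
        // With no tool definitions in the request, the provider cannot
        // return structured tool calls.
        request.Tools = null;

        // State the constraint explicitly to the model as well.
        request.Messages.Add(new LlmMessage
        {
            Role = "system",
            Content = "This is your FINAL iteration. Answer from the data you already have. DO NOT request any tool calls."
        });

        // One last round trip; the response should now be text-only.
        return await llmService.ChatAsync(user, request);
    }
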
@@ -684,11 +719,11 @@ public class LlmController : BaseController
             (lastMessage.Contains("detail") || lastMessage.Contains("analyze") || lastMessage.Contains("position")))
             return 4;
 
-        // Simple backtest queries ("best", "top", "show") only need 2 iterations (fetch + respond)
+        // Simple backtest queries ("best", "top", "show") need 3 iterations (fetch + possible retry + respond)
         if (lastMessage.Contains("backtest") &&
             (lastMessage.Contains("best") || lastMessage.Contains("top") || lastMessage.Contains("show") ||
              lastMessage.Contains("recent") || lastMessage.Contains("latest")))
-            return 2;
+            return 3;
 
         // General analysis queries
         if (lastMessage.Contains("analyze"))
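
Why the budget for simple backtest queries moves from 2 to 3: each pass through the agent loop is one LLM round trip, and any turn that requests tools consumes a whole pass. A sketch of the accounting, with an assumed loop shape inferred from the hunks above (ToolCalls and the exact field names are not from the diff):

    // Assumed loop shape, for illustration only.
    for (var iteration = 1; iteration <= maxIterations; iteration++)
    {
        var response = await llmService.ChatAsync(user, chatRequest); // one round trip
        if (response.ToolCalls == null || response.ToolCalls.Count == 0)
        {
            finalResponse = response; // text-only answer: done
            break;
        }
        // Tool turn: execute the calls, append the results, loop again.
    }
    // With maxIterations = 2, a fetch followed by a retried fetch exhausts the
    // budget before the model can answer; 3 leaves room for fetch, retry, respond.
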
diff --git a/src/Managing.WebApp/src/components/organism/AiChat.tsx b/src/Managing.WebApp/src/components/organism/AiChat.tsx
index 2c003b51..cab80de6 100644
--- a/src/Managing.WebApp/src/components/organism/AiChat.tsx
+++ b/src/Managing.WebApp/src/components/organism/AiChat.tsx
@@ -34,6 +34,7 @@ function AiChat({ onClose }: AiChatProps): JSX.Element {
   const [messageHistory, setMessageHistory] = useState([])
   const [historyIndex, setHistoryIndex] = useState(-1)
   const [tempInput, setTempInput] = useState('')
+  const [developerMode, setDeveloperMode] = useState(false)
   const messagesEndRef = useRef(null)
 
   const { apiUrl } = useApiUrlStore()
@@ -105,6 +106,17 @@ function AiChat({ onClose }: AiChatProps): JSX.Element {
       let finalResponse: LlmChatResponse | null = null
       let lastUpdate: LlmProgressUpdate | null = null
 
+      // Messages to filter out (internal/system messages not useful for users)
+      // These only appear when developerMode is enabled
+      const filteredMessages = [
+        'Sending request to LLM...',
+        'Waiting briefly to respect rate limits...',
+        'Initializing conversation and loading available tools...'
+      ]
+
+      // Pattern to match "Loaded X available tools. Preparing system context..."
+      const loadedToolsPattern = /^Loaded \d+ available tools\. Preparing system context\.\.\.$/
+
       for await (const update of service.sendMessageStream(
         llmMessages,
         provider === 'auto' ? undefined : provider
@@ -112,18 +124,26 @@ function AiChat({ onClose }: AiChatProps): JSX.Element {
         lastUpdate = update
         setCurrentProgress(update)
 
-        // Add progress messages to chat history (except final_response)
+        // Add progress messages to chat history (except final_response and filtered messages)
         if (update.type !== 'final_response') {
-          const progressMessage: Message = {
-            role: 'progress',
-            content: update.message || '',
-            timestamp: new Date(),
-            progressType: update.type,
-            iteration: update.iteration,
-            maxIterations: update.maxIterations,
-            toolName: update.toolName
+          const message = update.message || ''
+          // Only filter these messages if developer mode is disabled
+          const shouldFilter = !developerMode && (
+            filteredMessages.includes(message) || loadedToolsPattern.test(message)
+          )
+
+          if (!shouldFilter) {
+            const progressMessage: Message = {
+              role: 'progress',
+              content: message,
+              timestamp: new Date(),
+              progressType: update.type,
+              iteration: update.iteration,
+              maxIterations: update.maxIterations,
+              toolName: update.toolName
+            }
+            setMessages(prev => [...prev, progressMessage])
           }
-          setMessages(prev => [...prev, progressMessage])
         }
 
         // Handle different update types
@@ -256,6 +276,18 @@ function AiChat({ onClose }: AiChatProps): JSX.Element {
+          {/* Developer Mode Toggle */}
+
+ +
           {/* Provider Selection */}