Enhance LlmController and AiChat component with system reminders and developer mode

- Added system reminders in LlmController that discourage redundant tool calls after each tool round-trip and, once the iteration budget is exhausted, strip the tools and force a text-only final response (see the loop sketch below).
- Added a developer-mode toggle to the AiChat component; internal progress messages (request, rate-limit, and initialization chatter) are now hidden by default and shown only when the toggle is on.
- Reworked progress-message handling so tool-execution and progress updates render more cleanly in the chat history.
- Raised the iteration budget for simple backtest queries ("best", "top", "show", "recent", "latest") from 2 to 3 to leave room for one retry before the final answer.
commit 3fd9463682 (parent 1b08655dfa)
Date: 2026-01-06 23:32:29 +07:00
2 changed files with 81 additions and 14 deletions
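
For context, every controller hunk below lands inside the agentic tool-call loop: the model is called, any requested tool calls are executed, the results are appended to the conversation, and the loop repeats until the model answers in plain text or the iteration budget runs out. The sketch below shows that control flow in miniature. It is a minimal, self-contained reconstruction, not the real implementation: every type and helper name in it (LlmResponse, chatAsync, executeToolsAsync, the delay constant) is an illustrative stand-in, and only the identifiers quoted in the diff itself come from the source.

// Minimal, self-contained sketch of the tool-call loop this commit hardens.
// All names here are illustrative stand-ins, not the real LlmController API;
// only the control flow mirrors the hunks below.
using System;
using System.Collections.Generic;
using System.Threading.Tasks;

public record LlmMessage(string Role, string Content);
public record LlmResponse(string? Text, IReadOnlyList<string>? ToolCalls);

public static class ToolLoopSketch
{
    private const int DelayAfterToolCallsMs = 1_000; // assumed value

    public static async Task<LlmResponse> RunAsync(
        List<LlmMessage> messages,
        List<string>? tools,
        int maxIterations,
        Func<List<LlmMessage>, List<string>?, Task<LlmResponse>> chatAsync,
        Func<IReadOnlyList<string>, Task<List<LlmMessage>>> executeToolsAsync)
    {
        LlmResponse? finalResponse = null;

        for (var i = 0; i < maxIterations; i++)
        {
            var response = await chatAsync(messages, tools);

            // No tool calls requested: this is the final, text-only answer.
            if (response.ToolCalls is not { Count: > 0 })
            {
                finalResponse = response;
                break;
            }

            // Execute the requested tools and feed the results back.
            messages.AddRange(await executeToolsAsync(response.ToolCalls));

            // New in this commit: remind the model not to repeat identical calls.
            messages.Add(new LlmMessage("system",
                "You now have all the data from the tool calls above. ..."));

            await Task.Delay(DelayAfterToolCallsMs);
        }

        if (finalResponse == null)
        {
            // New in this commit: budget exhausted, so strip the tools and
            // demand a text-only answer instead of returning dangling tool calls.
            tools = null;
            messages.Add(new LlmMessage("system", "This is your FINAL iteration. ..."));
            finalResponse = await chatAsync(messages, tools);
        }

        return finalResponse;
    }
}

Clearing the tool list before the forced final call is the load-bearing part: an OpenAI-style chat API cannot emit a tool call when no tools are offered, so the text-only fallback is enforced by the API surface rather than by the prompt alone.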


@@ -408,6 +408,13 @@ public class LlmController : BaseController
             // Add tool results to conversation history
             chatRequest.Messages.AddRange(toolResults);

+            // Add a system reminder to prevent redundant tool calls
+            chatRequest.Messages.Add(new LlmMessage
+            {
+                Role = "system",
+                Content = "You now have all the data from the tool calls above. Analyze this data and provide your final answer to the user. DO NOT call the same tools again with the same arguments."
+            });
+
             // Add delay after tool calls before next LLM call to avoid rate limits
             await Task.Delay(DelayAfterToolCallsMs);
@@ -417,7 +424,7 @@ public class LlmController : BaseController
         // If we hit max iterations, return the last response (even if it has tool calls)
         if (finalResponse == null)
         {
-            logger.LogWarning("Reached max iterations ({MaxIterations}) for user {UserId}. Returning last response.",
+            logger.LogWarning("Reached max iterations ({MaxIterations}) for user {UserId}. Forcing final response without tools.",
                 maxIterations, user.Id);

             await SendProgressUpdate(connectionId, hubContext, logger, new LlmProgressUpdate
@@ -428,6 +435,16 @@ public class LlmController : BaseController
                 MaxIterations = maxIterations
             });

+            // Remove tools to force a text-only response
+            chatRequest.Tools = null;
+
+            // Add explicit instruction for final answer
+            chatRequest.Messages.Add(new LlmMessage
+            {
+                Role = "system",
+                Content = "This is your FINAL iteration. You MUST provide a complete answer to the user's question based on the data you've already retrieved. DO NOT request any tool calls. Summarize and analyze the information you have."
+            });
+
             finalResponse = await llmService.ChatAsync(user, chatRequest);
         }
@@ -604,6 +621,13 @@ public class LlmController : BaseController
             // Add tool results to conversation history
             request.Messages.AddRange(toolResults);

+            // Add a system reminder to prevent redundant tool calls
+            request.Messages.Add(new LlmMessage
+            {
+                Role = "system",
+                Content = "You now have all the data from the tool calls above. Analyze this data and provide your final answer to the user. DO NOT call the same tools again with the same arguments."
+            });
+
             // Add delay after tool calls before next LLM call to avoid rate limits
             await Task.Delay(DelayAfterToolCallsMs);
@@ -613,8 +637,19 @@ public class LlmController : BaseController
         // If we hit max iterations, return the last response (even if it has tool calls)
         if (finalResponse == null)
         {
-            _logger.LogWarning("Reached max iterations ({MaxIterations}) for user {UserId}. Returning last response.",
+            _logger.LogWarning("Reached max iterations ({MaxIterations}) for user {UserId}. Forcing final response without tools.",
                 maxIterations, user.Id);

+            // Remove tools to force a text-only response
+            request.Tools = null;
+
+            // Add explicit instruction for final answer
+            request.Messages.Add(new LlmMessage
+            {
+                Role = "system",
+                Content = "This is your FINAL iteration. You MUST provide a complete answer to the user's question based on the data you've already retrieved. DO NOT request any tool calls. Summarize and analyze the information you have."
+            });
+
+            // Get one more response to return something meaningful
             finalResponse = await _llmService.ChatAsync(user, request);
         }
@@ -684,11 +719,11 @@ public class LlmController : BaseController
             (lastMessage.Contains("detail") || lastMessage.Contains("analyze") || lastMessage.Contains("position")))
             return 4;

-        // Simple backtest queries ("best", "top", "show") only need 2 iterations (fetch + respond)
+        // Simple backtest queries ("best", "top", "show") need 3 iterations (fetch + possible retry + respond)
         if (lastMessage.Contains("backtest") &&
             (lastMessage.Contains("best") || lastMessage.Contains("top") || lastMessage.Contains("show") ||
              lastMessage.Contains("recent") || lastMessage.Contains("latest")))
-            return 2;
+            return 3;

        // General analysis queries
        if (lastMessage.Contains("analyze"))
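
The last hunk above adjusts a keyword heuristic that sets the loop's iteration budget per query. Only the two backtest branches are visible in the diff; the method name, signature, "analyze" value, and fallback in this reconstruction are assumptions, included just to show the overall shape:

// Assumed reconstruction of the iteration-budget heuristic. The two backtest
// branches come from the hunk above; everything else is illustrative.
private static int EstimateMaxIterations(string lastMessage)
{
    // Detailed backtest analysis needs extra tool round-trips.
    if (lastMessage.Contains("backtest") &&
        (lastMessage.Contains("detail") || lastMessage.Contains("analyze") || lastMessage.Contains("position")))
        return 4;

    // Simple backtest queries need 3 iterations (fetch + possible retry + respond).
    if (lastMessage.Contains("backtest") &&
        (lastMessage.Contains("best") || lastMessage.Contains("top") || lastMessage.Contains("show") ||
         lastMessage.Contains("recent") || lastMessage.Contains("latest")))
        return 3;

    // General analysis queries (budget not shown in the hunk; assumed).
    if (lastMessage.Contains("analyze"))
        return 3;

    return 2; // assumed default for plain Q&A
}

Budgeting iterations up front keeps latency and token spend proportional to query complexity, and the forced-final-answer path above guarantees the loop still ends with usable text when the budget guess is wrong.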


@@ -34,6 +34,7 @@ function AiChat({ onClose }: AiChatProps): JSX.Element {
   const [messageHistory, setMessageHistory] = useState<string[]>([])
   const [historyIndex, setHistoryIndex] = useState<number>(-1)
   const [tempInput, setTempInput] = useState<string>('')
+  const [developerMode, setDeveloperMode] = useState<boolean>(false)

   const messagesEndRef = useRef<HTMLDivElement>(null)
   const { apiUrl } = useApiUrlStore()
@@ -105,6 +106,17 @@ function AiChat({ onClose }: AiChatProps): JSX.Element {
       let finalResponse: LlmChatResponse | null = null
       let lastUpdate: LlmProgressUpdate | null = null

+      // Messages to filter out (internal/system messages not useful for users)
+      // These only appear when developerMode is enabled
+      const filteredMessages = [
+        'Sending request to LLM...',
+        'Waiting briefly to respect rate limits...',
+        'Initializing conversation and loading available tools...'
+      ]
+      // Pattern to match "Loaded X available tools. Preparing system context..."
+      const loadedToolsPattern = /^Loaded \d+ available tools\. Preparing system context\.\.\.$/
+
       for await (const update of service.sendMessageStream(
         llmMessages,
         provider === 'auto' ? undefined : provider
@@ -112,18 +124,26 @@ function AiChat({ onClose }: AiChatProps): JSX.Element {
         lastUpdate = update
         setCurrentProgress(update)

-        // Add progress messages to chat history (except final_response)
+        // Add progress messages to chat history (except final_response and filtered messages)
         if (update.type !== 'final_response') {
-          const progressMessage: Message = {
-            role: 'progress',
-            content: update.message || '',
-            timestamp: new Date(),
-            progressType: update.type,
-            iteration: update.iteration,
-            maxIterations: update.maxIterations,
-            toolName: update.toolName
-          }
-          setMessages(prev => [...prev, progressMessage])
+          const message = update.message || ''
+
+          // Only filter these messages if developer mode is disabled
+          const shouldFilter = !developerMode && (
+            filteredMessages.includes(message) || loadedToolsPattern.test(message)
+          )
+
+          if (!shouldFilter) {
+            const progressMessage: Message = {
+              role: 'progress',
+              content: message,
+              timestamp: new Date(),
+              progressType: update.type,
+              iteration: update.iteration,
+              maxIterations: update.maxIterations,
+              toolName: update.toolName
+            }
+            setMessages(prev => [...prev, progressMessage])
+          }
         }

         // Handle different update types
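
The filter predicate introduced above is easy to sanity-check in isolation. This snippet reuses the exact literals and regex from the hunks; the shouldFilter wrapper and the sample messages are illustrative:

// Standalone check of the developer-mode filter (same literals as above).
const filteredMessages = [
  'Sending request to LLM...',
  'Waiting briefly to respect rate limits...',
  'Initializing conversation and loading available tools...'
]
const loadedToolsPattern = /^Loaded \d+ available tools\. Preparing system context\.\.\.$/

const shouldFilter = (developerMode: boolean, message: string): boolean =>
  !developerMode && (filteredMessages.includes(message) || loadedToolsPattern.test(message))

console.log(shouldFilter(false, 'Loaded 12 available tools. Preparing system context...')) // true  - hidden from users
console.log(shouldFilter(true, 'Sending request to LLM...'))                               // false - developer mode shows it
console.log(shouldFilter(false, 'Executing tool: get_backtests'))                          // false - real progress stays visible

Note that filtering happens when a message is appended, not at render time, so toggling developer mode mid-conversation only affects updates that arrive afterwards.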
@@ -256,6 +276,18 @@ function AiChat({ onClose }: AiChatProps): JSX.Element {
           </div>
         </div>
         <div className="flex items-center gap-2">
+          {/* Developer Mode Toggle */}
+          <div className="tooltip tooltip-bottom" data-tip={developerMode ? 'Hide debug messages' : 'Show debug messages'}>
+            <button
+              onClick={() => setDeveloperMode(!developerMode)}
+              className={`btn btn-sm btn-ghost btn-circle ${developerMode ? 'bg-warning/20' : ''}`}
+              title={developerMode ? 'Developer Mode: ON' : 'Developer Mode: OFF'}
+            >
+              <svg className="w-5 h-5" fill="none" stroke="currentColor" viewBox="0 0 24 24">
+                <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M10 20l4-16m4 4l4 4-4 4M6 16l-4-4 4-4" />
+              </svg>
+            </button>
+          </div>
           {/* Provider Selection */}
           <select
             value={provider}