Add test for executor

2025-11-11 02:15:57 +07:00
parent d02a07f86b
commit e8e2ec5a43
18 changed files with 81418 additions and 170 deletions

View File

@@ -6,6 +6,7 @@ public interface IUserRepository
{
Task<User?> GetUserByAgentNameAsync(string agentName);
Task<User> GetUserByNameAsync(string name, bool fetchAccounts = false);
Task<User?> GetUserByIdAsync(int userId);
Task<IEnumerable<User>> GetAllUsersAsync();
Task SaveOrUpdateUserAsync(User user);
}
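
The new GetUserByIdAsync lookup replaces the "load all users and filter" pattern used elsewhere in this commit; a minimal sketch of the intended call site, assuming the IUserRepository and User types from this solution (the wrapper class below is hypothetical):

public class UserLookupExample
{
    private readonly IUserRepository _userRepository;

    public UserLookupExample(IUserRepository userRepository)
    {
        _userRepository = userRepository;
    }

    public async Task<User> GetRequiredUserAsync(int userId)
    {
        // Single-row lookup instead of scanning GetAllUsersAsync()
        var user = await _userRepository.GetUserByIdAsync(userId);
        if (user == null)
        {
            // Error-handling shape is an assumption, not part of this commit
            throw new InvalidOperationException($"User {userId} not found");
        }
        return user;
    }
}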

View File

@@ -89,7 +89,7 @@ public class BacktestTests : BaseTests
// Arrange
var ticker = Ticker.ETH;
var timeframe = Timeframe.FifteenMinutes;
var daysBack = -10; // Fetch last 30 days of data
var daysBack = -30; // Fetch last 30 days of data
var startDate = DateTime.UtcNow.AddDays(daysBack);
var endDate = DateTime.UtcNow;
@@ -117,7 +117,7 @@ public class BacktestTests : BaseTests
Directory.CreateDirectory(dataDirectory);
}
var fileName = $"{ticker}-{timeframe}-candles.json";
var fileName = $"{ticker}-{timeframe}-candles-{DateTime.UtcNow:HH:m:s zzz}-.json";
var filePath = Path.Combine(dataDirectory, fileName);
// Save to file

View File

@@ -54,6 +54,7 @@ public class BacktestExecutor
/// <param name="save">Whether to save the backtest result</param>
/// <param name="withCandles">Whether to include candles in the result</param>
/// <param name="requestId">The request ID to associate with this backtest</param>
/// <param name="bundleRequestId">Optional bundle request ID to update with backtest result</param>
/// <param name="metadata">Additional metadata</param>
/// <param name="progressCallback">Optional callback for progress updates (0-100)</param>
/// <returns>The lightweight backtest result</returns>
@@ -64,6 +65,7 @@ public class BacktestExecutor
bool save = false,
bool withCandles = false,
string requestId = null,
Guid? bundleRequestId = null,
object metadata = null,
Func<int, Task> progressCallback = null)
{
@@ -72,6 +74,9 @@ public class BacktestExecutor
throw new Exception("No candle to backtest");
}
// Start performance tracking
var backtestStartTime = DateTime.UtcNow;
// Ensure user has accounts loaded
if (user.Accounts == null || !user.Accounts.Any())
{
@@ -129,23 +134,64 @@ public class BacktestExecutor
tradingBot.WalletBalances.Add(candles.FirstOrDefault()!.Date, config.BotTradingBalance);
var initialBalance = config.BotTradingBalance;
var fixedCandles = new HashSet<Candle>();
var lastProgressUpdate = DateTime.UtcNow;
const int progressUpdateIntervalMs = 1000; // Update progress every second
// Pre-allocate and populate candle structures for maximum performance
var orderedCandles = candles.OrderBy(c => c.Date).ToList();
// Process all candles
foreach (var candle in candles)
// Use optimized rolling window approach - TradingBox.GetSignal only needs last 600 candles
const int rollingWindowSize = 600;
var rollingCandles = new LinkedList<Candle>();
var fixedCandles = new HashSet<Candle>(rollingWindowSize);
var candlesProcessed = 0;
var lastProgressUpdate = DateTime.UtcNow;
const int progressUpdateIntervalMs = 5000; // Update progress every 5 seconds to reduce database load
const int walletCheckInterval = 10; // Check wallet balance every N candles instead of every candle
var lastWalletCheck = 0;
var lastWalletBalance = config.BotTradingBalance;
// Process all candles with optimized rolling window approach
foreach (var candle in orderedCandles)
{
// Maintain rolling window efficiently using LinkedList
rollingCandles.AddLast(candle);
fixedCandles.Add(candle);
if (rollingCandles.Count > rollingWindowSize)
{
var removedCandle = rollingCandles.First!.Value;
rollingCandles.RemoveFirst();
fixedCandles.Remove(removedCandle);
}
tradingBot.LastCandle = candle;
// Update signals manually only for backtesting
// Update signals and run trading logic with optimized rolling window
// For backtests, we can optimize by reducing async overhead
await tradingBot.UpdateSignals(fixedCandles);
await tradingBot.Run();
// Run with optimized backtest path (minimize async calls)
await RunOptimizedBacktestStep(tradingBot);
currentCandle++;
candlesProcessed++;
// Update progress callback if provided
// Optimized wallet balance check - only check every N candles and cache result
if (currentCandle - lastWalletCheck >= walletCheckInterval)
{
lastWalletBalance = tradingBot.WalletBalances.Values.LastOrDefault();
lastWalletCheck = currentCandle;
if (lastWalletBalance < Constants.GMX.Config.MinimumPositionAmount)
{
_logger.LogWarning(
"Backtest stopped early: Wallet balance fell below {MinimumPositionAmount} USDC (Current: {CurrentBalance:F2} USDC) at candle {CurrentCandle}/{TotalCandles} from {CandleDate}",
Constants.GMX.Config.MinimumPositionAmount, lastWalletBalance, currentCandle, totalCandles,
candle.Date.ToString("yyyy-MM-dd HH:mm"));
break;
}
}
// Update progress callback if provided (optimized frequency)
var currentPercentage = (currentCandle * 100) / totalCandles;
var timeSinceLastUpdate = (DateTime.UtcNow - lastProgressUpdate).TotalMilliseconds;
if (progressCallback != null && (timeSinceLastUpdate >= progressUpdateIntervalMs || currentPercentage >= lastLoggedPercentage + 10))
@@ -161,7 +207,7 @@ public class BacktestExecutor
lastProgressUpdate = DateTime.UtcNow;
}
// Log progress every 10%
// Log progress every 10% (reduced frequency)
if (currentPercentage >= lastLoggedPercentage + 10)
{
lastLoggedPercentage = currentPercentage;
@@ -169,21 +215,20 @@ public class BacktestExecutor
"Backtest progress: {Percentage}% ({CurrentCandle}/{TotalCandles} candles processed)",
currentPercentage, currentCandle, totalCandles);
}
// Check if wallet balance fell below 10 USDC and break if so
var currentWalletBalance = tradingBot.WalletBalances.Values.LastOrDefault();
if (currentWalletBalance < Constants.GMX.Config.MinimumPositionAmount)
{
_logger.LogWarning(
"Backtest stopped early: Wallet balance fell below {MinimumPositionAmount} USDC (Current: {CurrentBalance:F2} USDC) at candle {CurrentCandle}/{TotalCandles} from {CandleDate}",
Constants.GMX.Config.MinimumPositionAmount, currentWalletBalance, currentCandle, totalCandles,
candle.Date.ToString("yyyy-MM-dd HH:mm"));
break;
}
}
_logger.LogInformation("Backtest processing completed. Calculating final results...");
// Log performance metrics
var backtestEndTime = DateTime.UtcNow;
var totalExecutionTime = backtestEndTime - backtestStartTime;
var candlesPerSecond = totalCandles / totalExecutionTime.TotalSeconds;
_logger.LogInformation(
"Backtest performance metrics: {TotalCandles} candles processed in {ExecutionTime:F2}s ({CandlesPerSecond:F1} candles/sec)",
totalCandles, totalExecutionTime.TotalSeconds, candlesPerSecond);
// Calculate final results (using existing optimized methods)
var finalPnl = tradingBot.GetProfitAndLoss();
var winRate = tradingBot.GetWinRate();
var stats = TradingHelpers.GetStatistics(tradingBot.WalletBalances);
@@ -240,6 +285,12 @@ public class BacktestExecutor
if (save && user != null)
{
await _backtestRepository.InsertBacktestForUserAsync(user, result);
// Update bundle request if provided
if (bundleRequestId.HasValue)
{
await UpdateBundleRequestWithBacktestResult(user, bundleRequestId.Value, result);
}
}
// Send notification if backtest meets criteria
@@ -297,6 +348,118 @@ public class BacktestExecutor
return tradingBot;
}
/// <summary>
/// Runs a single backtest step. Currently delegates to the standard Run method and
/// serves as a hook for future backtest-specific optimizations.
/// </summary>
private async Task RunOptimizedBacktestStep(TradingBotBase tradingBot)
{
// For now this simply forwards to the standard Run method
await tradingBot.Run();
}
/// <summary>
/// Updates bundle request with the completed backtest result
/// </summary>
private async Task UpdateBundleRequestWithBacktestResult(User user, Guid bundleRequestId, Backtest backtest)
{
try
{
using var scope = _scopeFactory.CreateScope();
var backtestRepository = scope.ServiceProvider.GetRequiredService<IBacktestRepository>();
var jobRepository = scope.ServiceProvider.GetRequiredService<IJobRepository>();
var webhookService = scope.ServiceProvider.GetRequiredService<IWebhookService>();
// Get bundle request
var bundleRequest = backtestRepository.GetBundleBacktestRequestByIdForUser(user, bundleRequestId);
if (bundleRequest == null)
{
_logger.LogWarning("Bundle request {BundleRequestId} not found for user {UserId}", bundleRequestId, user.Id);
return;
}
var previousStatus = bundleRequest.Status;
// Get all jobs for this bundle to calculate progress
var jobs = await jobRepository.GetByBundleRequestIdAsync(bundleRequestId);
var completedJobs = jobs.Count(j => j.Status == JobStatus.Completed);
var failedJobs = jobs.Count(j => j.Status == JobStatus.Failed);
var runningJobs = jobs.Count(j => j.Status == JobStatus.Running);
var totalJobs = jobs.Count();
// Update bundle request progress
bundleRequest.CompletedBacktests = completedJobs;
bundleRequest.FailedBacktests = failedJobs;
// Update status based on job states
if (completedJobs + failedJobs == totalJobs)
{
// All jobs completed or failed
if (failedJobs == 0)
{
bundleRequest.Status = BundleBacktestRequestStatus.Completed;
}
else if (completedJobs == 0)
{
bundleRequest.Status = BundleBacktestRequestStatus.Failed;
bundleRequest.ErrorMessage = "All backtests failed";
}
else
{
bundleRequest.Status = BundleBacktestRequestStatus.Completed;
bundleRequest.ErrorMessage = $"{failedJobs} backtests failed";
}
bundleRequest.CompletedAt = DateTime.UtcNow;
bundleRequest.CurrentBacktest = null;
}
else if (runningJobs > 0)
{
// Some jobs still running
bundleRequest.Status = BundleBacktestRequestStatus.Running;
}
// Update results list with the new backtest ID
var resultsList = bundleRequest.Results?.ToList() ?? new List<string>();
if (!resultsList.Contains(backtest.Id))
{
resultsList.Add(backtest.Id);
bundleRequest.Results = resultsList;
}
await backtestRepository.UpdateBundleBacktestRequestAsync(bundleRequest);
// Send webhook notification if bundle request just completed
if (previousStatus != BundleBacktestRequestStatus.Completed &&
bundleRequest.Status == BundleBacktestRequestStatus.Completed &&
!string.IsNullOrEmpty(user.TelegramChannel))
{
var message = $"✅ Bundle backtest '{bundleRequest.Name}' (ID: {bundleRequest.RequestId}) completed successfully. " +
$"Completed: {completedJobs}/{totalJobs} backtests" +
(failedJobs > 0 ? $", Failed: {failedJobs}" : "") +
$". Results: {resultsList.Count} backtest(s) generated.";
await webhookService.SendMessage(message, user.TelegramChannel);
}
else if (previousStatus != BundleBacktestRequestStatus.Failed &&
bundleRequest.Status == BundleBacktestRequestStatus.Failed &&
!string.IsNullOrEmpty(user.TelegramChannel))
{
var message = $"❌ Bundle backtest '{bundleRequest.Name}' (ID: {bundleRequest.RequestId}) failed. " +
$"All {totalJobs} backtests failed. Error: {bundleRequest.ErrorMessage}";
await webhookService.SendMessage(message, user.TelegramChannel);
}
_logger.LogInformation(
"Updated bundle request {BundleRequestId} with backtest {BacktestId}: {Completed}/{Total} completed, {Failed} failed, {Running} running",
bundleRequestId, backtest.Id, completedJobs, totalJobs, failedJobs, runningJobs);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to update bundle request {BundleRequestId} with backtest {BacktestId}", bundleRequestId, backtest.Id);
}
}
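
The status transition above boils down to a small decision over the job counts. A standalone sketch of that branching (a hypothetical helper, not part of the commit; enum and count names follow the diff):

private static BundleBacktestRequestStatus ResolveBundleStatus(
    int completedJobs, int failedJobs, int runningJobs, int totalJobs,
    BundleBacktestRequestStatus currentStatus)
{
    if (completedJobs + failedJobs == totalJobs)
    {
        // All jobs finished: Failed only when nothing succeeded, otherwise Completed
        // (partial failures are still reported as Completed with an ErrorMessage)
        return completedJobs == 0
            ? BundleBacktestRequestStatus.Failed
            : BundleBacktestRequestStatus.Completed;
    }

    // Jobs still in flight: mark Running, otherwise leave the current status untouched
    return runningJobs > 0 ? BundleBacktestRequestStatus.Running : currentStatus;
}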
/// <summary>
/// Sends notification if backtest meets criteria
/// </summary>

View File

@@ -63,6 +63,7 @@ public class BacktestExecutorAdapter : IBacktester
save,
withCandles,
requestId,
bundleRequestId: null,
metadata,
progressCallback: null);
@@ -85,6 +86,7 @@ public class BacktestExecutorAdapter : IBacktester
save: false,
withCandles,
requestId,
bundleRequestId: null,
metadata,
progressCallback: null);

View File

@@ -350,7 +350,8 @@ public class GeneticService : IGeneticService
// Load candles once at the beginning to avoid repeated database queries
// This significantly reduces database connections during genetic algorithm execution
_logger.LogInformation("Loading candles for genetic algorithm {RequestId}: {Ticker} on {Timeframe} from {StartDate} to {EndDate}",
_logger.LogInformation(
"Loading candles for genetic algorithm {RequestId}: {Ticker} on {Timeframe} from {StartDate} to {EndDate}",
request.RequestId, request.Ticker, request.Timeframe, request.StartDate, request.EndDate);
HashSet<Candle> candles;
@@ -472,7 +473,8 @@ public class GeneticService : IGeneticService
{
// Reload the request from the database in the new scope
// Use the user from the original request to get the request by ID
var dbRequest = geneticService.GetGeneticRequestByIdForUser(request.User, request.RequestId);
var dbRequest =
geneticService.GetGeneticRequestByIdForUser(request.User, request.RequestId);
if (dbRequest != null)
{
@@ -488,7 +490,8 @@ public class GeneticService : IGeneticService
}
});
_logger.LogDebug("Updated genetic request {RequestId} at generation {Generation} with fitness {Fitness}",
_logger.LogDebug(
"Updated genetic request {RequestId} at generation {Generation} with fitness {Fitness}",
request.RequestId, generationCount, bestFitness);
// Check for cancellation
@@ -565,7 +568,8 @@ public class GeneticService : IGeneticService
await UpdateGeneticRequestAsync(request);
_logger.LogInformation("Final update completed for genetic request {RequestId}. Generation: {Generation}, Best Fitness: {Fitness}",
_logger.LogInformation(
"Final update completed for genetic request {RequestId}. Generation: {Generation}, Best Fitness: {Fitness}",
request.RequestId, ga.GenerationsNumber, bestFitness);
// Send notification about the completed genetic algorithm
@@ -893,7 +897,7 @@ public class TradingBotChromosome : ChromosomeBase
return new TradingBotConfig
{
Name = $"Genetic_{request.RequestId}",
AccountName = "Oda-embedded",
AccountName = request.User.Accounts.FirstOrDefault().Name,
Ticker = request.Ticker,
Timeframe = request.Timeframe,
BotTradingBalance = request.Balance,
@@ -1051,7 +1055,9 @@ public class TradingBotFitness : IFitness
save: false, // Don't save backtest results for genetic algorithm
withCandles: false,
requestId: _request.RequestId,
metadata: new GeneticBacktestMetadata(_geneticAlgorithm?.GenerationsNumber ?? 0, _request.RequestId)
bundleRequestId: null, // Genetic algorithm doesn't use bundle requests
metadata: new GeneticBacktestMetadata(_geneticAlgorithm?.GenerationsNumber ?? 0,
_request.RequestId)
)
).GetAwaiter().GetResult();
}

View File

@@ -296,8 +296,7 @@ public class UserService : IUserService
public async Task<User> GetUserByIdAsync(int userId)
{
var allUsers = await _userRepository.GetAllUsersAsync();
var user = allUsers.FirstOrDefault(u => u.Id == userId);
var user = await _userRepository.GetUserByIdAsync(userId);
if (user == null)
{

View File

@@ -24,6 +24,7 @@ public class BacktestComputeWorker : BackgroundService
private readonly BacktestComputeWorkerOptions _options;
private readonly SemaphoreSlim _instanceSemaphore;
private readonly ConcurrentDictionary<Guid, Task> _runningJobTasks = new();
private readonly ConcurrentDictionary<Guid, JobProgressTracker> _jobProgressTrackers = new();
private readonly CancellationTokenSource _shutdownCts = new();
public BacktestComputeWorker(
@@ -54,6 +55,9 @@ public class BacktestComputeWorker : BackgroundService
// Background task for heartbeat updates
var heartbeatTask = Task.Run(() => HeartbeatLoop(cancellationToken), cancellationToken);
// Background task for progress persistence
var progressPersistenceTask = Task.Run(() => ProgressPersistenceLoop(cancellationToken), cancellationToken);
// Main job processing loop
try
{
@@ -230,30 +234,27 @@ public class BacktestComputeWorker : BackgroundService
$"No candles found for {config.Ticker} on {config.Timeframe} from {job.StartDate} to {job.EndDate}");
}
// Progress callback to update job progress
Func<int, Task> progressCallback = async (percentage) =>
{
try
{
// Check if job has been running too long
var elapsed = DateTime.UtcNow - jobStartTime;
if (elapsed.TotalMinutes > _options.JobTimeoutMinutes)
{
_logger.LogWarning(
"Job {JobId} has been running for {ElapsedMinutes} minutes, exceeding timeout of {TimeoutMinutes} minutes",
job.Id, elapsed.TotalMinutes, _options.JobTimeoutMinutes);
throw new TimeoutException($"Job exceeded timeout of {_options.JobTimeoutMinutes} minutes");
}
// Create progress tracker for this job
var progressTracker = new JobProgressTracker(job.Id, _logger);
_jobProgressTrackers.TryAdd(job.Id, progressTracker);
job.ProgressPercentage = percentage;
job.LastHeartbeat = DateTime.UtcNow;
await jobRepository.UpdateAsync(job);
}
catch (Exception ex)
// Progress callback that only updates in-memory progress (non-blocking)
Func<int, Task> progressCallback = (percentage) =>
{
// Check if job has been running too long
var elapsed = DateTime.UtcNow - jobStartTime;
if (elapsed.TotalMinutes > _options.JobTimeoutMinutes)
{
_logger.LogWarning(ex, "Error updating job progress for job {JobId}", job.Id);
throw; // Re-throw timeout exceptions
_logger.LogWarning(
"Job {JobId} has been running for {ElapsedMinutes} minutes, exceeding timeout of {TimeoutMinutes} minutes",
job.Id, elapsed.TotalMinutes, _options.JobTimeoutMinutes);
throw new TimeoutException($"Job exceeded timeout of {_options.JobTimeoutMinutes} minutes");
}
// Update progress in memory only - persistence happens in background
progressTracker.UpdateProgress(percentage);
return Task.CompletedTask; // Non-blocking
};
// Execute the backtest with timeout
@@ -270,6 +271,7 @@ public class BacktestComputeWorker : BackgroundService
save: true,
withCandles: false,
requestId: job.RequestId,
bundleRequestId: job.BundleRequestId,
metadata: null,
progressCallback: progressCallback);
}
@@ -293,6 +295,9 @@ public class BacktestComputeWorker : BackgroundService
await jobRepository.UpdateAsync(job);
// Clean up progress tracker
_jobProgressTrackers.TryRemove(job.Id, out _);
// Increment backtest count for the user's agent summary
try
{
@@ -310,11 +315,7 @@ public class BacktestComputeWorker : BackgroundService
"Completed backtest job {JobId}. Score: {Score}, PnL: {PnL}, Duration: {DurationMinutes:F1} minutes",
job.Id, result.Score, result.FinalPnl, elapsedTime.TotalMinutes);
// Update bundle request if this is part of a bundle
if (job.BundleRequestId.HasValue)
{
await UpdateBundleRequestProgress(job.BundleRequestId.Value, scope.ServiceProvider);
}
// Bundle request is now updated in the BacktestExecutor
}
catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
{
@@ -326,6 +327,9 @@ public class BacktestComputeWorker : BackgroundService
_logger.LogError(ex, "Error processing backtest job {JobId}", job.Id);
SentrySdk.CaptureException(ex);
// Clean up progress tracker on failure
_jobProgressTrackers.TryRemove(job.Id, out _);
await HandleJobFailure(job, ex, jobRepository, scope.ServiceProvider);
}
}
@@ -643,6 +647,62 @@ public class BacktestComputeWorker : BackgroundService
}
}
private async Task ProgressPersistenceLoop(CancellationToken cancellationToken)
{
while (!cancellationToken.IsCancellationRequested)
{
try
{
await Task.Delay(TimeSpan.FromSeconds(2), cancellationToken); // Check every 2 seconds
using var scope = _scopeFactory.CreateScope();
var jobRepository = scope.ServiceProvider.GetRequiredService<IJobRepository>();
// Process all progress trackers that need persistence
var trackersToPersist = _jobProgressTrackers
.Where(kvp => kvp.Value.ShouldPersist())
.ToList();
if (trackersToPersist.Count > 0)
{
_logger.LogDebug("Persisting progress for {Count} jobs", trackersToPersist.Count);
foreach (var (jobId, tracker) in trackersToPersist)
{
try
{
var (percentage, lastUpdate) = tracker.GetProgressForPersistence();
// Get and update the job
var job = await jobRepository.GetByIdAsync(jobId);
if (job != null && job.Status == JobStatus.Running)
{
job.ProgressPercentage = percentage;
job.LastHeartbeat = lastUpdate;
await jobRepository.UpdateAsync(job);
_logger.LogDebug("Persisted progress {Percentage}% for job {JobId}", percentage, jobId);
}
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Error persisting progress for job {JobId}", jobId);
}
}
}
}
catch (OperationCanceledException)
{
// Expected during shutdown, don't log as error
break;
}
catch (Exception ex)
{
_logger.LogError(ex, "Error in progress persistence loop");
}
}
}
private async Task HandleJobFailure(
Job job,
Exception ex,
@@ -764,6 +824,73 @@ public class BacktestComputeWorker : BackgroundService
}
}
/// <summary>
/// Tracks job progress with batched database updates for performance optimization
/// </summary>
public class JobProgressTracker
{
private readonly object _lock = new();
private int _lastPersistedPercentage;
private DateTime _lastPersistedTime;
private readonly ILogger _logger;
public Guid JobId { get; }
public int CurrentPercentage { get; private set; }
public DateTime LastUpdateTime { get; private set; }
public JobProgressTracker(Guid jobId, ILogger logger)
{
JobId = jobId;
_logger = logger;
_lastPersistedTime = DateTime.UtcNow;
}
/// <summary>
/// Updates progress in memory only - thread safe
/// </summary>
public void UpdateProgress(int percentage)
{
lock (_lock)
{
CurrentPercentage = percentage;
LastUpdateTime = DateTime.UtcNow;
}
}
/// <summary>
/// Checks if progress should be persisted to database based on time/percentage thresholds
/// </summary>
public bool ShouldPersist(int progressUpdateIntervalMs = 5000, int percentageThreshold = 5)
{
lock (_lock)
{
var timeSinceLastPersist = (DateTime.UtcNow - _lastPersistedTime).TotalMilliseconds;
var percentageSinceLastPersist = CurrentPercentage - _lastPersistedPercentage;
return timeSinceLastPersist >= progressUpdateIntervalMs ||
percentageSinceLastPersist >= percentageThreshold ||
CurrentPercentage >= 100; // Always persist completion
}
}
/// <summary>
/// Gets current progress and marks as persisted
/// </summary>
public (int percentage, DateTime lastUpdate) GetProgressForPersistence()
{
lock (_lock)
{
var percentage = CurrentPercentage;
var lastUpdate = LastUpdateTime;
_lastPersistedPercentage = percentage;
_lastPersistedTime = DateTime.UtcNow;
return (percentage, lastUpdate);
}
}
}
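
A minimal usage sketch of the tracker's contract, simplified from the worker wiring above (the jobId, logger, job, and jobRepository locals are assumed to come from the surrounding worker code):

// Hot path: the backtest progress callback only touches memory (thread safe)
var tracker = new JobProgressTracker(jobId, logger);
Func<int, Task> progressCallback = percentage =>
{
    tracker.UpdateProgress(percentage);
    return Task.CompletedTask;
};

// Background path (inside an async loop): persist only when a threshold is crossed
if (tracker.ShouldPersist())
{
    var (percentage, lastUpdate) = tracker.GetProgressForPersistence();
    job.ProgressPercentage = percentage;
    job.LastHeartbeat = lastUpdate;
    await jobRepository.UpdateAsync(job);
}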
/// <summary>
/// Configuration options for BacktestComputeWorker
/// </summary>

View File

@@ -0,0 +1,9 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net8.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
</Project>

View File

@@ -67,6 +67,54 @@ public class PostgreSqlUserRepository : BaseRepositoryWithLogging, IUserReposito
}, nameof(GetUserByAgentNameAsync), ("agentName", agentName));
}
public async Task<User?> GetUserByIdAsync(int userId)
{
return await ExecuteWithLoggingAsync(async () =>
{
// Check cache first for frequently accessed users
var cacheKey = $"user_id_{userId}";
var cachedUser = _cacheService.GetValue<User>(cacheKey);
if (cachedUser != null)
{
return cachedUser;
}
try
{
await PostgreSqlConnectionHelper.EnsureConnectionOpenAsync(_context);
// Optimized query with explicit SELECT to avoid loading unnecessary data
var userEntity = await _context.Users
.AsNoTracking()
.Where(u => u.Id == userId)
.Select(u => new UserEntity
{
Id = u.Id,
Name = u.Name,
AgentName = u.AgentName,
AvatarUrl = u.AvatarUrl,
TelegramChannel = u.TelegramChannel
})
.FirstOrDefaultAsync()
.ConfigureAwait(false);
if (userEntity == null)
return null;
var user = PostgreSqlMappers.Map(userEntity);
// Cache user for 5 minutes since user data doesn't change frequently
_cacheService.SaveValue(cacheKey, user, TimeSpan.FromMinutes(5));
return user;
}
finally
{
await PostgreSqlConnectionHelper.SafeCloseConnectionAsync(_context);
}
}, nameof(GetUserByIdAsync), ("userId", userId));
}
public async Task<User> GetUserByNameAsync(string name, bool fetchAccounts = false)
{
return await ExecuteWithLoggingAsync(async () =>
@@ -237,8 +285,10 @@ public class PostgreSqlUserRepository : BaseRepositoryWithLogging, IUserReposito
// Invalidate cache for updated user - handle both old and new AgentName
var nameCacheKey = $"user_name_{user.Name}";
var nameWithAccountsCacheKey = $"user_name_with_accounts_{user.Name}";
var idCacheKey = $"user_id_{user.Id}";
_cacheService.RemoveValue(nameCacheKey);
_cacheService.RemoveValue(nameWithAccountsCacheKey);
_cacheService.RemoveValue(idCacheKey);
// Invalidate old AgentName cache if it existed
if (!string.IsNullOrEmpty(oldAgentName))

View File

@@ -13,13 +13,13 @@ const JobsSettings: React.FC = () => {
const [pageSize, setPageSize] = useState(50)
const [sortBy, setSortBy] = useState<string>('CreatedAt')
const [sortOrder, setSortOrder] = useState<string>('desc')
const [statusFilter, setStatusFilter] = useState<string>('Pending')
const [statusFilter, setStatusFilter] = useState<string>('Failed')
const [jobTypeFilter, setJobTypeFilter] = useState<string>('')
const [userIdFilter, setUserIdFilter] = useState<string>('')
const [workerIdFilter, setWorkerIdFilter] = useState<string>('')
const [bundleRequestIdFilter, setBundleRequestIdFilter] = useState<string>('')
const [filtersOpen, setFiltersOpen] = useState<boolean>(false)
const [showTable, setShowTable] = useState<boolean>(false)
const [showTable, setShowTable] = useState<boolean>(true)
const jobClient = new JobClient({}, apiUrl)
const queryClient = useQueryClient()
@@ -146,7 +146,7 @@ const JobsSettings: React.FC = () => {
}
const clearFilters = () => {
setStatusFilter('Pending') // Reset to Pending instead of All
setStatusFilter('Failed') // Reset to Failed instead of All
setJobTypeFilter('')
setUserIdFilter('')
setWorkerIdFilter('')
@@ -378,50 +378,15 @@ const JobsSettings: React.FC = () => {
)}
</div>
{/* Load Table Button */}
{!showTable && (
<div className="card bg-base-100 shadow-md mb-4">
<div className="card-body">
<div className="flex items-center justify-between">
<div>
<h3 className="card-title text-lg">Jobs List</h3>
<p className="text-sm text-base-content/70">Click the button below to load and view the jobs table</p>
</div>
<button
className="btn btn-primary"
onClick={() => setShowTable(true)}
>
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" strokeWidth="1.5" stroke="currentColor" className="w-5 h-5">
<path strokeLinecap="round" strokeLinejoin="round" d="M3.75 3v11.25A2.25 2.25 0 006 16.5h2.25M3.75 3h-1.5m1.5 0h16.5m0 0h1.5m-1.5 0v11.25A2.25 2.25 0 0118 16.5h-2.25m-7.5 0h7.5m-7.5 0l-1 3m8.5-3l1 3m0 0l.5 1.5m-.5-1.5h-9.5m0 0l-.5 1.5M9 11.25v1.5M12 9v3.75m3-3.75v3.75m-9 .75h12.75a2.25 2.25 0 002.25-2.25V6.75a2.25 2.25 0 00-2.25-2.25H6.75A2.25 2.25 0 004.5 6.75v7.5a2.25 2.25 0 002.25 2.25z" />
</svg>
Load Jobs Table
</button>
</div>
</div>
{/* Jobs List Header */}
<div className="card bg-base-100 shadow-md mb-4">
<div className="card-body py-3">
<h3 className="card-title text-lg">Jobs List</h3>
</div>
)}
</div>
{showTable && (
<>
{/* Hide Table Button */}
<div className="card bg-base-100 shadow-md mb-4">
<div className="card-body py-3">
<div className="flex items-center justify-between">
<h3 className="card-title text-lg">Jobs List</h3>
<button
className="btn btn-ghost btn-sm"
onClick={() => setShowTable(false)}
>
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" strokeWidth="1.5" stroke="currentColor" className="w-5 h-5">
<path strokeLinecap="round" strokeLinejoin="round" d="M6 18L18 6M6 6l12 12" />
</svg>
Hide Table
</button>
</div>
</div>
</div>
{filtersOpen && (
{filtersOpen && (
<div className="card bg-base-200 mb-4">
<div className="card-body">
<div className="grid grid-cols-1 md:grid-cols-3 lg:grid-cols-5 gap-4">
@@ -527,28 +492,25 @@ const JobsSettings: React.FC = () => {
</div>
)}
<JobsTable
jobs={jobs}
isLoading={isLoading}
currentPage={currentPage}
totalPages={totalPages}
totalCount={totalCount}
pageSize={pageSize}
sortBy={sortBy}
sortOrder={sortOrder}
onPageChange={handlePageChange}
onSortChange={handleSortChange}
onRetryJob={handleRetryJob}
isRetrying={retryJobMutation.isPending}
onDeleteJob={handleDeleteJob}
isDeleting={deleteJobMutation.isPending}
/>
</>
)}
<JobsTable
jobs={jobs}
isLoading={isLoading}
currentPage={currentPage}
totalPages={totalPages}
totalCount={totalCount}
pageSize={pageSize}
sortBy={sortBy}
sortOrder={sortOrder}
onPageChange={handlePageChange}
onSortChange={handleSortChange}
onRetryJob={handleRetryJob}
isRetrying={retryJobMutation.isPending}
onDeleteJob={handleDeleteJob}
isDeleting={deleteJobMutation.isPending}
/>
{/* Bottom Menu Bar */}
{showTable && (
<BottomMenuBar>
<BottomMenuBar>
<li>
<a
onClick={(e) => {

View File

@@ -149,22 +149,54 @@ const JobsTable: React.FC<IJobsTable> = ({
),
},
{
Header: () => <SortableHeader column="CreatedAt" label="Created" />,
Header: () => <SortableHeader column="CreatedAt" label="Timeline" />,
accessor: 'createdAt',
width: 180,
Cell: ({ value }: any) => formatDate(value),
},
{
Header: () => <SortableHeader column="StartedAt" label="Started" />,
accessor: 'startedAt',
width: 180,
Cell: ({ value }: any) => formatDate(value),
},
{
Header: () => <SortableHeader column="CompletedAt" label="Completed" />,
accessor: 'completedAt',
width: 180,
Cell: ({ value }: any) => formatDate(value),
width: 220,
Cell: ({ row }: any) => {
const createdAt = row.original.createdAt
const startedAt = row.original.startedAt
const completedAt = row.original.completedAt
// Calculate duration between started and completed
const calculateDuration = (start: Date | string | null, end: Date | string | null) => {
if (!start || !end) return null
try {
const startDate = new Date(start)
const endDate = new Date(end)
const diffMs = endDate.getTime() - startDate.getTime()
// Convert to human readable format
const diffDays = Math.floor(diffMs / (1000 * 60 * 60 * 24))
const diffHours = Math.floor((diffMs % (1000 * 60 * 60 * 24)) / (1000 * 60 * 60))
const diffMinutes = Math.floor((diffMs % (1000 * 60 * 60)) / (1000 * 60))
const diffSeconds = Math.floor((diffMs % (1000 * 60)) / 1000)
if (diffDays > 0) return `${diffDays}d ${diffHours}h ${diffMinutes}m`
if (diffHours > 0) return `${diffHours}h ${diffMinutes}m ${diffSeconds}s`
if (diffMinutes > 0) return `${diffMinutes}m ${diffSeconds}s`
return `${diffSeconds}s`
} catch {
return null
}
}
const duration = calculateDuration(startedAt, completedAt)
return (
<div className="flex flex-col gap-1 text-xs">
<div className="flex flex-col gap-0.5">
<div className="text-gray-500">Created: {formatDate(createdAt)}</div>
<div className="text-blue-500">Started: {formatDate(startedAt)}</div>
<div className="text-green-500">Completed: {formatDate(completedAt)}</div>
</div>
{duration && (
<div className="text-purple-500 font-semibold">
Duration: {duration}
</div>
)}
</div>
)
},
},
{
Header: 'Error Message',

View File

@@ -0,0 +1,180 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Managing.Application.Abstractions;
using Managing.Application.Abstractions.Repositories;
using Managing.Application.Abstractions.Services;
using Managing.Application.Backtests;
using Managing.Application.Bots;
using Managing.Application.Tests;
using Managing.Core;
using Managing.Domain.Accounts;
using Managing.Domain.Backtests;
using Managing.Domain.Bots;
using Managing.Domain.Candles;
using Managing.Domain.Scenarios;
using Managing.Domain.Strategies;
using Managing.Domain.Users;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Moq;
using Newtonsoft.Json;
using Xunit;
using static Managing.Common.Enums;
namespace Managing.Workers.Tests;
public class BacktestExecutorTests : BaseTests
{
private readonly BacktestExecutor _backtestExecutor;
private readonly Mock<IServiceScopeFactory> _scopeFactory;
private readonly Mock<IBacktestRepository> _backtestRepository;
private readonly Mock<IScenarioService> _scenarioService;
private readonly Mock<IMessengerService> _messengerService;
private readonly User _testUser;
public BacktestExecutorTests() : base()
{
// Setup mock dependencies
_backtestRepository = new Mock<IBacktestRepository>();
_scenarioService = new Mock<IScenarioService>();
_messengerService = new Mock<IMessengerService>();
// Setup service scope factory
_scopeFactory = new Mock<IServiceScopeFactory>();
var mockScope = new Mock<IServiceScope>();
var mockServiceProvider = new Mock<IServiceProvider>();
// Setup TradingBotBase logger
var tradingBotLogger = TradingBaseTests.CreateTradingBotLogger();
mockServiceProvider.Setup(x => x.GetService(typeof(ILogger<TradingBotBase>)))
.Returns(tradingBotLogger);
// Setup all services that TradingBotBase might need
mockServiceProvider.Setup(x => x.GetService(typeof(IExchangeService)))
.Returns(_exchangeService);
mockServiceProvider.Setup(x => x.GetService(typeof(IAccountService)))
.Returns(_accountService.Object);
mockServiceProvider.Setup(x => x.GetService(typeof(ITradingService)))
.Returns(_tradingService.Object);
mockServiceProvider.Setup(x => x.GetService(typeof(IMoneyManagementService)))
.Returns(_moneyManagementService.Object);
mockServiceProvider.Setup(x => x.GetService(typeof(IBotService)))
.Returns(new Mock<IBotService>().Object);
mockServiceProvider.Setup(x => x.GetService(typeof(IMessengerService)))
.Returns(_messengerService.Object);
mockScope.Setup(x => x.ServiceProvider).Returns(mockServiceProvider.Object);
_scopeFactory.Setup(x => x.CreateScope()).Returns(mockScope.Object);
// Create test user with account
_testUser = new User
{
Id = 1,
Name = "Test User",
Accounts = new List<Account> { _account }
};
// Create BacktestExecutor instance
var logger = new Mock<ILogger<BacktestExecutor>>().Object;
_backtestExecutor = new BacktestExecutor(
logger,
_scopeFactory.Object,
_backtestRepository.Object,
_scenarioService.Object,
_accountService.Object,
_messengerService.Object);
}
[Fact]
public async Task ExecuteBacktest_With_ETH_FifteenMinutes_Data_Should_Return_LightBacktest()
{
// Arrange
var candles = FileHelpers.ReadJson<List<Candle>>("Data/ETH-FifteenMinutes-candles.json");
Assert.NotNull(candles);
Assert.NotEmpty(candles);
var scenario = new Scenario("ETH_BacktestScenario");
var rsiDivIndicator = ScenarioHelpers.BuildIndicator(IndicatorType.RsiDivergence, "RsiDiv", period: 14);
scenario.Indicators = new List<IndicatorBase> { (IndicatorBase)rsiDivIndicator };
scenario.LoopbackPeriod = 15;
var config = new TradingBotConfig
{
AccountName = _account.Name,
MoneyManagement = MoneyManagement,
Ticker = Ticker.ETH,
Scenario = LightScenario.FromScenario(scenario),
Timeframe = Timeframe.FifteenMinutes,
IsForWatchingOnly = false,
BotTradingBalance = 1000,
IsForBacktest = true,
CooldownPeriod = 1,
MaxLossStreak = 0,
FlipPosition = false,
Name = "ETH_FifteenMinutes_Test",
FlipOnlyWhenInProfit = true,
MaxPositionTimeHours = null,
CloseEarlyWhenProfitable = false
};
// Act
var result = await _backtestExecutor.ExecuteAsync(
config,
candles.ToHashSet(),
_testUser,
save: false,
withCandles: false,
requestId: null,
bundleRequestId: null,
metadata: null,
progressCallback: null);
// Output the result to console for review
var json = JsonConvert.SerializeObject(new
{
result.FinalPnl,
result.WinRate,
result.GrowthPercentage,
result.HodlPercentage,
result.Fees,
result.NetPnl,
result.MaxDrawdown,
result.SharpeRatio,
result.Score,
result.InitialBalance,
StartDate = result.StartDate.ToString("yyyy-MM-dd HH:mm:ss"),
EndDate = result.EndDate.ToString("yyyy-MM-dd HH:mm:ss")
}, Formatting.Indented);
Console.WriteLine("BacktestExecutor Results:");
Console.WriteLine(json);
// Assert - Validate specific backtest results
Assert.NotNull(result);
Assert.IsType<LightBacktest>(result);
// Validate key metrics
Assert.Equal(1000.0m, result.InitialBalance);
Assert.Equal(-59.882047336208884979534923000m, result.FinalPnl);
Assert.Equal(31, result.WinRate);
Assert.Equal(-5.9882047336208884979534923m, result.GrowthPercentage);
Assert.Equal(-0.67091284426766023865867781m, result.HodlPercentage);
Assert.Equal(56.951749553070862317498561018m, result.Fees);
Assert.Equal(-116.83379688927974729703348402m, result.NetPnl);
Assert.Equal(109.9278709774429014669107321m, result.MaxDrawdown);
Assert.Equal((double?)-0.014233294246603566m, result.SharpeRatio);
Assert.Equal((double)0.0m, result.Score);
// Validate dates
Assert.Equal(new DateTime(2025, 10, 14, 12, 0, 0), result.StartDate);
Assert.Equal(new DateTime(2025, 10, 24, 11, 45, 0), result.EndDate);
Assert.True(result.StartDate < result.EndDate);
}
}

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -0,0 +1,29 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net8.0</TargetFramework>
<ImplicitUsings>disable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<None Update="Data\ETH-FifteenMinutes-candles.json">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Managing.Application.Tests\Managing.Application.Tests.csproj"/>
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.9.0"/>
<PackageReference Include="Moq" Version="4.20.72"/>
<PackageReference Include="xunit" Version="2.8.0"/>
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.0">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
</ItemGroup>
</Project>

View File

@@ -5,6 +5,7 @@ using Managing.Infrastructure.Databases.InfluxDb.Models;
using Managing.Infrastructure.Databases.PostgreSql;
using Managing.Infrastructure.Databases.PostgreSql.Configurations;
using Microsoft.EntityFrameworkCore;
using Npgsql;
// Explicitly set the environment before creating the host builder
var environment = Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT")
@@ -76,13 +77,32 @@ var host = hostBuilder
services.Configure<PostgreSqlSettings>(configuration.GetSection(Constants.Databases.PostgreSql));
services.Configure<InfluxDbSettings>(configuration.GetSection(Constants.Databases.InfluxDb));
// Build connection string with timeout and pooling settings
var connectionStringBuilder = new NpgsqlConnectionStringBuilder(postgreSqlConnectionString)
{
// Configure connection timeout (default is 15 seconds, increase for network latency)
Timeout = 30, // 30 seconds for connection establishment
CommandTimeout = 60, // 60 seconds for command execution
// Configure connection pooling for better performance and reliability
MaxPoolSize = 100, // Maximum pool size
MinPoolSize = 5, // Minimum pool size
// Configure KeepAlive to maintain connections and detect network issues
KeepAlive = 300 // 5 minutes keepalive interval
};
var enhancedConnectionString = connectionStringBuilder.ConnectionString;
// Add DbContext
services.AddDbContext<ManagingDbContext>((serviceProvider, options) =>
{
options.UseNpgsql(postgreSqlConnectionString, npgsqlOptions =>
options.UseNpgsql(enhancedConnectionString, npgsqlOptions =>
{
npgsqlOptions.CommandTimeout(60);
npgsqlOptions.EnableRetryOnFailure(maxRetryCount: 5, maxRetryDelay: TimeSpan.FromSeconds(10), errorCodesToAdd: null);
// Enable retry on failure for transient errors
npgsqlOptions.EnableRetryOnFailure(
maxRetryCount: 5,
maxRetryDelay: TimeSpan.FromSeconds(10),
errorCodesToAdd: null);
});
if (hostContext.HostingEnvironment.IsDevelopment())

View File

@@ -70,6 +70,10 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Managing.Nswag", "Managing.
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Managing.Workers", "Managing.Workers\Managing.Workers.csproj", "{B7D66A73-CA3A-4DE5-8E88-59D50C4018A6}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Managing.Workers.Tests", "Managing.Workers.Tests\Managing.Workers.Tests.csproj", "{55B059EF-F128-453F-B678-0FF00F1D2E95}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Managing.Datasets", "Managing.Datasets\Managing.Datasets.csproj", "{82B138E4-CA45-41B0-B801-847307F24389}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -246,6 +250,22 @@ Global
{B7D66A73-CA3A-4DE5-8E88-59D50C4018A6}.Release|Any CPU.Build.0 = Release|Any CPU
{B7D66A73-CA3A-4DE5-8E88-59D50C4018A6}.Release|x64.ActiveCfg = Release|Any CPU
{B7D66A73-CA3A-4DE5-8E88-59D50C4018A6}.Release|x64.Build.0 = Release|Any CPU
{55B059EF-F128-453F-B678-0FF00F1D2E95}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{55B059EF-F128-453F-B678-0FF00F1D2E95}.Debug|Any CPU.Build.0 = Debug|Any CPU
{55B059EF-F128-453F-B678-0FF00F1D2E95}.Debug|x64.ActiveCfg = Debug|Any CPU
{55B059EF-F128-453F-B678-0FF00F1D2E95}.Debug|x64.Build.0 = Debug|Any CPU
{55B059EF-F128-453F-B678-0FF00F1D2E95}.Release|Any CPU.ActiveCfg = Release|Any CPU
{55B059EF-F128-453F-B678-0FF00F1D2E95}.Release|Any CPU.Build.0 = Release|Any CPU
{55B059EF-F128-453F-B678-0FF00F1D2E95}.Release|x64.ActiveCfg = Release|Any CPU
{55B059EF-F128-453F-B678-0FF00F1D2E95}.Release|x64.Build.0 = Release|Any CPU
{82B138E4-CA45-41B0-B801-847307F24389}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{82B138E4-CA45-41B0-B801-847307F24389}.Debug|Any CPU.Build.0 = Debug|Any CPU
{82B138E4-CA45-41B0-B801-847307F24389}.Debug|x64.ActiveCfg = Debug|Any CPU
{82B138E4-CA45-41B0-B801-847307F24389}.Debug|x64.Build.0 = Debug|Any CPU
{82B138E4-CA45-41B0-B801-847307F24389}.Release|Any CPU.ActiveCfg = Release|Any CPU
{82B138E4-CA45-41B0-B801-847307F24389}.Release|Any CPU.Build.0 = Release|Any CPU
{82B138E4-CA45-41B0-B801-847307F24389}.Release|x64.ActiveCfg = Release|Any CPU
{82B138E4-CA45-41B0-B801-847307F24389}.Release|x64.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
@@ -269,6 +289,8 @@ Global
{4521E1A9-AF81-4CA8-8B4D-30C261ECE977} = {D6711C71-A263-4398-8DFF-28E2CD1FE0CE}
{BE50F950-C1D4-4CE0-B32E-6AAC996770D5} = {D6711C71-A263-4398-8DFF-28E2CD1FE0CE}
{B7D66A73-CA3A-4DE5-8E88-59D50C4018A6} = {A1296069-2816-43D4-882C-516BCB718D03}
{55B059EF-F128-453F-B678-0FF00F1D2E95} = {8F2ECEA7-5BCA-45DF-B6E3-88AADD7AFD45}
{82B138E4-CA45-41B0-B801-847307F24389} = {8F2ECEA7-5BCA-45DF-B6E3-88AADD7AFD45}
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {BD7CA081-CE52-4824-9777-C0562E54F3EA}