diff --git a/SQL_MONITORING_README.md b/SQL_MONITORING_README.md new file mode 100644 index 00000000..f92424f6 --- /dev/null +++ b/SQL_MONITORING_README.md @@ -0,0 +1,336 @@ +# SQL Query Monitoring and Loop Detection System + +## Overview + +This comprehensive SQL monitoring system has been implemented to identify and resolve the SQL script loop issue that was causing DDOS-like behavior on your server. The system provides detailed logging, performance monitoring, and automatic loop detection to help identify the root cause of problematic database operations. + +## Features + +### 🔍 **Comprehensive SQL Query Logging** +- **Detailed Query Tracking**: Every SQL query is logged with timing, parameters, and execution context +- **Performance Metrics**: Automatic tracking of query execution times, row counts, and resource usage +- **Connection State Monitoring**: Tracks database connection open/close operations with timing +- **Error Logging**: Comprehensive error logging with stack traces and context information + +### 🚨 **Automatic Loop Detection** +- **Pattern Recognition**: Identifies repeated query patterns that may indicate infinite loops +- **Frequency Analysis**: Monitors query execution frequency and detects abnormally high rates +- **Performance Thresholds**: Automatically flags slow queries and high-frequency operations +- **Real-time Alerts**: Immediate notification when potential loops are detected + +### 📊 **Performance Monitoring** +- **Query Execution Statistics**: Tracks execution counts, average times, and performance trends +- **Resource Usage Monitoring**: Monitors memory, CPU, and I/O usage during database operations +- **Connection Pool Monitoring**: Tracks database connection pool health and usage +- **Transaction Monitoring**: Monitors transaction duration and rollback rates + +### 🎯 **Smart Alerting System** +- **Configurable Thresholds**: Customizable thresholds for slow queries, high frequency, and error rates +- **Multi-level Alerts**: Different alert levels (Info, Warning, Error, Critical) based on severity +- **Contextual Information**: Alerts include repository name, method name, and query patterns +- **Automatic Escalation**: Critical issues are automatically escalated with detailed diagnostics + +## Components + +### 1. SqlQueryLogger +**Location**: `src/Managing.Infrastructure.Database/PostgreSql/SqlQueryLogger.cs` + +Provides comprehensive logging for individual database operations: +- Operation start/completion logging +- Query execution timing and parameters +- Connection state changes +- Error handling and exception logging +- Performance issue detection + +### 2. SqlLoopDetectionService +**Location**: `src/Managing.Infrastructure.Database/PostgreSql/SqlLoopDetectionService.cs` + +Advanced loop detection and performance monitoring: +- Real-time query pattern analysis +- Execution frequency tracking +- Performance threshold monitoring +- Automatic cleanup of old tracking data +- Configurable detection rules + +### 3. BaseRepositoryWithLogging +**Location**: `src/Managing.Infrastructure.Database/PostgreSql/BaseRepositoryWithLogging.cs` + +Base class for repositories with integrated monitoring: +- Automatic query execution tracking +- Performance monitoring for all database operations +- Error handling and logging +- Loop detection integration + +### 4. 
Enhanced ManagingDbContext +**Location**: `src/Managing.Infrastructure.Database/PostgreSql/ManagingDbContext.cs` + +Extended DbContext with monitoring capabilities: +- Query execution tracking +- Performance metrics collection +- Loop detection integration +- Statistics and health monitoring + +### 5. SqlMonitoringController +**Location**: `src/Managing.Api/Controllers/SqlMonitoringController.cs` + +REST API endpoints for monitoring and management: +- Real-time query statistics +- Alert management +- Performance metrics +- Health monitoring +- Configuration management + +## API Endpoints + +### Get Query Statistics +```http +GET /api/SqlMonitoring/statistics +``` +Returns comprehensive query execution statistics including: +- Loop detection statistics +- Context execution counts +- Active query patterns +- Performance metrics + +### Get Alerts +```http +GET /api/SqlMonitoring/alerts +``` +Returns current alerts and potential issues: +- High frequency queries +- Slow query patterns +- Performance issues +- Loop detection alerts + +### Clear Tracking Data +```http +POST /api/SqlMonitoring/clear-tracking +``` +Clears all tracking data and resets monitoring counters. + +### Get Query Details +```http +GET /api/SqlMonitoring/query-details/{repositoryName}/{methodName} +``` +Returns detailed information about specific query patterns. + +### Get Monitoring Health +```http +GET /api/SqlMonitoring/health +``` +Returns overall monitoring system health status. + +## Configuration + +### SqlMonitoringSettings +**Location**: `src/Managing.Infrastructure.Database/PostgreSql/SqlMonitoringSettings.cs` + +Comprehensive configuration options: +- **TrackingWindow**: Time window for query tracking (default: 5 minutes) +- **MaxExecutionsPerWindow**: Maximum executions per window (default: 10) +- **SlowQueryThresholdMs**: Slow query threshold (default: 1000ms) +- **HighFrequencyThreshold**: High frequency threshold (default: 20 executions/minute) +- **EnableDetailedLogging**: Enable detailed SQL logging (default: true) +- **EnableLoopDetection**: Enable loop detection (default: true) +- **EnablePerformanceMonitoring**: Enable performance monitoring (default: true) + +## Usage Examples + +### 1. Using Enhanced Repository +```csharp +public class MyRepository : BaseRepositoryWithLogging, IMyRepository +{ + public MyRepository(ManagingDbContext context, ILogger logger, SqlLoopDetectionService loopDetectionService) + : base(context, logger, loopDetectionService) + { + } + + public async Task GetUserAsync(string name) + { + return await ExecuteWithLoggingAsync(async () => + { + // Your database operation here + return await _context.Users.FirstOrDefaultAsync(u => u.Name == name); + }, nameof(GetUserAsync), ("name", name)); + } +} +``` + +### 2. Manual Query Tracking +```csharp +// Track a specific query execution +_context.TrackQueryExecution("GetUserByName", TimeSpan.FromMilliseconds(150), "UserRepository", "GetUserAsync"); +``` + +### 3. 
Monitoring API Usage +```bash +# Get current statistics +curl -X GET "https://your-api/api/SqlMonitoring/statistics" + +# Get alerts +curl -X GET "https://your-api/api/SqlMonitoring/alerts" + +# Clear tracking data +curl -X POST "https://your-api/api/SqlMonitoring/clear-tracking" +``` + +## Logging Output Examples + +### Query Execution Log +``` +[SQL-OP-START] a1b2c3d4 | PostgreSqlUserRepository.GetUserByNameAsync | Started at 14:30:15.123 +[SQL-CONNECTION] a1b2c3d4 | PostgreSqlUserRepository.GetUserByNameAsync | Connection OPENED (took 5ms) +[SQL-QUERY] a1b2c3d4 | PostgreSqlUserRepository.GetUserByNameAsync | Executed in 25ms | Rows: 1 +[SQL-CONNECTION] a1b2c3d4 | PostgreSqlUserRepository.GetUserByNameAsync | Connection CLOSED (took 2ms) +[SQL-OP-COMPLETE] a1b2c3d4 | PostgreSqlUserRepository.GetUserByNameAsync | Completed in 32ms | Queries: 1 | Result: User +``` + +### Loop Detection Alert +``` +[SQL-LOOP-DETECTED] e5f6g7h8 | PostgreSqlTradingRepository.GetPositionsAsync | Pattern 'GetPositionsAsync()' executed 15 times | Possible infinite loop! +[SQL-LOOP-ALERT] Potential infinite loop detected in PostgreSqlTradingRepository.GetPositionsAsync with pattern 'GetPositionsAsync()' +``` + +### Performance Warning +``` +[SQL-PERFORMANCE] PostgreSqlTradingRepository | GetPositionsAsync took 2500ms (threshold: 1000ms) +[SQL-QUERY-DETAILS] i9j0k1l2 | Query: SELECT * FROM Positions WHERE Status = @status | Parameters: {"status":"Active"} +``` + +## Troubleshooting + +### Common Issues and Solutions + +#### 1. High Query Frequency +**Symptoms**: Multiple queries executing rapidly +**Detection**: `[SQL-LOOP-DETECTED]` logs with high execution counts +**Solution**: +- Check for recursive method calls +- Verify loop conditions in business logic +- Review async/await patterns + +#### 2. Slow Query Performance +**Symptoms**: Queries taking longer than expected +**Detection**: `[SQL-PERFORMANCE]` warnings +**Solution**: +- Review query execution plans +- Check database indexes +- Optimize query parameters + +#### 3. Connection Issues +**Symptoms**: Connection timeouts or pool exhaustion +**Detection**: `[SQL-CONNECTION]` error logs +**Solution**: +- Review connection management +- Check connection pool settings +- Verify proper connection disposal + +#### 4. Memory Issues +**Symptoms**: High memory usage during database operations +**Detection**: Memory monitoring alerts +**Solution**: +- Review query result set sizes +- Implement pagination +- Check for memory leaks in entity tracking + +## Integration Steps + +### 1. Update Existing Repositories +Replace existing repository implementations with the enhanced base class: + +```csharp +// Before +public class MyRepository : IMyRepository +{ + private readonly ManagingDbContext _context; + // ... +} + +// After +public class MyRepository : BaseRepositoryWithLogging, IMyRepository +{ + public MyRepository(ManagingDbContext context, ILogger logger, SqlLoopDetectionService loopDetectionService) + : base(context, logger, loopDetectionService) + { + } + // ... +} +``` + +### 2. Update Dependency Injection +The services are automatically registered in `Program.cs`: +- `SqlLoopDetectionService` as Singleton +- Enhanced `ManagingDbContext` with monitoring +- All repositories with logging capabilities + +### 3. 
Configure Monitoring Settings +Add configuration to `appsettings.json`: + +```json +{ + "SqlMonitoring": { + "TrackingWindow": "00:05:00", + "MaxExecutionsPerWindow": 10, + "SlowQueryThresholdMs": 1000, + "HighFrequencyThreshold": 20, + "EnableDetailedLogging": true, + "EnableLoopDetection": true, + "EnablePerformanceMonitoring": true + } +} +``` + +## Monitoring Dashboard + +### Key Metrics to Monitor + +1. **Query Execution Count**: Track total queries per minute +2. **Average Execution Time**: Monitor query performance trends +3. **Error Rate**: Track database error frequency +4. **Connection Pool Usage**: Monitor connection health +5. **Loop Detection Alerts**: Immediate notification of potential issues + +### Alert Thresholds + +- **Critical**: >50 queries/minute, >5 second execution time +- **Warning**: >20 queries/minute, >1 second execution time +- **Info**: Normal operation metrics + +## Best Practices + +### 1. Repository Design +- Always inherit from `BaseRepositoryWithLogging` +- Use `ExecuteWithLoggingAsync` for all database operations +- Include meaningful parameter names in logging calls +- Handle exceptions properly with logging + +### 2. Performance Optimization +- Monitor slow queries regularly +- Implement proper indexing strategies +- Use pagination for large result sets +- Avoid N+1 query problems + +### 3. Error Handling +- Log all database errors with context +- Implement proper retry mechanisms +- Use circuit breaker patterns for external dependencies +- Monitor error rates and trends + +### 4. Security Considerations +- Avoid logging sensitive data in query parameters +- Use parameterized queries to prevent SQL injection +- Implement proper access controls for monitoring endpoints +- Regular security audits of database operations + +## Conclusion + +This comprehensive SQL monitoring system provides the tools needed to identify and resolve the SQL script loop issue. The system offers: + +- **Real-time monitoring** of all database operations +- **Automatic loop detection** with configurable thresholds +- **Performance tracking** with detailed metrics +- **Comprehensive logging** for debugging and analysis +- **REST API endpoints** for monitoring and management +- **Configurable settings** for different environments + +The system is designed to be non-intrusive while providing maximum visibility into database operations, helping you quickly identify and resolve performance issues and potential infinite loops. 
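+## Appendix: Settings Class Sketch
+
+The appsettings files added in this change use flag-style keys (`Enabled`, `LoggingEnabled`, `SentryEnabled`, `LoopDetectionEnabled`, `PerformanceMonitoringEnabled`, `LoopDetectionWindowSeconds`, `MaxQueryExecutionsPerWindow`, `SlowQueryThresholdMs`, `SentryAlertThreshold`, `DataRetentionMinutes`, ...) rather than the names listed in the Configuration section above, and the base `appsettings.json` key `MaxQueryExecutionsPesrWindow` looks like a typo of `MaxQueryExecutionsPerWindow`, which is the name `SentrySqlMonitoringService` reads. Since `SqlMonitoringSettings.cs` is not part of this diff, the following is only a minimal sketch of an options class matching those keys; property names and defaults are assumptions taken from the configuration files and the service code, not the shipped implementation.
+
+```csharp
+namespace Managing.Infrastructure.Databases.PostgreSql;
+
+/// <summary>
+/// Sketch of an options class bound to the "SqlMonitoring" configuration section.
+/// Property names mirror the keys in appsettings.json and the flags consumed by
+/// SentrySqlMonitoringService; the actual SqlMonitoringSettings class may differ.
+/// </summary>
+public class SqlMonitoringSettings
+{
+    // Master switches checked by the monitoring services
+    public bool Enabled { get; set; } = true;
+    public bool LoggingEnabled { get; set; } = true;
+    public bool SentryEnabled { get; set; } = true;
+    public bool LoopDetectionEnabled { get; set; } = true;
+    public bool PerformanceMonitoringEnabled { get; set; } = true;
+
+    // Loop detection window and thresholds (defaults mirror the base appsettings.json)
+    public int LoopDetectionWindowSeconds { get; set; } = 60;
+    public int MaxQueryExecutionsPerWindow { get; set; } = 100;
+    public int MaxMethodExecutionsPerWindow { get; set; } = 50;
+    public int SentryAlertThreshold { get; set; } = 5;
+
+    // Query timing thresholds and tracking-data retention
+    public int LongRunningQueryThresholdMs { get; set; } = 1000;
+    public int SlowQueryThresholdMs { get; set; } = 2000;
+    public bool LogSlowQueriesOnly { get; set; }
+    public bool LogErrorsOnly { get; set; }
+    public int DataRetentionMinutes { get; set; } = 30;
+}
+```
+
+The section binds in `Program.cs` via `builder.Services.Configure<SqlMonitoringSettings>(builder.Configuration.GetSection("SqlMonitoring"))`, and `SentrySqlMonitoringService` receives the values through `IOptions<SqlMonitoringSettings>` in its constructor.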
diff --git a/src/Managing.Api/Controllers/SqlMonitoringController.cs b/src/Managing.Api/Controllers/SqlMonitoringController.cs new file mode 100644 index 00000000..d5756e3d --- /dev/null +++ b/src/Managing.Api/Controllers/SqlMonitoringController.cs @@ -0,0 +1,319 @@ +using Managing.Application.Abstractions.Services; +using Managing.Application.Shared; +using Managing.Infrastructure.Databases.PostgreSql; +using Microsoft.AspNetCore.Authorization; +using Microsoft.AspNetCore.Mvc; + +namespace Managing.Api.Controllers; + +/// +/// Controller for monitoring SQL query performance and detecting potential loops +/// Provides endpoints to view query statistics and clear tracking data +/// Requires admin authorization for access +/// +[ApiController] +[Authorize] +[Route("api/[controller]")] +public class SqlMonitoringController : BaseController +{ + private readonly SentrySqlMonitoringService _sentryMonitoringService; + private readonly ManagingDbContext _context; + private readonly ILogger _logger; + private readonly IAdminConfigurationService _adminService; + + public SqlMonitoringController( + SentrySqlMonitoringService sentryMonitoringService, + ManagingDbContext context, + ILogger logger, + IUserService userService, + IAdminConfigurationService adminService) : base(userService) + { + _sentryMonitoringService = sentryMonitoringService; + _context = context; + _logger = logger; + _adminService = adminService; + } + + /// + /// Checks if the current user is an admin + /// + /// True if the user is admin, False otherwise + private async Task IsUserAdmin() + { + try + { + var user = await GetUser(); + if (user == null) + return false; + + return _adminService.IsUserAdmin(user.Name); + } + catch (Exception ex) + { + _logger.LogError(ex, "Error checking if user is admin"); + return false; + } + } + + /// + /// Gets current SQL query execution statistics + /// + /// Query execution statistics + [HttpGet("statistics")] + public async Task> GetQueryStatistics() + { + try + { + // Check if user is admin + if (!await IsUserAdmin()) + { + return Forbid("Only administrators can access SQL monitoring statistics"); + } + + var loopDetectionStats = _sentryMonitoringService.GetQueryStatistics(); + var contextStats = _context.GetQueryExecutionCounts(); + + var result = new + { + LoopDetectionStats = loopDetectionStats, + ContextStats = contextStats, + Timestamp = DateTime.UtcNow, + TotalTrackedQueries = loopDetectionStats.Count, + ActiveQueries = loopDetectionStats.Count(kvp => kvp.Value.IsActive) + }; + + _logger.LogInformation("[SQL-MONITORING] Query statistics retrieved: {Count} tracked queries", loopDetectionStats.Count); + + return Ok(result); + } + catch (Exception ex) + { + _logger.LogError(ex, "[SQL-MONITORING] Error retrieving query statistics"); + return StatusCode(500, "Error retrieving query statistics"); + } + } + + /// + /// Gets potential loop alerts and performance issues + /// + /// List of potential issues + [HttpGet("alerts")] + public async Task> GetAlerts() + { + try + { + // Check if user is admin + if (!await IsUserAdmin()) + { + return Forbid("Only administrators can access SQL monitoring alerts"); + } + + var stats = _sentryMonitoringService.GetQueryStatistics(); + var alerts = new List(); + + foreach (var kvp in stats) + { + var stat = kvp.Value; + var issues = new List(); + + // Check for high execution frequency + if (stat.ExecutionsPerMinute > 20) + { + issues.Add($"High frequency: {stat.ExecutionsPerMinute:F1} executions/minute"); + } + + // Check for slow queries + if 
(stat.AverageExecutionTime.TotalMilliseconds > 1000) + { + issues.Add($"Slow query: {stat.AverageExecutionTime.TotalMilliseconds:F0}ms average"); + } + + // Check for many executions + if (stat.ExecutionCount > 50) + { + issues.Add($"High count: {stat.ExecutionCount} total executions"); + } + + if (issues.Any()) + { + alerts.Add(new + { + Repository = stat.RepositoryName, + Method = stat.MethodName, + QueryPattern = stat.QueryPattern, + Issues = issues, + ExecutionCount = stat.ExecutionCount, + ExecutionsPerMinute = stat.ExecutionsPerMinute, + AverageExecutionTime = stat.AverageExecutionTime.TotalMilliseconds, + LastExecution = stat.LastExecution, + IsActive = stat.IsActive + }); + } + } + + var result = new + { + Alerts = alerts, + AlertCount = alerts.Count, + Timestamp = DateTime.UtcNow + }; + + if (alerts.Any()) + { + _logger.LogWarning("[SQL-MONITORING] {Count} potential issues detected", alerts.Count); + } + + return Ok(result); + } + catch (Exception ex) + { + _logger.LogError(ex, "[SQL-MONITORING] Error retrieving alerts"); + return StatusCode(500, "Error retrieving alerts"); + } + } + + /// + /// Clears all SQL query tracking data + /// + /// Success status + [HttpPost("clear-tracking")] + public async Task ClearTracking() + { + try + { + // Check if user is admin + if (!await IsUserAdmin()) + { + return Forbid("Only administrators can clear SQL monitoring data"); + } + + _sentryMonitoringService.ClearAllTracking(); + _context.ClearQueryTracking(); + + _logger.LogInformation("[SQL-MONITORING] All tracking data cleared"); + + return Ok(new { Message = "All tracking data cleared successfully", Timestamp = DateTime.UtcNow }); + } + catch (Exception ex) + { + _logger.LogError(ex, "[SQL-MONITORING] Error clearing tracking data"); + return StatusCode(500, "Error clearing tracking data"); + } + } + + /// + /// Gets detailed information about a specific query pattern + /// + /// Repository name + /// Method name + /// Detailed query information + [HttpGet("query-details/{repositoryName}/{methodName}")] + public async Task> GetQueryDetails(string repositoryName, string methodName) + { + try + { + // Check if user is admin + if (!await IsUserAdmin()) + { + return Forbid("Only administrators can access SQL query details"); + } + + var stats = _sentryMonitoringService.GetQueryStatistics(); + var matchingQueries = stats.Where(kvp => + kvp.Value.RepositoryName.Equals(repositoryName, StringComparison.OrdinalIgnoreCase) && + kvp.Value.MethodName.Equals(methodName, StringComparison.OrdinalIgnoreCase)) + .ToList(); + + if (!matchingQueries.Any()) + { + return NotFound(new { Message = $"No queries found for {repositoryName}.{methodName}" }); + } + + var result = new + { + RepositoryName = repositoryName, + MethodName = methodName, + Queries = matchingQueries.Select(kvp => new + { + QueryPattern = kvp.Value.QueryPattern, + ExecutionCount = kvp.Value.ExecutionCount, + ExecutionsPerMinute = kvp.Value.ExecutionsPerMinute, + AverageExecutionTime = kvp.Value.AverageExecutionTime.TotalMilliseconds, + MinExecutionTime = kvp.Value.MinExecutionTime.TotalMilliseconds, + MaxExecutionTime = kvp.Value.MaxExecutionTime.TotalMilliseconds, + FirstExecution = kvp.Value.FirstExecution, + LastExecution = kvp.Value.LastExecution, + IsActive = kvp.Value.IsActive + }), + Timestamp = DateTime.UtcNow + }; + + return Ok(result); + } + catch (Exception ex) + { + _logger.LogError(ex, "[SQL-MONITORING] Error retrieving query details for {Repository}.{Method}", repositoryName, methodName); + return StatusCode(500, "Error 
retrieving query details"); + } + } + + /// + /// Gets a summary of SQL monitoring health + /// + /// Monitoring health summary + [HttpGet("health")] + public async Task> GetMonitoringHealth() + { + try + { + // Check if user is admin + if (!await IsUserAdmin()) + { + return Forbid("Only administrators can access SQL monitoring health"); + } + + var stats = _sentryMonitoringService.GetQueryStatistics(); + var contextStats = _context.GetQueryExecutionCounts(); + + var activeQueries = stats.Count(kvp => kvp.Value.IsActive); + var slowQueries = stats.Count(kvp => kvp.Value.AverageExecutionTime.TotalMilliseconds > 1000); + var highFrequencyQueries = stats.Count(kvp => kvp.Value.ExecutionsPerMinute > 20); + + var healthStatus = "Healthy"; + if (highFrequencyQueries > 0 || slowQueries > 5) + { + healthStatus = "Warning"; + } + if (highFrequencyQueries > 2 || slowQueries > 10) + { + healthStatus = "Critical"; + } + + var result = new + { + Status = healthStatus, + TotalTrackedQueries = stats.Count, + ActiveQueries = activeQueries, + SlowQueries = slowQueries, + HighFrequencyQueries = highFrequencyQueries, + ContextQueryCount = contextStats.Count, + Timestamp = DateTime.UtcNow, + // Add configuration status + isEnabled = _sentryMonitoringService.IsMonitoringEnabled(), + loggingEnabled = _sentryMonitoringService.IsLoggingEnabled(), + sentryEnabled = _sentryMonitoringService.IsSentryEnabled(), + loopDetectionEnabled = _sentryMonitoringService.IsLoopDetectionEnabled(), + performanceMonitoringEnabled = _sentryMonitoringService.IsPerformanceMonitoringEnabled(), + lastHealthCheck = DateTime.UtcNow.ToString("O"), + totalAlerts = 0 // TODO: Implement alert counting + }; + + return Ok(result); + } + catch (Exception ex) + { + _logger.LogError(ex, "[SQL-MONITORING] Error retrieving monitoring health"); + return StatusCode(500, "Error retrieving monitoring health"); + } + } +} diff --git a/src/Managing.Api/Program.cs b/src/Managing.Api/Program.cs index 93e2eb0d..344d3694 100644 --- a/src/Managing.Api/Program.cs +++ b/src/Managing.Api/Program.cs @@ -89,8 +89,15 @@ builder.Services.AddHttpClient("GmxHealthCheck") builder.Services.AddSingleton(sp => new Web3ProxyHealthCheck(sp.GetRequiredService(), web3ProxyUrl)); +// Add SQL Loop Detection Service with Sentry integration +// Configure SQL monitoring settings +builder.Services.Configure(builder.Configuration.GetSection("SqlMonitoring")); + +// Register SQL monitoring services +builder.Services.AddSingleton(); + // Add PostgreSQL DbContext with improved concurrency and connection management -builder.Services.AddDbContext(options => +builder.Services.AddDbContext((serviceProvider, options) => { options.UseNpgsql(postgreSqlConnectionString, npgsqlOptions => { @@ -114,8 +121,22 @@ builder.Services.AddDbContext(options => // Enable service provider caching for better performance options.EnableServiceProviderCaching(); - // Enable connection resiliency for backtest and high-load scenarios - options.LogTo(msg => Console.WriteLine(msg), LogLevel.Warning); // Log warnings for connection issues + // Enable comprehensive SQL query logging for monitoring and debugging + var logger = serviceProvider.GetRequiredService>(); + var sentryMonitoringService = serviceProvider.GetRequiredService(); + + options.LogTo(msg => + { + // Log SQL queries with enhanced formatting + if (msg.Contains("Executed DbCommand") || msg.Contains("Executing DbCommand")) + { + Console.WriteLine($"[EF-SQL] {msg}"); + } + else if (msg.Contains("Warning") || msg.Contains("Error")) + { + 
Console.WriteLine($"[EF-WARNING] {msg}"); + } + }, LogLevel.Information); // Log all SQL operations for monitoring }, ServiceLifetime.Scoped); // Explicitly specify scoped lifetime for proper request isolation // Add specific health checks for databases and other services diff --git a/src/Managing.Api/appsettings.Production.json b/src/Managing.Api/appsettings.Production.json index d19fce92..990d2acb 100644 --- a/src/Managing.Api/appsettings.Production.json +++ b/src/Managing.Api/appsettings.Production.json @@ -44,5 +44,12 @@ "BaseUrl": "https://api.kaigen.managing.live", "DebitEndpoint": "/api/credits/debit", "RefundEndpoint": "/api/credits/refund" + }, + "SqlMonitoring": { + "Enabled": true, + "LoggingEnabled": false, + "SentryEnabled": true, + "LoopDetectionEnabled": true, + "LogErrorsOnly": true } } \ No newline at end of file diff --git a/src/Managing.Api/appsettings.Sandbox.json b/src/Managing.Api/appsettings.Sandbox.json index edc00d40..bf9d3fc0 100644 --- a/src/Managing.Api/appsettings.Sandbox.json +++ b/src/Managing.Api/appsettings.Sandbox.json @@ -35,6 +35,13 @@ "ElasticConfiguration": { "Uri": "http://elasticsearch:9200" }, + "SqlMonitoring": { + "Enabled": true, + "LoggingEnabled": true, + "SentryEnabled": true, + "LoopDetectionEnabled": true, + "LogSlowQueriesOnly": false + }, "RunOrleansGrains": true, "AllowedHosts": "*" } \ No newline at end of file diff --git a/src/Managing.Api/appsettings.json b/src/Managing.Api/appsettings.json index 09e7e569..45eafb83 100644 --- a/src/Managing.Api/appsettings.json +++ b/src/Managing.Api/appsettings.json @@ -84,6 +84,20 @@ "WorkerBundleBacktest": false, "WorkerBalancesTracking": false, "WorkerNotifyBundleBacktest": false, - "AdminUsers": "", - "AllowedHosts": "*" + "SqlMonitoring": { + "Enabled": true, + "LoggingEnabled": true, + "SentryEnabled": true, + "LoopDetectionEnabled": true, + "PerformanceMonitoringEnabled": true, + "LoopDetectionWindowSeconds": 60, + "MaxQueryExecutionsPesrWindow": 100, + "MaxMethodExecutionsPerWindow": 50, + "LongRunningQueryThresholdMs": 1000, + "SentryAlertThreshold": 5, + "SlowQueryThresholdMs": 2000, + "LogSlowQueriesOnly": false, + "LogErrorsOnly": false, + "DataRetentionMinutes": 30 + } } \ No newline at end of file diff --git a/src/Managing.Application/Bots/TradingBotBase.cs b/src/Managing.Application/Bots/TradingBotBase.cs index 6cf530a4..42085a7b 100644 --- a/src/Managing.Application/Bots/TradingBotBase.cs +++ b/src/Managing.Application/Bots/TradingBotBase.cs @@ -308,26 +308,27 @@ public class TradingBotBase : ITradingBot // Second, process all finished positions to ensure they are updated in the database // TODO : This should be removed in the future, when we have a better way to handle positions - foreach (var position in Positions.Values.Where(p => p.IsFinished())) - { - try + if (!Config.IsForBacktest) + foreach (var position in Positions.Values.Where(p => p.IsFinished())) { - var positionInDatabase = await ServiceScopeHelpers.WithScopedService( - _scopeFactory, - async tradingService => - { - return await tradingService.GetPositionByIdentifierAsync(position.Identifier); - }); - - if (positionInDatabase != null && positionInDatabase.Status != position.Status) + try { - await UpdatePositionDatabase(position); - await LogInformation( - $"💾 Database Update\nPosition: `{position.Identifier}`\nStatus: `{position.Status}`\nUpdated in database"); + var positionInDatabase = await ServiceScopeHelpers.WithScopedService( + _scopeFactory, + async tradingService => + { + return await 
tradingService.GetPositionByIdentifierAsync(position.Identifier); + }); + + if (positionInDatabase != null && positionInDatabase.Status != position.Status) + { + await UpdatePositionDatabase(position); + await LogInformation( + $"💾 Database Update\nPosition: `{position.Identifier}`\nStatus: `{position.Status}`\nUpdated in database"); + } } - } - catch (Exception ex) - { + catch (Exception ex) + { await LogWarning($"Failed to update finished position {position.Identifier} in database: {ex.Message}"); } } diff --git a/src/Managing.Infrastructure.Database/PostgreSql/BaseRepositoryWithLogging.cs b/src/Managing.Infrastructure.Database/PostgreSql/BaseRepositoryWithLogging.cs new file mode 100644 index 00000000..f86ef214 --- /dev/null +++ b/src/Managing.Infrastructure.Database/PostgreSql/BaseRepositoryWithLogging.cs @@ -0,0 +1,223 @@ +using System.Diagnostics; +using Microsoft.Extensions.Logging; + +namespace Managing.Infrastructure.Databases.PostgreSql; + +/// +/// Base repository class with comprehensive SQL query logging and monitoring +/// Provides automatic query tracking, loop detection, and performance monitoring +/// +public abstract class BaseRepositoryWithLogging +{ + protected readonly ManagingDbContext _context; + protected readonly ILogger _logger; + protected readonly SentrySqlMonitoringService _sentryMonitoringService; + protected readonly string _repositoryName; + + protected BaseRepositoryWithLogging(ManagingDbContext context, ILogger logger, SentrySqlMonitoringService sentryMonitoringService) + { + _context = context; + _logger = logger; + _sentryMonitoringService = sentryMonitoringService; + _repositoryName = GetType().Name; + } + + /// + /// Executes a database operation with lightweight logging and monitoring + /// Only logs slow queries (>2000ms) and errors to minimize performance impact + /// + /// Return type of the operation + /// The database operation to execute + /// Name of the calling method + /// Parameters passed to the operation + /// Result of the operation + protected async Task ExecuteWithLoggingAsync( + Func> operation, + string methodName, + params (string name, object value)[] parameters) + { + // Check if monitoring is enabled globally + if (!_sentryMonitoringService.IsMonitoringEnabled()) + { + return await operation(); + } + + var stopwatch = Stopwatch.StartNew(); + var queryPattern = GenerateQueryPattern(methodName, parameters); + + try + { + var result = await operation(); + stopwatch.Stop(); + + // Only log if slow query (>2000ms) and logging is enabled + if (stopwatch.Elapsed.TotalMilliseconds > 2000 && _sentryMonitoringService.IsLoggingEnabled()) + { + _logger.LogWarning( + "[SLOW-SQL] {Repository}.{Method} | Pattern: {Pattern} | Time: {Time}ms", + _repositoryName, methodName, queryPattern, stopwatch.Elapsed.TotalMilliseconds); + + // Send slow query alert to Sentry asynchronously if enabled + if (_sentryMonitoringService.IsSentryEnabled()) + { + _ = Task.Run(() => SendSlowQueryToSentryAsync(queryPattern, stopwatch.Elapsed, methodName)); + } + } + + // Track query execution for loop detection if enabled (minimal overhead) + if (_sentryMonitoringService.IsLoopDetectionEnabled()) + { + _context.TrackQueryExecution(queryPattern, stopwatch.Elapsed, _repositoryName, methodName); + } + + return result; + } + catch (Exception ex) + { + stopwatch.Stop(); + + // Always log errors if logging is enabled + if (_sentryMonitoringService.IsLoggingEnabled()) + { + _logger.LogError(ex, + "[SQL-ERROR] {Repository}.{Method} | Pattern: {Pattern} | Time: {Time}ms", + 
_repositoryName, methodName, queryPattern, stopwatch.Elapsed.TotalMilliseconds); + } + + // Send SQL error to Sentry asynchronously if enabled + if (_sentryMonitoringService.IsSentryEnabled()) + { + _ = Task.Run(() => SendSqlErrorToSentryAsync(queryPattern, stopwatch.Elapsed, ex, methodName)); + } + + throw; + } + } + + /// + /// Executes a database operation with lightweight logging and monitoring (void return) + /// Only logs slow queries (>2000ms) and errors to minimize performance impact + /// + /// The database operation to execute + /// Name of the calling method + /// Parameters passed to the operation + protected async Task ExecuteWithLoggingAsync( + Func operation, + string methodName, + params (string name, object value)[] parameters) + { + // Check if monitoring is enabled globally + if (!_sentryMonitoringService.IsMonitoringEnabled()) + { + await operation(); + return; + } + + var stopwatch = Stopwatch.StartNew(); + var queryPattern = GenerateQueryPattern(methodName, parameters); + + try + { + await operation(); + stopwatch.Stop(); + + // Only log if slow query (>2000ms) and logging is enabled + if (stopwatch.Elapsed.TotalMilliseconds > 2000 && _sentryMonitoringService.IsLoggingEnabled()) + { + _logger.LogWarning( + "[SLOW-SQL] {Repository}.{Method} | Pattern: {Pattern} | Time: {Time}ms", + _repositoryName, methodName, queryPattern, stopwatch.Elapsed.TotalMilliseconds); + + // Send slow query alert to Sentry asynchronously if enabled + if (_sentryMonitoringService.IsSentryEnabled()) + { + _ = Task.Run(() => SendSlowQueryToSentryAsync(queryPattern, stopwatch.Elapsed, methodName)); + } + } + + // Track query execution for loop detection if enabled (minimal overhead) + if (_sentryMonitoringService.IsLoopDetectionEnabled()) + { + _context.TrackQueryExecution(queryPattern, stopwatch.Elapsed, _repositoryName, methodName); + } + } + catch (Exception ex) + { + stopwatch.Stop(); + + // Always log errors if logging is enabled + if (_sentryMonitoringService.IsLoggingEnabled()) + { + _logger.LogError(ex, + "[SQL-ERROR] {Repository}.{Method} | Pattern: {Pattern} | Time: {Time}ms", + _repositoryName, methodName, queryPattern, stopwatch.Elapsed.TotalMilliseconds); + } + + // Send SQL error to Sentry asynchronously if enabled + if (_sentryMonitoringService.IsSentryEnabled()) + { + _ = Task.Run(() => SendSqlErrorToSentryAsync(queryPattern, stopwatch.Elapsed, ex, methodName)); + } + + throw; + } + } + + /// + /// Generates a query pattern for tracking purposes + /// + /// Name of the method + /// Method parameters + /// Query pattern string + private string GenerateQueryPattern(string methodName, (string name, object value)[] parameters) + { + var paramStrings = parameters.Select(p => $"{p.name}={p.value?.GetType().Name ?? 
"null"}"); + return $"{methodName}({string.Join(",", paramStrings)})"; + } + + /// + /// Logs a potential performance issue + /// + /// Operation description + /// Operation duration + /// Performance threshold + protected void LogPerformanceIssue(string operation, TimeSpan duration, TimeSpan threshold) + { + if (duration > threshold) + { + _logger.LogWarning( + "[SQL-PERFORMANCE] {Repository} | {Operation} took {Duration}ms (threshold: {Threshold}ms)", + _repositoryName, operation, duration.TotalMilliseconds, threshold.TotalMilliseconds); + } + } + + /// + /// Sends slow query alert to Sentry asynchronously (fire and forget) + /// + private async Task SendSlowQueryToSentryAsync(string queryPattern, TimeSpan executionTime, string methodName) + { + try + { + await _sentryMonitoringService.SendSlowQueryAlertAsync(_repositoryName, methodName, queryPattern, executionTime); + } + catch (Exception ex) + { + _logger.LogError(ex, "[SENTRY-ERROR] Failed to send slow query alert to Sentry"); + } + } + + /// + /// Sends SQL error to Sentry asynchronously (fire and forget) + /// + private async Task SendSqlErrorToSentryAsync(string queryPattern, TimeSpan executionTime, Exception exception, string methodName) + { + try + { + await _sentryMonitoringService.SendSqlErrorAlertAsync(_repositoryName, methodName, queryPattern, executionTime, exception); + } + catch (Exception ex) + { + _logger.LogError(ex, "[SENTRY-ERROR] Failed to send SQL error alert to Sentry"); + } + } +} diff --git a/src/Managing.Infrastructure.Database/PostgreSql/ManagingDbContext.cs b/src/Managing.Infrastructure.Database/PostgreSql/ManagingDbContext.cs index 6f5f7fa3..ed91dd23 100644 --- a/src/Managing.Infrastructure.Database/PostgreSql/ManagingDbContext.cs +++ b/src/Managing.Infrastructure.Database/PostgreSql/ManagingDbContext.cs @@ -1,14 +1,27 @@ using Managing.Infrastructure.Databases.PostgreSql.Entities; using Microsoft.EntityFrameworkCore; +using Microsoft.Extensions.Logging; namespace Managing.Infrastructure.Databases.PostgreSql; public class ManagingDbContext : DbContext { + private readonly ILogger? _logger; + private readonly SentrySqlMonitoringService? 
_sentryMonitoringService; + private readonly Dictionary _queryExecutionCounts = new(); + private readonly object _queryCountLock = new object(); + public ManagingDbContext(DbContextOptions options) : base(options) { } + public ManagingDbContext(DbContextOptions options, ILogger logger, SentrySqlMonitoringService sentryMonitoringService) + : base(options) + { + _logger = logger; + _sentryMonitoringService = sentryMonitoringService; + } + public DbSet Accounts { get; set; } public DbSet Users { get; set; } public DbSet GeneticRequests { get; set; } @@ -607,7 +620,7 @@ public class ManagingDbContext : DbContext { try { - var count = await Database.SqlQueryRaw($"SELECT COUNT(*) FROM {tableName}").FirstOrDefaultAsync(); + var count = await Database.SqlQueryRaw($"SELECT COUNT(*) FROM \"{tableName}\"").FirstOrDefaultAsync(); stats[tableName] = count; } catch @@ -638,4 +651,63 @@ public class ManagingDbContext : DbContext // Add any additional configuration here if needed } + + /// + /// Tracks query execution for loop detection and performance monitoring + /// + /// Pattern or hash of the query + /// Time taken to execute the query + /// Name of the repository executing the query + /// Name of the method executing the query + public void TrackQueryExecution(string queryPattern, TimeSpan executionTime, string repositoryName, string methodName) + { + if (_logger == null || _sentryMonitoringService == null) return; + + // Track execution count for this query pattern + lock (_queryCountLock) + { + _queryExecutionCounts[queryPattern] = _queryExecutionCounts.GetValueOrDefault(queryPattern, 0) + 1; + } + + // Check for potential loops with Sentry integration + var isLoopDetected = _sentryMonitoringService.TrackQueryExecution(repositoryName, methodName, queryPattern, executionTime); + + // Log query execution details + var logLevel = executionTime.TotalMilliseconds > 1000 ? 
LogLevel.Warning : LogLevel.Debug; + _logger.Log(logLevel, + "[SQL-QUERY-TRACKED] {Repository}.{Method} | Pattern: {Pattern} | Time: {Time}ms | Count: {Count}", + repositoryName, methodName, queryPattern, executionTime.TotalMilliseconds, + _queryExecutionCounts[queryPattern]); + + // Alert on potential loops + if (isLoopDetected) + { + _logger.LogError( + "[SQL-LOOP-ALERT] Potential infinite loop detected in {Repository}.{Method} with pattern '{Pattern}'", + repositoryName, methodName, queryPattern); + } + } + + /// + /// Gets current query execution statistics + /// + public Dictionary GetQueryExecutionCounts() + { + lock (_queryCountLock) + { + return new Dictionary(_queryExecutionCounts); + } + } + + /// + /// Clears query execution tracking data + /// + public void ClearQueryTracking() + { + lock (_queryCountLock) + { + _queryExecutionCounts.Clear(); + } + _logger?.LogInformation("[SQL-TRACKING] Query execution counts cleared"); + } } \ No newline at end of file diff --git a/src/Managing.Infrastructure.Database/PostgreSql/PostgreSqlConnectionHelper.cs b/src/Managing.Infrastructure.Database/PostgreSql/PostgreSqlConnectionHelper.cs index 8124cc2e..c99b2c4c 100644 --- a/src/Managing.Infrastructure.Database/PostgreSql/PostgreSqlConnectionHelper.cs +++ b/src/Managing.Infrastructure.Database/PostgreSql/PostgreSqlConnectionHelper.cs @@ -1,10 +1,12 @@ using System.Data; +using System.Diagnostics; using Microsoft.EntityFrameworkCore; namespace Managing.Infrastructure.Databases.PostgreSql; /// /// Helper class for managing PostgreSQL database connections in Entity Framework repositories +/// Enhanced with comprehensive logging and monitoring capabilities /// public static class PostgreSqlConnectionHelper { @@ -20,6 +22,27 @@ public static class PostgreSqlConnectionHelper } } + /// + /// Ensures the database connection is open with logging + /// + /// The DbContext to manage the connection for + /// SQL query logger for monitoring + public static async Task EnsureConnectionOpenAsync(DbContext context, SqlQueryLogger logger) + { + var stopwatch = Stopwatch.StartNew(); + + if (context.Database.GetDbConnection().State != ConnectionState.Open) + { + await context.Database.OpenConnectionAsync(); + stopwatch.Stop(); + logger.LogConnectionStateChange("OPENED", stopwatch.Elapsed); + } + else + { + logger.LogConnectionStateChange("ALREADY_OPEN"); + } + } + /// /// Safely closes the database connection if it was opened by us /// @@ -31,4 +54,25 @@ public static class PostgreSqlConnectionHelper await context.Database.CloseConnectionAsync(); } } + + /// + /// Safely closes the database connection with logging + /// + /// The DbContext to manage the connection for + /// SQL query logger for monitoring + public static async Task SafeCloseConnectionAsync(DbContext context, SqlQueryLogger logger) + { + var stopwatch = Stopwatch.StartNew(); + + if (context.Database.GetDbConnection().State == ConnectionState.Open) + { + await context.Database.CloseConnectionAsync(); + stopwatch.Stop(); + logger.LogConnectionStateChange("CLOSED", stopwatch.Elapsed); + } + else + { + logger.LogConnectionStateChange("ALREADY_CLOSED"); + } + } } diff --git a/src/Managing.Infrastructure.Database/PostgreSql/PostgreSqlTradingRepository.cs b/src/Managing.Infrastructure.Database/PostgreSql/PostgreSqlTradingRepository.cs index 9d76c20f..e73be6cd 100644 --- a/src/Managing.Infrastructure.Database/PostgreSql/PostgreSqlTradingRepository.cs +++ b/src/Managing.Infrastructure.Database/PostgreSql/PostgreSqlTradingRepository.cs @@ -5,18 +5,17 @@ 
using Managing.Domain.Trades; using Managing.Domain.Users; using Managing.Infrastructure.Databases.PostgreSql.Entities; using Microsoft.EntityFrameworkCore; +using Microsoft.Extensions.Logging; using Newtonsoft.Json; using static Managing.Common.Enums; namespace Managing.Infrastructure.Databases.PostgreSql; -public class PostgreSqlTradingRepository : ITradingRepository +public class PostgreSqlTradingRepository : BaseRepositoryWithLogging, ITradingRepository { - private readonly ManagingDbContext _context; - - public PostgreSqlTradingRepository(ManagingDbContext context) + public PostgreSqlTradingRepository(ManagingDbContext context, ILogger logger, SentrySqlMonitoringService sentryMonitoringService) + : base(context, logger, sentryMonitoringService) { - _context = context; } #region Scenario Methods @@ -268,26 +267,29 @@ public class PostgreSqlTradingRepository : ITradingRepository public async Task GetPositionByIdentifierAsync(Guid identifier) { - try + return await ExecuteWithLoggingAsync(async () => { - await PostgreSqlConnectionHelper.EnsureConnectionOpenAsync(_context); + try + { + await PostgreSqlConnectionHelper.EnsureConnectionOpenAsync(_context); - var position = await _context.Positions - .AsNoTracking() - .Include(p => p.User) - .Include(p => p.OpenTrade) - .Include(p => p.StopLossTrade) - .Include(p => p.TakeProfit1Trade) - .Include(p => p.TakeProfit2Trade) - .FirstOrDefaultAsync(p => p.Identifier == identifier) - .ConfigureAwait(false); + var position = await _context.Positions + .AsNoTracking() + .Include(p => p.User) + .Include(p => p.OpenTrade) + .Include(p => p.StopLossTrade) + .Include(p => p.TakeProfit1Trade) + .Include(p => p.TakeProfit2Trade) + .FirstOrDefaultAsync(p => p.Identifier == identifier) + .ConfigureAwait(false); - return PostgreSqlMappers.Map(position); - } - finally - { - await PostgreSqlConnectionHelper.SafeCloseConnectionAsync(_context); - } + return PostgreSqlMappers.Map(position ?? throw new InvalidOperationException("Position not found")); + } + finally + { + await PostgreSqlConnectionHelper.SafeCloseConnectionAsync(_context); + } + }, nameof(GetPositionByIdentifierAsync), ("identifier", identifier)); } public IEnumerable GetPositions(PositionInitiator positionInitiator) @@ -389,51 +391,63 @@ public class PostgreSqlTradingRepository : ITradingRepository public async Task UpdatePositionAsync(Position position) { - var entity = _context.Positions - .AsTracking() - .Include(p => p.OpenTrade) - .Include(p => p.StopLossTrade) - .Include(p => p.TakeProfit1Trade) - .Include(p => p.TakeProfit2Trade) - .FirstOrDefault(p => p.Identifier == position.Identifier); - - if (entity != null) + await ExecuteWithLoggingAsync(async () => { - entity.ProfitAndLoss = position.ProfitAndLoss?.Realized ?? 0; - entity.NetPnL = position.ProfitAndLoss?.Net ?? 0; - entity.UiFees = position.UiFees; - // entity.OriginDirection = position.OriginDirection; - entity.GasFees = position.GasFees; - entity.Status = position.Status; - entity.MoneyManagementJson = position.MoneyManagement != null - ? 
JsonConvert.SerializeObject(position.MoneyManagement) - : null; - entity.UpdatedAt = DateTime.UtcNow; - - // Update related trades directly through the position's trade references - // This ensures we're updating the correct trade records for this specific position - if (position.Open != null && entity.OpenTrade != null) + try { - UpdateTradeEntity(entity.OpenTrade, position.Open); - } + await PostgreSqlConnectionHelper.EnsureConnectionOpenAsync(_context); - if (position.StopLoss != null && entity.StopLossTrade != null) + var entity = _context.Positions + .AsTracking() + .Include(p => p.OpenTrade) + .Include(p => p.StopLossTrade) + .Include(p => p.TakeProfit1Trade) + .Include(p => p.TakeProfit2Trade) + .FirstOrDefault(p => p.Identifier == position.Identifier); + + if (entity != null) + { + entity.ProfitAndLoss = position.ProfitAndLoss?.Realized ?? 0; + entity.NetPnL = position.ProfitAndLoss?.Net ?? 0; + entity.UiFees = position.UiFees; + // entity.OriginDirection = position.OriginDirection; + entity.GasFees = position.GasFees; + entity.Status = position.Status; + entity.MoneyManagementJson = position.MoneyManagement != null + ? JsonConvert.SerializeObject(position.MoneyManagement) + : null; + entity.UpdatedAt = DateTime.UtcNow; + + // Update related trades directly through the position's trade references + // This ensures we're updating the correct trade records for this specific position + if (position.Open != null && entity.OpenTrade != null) + { + UpdateTradeEntity(entity.OpenTrade, position.Open); + } + + if (position.StopLoss != null && entity.StopLossTrade != null) + { + UpdateTradeEntity(entity.StopLossTrade, position.StopLoss); + } + + if (position.TakeProfit1 != null && entity.TakeProfit1Trade != null) + { + UpdateTradeEntity(entity.TakeProfit1Trade, position.TakeProfit1); + } + + if (position.TakeProfit2 != null && entity.TakeProfit2Trade != null) + { + UpdateTradeEntity(entity.TakeProfit2Trade, position.TakeProfit2); + } + + await _context.SaveChangesAsync(); + } + } + finally { - UpdateTradeEntity(entity.StopLossTrade, position.StopLoss); + await PostgreSqlConnectionHelper.SafeCloseConnectionAsync(_context); } - - if (position.TakeProfit1 != null && entity.TakeProfit1Trade != null) - { - UpdateTradeEntity(entity.TakeProfit1Trade, position.TakeProfit1); - } - - if (position.TakeProfit2 != null && entity.TakeProfit2Trade != null) - { - UpdateTradeEntity(entity.TakeProfit2Trade, position.TakeProfit2); - } - - await _context.SaveChangesAsync(); - } + }, nameof(UpdatePositionAsync), ("positionIdentifier", position.Identifier), ("positionStatus", position.Status)); } /// diff --git a/src/Managing.Infrastructure.Database/PostgreSql/PostgreSqlUserRepository.cs b/src/Managing.Infrastructure.Database/PostgreSql/PostgreSqlUserRepository.cs index d02808a6..a7c58681 100644 --- a/src/Managing.Infrastructure.Database/PostgreSql/PostgreSqlUserRepository.cs +++ b/src/Managing.Infrastructure.Database/PostgreSql/PostgreSqlUserRepository.cs @@ -1,119 +1,128 @@ using Managing.Application.Abstractions.Repositories; using Managing.Domain.Users; using Microsoft.EntityFrameworkCore; +using Microsoft.Extensions.Logging; namespace Managing.Infrastructure.Databases.PostgreSql; -public class PostgreSqlUserRepository : IUserRepository +public class PostgreSqlUserRepository : BaseRepositoryWithLogging, IUserRepository { - private readonly ManagingDbContext _context; - - public PostgreSqlUserRepository(ManagingDbContext context) + public PostgreSqlUserRepository(ManagingDbContext context, ILogger logger, 
SentrySqlMonitoringService sentryMonitoringService) + : base(context, logger, sentryMonitoringService) { - _context = context; } - - public async Task GetUserByAgentNameAsync(string agentName) { - try + return await ExecuteWithLoggingAsync(async () => { - await PostgreSqlConnectionHelper.EnsureConnectionOpenAsync(_context); + try + { + await PostgreSqlConnectionHelper.EnsureConnectionOpenAsync(_context); - var userEntity = await _context.Users - .AsNoTracking() - .FirstOrDefaultAsync(u => u.AgentName == agentName) - .ConfigureAwait(false); + var userEntity = await _context.Users + .AsNoTracking() + .FirstOrDefaultAsync(u => u.AgentName == agentName) + .ConfigureAwait(false); - return PostgreSqlMappers.Map(userEntity); - } - finally - { - // Always ensure the connection is closed after the operation - await PostgreSqlConnectionHelper.SafeCloseConnectionAsync(_context); - } + return PostgreSqlMappers.Map(userEntity ?? throw new InvalidOperationException("User not found")); + } + finally + { + // Always ensure the connection is closed after the operation + await PostgreSqlConnectionHelper.SafeCloseConnectionAsync(_context); + } + }, nameof(GetUserByAgentNameAsync), ("agentName", agentName)); } public async Task GetUserByNameAsync(string name) { - try + return await ExecuteWithLoggingAsync(async () => { - await PostgreSqlConnectionHelper.EnsureConnectionOpenAsync(_context); + try + { + await PostgreSqlConnectionHelper.EnsureConnectionOpenAsync(_context); - var userEntity = await _context.Users - .AsNoTracking() - .FirstOrDefaultAsync(u => u.Name == name) - .ConfigureAwait(false); + var userEntity = await _context.Users + .AsNoTracking() + .FirstOrDefaultAsync(u => u.Name == name) + .ConfigureAwait(false); - return PostgreSqlMappers.Map(userEntity); - } - finally - { - // Always ensure the connection is closed after the operation - await PostgreSqlConnectionHelper.SafeCloseConnectionAsync(_context); - } + return PostgreSqlMappers.Map(userEntity ?? 
throw new InvalidOperationException("User not found")); + } + finally + { + // Always ensure the connection is closed after the operation + await PostgreSqlConnectionHelper.SafeCloseConnectionAsync(_context); + } + }, nameof(GetUserByNameAsync), ("name", name)); } public async Task> GetAllUsersAsync() { - try + return await ExecuteWithLoggingAsync(async () => { - await PostgreSqlConnectionHelper.EnsureConnectionOpenAsync(_context); + try + { + await PostgreSqlConnectionHelper.EnsureConnectionOpenAsync(_context); - var userEntities = await _context.Users - .AsNoTracking() - .ToListAsync() - .ConfigureAwait(false); + var userEntities = await _context.Users + .AsNoTracking() + .ToListAsync() + .ConfigureAwait(false); - return userEntities.Select(PostgreSqlMappers.Map); - } - finally - { - // Always ensure the connection is closed after the operation - await PostgreSqlConnectionHelper.SafeCloseConnectionAsync(_context); - } + return userEntities.Select(PostgreSqlMappers.Map); + } + finally + { + // Always ensure the connection is closed after the operation + await PostgreSqlConnectionHelper.SafeCloseConnectionAsync(_context); + } + }, nameof(GetAllUsersAsync)); } public async Task SaveOrUpdateUserAsync(User user) { - try + await ExecuteWithLoggingAsync(async () => { - var existingUser = await _context.Users - .AsTracking() - .FirstOrDefaultAsync(u => u.Name == user.Name) - .ConfigureAwait(false); - - if (existingUser != null) + try { - // Update existing user - existingUser.AgentName = user.AgentName; - existingUser.AvatarUrl = user.AvatarUrl; - existingUser.TelegramChannel = user.TelegramChannel; + var existingUser = await _context.Users + .AsTracking() + .FirstOrDefaultAsync(u => u.Name == user.Name) + .ConfigureAwait(false); - _context.Users.Update(existingUser); + if (existingUser != null) + { + // Update existing user + existingUser.AgentName = user.AgentName; + existingUser.AvatarUrl = user.AvatarUrl; + existingUser.TelegramChannel = user.TelegramChannel; - // Update the user object with the existing user's ID - user.Id = existingUser.Id; - } - else - { - // Insert new user - var userEntity = PostgreSqlMappers.Map(user); - _context.Users.Add(userEntity); + _context.Users.Update(existingUser); + + // Update the user object with the existing user's ID + user.Id = existingUser.Id; + } + else + { + // Insert new user + var userEntity = PostgreSqlMappers.Map(user); + _context.Users.Add(userEntity); + + // Update the user object with the database-generated ID after save + await _context.SaveChangesAsync().ConfigureAwait(false); + user.Id = userEntity.Id; + return; // Exit early since we already saved + } - // Update the user object with the database-generated ID after save await _context.SaveChangesAsync().ConfigureAwait(false); - user.Id = userEntity.Id; - return; // Exit early since we already saved } - - await _context.SaveChangesAsync().ConfigureAwait(false); - } - catch (Exception e) - { - Console.WriteLine(e); - throw new Exception("Cannot save or update user"); - } + catch (Exception e) + { + Console.WriteLine(e); + throw new Exception("Cannot save or update user"); + } + }, nameof(SaveOrUpdateUserAsync), ("userName", user.Name), ("userId", user.Id)); } } \ No newline at end of file diff --git a/src/Managing.Infrastructure.Database/PostgreSql/SentrySqlMonitoringService.cs b/src/Managing.Infrastructure.Database/PostgreSql/SentrySqlMonitoringService.cs new file mode 100644 index 00000000..771f611c --- /dev/null +++ 
b/src/Managing.Infrastructure.Database/PostgreSql/SentrySqlMonitoringService.cs @@ -0,0 +1,573 @@ +using System.Collections.Concurrent; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; + +namespace Managing.Infrastructure.Databases.PostgreSql; + +/// +/// Enhanced SQL loop detection service with Sentry integration +/// Monitors query patterns and execution frequency, sending critical alerts to Sentry +/// +public class SentrySqlMonitoringService +{ + private readonly ILogger _logger; + private readonly SqlMonitoringSettings _settings; + private readonly ConcurrentDictionary _queryTrackers; + private readonly Timer _cleanupTimer; + + public SentrySqlMonitoringService(ILogger logger, IOptions settings) + { + _logger = logger; + _settings = settings.Value; + _queryTrackers = new ConcurrentDictionary(); + + // Setup cleanup timer to remove old tracking data + _cleanupTimer = new Timer(CleanupOldTrackers, null, TimeSpan.FromMinutes(1), TimeSpan.FromMinutes(1)); + } + + /// + /// Tracks a query execution and detects potential loops with Sentry integration + /// + /// Name of the repository executing the query + /// Name of the method executing the query + /// Pattern or hash of the query being executed + /// Time taken to execute the query + /// True if a potential loop is detected + public bool TrackQueryExecution(string repositoryName, string methodName, string queryPattern, TimeSpan executionTime) + { + var key = $"{repositoryName}.{methodName}.{queryPattern}"; + var now = DateTime.UtcNow; + + var tracker = _queryTrackers.AddOrUpdate(key, + new QueryExecutionTracker + { + RepositoryName = repositoryName, + MethodName = methodName, + QueryPattern = queryPattern, + FirstExecution = now, + LastExecution = now, + ExecutionCount = 1, + TotalExecutionTime = executionTime, + MaxExecutionTime = executionTime, + MinExecutionTime = executionTime + }, + (k, existing) => + { + existing.LastExecution = now; + existing.ExecutionCount++; + existing.TotalExecutionTime += executionTime; + existing.MaxExecutionTime = existing.MaxExecutionTime > executionTime ? existing.MaxExecutionTime : executionTime; + existing.MinExecutionTime = existing.MinExecutionTime < executionTime ? 
existing.MinExecutionTime : executionTime; + return existing; + }); + + // Check for potential loop conditions + var timeSinceFirst = now - tracker.FirstExecution; + var executionsPerMinute = tracker.ExecutionCount / Math.Max(timeSinceFirst.TotalMinutes, 0.1); + + var isLoopDetected = false; + var isCriticalAlert = false; + var reasons = new List(); + var sentryTags = new Dictionary(); + var sentryExtras = new Dictionary(); + + // Check execution frequency + if (executionsPerMinute > 20) + { + isLoopDetected = true; + reasons.Add($"High frequency: {executionsPerMinute:F1} executions/minute"); + + if (executionsPerMinute > 50) // Critical frequency threshold + { + isCriticalAlert = true; + sentryTags["alert_level"] = "critical"; + sentryTags["issue_type"] = "high_frequency_query"; + } + } + + // Check total execution count in window + if (tracker.ExecutionCount > _settings.MaxQueryExecutionsPerWindow) + { + isLoopDetected = true; + reasons.Add($"High count: {tracker.ExecutionCount} executions in {timeSinceFirst.TotalMinutes:F1} minutes"); + + if (tracker.ExecutionCount > _settings.SentryAlertThreshold * _settings.MaxQueryExecutionsPerWindow) + { + isCriticalAlert = true; + sentryTags["alert_level"] = "critical"; + sentryTags["issue_type"] = "high_execution_count"; + } + } + + // Check for rapid successive executions + if (tracker.ExecutionCount > 5 && timeSinceFirst.TotalSeconds < 10) + { + isLoopDetected = true; + isCriticalAlert = true; + reasons.Add($"Rapid execution: {tracker.ExecutionCount} executions in {timeSinceFirst.TotalSeconds:F1} seconds"); + sentryTags["alert_level"] = "critical"; + sentryTags["issue_type"] = "rapid_execution"; + } + + // Check for consistently slow queries + if (tracker.ExecutionCount > 3 && tracker.AverageExecutionTime.TotalMilliseconds > 1000) + { + isLoopDetected = true; + reasons.Add($"Consistently slow: {tracker.AverageExecutionTime.TotalMilliseconds:F0}ms average"); + + if (tracker.AverageExecutionTime > TimeSpan.FromSeconds(5)) // Critical slow query threshold + { + isCriticalAlert = true; + sentryTags["alert_level"] = "critical"; + sentryTags["issue_type"] = "slow_query"; + } + } + + // Prepare Sentry data + sentryTags["repository"] = repositoryName; + sentryTags["method"] = methodName; + sentryTags["query_pattern"] = queryPattern; + sentryTags["environment"] = Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT") ?? 
"Unknown"; + + sentryExtras["execution_count"] = tracker.ExecutionCount; + sentryExtras["executions_per_minute"] = executionsPerMinute; + sentryExtras["average_execution_time_ms"] = tracker.AverageExecutionTime.TotalMilliseconds; + sentryExtras["min_execution_time_ms"] = tracker.MinExecutionTime.TotalMilliseconds; + sentryExtras["max_execution_time_ms"] = tracker.MaxExecutionTime.TotalMilliseconds; + sentryExtras["total_execution_time_ms"] = tracker.TotalExecutionTime.TotalMilliseconds; + sentryExtras["first_execution"] = tracker.FirstExecution.ToString("yyyy-MM-dd HH:mm:ss.fff"); + sentryExtras["last_execution"] = tracker.LastExecution.ToString("yyyy-MM-dd HH:mm:ss.fff"); + sentryExtras["time_window_minutes"] = timeSinceFirst.TotalMinutes; + sentryExtras["detection_reasons"] = string.Join("; ", reasons); + + if (isLoopDetected) + { + _logger.LogWarning( + "[SQL-LOOP-DETECTED] {Repository}.{Method} | Pattern: {Pattern} | Count: {Count} | Reasons: {Reasons} | Avg Time: {AvgTime}ms", + repositoryName, methodName, queryPattern, tracker.ExecutionCount, + string.Join(", ", reasons), tracker.AverageExecutionTime.TotalMilliseconds); + + // Log detailed execution history + _logger.LogWarning( + "[SQL-LOOP-DETAILS] {Repository}.{Method} | First: {First} | Last: {Last} | Min: {Min}ms | Max: {Max}ms | Total: {Total}ms", + repositoryName, methodName, tracker.FirstExecution.ToString("HH:mm:ss.fff"), + tracker.LastExecution.ToString("HH:mm:ss.fff"), tracker.MinExecutionTime.TotalMilliseconds, + tracker.MaxExecutionTime.TotalMilliseconds, tracker.TotalExecutionTime.TotalMilliseconds); + } + + // Send to Sentry for critical alerts + if (isCriticalAlert) + { + SendCriticalAlertToSentry(repositoryName, methodName, queryPattern, reasons, sentryTags, sentryExtras); + } + else if (isLoopDetected) + { + SendWarningToSentry(repositoryName, methodName, queryPattern, reasons, sentryTags, sentryExtras); + } + + return isLoopDetected; + } + + /// + /// Sends a critical alert to Sentry for immediate attention + /// + private void SendCriticalAlertToSentry(string repositoryName, string methodName, string queryPattern, + List reasons, Dictionary tags, Dictionary extras) + { + try + { + var message = $"CRITICAL SQL Loop Detected: {repositoryName}.{methodName}"; + var exception = new InvalidOperationException($"Potential infinite SQL loop detected: {string.Join(", ", reasons)}"); + + // Add SQL-specific data to exception + exception.Data["Repository"] = repositoryName; + exception.Data["Method"] = methodName; + exception.Data["QueryPattern"] = queryPattern; + exception.Data["DetectionReasons"] = string.Join("; ", reasons); + + var sentryId = SentrySdk.CaptureException(exception, scope => + { + // Set tags for filtering and grouping + foreach (var tag in tags) + { + scope.SetTag(tag.Key, tag.Value); + } + + // Set extra data for debugging + foreach (var extra in extras) + { + scope.SetExtra(extra.Key, extra.Value); + } + + // Set fingerprint for better grouping + scope.SetFingerprint(new[] { "sql-loop-detection", repositoryName, methodName }); + + // Set level + scope.Level = SentryLevel.Error; + + // Add breadcrumb + scope.AddBreadcrumb( + message: $"Critical SQL loop detected in {repositoryName}.{methodName}", + category: "sql-monitoring", + level: BreadcrumbLevel.Error, + data: new Dictionary + { + ["query_pattern"] = queryPattern, + ["execution_count"] = extras["execution_count"].ToString(), + ["executions_per_minute"] = extras["executions_per_minute"].ToString() + } + ); + + // Set user context if available + 
scope.SetExtra("repository", repositoryName); + scope.SetExtra("method", methodName); + scope.SetExtra("query_pattern", queryPattern); + scope.SetExtra("detection_time", DateTime.UtcNow); + scope.SetExtra("alert_type", "critical_loop_detection"); + }); + + _logger.LogError( + "[SENTRY-CRITICAL] Sent critical SQL loop alert to Sentry: {SentryId} | {Repository}.{Method} | {Reasons}", + sentryId, repositoryName, methodName, string.Join(", ", reasons)); + } + catch (Exception ex) + { + _logger.LogError(ex, "[SENTRY-ERROR] Failed to send critical alert to Sentry for {Repository}.{Method}", + repositoryName, methodName); + } + } + + /// + /// Sends a warning to Sentry for monitoring purposes + /// + private void SendWarningToSentry(string repositoryName, string methodName, string queryPattern, + List reasons, Dictionary tags, Dictionary extras) + { + try + { + var message = $"SQL Performance Warning: {repositoryName}.{methodName}"; + + var sentryId = SentrySdk.CaptureMessage(message, scope => + { + // Set tags for filtering and grouping + foreach (var tag in tags) + { + scope.SetTag(tag.Key, tag.Value); + } + + // Set extra data for debugging + foreach (var extra in extras) + { + scope.SetExtra(extra.Key, extra.Value); + } + + // Set fingerprint for better grouping + scope.SetFingerprint(new[] { "sql-performance-warning", repositoryName, methodName }); + + // Set level + scope.Level = SentryLevel.Warning; + + // Add breadcrumb + scope.AddBreadcrumb( + message: $"SQL performance warning in {repositoryName}.{methodName}", + category: "sql-monitoring", + level: BreadcrumbLevel.Warning, + data: new Dictionary + { + ["query_pattern"] = queryPattern, + ["execution_count"] = extras["execution_count"].ToString(), + ["executions_per_minute"] = extras["executions_per_minute"].ToString() + } + ); + + // Set context + scope.SetExtra("repository", repositoryName); + scope.SetExtra("method", methodName); + scope.SetExtra("query_pattern", queryPattern); + scope.SetExtra("detection_time", DateTime.UtcNow); + scope.SetExtra("alert_type", "performance_warning"); + }); + + _logger.LogWarning( + "[SENTRY-WARNING] Sent SQL performance warning to Sentry: {SentryId} | {Repository}.{Method} | {Reasons}", + sentryId, repositoryName, methodName, string.Join(", ", reasons)); + } + catch (Exception ex) + { + _logger.LogError(ex, "[SENTRY-ERROR] Failed to send warning to Sentry for {Repository}.{Method}", + repositoryName, methodName); + } + } + + /// + /// Sends a custom performance metric to Sentry + /// + public void SendPerformanceMetricToSentry(string repositoryName, string methodName, string metricName, + double value, Dictionary tags = null) + { + try + { + var sentryTags = tags ?? 
new Dictionary(); + sentryTags["repository"] = repositoryName; + sentryTags["method"] = methodName; + sentryTags["metric_name"] = metricName; + + SentrySdk.AddBreadcrumb( + message: $"SQL Performance Metric: {metricName} = {value}", + category: "sql-performance", + level: BreadcrumbLevel.Info, + data: new Dictionary + { + ["repository"] = repositoryName, + ["method"] = methodName, + ["metric_name"] = metricName, + ["value"] = value.ToString() + }); + + _logger.LogDebug("[SENTRY-METRIC] Sent performance metric to Sentry: {Metric} = {Value} for {Repository}.{Method}", + metricName, value, repositoryName, methodName); + } + catch (Exception ex) + { + _logger.LogError(ex, "[SENTRY-ERROR] Failed to send performance metric to Sentry"); + } + } + + /// + /// Gets current statistics for all tracked queries + /// + public Dictionary GetQueryStatistics() + { + var stats = new Dictionary(); + var now = DateTime.UtcNow; + + foreach (var kvp in _queryTrackers) + { + var tracker = kvp.Value; + var timeSinceFirst = now - tracker.FirstExecution; + + stats[kvp.Key] = new QueryExecutionStats + { + RepositoryName = tracker.RepositoryName, + MethodName = tracker.MethodName, + QueryPattern = tracker.QueryPattern, + ExecutionCount = tracker.ExecutionCount, + FirstExecution = tracker.FirstExecution, + LastExecution = tracker.LastExecution, + AverageExecutionTime = tracker.AverageExecutionTime, + MinExecutionTime = tracker.MinExecutionTime, + MaxExecutionTime = tracker.MaxExecutionTime, + ExecutionsPerMinute = tracker.ExecutionCount / Math.Max(timeSinceFirst.TotalMinutes, 0.1), + IsActive = timeSinceFirst < TimeSpan.FromSeconds(_settings.LoopDetectionWindowSeconds) + }; + } + + return stats; + } + + /// + /// Clears all tracking data + /// + public void ClearAllTracking() + { + _queryTrackers.Clear(); + _logger.LogInformation("[SQL-LOOP-DETECTION] All tracking data cleared"); + } + + private void CleanupOldTrackers(object? state) + { + var now = DateTime.UtcNow; + var keysToRemove = new List(); + + foreach (var kvp in _queryTrackers) + { + var timeSinceLastExecution = now - kvp.Value.LastExecution; + // Use configurable retention period for monitoring dashboard + // This allows users to see query statistics even if queries haven't been executed recently + var retentionPeriod = TimeSpan.FromMinutes(_settings.DataRetentionMinutes); + + if (timeSinceLastExecution > retentionPeriod) + { + keysToRemove.Add(kvp.Key); + } + } + + foreach (var key in keysToRemove) + { + _queryTrackers.TryRemove(key, out _); + } + + if (keysToRemove.Count > 0) + { + _logger.LogDebug("[SQL-MONITORING] Cleaned up {Count} old trackers (retention: {RetentionMinutes} minutes)", keysToRemove.Count, _settings.DataRetentionMinutes); + } + } + + /// + /// Sends slow query alert to Sentry asynchronously + /// + public async Task SendSlowQueryAlertAsync(string repositoryName, string methodName, string queryPattern, TimeSpan executionTime) + { + try + { + var message = $"Slow SQL Query: {repositoryName}.{methodName}"; + var exception = new TimeoutException($"SQL query took {executionTime.TotalMilliseconds:F0}ms to execute"); + + var sentryId = SentrySdk.CaptureException(exception, scope => + { + scope.SetTag("repository", repositoryName); + scope.SetTag("method", methodName); + scope.SetTag("alert_type", "slow_query"); + scope.SetTag("environment", Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT") ?? 
"Unknown"); + + scope.SetExtra("query_pattern", queryPattern); + scope.SetExtra("execution_time_ms", executionTime.TotalMilliseconds); + scope.SetExtra("threshold_ms", 2000); + + scope.SetFingerprint(new[] { "slow-query", repositoryName, methodName }); + scope.Level = SentryLevel.Warning; + + scope.AddBreadcrumb( + message: $"Slow SQL query in {repositoryName}.{methodName}", + category: "sql-monitoring", + level: BreadcrumbLevel.Warning, + data: new Dictionary + { + ["query_pattern"] = queryPattern, + ["execution_time_ms"] = executionTime.TotalMilliseconds.ToString() + } + ); + }); + + _logger.LogWarning( + "[SENTRY-SLOW-QUERY] Sent slow query alert to Sentry: {SentryId} | {Repository}.{Method} | {Time}ms", + sentryId, repositoryName, methodName, executionTime.TotalMilliseconds); + } + catch (Exception ex) + { + _logger.LogError(ex, "[SENTRY-ERROR] Failed to send slow query alert to Sentry"); + } + } + + /// + /// Sends SQL error alert to Sentry asynchronously + /// + public async Task SendSqlErrorAlertAsync(string repositoryName, string methodName, string queryPattern, TimeSpan executionTime, Exception exception) + { + try + { + var sentryId = SentrySdk.CaptureException(exception, scope => + { + scope.SetTag("repository", repositoryName); + scope.SetTag("method", methodName); + scope.SetTag("alert_type", "sql_error"); + scope.SetTag("environment", Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT") ?? "Unknown"); + + scope.SetExtra("query_pattern", queryPattern); + scope.SetExtra("execution_time_ms", executionTime.TotalMilliseconds); + scope.SetExtra("error_type", exception.GetType().Name); + + scope.SetFingerprint(new[] { "sql-error", repositoryName, methodName, exception.GetType().Name }); + scope.Level = SentryLevel.Error; + + scope.AddBreadcrumb( + message: $"SQL error in {repositoryName}.{methodName}", + category: "sql-monitoring", + level: BreadcrumbLevel.Error, + data: new Dictionary + { + ["query_pattern"] = queryPattern, + ["execution_time_ms"] = executionTime.TotalMilliseconds.ToString(), + ["error_type"] = exception.GetType().Name + } + ); + }); + + _logger.LogError( + "[SENTRY-SQL-ERROR] Sent SQL error alert to Sentry: {SentryId} | {Repository}.{Method} | {Error}", + sentryId, repositoryName, methodName, exception.Message); + } + catch (Exception ex) + { + _logger.LogError(ex, "[SENTRY-ERROR] Failed to send SQL error alert to Sentry"); + } + } + + /// + /// Checks if monitoring is enabled globally + /// + public bool IsMonitoringEnabled() + { + return _settings.Enabled; + } + + /// + /// Checks if logging is enabled + /// + public bool IsLoggingEnabled() + { + return _settings.LoggingEnabled; + } + + /// + /// Checks if Sentry integration is enabled + /// + public bool IsSentryEnabled() + { + return _settings.SentryEnabled; + } + + /// + /// Checks if loop detection is enabled + /// + public bool IsLoopDetectionEnabled() + { + return _settings.LoopDetectionEnabled; + } + + /// + /// Checks if performance monitoring is enabled + /// + public bool IsPerformanceMonitoringEnabled() + { + return _settings.PerformanceMonitoringEnabled; + } + + /// + /// Checks if a query should be logged based on configuration + /// + public bool ShouldLogQuery(TimeSpan executionTime) + { + if (!_settings.LoggingEnabled) return false; + + if (_settings.LogErrorsOnly) return false; // Only log errors, not normal queries + + if (_settings.LogSlowQueriesOnly) + { + return executionTime.TotalMilliseconds > _settings.SlowQueryThresholdMs; + } + + return true; // Log all queries if logging is 
enabled + } + + public void Dispose() + { + _cleanupTimer?.Dispose(); + } + + private class QueryExecutionTracker + { + public string RepositoryName { get; set; } = string.Empty; + public string MethodName { get; set; } = string.Empty; + public string QueryPattern { get; set; } = string.Empty; + public DateTime FirstExecution { get; set; } + public DateTime LastExecution { get; set; } + public int ExecutionCount { get; set; } + public TimeSpan TotalExecutionTime { get; set; } + public TimeSpan MaxExecutionTime { get; set; } + public TimeSpan MinExecutionTime { get; set; } + + public TimeSpan AverageExecutionTime => + ExecutionCount > 0 ? TimeSpan.FromTicks(TotalExecutionTime.Ticks / ExecutionCount) : TimeSpan.Zero; + } +} diff --git a/src/Managing.Infrastructure.Database/PostgreSql/SqlLoopDetectionService.cs b/src/Managing.Infrastructure.Database/PostgreSql/SqlLoopDetectionService.cs new file mode 100644 index 00000000..8543791f --- /dev/null +++ b/src/Managing.Infrastructure.Database/PostgreSql/SqlLoopDetectionService.cs @@ -0,0 +1,221 @@ +using System.Collections.Concurrent; +using Microsoft.Extensions.Logging; + +namespace Managing.Infrastructure.Databases.PostgreSql; + +/// +/// Service for detecting potential SQL query loops and performance issues +/// Monitors query patterns and execution frequency to identify problematic operations +/// +public class SqlLoopDetectionService +{ + private readonly ILogger _logger; + private readonly ConcurrentDictionary _queryTrackers; + private readonly Timer _cleanupTimer; + private readonly TimeSpan _trackingWindow = TimeSpan.FromMinutes(5); + private readonly int _maxExecutionsPerWindow = 10; + private readonly TimeSpan _cleanupInterval = TimeSpan.FromMinutes(1); + + public SqlLoopDetectionService(ILogger logger) + { + _logger = logger; + _queryTrackers = new ConcurrentDictionary(); + + // Setup cleanup timer to remove old tracking data + _cleanupTimer = new Timer(CleanupOldTrackers, null, _cleanupInterval, _cleanupInterval); + } + + /// + /// Tracks a query execution and detects potential loops + /// + /// Name of the repository executing the query + /// Name of the method executing the query + /// Pattern or hash of the query being executed + /// Time taken to execute the query + /// True if a potential loop is detected + public bool TrackQueryExecution(string repositoryName, string methodName, string queryPattern, TimeSpan executionTime) + { + var key = $"{repositoryName}.{methodName}.{queryPattern}"; + var now = DateTime.UtcNow; + + var tracker = _queryTrackers.AddOrUpdate(key, + new QueryExecutionTracker + { + RepositoryName = repositoryName, + MethodName = methodName, + QueryPattern = queryPattern, + FirstExecution = now, + LastExecution = now, + ExecutionCount = 1, + TotalExecutionTime = executionTime, + MaxExecutionTime = executionTime, + MinExecutionTime = executionTime + }, + (k, existing) => + { + existing.LastExecution = now; + existing.ExecutionCount++; + existing.TotalExecutionTime += executionTime; + existing.MaxExecutionTime = existing.MaxExecutionTime > executionTime ? existing.MaxExecutionTime : executionTime; + existing.MinExecutionTime = existing.MinExecutionTime < executionTime ? 
existing.MinExecutionTime : executionTime; + return existing; + }); + + // Check for potential loop conditions + var timeSinceFirst = now - tracker.FirstExecution; + var executionsPerMinute = tracker.ExecutionCount / Math.Max(timeSinceFirst.TotalMinutes, 0.1); + + var isLoopDetected = false; + var reasons = new List(); + + // Check execution frequency + if (executionsPerMinute > 20) + { + isLoopDetected = true; + reasons.Add($"High frequency: {executionsPerMinute:F1} executions/minute"); + } + + // Check total execution count in window + if (tracker.ExecutionCount > _maxExecutionsPerWindow) + { + isLoopDetected = true; + reasons.Add($"High count: {tracker.ExecutionCount} executions in {timeSinceFirst.TotalMinutes:F1} minutes"); + } + + // Check for rapid successive executions + if (tracker.ExecutionCount > 5 && timeSinceFirst.TotalSeconds < 10) + { + isLoopDetected = true; + reasons.Add($"Rapid execution: {tracker.ExecutionCount} executions in {timeSinceFirst.TotalSeconds:F1} seconds"); + } + + // Check for consistently slow queries + if (tracker.ExecutionCount > 3 && tracker.AverageExecutionTime.TotalMilliseconds > 1000) + { + isLoopDetected = true; + reasons.Add($"Consistently slow: {tracker.AverageExecutionTime.TotalMilliseconds:F0}ms average"); + } + + if (isLoopDetected) + { + _logger.LogWarning( + "[SQL-LOOP-DETECTED] {Repository}.{Method} | Pattern: {Pattern} | Count: {Count} | Reasons: {Reasons} | Avg Time: {AvgTime}ms", + repositoryName, methodName, queryPattern, tracker.ExecutionCount, + string.Join(", ", reasons), tracker.AverageExecutionTime.TotalMilliseconds); + + // Log detailed execution history + _logger.LogWarning( + "[SQL-LOOP-DETAILS] {Repository}.{Method} | First: {First} | Last: {Last} | Min: {Min}ms | Max: {Max}ms | Total: {Total}ms", + repositoryName, methodName, tracker.FirstExecution.ToString("HH:mm:ss.fff"), + tracker.LastExecution.ToString("HH:mm:ss.fff"), tracker.MinExecutionTime.TotalMilliseconds, + tracker.MaxExecutionTime.TotalMilliseconds, tracker.TotalExecutionTime.TotalMilliseconds); + } + + return isLoopDetected; + } + + /// + /// Gets current statistics for all tracked queries + /// + public Dictionary GetQueryStatistics() + { + var stats = new Dictionary(); + var now = DateTime.UtcNow; + + foreach (var kvp in _queryTrackers) + { + var tracker = kvp.Value; + var timeSinceFirst = now - tracker.FirstExecution; + + stats[kvp.Key] = new QueryExecutionStats + { + RepositoryName = tracker.RepositoryName, + MethodName = tracker.MethodName, + QueryPattern = tracker.QueryPattern, + ExecutionCount = tracker.ExecutionCount, + FirstExecution = tracker.FirstExecution, + LastExecution = tracker.LastExecution, + AverageExecutionTime = tracker.AverageExecutionTime, + MinExecutionTime = tracker.MinExecutionTime, + MaxExecutionTime = tracker.MaxExecutionTime, + ExecutionsPerMinute = tracker.ExecutionCount / Math.Max(timeSinceFirst.TotalMinutes, 0.1), + IsActive = timeSinceFirst < _trackingWindow + }; + } + + return stats; + } + + /// + /// Clears all tracking data + /// + public void ClearAllTracking() + { + _queryTrackers.Clear(); + _logger.LogInformation("[SQL-LOOP-DETECTION] All tracking data cleared"); + } + + private void CleanupOldTrackers(object? 
state) + { + var now = DateTime.UtcNow; + var keysToRemove = new List(); + + foreach (var kvp in _queryTrackers) + { + var timeSinceLastExecution = now - kvp.Value.LastExecution; + if (timeSinceLastExecution > _trackingWindow) + { + keysToRemove.Add(kvp.Key); + } + } + + foreach (var key in keysToRemove) + { + _queryTrackers.TryRemove(key, out _); + } + + if (keysToRemove.Count > 0) + { + _logger.LogDebug("[SQL-LOOP-DETECTION] Cleaned up {Count} old trackers", keysToRemove.Count); + } + } + + public void Dispose() + { + _cleanupTimer?.Dispose(); + } + + private class QueryExecutionTracker + { + public string RepositoryName { get; set; } = string.Empty; + public string MethodName { get; set; } = string.Empty; + public string QueryPattern { get; set; } = string.Empty; + public DateTime FirstExecution { get; set; } + public DateTime LastExecution { get; set; } + public int ExecutionCount { get; set; } + public TimeSpan TotalExecutionTime { get; set; } + public TimeSpan MaxExecutionTime { get; set; } + public TimeSpan MinExecutionTime { get; set; } + + public TimeSpan AverageExecutionTime => + ExecutionCount > 0 ? TimeSpan.FromTicks(TotalExecutionTime.Ticks / ExecutionCount) : TimeSpan.Zero; + } +} + +/// +/// Statistics for query execution tracking +/// +public class QueryExecutionStats +{ + public string RepositoryName { get; set; } = string.Empty; + public string MethodName { get; set; } = string.Empty; + public string QueryPattern { get; set; } = string.Empty; + public int ExecutionCount { get; set; } + public DateTime FirstExecution { get; set; } + public DateTime LastExecution { get; set; } + public TimeSpan AverageExecutionTime { get; set; } + public TimeSpan MinExecutionTime { get; set; } + public TimeSpan MaxExecutionTime { get; set; } + public double ExecutionsPerMinute { get; set; } + public bool IsActive { get; set; } +} diff --git a/src/Managing.Infrastructure.Database/PostgreSql/SqlMonitoringSettings.cs b/src/Managing.Infrastructure.Database/PostgreSql/SqlMonitoringSettings.cs new file mode 100644 index 00000000..11d59cfd --- /dev/null +++ b/src/Managing.Infrastructure.Database/PostgreSql/SqlMonitoringSettings.cs @@ -0,0 +1,77 @@ +namespace Managing.Infrastructure.Databases.PostgreSql; + +/// +/// Configuration settings for SQL query monitoring and loop detection +/// +public class SqlMonitoringSettings +{ + /// + /// Whether SQL monitoring is enabled globally (default: true) + /// + public bool Enabled { get; set; } = true; + + /// + /// Whether SQL query logging is enabled (default: true) + /// + public bool LoggingEnabled { get; set; } = true; + + /// + /// Whether Sentry integration is enabled (default: true) + /// + public bool SentryEnabled { get; set; } = true; + + /// + /// Whether loop detection is enabled (default: true) + /// + public bool LoopDetectionEnabled { get; set; } = true; + + /// + /// Whether performance monitoring is enabled (default: true) + /// + public bool PerformanceMonitoringEnabled { get; set; } = true; + + /// + /// Time window for loop detection in seconds (default: 60) + /// + public int LoopDetectionWindowSeconds { get; set; } = 60; + + /// + /// Maximum query executions per window for loop detection (default: 100) + /// + public int MaxQueryExecutionsPerWindow { get; set; } = 100; + + /// + /// Maximum method executions per window for loop detection (default: 50) + /// + public int MaxMethodExecutionsPerWindow { get; set; } = 50; + + /// + /// Threshold for long-running queries in milliseconds (default: 1000) + /// + public int 
LongRunningQueryThresholdMs { get; set; } = 1000; + + /// + /// Threshold for Sentry alerts (default: 5) + /// + public int SentryAlertThreshold { get; set; } = 5; + + /// + /// Threshold for slow queries in milliseconds (default: 2000) + /// + public int SlowQueryThresholdMs { get; set; } = 2000; + + /// + /// Whether to log only slow queries (reduces overhead) (default: false) + /// + public bool LogSlowQueriesOnly { get; set; } = false; + + /// + /// Whether to log only errors (minimal overhead) (default: false) + /// + public bool LogErrorsOnly { get; set; } = false; + + /// + /// Data retention period in minutes for monitoring dashboard (default: 30) + /// + public int DataRetentionMinutes { get; set; } = 30; +} diff --git a/src/Managing.Infrastructure.Database/PostgreSql/SqlQueryLogger.cs b/src/Managing.Infrastructure.Database/PostgreSql/SqlQueryLogger.cs new file mode 100644 index 00000000..3336c481 --- /dev/null +++ b/src/Managing.Infrastructure.Database/PostgreSql/SqlQueryLogger.cs @@ -0,0 +1,425 @@ +using System.Diagnostics; +using System.Text.Json; +using Microsoft.Extensions.Logging; + +namespace Managing.Infrastructure.Databases.PostgreSql; + +/// +/// Comprehensive SQL query logger for monitoring and debugging database operations +/// Provides detailed logging with timing, parameters, and performance metrics +/// +public class SqlQueryLogger : IDisposable +{ + private readonly ILogger _logger; + private readonly Stopwatch _stopwatch; + private readonly string _operationId; + private readonly DateTime _startTime; + private readonly string _methodName; + private readonly string _repositoryName; + private readonly Dictionary _parameters; + private readonly List _executedQueries; + private bool _disposed = false; + + public SqlQueryLogger(ILogger logger, string repositoryName, string methodName) + { + _logger = logger; + _repositoryName = repositoryName; + _methodName = methodName; + _operationId = Guid.NewGuid().ToString("N")[..8]; // Short ID for correlation + _startTime = DateTime.UtcNow; + _stopwatch = Stopwatch.StartNew(); + _parameters = new Dictionary(); + _executedQueries = new List(); + } + + /// + /// Logs the start of a database operation + /// + public void LogOperationStart(params (string name, object value)[] parameters) + { + foreach (var (name, value) in parameters) + { + _parameters[name] = value; + } + + _logger.LogInformation( + "[SQL-OP-START] {OperationId} | {Repository}.{Method} | Started at {StartTime}", + _operationId, _repositoryName, _methodName, _startTime.ToString("HH:mm:ss.fff")); + } + + /// + /// Logs a SQL query execution with timing and parameters + /// + public void LogQueryExecution(string query, TimeSpan executionTime, int? rowsAffected = null, Exception? exception = null) + { + _executedQueries.Add(query); + + var logLevel = exception != null ? LogLevel.Error : + executionTime.TotalMilliseconds > 1000 ? LogLevel.Warning : LogLevel.Information; + + var logMessage = exception != null + ? "[SQL-QUERY-ERROR] {OperationId} | {Repository}.{Method} | Query failed after {ExecutionTime}ms | Error: {Error}" + : "[SQL-QUERY] {OperationId} | {Repository}.{Method} | Executed in {ExecutionTime}ms | Rows: {RowsAffected}"; + + var args = new object[] + { + _operationId, _repositoryName, _methodName, executionTime.TotalMilliseconds, + rowsAffected ?? 
0 + }; + + if (exception != null) + { + args[4] = exception.Message; + _logger.LogError(exception, logMessage, args); + + // Send SQL error to Sentry + SendSqlErrorToSentry(query, executionTime, exception, rowsAffected); + } + else + { + _logger.Log(logLevel, logMessage, args); + + // Send slow query alert to Sentry + if (executionTime.TotalMilliseconds > 2000) // Critical slow query threshold + { + SendSlowQueryToSentry(query, executionTime, rowsAffected); + } + else if (executionTime.TotalMilliseconds > 1000) // Warning threshold + { + SendSlowQueryWarningToSentry(query, executionTime, rowsAffected); + } + } + + // Log query details for slow queries or errors + if (executionTime.TotalMilliseconds > 500 || exception != null) + { + _logger.LogWarning( + "[SQL-QUERY-DETAILS] {OperationId} | Query: {Query} | Parameters: {Parameters}", + _operationId, + TruncateQuery(query, 500), + JsonSerializer.Serialize(_parameters, new JsonSerializerOptions { WriteIndented = false })); + } + } + + /// + /// Logs the completion of a database operation with summary + /// + public void LogOperationComplete(object? result = null, Exception? exception = null) + { + _stopwatch.Stop(); + var totalTime = _stopwatch.Elapsed; + + var logLevel = exception != null ? LogLevel.Error : + totalTime.TotalMilliseconds > 2000 ? LogLevel.Warning : LogLevel.Information; + + var logMessage = exception != null + ? "[SQL-OP-ERROR] {OperationId} | {Repository}.{Method} | Failed after {TotalTime}ms | Queries: {QueryCount} | Error: {Error}" + : "[SQL-OP-COMPLETE] {OperationId} | {Repository}.{Method} | Completed in {TotalTime}ms | Queries: {QueryCount} | Result: {ResultType}"; + + var args = new object[] + { + _operationId, _repositoryName, _methodName, totalTime.TotalMilliseconds, + _executedQueries.Count, result?.GetType().Name ?? 
"void" + }; + + if (exception != null) + { + args[5] = exception.Message; + _logger.LogError(exception, logMessage, args); + } + else + { + _logger.Log(logLevel, logMessage, args); + } + + // Log operation summary for long-running operations + if (totalTime.TotalMilliseconds > 1000 || _executedQueries.Count > 5) + { + _logger.LogWarning( + "[SQL-OP-SUMMARY] {OperationId} | Parameters: {Parameters} | Query Count: {QueryCount} | Total Time: {TotalTime}ms", + _operationId, + JsonSerializer.Serialize(_parameters, new JsonSerializerOptions { WriteIndented = false }), + _executedQueries.Count, + totalTime.TotalMilliseconds); + } + } + + /// + /// Logs potential loop detection based on query patterns + /// + public void LogPotentialLoopDetection(string queryPattern, int occurrenceCount) + { + _logger.LogWarning( + "[SQL-LOOP-DETECTED] {OperationId} | {Repository}.{Method} | Pattern '{Pattern}' executed {Count} times | Possible infinite loop!", + _operationId, _repositoryName, _methodName, queryPattern, occurrenceCount); + + // Send critical alert to Sentry for loop detection + SendLoopDetectionToSentry(queryPattern, occurrenceCount); + } + + /// + /// Sends loop detection alert to Sentry + /// + private void SendLoopDetectionToSentry(string queryPattern, int occurrenceCount) + { + try + { + var message = $"SQL Loop Detection: {_repositoryName}.{_methodName}"; + var exception = new InvalidOperationException($"Potential infinite SQL loop detected: {queryPattern} executed {occurrenceCount} times"); + + var sentryId = SentrySdk.CaptureException(exception, scope => + { + scope.SetTag("operation_id", _operationId); + scope.SetTag("repository", _repositoryName); + scope.SetTag("method", _methodName); + scope.SetTag("query_pattern", queryPattern); + scope.SetTag("alert_type", "sql_loop_detection"); + scope.SetTag("environment", Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT") ?? 
"Unknown"); + + scope.SetExtra("occurrence_count", occurrenceCount); + scope.SetExtra("operation_duration_ms", _stopwatch.Elapsed.TotalMilliseconds); + scope.SetExtra("parameters", JsonSerializer.Serialize(_parameters, new JsonSerializerOptions { WriteIndented = false })); + scope.SetExtra("executed_queries_count", _executedQueries.Count); + scope.SetExtra("start_time", _startTime.ToString("yyyy-MM-dd HH:mm:ss.fff")); + + scope.SetFingerprint(new[] { "sql-loop-detection", _repositoryName, _methodName, queryPattern }); + scope.Level = SentryLevel.Error; + + scope.AddBreadcrumb( + message: $"SQL loop detected in {_repositoryName}.{_methodName}", + category: "sql-monitoring", + level: BreadcrumbLevel.Error, + data: new Dictionary + { + ["query_pattern"] = queryPattern, + ["occurrence_count"] = occurrenceCount.ToString(), + ["operation_id"] = _operationId + } + ); + + scope.SetExtra("operation_id", _operationId); + scope.SetExtra("repository", _repositoryName); + scope.SetExtra("method", _methodName); + scope.SetExtra("query_pattern", queryPattern); + scope.SetExtra("occurrence_count", occurrenceCount); + scope.SetExtra("start_time", _startTime); + scope.SetExtra("duration_ms", _stopwatch.Elapsed.TotalMilliseconds); + scope.SetExtra("parameters", JsonSerializer.Serialize(_parameters)); + }); + + _logger.LogError( + "[SENTRY-LOOP-ALERT] Sent loop detection alert to Sentry: {SentryId} | {Repository}.{Method} | Pattern: {Pattern} | Count: {Count}", + sentryId, _repositoryName, _methodName, queryPattern, occurrenceCount); + } + catch (Exception ex) + { + _logger.LogError(ex, "[SENTRY-ERROR] Failed to send loop detection alert to Sentry"); + } + } + + /// + /// Logs connection state changes + /// + public void LogConnectionStateChange(string state, TimeSpan? duration = null) + { + var message = duration.HasValue + ? "[SQL-CONNECTION] {OperationId} | {Repository}.{Method} | Connection {State} (took {Duration}ms)" + : "[SQL-CONNECTION] {OperationId} | {Repository}.{Method} | Connection {State}"; + + var args = duration.HasValue + ? new object[] { _operationId, _repositoryName, _methodName, state, duration.Value.TotalMilliseconds } + : new object[] { _operationId, _repositoryName, _methodName, state }; + + _logger.LogInformation(message, args); + } + + /// + /// Sends SQL error to Sentry + /// + private void SendSqlErrorToSentry(string query, TimeSpan executionTime, Exception exception, int? rowsAffected) + { + try + { + var sentryId = SentrySdk.CaptureException(exception, scope => + { + scope.SetTag("operation_id", _operationId); + scope.SetTag("repository", _repositoryName); + scope.SetTag("method", _methodName); + scope.SetTag("alert_type", "sql_error"); + scope.SetTag("environment", Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT") ?? "Unknown"); + + scope.SetExtra("query", TruncateQuery(query, 1000)); + scope.SetExtra("execution_time_ms", executionTime.TotalMilliseconds); + scope.SetExtra("rows_affected", rowsAffected ?? 
0); + scope.SetExtra("parameters", JsonSerializer.Serialize(_parameters, new JsonSerializerOptions { WriteIndented = false })); + scope.SetExtra("operation_duration_ms", _stopwatch.Elapsed.TotalMilliseconds); + + scope.SetFingerprint(new[] { "sql-error", _repositoryName, _methodName, exception.GetType().Name }); + scope.Level = SentryLevel.Error; + + scope.AddBreadcrumb( + message: $"SQL error in {_repositoryName}.{_methodName}", + category: "sql-monitoring", + level: BreadcrumbLevel.Error, + data: new Dictionary + { + ["query"] = TruncateQuery(query, 200), + ["execution_time_ms"] = executionTime.TotalMilliseconds.ToString(), + ["operation_id"] = _operationId + } + ); + + scope.SetExtra("operation_id", _operationId); + scope.SetExtra("repository", _repositoryName); + scope.SetExtra("method", _methodName); + scope.SetExtra("query", TruncateQuery(query, 1000)); + scope.SetExtra("execution_time_ms", executionTime.TotalMilliseconds); + scope.SetExtra("rows_affected", rowsAffected ?? 0); + scope.SetExtra("error_type", exception.GetType().Name); + scope.SetExtra("error_message", exception.Message); + }); + + _logger.LogError( + "[SENTRY-SQL-ERROR] Sent SQL error to Sentry: {SentryId} | {Repository}.{Method} | {Error}", + sentryId, _repositoryName, _methodName, exception.Message); + } + catch (Exception ex) + { + _logger.LogError(ex, "[SENTRY-ERROR] Failed to send SQL error to Sentry"); + } + } + + /// + /// Sends critical slow query alert to Sentry + /// + private void SendSlowQueryToSentry(string query, TimeSpan executionTime, int? rowsAffected) + { + try + { + var message = $"Critical Slow SQL Query: {_repositoryName}.{_methodName}"; + var exception = new TimeoutException($"SQL query took {executionTime.TotalMilliseconds:F0}ms to execute"); + + var sentryId = SentrySdk.CaptureException(exception, scope => + { + scope.SetTag("operation_id", _operationId); + scope.SetTag("repository", _repositoryName); + scope.SetTag("method", _methodName); + scope.SetTag("alert_type", "slow_query_critical"); + scope.SetTag("environment", Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT") ?? "Unknown"); + + scope.SetExtra("query", TruncateQuery(query, 1000)); + scope.SetExtra("execution_time_ms", executionTime.TotalMilliseconds); + scope.SetExtra("rows_affected", rowsAffected ?? 0); + scope.SetExtra("parameters", JsonSerializer.Serialize(_parameters, new JsonSerializerOptions { WriteIndented = false })); + scope.SetExtra("operation_duration_ms", _stopwatch.Elapsed.TotalMilliseconds); + + scope.SetFingerprint(new[] { "slow-query-critical", _repositoryName, _methodName }); + scope.Level = SentryLevel.Error; + + scope.AddBreadcrumb( + message: $"Critical slow SQL query in {_repositoryName}.{_methodName}", + category: "sql-monitoring", + level: BreadcrumbLevel.Error, + data: new Dictionary + { + ["query"] = TruncateQuery(query, 200), + ["execution_time_ms"] = executionTime.TotalMilliseconds.ToString(), + ["operation_id"] = _operationId + } + ); + + scope.SetExtra("operation_id", _operationId); + scope.SetExtra("repository", _repositoryName); + scope.SetExtra("method", _methodName); + scope.SetExtra("query", TruncateQuery(query, 1000)); + scope.SetExtra("execution_time_ms", executionTime.TotalMilliseconds); + scope.SetExtra("rows_affected", rowsAffected ?? 
0); + scope.SetExtra("threshold_ms", 2000); + scope.SetExtra("severity", "critical"); + }); + + _logger.LogError( + "[SENTRY-SLOW-QUERY] Sent critical slow query alert to Sentry: {SentryId} | {Repository}.{Method} | {Time}ms", + sentryId, _repositoryName, _methodName, executionTime.TotalMilliseconds); + } + catch (Exception ex) + { + _logger.LogError(ex, "[SENTRY-ERROR] Failed to send slow query alert to Sentry"); + } + } + + /// + /// Sends slow query warning to Sentry + /// + private void SendSlowQueryWarningToSentry(string query, TimeSpan executionTime, int? rowsAffected) + { + try + { + var message = $"Slow SQL Query Warning: {_repositoryName}.{_methodName}"; + + var sentryId = SentrySdk.CaptureMessage(message, scope => + { + scope.SetTag("operation_id", _operationId); + scope.SetTag("repository", _repositoryName); + scope.SetTag("method", _methodName); + scope.SetTag("alert_type", "slow_query_warning"); + scope.SetTag("environment", Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT") ?? "Unknown"); + + scope.SetExtra("query", TruncateQuery(query, 1000)); + scope.SetExtra("execution_time_ms", executionTime.TotalMilliseconds); + scope.SetExtra("rows_affected", rowsAffected ?? 0); + scope.SetExtra("parameters", JsonSerializer.Serialize(_parameters, new JsonSerializerOptions { WriteIndented = false })); + scope.SetExtra("operation_duration_ms", _stopwatch.Elapsed.TotalMilliseconds); + + scope.SetFingerprint(new[] { "slow-query-warning", _repositoryName, _methodName }); + scope.Level = SentryLevel.Warning; + + scope.AddBreadcrumb( + message: $"Slow SQL query warning in {_repositoryName}.{_methodName}", + category: "sql-monitoring", + level: BreadcrumbLevel.Warning, + data: new Dictionary + { + ["query"] = TruncateQuery(query, 200), + ["execution_time_ms"] = executionTime.TotalMilliseconds.ToString(), + ["operation_id"] = _operationId + } + ); + + scope.SetExtra("operation_id", _operationId); + scope.SetExtra("repository", _repositoryName); + scope.SetExtra("method", _methodName); + scope.SetExtra("query", TruncateQuery(query, 1000)); + scope.SetExtra("execution_time_ms", executionTime.TotalMilliseconds); + scope.SetExtra("rows_affected", rowsAffected ?? 0); + scope.SetExtra("threshold_ms", 1000); + scope.SetExtra("severity", "warning"); + }); + + _logger.LogWarning( + "[SENTRY-SLOW-QUERY-WARNING] Sent slow query warning to Sentry: {SentryId} | {Repository}.{Method} | {Time}ms", + sentryId, _repositoryName, _methodName, executionTime.TotalMilliseconds); + } + catch (Exception ex) + { + _logger.LogError(ex, "[SENTRY-ERROR] Failed to send slow query warning to Sentry"); + } + } + + private static string TruncateQuery(string query, int maxLength) + { + if (string.IsNullOrEmpty(query) || query.Length <= maxLength) + return query; + + return query[..maxLength] + "... 
[TRUNCATED]"; + } + + public void Dispose() + { + if (!_disposed) + { + _stopwatch?.Stop(); + _disposed = true; + } + } +} diff --git a/src/Managing.Web3Proxy/test/plugins/close-position.test.ts b/src/Managing.Web3Proxy/test/plugins/close-position.test.ts index 4338a017..a109d2d9 100644 --- a/src/Managing.Web3Proxy/test/plugins/close-position.test.ts +++ b/src/Managing.Web3Proxy/test/plugins/close-position.test.ts @@ -9,8 +9,8 @@ test('GMX Position Closing', async (t) => { const result = await closeGmxPositionImpl( sdk, - "ADA", - TradeDirection.Long + "ETH", + TradeDirection.Short ) console.log('Position closing result:', result) assert.ok(result, 'Position closing result should be defined') diff --git a/src/Managing.WebApp/src/components/mollecules/LogIn/LogIn.tsx b/src/Managing.WebApp/src/components/mollecules/LogIn/LogIn.tsx index c1d8c204..5c2bb259 100644 --- a/src/Managing.WebApp/src/components/mollecules/LogIn/LogIn.tsx +++ b/src/Managing.WebApp/src/components/mollecules/LogIn/LogIn.tsx @@ -48,7 +48,7 @@ const LogIn = () => { .user_CreateToken({ address: walletAddress, message: message, - name: form.name, + name: user?.id, signature: signature, }) .then((data) => { @@ -101,19 +101,6 @@ const LogIn = () => { action="#" onSubmit={handleSubmit(onSubmit)} > -
- - -
+      {isLoading ? (
+        <div />
+      ) : (
+        <>
+          {/* Overview Cards */}
+          <div>
+            <div>
+              <div>Total Tracked Queries</div>
+              <div>{statistics?.totalTrackedQueries || 0}</div>
+              <div>All time</div>
+            </div>
+            <div>
+              <div>Active Queries</div>
+              <div>{statistics?.activeQueries || 0}</div>
+              <div>Currently monitored</div>
+            </div>
+            <div>
+              <div>Total Alerts</div>
+              <div>{alerts?.length || 0}</div>
+              <div>All alerts</div>
+            </div>
+            <div>
+              <div>Monitoring Status</div>
+              <div>{health?.isEnabled ? 'Active' : 'Inactive'}</div>
+              <div>System status</div>
+            </div>
+          </div>
+
+          {/* Health Status */}
+          <div>
+            <h3>System Health</h3>
+            {health ? (
+              <div>
+                <div>
+                  <span>Monitoring</span>
+                  <span>{health.isEnabled ? 'Enabled' : 'Disabled'}</span>
+                </div>
+                <div>
+                  <span>Logging</span>
+                  <span>{health.loggingEnabled ? 'Enabled' : 'Disabled'}</span>
+                </div>
+                <div>
+                  <span>Sentry</span>
+                  <span>{health.sentryEnabled ? 'Enabled' : 'Disabled'}</span>
+                </div>
+                <div>
+                  <span>Loop Detection</span>
+                  <span>{health.loopDetectionEnabled ? 'Enabled' : 'Disabled'}</span>
+                </div>
+                <div>
+                  <span>Performance</span>
+                  <span>{health.performanceMonitoringEnabled ? 'Enabled' : 'Disabled'}</span>
+                </div>
+              </div>
+            ) : (
+              <div>Unable to fetch monitoring health data</div>
+            )}
+          </div>
+
+          {/* Alerts Section */}
+          <div>
+            <h3>Recent Alerts</h3>
+            {alertsData.length === 0 ? (
+              <div>No alerts found. The system is running smoothly!</div>
+            ) : (
+              <table>{/* … */}</table>
+            )}
+          </div>
+
+          {/* Statistics Section */}
+          <div>
+            <h3>Query Statistics</h3>
+            {statisticsData.length === 0 ? (
+              <div>No statistics available yet</div>
+            ) : (
+              <table>{/* … */}</table>
+            )}
+          </div>
+        </>
+      )}
+    </div>
+  )
+}
+
+export default SqlMonitoring