Add query monitoring with Sentry alerts + fix position list check in DB for backtest

This commit is contained in:
2025-10-10 00:15:02 +07:00
parent ffb98fe359
commit e4c2f8b7a5
24 changed files with 3340 additions and 179 deletions

View File

@@ -0,0 +1,223 @@
using System.Diagnostics;
using Microsoft.Extensions.Logging;
namespace Managing.Infrastructure.Databases.PostgreSql;
/// <summary>
/// Base repository class with comprehensive SQL query logging and monitoring
/// Provides automatic query tracking, loop detection, and performance monitoring
/// </summary>
public abstract class BaseRepositoryWithLogging
{
protected readonly ManagingDbContext _context;
protected readonly ILogger<SqlQueryLogger> _logger;
protected readonly SentrySqlMonitoringService _sentryMonitoringService;
protected readonly string _repositoryName;
protected BaseRepositoryWithLogging(ManagingDbContext context, ILogger<SqlQueryLogger> logger, SentrySqlMonitoringService sentryMonitoringService)
{
_context = context;
_logger = logger;
_sentryMonitoringService = sentryMonitoringService;
_repositoryName = GetType().Name;
}
/// <summary>
/// Executes a database operation with lightweight logging and monitoring
/// Only logs slow queries (>2000ms) and errors to minimize performance impact
/// </summary>
/// <typeparam name="T">Return type of the operation</typeparam>
/// <param name="operation">The database operation to execute</param>
/// <param name="methodName">Name of the calling method</param>
/// <param name="parameters">Parameters passed to the operation</param>
/// <returns>Result of the operation</returns>
protected async Task<T> ExecuteWithLoggingAsync<T>(
Func<Task<T>> operation,
string methodName,
params (string name, object value)[] parameters)
{
// Check if monitoring is enabled globally
if (!_sentryMonitoringService.IsMonitoringEnabled())
{
return await operation();
}
var stopwatch = Stopwatch.StartNew();
var queryPattern = GenerateQueryPattern(methodName, parameters);
try
{
var result = await operation();
stopwatch.Stop();
// Only log if slow query (>2000ms) and logging is enabled
if (stopwatch.Elapsed.TotalMilliseconds > 2000 && _sentryMonitoringService.IsLoggingEnabled())
{
_logger.LogWarning(
"[SLOW-SQL] {Repository}.{Method} | Pattern: {Pattern} | Time: {Time}ms",
_repositoryName, methodName, queryPattern, stopwatch.Elapsed.TotalMilliseconds);
// Send slow query alert to Sentry asynchronously if enabled
if (_sentryMonitoringService.IsSentryEnabled())
{
_ = Task.Run(() => SendSlowQueryToSentryAsync(queryPattern, stopwatch.Elapsed, methodName));
}
}
// Track query execution for loop detection if enabled (minimal overhead)
if (_sentryMonitoringService.IsLoopDetectionEnabled())
{
_context.TrackQueryExecution(queryPattern, stopwatch.Elapsed, _repositoryName, methodName);
}
return result;
}
catch (Exception ex)
{
stopwatch.Stop();
// Always log errors if logging is enabled
if (_sentryMonitoringService.IsLoggingEnabled())
{
_logger.LogError(ex,
"[SQL-ERROR] {Repository}.{Method} | Pattern: {Pattern} | Time: {Time}ms",
_repositoryName, methodName, queryPattern, stopwatch.Elapsed.TotalMilliseconds);
}
// Send SQL error to Sentry asynchronously if enabled
if (_sentryMonitoringService.IsSentryEnabled())
{
_ = Task.Run(() => SendSqlErrorToSentryAsync(queryPattern, stopwatch.Elapsed, ex, methodName));
}
throw;
}
}
/// <summary>
/// Executes a database operation with lightweight logging and monitoring (void return)
/// Only logs slow queries (>2000ms) and errors to minimize performance impact
/// </summary>
/// <param name="operation">The database operation to execute</param>
/// <param name="methodName">Name of the calling method</param>
/// <param name="parameters">Parameters passed to the operation</param>
protected async Task ExecuteWithLoggingAsync(
Func<Task> operation,
string methodName,
params (string name, object value)[] parameters)
{
// Check if monitoring is enabled globally
if (!_sentryMonitoringService.IsMonitoringEnabled())
{
await operation();
return;
}
var stopwatch = Stopwatch.StartNew();
var queryPattern = GenerateQueryPattern(methodName, parameters);
try
{
await operation();
stopwatch.Stop();
// Only log if slow query (>2000ms) and logging is enabled
if (stopwatch.Elapsed.TotalMilliseconds > 2000 && _sentryMonitoringService.IsLoggingEnabled())
{
_logger.LogWarning(
"[SLOW-SQL] {Repository}.{Method} | Pattern: {Pattern} | Time: {Time}ms",
_repositoryName, methodName, queryPattern, stopwatch.Elapsed.TotalMilliseconds);
// Send slow query alert to Sentry asynchronously if enabled
if (_sentryMonitoringService.IsSentryEnabled())
{
_ = Task.Run(() => SendSlowQueryToSentryAsync(queryPattern, stopwatch.Elapsed, methodName));
}
}
// Track query execution for loop detection if enabled (minimal overhead)
if (_sentryMonitoringService.IsLoopDetectionEnabled())
{
_context.TrackQueryExecution(queryPattern, stopwatch.Elapsed, _repositoryName, methodName);
}
}
catch (Exception ex)
{
stopwatch.Stop();
// Always log errors if logging is enabled
if (_sentryMonitoringService.IsLoggingEnabled())
{
_logger.LogError(ex,
"[SQL-ERROR] {Repository}.{Method} | Pattern: {Pattern} | Time: {Time}ms",
_repositoryName, methodName, queryPattern, stopwatch.Elapsed.TotalMilliseconds);
}
// Send SQL error to Sentry asynchronously if enabled
if (_sentryMonitoringService.IsSentryEnabled())
{
_ = Task.Run(() => SendSqlErrorToSentryAsync(queryPattern, stopwatch.Elapsed, ex, methodName));
}
throw;
}
}
/// <summary>
/// Generates a query pattern for tracking purposes
/// </summary>
/// <param name="methodName">Name of the method</param>
/// <param name="parameters">Method parameters</param>
/// <returns>Query pattern string</returns>
private string GenerateQueryPattern(string methodName, (string name, object value)[] parameters)
{
var paramStrings = parameters.Select(p => $"{p.name}={p.value?.GetType().Name ?? "null"}");
return $"{methodName}({string.Join(",", paramStrings)})";
}
/// <summary>
/// Logs a potential performance issue
/// </summary>
/// <param name="operation">Operation description</param>
/// <param name="duration">Operation duration</param>
/// <param name="threshold">Performance threshold</param>
protected void LogPerformanceIssue(string operation, TimeSpan duration, TimeSpan threshold)
{
if (duration > threshold)
{
_logger.LogWarning(
"[SQL-PERFORMANCE] {Repository} | {Operation} took {Duration}ms (threshold: {Threshold}ms)",
_repositoryName, operation, duration.TotalMilliseconds, threshold.TotalMilliseconds);
}
}
/// <summary>
/// Sends slow query alert to Sentry asynchronously (fire and forget)
/// </summary>
private async Task SendSlowQueryToSentryAsync(string queryPattern, TimeSpan executionTime, string methodName)
{
try
{
await _sentryMonitoringService.SendSlowQueryAlertAsync(_repositoryName, methodName, queryPattern, executionTime);
}
catch (Exception ex)
{
_logger.LogError(ex, "[SENTRY-ERROR] Failed to send slow query alert to Sentry");
}
}
/// <summary>
/// Sends SQL error to Sentry asynchronously (fire and forget)
/// </summary>
private async Task SendSqlErrorToSentryAsync(string queryPattern, TimeSpan executionTime, Exception exception, string methodName)
{
try
{
await _sentryMonitoringService.SendSqlErrorAlertAsync(_repositoryName, methodName, queryPattern, executionTime, exception);
}
catch (Exception ex)
{
_logger.LogError(ex, "[SENTRY-ERROR] Failed to send SQL error alert to Sentry");
}
}
}
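
For orientation, a minimal sketch of a concrete repository plugging into this base class. The PostgreSqlAccountRepository name, the GetAccountAsync method and the Id lookup are illustrative assumptions and not part of this commit; only the ExecuteWithLoggingAsync call shape comes from the class above.

using Managing.Infrastructure.Databases.PostgreSql.Entities;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;

namespace Managing.Infrastructure.Databases.PostgreSql;

// Hypothetical example repository, not part of this commit.
public class PostgreSqlAccountRepository : BaseRepositoryWithLogging
{
    public PostgreSqlAccountRepository(ManagingDbContext context, ILogger<SqlQueryLogger> logger, SentrySqlMonitoringService sentryMonitoringService)
        : base(context, logger, sentryMonitoringService)
    {
    }

    public Task<AccountEntity?> GetAccountAsync(int id)
    {
        // The wrapper times the query, warns above 2000ms, reports errors to Sentry and feeds loop detection.
        return ExecuteWithLoggingAsync(
            () => _context.Accounts.AsNoTracking().FirstOrDefaultAsync(a => a.Id == id),
            nameof(GetAccountAsync),
            ("id", id));
    }
}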

View File

@@ -1,14 +1,27 @@
using Managing.Infrastructure.Databases.PostgreSql.Entities;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace Managing.Infrastructure.Databases.PostgreSql;
public class ManagingDbContext : DbContext
{
private readonly ILogger<ManagingDbContext>? _logger;
private readonly SentrySqlMonitoringService? _sentryMonitoringService;
private readonly Dictionary<string, int> _queryExecutionCounts = new();
private readonly object _queryCountLock = new object();
public ManagingDbContext(DbContextOptions<ManagingDbContext> options) : base(options)
{
}
public ManagingDbContext(DbContextOptions<ManagingDbContext> options, ILogger<ManagingDbContext> logger, SentrySqlMonitoringService sentryMonitoringService)
: base(options)
{
_logger = logger;
_sentryMonitoringService = sentryMonitoringService;
}
public DbSet<AccountEntity> Accounts { get; set; }
public DbSet<UserEntity> Users { get; set; }
public DbSet<GeneticRequestEntity> GeneticRequests { get; set; }
@@ -607,7 +620,7 @@ public class ManagingDbContext : DbContext
{
try
{
var count = await Database.SqlQueryRaw<long>($"SELECT COUNT(*) FROM \"{tableName}\"").FirstOrDefaultAsync();
stats[tableName] = count;
}
catch
@@ -638,4 +651,63 @@ public class ManagingDbContext : DbContext
// Add any additional configuration here if needed
}
/// <summary>
/// Tracks query execution for loop detection and performance monitoring
/// </summary>
/// <param name="queryPattern">Pattern or hash of the query</param>
/// <param name="executionTime">Time taken to execute the query</param>
/// <param name="repositoryName">Name of the repository executing the query</param>
/// <param name="methodName">Name of the method executing the query</param>
public void TrackQueryExecution(string queryPattern, TimeSpan executionTime, string repositoryName, string methodName)
{
if (_logger == null || _sentryMonitoringService == null) return;
// Track execution count for this query pattern
lock (_queryCountLock)
{
_queryExecutionCounts[queryPattern] = _queryExecutionCounts.GetValueOrDefault(queryPattern, 0) + 1;
}
// Check for potential loops with Sentry integration
var isLoopDetected = _sentryMonitoringService.TrackQueryExecution(repositoryName, methodName, queryPattern, executionTime);
// Log query execution details
var logLevel = executionTime.TotalMilliseconds > 1000 ? LogLevel.Warning : LogLevel.Debug;
_logger.Log(logLevel,
"[SQL-QUERY-TRACKED] {Repository}.{Method} | Pattern: {Pattern} | Time: {Time}ms | Count: {Count}",
repositoryName, methodName, queryPattern, executionTime.TotalMilliseconds,
_queryExecutionCounts[queryPattern]);
// Alert on potential loops
if (isLoopDetected)
{
_logger.LogError(
"[SQL-LOOP-ALERT] Potential infinite loop detected in {Repository}.{Method} with pattern '{Pattern}'",
repositoryName, methodName, queryPattern);
}
}
/// <summary>
/// Gets current query execution statistics
/// </summary>
public Dictionary<string, int> GetQueryExecutionCounts()
{
lock (_queryCountLock)
{
return new Dictionary<string, int>(_queryExecutionCounts);
}
}
/// <summary>
/// Clears query execution tracking data
/// </summary>
public void ClearQueryTracking()
{
lock (_queryCountLock)
{
_queryExecutionCounts.Clear();
}
_logger?.LogInformation("[SQL-TRACKING] Query execution counts cleared");
}
}
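
A quick, hypothetical way to surface these counters (not part of this commit) is to read GetQueryExecutionCounts() from any place that has the context and a logger, for example a diagnostics job:

// Illustrative only; assumes a ManagingDbContext `dbContext` and an ILogger `logger` in scope.
var topPatterns = dbContext.GetQueryExecutionCounts()
    .OrderByDescending(kvp => kvp.Value)
    .Take(10);
foreach (var (pattern, count) in topPatterns)
{
    logger.LogInformation("[SQL-TRACKING] Pattern {Pattern} executed {Count} times", pattern, count);
}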

View File

@@ -1,10 +1,12 @@
using System.Data;
using System.Diagnostics;
using Microsoft.EntityFrameworkCore;
namespace Managing.Infrastructure.Databases.PostgreSql;
/// <summary>
/// Helper class for managing PostgreSQL database connections in Entity Framework repositories
/// Enhanced with comprehensive logging and monitoring capabilities
/// </summary>
public static class PostgreSqlConnectionHelper
{
@@ -20,6 +22,27 @@ public static class PostgreSqlConnectionHelper
}
}
/// <summary>
/// Ensures the database connection is open with logging
/// </summary>
/// <param name="context">The DbContext to manage the connection for</param>
/// <param name="logger">SQL query logger for monitoring</param>
public static async Task EnsureConnectionOpenAsync(DbContext context, SqlQueryLogger logger)
{
var stopwatch = Stopwatch.StartNew();
if (context.Database.GetDbConnection().State != ConnectionState.Open)
{
await context.Database.OpenConnectionAsync();
stopwatch.Stop();
logger.LogConnectionStateChange("OPENED", stopwatch.Elapsed);
}
else
{
logger.LogConnectionStateChange("ALREADY_OPEN");
}
}
/// <summary>
/// Safely closes the database connection if it was opened by us
/// </summary>
@@ -31,4 +54,25 @@ public static class PostgreSqlConnectionHelper
await context.Database.CloseConnectionAsync();
}
}
/// <summary>
/// Safely closes the database connection with logging
/// </summary>
/// <param name="context">The DbContext to manage the connection for</param>
/// <param name="logger">SQL query logger for monitoring</param>
public static async Task SafeCloseConnectionAsync(DbContext context, SqlQueryLogger logger)
{
var stopwatch = Stopwatch.StartNew();
if (context.Database.GetDbConnection().State == ConnectionState.Open)
{
await context.Database.CloseConnectionAsync();
stopwatch.Stop();
logger.LogConnectionStateChange("CLOSED", stopwatch.Elapsed);
}
else
{
logger.LogConnectionStateChange("ALREADY_CLOSED");
}
}
}
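
The two logging overloads are meant to bracket a repository call with the SqlQueryLogger added later in this commit. A minimal sketch of that pattern, with placeholder repository and method names:

// Illustrative usage; `logger` is an ILogger<SqlQueryLogger> and `context` a DbContext in scope.
var sqlLogger = new SqlQueryLogger(logger, "PostgreSqlTradingRepository", "GetPositionsAsync");
sqlLogger.LogOperationStart(("status", "Open"));
try
{
    await PostgreSqlConnectionHelper.EnsureConnectionOpenAsync(context, sqlLogger);
    // ... run EF Core queries here ...
}
finally
{
    await PostgreSqlConnectionHelper.SafeCloseConnectionAsync(context, sqlLogger);
    sqlLogger.LogOperationComplete();
}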

View File

@@ -5,18 +5,17 @@ using Managing.Domain.Trades;
using Managing.Domain.Users;
using Managing.Infrastructure.Databases.PostgreSql.Entities;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
using Newtonsoft.Json;
using static Managing.Common.Enums;
namespace Managing.Infrastructure.Databases.PostgreSql;
public class PostgreSqlTradingRepository : BaseRepositoryWithLogging, ITradingRepository
{
public PostgreSqlTradingRepository(ManagingDbContext context, ILogger<SqlQueryLogger> logger, SentrySqlMonitoringService sentryMonitoringService)
: base(context, logger, sentryMonitoringService)
{
}
#region Scenario Methods
@@ -268,26 +267,29 @@ public class PostgreSqlTradingRepository : ITradingRepository
public async Task<Position> GetPositionByIdentifierAsync(Guid identifier)
{
return await ExecuteWithLoggingAsync(async () =>
{
try
{
await PostgreSqlConnectionHelper.EnsureConnectionOpenAsync(_context);
var position = await _context.Positions
.AsNoTracking()
.Include(p => p.User)
.Include(p => p.OpenTrade)
.Include(p => p.StopLossTrade)
.Include(p => p.TakeProfit1Trade)
.Include(p => p.TakeProfit2Trade)
.FirstOrDefaultAsync(p => p.Identifier == identifier)
.ConfigureAwait(false);
return PostgreSqlMappers.Map(position ?? throw new InvalidOperationException("Position not found"));
}
finally
{
await PostgreSqlConnectionHelper.SafeCloseConnectionAsync(_context);
}
}, nameof(GetPositionByIdentifierAsync), ("identifier", identifier));
}
public IEnumerable<Position> GetPositions(PositionInitiator positionInitiator)
@@ -389,51 +391,63 @@ public class PostgreSqlTradingRepository : ITradingRepository
public async Task UpdatePositionAsync(Position position)
{
await ExecuteWithLoggingAsync(async () =>
{
try
{
await PostgreSqlConnectionHelper.EnsureConnectionOpenAsync(_context);
var entity = _context.Positions
.AsTracking()
.Include(p => p.OpenTrade)
.Include(p => p.StopLossTrade)
.Include(p => p.TakeProfit1Trade)
.Include(p => p.TakeProfit2Trade)
.FirstOrDefault(p => p.Identifier == position.Identifier);
if (entity != null)
{
entity.ProfitAndLoss = position.ProfitAndLoss?.Realized ?? 0;
entity.NetPnL = position.ProfitAndLoss?.Net ?? 0;
entity.UiFees = position.UiFees;
// entity.OriginDirection = position.OriginDirection;
entity.GasFees = position.GasFees;
entity.Status = position.Status;
entity.MoneyManagementJson = position.MoneyManagement != null
? JsonConvert.SerializeObject(position.MoneyManagement)
: null;
entity.UpdatedAt = DateTime.UtcNow;
// Update related trades directly through the position's trade references
// This ensures we're updating the correct trade records for this specific position
if (position.Open != null && entity.OpenTrade != null)
{
UpdateTradeEntity(entity.OpenTrade, position.Open);
}
if (position.StopLoss != null && entity.StopLossTrade != null)
{
UpdateTradeEntity(entity.StopLossTrade, position.StopLoss);
}
if (position.TakeProfit1 != null && entity.TakeProfit1Trade != null)
{
UpdateTradeEntity(entity.TakeProfit1Trade, position.TakeProfit1);
}
if (position.TakeProfit2 != null && entity.TakeProfit2Trade != null)
{
UpdateTradeEntity(entity.TakeProfit2Trade, position.TakeProfit2);
}
await _context.SaveChangesAsync();
}
}
finally
{
await PostgreSqlConnectionHelper.SafeCloseConnectionAsync(_context);
}
}, nameof(UpdatePositionAsync), ("positionIdentifier", position.Identifier), ("positionStatus", position.Status));
}
/// <summary>

View File

@@ -1,119 +1,128 @@
using Managing.Application.Abstractions.Repositories;
using Managing.Domain.Users;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
namespace Managing.Infrastructure.Databases.PostgreSql;
public class PostgreSqlUserRepository : BaseRepositoryWithLogging, IUserRepository
{
public PostgreSqlUserRepository(ManagingDbContext context, ILogger<SqlQueryLogger> logger, SentrySqlMonitoringService sentryMonitoringService)
: base(context, logger, sentryMonitoringService)
{
}
public async Task<User> GetUserByAgentNameAsync(string agentName)
{
return await ExecuteWithLoggingAsync(async () =>
{
try
{
await PostgreSqlConnectionHelper.EnsureConnectionOpenAsync(_context);
var userEntity = await _context.Users
.AsNoTracking()
.FirstOrDefaultAsync(u => u.AgentName == agentName)
.ConfigureAwait(false);
return PostgreSqlMappers.Map(userEntity ?? throw new InvalidOperationException("User not found"));
}
finally
{
// Always ensure the connection is closed after the operation
await PostgreSqlConnectionHelper.SafeCloseConnectionAsync(_context);
}
}, nameof(GetUserByAgentNameAsync), ("agentName", agentName));
}
public async Task<User> GetUserByNameAsync(string name)
{
return await ExecuteWithLoggingAsync(async () =>
{
try
{
await PostgreSqlConnectionHelper.EnsureConnectionOpenAsync(_context);
var userEntity = await _context.Users
.AsNoTracking()
.FirstOrDefaultAsync(u => u.Name == name)
.ConfigureAwait(false);
return PostgreSqlMappers.Map(userEntity ?? throw new InvalidOperationException("User not found"));
}
finally
{
// Always ensure the connection is closed after the operation
await PostgreSqlConnectionHelper.SafeCloseConnectionAsync(_context);
}
}, nameof(GetUserByNameAsync), ("name", name));
}
public async Task<IEnumerable<User>> GetAllUsersAsync()
{
return await ExecuteWithLoggingAsync(async () =>
{
try
{
await PostgreSqlConnectionHelper.EnsureConnectionOpenAsync(_context);
var userEntities = await _context.Users
.AsNoTracking()
.ToListAsync()
.ConfigureAwait(false);
return userEntities.Select(PostgreSqlMappers.Map);
}
finally
{
// Always ensure the connection is closed after the operation
await PostgreSqlConnectionHelper.SafeCloseConnectionAsync(_context);
}
}, nameof(GetAllUsersAsync));
}
public async Task SaveOrUpdateUserAsync(User user)
{
await ExecuteWithLoggingAsync(async () =>
{
try
{
var existingUser = await _context.Users
.AsTracking()
.FirstOrDefaultAsync(u => u.Name == user.Name)
.ConfigureAwait(false);
if (existingUser != null)
{
// Update existing user
existingUser.AgentName = user.AgentName;
existingUser.AvatarUrl = user.AvatarUrl;
existingUser.TelegramChannel = user.TelegramChannel;
_context.Users.Update(existingUser);
// Update the user object with the existing user's ID
user.Id = existingUser.Id;
}
else
{
// Insert new user
var userEntity = PostgreSqlMappers.Map(user);
_context.Users.Add(userEntity);
// Update the user object with the database-generated ID after save
await _context.SaveChangesAsync().ConfigureAwait(false);
user.Id = userEntity.Id;
return; // Exit early since we already saved
}
await _context.SaveChangesAsync().ConfigureAwait(false);
}
catch (Exception e)
{
Console.WriteLine(e);
throw new Exception("Cannot save or update user");
}
}, nameof(SaveOrUpdateUserAsync), ("userName", user.Name), ("userId", user.Id));
}
}
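
The new constructors imply that the monitoring service, settings and logger are resolvable from DI. A hedged sketch of what the composition root would need (the "SqlMonitoring" section name and the lifetimes are assumptions; the actual wiring lives outside this diff):

// Hypothetical registration; adjust to the solution's real composition root.
services.Configure<SqlMonitoringSettings>(configuration.GetSection("SqlMonitoring"));
services.AddSingleton<SentrySqlMonitoringService>();
// ManagingDbContext is assumed to be registered already via AddDbContext<ManagingDbContext>(...).
services.AddScoped<ITradingRepository, PostgreSqlTradingRepository>();
services.AddScoped<IUserRepository, PostgreSqlUserRepository>();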

View File

@@ -0,0 +1,573 @@
using System.Collections.Concurrent;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace Managing.Infrastructure.Databases.PostgreSql;
/// <summary>
/// Enhanced SQL loop detection service with Sentry integration
/// Monitors query patterns and execution frequency, sending critical alerts to Sentry
/// </summary>
public class SentrySqlMonitoringService : IDisposable
{
private readonly ILogger<SentrySqlMonitoringService> _logger;
private readonly SqlMonitoringSettings _settings;
private readonly ConcurrentDictionary<string, QueryExecutionTracker> _queryTrackers;
private readonly Timer _cleanupTimer;
public SentrySqlMonitoringService(ILogger<SentrySqlMonitoringService> logger, IOptions<SqlMonitoringSettings> settings)
{
_logger = logger;
_settings = settings.Value;
_queryTrackers = new ConcurrentDictionary<string, QueryExecutionTracker>();
// Setup cleanup timer to remove old tracking data
_cleanupTimer = new Timer(CleanupOldTrackers, null, TimeSpan.FromMinutes(1), TimeSpan.FromMinutes(1));
}
/// <summary>
/// Tracks a query execution and detects potential loops with Sentry integration
/// </summary>
/// <param name="repositoryName">Name of the repository executing the query</param>
/// <param name="methodName">Name of the method executing the query</param>
/// <param name="queryPattern">Pattern or hash of the query being executed</param>
/// <param name="executionTime">Time taken to execute the query</param>
/// <returns>True if a potential loop is detected</returns>
public bool TrackQueryExecution(string repositoryName, string methodName, string queryPattern, TimeSpan executionTime)
{
var key = $"{repositoryName}.{methodName}.{queryPattern}";
var now = DateTime.UtcNow;
var tracker = _queryTrackers.AddOrUpdate(key,
new QueryExecutionTracker
{
RepositoryName = repositoryName,
MethodName = methodName,
QueryPattern = queryPattern,
FirstExecution = now,
LastExecution = now,
ExecutionCount = 1,
TotalExecutionTime = executionTime,
MaxExecutionTime = executionTime,
MinExecutionTime = executionTime
},
(k, existing) =>
{
existing.LastExecution = now;
existing.ExecutionCount++;
existing.TotalExecutionTime += executionTime;
existing.MaxExecutionTime = existing.MaxExecutionTime > executionTime ? existing.MaxExecutionTime : executionTime;
existing.MinExecutionTime = existing.MinExecutionTime < executionTime ? existing.MinExecutionTime : executionTime;
return existing;
});
// Check for potential loop conditions
var timeSinceFirst = now - tracker.FirstExecution;
var executionsPerMinute = tracker.ExecutionCount / Math.Max(timeSinceFirst.TotalMinutes, 0.1);
var isLoopDetected = false;
var isCriticalAlert = false;
var reasons = new List<string>();
var sentryTags = new Dictionary<string, string>();
var sentryExtras = new Dictionary<string, object>();
// Check execution frequency
if (executionsPerMinute > 20)
{
isLoopDetected = true;
reasons.Add($"High frequency: {executionsPerMinute:F1} executions/minute");
if (executionsPerMinute > 50) // Critical frequency threshold
{
isCriticalAlert = true;
sentryTags["alert_level"] = "critical";
sentryTags["issue_type"] = "high_frequency_query";
}
}
// Check total execution count in window
if (tracker.ExecutionCount > _settings.MaxQueryExecutionsPerWindow)
{
isLoopDetected = true;
reasons.Add($"High count: {tracker.ExecutionCount} executions in {timeSinceFirst.TotalMinutes:F1} minutes");
if (tracker.ExecutionCount > _settings.SentryAlertThreshold * _settings.MaxQueryExecutionsPerWindow)
{
isCriticalAlert = true;
sentryTags["alert_level"] = "critical";
sentryTags["issue_type"] = "high_execution_count";
}
}
// Check for rapid successive executions
if (tracker.ExecutionCount > 5 && timeSinceFirst.TotalSeconds < 10)
{
isLoopDetected = true;
isCriticalAlert = true;
reasons.Add($"Rapid execution: {tracker.ExecutionCount} executions in {timeSinceFirst.TotalSeconds:F1} seconds");
sentryTags["alert_level"] = "critical";
sentryTags["issue_type"] = "rapid_execution";
}
// Check for consistently slow queries
if (tracker.ExecutionCount > 3 && tracker.AverageExecutionTime.TotalMilliseconds > 1000)
{
isLoopDetected = true;
reasons.Add($"Consistently slow: {tracker.AverageExecutionTime.TotalMilliseconds:F0}ms average");
if (tracker.AverageExecutionTime > TimeSpan.FromSeconds(5)) // Critical slow query threshold
{
isCriticalAlert = true;
sentryTags["alert_level"] = "critical";
sentryTags["issue_type"] = "slow_query";
}
}
// Prepare Sentry data
sentryTags["repository"] = repositoryName;
sentryTags["method"] = methodName;
sentryTags["query_pattern"] = queryPattern;
sentryTags["environment"] = Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT") ?? "Unknown";
sentryExtras["execution_count"] = tracker.ExecutionCount;
sentryExtras["executions_per_minute"] = executionsPerMinute;
sentryExtras["average_execution_time_ms"] = tracker.AverageExecutionTime.TotalMilliseconds;
sentryExtras["min_execution_time_ms"] = tracker.MinExecutionTime.TotalMilliseconds;
sentryExtras["max_execution_time_ms"] = tracker.MaxExecutionTime.TotalMilliseconds;
sentryExtras["total_execution_time_ms"] = tracker.TotalExecutionTime.TotalMilliseconds;
sentryExtras["first_execution"] = tracker.FirstExecution.ToString("yyyy-MM-dd HH:mm:ss.fff");
sentryExtras["last_execution"] = tracker.LastExecution.ToString("yyyy-MM-dd HH:mm:ss.fff");
sentryExtras["time_window_minutes"] = timeSinceFirst.TotalMinutes;
sentryExtras["detection_reasons"] = string.Join("; ", reasons);
if (isLoopDetected)
{
_logger.LogWarning(
"[SQL-LOOP-DETECTED] {Repository}.{Method} | Pattern: {Pattern} | Count: {Count} | Reasons: {Reasons} | Avg Time: {AvgTime}ms",
repositoryName, methodName, queryPattern, tracker.ExecutionCount,
string.Join(", ", reasons), tracker.AverageExecutionTime.TotalMilliseconds);
// Log detailed execution history
_logger.LogWarning(
"[SQL-LOOP-DETAILS] {Repository}.{Method} | First: {First} | Last: {Last} | Min: {Min}ms | Max: {Max}ms | Total: {Total}ms",
repositoryName, methodName, tracker.FirstExecution.ToString("HH:mm:ss.fff"),
tracker.LastExecution.ToString("HH:mm:ss.fff"), tracker.MinExecutionTime.TotalMilliseconds,
tracker.MaxExecutionTime.TotalMilliseconds, tracker.TotalExecutionTime.TotalMilliseconds);
}
// Send to Sentry for critical alerts
if (isCriticalAlert)
{
SendCriticalAlertToSentry(repositoryName, methodName, queryPattern, reasons, sentryTags, sentryExtras);
}
else if (isLoopDetected)
{
SendWarningToSentry(repositoryName, methodName, queryPattern, reasons, sentryTags, sentryExtras);
}
return isLoopDetected;
}
/// <summary>
/// Sends a critical alert to Sentry for immediate attention
/// </summary>
private void SendCriticalAlertToSentry(string repositoryName, string methodName, string queryPattern,
List<string> reasons, Dictionary<string, string> tags, Dictionary<string, object> extras)
{
try
{
var message = $"CRITICAL SQL Loop Detected: {repositoryName}.{methodName}";
var exception = new InvalidOperationException($"Potential infinite SQL loop detected: {string.Join(", ", reasons)}");
// Add SQL-specific data to exception
exception.Data["Repository"] = repositoryName;
exception.Data["Method"] = methodName;
exception.Data["QueryPattern"] = queryPattern;
exception.Data["DetectionReasons"] = string.Join("; ", reasons);
var sentryId = SentrySdk.CaptureException(exception, scope =>
{
// Set tags for filtering and grouping
foreach (var tag in tags)
{
scope.SetTag(tag.Key, tag.Value);
}
// Set extra data for debugging
foreach (var extra in extras)
{
scope.SetExtra(extra.Key, extra.Value);
}
// Set fingerprint for better grouping
scope.SetFingerprint(new[] { "sql-loop-detection", repositoryName, methodName });
// Set level
scope.Level = SentryLevel.Error;
// Add breadcrumb
scope.AddBreadcrumb(
message: $"Critical SQL loop detected in {repositoryName}.{methodName}",
category: "sql-monitoring",
level: BreadcrumbLevel.Error,
data: new Dictionary<string, string>
{
["query_pattern"] = queryPattern,
["execution_count"] = extras["execution_count"].ToString(),
["executions_per_minute"] = extras["executions_per_minute"].ToString()
}
);
// Set user context if available
scope.SetExtra("repository", repositoryName);
scope.SetExtra("method", methodName);
scope.SetExtra("query_pattern", queryPattern);
scope.SetExtra("detection_time", DateTime.UtcNow);
scope.SetExtra("alert_type", "critical_loop_detection");
});
_logger.LogError(
"[SENTRY-CRITICAL] Sent critical SQL loop alert to Sentry: {SentryId} | {Repository}.{Method} | {Reasons}",
sentryId, repositoryName, methodName, string.Join(", ", reasons));
}
catch (Exception ex)
{
_logger.LogError(ex, "[SENTRY-ERROR] Failed to send critical alert to Sentry for {Repository}.{Method}",
repositoryName, methodName);
}
}
/// <summary>
/// Sends a warning to Sentry for monitoring purposes
/// </summary>
private void SendWarningToSentry(string repositoryName, string methodName, string queryPattern,
List<string> reasons, Dictionary<string, string> tags, Dictionary<string, object> extras)
{
try
{
var message = $"SQL Performance Warning: {repositoryName}.{methodName}";
var sentryId = SentrySdk.CaptureMessage(message, scope =>
{
// Set tags for filtering and grouping
foreach (var tag in tags)
{
scope.SetTag(tag.Key, tag.Value);
}
// Set extra data for debugging
foreach (var extra in extras)
{
scope.SetExtra(extra.Key, extra.Value);
}
// Set fingerprint for better grouping
scope.SetFingerprint(new[] { "sql-performance-warning", repositoryName, methodName });
// Set level
scope.Level = SentryLevel.Warning;
// Add breadcrumb
scope.AddBreadcrumb(
message: $"SQL performance warning in {repositoryName}.{methodName}",
category: "sql-monitoring",
level: BreadcrumbLevel.Warning,
data: new Dictionary<string, string>
{
["query_pattern"] = queryPattern,
["execution_count"] = extras["execution_count"].ToString(),
["executions_per_minute"] = extras["executions_per_minute"].ToString()
}
);
// Set context
scope.SetExtra("repository", repositoryName);
scope.SetExtra("method", methodName);
scope.SetExtra("query_pattern", queryPattern);
scope.SetExtra("detection_time", DateTime.UtcNow);
scope.SetExtra("alert_type", "performance_warning");
});
_logger.LogWarning(
"[SENTRY-WARNING] Sent SQL performance warning to Sentry: {SentryId} | {Repository}.{Method} | {Reasons}",
sentryId, repositoryName, methodName, string.Join(", ", reasons));
}
catch (Exception ex)
{
_logger.LogError(ex, "[SENTRY-ERROR] Failed to send warning to Sentry for {Repository}.{Method}",
repositoryName, methodName);
}
}
/// <summary>
/// Sends a custom performance metric to Sentry
/// </summary>
public void SendPerformanceMetricToSentry(string repositoryName, string methodName, string metricName,
double value, Dictionary<string, string> tags = null)
{
try
{
var sentryTags = tags ?? new Dictionary<string, string>();
sentryTags["repository"] = repositoryName;
sentryTags["method"] = methodName;
sentryTags["metric_name"] = metricName;
SentrySdk.AddBreadcrumb(
message: $"SQL Performance Metric: {metricName} = {value}",
category: "sql-performance",
level: BreadcrumbLevel.Info,
data: new Dictionary<string, string>
{
["repository"] = repositoryName,
["method"] = methodName,
["metric_name"] = metricName,
["value"] = value.ToString()
});
_logger.LogDebug("[SENTRY-METRIC] Sent performance metric to Sentry: {Metric} = {Value} for {Repository}.{Method}",
metricName, value, repositoryName, methodName);
}
catch (Exception ex)
{
_logger.LogError(ex, "[SENTRY-ERROR] Failed to send performance metric to Sentry");
}
}
/// <summary>
/// Gets current statistics for all tracked queries
/// </summary>
public Dictionary<string, QueryExecutionStats> GetQueryStatistics()
{
var stats = new Dictionary<string, QueryExecutionStats>();
var now = DateTime.UtcNow;
foreach (var kvp in _queryTrackers)
{
var tracker = kvp.Value;
var timeSinceFirst = now - tracker.FirstExecution;
stats[kvp.Key] = new QueryExecutionStats
{
RepositoryName = tracker.RepositoryName,
MethodName = tracker.MethodName,
QueryPattern = tracker.QueryPattern,
ExecutionCount = tracker.ExecutionCount,
FirstExecution = tracker.FirstExecution,
LastExecution = tracker.LastExecution,
AverageExecutionTime = tracker.AverageExecutionTime,
MinExecutionTime = tracker.MinExecutionTime,
MaxExecutionTime = tracker.MaxExecutionTime,
ExecutionsPerMinute = tracker.ExecutionCount / Math.Max(timeSinceFirst.TotalMinutes, 0.1),
IsActive = timeSinceFirst < TimeSpan.FromSeconds(_settings.LoopDetectionWindowSeconds)
};
}
return stats;
}
/// <summary>
/// Clears all tracking data
/// </summary>
public void ClearAllTracking()
{
_queryTrackers.Clear();
_logger.LogInformation("[SQL-LOOP-DETECTION] All tracking data cleared");
}
private void CleanupOldTrackers(object? state)
{
var now = DateTime.UtcNow;
var keysToRemove = new List<string>();
foreach (var kvp in _queryTrackers)
{
var timeSinceLastExecution = now - kvp.Value.LastExecution;
// Use configurable retention period for monitoring dashboard
// This allows users to see query statistics even if queries haven't been executed recently
var retentionPeriod = TimeSpan.FromMinutes(_settings.DataRetentionMinutes);
if (timeSinceLastExecution > retentionPeriod)
{
keysToRemove.Add(kvp.Key);
}
}
foreach (var key in keysToRemove)
{
_queryTrackers.TryRemove(key, out _);
}
if (keysToRemove.Count > 0)
{
_logger.LogDebug("[SQL-MONITORING] Cleaned up {Count} old trackers (retention: {RetentionMinutes} minutes)", keysToRemove.Count, _settings.DataRetentionMinutes);
}
}
/// <summary>
/// Sends slow query alert to Sentry asynchronously
/// </summary>
public async Task SendSlowQueryAlertAsync(string repositoryName, string methodName, string queryPattern, TimeSpan executionTime)
{
try
{
var message = $"Slow SQL Query: {repositoryName}.{methodName}";
var exception = new TimeoutException($"SQL query took {executionTime.TotalMilliseconds:F0}ms to execute");
var sentryId = SentrySdk.CaptureException(exception, scope =>
{
scope.SetTag("repository", repositoryName);
scope.SetTag("method", methodName);
scope.SetTag("alert_type", "slow_query");
scope.SetTag("environment", Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT") ?? "Unknown");
scope.SetExtra("query_pattern", queryPattern);
scope.SetExtra("execution_time_ms", executionTime.TotalMilliseconds);
scope.SetExtra("threshold_ms", 2000);
scope.SetFingerprint(new[] { "slow-query", repositoryName, methodName });
scope.Level = SentryLevel.Warning;
scope.AddBreadcrumb(
message: $"Slow SQL query in {repositoryName}.{methodName}",
category: "sql-monitoring",
level: BreadcrumbLevel.Warning,
data: new Dictionary<string, string>
{
["query_pattern"] = queryPattern,
["execution_time_ms"] = executionTime.TotalMilliseconds.ToString()
}
);
});
_logger.LogWarning(
"[SENTRY-SLOW-QUERY] Sent slow query alert to Sentry: {SentryId} | {Repository}.{Method} | {Time}ms",
sentryId, repositoryName, methodName, executionTime.TotalMilliseconds);
}
catch (Exception ex)
{
_logger.LogError(ex, "[SENTRY-ERROR] Failed to send slow query alert to Sentry");
}
}
/// <summary>
/// Sends SQL error alert to Sentry asynchronously
/// </summary>
public async Task SendSqlErrorAlertAsync(string repositoryName, string methodName, string queryPattern, TimeSpan executionTime, Exception exception)
{
try
{
var sentryId = SentrySdk.CaptureException(exception, scope =>
{
scope.SetTag("repository", repositoryName);
scope.SetTag("method", methodName);
scope.SetTag("alert_type", "sql_error");
scope.SetTag("environment", Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT") ?? "Unknown");
scope.SetExtra("query_pattern", queryPattern);
scope.SetExtra("execution_time_ms", executionTime.TotalMilliseconds);
scope.SetExtra("error_type", exception.GetType().Name);
scope.SetFingerprint(new[] { "sql-error", repositoryName, methodName, exception.GetType().Name });
scope.Level = SentryLevel.Error;
scope.AddBreadcrumb(
message: $"SQL error in {repositoryName}.{methodName}",
category: "sql-monitoring",
level: BreadcrumbLevel.Error,
data: new Dictionary<string, string>
{
["query_pattern"] = queryPattern,
["execution_time_ms"] = executionTime.TotalMilliseconds.ToString(),
["error_type"] = exception.GetType().Name
}
);
});
_logger.LogError(
"[SENTRY-SQL-ERROR] Sent SQL error alert to Sentry: {SentryId} | {Repository}.{Method} | {Error}",
sentryId, repositoryName, methodName, exception.Message);
}
catch (Exception ex)
{
_logger.LogError(ex, "[SENTRY-ERROR] Failed to send SQL error alert to Sentry");
}
}
/// <summary>
/// Checks if monitoring is enabled globally
/// </summary>
public bool IsMonitoringEnabled()
{
return _settings.Enabled;
}
/// <summary>
/// Checks if logging is enabled
/// </summary>
public bool IsLoggingEnabled()
{
return _settings.LoggingEnabled;
}
/// <summary>
/// Checks if Sentry integration is enabled
/// </summary>
public bool IsSentryEnabled()
{
return _settings.SentryEnabled;
}
/// <summary>
/// Checks if loop detection is enabled
/// </summary>
public bool IsLoopDetectionEnabled()
{
return _settings.LoopDetectionEnabled;
}
/// <summary>
/// Checks if performance monitoring is enabled
/// </summary>
public bool IsPerformanceMonitoringEnabled()
{
return _settings.PerformanceMonitoringEnabled;
}
/// <summary>
/// Checks if a query should be logged based on configuration
/// </summary>
public bool ShouldLogQuery(TimeSpan executionTime)
{
if (!_settings.LoggingEnabled) return false;
if (_settings.LogErrorsOnly) return false; // Only log errors, not normal queries
if (_settings.LogSlowQueriesOnly)
{
return executionTime.TotalMilliseconds > _settings.SlowQueryThresholdMs;
}
return true; // Log all queries if logging is enabled
}
public void Dispose()
{
_cleanupTimer?.Dispose();
}
private class QueryExecutionTracker
{
public string RepositoryName { get; set; } = string.Empty;
public string MethodName { get; set; } = string.Empty;
public string QueryPattern { get; set; } = string.Empty;
public DateTime FirstExecution { get; set; }
public DateTime LastExecution { get; set; }
public int ExecutionCount { get; set; }
public TimeSpan TotalExecutionTime { get; set; }
public TimeSpan MaxExecutionTime { get; set; }
public TimeSpan MinExecutionTime { get; set; }
public TimeSpan AverageExecutionTime =>
ExecutionCount > 0 ? TimeSpan.FromTicks(TotalExecutionTime.Ticks / ExecutionCount) : TimeSpan.Zero;
}
}
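
A rough illustration of how the rapid-execution rule trips, e.g. from a unit test. The pattern string and timings are arbitrary, and the Sentry calls should be no-ops when the SDK is not initialised.

using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;

var service = new SentrySqlMonitoringService(
    NullLogger<SentrySqlMonitoringService>.Instance,
    Options.Create(new SqlMonitoringSettings()));

var loopDetected = false;
for (var i = 0; i < 6; i++)
{
    // More than 5 executions of the same key inside 10 seconds is flagged as a potential loop.
    loopDetected = service.TrackQueryExecution(
        "PostgreSqlTradingRepository",
        "GetPositionByIdentifierAsync",
        "GetPositionByIdentifierAsync(identifier=Guid)",
        TimeSpan.FromMilliseconds(50));
}
// loopDetected is now true; GetQueryStatistics() exposes the same data for a dashboard.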

View File

@@ -0,0 +1,221 @@
using System.Collections.Concurrent;
using Microsoft.Extensions.Logging;
namespace Managing.Infrastructure.Databases.PostgreSql;
/// <summary>
/// Service for detecting potential SQL query loops and performance issues
/// Monitors query patterns and execution frequency to identify problematic operations
/// </summary>
public class SqlLoopDetectionService : IDisposable
{
private readonly ILogger<SqlLoopDetectionService> _logger;
private readonly ConcurrentDictionary<string, QueryExecutionTracker> _queryTrackers;
private readonly Timer _cleanupTimer;
private readonly TimeSpan _trackingWindow = TimeSpan.FromMinutes(5);
private readonly int _maxExecutionsPerWindow = 10;
private readonly TimeSpan _cleanupInterval = TimeSpan.FromMinutes(1);
public SqlLoopDetectionService(ILogger<SqlLoopDetectionService> logger)
{
_logger = logger;
_queryTrackers = new ConcurrentDictionary<string, QueryExecutionTracker>();
// Setup cleanup timer to remove old tracking data
_cleanupTimer = new Timer(CleanupOldTrackers, null, _cleanupInterval, _cleanupInterval);
}
/// <summary>
/// Tracks a query execution and detects potential loops
/// </summary>
/// <param name="repositoryName">Name of the repository executing the query</param>
/// <param name="methodName">Name of the method executing the query</param>
/// <param name="queryPattern">Pattern or hash of the query being executed</param>
/// <param name="executionTime">Time taken to execute the query</param>
/// <returns>True if a potential loop is detected</returns>
public bool TrackQueryExecution(string repositoryName, string methodName, string queryPattern, TimeSpan executionTime)
{
var key = $"{repositoryName}.{methodName}.{queryPattern}";
var now = DateTime.UtcNow;
var tracker = _queryTrackers.AddOrUpdate(key,
new QueryExecutionTracker
{
RepositoryName = repositoryName,
MethodName = methodName,
QueryPattern = queryPattern,
FirstExecution = now,
LastExecution = now,
ExecutionCount = 1,
TotalExecutionTime = executionTime,
MaxExecutionTime = executionTime,
MinExecutionTime = executionTime
},
(k, existing) =>
{
existing.LastExecution = now;
existing.ExecutionCount++;
existing.TotalExecutionTime += executionTime;
existing.MaxExecutionTime = existing.MaxExecutionTime > executionTime ? existing.MaxExecutionTime : executionTime;
existing.MinExecutionTime = existing.MinExecutionTime < executionTime ? existing.MinExecutionTime : executionTime;
return existing;
});
// Check for potential loop conditions
var timeSinceFirst = now - tracker.FirstExecution;
var executionsPerMinute = tracker.ExecutionCount / Math.Max(timeSinceFirst.TotalMinutes, 0.1);
var isLoopDetected = false;
var reasons = new List<string>();
// Check execution frequency
if (executionsPerMinute > 20)
{
isLoopDetected = true;
reasons.Add($"High frequency: {executionsPerMinute:F1} executions/minute");
}
// Check total execution count in window
if (tracker.ExecutionCount > _maxExecutionsPerWindow)
{
isLoopDetected = true;
reasons.Add($"High count: {tracker.ExecutionCount} executions in {timeSinceFirst.TotalMinutes:F1} minutes");
}
// Check for rapid successive executions
if (tracker.ExecutionCount > 5 && timeSinceFirst.TotalSeconds < 10)
{
isLoopDetected = true;
reasons.Add($"Rapid execution: {tracker.ExecutionCount} executions in {timeSinceFirst.TotalSeconds:F1} seconds");
}
// Check for consistently slow queries
if (tracker.ExecutionCount > 3 && tracker.AverageExecutionTime.TotalMilliseconds > 1000)
{
isLoopDetected = true;
reasons.Add($"Consistently slow: {tracker.AverageExecutionTime.TotalMilliseconds:F0}ms average");
}
if (isLoopDetected)
{
_logger.LogWarning(
"[SQL-LOOP-DETECTED] {Repository}.{Method} | Pattern: {Pattern} | Count: {Count} | Reasons: {Reasons} | Avg Time: {AvgTime}ms",
repositoryName, methodName, queryPattern, tracker.ExecutionCount,
string.Join(", ", reasons), tracker.AverageExecutionTime.TotalMilliseconds);
// Log detailed execution history
_logger.LogWarning(
"[SQL-LOOP-DETAILS] {Repository}.{Method} | First: {First} | Last: {Last} | Min: {Min}ms | Max: {Max}ms | Total: {Total}ms",
repositoryName, methodName, tracker.FirstExecution.ToString("HH:mm:ss.fff"),
tracker.LastExecution.ToString("HH:mm:ss.fff"), tracker.MinExecutionTime.TotalMilliseconds,
tracker.MaxExecutionTime.TotalMilliseconds, tracker.TotalExecutionTime.TotalMilliseconds);
}
return isLoopDetected;
}
/// <summary>
/// Gets current statistics for all tracked queries
/// </summary>
public Dictionary<string, QueryExecutionStats> GetQueryStatistics()
{
var stats = new Dictionary<string, QueryExecutionStats>();
var now = DateTime.UtcNow;
foreach (var kvp in _queryTrackers)
{
var tracker = kvp.Value;
var timeSinceFirst = now - tracker.FirstExecution;
stats[kvp.Key] = new QueryExecutionStats
{
RepositoryName = tracker.RepositoryName,
MethodName = tracker.MethodName,
QueryPattern = tracker.QueryPattern,
ExecutionCount = tracker.ExecutionCount,
FirstExecution = tracker.FirstExecution,
LastExecution = tracker.LastExecution,
AverageExecutionTime = tracker.AverageExecutionTime,
MinExecutionTime = tracker.MinExecutionTime,
MaxExecutionTime = tracker.MaxExecutionTime,
ExecutionsPerMinute = tracker.ExecutionCount / Math.Max(timeSinceFirst.TotalMinutes, 0.1),
IsActive = timeSinceFirst < _trackingWindow
};
}
return stats;
}
/// <summary>
/// Clears all tracking data
/// </summary>
public void ClearAllTracking()
{
_queryTrackers.Clear();
_logger.LogInformation("[SQL-LOOP-DETECTION] All tracking data cleared");
}
private void CleanupOldTrackers(object? state)
{
var now = DateTime.UtcNow;
var keysToRemove = new List<string>();
foreach (var kvp in _queryTrackers)
{
var timeSinceLastExecution = now - kvp.Value.LastExecution;
if (timeSinceLastExecution > _trackingWindow)
{
keysToRemove.Add(kvp.Key);
}
}
foreach (var key in keysToRemove)
{
_queryTrackers.TryRemove(key, out _);
}
if (keysToRemove.Count > 0)
{
_logger.LogDebug("[SQL-LOOP-DETECTION] Cleaned up {Count} old trackers", keysToRemove.Count);
}
}
public void Dispose()
{
_cleanupTimer?.Dispose();
}
private class QueryExecutionTracker
{
public string RepositoryName { get; set; } = string.Empty;
public string MethodName { get; set; } = string.Empty;
public string QueryPattern { get; set; } = string.Empty;
public DateTime FirstExecution { get; set; }
public DateTime LastExecution { get; set; }
public int ExecutionCount { get; set; }
public TimeSpan TotalExecutionTime { get; set; }
public TimeSpan MaxExecutionTime { get; set; }
public TimeSpan MinExecutionTime { get; set; }
public TimeSpan AverageExecutionTime =>
ExecutionCount > 0 ? TimeSpan.FromTicks(TotalExecutionTime.Ticks / ExecutionCount) : TimeSpan.Zero;
}
}
/// <summary>
/// Statistics for query execution tracking
/// </summary>
public class QueryExecutionStats
{
public string RepositoryName { get; set; } = string.Empty;
public string MethodName { get; set; } = string.Empty;
public string QueryPattern { get; set; } = string.Empty;
public int ExecutionCount { get; set; }
public DateTime FirstExecution { get; set; }
public DateTime LastExecution { get; set; }
public TimeSpan AverageExecutionTime { get; set; }
public TimeSpan MinExecutionTime { get; set; }
public TimeSpan MaxExecutionTime { get; set; }
public double ExecutionsPerMinute { get; set; }
public bool IsActive { get; set; }
}
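
This class is the plain (non-Sentry) variant of the detector above; the statistics it exposes can be dumped for triage. A small sketch, assuming a SqlLoopDetectionService instance named loopDetection and an ILogger named logger are in scope:

foreach (var (key, stat) in loopDetection.GetQueryStatistics()
             .Where(kvp => kvp.Value.IsActive)
             .OrderByDescending(kvp => kvp.Value.ExecutionsPerMinute))
{
    logger.LogInformation(
        "[SQL-STATS] {Key}: {Count} executions, {PerMinute:F1}/min, avg {Avg:F0}ms",
        key, stat.ExecutionCount, stat.ExecutionsPerMinute, stat.AverageExecutionTime.TotalMilliseconds);
}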

View File

@@ -0,0 +1,77 @@
namespace Managing.Infrastructure.Databases.PostgreSql;
/// <summary>
/// Configuration settings for SQL query monitoring and loop detection
/// </summary>
public class SqlMonitoringSettings
{
/// <summary>
/// Whether SQL monitoring is enabled globally (default: true)
/// </summary>
public bool Enabled { get; set; } = true;
/// <summary>
/// Whether SQL query logging is enabled (default: true)
/// </summary>
public bool LoggingEnabled { get; set; } = true;
/// <summary>
/// Whether Sentry integration is enabled (default: true)
/// </summary>
public bool SentryEnabled { get; set; } = true;
/// <summary>
/// Whether loop detection is enabled (default: true)
/// </summary>
public bool LoopDetectionEnabled { get; set; } = true;
/// <summary>
/// Whether performance monitoring is enabled (default: true)
/// </summary>
public bool PerformanceMonitoringEnabled { get; set; } = true;
/// <summary>
/// Time window for loop detection in seconds (default: 60)
/// </summary>
public int LoopDetectionWindowSeconds { get; set; } = 60;
/// <summary>
/// Maximum query executions per window for loop detection (default: 100)
/// </summary>
public int MaxQueryExecutionsPerWindow { get; set; } = 100;
/// <summary>
/// Maximum method executions per window for loop detection (default: 50)
/// </summary>
public int MaxMethodExecutionsPerWindow { get; set; } = 50;
/// <summary>
/// Threshold for long-running queries in milliseconds (default: 1000)
/// </summary>
public int LongRunningQueryThresholdMs { get; set; } = 1000;
/// <summary>
/// Multiplier applied to MaxQueryExecutionsPerWindow before a critical Sentry alert is raised (default: 5)
/// </summary>
public int SentryAlertThreshold { get; set; } = 5;
/// <summary>
/// Threshold for slow queries in milliseconds (default: 2000)
/// </summary>
public int SlowQueryThresholdMs { get; set; } = 2000;
/// <summary>
/// Whether to log only slow queries (reduces overhead) (default: false)
/// </summary>
public bool LogSlowQueriesOnly { get; set; } = false;
/// <summary>
/// Whether to log only errors (minimal overhead) (default: false)
/// </summary>
public bool LogErrorsOnly { get; set; } = false;
/// <summary>
/// Data retention period in minutes for monitoring dashboard (default: 30)
/// </summary>
public int DataRetentionMinutes { get; set; } = 30;
}
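
The defaults above enable everything. The flags can be combined for a lower-overhead profile, for example (illustrative values, not shipped defaults):

// Example: keep Sentry alerts and loop detection, but only log queries above the slow threshold.
var lowOverhead = new SqlMonitoringSettings
{
    Enabled = true,
    LoggingEnabled = true,
    LogSlowQueriesOnly = true,           // only queries slower than SlowQueryThresholdMs are logged
    SlowQueryThresholdMs = 2000,
    SentryEnabled = true,
    LoopDetectionEnabled = true,
    PerformanceMonitoringEnabled = false,
    DataRetentionMinutes = 30
};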

View File

@@ -0,0 +1,425 @@
using System.Diagnostics;
using System.Text.Json;
using Microsoft.Extensions.Logging;
namespace Managing.Infrastructure.Databases.PostgreSql;
/// <summary>
/// Comprehensive SQL query logger for monitoring and debugging database operations
/// Provides detailed logging with timing, parameters, and performance metrics
/// </summary>
public class SqlQueryLogger : IDisposable
{
private readonly ILogger<SqlQueryLogger> _logger;
private readonly Stopwatch _stopwatch;
private readonly string _operationId;
private readonly DateTime _startTime;
private readonly string _methodName;
private readonly string _repositoryName;
private readonly Dictionary<string, object> _parameters;
private readonly List<string> _executedQueries;
private bool _disposed = false;
public SqlQueryLogger(ILogger<SqlQueryLogger> logger, string repositoryName, string methodName)
{
_logger = logger;
_repositoryName = repositoryName;
_methodName = methodName;
_operationId = Guid.NewGuid().ToString("N")[..8]; // Short ID for correlation
_startTime = DateTime.UtcNow;
_stopwatch = Stopwatch.StartNew();
_parameters = new Dictionary<string, object>();
_executedQueries = new List<string>();
}
/// <summary>
/// Logs the start of a database operation
/// </summary>
public void LogOperationStart(params (string name, object value)[] parameters)
{
foreach (var (name, value) in parameters)
{
_parameters[name] = value;
}
_logger.LogInformation(
"[SQL-OP-START] {OperationId} | {Repository}.{Method} | Started at {StartTime}",
_operationId, _repositoryName, _methodName, _startTime.ToString("HH:mm:ss.fff"));
}
/// <summary>
/// Logs a SQL query execution with timing and parameters
/// </summary>
public void LogQueryExecution(string query, TimeSpan executionTime, int? rowsAffected = null, Exception? exception = null)
{
_executedQueries.Add(query);
var logLevel = exception != null ? LogLevel.Error :
executionTime.TotalMilliseconds > 1000 ? LogLevel.Warning : LogLevel.Information;
var logMessage = exception != null
? "[SQL-QUERY-ERROR] {OperationId} | {Repository}.{Method} | Query failed after {ExecutionTime}ms | Error: {Error}"
: "[SQL-QUERY] {OperationId} | {Repository}.{Method} | Executed in {ExecutionTime}ms | Rows: {RowsAffected}";
var args = new object[]
{
_operationId, _repositoryName, _methodName, executionTime.TotalMilliseconds,
rowsAffected ?? 0
};
if (exception != null)
{
args[4] = exception.Message;
_logger.LogError(exception, logMessage, args);
// Send SQL error to Sentry
SendSqlErrorToSentry(query, executionTime, exception, rowsAffected);
}
else
{
_logger.Log(logLevel, logMessage, args);
// Send slow query alert to Sentry
if (executionTime.TotalMilliseconds > 2000) // Critical slow query threshold
{
SendSlowQueryToSentry(query, executionTime, rowsAffected);
}
else if (executionTime.TotalMilliseconds > 1000) // Warning threshold
{
SendSlowQueryWarningToSentry(query, executionTime, rowsAffected);
}
}
// Log query details for slow queries or errors
if (executionTime.TotalMilliseconds > 500 || exception != null)
{
_logger.LogWarning(
"[SQL-QUERY-DETAILS] {OperationId} | Query: {Query} | Parameters: {Parameters}",
_operationId,
TruncateQuery(query, 500),
JsonSerializer.Serialize(_parameters, new JsonSerializerOptions { WriteIndented = false }));
}
}
/// <summary>
/// Logs the completion of a database operation with summary
/// </summary>
public void LogOperationComplete(object? result = null, Exception? exception = null)
{
_stopwatch.Stop();
var totalTime = _stopwatch.Elapsed;
var logLevel = exception != null ? LogLevel.Error :
totalTime.TotalMilliseconds > 2000 ? LogLevel.Warning : LogLevel.Information;
var logMessage = exception != null
? "[SQL-OP-ERROR] {OperationId} | {Repository}.{Method} | Failed after {TotalTime}ms | Queries: {QueryCount} | Error: {Error}"
: "[SQL-OP-COMPLETE] {OperationId} | {Repository}.{Method} | Completed in {TotalTime}ms | Queries: {QueryCount} | Result: {ResultType}";
var args = new object[]
{
_operationId, _repositoryName, _methodName, totalTime.TotalMilliseconds,
_executedQueries.Count, result?.GetType().Name ?? "void"
};
if (exception != null)
{
args[5] = exception.Message;
_logger.LogError(exception, logMessage, args);
}
else
{
_logger.Log(logLevel, logMessage, args);
}
// Log operation summary for long-running operations
if (totalTime.TotalMilliseconds > 1000 || _executedQueries.Count > 5)
{
_logger.LogWarning(
"[SQL-OP-SUMMARY] {OperationId} | Parameters: {Parameters} | Query Count: {QueryCount} | Total Time: {TotalTime}ms",
_operationId,
JsonSerializer.Serialize(_parameters, new JsonSerializerOptions { WriteIndented = false }),
_executedQueries.Count,
totalTime.TotalMilliseconds);
}
}
/// <summary>
/// Logs potential loop detection based on query patterns
/// </summary>
public void LogPotentialLoopDetection(string queryPattern, int occurrenceCount)
{
_logger.LogWarning(
"[SQL-LOOP-DETECTED] {OperationId} | {Repository}.{Method} | Pattern '{Pattern}' executed {Count} times | Possible infinite loop!",
_operationId, _repositoryName, _methodName, queryPattern, occurrenceCount);
// Send critical alert to Sentry for loop detection
SendLoopDetectionToSentry(queryPattern, occurrenceCount);
}
/// <summary>
/// Sends loop detection alert to Sentry
/// </summary>
private void SendLoopDetectionToSentry(string queryPattern, int occurrenceCount)
{
try
{
var message = $"SQL Loop Detection: {_repositoryName}.{_methodName}";
var exception = new InvalidOperationException($"Potential infinite SQL loop detected: {queryPattern} executed {occurrenceCount} times");
var sentryId = SentrySdk.CaptureException(exception, scope =>
{
scope.SetTag("operation_id", _operationId);
scope.SetTag("repository", _repositoryName);
scope.SetTag("method", _methodName);
scope.SetTag("query_pattern", queryPattern);
scope.SetTag("alert_type", "sql_loop_detection");
scope.SetTag("environment", Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT") ?? "Unknown");
scope.SetExtra("occurrence_count", occurrenceCount);
scope.SetExtra("operation_duration_ms", _stopwatch.Elapsed.TotalMilliseconds);
scope.SetExtra("parameters", JsonSerializer.Serialize(_parameters, new JsonSerializerOptions { WriteIndented = false }));
scope.SetExtra("executed_queries_count", _executedQueries.Count);
scope.SetExtra("start_time", _startTime.ToString("yyyy-MM-dd HH:mm:ss.fff"));
scope.SetFingerprint(new[] { "sql-loop-detection", _repositoryName, _methodName, queryPattern });
scope.Level = SentryLevel.Error;
scope.AddBreadcrumb(
message: $"SQL loop detected in {_repositoryName}.{_methodName}",
category: "sql-monitoring",
level: BreadcrumbLevel.Error,
data: new Dictionary<string, string>
{
["query_pattern"] = queryPattern,
["occurrence_count"] = occurrenceCount.ToString(),
["operation_id"] = _operationId
}
);
scope.SetExtra("operation_id", _operationId);
scope.SetExtra("repository", _repositoryName);
scope.SetExtra("method", _methodName);
scope.SetExtra("query_pattern", queryPattern);
scope.SetExtra("occurrence_count", occurrenceCount);
scope.SetExtra("start_time", _startTime);
scope.SetExtra("duration_ms", _stopwatch.Elapsed.TotalMilliseconds);
scope.SetExtra("parameters", JsonSerializer.Serialize(_parameters));
});
_logger.LogError(
"[SENTRY-LOOP-ALERT] Sent loop detection alert to Sentry: {SentryId} | {Repository}.{Method} | Pattern: {Pattern} | Count: {Count}",
sentryId, _repositoryName, _methodName, queryPattern, occurrenceCount);
}
catch (Exception ex)
{
_logger.LogError(ex, "[SENTRY-ERROR] Failed to send loop detection alert to Sentry");
}
}
/// <summary>
/// Logs connection state changes
/// </summary>
public void LogConnectionStateChange(string state, TimeSpan? duration = null)
{
var message = duration.HasValue
? "[SQL-CONNECTION] {OperationId} | {Repository}.{Method} | Connection {State} (took {Duration}ms)"
: "[SQL-CONNECTION] {OperationId} | {Repository}.{Method} | Connection {State}";
var args = duration.HasValue
? new object[] { _operationId, _repositoryName, _methodName, state, duration.Value.TotalMilliseconds }
: new object[] { _operationId, _repositoryName, _methodName, state };
_logger.LogInformation(message, args);
}
/// <summary>
/// Sends SQL error to Sentry
/// </summary>
private void SendSqlErrorToSentry(string query, TimeSpan executionTime, Exception exception, int? rowsAffected)
{
try
{
var sentryId = SentrySdk.CaptureException(exception, scope =>
{
scope.SetTag("operation_id", _operationId);
scope.SetTag("repository", _repositoryName);
scope.SetTag("method", _methodName);
scope.SetTag("alert_type", "sql_error");
scope.SetTag("environment", Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT") ?? "Unknown");
scope.SetExtra("query", TruncateQuery(query, 1000));
scope.SetExtra("execution_time_ms", executionTime.TotalMilliseconds);
scope.SetExtra("rows_affected", rowsAffected ?? 0);
scope.SetExtra("parameters", JsonSerializer.Serialize(_parameters, new JsonSerializerOptions { WriteIndented = false }));
scope.SetExtra("operation_duration_ms", _stopwatch.Elapsed.TotalMilliseconds);
scope.SetFingerprint(new[] { "sql-error", _repositoryName, _methodName, exception.GetType().Name });
scope.Level = SentryLevel.Error;
scope.AddBreadcrumb(
message: $"SQL error in {_repositoryName}.{_methodName}",
category: "sql-monitoring",
level: BreadcrumbLevel.Error,
data: new Dictionary<string, string>
{
["query"] = TruncateQuery(query, 200),
["execution_time_ms"] = executionTime.TotalMilliseconds.ToString(),
["operation_id"] = _operationId
}
);
scope.SetExtra("operation_id", _operationId);
scope.SetExtra("repository", _repositoryName);
scope.SetExtra("method", _methodName);
scope.SetExtra("query", TruncateQuery(query, 1000));
scope.SetExtra("execution_time_ms", executionTime.TotalMilliseconds);
scope.SetExtra("rows_affected", rowsAffected ?? 0);
scope.SetExtra("error_type", exception.GetType().Name);
scope.SetExtra("error_message", exception.Message);
});
_logger.LogError(
"[SENTRY-SQL-ERROR] Sent SQL error to Sentry: {SentryId} | {Repository}.{Method} | {Error}",
sentryId, _repositoryName, _methodName, exception.Message);
}
catch (Exception ex)
{
_logger.LogError(ex, "[SENTRY-ERROR] Failed to send SQL error to Sentry");
}
}
/// <summary>
/// Sends critical slow query alert to Sentry
/// </summary>
private void SendSlowQueryToSentry(string query, TimeSpan executionTime, int? rowsAffected)
{
try
{
var message = $"Critical Slow SQL Query: {_repositoryName}.{_methodName}";
var exception = new TimeoutException($"SQL query took {executionTime.TotalMilliseconds:F0}ms to execute");
var sentryId = SentrySdk.CaptureException(exception, scope =>
{
scope.SetTag("operation_id", _operationId);
scope.SetTag("repository", _repositoryName);
scope.SetTag("method", _methodName);
scope.SetTag("alert_type", "slow_query_critical");
scope.SetTag("environment", Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT") ?? "Unknown");
scope.SetExtra("query", TruncateQuery(query, 1000));
scope.SetExtra("execution_time_ms", executionTime.TotalMilliseconds);
scope.SetExtra("rows_affected", rowsAffected ?? 0);
scope.SetExtra("parameters", JsonSerializer.Serialize(_parameters, new JsonSerializerOptions { WriteIndented = false }));
scope.SetExtra("operation_duration_ms", _stopwatch.Elapsed.TotalMilliseconds);
scope.SetFingerprint(new[] { "slow-query-critical", _repositoryName, _methodName });
scope.Level = SentryLevel.Error;
scope.AddBreadcrumb(
message: $"Critical slow SQL query in {_repositoryName}.{_methodName}",
category: "sql-monitoring",
level: BreadcrumbLevel.Error,
data: new Dictionary<string, string>
{
["query"] = TruncateQuery(query, 200),
["execution_time_ms"] = executionTime.TotalMilliseconds.ToString(),
["operation_id"] = _operationId
}
);
scope.SetExtra("operation_id", _operationId);
scope.SetExtra("repository", _repositoryName);
scope.SetExtra("method", _methodName);
scope.SetExtra("query", TruncateQuery(query, 1000));
scope.SetExtra("execution_time_ms", executionTime.TotalMilliseconds);
scope.SetExtra("rows_affected", rowsAffected ?? 0);
scope.SetExtra("threshold_ms", 2000);
scope.SetExtra("severity", "critical");
});
_logger.LogError(
"[SENTRY-SLOW-QUERY] Sent critical slow query alert to Sentry: {SentryId} | {Repository}.{Method} | {Time}ms",
sentryId, _repositoryName, _methodName, executionTime.TotalMilliseconds);
}
catch (Exception ex)
{
_logger.LogError(ex, "[SENTRY-ERROR] Failed to send slow query alert to Sentry");
}
}
/// <summary>
/// Sends slow query warning to Sentry
/// </summary>
private void SendSlowQueryWarningToSentry(string query, TimeSpan executionTime, int? rowsAffected)
{
try
{
var message = $"Slow SQL Query Warning: {_repositoryName}.{_methodName}";
var sentryId = SentrySdk.CaptureMessage(message, scope =>
{
scope.SetTag("operation_id", _operationId);
scope.SetTag("repository", _repositoryName);
scope.SetTag("method", _methodName);
scope.SetTag("alert_type", "slow_query_warning");
scope.SetTag("environment", Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT") ?? "Unknown");
scope.SetExtra("query", TruncateQuery(query, 1000));
scope.SetExtra("execution_time_ms", executionTime.TotalMilliseconds);
scope.SetExtra("rows_affected", rowsAffected ?? 0);
scope.SetExtra("parameters", JsonSerializer.Serialize(_parameters, new JsonSerializerOptions { WriteIndented = false }));
scope.SetExtra("operation_duration_ms", _stopwatch.Elapsed.TotalMilliseconds);
scope.SetFingerprint(new[] { "slow-query-warning", _repositoryName, _methodName });
scope.Level = SentryLevel.Warning;
scope.AddBreadcrumb(
message: $"Slow SQL query warning in {_repositoryName}.{_methodName}",
category: "sql-monitoring",
level: BreadcrumbLevel.Warning,
data: new Dictionary<string, string>
{
["query"] = TruncateQuery(query, 200),
["execution_time_ms"] = executionTime.TotalMilliseconds.ToString(),
["operation_id"] = _operationId
}
);
scope.SetExtra("operation_id", _operationId);
scope.SetExtra("repository", _repositoryName);
scope.SetExtra("method", _methodName);
scope.SetExtra("query", TruncateQuery(query, 1000));
scope.SetExtra("execution_time_ms", executionTime.TotalMilliseconds);
scope.SetExtra("rows_affected", rowsAffected ?? 0);
scope.SetExtra("threshold_ms", 1000);
scope.SetExtra("severity", "warning");
});
_logger.LogWarning(
"[SENTRY-SLOW-QUERY-WARNING] Sent slow query warning to Sentry: {SentryId} | {Repository}.{Method} | {Time}ms",
sentryId, _repositoryName, _methodName, executionTime.TotalMilliseconds);
}
catch (Exception ex)
{
_logger.LogError(ex, "[SENTRY-ERROR] Failed to send slow query warning to Sentry");
}
}
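    /// <summary>
    /// Truncates a SQL query string to the specified maximum length so log entries and Sentry payloads stay bounded
    /// </summary>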
private static string TruncateQuery(string query, int maxLength)
{
if (string.IsNullOrEmpty(query) || query.Length <= maxLength)
return query;
return query[..maxLength] + "... [TRUNCATED]";
}
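    /// <summary>
    /// Stops the operation stopwatch and marks the logger as disposed; safe to call multiple times
    /// </summary>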
public void Dispose()
{
if (!_disposed)
{
_stopwatch?.Stop();
_disposed = true;
}
}
}
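// Hypothetical usage sketch only: the SqlQueryLogger constructor is not shown in this file, so the
// instantiation below is an assumption, and the names used (repositoryName, methodName, GetByIdAsync,
// accountId) are illustrative rather than part of the real API. Adjust to the actual constructor.
//
// using var queryLogger = new SqlQueryLogger(logger, "AccountRepository", nameof(GetByIdAsync),
//     ("accountId", accountId));
// var stopwatch = Stopwatch.StartNew();
// var account = await _context.Accounts.FindAsync(accountId);
// stopwatch.Stop();
// queryLogger.LogQueryExecution("SELECT * FROM accounts WHERE id = @id", stopwatch.Elapsed, rowsAffected: 1);
// queryLogger.LogOperationComplete(account);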