Add query monitoring with Sentry alerts + fix position list check in DB for backtest

2025-10-10 00:15:02 +07:00
parent ffb98fe359
commit e4c2f8b7a5
24 changed files with 3340 additions and 179 deletions


@@ -0,0 +1,425 @@
using System.Diagnostics;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Sentry;

namespace Managing.Infrastructure.Databases.PostgreSql;
/// <summary>
/// SQL query logger for monitoring and debugging database operations.
/// Provides detailed logging with timing, parameters, and performance metrics,
/// and forwards errors, slow queries, and suspected query loops to Sentry.
/// Create one instance per repository operation and dispose it when the operation completes
/// (see the usage sketch after the class).
/// </summary>
public class SqlQueryLogger : IDisposable
{
private readonly ILogger<SqlQueryLogger> _logger;
private readonly Stopwatch _stopwatch;
private readonly string _operationId;
private readonly DateTime _startTime;
private readonly string _methodName;
private readonly string _repositoryName;
private readonly Dictionary<string, object> _parameters;
private readonly List<string> _executedQueries;
private bool _disposed = false;
public SqlQueryLogger(ILogger<SqlQueryLogger> logger, string repositoryName, string methodName)
{
_logger = logger;
_repositoryName = repositoryName;
_methodName = methodName;
_operationId = Guid.NewGuid().ToString("N")[..8]; // Short ID for correlation
_startTime = DateTime.UtcNow;
_stopwatch = Stopwatch.StartNew();
_parameters = new Dictionary<string, object>();
_executedQueries = new List<string>();
}
/// <summary>
/// Logs the start of a database operation and records its parameters for later log entries
/// </summary>
public void LogOperationStart(params (string name, object value)[] parameters)
{
foreach (var (name, value) in parameters)
{
_parameters[name] = value;
}
_logger.LogInformation(
"[SQL-OP-START] {OperationId} | {Repository}.{Method} | Started at {StartTime}",
_operationId, _repositoryName, _methodName, _startTime.ToString("HH:mm:ss.fff"));
}
/// <summary>
/// Logs a single SQL query execution with its timing and parameters, and raises Sentry alerts for errors and slow queries
/// </summary>
public void LogQueryExecution(string query, TimeSpan executionTime, int? rowsAffected = null, Exception? exception = null)
{
_executedQueries.Add(query);
var logLevel = exception != null ? LogLevel.Error :
executionTime.TotalMilliseconds > 1000 ? LogLevel.Warning : LogLevel.Information;
var logMessage = exception != null
? "[SQL-QUERY-ERROR] {OperationId} | {Repository}.{Method} | Query failed after {ExecutionTime}ms | Error: {Error}"
: "[SQL-QUERY] {OperationId} | {Repository}.{Method} | Executed in {ExecutionTime}ms | Rows: {RowsAffected}";
var args = new object[]
{
_operationId, _repositoryName, _methodName, executionTime.TotalMilliseconds,
rowsAffected ?? 0
};
if (exception != null)
{
args[4] = exception.Message;
_logger.LogError(exception, logMessage, args);
// Send SQL error to Sentry
SendSqlErrorToSentry(query, executionTime, exception, rowsAffected);
}
else
{
_logger.Log(logLevel, logMessage, args);
// Send slow query alert to Sentry
if (executionTime.TotalMilliseconds > 2000) // Critical slow query threshold
{
SendSlowQueryToSentry(query, executionTime, rowsAffected);
}
else if (executionTime.TotalMilliseconds > 1000) // Warning threshold
{
SendSlowQueryWarningToSentry(query, executionTime, rowsAffected);
}
}
// Log query details for slow queries or errors
if (executionTime.TotalMilliseconds > 500 || exception != null)
{
_logger.LogWarning(
"[SQL-QUERY-DETAILS] {OperationId} | Query: {Query} | Parameters: {Parameters}",
_operationId,
TruncateQuery(query, 500),
JsonSerializer.Serialize(_parameters, new JsonSerializerOptions { WriteIndented = false }));
}
}
/// <summary>
/// Logs the completion of a database operation with summary
/// </summary>
public void LogOperationComplete(object? result = null, Exception? exception = null)
{
_stopwatch.Stop();
var totalTime = _stopwatch.Elapsed;
var logLevel = exception != null ? LogLevel.Error :
totalTime.TotalMilliseconds > 2000 ? LogLevel.Warning : LogLevel.Information;
var logMessage = exception != null
? "[SQL-OP-ERROR] {OperationId} | {Repository}.{Method} | Failed after {TotalTime}ms | Queries: {QueryCount} | Error: {Error}"
: "[SQL-OP-COMPLETE] {OperationId} | {Repository}.{Method} | Completed in {TotalTime}ms | Queries: {QueryCount} | Result: {ResultType}";
var args = new object[]
{
_operationId, _repositoryName, _methodName, totalTime.TotalMilliseconds,
_executedQueries.Count, result?.GetType().Name ?? "void"
};
if (exception != null)
{
args[5] = exception.Message;
_logger.LogError(exception, logMessage, args);
}
else
{
_logger.Log(logLevel, logMessage, args);
}
// Log operation summary for long-running operations
if (totalTime.TotalMilliseconds > 1000 || _executedQueries.Count > 5)
{
_logger.LogWarning(
"[SQL-OP-SUMMARY] {OperationId} | Parameters: {Parameters} | Query Count: {QueryCount} | Total Time: {TotalTime}ms",
_operationId,
JsonSerializer.Serialize(_parameters, new JsonSerializerOptions { WriteIndented = false }),
_executedQueries.Count,
totalTime.TotalMilliseconds);
}
}
/// <summary>
/// Logs a potential query loop when the caller reports the same query pattern executed repeatedly
/// </summary>
public void LogPotentialLoopDetection(string queryPattern, int occurrenceCount)
{
_logger.LogWarning(
"[SQL-LOOP-DETECTED] {OperationId} | {Repository}.{Method} | Pattern '{Pattern}' executed {Count} times | Possible infinite loop!",
_operationId, _repositoryName, _methodName, queryPattern, occurrenceCount);
// Send critical alert to Sentry for loop detection
SendLoopDetectionToSentry(queryPattern, occurrenceCount);
}
/// <summary>
/// Sends loop detection alert to Sentry
/// </summary>
private void SendLoopDetectionToSentry(string queryPattern, int occurrenceCount)
{
try
{
var message = $"SQL Loop Detection: {_repositoryName}.{_methodName}";
var exception = new InvalidOperationException($"Potential infinite SQL loop detected: {queryPattern} executed {occurrenceCount} times");
var sentryId = SentrySdk.CaptureException(exception, scope =>
{
scope.SetTag("operation_id", _operationId);
scope.SetTag("repository", _repositoryName);
scope.SetTag("method", _methodName);
scope.SetTag("query_pattern", queryPattern);
scope.SetTag("alert_type", "sql_loop_detection");
scope.SetTag("environment", Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT") ?? "Unknown");
scope.SetExtra("occurrence_count", occurrenceCount);
scope.SetExtra("operation_duration_ms", _stopwatch.Elapsed.TotalMilliseconds);
scope.SetExtra("parameters", JsonSerializer.Serialize(_parameters, new JsonSerializerOptions { WriteIndented = false }));
scope.SetExtra("executed_queries_count", _executedQueries.Count);
scope.SetExtra("start_time", _startTime.ToString("yyyy-MM-dd HH:mm:ss.fff"));
scope.SetFingerprint(new[] { "sql-loop-detection", _repositoryName, _methodName, queryPattern });
scope.Level = SentryLevel.Error;
scope.AddBreadcrumb(
message: $"SQL loop detected in {_repositoryName}.{_methodName}",
category: "sql-monitoring",
level: BreadcrumbLevel.Error,
data: new Dictionary<string, string>
{
["query_pattern"] = queryPattern,
["occurrence_count"] = occurrenceCount.ToString(),
["operation_id"] = _operationId
}
);
scope.SetExtra("operation_id", _operationId);
scope.SetExtra("repository", _repositoryName);
scope.SetExtra("method", _methodName);
scope.SetExtra("query_pattern", queryPattern);
scope.SetExtra("occurrence_count", occurrenceCount);
scope.SetExtra("start_time", _startTime);
scope.SetExtra("duration_ms", _stopwatch.Elapsed.TotalMilliseconds);
scope.SetExtra("parameters", JsonSerializer.Serialize(_parameters));
});
_logger.LogError(
"[SENTRY-LOOP-ALERT] Sent loop detection alert to Sentry: {SentryId} | {Repository}.{Method} | Pattern: {Pattern} | Count: {Count}",
sentryId, _repositoryName, _methodName, queryPattern, occurrenceCount);
}
catch (Exception ex)
{
_logger.LogError(ex, "[SENTRY-ERROR] Failed to send loop detection alert to Sentry");
}
}
/// <summary>
/// Logs connection state changes
/// </summary>
public void LogConnectionStateChange(string state, TimeSpan? duration = null)
{
var message = duration.HasValue
? "[SQL-CONNECTION] {OperationId} | {Repository}.{Method} | Connection {State} (took {Duration}ms)"
: "[SQL-CONNECTION] {OperationId} | {Repository}.{Method} | Connection {State}";
var args = duration.HasValue
? new object[] { _operationId, _repositoryName, _methodName, state, duration.Value.TotalMilliseconds }
: new object[] { _operationId, _repositoryName, _methodName, state };
_logger.LogInformation(message, args);
}
/// <summary>
/// Sends SQL error to Sentry
/// </summary>
private void SendSqlErrorToSentry(string query, TimeSpan executionTime, Exception exception, int? rowsAffected)
{
try
{
var sentryId = SentrySdk.CaptureException(exception, scope =>
{
scope.SetTag("operation_id", _operationId);
scope.SetTag("repository", _repositoryName);
scope.SetTag("method", _methodName);
scope.SetTag("alert_type", "sql_error");
scope.SetTag("environment", Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT") ?? "Unknown");
scope.SetExtra("query", TruncateQuery(query, 1000));
scope.SetExtra("execution_time_ms", executionTime.TotalMilliseconds);
scope.SetExtra("rows_affected", rowsAffected ?? 0);
scope.SetExtra("parameters", JsonSerializer.Serialize(_parameters, new JsonSerializerOptions { WriteIndented = false }));
scope.SetExtra("operation_duration_ms", _stopwatch.Elapsed.TotalMilliseconds);
scope.SetFingerprint(new[] { "sql-error", _repositoryName, _methodName, exception.GetType().Name });
scope.Level = SentryLevel.Error;
scope.AddBreadcrumb(
message: $"SQL error in {_repositoryName}.{_methodName}",
category: "sql-monitoring",
level: BreadcrumbLevel.Error,
data: new Dictionary<string, string>
{
["query"] = TruncateQuery(query, 200),
["execution_time_ms"] = executionTime.TotalMilliseconds.ToString(),
["operation_id"] = _operationId
}
);
scope.SetExtra("operation_id", _operationId);
scope.SetExtra("repository", _repositoryName);
scope.SetExtra("method", _methodName);
scope.SetExtra("query", TruncateQuery(query, 1000));
scope.SetExtra("execution_time_ms", executionTime.TotalMilliseconds);
scope.SetExtra("rows_affected", rowsAffected ?? 0);
scope.SetExtra("error_type", exception.GetType().Name);
scope.SetExtra("error_message", exception.Message);
});
_logger.LogError(
"[SENTRY-SQL-ERROR] Sent SQL error to Sentry: {SentryId} | {Repository}.{Method} | {Error}",
sentryId, _repositoryName, _methodName, exception.Message);
}
catch (Exception ex)
{
_logger.LogError(ex, "[SENTRY-ERROR] Failed to send SQL error to Sentry");
}
}
/// <summary>
/// Sends critical slow query alert to Sentry
/// </summary>
private void SendSlowQueryToSentry(string query, TimeSpan executionTime, int? rowsAffected)
{
try
{
var message = $"Critical Slow SQL Query: {_repositoryName}.{_methodName}";
var exception = new TimeoutException($"SQL query took {executionTime.TotalMilliseconds:F0}ms to execute");
var sentryId = SentrySdk.CaptureException(exception, scope =>
{
scope.SetTag("operation_id", _operationId);
scope.SetTag("repository", _repositoryName);
scope.SetTag("method", _methodName);
scope.SetTag("alert_type", "slow_query_critical");
scope.SetTag("environment", Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT") ?? "Unknown");
scope.SetExtra("query", TruncateQuery(query, 1000));
scope.SetExtra("execution_time_ms", executionTime.TotalMilliseconds);
scope.SetExtra("rows_affected", rowsAffected ?? 0);
scope.SetExtra("parameters", JsonSerializer.Serialize(_parameters, new JsonSerializerOptions { WriteIndented = false }));
scope.SetExtra("operation_duration_ms", _stopwatch.Elapsed.TotalMilliseconds);
scope.SetFingerprint(new[] { "slow-query-critical", _repositoryName, _methodName });
scope.Level = SentryLevel.Error;
scope.AddBreadcrumb(
message: $"Critical slow SQL query in {_repositoryName}.{_methodName}",
category: "sql-monitoring",
level: BreadcrumbLevel.Error,
data: new Dictionary<string, string>
{
["query"] = TruncateQuery(query, 200),
["execution_time_ms"] = executionTime.TotalMilliseconds.ToString(),
["operation_id"] = _operationId
}
);
scope.SetExtra("operation_id", _operationId);
scope.SetExtra("repository", _repositoryName);
scope.SetExtra("method", _methodName);
scope.SetExtra("query", TruncateQuery(query, 1000));
scope.SetExtra("execution_time_ms", executionTime.TotalMilliseconds);
scope.SetExtra("rows_affected", rowsAffected ?? 0);
scope.SetExtra("threshold_ms", 2000);
scope.SetExtra("severity", "critical");
});
_logger.LogError(
"[SENTRY-SLOW-QUERY] Sent critical slow query alert to Sentry: {SentryId} | {Repository}.{Method} | {Time}ms",
sentryId, _repositoryName, _methodName, executionTime.TotalMilliseconds);
}
catch (Exception ex)
{
_logger.LogError(ex, "[SENTRY-ERROR] Failed to send slow query alert to Sentry");
}
}
/// <summary>
/// Sends slow query warning to Sentry
/// </summary>
private void SendSlowQueryWarningToSentry(string query, TimeSpan executionTime, int? rowsAffected)
{
try
{
var message = $"Slow SQL Query Warning: {_repositoryName}.{_methodName}";
var sentryId = SentrySdk.CaptureMessage(message, scope =>
{
scope.SetTag("operation_id", _operationId);
scope.SetTag("repository", _repositoryName);
scope.SetTag("method", _methodName);
scope.SetTag("alert_type", "slow_query_warning");
scope.SetTag("environment", Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT") ?? "Unknown");
scope.SetExtra("query", TruncateQuery(query, 1000));
scope.SetExtra("execution_time_ms", executionTime.TotalMilliseconds);
scope.SetExtra("rows_affected", rowsAffected ?? 0);
scope.SetExtra("parameters", JsonSerializer.Serialize(_parameters, new JsonSerializerOptions { WriteIndented = false }));
scope.SetExtra("operation_duration_ms", _stopwatch.Elapsed.TotalMilliseconds);
scope.SetFingerprint(new[] { "slow-query-warning", _repositoryName, _methodName });
scope.Level = SentryLevel.Warning;
scope.AddBreadcrumb(
message: $"Slow SQL query warning in {_repositoryName}.{_methodName}",
category: "sql-monitoring",
level: BreadcrumbLevel.Warning,
data: new Dictionary<string, string>
{
["query"] = TruncateQuery(query, 200),
["execution_time_ms"] = executionTime.TotalMilliseconds.ToString(),
["operation_id"] = _operationId
}
);
scope.SetExtra("operation_id", _operationId);
scope.SetExtra("repository", _repositoryName);
scope.SetExtra("method", _methodName);
scope.SetExtra("query", TruncateQuery(query, 1000));
scope.SetExtra("execution_time_ms", executionTime.TotalMilliseconds);
scope.SetExtra("rows_affected", rowsAffected ?? 0);
scope.SetExtra("threshold_ms", 1000);
scope.SetExtra("severity", "warning");
});
_logger.LogWarning(
"[SENTRY-SLOW-QUERY-WARNING] Sent slow query warning to Sentry: {SentryId} | {Repository}.{Method} | {Time}ms",
sentryId, _repositoryName, _methodName, executionTime.TotalMilliseconds);
}
catch (Exception ex)
{
_logger.LogError(ex, "[SENTRY-ERROR] Failed to send slow query warning to Sentry");
}
}
private static string TruncateQuery(string query, int maxLength)
{
if (string.IsNullOrEmpty(query) || query.Length <= maxLength)
return query;
return query[..maxLength] + "... [TRUNCATED]";
}
public void Dispose()
{
if (!_disposed)
{
_stopwatch.Stop();
_disposed = true;
}
}
}
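
For reference, a minimal usage sketch of the logger from a repository method. This example is not part of the commit: the PositionRepository class, _connectionFactory helper, injected ILogger<SqlQueryLogger> field, direct Npgsql usage, and the SQL text are hypothetical placeholders; only the SqlQueryLogger calls reflect the API added above.

// Hypothetical repository method; names below are illustrative, not from this commit.
public async Task<int> CountOpenPositionsAsync(string accountId)
{
    // _sqlLogger: an injected ILogger<SqlQueryLogger> (assumed).
    using var sqlLogger = new SqlQueryLogger(_sqlLogger, nameof(PositionRepository), nameof(CountOpenPositionsAsync));
    sqlLogger.LogOperationStart(("accountId", accountId));

    const string query =
        "SELECT COUNT(*) FROM positions WHERE account_id = @accountId AND closed_at IS NULL";
    var queryTimer = Stopwatch.StartNew();
    try
    {
        await using var connection = await _connectionFactory.OpenConnectionAsync(); // assumed helper returning NpgsqlConnection
        await using var command = new NpgsqlCommand(query, connection);
        command.Parameters.AddWithValue("accountId", accountId);
        var count = Convert.ToInt32(await command.ExecuteScalarAsync());
        queryTimer.Stop();

        // One log entry per executed query; errors and slow queries are escalated to Sentry internally.
        sqlLogger.LogQueryExecution(query, queryTimer.Elapsed, rowsAffected: 1);
        sqlLogger.LogOperationComplete(count);
        return count;
    }
    catch (Exception ex)
    {
        queryTimer.Stop();
        sqlLogger.LogQueryExecution(query, queryTimer.Elapsed, exception: ex);
        sqlLogger.LogOperationComplete(exception: ex);
        throw;
    }
}

Creating the logger in a using block keeps the stopwatch bounded to the method even if an early return or exception skips LogOperationComplete.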