using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Sentry;

namespace Managing.Infrastructure.Databases.PostgreSql;

/// <summary>
/// SQL loop detection service with Sentry integration.
/// Monitors query patterns and execution frequency per repository method and sends
/// critical alerts to Sentry when suspected loops, slow queries, or SQL errors are detected.
/// </summary>
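/// <remarks>
/// Minimal usage sketch (illustrative only; the DI registration, configuration section name,
/// and repository names shown here are assumptions, not part of this file):
/// <code>
/// // Composition root
/// services.Configure&lt;SqlMonitoringSettings&gt;(configuration.GetSection("SqlMonitoring"));
/// services.AddSingleton&lt;SentrySqlMonitoringService&gt;();
///
/// // In a repository, after timing a query
/// if (_sqlMonitoring.IsLoopDetectionEnabled())
/// {
///     _sqlMonitoring.TrackQueryExecution("OrderRepository", "GetOrdersAsync", "SELECT:orders", elapsed);
/// }
/// </code>
/// </remarks>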
public class SentrySqlMonitoringService : IDisposable
{
    private readonly ILogger<SentrySqlMonitoringService> _logger;
    private readonly SqlMonitoringSettings _settings;
    private readonly ConcurrentDictionary<string, QueryExecutionTracker> _queryTrackers;
    private readonly Timer _cleanupTimer;

    public SentrySqlMonitoringService(ILogger<SentrySqlMonitoringService> logger, IOptions<SqlMonitoringSettings> settings)
    {
        _logger = logger;
        _settings = settings.Value;
        _queryTrackers = new ConcurrentDictionary<string, QueryExecutionTracker>();

        // Set up a cleanup timer that removes stale tracking data every minute
        _cleanupTimer = new Timer(CleanupOldTrackers, null, TimeSpan.FromMinutes(1), TimeSpan.FromMinutes(1));
    }

    /// <summary>
    /// Tracks a query execution and detects potential loops, with Sentry integration.
    /// </summary>
    /// <param name="repositoryName">Name of the repository executing the query</param>
    /// <param name="methodName">Name of the method executing the query</param>
    /// <param name="queryPattern">Pattern or hash of the query being executed</param>
    /// <param name="executionTime">Time taken to execute the query</param>
    /// <returns>True if a potential loop is detected</returns>
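    /// <example>
    /// Illustrative call pattern (the repository, query, and pattern string below are
    /// placeholders, not part of this codebase):
    /// <code>
    /// var stopwatch = Stopwatch.StartNew();
    /// var rows = await connection.QueryAsync&lt;Account&gt;(sql);
    /// stopwatch.Stop();
    ///
    /// var loopSuspected = _sqlMonitoring.TrackQueryExecution(
    ///     "AccountRepository", "GetActiveAccountsAsync", "SELECT:accounts:active", stopwatch.Elapsed);
    /// </code>
    /// </example>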
    public bool TrackQueryExecution(string repositoryName, string methodName, string queryPattern, TimeSpan executionTime)
    {
        var key = $"{repositoryName}.{methodName}.{queryPattern}";
        var now = DateTime.UtcNow;
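
        // Upsert the tracker for this repository/method/pattern combination.
        // Note: ConcurrentDictionary.AddOrUpdate may invoke the update delegate more than
        // once and does not run it under a lock, and the delegate below mutates the existing
        // tracker in place, so counts and timings are best-effort under heavy concurrency
        // rather than strictly exact.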
        var tracker = _queryTrackers.AddOrUpdate(key,
            new QueryExecutionTracker
            {
                RepositoryName = repositoryName,
                MethodName = methodName,
                QueryPattern = queryPattern,
                FirstExecution = now,
                LastExecution = now,
                ExecutionCount = 1,
                TotalExecutionTime = executionTime,
                MaxExecutionTime = executionTime,
                MinExecutionTime = executionTime
            },
            (k, existing) =>
            {
                existing.LastExecution = now;
                existing.ExecutionCount++;
                existing.TotalExecutionTime += executionTime;
                existing.MaxExecutionTime = existing.MaxExecutionTime > executionTime ? existing.MaxExecutionTime : executionTime;
                existing.MinExecutionTime = existing.MinExecutionTime < executionTime ? existing.MinExecutionTime : executionTime;
                return existing;
            });

        // Check for potential loop conditions
        var timeSinceFirst = now - tracker.FirstExecution;
        var executionsPerMinute = tracker.ExecutionCount / Math.Max(timeSinceFirst.TotalMinutes, 0.1);

        var isLoopDetected = false;
        var isCriticalAlert = false;
        var reasons = new List<string>();
        var sentryTags = new Dictionary<string, string>();
        var sentryExtras = new Dictionary<string, object>();
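
        // Detection heuristics applied below (thresholds are the literals and settings used
        // in this method):
        //   1. Frequency: more than 20 executions/minute flags a loop; above 50/minute it
        //      escalates to a critical Sentry alert.
        //   2. Volume: more than MaxQueryExecutionsPerWindow executions flags a loop;
        //      exceeding SentryAlertThreshold times that limit escalates it.
        //   3. Burst: more than 5 executions within the first 10 seconds is critical.
        //   4. Latency: more than 3 executions averaging over 1,000 ms flags a loop; an
        //      average above 5 seconds escalates it.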

        // Check execution frequency
        if (executionsPerMinute > 20)
        {
            isLoopDetected = true;
            reasons.Add($"High frequency: {executionsPerMinute:F1} executions/minute");

            if (executionsPerMinute > 50) // Critical frequency threshold
            {
                isCriticalAlert = true;
                sentryTags["alert_level"] = "critical";
                sentryTags["issue_type"] = "high_frequency_query";
            }
        }

        // Check total execution count in window
        if (tracker.ExecutionCount > _settings.MaxQueryExecutionsPerWindow)
        {
            isLoopDetected = true;
            reasons.Add($"High count: {tracker.ExecutionCount} executions in {timeSinceFirst.TotalMinutes:F1} minutes");

            if (tracker.ExecutionCount > _settings.SentryAlertThreshold * _settings.MaxQueryExecutionsPerWindow)
            {
                isCriticalAlert = true;
                sentryTags["alert_level"] = "critical";
                sentryTags["issue_type"] = "high_execution_count";
            }
        }

        // Check for rapid successive executions
        if (tracker.ExecutionCount > 5 && timeSinceFirst.TotalSeconds < 10)
        {
            isLoopDetected = true;
            isCriticalAlert = true;
            reasons.Add($"Rapid execution: {tracker.ExecutionCount} executions in {timeSinceFirst.TotalSeconds:F1} seconds");
            sentryTags["alert_level"] = "critical";
            sentryTags["issue_type"] = "rapid_execution";
        }

        // Check for consistently slow queries
        if (tracker.ExecutionCount > 3 && tracker.AverageExecutionTime.TotalMilliseconds > 1000)
        {
            isLoopDetected = true;
            reasons.Add($"Consistently slow: {tracker.AverageExecutionTime.TotalMilliseconds:F0}ms average");

            if (tracker.AverageExecutionTime > TimeSpan.FromSeconds(5)) // Critical slow query threshold
            {
                isCriticalAlert = true;
                sentryTags["alert_level"] = "critical";
                sentryTags["issue_type"] = "slow_query";
            }
        }

        // Prepare Sentry data
        sentryTags["repository"] = repositoryName;
        sentryTags["method"] = methodName;
        sentryTags["query_pattern"] = queryPattern;
        sentryTags["environment"] = Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT") ?? "Unknown";

        sentryExtras["execution_count"] = tracker.ExecutionCount;
        sentryExtras["executions_per_minute"] = executionsPerMinute;
        sentryExtras["average_execution_time_ms"] = tracker.AverageExecutionTime.TotalMilliseconds;
        sentryExtras["min_execution_time_ms"] = tracker.MinExecutionTime.TotalMilliseconds;
        sentryExtras["max_execution_time_ms"] = tracker.MaxExecutionTime.TotalMilliseconds;
        sentryExtras["total_execution_time_ms"] = tracker.TotalExecutionTime.TotalMilliseconds;
        sentryExtras["first_execution"] = tracker.FirstExecution.ToString("yyyy-MM-dd HH:mm:ss.fff");
        sentryExtras["last_execution"] = tracker.LastExecution.ToString("yyyy-MM-dd HH:mm:ss.fff");
        sentryExtras["time_window_minutes"] = timeSinceFirst.TotalMinutes;
        sentryExtras["detection_reasons"] = string.Join("; ", reasons);

        if (isLoopDetected)
        {
            _logger.LogWarning(
                "[SQL-LOOP-DETECTED] {Repository}.{Method} | Pattern: {Pattern} | Count: {Count} | Reasons: {Reasons} | Avg Time: {AvgTime}ms",
                repositoryName, methodName, queryPattern, tracker.ExecutionCount,
                string.Join(", ", reasons), tracker.AverageExecutionTime.TotalMilliseconds);

            // Log detailed execution history
            _logger.LogWarning(
                "[SQL-LOOP-DETAILS] {Repository}.{Method} | First: {First} | Last: {Last} | Min: {Min}ms | Max: {Max}ms | Total: {Total}ms",
                repositoryName, methodName, tracker.FirstExecution.ToString("HH:mm:ss.fff"),
                tracker.LastExecution.ToString("HH:mm:ss.fff"), tracker.MinExecutionTime.TotalMilliseconds,
                tracker.MaxExecutionTime.TotalMilliseconds, tracker.TotalExecutionTime.TotalMilliseconds);
        }

        // Send to Sentry: critical alerts as exceptions, other detections as warnings
        if (isCriticalAlert)
        {
            SendCriticalAlertToSentry(repositoryName, methodName, queryPattern, reasons, sentryTags, sentryExtras);
        }
        else if (isLoopDetected)
        {
            SendWarningToSentry(repositoryName, methodName, queryPattern, reasons, sentryTags, sentryExtras);
        }

        return isLoopDetected;
    }

    /// <summary>
    /// Sends a critical alert to Sentry for immediate attention
    /// </summary>
    private void SendCriticalAlertToSentry(string repositoryName, string methodName, string queryPattern,
        List<string> reasons, Dictionary<string, string> tags, Dictionary<string, object> extras)
    {
        try
        {
            var exception = new InvalidOperationException($"Potential infinite SQL loop detected: {string.Join(", ", reasons)}");

            // Add SQL-specific data to the exception
            exception.Data["Repository"] = repositoryName;
            exception.Data["Method"] = methodName;
            exception.Data["QueryPattern"] = queryPattern;
            exception.Data["DetectionReasons"] = string.Join("; ", reasons);

            var sentryId = SentrySdk.CaptureException(exception, scope =>
            {
                // Set tags for filtering and grouping
                foreach (var tag in tags)
                {
                    scope.SetTag(tag.Key, tag.Value);
                }

                // Set extra data for debugging
                foreach (var extra in extras)
                {
                    scope.SetExtra(extra.Key, extra.Value);
                }

                // Set fingerprint for better grouping
                scope.SetFingerprint(new[] { "sql-loop-detection", repositoryName, methodName });

                // Set level
                scope.Level = SentryLevel.Error;

                // Add breadcrumb
                scope.AddBreadcrumb(
                    message: $"Critical SQL loop detected in {repositoryName}.{methodName}",
                    category: "sql-monitoring",
                    level: BreadcrumbLevel.Error,
                    data: new Dictionary<string, string>
                    {
                        ["query_pattern"] = queryPattern,
                        ["execution_count"] = extras["execution_count"].ToString(),
                        ["executions_per_minute"] = extras["executions_per_minute"].ToString()
                    }
                );

                // Additional context for debugging
                scope.SetExtra("repository", repositoryName);
                scope.SetExtra("method", methodName);
                scope.SetExtra("query_pattern", queryPattern);
                scope.SetExtra("detection_time", DateTime.UtcNow);
                scope.SetExtra("alert_type", "critical_loop_detection");
            });

            _logger.LogError(
                "[SENTRY-CRITICAL] Sent critical SQL loop alert to Sentry: {SentryId} | {Repository}.{Method} | {Reasons}",
                sentryId, repositoryName, methodName, string.Join(", ", reasons));
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "[SENTRY-ERROR] Failed to send critical alert to Sentry for {Repository}.{Method}",
                repositoryName, methodName);
        }
    }

    /// <summary>
    /// Sends a warning to Sentry for monitoring purposes
    /// </summary>
    private void SendWarningToSentry(string repositoryName, string methodName, string queryPattern,
        List<string> reasons, Dictionary<string, string> tags, Dictionary<string, object> extras)
    {
        try
        {
            var message = $"SQL Performance Warning: {repositoryName}.{methodName}";

            var sentryId = SentrySdk.CaptureMessage(message, scope =>
            {
                // Set tags for filtering and grouping
                foreach (var tag in tags)
                {
                    scope.SetTag(tag.Key, tag.Value);
                }

                // Set extra data for debugging
                foreach (var extra in extras)
                {
                    scope.SetExtra(extra.Key, extra.Value);
                }

                // Set fingerprint for better grouping
                scope.SetFingerprint(new[] { "sql-performance-warning", repositoryName, methodName });

                // Set level
                scope.Level = SentryLevel.Warning;

                // Add breadcrumb
                scope.AddBreadcrumb(
                    message: $"SQL performance warning in {repositoryName}.{methodName}",
                    category: "sql-monitoring",
                    level: BreadcrumbLevel.Warning,
                    data: new Dictionary<string, string>
                    {
                        ["query_pattern"] = queryPattern,
                        ["execution_count"] = extras["execution_count"].ToString(),
                        ["executions_per_minute"] = extras["executions_per_minute"].ToString()
                    }
                );

                // Set context
                scope.SetExtra("repository", repositoryName);
                scope.SetExtra("method", methodName);
                scope.SetExtra("query_pattern", queryPattern);
                scope.SetExtra("detection_time", DateTime.UtcNow);
                scope.SetExtra("alert_type", "performance_warning");
            });

            _logger.LogWarning(
                "[SENTRY-WARNING] Sent SQL performance warning to Sentry: {SentryId} | {Repository}.{Method} | {Reasons}",
                sentryId, repositoryName, methodName, string.Join(", ", reasons));
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "[SENTRY-ERROR] Failed to send warning to Sentry for {Repository}.{Method}",
                repositoryName, methodName);
        }
    }

    /// <summary>
    /// Sends a custom performance metric to Sentry (recorded as an informational breadcrumb)
    /// </summary>
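    /// <example>
    /// Illustrative call (the repository, method, and metric names are placeholders):
    /// <code>
    /// _sqlMonitoring.SendPerformanceMetricToSentry("OrderRepository", "GetOrdersAsync", "rows_returned", rows.Count);
    /// </code>
    /// </example>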
    public void SendPerformanceMetricToSentry(string repositoryName, string methodName, string metricName,
        double value, Dictionary<string, string>? tags = null)
    {
        try
        {
            // Merge caller-supplied tags with the standard ones and attach them to the breadcrumb
            var sentryTags = tags != null
                ? new Dictionary<string, string>(tags)
                : new Dictionary<string, string>();
            sentryTags["repository"] = repositoryName;
            sentryTags["method"] = methodName;
            sentryTags["metric_name"] = metricName;
            sentryTags["value"] = value.ToString();

            SentrySdk.AddBreadcrumb(
                message: $"SQL Performance Metric: {metricName} = {value}",
                category: "sql-performance",
                level: BreadcrumbLevel.Info,
                data: sentryTags);

            _logger.LogDebug("[SENTRY-METRIC] Sent performance metric to Sentry: {Metric} = {Value} for {Repository}.{Method}",
                metricName, value, repositoryName, methodName);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "[SENTRY-ERROR] Failed to send performance metric to Sentry");
        }
    }

    /// <summary>
    /// Gets current statistics for all tracked queries
    /// </summary>
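    /// <example>
    /// Illustrative consumption, e.g. from a diagnostics endpoint (names are placeholders):
    /// <code>
    /// foreach (var (key, stat) in _sqlMonitoring.GetQueryStatistics())
    /// {
    ///     logger.LogInformation("{Query}: {Count} executions, {PerMinute:F1}/min",
    ///         key, stat.ExecutionCount, stat.ExecutionsPerMinute);
    /// }
    /// </code>
    /// </example>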
    public Dictionary<string, QueryExecutionStats> GetQueryStatistics()
    {
        var stats = new Dictionary<string, QueryExecutionStats>();
        var now = DateTime.UtcNow;

        foreach (var kvp in _queryTrackers)
        {
            var tracker = kvp.Value;
            var timeSinceFirst = now - tracker.FirstExecution;

            stats[kvp.Key] = new QueryExecutionStats
            {
                RepositoryName = tracker.RepositoryName,
                MethodName = tracker.MethodName,
                QueryPattern = tracker.QueryPattern,
                ExecutionCount = tracker.ExecutionCount,
                FirstExecution = tracker.FirstExecution,
                LastExecution = tracker.LastExecution,
                AverageExecutionTime = tracker.AverageExecutionTime,
                MinExecutionTime = tracker.MinExecutionTime,
                MaxExecutionTime = tracker.MaxExecutionTime,
                ExecutionsPerMinute = tracker.ExecutionCount / Math.Max(timeSinceFirst.TotalMinutes, 0.1),
                IsActive = timeSinceFirst < TimeSpan.FromSeconds(_settings.LoopDetectionWindowSeconds)
            };
        }

        return stats;
    }

    /// <summary>
    /// Clears all tracking data
    /// </summary>
    public void ClearAllTracking()
    {
        _queryTrackers.Clear();
        _logger.LogInformation("[SQL-LOOP-DETECTION] All tracking data cleared");
    }

    private void CleanupOldTrackers(object? state)
    {
        var now = DateTime.UtcNow;
        var keysToRemove = new List<string>();

        foreach (var kvp in _queryTrackers)
        {
            var timeSinceLastExecution = now - kvp.Value.LastExecution;

            // Use the configurable retention period so the monitoring dashboard can still
            // show statistics for queries that have not executed recently
            var retentionPeriod = TimeSpan.FromMinutes(_settings.DataRetentionMinutes);

            if (timeSinceLastExecution > retentionPeriod)
            {
                keysToRemove.Add(kvp.Key);
            }
        }

        foreach (var key in keysToRemove)
        {
            _queryTrackers.TryRemove(key, out _);
        }

        if (keysToRemove.Count > 0)
        {
            _logger.LogDebug("[SQL-MONITORING] Cleaned up {Count} old trackers (retention: {RetentionMinutes} minutes)", keysToRemove.Count, _settings.DataRetentionMinutes);
        }
    }

    /// <summary>
    /// Sends a slow query alert to Sentry
    /// </summary>
    public Task SendSlowQueryAlertAsync(string repositoryName, string methodName, string queryPattern, TimeSpan executionTime)
    {
        try
        {
            var exception = new TimeoutException($"SQL query took {executionTime.TotalMilliseconds:F0}ms to execute");

            var sentryId = SentrySdk.CaptureException(exception, scope =>
            {
                scope.SetTag("repository", repositoryName);
                scope.SetTag("method", methodName);
                scope.SetTag("alert_type", "slow_query");
                scope.SetTag("environment", Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT") ?? "Unknown");

                scope.SetExtra("query_pattern", queryPattern);
                scope.SetExtra("execution_time_ms", executionTime.TotalMilliseconds);
                scope.SetExtra("threshold_ms", 2000);

                scope.SetFingerprint(new[] { "slow-query", repositoryName, methodName });
                scope.Level = SentryLevel.Warning;

                scope.AddBreadcrumb(
                    message: $"Slow SQL query in {repositoryName}.{methodName}",
                    category: "sql-monitoring",
                    level: BreadcrumbLevel.Warning,
                    data: new Dictionary<string, string>
                    {
                        ["query_pattern"] = queryPattern,
                        ["execution_time_ms"] = executionTime.TotalMilliseconds.ToString()
                    }
                );
            });

            _logger.LogWarning(
                "[SENTRY-SLOW-QUERY] Sent slow query alert to Sentry: {SentryId} | {Repository}.{Method} | {Time}ms",
                sentryId, repositoryName, methodName, executionTime.TotalMilliseconds);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "[SENTRY-ERROR] Failed to send slow query alert to Sentry");
        }

        return Task.CompletedTask;
    }

    /// <summary>
    /// Sends a SQL error alert to Sentry
    /// </summary>
    public Task SendSqlErrorAlertAsync(string repositoryName, string methodName, string queryPattern, TimeSpan executionTime, Exception exception)
    {
        try
        {
            var sentryId = SentrySdk.CaptureException(exception, scope =>
            {
                scope.SetTag("repository", repositoryName);
                scope.SetTag("method", methodName);
                scope.SetTag("alert_type", "sql_error");
                scope.SetTag("environment", Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT") ?? "Unknown");

                scope.SetExtra("query_pattern", queryPattern);
                scope.SetExtra("execution_time_ms", executionTime.TotalMilliseconds);
                scope.SetExtra("error_type", exception.GetType().Name);

                scope.SetFingerprint(new[] { "sql-error", repositoryName, methodName, exception.GetType().Name });
                scope.Level = SentryLevel.Error;

                scope.AddBreadcrumb(
                    message: $"SQL error in {repositoryName}.{methodName}",
                    category: "sql-monitoring",
                    level: BreadcrumbLevel.Error,
                    data: new Dictionary<string, string>
                    {
                        ["query_pattern"] = queryPattern,
                        ["execution_time_ms"] = executionTime.TotalMilliseconds.ToString(),
                        ["error_type"] = exception.GetType().Name
                    }
                );
            });

            _logger.LogError(
                "[SENTRY-SQL-ERROR] Sent SQL error alert to Sentry: {SentryId} | {Repository}.{Method} | {Error}",
                sentryId, repositoryName, methodName, exception.Message);
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "[SENTRY-ERROR] Failed to send SQL error alert to Sentry");
        }

        return Task.CompletedTask;
    }

    /// <summary>
    /// Checks if monitoring is enabled globally
    /// </summary>
    public bool IsMonitoringEnabled()
    {
        return _settings.Enabled;
    }

    /// <summary>
    /// Checks if logging is enabled
    /// </summary>
    public bool IsLoggingEnabled()
    {
        return _settings.LoggingEnabled;
    }

    /// <summary>
    /// Checks if Sentry integration is enabled
    /// </summary>
    public bool IsSentryEnabled()
    {
        return _settings.SentryEnabled;
    }

    /// <summary>
    /// Checks if loop detection is enabled
    /// </summary>
    public bool IsLoopDetectionEnabled()
    {
        return _settings.LoopDetectionEnabled;
    }

    /// <summary>
    /// Checks if performance monitoring is enabled
    /// </summary>
    public bool IsPerformanceMonitoringEnabled()
    {
        return _settings.PerformanceMonitoringEnabled;
    }

    /// <summary>
    /// Checks if a query should be logged based on configuration
    /// </summary>
    public bool ShouldLogQuery(TimeSpan executionTime)
    {
        if (!_settings.LoggingEnabled) return false;

        if (_settings.LogErrorsOnly) return false; // Only log errors, not normal queries

        if (_settings.LogSlowQueriesOnly)
        {
            return executionTime.TotalMilliseconds > _settings.SlowQueryThresholdMs;
        }

        return true; // Log all queries if logging is enabled
    }

    /// <summary>
    /// Disposes the periodic cleanup timer
    /// </summary>
    public void Dispose()
    {
        _cleanupTimer?.Dispose();
    }

    private class QueryExecutionTracker
    {
        public string RepositoryName { get; set; } = string.Empty;
        public string MethodName { get; set; } = string.Empty;
        public string QueryPattern { get; set; } = string.Empty;
        public DateTime FirstExecution { get; set; }
        public DateTime LastExecution { get; set; }
        public int ExecutionCount { get; set; }
        public TimeSpan TotalExecutionTime { get; set; }
        public TimeSpan MaxExecutionTime { get; set; }
        public TimeSpan MinExecutionTime { get; set; }

        public TimeSpan AverageExecutionTime =>
            ExecutionCount > 0 ? TimeSpan.FromTicks(TotalExecutionTime.Ticks / ExecutionCount) : TimeSpan.Zero;
    }
}
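
// The SqlMonitoringSettings type consumed above is presumably defined elsewhere in this
// project; for reference, a minimal shape compatible with the usages in this file would look
// like the sketch below (default values are illustrative, not the project's actual defaults).
// It is left commented out to avoid a duplicate definition.
//
// public class SqlMonitoringSettings
// {
//     public bool Enabled { get; set; } = true;
//     public bool LoggingEnabled { get; set; } = true;
//     public bool SentryEnabled { get; set; } = true;
//     public bool LoopDetectionEnabled { get; set; } = true;
//     public bool PerformanceMonitoringEnabled { get; set; } = true;
//     public bool LogErrorsOnly { get; set; }
//     public bool LogSlowQueriesOnly { get; set; }
//     public int SlowQueryThresholdMs { get; set; } = 2000;
//     public int MaxQueryExecutionsPerWindow { get; set; } = 100;
//     public int SentryAlertThreshold { get; set; } = 3;
//     public int LoopDetectionWindowSeconds { get; set; } = 60;
//     public int DataRetentionMinutes { get; set; } = 30;
// }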