From 54138e5a103c3913beb778d12999ca378381c50e Mon Sep 17 00:00:00 2001
From: Michael Cuomo
Date: Wed, 1 Oct 2025 19:02:25 -0400
Subject: [PATCH 01/17] wip: stub MetricProcessor.cs

---
 src/CommonLib/Processors/MetricProcessor.cs | 70 +++++++++++++++++++++
 1 file changed, 70 insertions(+)
 create mode 100644 src/CommonLib/Processors/MetricProcessor.cs

diff --git a/src/CommonLib/Processors/MetricProcessor.cs b/src/CommonLib/Processors/MetricProcessor.cs
new file mode 100644
index 00000000..9da44298
--- /dev/null
+++ b/src/CommonLib/Processors/MetricProcessor.cs
@@ -0,0 +1,70 @@
+using System;
+using System.Collections.Concurrent;
+using System.Collections.Generic;
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace SharpHoundCommonLib.Processors;
+
+public interface IMetricWriter {
+    Task WriteAsync(List<Metric> metrics);
+}
+
+public enum MetricType {
+    Counter,
+    Gauge,
+    Histogram
+}
+
+public class Metric {
+    public string Name { get; set; } = string.Empty;
+    public MetricType MetricType { get; set; }
+    public double Value { get; set; }
+    public Dictionary<string, string>? Labels { get; set; } = null;
+}
+
+public class MetricProcessor {
+    private readonly ConcurrentBag<Metric> _metrics = [];
+    private readonly TimeSpan _flushInterval;
+    private readonly IMetricWriter _writer;
+    private readonly CancellationTokenSource _cancellationTokenSource = new();
+
+
+    public MetricProcessor(IMetricWriter writer, TimeSpan? flushInterval = null) {
+        _writer = writer;
+        _flushInterval = flushInterval ?? TimeSpan.FromSeconds(60);
+        _ = RunFlushLoop();
+    }
+
+    private async Task RunFlushLoop() {
+        while (!_cancellationTokenSource.IsCancellationRequested) {
+            await Task.Delay(_flushInterval);
+            await FlushAsync();
+        }
+    }
+
+    public void Record(string name, double value, MetricType metricType = MetricType.Gauge,
+        Dictionary<string, string> labels = null) {
+        _metrics.Add(new Metric { Name = name, Value = value, MetricType = metricType, Labels = labels});
+    }
+
+    public async Task FlushAsync() {
+        if (_metrics.IsEmpty) {
+            return;
+        }
+
+        var batch = new List<Metric>();
+        while (_metrics.TryTake(out var metric)) {
+            batch.Add(metric);
+        }
+
+        try {
+            await _writer.WriteAsync(batch);
+        }
+        catch {
+            // Don't crash the program if we cannot write metrics
+        }
+    }
+
+    public void Stop() => _cancellationTokenSource.Cancel();
+}
\ No newline at end of file

From addb5a8aa5482bb4abad171323b65b4fc03919bb Mon Sep 17 00:00:00 2001
From: Michael Cuomo
Date: Thu, 2 Oct 2025 14:25:07 -0400
Subject: [PATCH 02/17] wip: cleanup MetricProcessor and Writer

---
 src/CommonLib/Interfaces/IMetricProcessor.cs | 10 +++
 src/CommonLib/Interfaces/IMetricWriter.cs    |  9 +++
 src/CommonLib/Models/IsExternalInit.cs       |  7 ++
 src/CommonLib/Models/Metric.cs               | 16 ++++
 src/CommonLib/Processors/MetricProcessor.cs  | 84 ++++++++++----------
 5 files changed, 82 insertions(+), 44 deletions(-)
 create mode 100644 src/CommonLib/Interfaces/IMetricProcessor.cs
 create mode 100644 src/CommonLib/Interfaces/IMetricWriter.cs
 create mode 100644 src/CommonLib/Models/IsExternalInit.cs
 create mode 100644 src/CommonLib/Models/Metric.cs

diff --git a/src/CommonLib/Interfaces/IMetricProcessor.cs b/src/CommonLib/Interfaces/IMetricProcessor.cs
new file mode 100644
index 00000000..569a850f
--- /dev/null
+++ b/src/CommonLib/Interfaces/IMetricProcessor.cs
@@ -0,0 +1,10 @@
+using System.Collections.Generic;
+using System.Threading.Tasks;
+using SharpHoundCommonLib.Models;
+
+namespace SharpHoundCommonLib.Interfaces;
+
+public interface IMetricProcessor {
+    void Record(string name, object value, MetricType metricType 
= MetricType.Counter, IDictionary labels = null); + Task FlushAsync(); +} \ No newline at end of file diff --git a/src/CommonLib/Interfaces/IMetricWriter.cs b/src/CommonLib/Interfaces/IMetricWriter.cs new file mode 100644 index 00000000..a82c2780 --- /dev/null +++ b/src/CommonLib/Interfaces/IMetricWriter.cs @@ -0,0 +1,9 @@ +using System.Collections.Concurrent; +using System.Threading.Tasks; +using SharpHoundCommonLib.Models; + +namespace SharpHoundCommonLib.Interfaces; + +public interface IMetricWriter { + Task WriteAsync(ConcurrentDictionary metrics); +} \ No newline at end of file diff --git a/src/CommonLib/Models/IsExternalInit.cs b/src/CommonLib/Models/IsExternalInit.cs new file mode 100644 index 00000000..8feede59 --- /dev/null +++ b/src/CommonLib/Models/IsExternalInit.cs @@ -0,0 +1,7 @@ +using System.ComponentModel; + +namespace System.Runtime.CompilerServices +{ + [EditorBrowsable(EditorBrowsableState.Never)] + internal class IsExternalInit{} +} \ No newline at end of file diff --git a/src/CommonLib/Models/Metric.cs b/src/CommonLib/Models/Metric.cs new file mode 100644 index 00000000..00770271 --- /dev/null +++ b/src/CommonLib/Models/Metric.cs @@ -0,0 +1,16 @@ +using System.Collections.Generic; + +namespace SharpHoundCommonLib.Models; + +public abstract record Metric { + private Metric() { } + + public sealed record DoubleMetric(string Name, MetricType Type, double Value, IDictionary Labels) : Metric; + public sealed record VectorMetric(string Name, MetricType Type, IEnumerable Value, IDictionary Labels) : Metric; +} + +public enum MetricType { + Counter, + Gauge, + Histogram +} diff --git a/src/CommonLib/Processors/MetricProcessor.cs b/src/CommonLib/Processors/MetricProcessor.cs index 9da44298..110caf8c 100644 --- a/src/CommonLib/Processors/MetricProcessor.cs +++ b/src/CommonLib/Processors/MetricProcessor.cs @@ -1,51 +1,54 @@ -using System; using System.Collections.Concurrent; using System.Collections.Generic; -using System.Threading; +using System.Linq; using System.Threading.Tasks; +using SharpHoundCommonLib.Interfaces; +using SharpHoundCommonLib.Models; namespace SharpHoundCommonLib.Processors; -public interface IMetricWriter { - Task WriteAsync(List metrics); -} +public class MetricProcessor(IMetricWriter writer) : IMetricProcessor { + private readonly ConcurrentDictionary _metrics = []; -public enum MetricType { - Counter, - Gauge, - Histogram -} -public class Metric { - public string Name { get; set; } = string.Empty; - public MetricType MetricType { get; set; } - public double Value { get; set; } - public Dictionary? 
Labels { get; set; } = null; -} + public void Record(string name, object value, MetricType metricType = MetricType.Counter, + IDictionary labels = null) { + _metrics.AddOrUpdate(name, (_) => CreateMetric(name, value, metricType, labels), + (_, metric) => UpdateMetric(name, value, metric, metricType, labels)); + } -public class MetricProcessor { - private readonly ConcurrentBag _metrics = []; - private readonly TimeSpan _flushInterval; - private readonly IMetricWriter _writer; - private readonly CancellationTokenSource _cancellationTokenSource = new(); - + private static Metric CreateMetric(string name, object value, MetricType metricType = MetricType.Counter, + IDictionary labels = null) => metricType switch { + MetricType.Counter when value is double v => new Metric.DoubleMetric(name, metricType, v, labels), + MetricType.Counter => new Metric.DoubleMetric(name, metricType, -1, labels), + MetricType.Gauge when value is double v => new Metric.DoubleMetric(name, metricType, v, labels), + MetricType.Gauge => new Metric.DoubleMetric(name, metricType, -1, labels), + MetricType.Histogram when value is IEnumerable v => new Metric.VectorMetric(name, metricType, v, labels), + MetricType.Histogram => new Metric.VectorMetric(name, metricType, [], labels), + _ => new Metric.DoubleMetric(name, metricType, -1, labels) + }; - public MetricProcessor(IMetricWriter writer, TimeSpan? flushInterval = null) { - _writer = writer; - _flushInterval = flushInterval ?? TimeSpan.FromSeconds(60); - _ = RunFlushLoop(); - } + private static Metric UpdateMetric(string name, object value, Metric existingMetric, MetricType metricType = MetricType.Counter, + IDictionary labels = null) => existingMetric switch { + Metric.DoubleMetric m when metricType is MetricType.Counter && value is double v => + m with { Labels = CombineLabels(m.Labels, labels), Value = m.Value + v }, + Metric.DoubleMetric m when metricType is MetricType.Gauge && value is double v => + m with { Labels = CombineLabels(m.Labels, labels), Value = v }, + Metric.DoubleMetric m when metricType is MetricType.Counter => m, + Metric.VectorMetric m when metricType is MetricType.Histogram && value is double v => + m with { Labels = CombineLabels(m.Labels, labels), Value = m.Value.Concat([v]) }, + Metric.VectorMetric m when metricType is MetricType.Histogram && value is IEnumerable v => + m with { Labels = CombineLabels(m.Labels, labels), Value = m.Value.Concat(v) }, + Metric.VectorMetric m => m, + _ => new Metric.DoubleMetric(name, metricType, -1, labels) + }; - private async Task RunFlushLoop() { - while (!_cancellationTokenSource.IsCancellationRequested) { - await Task.Delay(_flushInterval); - await FlushAsync(); + private static IDictionary CombineLabels(IDictionary labels1 = null, + IDictionary labels2 = null) { + if (labels1 != null && labels2 != null) { + return labels1.Concat(labels2.Where( x=> !labels1.ContainsKey(x.Key))).ToDictionary(x=>x.Key, x=>x.Value); } - } - - public void Record(string name, double value, MetricType metricType = MetricType.Gauge, - Dictionary labels = null) { - _metrics.Add(new Metric { Name = name, Value = value, MetricType = metricType, Labels = labels}); + return labels1 ?? 
labels2; } public async Task FlushAsync() { @@ -53,18 +56,11 @@ public async Task FlushAsync() { return; } - var batch = new List(); - while (_metrics.TryTake(out var metric)) { - batch.Add(metric); - } - try { - await _writer.WriteAsync(batch); + await writer.WriteAsync(_metrics); } catch { // Don't crash program if we cannot write metrics } } - - public void Stop() => _cancellationTokenSource.Cancel(); } \ No newline at end of file From 8e1256ce9e3ec0b8d27cf8c591518966091759e8 Mon Sep 17 00:00:00 2001 From: Michael Cuomo Date: Mon, 29 Dec 2025 11:27:48 -0500 Subject: [PATCH 03/17] chore: alter histogram value type --- src/CommonLib/Models/Metric.cs | 19 ++++++++++++--- src/CommonLib/Processors/MetricProcessor.cs | 27 +++++++++++++++------ 2 files changed, 34 insertions(+), 12 deletions(-) diff --git a/src/CommonLib/Models/Metric.cs b/src/CommonLib/Models/Metric.cs index 00770271..f77fd31a 100644 --- a/src/CommonLib/Models/Metric.cs +++ b/src/CommonLib/Models/Metric.cs @@ -4,13 +4,24 @@ namespace SharpHoundCommonLib.Models; public abstract record Metric { private Metric() { } - - public sealed record DoubleMetric(string Name, MetricType Type, double Value, IDictionary Labels) : Metric; - public sealed record VectorMetric(string Name, MetricType Type, IEnumerable Value, IDictionary Labels) : Metric; + + public sealed record DoubleMetric( + string Name, + MetricType Type, + double Value, + IDictionary Labels) : Metric; + + public sealed record DictionaryMetric( + string Name, + MetricType Type, + IDictionary Value, + IDictionary Labels) : Metric; } public enum MetricType { Counter, Gauge, - Histogram + ClassicHistogram, + // Currently Native Histograms are not supported for scraping. + // Histogram, } diff --git a/src/CommonLib/Processors/MetricProcessor.cs b/src/CommonLib/Processors/MetricProcessor.cs index 110caf8c..8c1ff24b 100644 --- a/src/CommonLib/Processors/MetricProcessor.cs +++ b/src/CommonLib/Processors/MetricProcessor.cs @@ -23,8 +23,8 @@ private static Metric CreateMetric(string name, object value, MetricType metricT MetricType.Counter => new Metric.DoubleMetric(name, metricType, -1, labels), MetricType.Gauge when value is double v => new Metric.DoubleMetric(name, metricType, v, labels), MetricType.Gauge => new Metric.DoubleMetric(name, metricType, -1, labels), - MetricType.Histogram when value is IEnumerable v => new Metric.VectorMetric(name, metricType, v, labels), - MetricType.Histogram => new Metric.VectorMetric(name, metricType, [], labels), + MetricType.ClassicHistogram when value is IDictionary v => new Metric.DictionaryMetric(name, metricType, v, labels), + MetricType.ClassicHistogram => new Metric.DictionaryMetric(name, metricType, [], labels), _ => new Metric.DoubleMetric(name, metricType, -1, labels) }; @@ -35,11 +35,9 @@ private static Metric UpdateMetric(string name, object value, Metric existingMet Metric.DoubleMetric m when metricType is MetricType.Gauge && value is double v => m with { Labels = CombineLabels(m.Labels, labels), Value = v }, Metric.DoubleMetric m when metricType is MetricType.Counter => m, - Metric.VectorMetric m when metricType is MetricType.Histogram && value is double v => - m with { Labels = CombineLabels(m.Labels, labels), Value = m.Value.Concat([v]) }, - Metric.VectorMetric m when metricType is MetricType.Histogram && value is IEnumerable v => - m with { Labels = CombineLabels(m.Labels, labels), Value = m.Value.Concat(v) }, - Metric.VectorMetric m => m, + Metric.DictionaryMetric m when metricType is MetricType.ClassicHistogram && value is 
IDictionary v => + m with { Labels = CombineLabels(m.Labels, labels), Value = CombineObservations(m.Value, v) }, + Metric.DictionaryMetric m => m, _ => new Metric.DoubleMetric(name, metricType, -1, labels) }; @@ -48,7 +46,20 @@ private static IDictionary CombineLabels(IDictionary !labels1.ContainsKey(x.Key))).ToDictionary(x=>x.Key, x=>x.Value); } - return labels1 ?? labels2; + return labels1 ?? labels2 ?? []; + } + + private static IDictionary CombineObservations(IDictionary observations1 = null, + IDictionary observations2 = null) { + if (observations1 == null || observations2 == null) return observations1 ?? observations2 ?? []; + foreach (var pair in observations2) { + if (!observations1.ContainsKey(pair.Key)) + observations1[pair.Key] = pair.Value; + else + observations1[pair.Key] += pair.Value; + } + + return observations1; } public async Task FlushAsync() { From dbe3475ed7a510a4854f948615a8be7b2e90ebde Mon Sep 17 00:00:00 2001 From: Michael Cuomo Date: Mon, 29 Dec 2025 13:27:16 -0500 Subject: [PATCH 04/17] fix: create Dictionary Directly --- src/CommonLib/Processors/MetricProcessor.cs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/CommonLib/Processors/MetricProcessor.cs b/src/CommonLib/Processors/MetricProcessor.cs index 8c1ff24b..cf3b6e52 100644 --- a/src/CommonLib/Processors/MetricProcessor.cs +++ b/src/CommonLib/Processors/MetricProcessor.cs @@ -24,7 +24,7 @@ private static Metric CreateMetric(string name, object value, MetricType metricT MetricType.Gauge when value is double v => new Metric.DoubleMetric(name, metricType, v, labels), MetricType.Gauge => new Metric.DoubleMetric(name, metricType, -1, labels), MetricType.ClassicHistogram when value is IDictionary v => new Metric.DictionaryMetric(name, metricType, v, labels), - MetricType.ClassicHistogram => new Metric.DictionaryMetric(name, metricType, [], labels), + MetricType.ClassicHistogram => new Metric.DictionaryMetric(name, metricType, new Dictionary(), labels), _ => new Metric.DoubleMetric(name, metricType, -1, labels) }; @@ -46,12 +46,12 @@ private static IDictionary CombineLabels(IDictionary !labels1.ContainsKey(x.Key))).ToDictionary(x=>x.Key, x=>x.Value); } - return labels1 ?? labels2 ?? []; + return labels1 ?? labels2 ?? new Dictionary(); } private static IDictionary CombineObservations(IDictionary observations1 = null, IDictionary observations2 = null) { - if (observations1 == null || observations2 == null) return observations1 ?? observations2 ?? []; + if (observations1 == null || observations2 == null) return observations1 ?? observations2 ?? 
new Dictionary(); foreach (var pair in observations2) { if (!observations1.ContainsKey(pair.Key)) observations1[pair.Key] = pair.Value; From 129c35cc06322748e95fb731d1a0e70a8ed581ac Mon Sep 17 00:00:00 2001 From: Michael Cuomo Date: Tue, 30 Dec 2025 13:51:04 -0500 Subject: [PATCH 05/17] wip: create platform for defining and observing metrics --- src/CommonLib/Interfaces/IMetricFactory.cs | 5 ++ src/CommonLib/Interfaces/IMetricProcessor.cs | 10 --- src/CommonLib/Interfaces/IMetricRegistry.cs | 9 +++ src/CommonLib/Interfaces/IMetricRouter.cs | 6 ++ src/CommonLib/Interfaces/IMetricSink.cs | 8 ++ src/CommonLib/Interfaces/IMetricWriter.cs | 5 +- src/CommonLib/Models/Metric.cs | 27 ------- src/CommonLib/Models/MetricDefinition.cs | 32 ++++++++ src/CommonLib/Models/MetricObservation.cs | 10 +++ src/CommonLib/Processors/MetricProcessor.cs | 77 ------------------- src/CommonLib/Services/MetricAggregator.cs | 64 +++++++++++++++ src/CommonLib/Services/MetricFactory.cs | 15 ++++ src/CommonLib/Services/MetricRegistry.cs | 29 +++++++ src/CommonLib/Services/MetricRouter.cs | 43 +++++++++++ src/CommonLib/Static/DefaultMetricRegistry.cs | 28 +++++++ src/CommonLib/Static/Metrics.cs | 24 ++++++ 16 files changed, 277 insertions(+), 115 deletions(-) create mode 100644 src/CommonLib/Interfaces/IMetricFactory.cs delete mode 100644 src/CommonLib/Interfaces/IMetricProcessor.cs create mode 100644 src/CommonLib/Interfaces/IMetricRegistry.cs create mode 100644 src/CommonLib/Interfaces/IMetricRouter.cs create mode 100644 src/CommonLib/Interfaces/IMetricSink.cs delete mode 100644 src/CommonLib/Models/Metric.cs create mode 100644 src/CommonLib/Models/MetricDefinition.cs create mode 100644 src/CommonLib/Models/MetricObservation.cs delete mode 100644 src/CommonLib/Processors/MetricProcessor.cs create mode 100644 src/CommonLib/Services/MetricAggregator.cs create mode 100644 src/CommonLib/Services/MetricFactory.cs create mode 100644 src/CommonLib/Services/MetricRegistry.cs create mode 100644 src/CommonLib/Services/MetricRouter.cs create mode 100644 src/CommonLib/Static/DefaultMetricRegistry.cs create mode 100644 src/CommonLib/Static/Metrics.cs diff --git a/src/CommonLib/Interfaces/IMetricFactory.cs b/src/CommonLib/Interfaces/IMetricFactory.cs new file mode 100644 index 00000000..a36b31cb --- /dev/null +++ b/src/CommonLib/Interfaces/IMetricFactory.cs @@ -0,0 +1,5 @@ +namespace SharpHoundCommonLib.Interfaces; + +public interface IMetricFactory { + IMetricRouter CreateMetricRouter(); +} \ No newline at end of file diff --git a/src/CommonLib/Interfaces/IMetricProcessor.cs b/src/CommonLib/Interfaces/IMetricProcessor.cs deleted file mode 100644 index 569a850f..00000000 --- a/src/CommonLib/Interfaces/IMetricProcessor.cs +++ /dev/null @@ -1,10 +0,0 @@ -using System.Collections.Generic; -using System.Threading.Tasks; -using SharpHoundCommonLib.Models; - -namespace SharpHoundCommonLib.Interfaces; - -public interface IMetricProcessor { - void Record(string name, object value, MetricType metricType = MetricType.Counter, IDictionary labels = null); - Task FlushAsync(); -} \ No newline at end of file diff --git a/src/CommonLib/Interfaces/IMetricRegistry.cs b/src/CommonLib/Interfaces/IMetricRegistry.cs new file mode 100644 index 00000000..c14a60fc --- /dev/null +++ b/src/CommonLib/Interfaces/IMetricRegistry.cs @@ -0,0 +1,9 @@ +using System.Collections.Generic; +using SharpHoundCommonLib.Models; + +namespace SharpHoundCommonLib.Interfaces; + +public interface IMetricRegistry { + bool TryRegister(MetricDefinition definition, out int 
definitionId); + IReadOnlyList Definitions { get; } +} \ No newline at end of file diff --git a/src/CommonLib/Interfaces/IMetricRouter.cs b/src/CommonLib/Interfaces/IMetricRouter.cs new file mode 100644 index 00000000..8a3b0099 --- /dev/null +++ b/src/CommonLib/Interfaces/IMetricRouter.cs @@ -0,0 +1,6 @@ +namespace SharpHoundCommonLib.Interfaces; + +public interface IMetricRouter { + void Observe(int definitionId, double value, string[] labelValues); + void Flush(); +} \ No newline at end of file diff --git a/src/CommonLib/Interfaces/IMetricSink.cs b/src/CommonLib/Interfaces/IMetricSink.cs new file mode 100644 index 00000000..e7421a56 --- /dev/null +++ b/src/CommonLib/Interfaces/IMetricSink.cs @@ -0,0 +1,8 @@ +using SharpHoundCommonLib.Models; + +namespace SharpHoundCommonLib.Interfaces; + +public interface IMetricSink { + void Observe(in MetricObservation.DoubleMetricObservation observation); + void Flush(); +} diff --git a/src/CommonLib/Interfaces/IMetricWriter.cs b/src/CommonLib/Interfaces/IMetricWriter.cs index a82c2780..a221f1cc 100644 --- a/src/CommonLib/Interfaces/IMetricWriter.cs +++ b/src/CommonLib/Interfaces/IMetricWriter.cs @@ -5,5 +5,8 @@ namespace SharpHoundCommonLib.Interfaces; public interface IMetricWriter { - Task WriteAsync(ConcurrentDictionary metrics); + Task WriteAsync(ConcurrentDictionary metrics); + Task FlushGauge(double value); + Task FlushCounter(long value); + Task FlushCumulativeHistogram(long[] values, long count, double sum); } \ No newline at end of file diff --git a/src/CommonLib/Models/Metric.cs b/src/CommonLib/Models/Metric.cs deleted file mode 100644 index f77fd31a..00000000 --- a/src/CommonLib/Models/Metric.cs +++ /dev/null @@ -1,27 +0,0 @@ -using System.Collections.Generic; - -namespace SharpHoundCommonLib.Models; - -public abstract record Metric { - private Metric() { } - - public sealed record DoubleMetric( - string Name, - MetricType Type, - double Value, - IDictionary Labels) : Metric; - - public sealed record DictionaryMetric( - string Name, - MetricType Type, - IDictionary Value, - IDictionary Labels) : Metric; -} - -public enum MetricType { - Counter, - Gauge, - ClassicHistogram, - // Currently Native Histograms are not supported for scraping. 
- // Histogram, -} diff --git a/src/CommonLib/Models/MetricDefinition.cs b/src/CommonLib/Models/MetricDefinition.cs new file mode 100644 index 00000000..c9c5941e --- /dev/null +++ b/src/CommonLib/Models/MetricDefinition.cs @@ -0,0 +1,32 @@ +using System; +using System.Collections.Generic; + +namespace SharpHoundCommonLib.Models; + +public abstract record MetricDefinition( + string Name, + IReadOnlyList LabelNames); + +public sealed record CounterDefinition(string Name, IReadOnlyList LabelNames) : MetricDefinition(Name, LabelNames); +public sealed record GaugeDefinition(string Name, IReadOnlyList LabelNames) : MetricDefinition(Name, LabelNames); + +public sealed record CumulativeHistogramDefinition(string Name, double[] InitBuckets, IReadOnlyList LabelNames) : MetricDefinition(Name, LabelNames) { + public double[] Buckets { get; } = NormalizeBuckets(InitBuckets); + + private static double[] NormalizeBuckets(double[] buckets) { + if (buckets is null || buckets.Length == 0) + throw new ArgumentException("Histogram buckets cannot be empty"); + + var copy = (double[])buckets.Clone(); + Array.Sort(copy); + + for (var i = 1; i < copy.Length; i++) { + if (copy[i] <= copy[i - 1]) + throw new ArgumentException("Histogram buckets must be strictly increasing"); + } + + return copy; + } +}; + +// Currently Native Histograms are not supported \ No newline at end of file diff --git a/src/CommonLib/Models/MetricObservation.cs b/src/CommonLib/Models/MetricObservation.cs new file mode 100644 index 00000000..7c499920 --- /dev/null +++ b/src/CommonLib/Models/MetricObservation.cs @@ -0,0 +1,10 @@ +namespace SharpHoundCommonLib.Models; + +public abstract record MetricObservation { + private MetricObservation() { } + + public readonly record struct DoubleMetricObservation( + int DefinitionId, + double Value, + string[] LabelsValues); +} diff --git a/src/CommonLib/Processors/MetricProcessor.cs b/src/CommonLib/Processors/MetricProcessor.cs deleted file mode 100644 index cf3b6e52..00000000 --- a/src/CommonLib/Processors/MetricProcessor.cs +++ /dev/null @@ -1,77 +0,0 @@ -using System.Collections.Concurrent; -using System.Collections.Generic; -using System.Linq; -using System.Threading.Tasks; -using SharpHoundCommonLib.Interfaces; -using SharpHoundCommonLib.Models; - -namespace SharpHoundCommonLib.Processors; - -public class MetricProcessor(IMetricWriter writer) : IMetricProcessor { - private readonly ConcurrentDictionary _metrics = []; - - - public void Record(string name, object value, MetricType metricType = MetricType.Counter, - IDictionary labels = null) { - _metrics.AddOrUpdate(name, (_) => CreateMetric(name, value, metricType, labels), - (_, metric) => UpdateMetric(name, value, metric, metricType, labels)); - } - - private static Metric CreateMetric(string name, object value, MetricType metricType = MetricType.Counter, - IDictionary labels = null) => metricType switch { - MetricType.Counter when value is double v => new Metric.DoubleMetric(name, metricType, v, labels), - MetricType.Counter => new Metric.DoubleMetric(name, metricType, -1, labels), - MetricType.Gauge when value is double v => new Metric.DoubleMetric(name, metricType, v, labels), - MetricType.Gauge => new Metric.DoubleMetric(name, metricType, -1, labels), - MetricType.ClassicHistogram when value is IDictionary v => new Metric.DictionaryMetric(name, metricType, v, labels), - MetricType.ClassicHistogram => new Metric.DictionaryMetric(name, metricType, new Dictionary(), labels), - _ => new Metric.DoubleMetric(name, metricType, -1, labels) - }; - - 
private static Metric UpdateMetric(string name, object value, Metric existingMetric, MetricType metricType = MetricType.Counter, - IDictionary labels = null) => existingMetric switch { - Metric.DoubleMetric m when metricType is MetricType.Counter && value is double v => - m with { Labels = CombineLabels(m.Labels, labels), Value = m.Value + v }, - Metric.DoubleMetric m when metricType is MetricType.Gauge && value is double v => - m with { Labels = CombineLabels(m.Labels, labels), Value = v }, - Metric.DoubleMetric m when metricType is MetricType.Counter => m, - Metric.DictionaryMetric m when metricType is MetricType.ClassicHistogram && value is IDictionary v => - m with { Labels = CombineLabels(m.Labels, labels), Value = CombineObservations(m.Value, v) }, - Metric.DictionaryMetric m => m, - _ => new Metric.DoubleMetric(name, metricType, -1, labels) - }; - - private static IDictionary CombineLabels(IDictionary labels1 = null, - IDictionary labels2 = null) { - if (labels1 != null && labels2 != null) { - return labels1.Concat(labels2.Where( x=> !labels1.ContainsKey(x.Key))).ToDictionary(x=>x.Key, x=>x.Value); - } - return labels1 ?? labels2 ?? new Dictionary(); - } - - private static IDictionary CombineObservations(IDictionary observations1 = null, - IDictionary observations2 = null) { - if (observations1 == null || observations2 == null) return observations1 ?? observations2 ?? new Dictionary(); - foreach (var pair in observations2) { - if (!observations1.ContainsKey(pair.Key)) - observations1[pair.Key] = pair.Value; - else - observations1[pair.Key] += pair.Value; - } - - return observations1; - } - - public async Task FlushAsync() { - if (_metrics.IsEmpty) { - return; - } - - try { - await writer.WriteAsync(_metrics); - } - catch { - // Don't crash program if we cannot write metrics - } - } -} \ No newline at end of file diff --git a/src/CommonLib/Services/MetricAggregator.cs b/src/CommonLib/Services/MetricAggregator.cs new file mode 100644 index 00000000..084e13e0 --- /dev/null +++ b/src/CommonLib/Services/MetricAggregator.cs @@ -0,0 +1,64 @@ +using System; +using System.Threading; +using SharpHoundCommonLib.Interfaces; +using SharpHoundCommonLib.Models; + +namespace SharpHoundCommonLib.Services; + +public static class MetricAggregatorExtensions { + public static MetricAggregator Create(MetricDefinition definition) => + definition switch { + CounterDefinition => new CounterAggregator(), + GaugeDefinition => new GaugeAggregator(), + CumulativeHistogramDefinition ch => new CumulativeHistogramAggregator(ch.Buckets), + _ => throw new ArgumentOutOfRangeException(nameof(definition), + $"Unknown metric type {definition.GetType().Name}") + }; +} + +public abstract class MetricAggregator { + public abstract void Observe(double value); + public abstract void Flush(IMetricWriter writer); +} + +public sealed class CounterAggregator : MetricAggregator { + private long _value; + + public override void Observe(double value) => Interlocked.Add(ref _value, (long)value); + public override void Flush(IMetricWriter writer) => writer.FlushCounter(_value); +} + +public sealed class GaugeAggregator : MetricAggregator { + private double _value; + + public override void Observe(double value) => _value = value; + public override void Flush(IMetricWriter writer) => writer.FlushGauge(_value); +} + +public sealed class CumulativeHistogramAggregator(double[] bounds) : MetricAggregator { + private readonly long[] _bucketCounts = new long[bounds.Length + 1]; // Includes the Inf+ bucket + private long _count; + private 
double _sum;
+
+    public override void Observe(double value) {
+        // This, along with the following line, finds the bucket the value belongs in.
+        // If the value matches a bucket bound exactly, BinarySearch returns its index; otherwise it
+        // returns the bitwise complement of the index of the next larger bound, which the check
+        // below undoes.
+        var idx = Array.BinarySearch(bounds, value);
+        if (idx < 0) idx = ~idx;
+        _bucketCounts[idx]++;
+        _count++;
+        _sum += value;
+    }
+
+    public override void Flush(IMetricWriter writer) {
+        long cumulative = 0;
+        var cumulativeValues = new long[_bucketCounts.Length];
+        for (var i = 0; i < _bucketCounts.Length; i++) {
+            cumulative += _bucketCounts[i];
+            cumulativeValues[i] = cumulative;
+        }
+        writer.FlushCumulativeHistogram(cumulativeValues, _count, _sum);
+    }
+}
\ No newline at end of file
diff --git a/src/CommonLib/Services/MetricFactory.cs b/src/CommonLib/Services/MetricFactory.cs
new file mode 100644
index 00000000..b448668c
--- /dev/null
+++ b/src/CommonLib/Services/MetricFactory.cs
@@ -0,0 +1,15 @@
+using SharpHoundCommonLib.Interfaces;
+
+namespace SharpHoundCommonLib.Services;
+
+public sealed class MetricFactory(IMetricRouter router) : IMetricFactory {
+    private readonly IMetricRouter _router = router;
+
+    public IMetricRouter CreateMetricRouter() => _router;
+}
+
+public sealed class NoOpMetricFactory : IMetricFactory {
+    public static readonly NoOpMetricFactory Instance = new();
+    private NoOpMetricFactory() { }
+    public IMetricRouter CreateMetricRouter() => NoOpMetricRouter.Instance;
+}
diff --git a/src/CommonLib/Services/MetricRegistry.cs b/src/CommonLib/Services/MetricRegistry.cs
new file mode 100644
index 00000000..18cf6d11
--- /dev/null
+++ b/src/CommonLib/Services/MetricRegistry.cs
@@ -0,0 +1,29 @@
+using System.Collections.Generic;
+using SharpHoundCommonLib.Interfaces;
+using SharpHoundCommonLib.Models;
+using SharpHoundCommonLib.Static;
+
+namespace SharpHoundCommonLib.Services;
+
+public sealed class MetricRegistry : IMetricRegistry {
+    private readonly List<MetricDefinition> _metrics = [];
+    private readonly Dictionary<string, int> _nameToId = new();
+    private bool _sealed;
+
+    public IReadOnlyList<MetricDefinition> Definitions => _metrics;
+
+    public bool TryRegister(MetricDefinition definition, out int definitionId) {
+        definitionId = MetricId.InvalidId;
+        if (_sealed) return false;
+
+        if (_nameToId.TryGetValue(definition.Name, out definitionId))
+            return true;
+
+        definitionId = _metrics.Count;
+        _metrics.Add(definition);
+        _nameToId[definition.Name] = definitionId;
+        return true;
+    }
+
+    internal void Seal() => _sealed = true;
+}
diff --git a/src/CommonLib/Services/MetricRouter.cs b/src/CommonLib/Services/MetricRouter.cs
new file mode 100644
index 00000000..b8af8f2b
--- /dev/null
+++ b/src/CommonLib/Services/MetricRouter.cs
@@ -0,0 +1,43 @@
+using System.Collections.Generic;
+using System.Linq;
+using SharpHoundCommonLib.Interfaces;
+using SharpHoundCommonLib.Models;
+
+namespace SharpHoundCommonLib.Services;
+
+public sealed class MetricRouter(IReadOnlyList<MetricDefinition> definitions, IEnumerable<IMetricSink> sinks) : IMetricRouter {
+    private readonly int _definitionCount = definitions.Count;
+    private readonly IMetricSink[] _sinks = sinks.ToArray();
+
+    // TODO MC: See if this boosts runtime; may need more metrics to see an appreciable difference.
+    // During JIT compilation, inlining can remove some of the overhead of calling this method.
+    // [MethodImpl(MethodImplOptions.AggressiveInlining)]
+    public void Observe(int definitionId, double value, string[] labelValues) {
+        // Check that the metric is registered; this handles negative IDs and IDs greater than those registered.
+        if ((uint)definitionId >= (uint)_definitionCount)
+            return;
+
+        var obs = new MetricObservation.DoubleMetricObservation(definitionId, value, labelValues);
+
+        foreach (var sink in _sinks)
+            sink.Observe(obs);
+    }
+
+    public void Flush() {
+        foreach (var sink in _sinks)
+            sink.Flush();
+    }
+}
+
+public sealed class NoOpMetricRouter : IMetricRouter {
+    public static readonly NoOpMetricRouter Instance = new();
+    private NoOpMetricRouter() { }
+
+    public void Observe(int definitionId, double value, string[] labelValues) {
+        // intentionally empty
+    }
+
+    public void Flush() {
+        // intentionally empty
+    }
+}
diff --git a/src/CommonLib/Static/DefaultMetricRegistry.cs b/src/CommonLib/Static/DefaultMetricRegistry.cs
new file mode 100644
index 00000000..32356759
--- /dev/null
+++ b/src/CommonLib/Static/DefaultMetricRegistry.cs
@@ -0,0 +1,28 @@
+using SharpHoundCommonLib.Interfaces;
+using SharpHoundCommonLib.Models;
+
+namespace SharpHoundCommonLib.Static;
+
+public static class DefaultMetricRegistry {
+    public static void RegisterDefaultMetrics(this IMetricRegistry registry) {
+        // LDAP Metrics
+        registry.TryRegister(
+            new CounterDefinition(
+                Name: "ldap_total_requests",
+                LabelNames: ["processor"]),
+            out LdapMetrics.RequestsTotal);
+
+        registry.TryRegister(
+            new GaugeDefinition(
+                Name: "ldap_concurrent_requests",
+                LabelNames: ["processor"]),
+            out LdapMetrics.ConcurrentRequests);
+
+        registry.TryRegister(
+            new CumulativeHistogramDefinition(
+                Name: "ldap_request_duration_seconds",
+                InitBuckets: [0.1, 0.25, 0.5, 1, 2.5, 5],
+                LabelNames: ["processor"]),
+            out LdapMetrics.RequestLatency);
+    }
+}
\ No newline at end of file
diff --git a/src/CommonLib/Static/Metrics.cs b/src/CommonLib/Static/Metrics.cs
new file mode 100644
index 00000000..63be844d
--- /dev/null
+++ b/src/CommonLib/Static/Metrics.cs
@@ -0,0 +1,24 @@
+using SharpHoundCommonLib.Interfaces;
+using SharpHoundCommonLib.Services;
+
+namespace SharpHoundCommonLib.Static;
+
+public static class Metrics {
+    private static IMetricFactory _factory = NoOpMetricFactory.Instance;
+
+    public static IMetricFactory Factory {
+        get => _factory;
+        set => _factory = value ?? 
NoOpMetricFactory.Instance; + } +} + + +public static class MetricId { + public const int InvalidId = -1; +} + +public static class LdapMetrics { + public static int RequestLatency = MetricId.InvalidId; + public static int ConcurrentRequests = MetricId.InvalidId; + public static int RequestsTotal = MetricId.InvalidId; +} \ No newline at end of file From 43d4aa7e7e8b9470708efc8341b759000c11f103 Mon Sep 17 00:00:00 2001 From: Michael Cuomo Date: Wed, 31 Dec 2025 11:36:22 -0500 Subject: [PATCH 06/17] wip: adding some ldap metrics to LdapUtils and LdapConnectionPool --- src/CommonLib/AdaptiveTimeout.cs | 20 +++++---- src/CommonLib/ExecutionTimeSampler.cs | 11 +++-- src/CommonLib/LdapConnectionPool.cs | 44 ++++++++++++++++++- src/CommonLib/LdapUtils.cs | 15 ++++++- src/CommonLib/Static/DefaultMetricRegistry.cs | 24 +++++++--- src/CommonLib/Static/Metrics.cs | 8 +++- test/unit/CommonLibHelperTests.cs | 4 +- test/unit/Utils.cs | 6 +++ 8 files changed, 108 insertions(+), 24 deletions(-) diff --git a/src/CommonLib/AdaptiveTimeout.cs b/src/CommonLib/AdaptiveTimeout.cs index 409266c7..6fcd1969 100644 --- a/src/CommonLib/AdaptiveTimeout.cs +++ b/src/CommonLib/AdaptiveTimeout.cs @@ -62,14 +62,15 @@ public void ClearSamples() { /// /// /// + /// A method that is used to observe the latency of the request. /// Returns a Fail result if a task runs longer than its budgeted time. - public async Task> ExecuteWithTimeout(Func func, CancellationToken parentToken = default) { + public async Task> ExecuteWithTimeout(Func func, CancellationToken parentToken = default, Action latencyObservation = null) { DateTime startTime = default; var result = await Timeout.ExecuteWithTimeout(GetAdaptiveTimeout(), (timeoutToken) => _sampler.SampleExecutionTime(() => { startTime = DateTime.Now; // for ordinal tracking; see use in TimeSpikeSafetyValve return func(timeoutToken); - }), parentToken); + }, latencyObservation), parentToken); TimeSpikeSafetyValve(result.IsSuccess, startTime); return result; } @@ -84,14 +85,15 @@ public async Task> ExecuteWithTimeout(Func fu /// /// /// + /// A method that is used to observe the latency of the request. /// Returns a Fail result if a task runs longer than its budgeted time. - public async Task ExecuteWithTimeout(Action func, CancellationToken parentToken = default) { + public async Task ExecuteWithTimeout(Action func, CancellationToken parentToken = default, Action latencyObservation = null) { DateTime startTime = default; var result = await Timeout.ExecuteWithTimeout(GetAdaptiveTimeout(), (timeoutToken) => _sampler.SampleExecutionTime(() => { startTime = DateTime.Now; // for ordinal tracking; see use in TimeSpikeSafetyValve func(timeoutToken); - }), parentToken); + }, latencyObservation), parentToken); TimeSpikeSafetyValve(result.IsSuccess, startTime); return result; } @@ -107,14 +109,15 @@ public async Task ExecuteWithTimeout(Action func, Can /// /// /// + /// A method that is used to observe the latency of the request. /// Returns a Fail result if a task runs longer than its budgeted time. 
- public async Task> ExecuteWithTimeout(Func> func, CancellationToken parentToken = default) { + public async Task> ExecuteWithTimeout(Func> func, CancellationToken parentToken = default, Action latencyObservation = null) { DateTime startTime = default; var result = await Timeout.ExecuteWithTimeout(GetAdaptiveTimeout(), (timeoutToken) => _sampler.SampleExecutionTime(() => { startTime = DateTime.Now; // for ordinal tracking; see use in TimeSpikeSafetyValve return func(timeoutToken); - }), parentToken); + }, latencyObservation), parentToken); TimeSpikeSafetyValve(result.IsSuccess, startTime); return result; } @@ -129,14 +132,15 @@ public async Task> ExecuteWithTimeout(Func /// /// + /// A method that is used to observe the latency of the request. /// Returns a Fail result if a task runs longer than its budgeted time. - public async Task ExecuteWithTimeout(Func func, CancellationToken parentToken = default) { + public async Task ExecuteWithTimeout(Func func, CancellationToken parentToken = default, Action latencyObservation = null) { DateTime startTime = default; var result = await Timeout.ExecuteWithTimeout(GetAdaptiveTimeout(), (timeoutToken) => _sampler.SampleExecutionTime(() => { startTime = DateTime.Now; // for ordinal tracking; see use in TimeSpikeSafetyValve return func(timeoutToken); - }), parentToken); + }, latencyObservation), parentToken); TimeSpikeSafetyValve(result.IsSuccess, startTime); return result; } diff --git a/src/CommonLib/ExecutionTimeSampler.cs b/src/CommonLib/ExecutionTimeSampler.cs index 74e0e4bd..ff5875e6 100644 --- a/src/CommonLib/ExecutionTimeSampler.cs +++ b/src/CommonLib/ExecutionTimeSampler.cs @@ -43,35 +43,38 @@ public double StandardDeviation() { public double Average() => _samples.Average(); - public async Task SampleExecutionTime(Func> func) { + public async Task SampleExecutionTime(Func> func, Action latencyObservation = null) { var stopwatch = Stopwatch.StartNew(); var result = await func.Invoke(); stopwatch.Stop(); + latencyObservation?.Invoke(stopwatch.ElapsedMilliseconds); AddTimeSample(stopwatch.Elapsed); return result; } - public async Task SampleExecutionTime(Func func) { + public async Task SampleExecutionTime(Func func, Action latencyObservation = null) { var stopwatch = Stopwatch.StartNew(); await func.Invoke(); stopwatch.Stop(); AddTimeSample(stopwatch.Elapsed); } - public T SampleExecutionTime(Func func) { + public T SampleExecutionTime(Func func, Action latencyObservation = null) { var stopwatch = Stopwatch.StartNew(); var result = func.Invoke(); stopwatch.Stop(); + latencyObservation?.Invoke(stopwatch.ElapsedMilliseconds); AddTimeSample(stopwatch.Elapsed); return result; } - public void SampleExecutionTime(Action func) { + public void SampleExecutionTime(Action func, Action latencyObservation = null) { var stopwatch = Stopwatch.StartNew(); func.Invoke(); stopwatch.Stop(); + latencyObservation?.Invoke(stopwatch.ElapsedMilliseconds); AddTimeSample(stopwatch.Elapsed); } diff --git a/src/CommonLib/LdapConnectionPool.cs b/src/CommonLib/LdapConnectionPool.cs index 8e373e5c..9344392d 100644 --- a/src/CommonLib/LdapConnectionPool.cs +++ b/src/CommonLib/LdapConnectionPool.cs @@ -11,8 +11,10 @@ using Microsoft.Extensions.Logging; using SharpHoundCommonLib.Enums; using SharpHoundCommonLib.Exceptions; +using SharpHoundCommonLib.Interfaces; using SharpHoundCommonLib.LDAPQueries; using SharpHoundCommonLib.Processors; +using SharpHoundCommonLib.Static; using SharpHoundRPC.NetAPINative; using SharpHoundRPC.PortScanner; @@ -36,12 +38,15 @@ internal class 
LdapConnectionPool : IDisposable { private const int BackoffDelayMultiplier = 2; private const int MaxRetries = 3; private static readonly ConcurrentDictionary DCInfoCache = new(); + + // Metrics + private readonly IMetricRouter _metric; // Tracks domains we know we've determined we shouldn't try to connect to private static readonly ConcurrentHashSet _excludedDomains = new(); public LdapConnectionPool(string identifier, string poolIdentifier, LdapConfig config, - IPortScanner scanner = null, NativeMethods nativeMethods = null, ILogger log = null) { + IPortScanner scanner = null, NativeMethods nativeMethods = null, ILogger log = null, IMetricRouter metric = null) { _connections = new ConcurrentBag(); _globalCatalogConnection = new ConcurrentBag(); //TODO: Re-enable this once we track down the semaphore deadlock @@ -56,6 +61,7 @@ public LdapConnectionPool(string identifier, string poolIdentifier, LdapConfig c _poolIdentifier = poolIdentifier; _ldapConfig = config; _log = log ?? Logging.LogProvider.CreateLogger("LdapConnectionPool"); + _metric = metric ?? Metrics.Factory.CreateMetricRouter(); _portScanner = scanner ?? new PortScanner(); _nativeMethods = nativeMethods ?? new NativeMethods(); _queryAdaptiveTimeout = new AdaptiveTimeout(maxTimeout: TimeSpan.FromMinutes(2), Logging.LogProvider.CreateLogger("LdapQuery"), useAdaptiveTimeout: false); @@ -72,6 +78,8 @@ public LdapConnectionPool(string identifier, string poolIdentifier, LdapConfig c return await GetConnectionAsync(); } + + private void LatencyObservation(double latency) => _metric.Observe(LdapMetricDefinitions.RequestLatency, latency, [nameof(LdapConnectionPool), _poolIdentifier]); public async IAsyncEnumerable> Query(LdapQueryParameters queryParameters, [EnumeratorCancellation] CancellationToken cancellationToken = new()) { @@ -114,11 +122,13 @@ public async IAsyncEnumerable> Query(LdapQueryParam querySuccess = true; } else if (queryRetryCount == MaxRetries) { + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); tempResult = LdapResult.Fail($"Failed to get a response after {MaxRetries} attempts", queryParameters); } else { + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); queryRetryCount++; } } @@ -134,6 +144,7 @@ public async IAsyncEnumerable> Query(LdapQueryParam * Release our connection in a faulted state since the connection is defunct. 
Attempt to get a new connection to any server in the domain * since non-paged queries do not require same server connections */ + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); queryRetryCount++; _log.LogDebug("Query - Attempting to recover from ServerDown for query {Info} (Attempt {Count})", queryParameters.GetQueryInfo(), queryRetryCount); @@ -167,6 +178,7 @@ public async IAsyncEnumerable> Query(LdapQueryParam * If we get a busy error, we want to do an exponential backoff, but maintain the current connection * The expectation is that given enough time, the server should stop being busy and service our query appropriately */ + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); busyRetryCount++; _log.LogDebug("Query - Executing busy backoff for query {Info} (Attempt {Count})", queryParameters.GetQueryInfo(), busyRetryCount); @@ -177,6 +189,7 @@ public async IAsyncEnumerable> Query(LdapQueryParam /* * Treat a timeout as a busy error */ + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); busyRetryCount++; _log.LogDebug("Query - Timeout: Executing busy backoff for query {Info} (Attempt {Count})", queryParameters.GetQueryInfo(), busyRetryCount); @@ -187,6 +200,7 @@ public async IAsyncEnumerable> Query(LdapQueryParam /* * This is our fallback catch. If our retry counts have been exhausted this will trigger and break us out of our loop */ + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); tempResult = LdapResult.Fail( $"Query - Caught unrecoverable ldap exception: {le.Message} (ServerMessage: {le.ServerErrorMessage}) (ErrorCode: {le.ErrorCode})", queryParameters); @@ -195,6 +209,7 @@ public async IAsyncEnumerable> Query(LdapQueryParam /* * Generic exception handling for unforeseen circumstances */ + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); tempResult = LdapResult.Fail($"Query - Caught unrecoverable exception: {e.Message}", queryParameters); @@ -280,11 +295,13 @@ public async IAsyncEnumerable> PagedQuery(LdapQuery queryRetryCount = 0; } else if (queryRetryCount == MaxRetries) { + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); tempResult = LdapResult.Fail( $"PagedQuery - Failed to get a response after {MaxRetries} attempts", queryParameters); } else { + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); queryRetryCount++; } } @@ -299,6 +316,7 @@ public async IAsyncEnumerable> PagedQuery(LdapQuery * Release our connection in a faulted state since the connection is defunct. * Paged queries require a connection to be made to the same server which we started the paged query on */ + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); if (serverName == null) { _log.LogError( "PagedQuery - Received server down exception without a known servername. 
Unable to generate new connection\n{Info}", @@ -338,6 +356,7 @@ public async IAsyncEnumerable> PagedQuery(LdapQuery * If we get a busy error, we want to do an exponential backoff, but maintain the current connection * The expectation is that given enough time, the server should stop being busy and service our query appropriately */ + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); busyRetryCount++; _log.LogDebug("PagedQuery - Executing busy backoff for query {Info} (Attempt {Count})", queryParameters.GetQueryInfo(), busyRetryCount); @@ -348,6 +367,7 @@ public async IAsyncEnumerable> PagedQuery(LdapQuery /* * Treat a timeout as a busy error */ + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); busyRetryCount++; _log.LogDebug("PagedQuery - Timeout: Executing busy backoff for query {Info} (Attempt {Count})", queryParameters.GetQueryInfo(), busyRetryCount); @@ -355,11 +375,13 @@ public async IAsyncEnumerable> PagedQuery(LdapQuery await Task.Delay(backoffDelay, cancellationToken); } catch (LdapException le) { + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); tempResult = LdapResult.Fail( $"PagedQuery - Caught unrecoverable ldap exception: {le.Message} (ServerMessage: {le.ServerErrorMessage}) (ErrorCode: {le.ErrorCode})", queryParameters, le.ErrorCode); } catch (Exception e) { + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); tempResult = LdapResult.Fail($"PagedQuery - Caught unrecoverable exception: {e.Message}", queryParameters); @@ -499,6 +521,7 @@ public async IAsyncEnumerable> RangedRetrieval(string distinguish response = await SendRequestWithTimeout(connectionWrapper.Connection, searchRequest, _rangedRetrievalAdaptiveTimeout); } catch (LdapException le) when (le.ErrorCode == (int)ResultCode.Busy && busyRetryCount < MaxRetries) { + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); busyRetryCount++; _log.LogDebug("RangedRetrieval - Executing busy backoff for query {Info} (Attempt {Count})", queryParameters.GetQueryInfo(), busyRetryCount); @@ -509,6 +532,7 @@ public async IAsyncEnumerable> RangedRetrieval(string distinguish /* * Treat a timeout as a busy error */ + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); busyRetryCount++; _log.LogDebug("RangedRetrieval - Timeout: Executing busy backoff for query {Info} (Attempt {Count})", queryParameters.GetQueryInfo(), busyRetryCount); @@ -517,6 +541,7 @@ public async IAsyncEnumerable> RangedRetrieval(string distinguish } catch (LdapException le) when (le.ErrorCode == (int)LdapErrorCodes.ServerDown && queryRetryCount < MaxRetries) { + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); queryRetryCount++; _log.LogDebug( "RangedRetrieval - Attempting to recover from ServerDown for query {Info} (Attempt {Count})", @@ -548,11 +573,13 @@ public async IAsyncEnumerable> RangedRetrieval(string distinguish } } catch (LdapException le) { + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); tempResult = LdapResult.Fail( $"Caught unrecoverable ldap exception: {le.Message} (ServerMessage: {le.ServerErrorMessage}) (ErrorCode: {le.ErrorCode})", queryParameters, le.ErrorCode); } catch (Exception e) { + 
_metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); tempResult = LdapResult.Fail($"Caught unrecoverable exception: {e.Message}", queryParameters); } @@ -1047,11 +1074,24 @@ private SearchRequest CreateSearchRequest(string distinguishedName, string ldapF } private async Task SendRequestWithTimeout(LdapConnection connection, SearchRequest request, AdaptiveTimeout adaptiveTimeout) { + // Prerequest metrics + var concurrentRequests = Interlocked.Increment(ref LdapMetrics.InFlightRequests); + _metric.Observe(LdapMetricDefinitions.ConcurrentRequests, concurrentRequests, + [nameof(LdapConnectionPool), _poolIdentifier]); + // Add padding to account for network latency and processing overhead const int TimeoutPaddingSeconds = 3; var timeout = adaptiveTimeout.GetAdaptiveTimeout(); var timeoutWithPadding = timeout + TimeSpan.FromSeconds(TimeoutPaddingSeconds); - var result = await adaptiveTimeout.ExecuteWithTimeout((_) => connection.SendRequestAsync(request, timeoutWithPadding)); + var result = await adaptiveTimeout.ExecuteWithTimeout((_) => connection.SendRequestAsync(request, timeoutWithPadding), latencyObservation: LatencyObservation); + + // Postrequest metrics + concurrentRequests = Interlocked.Decrement(ref LdapMetrics.InFlightRequests); + _metric.Observe(LdapMetricDefinitions.ConcurrentRequests, concurrentRequests, + [nameof(LdapConnectionPool), _poolIdentifier]); + _metric.Observe(LdapMetricDefinitions.RequestsTotal, 1, + [nameof(LdapConnectionPool), _poolIdentifier]); + if (result.IsSuccess) return (SearchResponse)result.Value; else diff --git a/src/CommonLib/LdapUtils.cs b/src/CommonLib/LdapUtils.cs index 14612da1..13ae3e2d 100644 --- a/src/CommonLib/LdapUtils.cs +++ b/src/CommonLib/LdapUtils.cs @@ -15,9 +15,11 @@ using Microsoft.Extensions.Logging; using SharpHoundCommonLib.DirectoryObjects; using SharpHoundCommonLib.Enums; +using SharpHoundCommonLib.Interfaces; using SharpHoundCommonLib.LDAPQueries; using SharpHoundCommonLib.OutputTypes; using SharpHoundCommonLib.Processors; +using SharpHoundCommonLib.Static; using SharpHoundRPC.NetAPINative; using SharpHoundRPC.PortScanner; using Domain = System.DirectoryServices.ActiveDirectory.Domain; @@ -47,6 +49,9 @@ private readonly ConcurrentDictionary private readonly ConcurrentDictionary _distinguishedNameCache = new(StringComparer.OrdinalIgnoreCase); + // Metrics + private readonly IMetricRouter _metric; + private readonly ILogger _log; private readonly IPortScanner _portScanner; private readonly NativeMethods _nativeMethods; @@ -77,13 +82,15 @@ public LdapUtils() { _nativeMethods = new NativeMethods(); _portScanner = new PortScanner(); _log = Logging.LogProvider.CreateLogger("LDAPUtils"); + _metric = Metrics.Factory.CreateMetricRouter(); _connectionPool = new ConnectionPoolManager(_ldapConfig, _log); } - public LdapUtils(NativeMethods nativeMethods = null, PortScanner scanner = null, ILogger log = null) { + public LdapUtils(NativeMethods nativeMethods = null, PortScanner scanner = null, ILogger log = null, IMetricRouter metric = null) { _nativeMethods = nativeMethods ?? new NativeMethods(); _portScanner = scanner ?? new PortScanner(); _log = log ?? Logging.LogProvider.CreateLogger("LDAPUtils"); + _metric = metric ?? 
Metrics.Factory.CreateMetricRouter(); _connectionPool = new ConnectionPoolManager(_ldapConfig, scanner: _portScanner); } @@ -126,6 +133,7 @@ public IAsyncEnumerable> PagedQuery(LdapQueryParame var result = await LookupSidType(identifier, objectDomain); if (!result.Success) { _unresolvablePrincipals.Add(identifier); + _metric.Observe(LdapMetricDefinitions.UnresolvablePrincipals, 1, [nameof(LdapUtils)]); } return (result.Success, new TypedPrincipal(identifier, result.Type)); @@ -134,6 +142,7 @@ public IAsyncEnumerable> PagedQuery(LdapQueryParame var (success, type) = await LookupGuidType(identifier, objectDomain); if (!success) { _unresolvablePrincipals.Add(identifier); + _metric.Observe(LdapMetricDefinitions.UnresolvablePrincipals, 1, [nameof(LdapUtils)]); } return (success, new TypedPrincipal(identifier, type)); @@ -965,6 +974,7 @@ public async Task IsDomainController(string computerObjectId, string domai } catch { _unresolvablePrincipals.Add(distinguishedName); + _metric.Observe(LdapMetricDefinitions.UnresolvablePrincipals, 1, [nameof(LdapUtils)]); return (false, default); } } @@ -1129,6 +1139,9 @@ public void ResetUtils() { _domainControllers = new ConcurrentHashSet(StringComparer.OrdinalIgnoreCase); _connectionPool?.Dispose(); _connectionPool = new ConnectionPoolManager(_ldapConfig, scanner: _portScanner); + + // Metrics + LdapMetrics.InFlightRequests = 0; } private IDirectoryObject CreateDirectoryEntry(string path) { diff --git a/src/CommonLib/Static/DefaultMetricRegistry.cs b/src/CommonLib/Static/DefaultMetricRegistry.cs index 32356759..c55bd85c 100644 --- a/src/CommonLib/Static/DefaultMetricRegistry.cs +++ b/src/CommonLib/Static/DefaultMetricRegistry.cs @@ -9,20 +9,32 @@ public static void RegisterDefaultMetrics(this IMetricRegistry registry) { registry.TryRegister( new CounterDefinition( Name: "ldap_total_requests", - LabelNames: ["processor"]), - out LdapMetrics.RequestsTotal); + LabelNames: ["location", "identifier"]), + out LdapMetricDefinitions.RequestsTotal); + + registry.TryRegister( + new CounterDefinition( + Name: "ldap_failed_requests", + LabelNames: ["location", "identifier"]), + out LdapMetricDefinitions.FailedRequests); registry.TryRegister( new GaugeDefinition( Name: "ldap_concurrent_requests", - LabelNames: ["processor"]), - out LdapMetrics.ConcurrentRequests); + LabelNames: ["location", "identifier"]), + out LdapMetricDefinitions.ConcurrentRequests); registry.TryRegister( new CumulativeHistogramDefinition( Name: "ldap_request_duration_seconds", InitBuckets: [0.1, 0.25, 0.5, 1, 2.5, 5], - LabelNames: ["processor"]), - out LdapMetrics.RequestLatency); + LabelNames: ["location", "identifier"]), + out LdapMetricDefinitions.RequestLatency); + + registry.TryRegister( + new CounterDefinition( + Name: "ldap_total_unresolvable_principals", + LabelNames: ["location"]), + out LdapMetricDefinitions.UnresolvablePrincipals); } } \ No newline at end of file diff --git a/src/CommonLib/Static/Metrics.cs b/src/CommonLib/Static/Metrics.cs index 63be844d..59d996cf 100644 --- a/src/CommonLib/Static/Metrics.cs +++ b/src/CommonLib/Static/Metrics.cs @@ -12,13 +12,19 @@ public static IMetricFactory Factory { } } +public static class LdapMetrics { + public static int InFlightRequests; +} + public static class MetricId { public const int InvalidId = -1; } -public static class LdapMetrics { +public static class LdapMetricDefinitions { public static int RequestLatency = MetricId.InvalidId; public static int ConcurrentRequests = MetricId.InvalidId; public static int RequestsTotal = 
MetricId.InvalidId; + public static int FailedRequests = MetricId.InvalidId; + public static int UnresolvablePrincipals = MetricId.InvalidId; } \ No newline at end of file diff --git a/test/unit/CommonLibHelperTests.cs b/test/unit/CommonLibHelperTests.cs index 0cad80e1..f2e4c0c1 100644 --- a/test/unit/CommonLibHelperTests.cs +++ b/test/unit/CommonLibHelperTests.cs @@ -302,7 +302,7 @@ public void DomainNameToDistinguishedName_DotsBecomeDcComponents() Assert.Equal("DC=test,DC=local", result); } - [Theory] + [WindowsOnlyTheory] [InlineData("S-1-5-32-544", "\\01\\02\\00\\00\\00\\00\\00\\05\\20\\00\\00\\00\\20\\02\\00\\00")] public void ConvertSidToHexSid_ValidSid_MatchesSecurityIdentifierBinaryForm(string sid, string expectedHexSid) { @@ -322,7 +322,7 @@ static string BuildExpectedHexSid(string sid) } } - [Fact] + [WindowsOnlyFact] public void ConvertSidToHexSid_InvalidSid_Throws() { Assert.ThrowsAny(() => Helpers.ConvertSidToHexSid("NOT-A-SID")); diff --git a/test/unit/Utils.cs b/test/unit/Utils.cs index 10e9d90a..3ae8fd08 100644 --- a/test/unit/Utils.cs +++ b/test/unit/Utils.cs @@ -90,4 +90,10 @@ public WindowsOnlyFact() if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) Skip = "Ignore on non-Windows platforms"; } } + + public sealed class WindowsOnlyTheory : TheoryAttribute { + public WindowsOnlyTheory() { + if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) Skip = "Ignore on non-Windows platforms"; + } + } } \ No newline at end of file From 5f21f6387b9fbad5f0c50b1833232a45527f7feb Mon Sep 17 00:00:00 2001 From: Michael Cuomo Date: Wed, 31 Dec 2025 15:48:39 -0500 Subject: [PATCH 07/17] feat: add FileMetricSink, MetricsFlushTimer, and MetricWriter. Update LabelValues to a struct --- src/CommonLib/Interfaces/IMetricRouter.cs | 4 +- src/CommonLib/Interfaces/IMetricWriter.cs | 17 ++-- src/CommonLib/LdapConnectionPool.cs | 67 ++++++++----- src/CommonLib/LdapUtils.cs | 7 +- src/CommonLib/Models/FileMetricSinkOptions.cs | 9 ++ src/CommonLib/Models/MetricDefinition.cs | 25 +++++ src/CommonLib/Models/MetricObservation.cs | 2 +- src/CommonLib/Services/FileMetricSink.cs | 94 +++++++++++++++++++ src/CommonLib/Services/MetricAggregator.cs | 15 ++- src/CommonLib/Services/MetricRouter.cs | 4 +- src/CommonLib/Services/MetricWriter.cs | 14 +++ src/CommonLib/Services/MetricsFlushTimer.cs | 33 +++++++ 12 files changed, 250 insertions(+), 41 deletions(-) create mode 100644 src/CommonLib/Models/FileMetricSinkOptions.cs create mode 100644 src/CommonLib/Services/FileMetricSink.cs create mode 100644 src/CommonLib/Services/MetricWriter.cs create mode 100644 src/CommonLib/Services/MetricsFlushTimer.cs diff --git a/src/CommonLib/Interfaces/IMetricRouter.cs b/src/CommonLib/Interfaces/IMetricRouter.cs index 8a3b0099..c4489874 100644 --- a/src/CommonLib/Interfaces/IMetricRouter.cs +++ b/src/CommonLib/Interfaces/IMetricRouter.cs @@ -1,6 +1,8 @@ +using SharpHoundCommonLib.Models; + namespace SharpHoundCommonLib.Interfaces; public interface IMetricRouter { - void Observe(int definitionId, double value, string[] labelValues); + void Observe(int definitionId, double value, LabelValues labelValues); void Flush(); } \ No newline at end of file diff --git a/src/CommonLib/Interfaces/IMetricWriter.cs b/src/CommonLib/Interfaces/IMetricWriter.cs index a221f1cc..8c4c65cb 100644 --- a/src/CommonLib/Interfaces/IMetricWriter.cs +++ b/src/CommonLib/Interfaces/IMetricWriter.cs @@ -1,12 +1,17 @@ -using System.Collections.Concurrent; -using System.Threading.Tasks; +using System; +using System.Text; using 
SharpHoundCommonLib.Models; +using SharpHoundCommonLib.Services; namespace SharpHoundCommonLib.Interfaces; public interface IMetricWriter { - Task WriteAsync(ConcurrentDictionary metrics); - Task FlushGauge(double value); - Task FlushCounter(long value); - Task FlushCumulativeHistogram(long[] values, long count, double sum); + void StringBuilderAppendMetric( + StringBuilder builder, + MetricDefinition definition, + LabelValues labelValues, + MetricAggregator aggregator, + DateTimeOffset timestamp, + string timestampOutputString = "yyyy-MM-dd HH:mm:ss.fff" + ); } \ No newline at end of file diff --git a/src/CommonLib/LdapConnectionPool.cs b/src/CommonLib/LdapConnectionPool.cs index 9344392d..ac7f10cc 100644 --- a/src/CommonLib/LdapConnectionPool.cs +++ b/src/CommonLib/LdapConnectionPool.cs @@ -13,6 +13,7 @@ using SharpHoundCommonLib.Exceptions; using SharpHoundCommonLib.Interfaces; using SharpHoundCommonLib.LDAPQueries; +using SharpHoundCommonLib.Models; using SharpHoundCommonLib.Processors; using SharpHoundCommonLib.Static; using SharpHoundRPC.NetAPINative; @@ -79,7 +80,8 @@ public LdapConnectionPool(string identifier, string poolIdentifier, LdapConfig c return await GetConnectionAsync(); } - private void LatencyObservation(double latency) => _metric.Observe(LdapMetricDefinitions.RequestLatency, latency, [nameof(LdapConnectionPool), _poolIdentifier]); + private void LatencyObservation(double latency) => _metric.Observe(LdapMetricDefinitions.RequestLatency, latency, + new LabelValues([nameof(LdapConnectionPool), _poolIdentifier])); public async IAsyncEnumerable> Query(LdapQueryParameters queryParameters, [EnumeratorCancellation] CancellationToken cancellationToken = new()) { @@ -122,13 +124,15 @@ public async IAsyncEnumerable> Query(LdapQueryParam querySuccess = true; } else if (queryRetryCount == MaxRetries) { - _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, + new LabelValues([nameof(LdapConnectionPool), _poolIdentifier])); tempResult = LdapResult.Fail($"Failed to get a response after {MaxRetries} attempts", queryParameters); } else { - _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, + new LabelValues([nameof(LdapConnectionPool), _poolIdentifier])); queryRetryCount++; } } @@ -144,7 +148,8 @@ public async IAsyncEnumerable> Query(LdapQueryParam * Release our connection in a faulted state since the connection is defunct. 
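             * Each pass through this recovery path also records one ldap_failed_requests observation labelled with this pool's identifier.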
Attempt to get a new connection to any server in the domain * since non-paged queries do not require same server connections */ - _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, + new LabelValues([nameof(LdapConnectionPool), _poolIdentifier])); queryRetryCount++; _log.LogDebug("Query - Attempting to recover from ServerDown for query {Info} (Attempt {Count})", queryParameters.GetQueryInfo(), queryRetryCount); @@ -178,7 +183,8 @@ public async IAsyncEnumerable> Query(LdapQueryParam * If we get a busy error, we want to do an exponential backoff, but maintain the current connection * The expectation is that given enough time, the server should stop being busy and service our query appropriately */ - _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, + new LabelValues([nameof(LdapConnectionPool), _poolIdentifier])); busyRetryCount++; _log.LogDebug("Query - Executing busy backoff for query {Info} (Attempt {Count})", queryParameters.GetQueryInfo(), busyRetryCount); @@ -189,7 +195,8 @@ public async IAsyncEnumerable> Query(LdapQueryParam /* * Treat a timeout as a busy error */ - _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, + new LabelValues([nameof(LdapConnectionPool), _poolIdentifier])); busyRetryCount++; _log.LogDebug("Query - Timeout: Executing busy backoff for query {Info} (Attempt {Count})", queryParameters.GetQueryInfo(), busyRetryCount); @@ -200,7 +207,8 @@ public async IAsyncEnumerable> Query(LdapQueryParam /* * This is our fallback catch. 
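             * The failure is also counted against ldap_failed_requests before the failed result is returned.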
If our retry counts have been exhausted this will trigger and break us out of our loop */ - _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, + new LabelValues([nameof(LdapConnectionPool), _poolIdentifier])); tempResult = LdapResult.Fail( $"Query - Caught unrecoverable ldap exception: {le.Message} (ServerMessage: {le.ServerErrorMessage}) (ErrorCode: {le.ErrorCode})", queryParameters); @@ -209,7 +217,8 @@ public async IAsyncEnumerable> Query(LdapQueryParam /* * Generic exception handling for unforeseen circumstances */ - _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, + new LabelValues([nameof(LdapConnectionPool), _poolIdentifier])); tempResult = LdapResult.Fail($"Query - Caught unrecoverable exception: {e.Message}", queryParameters); @@ -295,13 +304,15 @@ public async IAsyncEnumerable> PagedQuery(LdapQuery queryRetryCount = 0; } else if (queryRetryCount == MaxRetries) { - _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, + new LabelValues([nameof(LdapConnectionPool), _poolIdentifier])); tempResult = LdapResult.Fail( $"PagedQuery - Failed to get a response after {MaxRetries} attempts", queryParameters); } else { - _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, + new LabelValues([nameof(LdapConnectionPool), _poolIdentifier])); queryRetryCount++; } } @@ -316,7 +327,8 @@ public async IAsyncEnumerable> PagedQuery(LdapQuery * Release our connection in a faulted state since the connection is defunct. * Paged queries require a connection to be made to the same server which we started the paged query on */ - _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, + new LabelValues([nameof(LdapConnectionPool), _poolIdentifier])); if (serverName == null) { _log.LogError( "PagedQuery - Received server down exception without a known servername. 
Unable to generate new connection\n{Info}", @@ -356,7 +368,8 @@ public async IAsyncEnumerable> PagedQuery(LdapQuery * If we get a busy error, we want to do an exponential backoff, but maintain the current connection * The expectation is that given enough time, the server should stop being busy and service our query appropriately */ - _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, + new LabelValues([nameof(LdapConnectionPool), _poolIdentifier])); busyRetryCount++; _log.LogDebug("PagedQuery - Executing busy backoff for query {Info} (Attempt {Count})", queryParameters.GetQueryInfo(), busyRetryCount); @@ -367,7 +380,8 @@ public async IAsyncEnumerable> PagedQuery(LdapQuery /* * Treat a timeout as a busy error */ - _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, + new LabelValues([nameof(LdapConnectionPool), _poolIdentifier])); busyRetryCount++; _log.LogDebug("PagedQuery - Timeout: Executing busy backoff for query {Info} (Attempt {Count})", queryParameters.GetQueryInfo(), busyRetryCount); @@ -375,13 +389,15 @@ public async IAsyncEnumerable> PagedQuery(LdapQuery await Task.Delay(backoffDelay, cancellationToken); } catch (LdapException le) { - _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, + new LabelValues([nameof(LdapConnectionPool), _poolIdentifier])); tempResult = LdapResult.Fail( $"PagedQuery - Caught unrecoverable ldap exception: {le.Message} (ServerMessage: {le.ServerErrorMessage}) (ErrorCode: {le.ErrorCode})", queryParameters, le.ErrorCode); } catch (Exception e) { - _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, + new LabelValues([nameof(LdapConnectionPool), _poolIdentifier])); tempResult = LdapResult.Fail($"PagedQuery - Caught unrecoverable exception: {e.Message}", queryParameters); @@ -521,7 +537,8 @@ public async IAsyncEnumerable> RangedRetrieval(string distinguish response = await SendRequestWithTimeout(connectionWrapper.Connection, searchRequest, _rangedRetrievalAdaptiveTimeout); } catch (LdapException le) when (le.ErrorCode == (int)ResultCode.Busy && busyRetryCount < MaxRetries) { - _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, + new LabelValues([nameof(LdapConnectionPool), _poolIdentifier])); busyRetryCount++; _log.LogDebug("RangedRetrieval - Executing busy backoff for query {Info} (Attempt {Count})", queryParameters.GetQueryInfo(), busyRetryCount); @@ -532,7 +549,8 @@ public async IAsyncEnumerable> RangedRetrieval(string distinguish /* * Treat a timeout as a busy error */ - _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, + new LabelValues([nameof(LdapConnectionPool), _poolIdentifier])); busyRetryCount++; _log.LogDebug("RangedRetrieval - Timeout: Executing busy backoff for query {Info} (Attempt {Count})", queryParameters.GetQueryInfo(), busyRetryCount); @@ -541,7 +559,8 @@ public async IAsyncEnumerable> RangedRetrieval(string distinguish } catch (LdapException le) when (le.ErrorCode == 
(int)LdapErrorCodes.ServerDown && queryRetryCount < MaxRetries) { - _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, + new LabelValues([nameof(LdapConnectionPool), _poolIdentifier])); queryRetryCount++; _log.LogDebug( "RangedRetrieval - Attempting to recover from ServerDown for query {Info} (Attempt {Count})", @@ -573,13 +592,15 @@ public async IAsyncEnumerable> RangedRetrieval(string distinguish } } catch (LdapException le) { - _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, + new LabelValues([nameof(LdapConnectionPool), _poolIdentifier])); tempResult = LdapResult.Fail( $"Caught unrecoverable ldap exception: {le.Message} (ServerMessage: {le.ServerErrorMessage}) (ErrorCode: {le.ErrorCode})", queryParameters, le.ErrorCode); } catch (Exception e) { - _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, [nameof(LdapConnectionPool), _poolIdentifier]); + _metric.Observe(LdapMetricDefinitions.FailedRequests, 1, + new LabelValues([nameof(LdapConnectionPool), _poolIdentifier])); tempResult = LdapResult.Fail($"Caught unrecoverable exception: {e.Message}", queryParameters); } @@ -1077,7 +1098,7 @@ private async Task SendRequestWithTimeout(LdapConnection connect // Prerequest metrics var concurrentRequests = Interlocked.Increment(ref LdapMetrics.InFlightRequests); _metric.Observe(LdapMetricDefinitions.ConcurrentRequests, concurrentRequests, - [nameof(LdapConnectionPool), _poolIdentifier]); + new LabelValues([nameof(LdapConnectionPool), _poolIdentifier])); // Add padding to account for network latency and processing overhead const int TimeoutPaddingSeconds = 3; @@ -1088,9 +1109,9 @@ private async Task SendRequestWithTimeout(LdapConnection connect // Postrequest metrics concurrentRequests = Interlocked.Decrement(ref LdapMetrics.InFlightRequests); _metric.Observe(LdapMetricDefinitions.ConcurrentRequests, concurrentRequests, - [nameof(LdapConnectionPool), _poolIdentifier]); + new LabelValues([nameof(LdapConnectionPool), _poolIdentifier])); _metric.Observe(LdapMetricDefinitions.RequestsTotal, 1, - [nameof(LdapConnectionPool), _poolIdentifier]); + new LabelValues([nameof(LdapConnectionPool), _poolIdentifier])); if (result.IsSuccess) return (SearchResponse)result.Value; diff --git a/src/CommonLib/LdapUtils.cs b/src/CommonLib/LdapUtils.cs index 13ae3e2d..12c199e5 100644 --- a/src/CommonLib/LdapUtils.cs +++ b/src/CommonLib/LdapUtils.cs @@ -17,6 +17,7 @@ using SharpHoundCommonLib.Enums; using SharpHoundCommonLib.Interfaces; using SharpHoundCommonLib.LDAPQueries; +using SharpHoundCommonLib.Models; using SharpHoundCommonLib.OutputTypes; using SharpHoundCommonLib.Processors; using SharpHoundCommonLib.Static; @@ -133,7 +134,7 @@ public IAsyncEnumerable> PagedQuery(LdapQueryParame var result = await LookupSidType(identifier, objectDomain); if (!result.Success) { _unresolvablePrincipals.Add(identifier); - _metric.Observe(LdapMetricDefinitions.UnresolvablePrincipals, 1, [nameof(LdapUtils)]); + _metric.Observe(LdapMetricDefinitions.UnresolvablePrincipals, 1, new LabelValues([nameof(LdapUtils)])); } return (result.Success, new TypedPrincipal(identifier, result.Type)); @@ -142,7 +143,7 @@ public IAsyncEnumerable> PagedQuery(LdapQueryParame var (success, type) = await LookupGuidType(identifier, objectDomain); if (!success) { _unresolvablePrincipals.Add(identifier); - 
_metric.Observe(LdapMetricDefinitions.UnresolvablePrincipals, 1, [nameof(LdapUtils)]); + _metric.Observe(LdapMetricDefinitions.UnresolvablePrincipals, 1, new LabelValues([nameof(LdapUtils)])); } return (success, new TypedPrincipal(identifier, type)); @@ -974,7 +975,7 @@ public async Task IsDomainController(string computerObjectId, string domai } catch { _unresolvablePrincipals.Add(distinguishedName); - _metric.Observe(LdapMetricDefinitions.UnresolvablePrincipals, 1, [nameof(LdapUtils)]); + _metric.Observe(LdapMetricDefinitions.UnresolvablePrincipals, 1, new LabelValues([nameof(LdapUtils)])); return (false, default); } } diff --git a/src/CommonLib/Models/FileMetricSinkOptions.cs b/src/CommonLib/Models/FileMetricSinkOptions.cs new file mode 100644 index 00000000..9db1c668 --- /dev/null +++ b/src/CommonLib/Models/FileMetricSinkOptions.cs @@ -0,0 +1,9 @@ +using System; + +namespace SharpHoundCommonLib.Models; + +public sealed class FileMetricSinkOptions { + public TimeSpan FlushInterval { get; set; } = TimeSpan.FromSeconds(10); + public string TimestampFormat { get; set; } = "yyyy-MM-dd HH:mm:ss.fff"; + public bool FlushWriter { get; set; } = true; +} \ No newline at end of file diff --git a/src/CommonLib/Models/MetricDefinition.cs b/src/CommonLib/Models/MetricDefinition.cs index c9c5941e..69c6c945 100644 --- a/src/CommonLib/Models/MetricDefinition.cs +++ b/src/CommonLib/Models/MetricDefinition.cs @@ -1,8 +1,33 @@ using System; using System.Collections.Generic; +using System.Text; namespace SharpHoundCommonLib.Models; +public readonly record struct LabelValues(string[] Values) { + public string ToDisplayString(IReadOnlyList labelNames) { + if (labelNames.Count == 0) + return string.Empty; + + if (labelNames.Count != Values.Length) + return $"{{Improper Observation Labels, LabelNamesCount: {labelNames.Count}, LabelValuesCount: {Values.Length}}}"; + + var sb = new StringBuilder(); + sb.Append('{'); + for (var i = 0; i < labelNames.Count; i++) { + if (i > 0) + sb.Append(','); + + sb.Append(labelNames[i]) + .Append(':') + .Append(Values[i]); + } + + sb.Append('}'); + return sb.ToString(); + } +}; + public abstract record MetricDefinition( string Name, IReadOnlyList LabelNames); diff --git a/src/CommonLib/Models/MetricObservation.cs b/src/CommonLib/Models/MetricObservation.cs index 7c499920..fdd49636 100644 --- a/src/CommonLib/Models/MetricObservation.cs +++ b/src/CommonLib/Models/MetricObservation.cs @@ -6,5 +6,5 @@ private MetricObservation() { } public readonly record struct DoubleMetricObservation( int DefinitionId, double Value, - string[] LabelsValues); + LabelValues LabelsValues); } diff --git a/src/CommonLib/Services/FileMetricSink.cs b/src/CommonLib/Services/FileMetricSink.cs new file mode 100644 index 00000000..1d0d4489 --- /dev/null +++ b/src/CommonLib/Services/FileMetricSink.cs @@ -0,0 +1,94 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text; +using SharpHoundCommonLib.Interfaces; +using SharpHoundCommonLib.Models; + +namespace SharpHoundCommonLib.Services; + +public sealed class FileMetricSink( + IReadOnlyList definitions, + TextWriter textWriter, + IMetricWriter metricWriter, + FileMetricSinkOptions options = null) + : IMetricSink, IDisposable { + private readonly TextWriter _textWriter = textWriter; + private readonly IMetricWriter _metricWriter = metricWriter; + private readonly FileMetricSinkOptions _options = options ?? 
new FileMetricSinkOptions(); + + + // metric state, using a lock rather than a concurrent dictionary protects both the dictionary, + // and the aggregators state. + private readonly MetricDefinition[] _definitions = definitions.ToArray(); + private readonly Dictionary<(int, LabelValues), MetricAggregator> _states = new(); + private readonly object _lock = new(); + + public FileMetricSink( + IReadOnlyList definitions, + string filePath, + IMetricWriter metricWriter, + FileMetricSinkOptions options = null) + : this( + definitions, + new StreamWriter( + File.Open(filePath, FileMode.Create, FileAccess.Write, FileShare.Read)), + metricWriter, + options) {} + + public void Observe(in MetricObservation.DoubleMetricObservation observation) { + var key = (observation.DefinitionId, observation.LabelsValues); + + lock (_lock) { + if (!_states.TryGetValue(key, out var aggregator)) { + aggregator = MetricAggregatorExtensions.Create(_definitions[observation.DefinitionId]); + _states[key] = aggregator; + } + + aggregator.Observe(observation.Value); + } + } + + private int EstimateSize() => _states.Count * 128; + + public void Flush() { + string output; + lock (_lock) { + var sb = new StringBuilder(EstimateSize()); + + var timestamp = DateTimeOffset.Now; + sb.Append("Metric Flush: ") + .Append(timestamp.ToString(_options.TimestampFormat)) + .AppendLine(); + sb.Append('=', 40).AppendLine(); + + // Must use this deconstruction for .Net Version + foreach (var kvp in _states) { + var definitionId = kvp.Key.Item1; + var labelValues = kvp.Key.Item2; + var aggregator = kvp.Value; + var definition = _definitions[definitionId]; + + _metricWriter.StringBuilderAppendMetric( + sb, + definition, + labelValues, + aggregator, + timestamp); + } + + sb.Append('=', 40).AppendLine().AppendLine().AppendLine().AppendLine().AppendLine(); + output = sb.ToString(); + } + + _textWriter.Write(output); + + if (_options.FlushWriter) + _textWriter.Flush(); + } + + public void Dispose() { + _textWriter.Dispose(); + } +} \ No newline at end of file diff --git a/src/CommonLib/Services/MetricAggregator.cs b/src/CommonLib/Services/MetricAggregator.cs index 084e13e0..3d0e2c33 100644 --- a/src/CommonLib/Services/MetricAggregator.cs +++ b/src/CommonLib/Services/MetricAggregator.cs @@ -18,21 +18,21 @@ public static MetricAggregator Create(MetricDefinition definition) => public abstract class MetricAggregator { public abstract void Observe(double value); - public abstract void Flush(IMetricWriter writer); + public abstract object Snapshot(); } public sealed class CounterAggregator : MetricAggregator { private long _value; public override void Observe(double value) => Interlocked.Add(ref _value, (long)value); - public override void Flush(IMetricWriter writer) => writer.FlushCounter(_value); + public override object Snapshot() => _value; } public sealed class GaugeAggregator : MetricAggregator { private double _value; public override void Observe(double value) => _value = value; - public override void Flush(IMetricWriter writer) => writer.FlushGauge(_value); + public override object Snapshot() => _value; } public sealed class CumulativeHistogramAggregator(double[] bounds) : MetricAggregator { @@ -52,13 +52,18 @@ public override void Observe(double value) { _sum += value; } - public override void Flush(IMetricWriter writer) { + public override object Snapshot() { long cumulative = 0; var cumulativeValues = new long[_bucketCounts.Length]; for (var i = 0; i < _bucketCounts.Length; i++) { cumulative += _bucketCounts[i]; cumulativeValues[i] = 
cumulative; } - writer.FlushCumulativeHistogram(cumulativeValues, _count, _sum); + + return new { + BucketCounts = cumulativeValues, + Count = _count, + Sum = _sum + }; } } \ No newline at end of file diff --git a/src/CommonLib/Services/MetricRouter.cs b/src/CommonLib/Services/MetricRouter.cs index b8af8f2b..a5405b27 100644 --- a/src/CommonLib/Services/MetricRouter.cs +++ b/src/CommonLib/Services/MetricRouter.cs @@ -12,7 +12,7 @@ public sealed class MetricRouter(IReadOnlyList definitions, IE // TODO MC: See if this boosts runtime, may need more metrics to see an appreciable difference. // In JIT Complication, can remove some of the overhead of calling // [MethodImpl(MethodImplOptions.AggressiveInlining)] - public void Observe(int definitionId, double value, string[] labelValues) { + public void Observe(int definitionId, double value, LabelValues labelValues) { // check to see if metric is registered, handles negative values and IDs greater than those registered. if ((uint)definitionId >= (uint)_definitionCount) return; @@ -33,7 +33,7 @@ public sealed class NoOpMetricRouter : IMetricRouter { public static readonly NoOpMetricRouter Instance = new(); private NoOpMetricRouter() { } - public void Observe(int definitionId, double value, string[] labelValues) { + public void Observe(int definitionId, double value, LabelValues labelValues) { // intentionally empty } diff --git a/src/CommonLib/Services/MetricWriter.cs b/src/CommonLib/Services/MetricWriter.cs new file mode 100644 index 00000000..8908ea9d --- /dev/null +++ b/src/CommonLib/Services/MetricWriter.cs @@ -0,0 +1,14 @@ +using System; +using System.Text; +using SharpHoundCommonLib.Interfaces; +using SharpHoundCommonLib.Models; + +namespace SharpHoundCommonLib.Services; + +public class MetricWriter : IMetricWriter { + public void StringBuilderAppendMetric(StringBuilder builder, MetricDefinition definition, LabelValues labelValues, + MetricAggregator aggregator, DateTimeOffset timestamp, string timestampOutputString = "yyyy-MM-dd HH:mm:ss.fff") { + var labelText = labelValues.ToDisplayString(definition.LabelNames); + builder.AppendFormat("{0} {1}{2} = {{{3}}}\n", timestamp.ToString(timestampOutputString), definition.Name, labelText, aggregator.Snapshot()); + } +} \ No newline at end of file diff --git a/src/CommonLib/Services/MetricsFlushTimer.cs b/src/CommonLib/Services/MetricsFlushTimer.cs new file mode 100644 index 00000000..a70c07cc --- /dev/null +++ b/src/CommonLib/Services/MetricsFlushTimer.cs @@ -0,0 +1,33 @@ +using System; +using System.Threading; + +namespace SharpHoundCommonLib.Services; + +public class MetricsFlushTimer : IDisposable { + private readonly Action _flush; + private readonly Timer _timer; + + + public MetricsFlushTimer( + Action flush, + TimeSpan interval) { + _flush = flush; + _timer = new Timer( + _ => FlushSafe(), + null, + interval, + interval); + } + + private void FlushSafe() { + try { + _flush(); + } catch { + // catch all exception and do not kill the process + } + } + + public void Dispose() { + _timer.Dispose(); + } +} \ No newline at end of file From def28951280cd77f4e9b1e06890bc9aabd23eb95 Mon Sep 17 00:00:00 2001 From: Michael Cuomo Date: Mon, 5 Jan 2026 10:24:50 -0500 Subject: [PATCH 08/17] feat: refine metric logic --- src/CommonLib/Interfaces/ILabelValuesCache.cs | 5 ++ src/CommonLib/Models/MetricObservation.cs | 2 +- .../Services/DefaultLabelValuesCache.cs | 39 +++++++++++++ src/CommonLib/Services/FileMetricSink.cs | 4 +- src/CommonLib/Services/MetricAggregator.cs | 20 ++----- 
src/CommonLib/Services/MetricRegistry.cs | 2 +- src/CommonLib/Services/MetricRouter.cs | 10 +++- src/CommonLib/Services/MetricWriter.cs | 58 ++++++++++++++++++- src/CommonLib/Static/DefaultMetricRegistry.cs | 4 +- 9 files changed, 121 insertions(+), 23 deletions(-) create mode 100644 src/CommonLib/Interfaces/ILabelValuesCache.cs create mode 100644 src/CommonLib/Services/DefaultLabelValuesCache.cs diff --git a/src/CommonLib/Interfaces/ILabelValuesCache.cs b/src/CommonLib/Interfaces/ILabelValuesCache.cs new file mode 100644 index 00000000..f6d5e817 --- /dev/null +++ b/src/CommonLib/Interfaces/ILabelValuesCache.cs @@ -0,0 +1,5 @@ +namespace SharpHoundCommonLib.Interfaces; + +public interface ILabelValuesCache { + string[] Intern(string[] values); +} \ No newline at end of file diff --git a/src/CommonLib/Models/MetricObservation.cs b/src/CommonLib/Models/MetricObservation.cs index fdd49636..7c499920 100644 --- a/src/CommonLib/Models/MetricObservation.cs +++ b/src/CommonLib/Models/MetricObservation.cs @@ -6,5 +6,5 @@ private MetricObservation() { } public readonly record struct DoubleMetricObservation( int DefinitionId, double Value, - LabelValues LabelsValues); + string[] LabelsValues); } diff --git a/src/CommonLib/Services/DefaultLabelValuesCache.cs b/src/CommonLib/Services/DefaultLabelValuesCache.cs new file mode 100644 index 00000000..11248042 --- /dev/null +++ b/src/CommonLib/Services/DefaultLabelValuesCache.cs @@ -0,0 +1,39 @@ +using System; +using System.Collections.Generic; +using SharpHoundCommonLib.Interfaces; + +namespace SharpHoundCommonLib.Services; + +public sealed class DefaultLabelValuesCache : ILabelValuesCache { + private readonly Dictionary _cache = new(); + + private readonly object _lock = new(); + private const char Separator = '\u001F'; // ascii unit separator + + public string[] Intern(string[] values) { + if (values == null || values.Length == 0) { + return []; + } + + var key = MakeKey(values); + + lock (_lock) { + if (_cache.TryGetValue(key, out var existing)) + return existing; + + var copy = new string[values.Length]; + Array.Copy(values, copy, values.Length); + _cache[key] = copy; + return copy; + } + } + + private static string MakeKey(string[] values) { + if (values.Length == 1) + return values[0]; + + return string.Join(Separator.ToString(), values); + } + + +} \ No newline at end of file diff --git a/src/CommonLib/Services/FileMetricSink.cs b/src/CommonLib/Services/FileMetricSink.cs index 1d0d4489..a12b87ec 100644 --- a/src/CommonLib/Services/FileMetricSink.cs +++ b/src/CommonLib/Services/FileMetricSink.cs @@ -22,7 +22,7 @@ public sealed class FileMetricSink( // metric state, using a lock rather than a concurrent dictionary protects both the dictionary, // and the aggregators state. 
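    // Aggregators are keyed by (definition id, label-value array); MetricRouter interns label arrays,
    // so equal label combinations share one array reference and therefore one aggregator entry.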
private readonly MetricDefinition[] _definitions = definitions.ToArray(); - private readonly Dictionary<(int, LabelValues), MetricAggregator> _states = new(); + private readonly Dictionary<(int, string[]), MetricAggregator> _states = new(); private readonly object _lock = new(); public FileMetricSink( @@ -73,7 +73,7 @@ public void Flush() { _metricWriter.StringBuilderAppendMetric( sb, definition, - labelValues, + new LabelValues(labelValues), aggregator, timestamp); } diff --git a/src/CommonLib/Services/MetricAggregator.cs b/src/CommonLib/Services/MetricAggregator.cs index 3d0e2c33..02c30cdb 100644 --- a/src/CommonLib/Services/MetricAggregator.cs +++ b/src/CommonLib/Services/MetricAggregator.cs @@ -35,6 +35,8 @@ public sealed class GaugeAggregator : MetricAggregator { public override object Snapshot() => _value; } +public record struct HistogramSnapshot(double[] Bounds, long[] Counts, long TotalCount, double Sum); + public sealed class CumulativeHistogramAggregator(double[] bounds) : MetricAggregator { private readonly long[] _bucketCounts = new long[bounds.Length + 1]; // Includes the Inf+ bucket private long _count; @@ -51,19 +53,9 @@ public override void Observe(double value) { _count++; _sum += value; } + + public override object Snapshot() => SnapshotHistogram(); - public override object Snapshot() { - long cumulative = 0; - var cumulativeValues = new long[_bucketCounts.Length]; - for (var i = 0; i < _bucketCounts.Length; i++) { - cumulative += _bucketCounts[i]; - cumulativeValues[i] = cumulative; - } - - return new { - BucketCounts = cumulativeValues, - Count = _count, - Sum = _sum - }; - } + public HistogramSnapshot SnapshotHistogram() => + new(bounds, _bucketCounts, _count, _sum); } \ No newline at end of file diff --git a/src/CommonLib/Services/MetricRegistry.cs b/src/CommonLib/Services/MetricRegistry.cs index 18cf6d11..fd154e79 100644 --- a/src/CommonLib/Services/MetricRegistry.cs +++ b/src/CommonLib/Services/MetricRegistry.cs @@ -25,5 +25,5 @@ public bool TryRegister(MetricDefinition definition, out int definitionId) { return true; } - internal void Seal() => _sealed = true; + public void Seal() => _sealed = true; } diff --git a/src/CommonLib/Services/MetricRouter.cs b/src/CommonLib/Services/MetricRouter.cs index a5405b27..86ba7d0a 100644 --- a/src/CommonLib/Services/MetricRouter.cs +++ b/src/CommonLib/Services/MetricRouter.cs @@ -5,9 +5,13 @@ namespace SharpHoundCommonLib.Services; -public sealed class MetricRouter(IReadOnlyList definitions, IEnumerable sinks) : IMetricRouter { +public sealed class MetricRouter( + IReadOnlyList definitions, + IEnumerable sinks, + ILabelValuesCache labelCache) : IMetricRouter { private readonly int _definitionCount = definitions.Count; private readonly IMetricSink[] _sinks = sinks.ToArray(); + private readonly ILabelValuesCache _labelCache = labelCache; // TODO MC: See if this boosts runtime, may need more metrics to see an appreciable difference. 
// In JIT Complication, can remove some of the overhead of calling @@ -17,7 +21,9 @@ public void Observe(int definitionId, double value, LabelValues labelValues) { if ((uint)definitionId >= (uint)_definitionCount) return; - var obs = new MetricObservation.DoubleMetricObservation(definitionId, value, labelValues); + var interned = _labelCache.Intern(labelValues.Values); + + var obs = new MetricObservation.DoubleMetricObservation(definitionId, value, interned); foreach (var sink in _sinks) sink.Observe(obs); diff --git a/src/CommonLib/Services/MetricWriter.cs b/src/CommonLib/Services/MetricWriter.cs index 8908ea9d..6c26531c 100644 --- a/src/CommonLib/Services/MetricWriter.cs +++ b/src/CommonLib/Services/MetricWriter.cs @@ -9,6 +9,62 @@ public class MetricWriter : IMetricWriter { public void StringBuilderAppendMetric(StringBuilder builder, MetricDefinition definition, LabelValues labelValues, MetricAggregator aggregator, DateTimeOffset timestamp, string timestampOutputString = "yyyy-MM-dd HH:mm:ss.fff") { var labelText = labelValues.ToDisplayString(definition.LabelNames); - builder.AppendFormat("{0} {1}{2} = {{{3}}}\n", timestamp.ToString(timestampOutputString), definition.Name, labelText, aggregator.Snapshot()); + if (aggregator is CumulativeHistogramAggregator cha) { + CumulativeHistogramAppend(builder, definition, labelText, cha, timestamp, timestampOutputString); + } else { + DefaultAppend(builder, definition, labelText, aggregator, timestamp, timestampOutputString); + } } + + private static void CumulativeHistogramAppend( + StringBuilder builder, + MetricDefinition definition, + string labelText, + CumulativeHistogramAggregator aggregator, + DateTimeOffset timestamp, + string timestampOutputString) { + long cumulativeValue = 0; + + var snapshot = aggregator.SnapshotHistogram(); + + for (var i = 0; i < snapshot.Bounds.Length; i++) { + cumulativeValue += snapshot.Counts[i]; + + builder.AppendFormat("{0} {1}{2}{{le=\"{3}\"}} = {4}\n", + timestamp.ToString(timestampOutputString), + definition.Name + "_bucket", + labelText, + snapshot.Bounds[i], + cumulativeValue); + } + + builder.AppendFormat("{0} {1}{2}{{le=\"+Inf\"}} = {3}\n", + timestamp.ToString(timestampOutputString), + definition.Name + "_bucket", + labelText, + snapshot.TotalCount); + + builder.AppendFormat("{0} {1}{2} = {3}\n", + timestamp.ToString(timestampOutputString), + definition.Name + "_sum", + labelText, + snapshot.Sum); + + builder.AppendFormat("{0} {1}{2} = {3}\n", + timestamp.ToString(timestampOutputString), + definition.Name + "_count", + labelText, + snapshot.TotalCount); + } + + + private static void DefaultAppend( + StringBuilder builder, + MetricDefinition definition, + string labelText, + MetricAggregator aggregator, + DateTimeOffset timestamp, + string timestampOutputString) => + builder.AppendFormat("{0} {1}{2} = {{{3}}}\n", timestamp.ToString(timestampOutputString), + definition.Name, labelText, aggregator.Snapshot()); } \ No newline at end of file diff --git a/src/CommonLib/Static/DefaultMetricRegistry.cs b/src/CommonLib/Static/DefaultMetricRegistry.cs index c55bd85c..d2cc8821 100644 --- a/src/CommonLib/Static/DefaultMetricRegistry.cs +++ b/src/CommonLib/Static/DefaultMetricRegistry.cs @@ -26,8 +26,8 @@ public static void RegisterDefaultMetrics(this IMetricRegistry registry) { registry.TryRegister( new CumulativeHistogramDefinition( - Name: "ldap_request_duration_seconds", - InitBuckets: [0.1, 0.25, 0.5, 1, 2.5, 5], + Name: "ldap_request_duration_milliseconds", + InitBuckets: [100, 250, 500, 1000, 2500, 5000], 
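+            // Bucket bounds are in milliseconds, matching the Stopwatch.ElapsedMilliseconds samples fed to the latency observation.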
LabelNames: ["location", "identifier"]), out LdapMetricDefinitions.RequestLatency); From 3928eac5c7f8a621e21f86d285bee637f56fd19d Mon Sep 17 00:00:00 2001 From: Michael Cuomo Date: Mon, 5 Jan 2026 14:06:51 -0500 Subject: [PATCH 09/17] test: fix AdaptiveTimeout LatencyObservation, Add Tests --- src/CommonLib/ExecutionTimeSampler.cs | 1 + test/unit/AdaptiveTimeoutTest.cs | 87 +++++++++++++++++++++++---- 2 files changed, 76 insertions(+), 12 deletions(-) diff --git a/src/CommonLib/ExecutionTimeSampler.cs b/src/CommonLib/ExecutionTimeSampler.cs index ff5875e6..a569f03c 100644 --- a/src/CommonLib/ExecutionTimeSampler.cs +++ b/src/CommonLib/ExecutionTimeSampler.cs @@ -57,6 +57,7 @@ public async Task SampleExecutionTime(Func func, Action latencyObs var stopwatch = Stopwatch.StartNew(); await func.Invoke(); stopwatch.Stop(); + latencyObservation?.Invoke(stopwatch.ElapsedMilliseconds); AddTimeSample(stopwatch.Elapsed); } diff --git a/test/unit/AdaptiveTimeoutTest.cs b/test/unit/AdaptiveTimeoutTest.cs index c105c23d..e4c81dfb 100644 --- a/test/unit/AdaptiveTimeoutTest.cs +++ b/test/unit/AdaptiveTimeoutTest.cs @@ -17,62 +17,115 @@ public AdaptiveTimeoutTest(ITestOutputHelper testOutputHelper) { [Fact] public async Task AdaptiveTimeout_GetAdaptiveTimeout_NotEnoughSamplesAsync() { + var observedLatency= -50.0; + var maxTimeout = TimeSpan.FromSeconds(1); var adaptiveTimeout = new AdaptiveTimeout(maxTimeout, new TestLogger(_testOutputHelper, Microsoft.Extensions.Logging.LogLevel.Trace), 10, 1000, 3); - await adaptiveTimeout.ExecuteWithTimeout(async (_) => await Task.Delay(50)); + await adaptiveTimeout.ExecuteWithTimeout(async (_) => await Task.Delay(50), latencyObservation: LatencyObservation); var adaptiveTimeoutResult = adaptiveTimeout.GetAdaptiveTimeout(); Assert.Equal(maxTimeout, adaptiveTimeoutResult); + Assert.InRange(observedLatency, 0.0, 55); + return; + + void LatencyObservation(double latency) { + observedLatency = latency; + } } [Fact] public async Task AdaptiveTimeout_GetAdaptiveTimeout_AdaptiveDisabled() { + var observedLatency1= -50.0; + var observedLatency2= -50.0; + var observedLatency3= -50.0; + var maxTimeout = TimeSpan.FromSeconds(1); var adaptiveTimeout = new AdaptiveTimeout(maxTimeout, new TestLogger(_testOutputHelper, Microsoft.Extensions.Logging.LogLevel.Trace), 10, 1000, 3, false); - await adaptiveTimeout.ExecuteWithTimeout(async (_) => await Task.Delay(50)); - await adaptiveTimeout.ExecuteWithTimeout(async (_) => await Task.Delay(50)); - await adaptiveTimeout.ExecuteWithTimeout(async (_) => await Task.Delay(50)); + await adaptiveTimeout.ExecuteWithTimeout(async (_) => await Task.Delay(50), latencyObservation: LatencyObservation1); + await adaptiveTimeout.ExecuteWithTimeout(async (_) => await Task.Delay(50), latencyObservation: LatencyObservation2); + await adaptiveTimeout.ExecuteWithTimeout(async (_) => await Task.Delay(50), latencyObservation: LatencyObservation3); var adaptiveTimeoutResult = adaptiveTimeout.GetAdaptiveTimeout(); Assert.Equal(maxTimeout, adaptiveTimeoutResult); + Assert.InRange(observedLatency1, 0.0, 55); + Assert.InRange(observedLatency2, 0.0, 55); + Assert.InRange(observedLatency3, 0.0, 55); + return; + + + void LatencyObservation1(double latency) { + observedLatency1 = latency; + } + void LatencyObservation2(double latency) { + observedLatency2 = latency; + } + void LatencyObservation3(double latency) { + observedLatency3 = latency; + } } [Fact] public async Task AdaptiveTimeout_GetAdaptiveTimeout() { + var observedLatency1= -50.0; + var observedLatency2= 
-50.0; + var observedLatency3= -50.0; var maxTimeout = TimeSpan.FromSeconds(1); var adaptiveTimeout = new AdaptiveTimeout(maxTimeout, new TestLogger(_testOutputHelper, Microsoft.Extensions.Logging.LogLevel.Trace), 10, 1000, 3); - await adaptiveTimeout.ExecuteWithTimeout(async (_) => await Task.Delay(40)); - await adaptiveTimeout.ExecuteWithTimeout(async (_) => await Task.Delay(50)); - await adaptiveTimeout.ExecuteWithTimeout(async (_) => await Task.Delay(60)); + await adaptiveTimeout.ExecuteWithTimeout(async (_) => await Task.Delay(40), latencyObservation: LatencyObservation1); + await adaptiveTimeout.ExecuteWithTimeout(async (_) => await Task.Delay(50), latencyObservation: LatencyObservation2); + await adaptiveTimeout.ExecuteWithTimeout(async (_) => await Task.Delay(60), latencyObservation: LatencyObservation3); var adaptiveTimeoutResult = adaptiveTimeout.GetAdaptiveTimeout(); Assert.True(adaptiveTimeoutResult < maxTimeout); + Assert.InRange(observedLatency1, 0.0, 45); + Assert.InRange(observedLatency2, 0.0, 55); + Assert.InRange(observedLatency3, 0.0, 65); + return; + + void LatencyObservation1(double latency) { + observedLatency1 = latency; + } + void LatencyObservation2(double latency) { + observedLatency2 = latency; + } + void LatencyObservation3(double latency) { + observedLatency3 = latency; + } } [Fact] public async Task AdaptiveTimeout_GetAdaptiveTimeout_TimeSpikeSafetyValve() { + var observations = new List(); var tasks = new List(); var maxTimeout = TimeSpan.FromSeconds(1); var numSamples = 30; var adaptiveTimeout = new AdaptiveTimeout(maxTimeout, new TestLogger(_testOutputHelper, Microsoft.Extensions.Logging.LogLevel.Trace), numSamples, 1000, 10); for (int i = 0; i < numSamples; i++) - tasks.Add(adaptiveTimeout.ExecuteWithTimeout(async (_) => await Task.Delay(10))); + tasks.Add(adaptiveTimeout.ExecuteWithTimeout(async (_) => await Task.Delay(10), latencyObservation: LatencyObservation)); await Task.WhenAll(tasks); for (int i = 0; i < 3; i++) - await adaptiveTimeout.ExecuteWithTimeout(async (_) => await Task.Delay(500)); + await adaptiveTimeout.ExecuteWithTimeout(async (_) => await Task.Delay(500), latencyObservation: LatencyObservation); var adaptiveTimeoutResult = adaptiveTimeout.GetAdaptiveTimeout(); Assert.Equal(maxTimeout, adaptiveTimeoutResult); + foreach (var t in observations) { + Assert.InRange(t, 0.0, 1000.1); + } + return; + + void LatencyObservation(double latency) => observations.Add(latency); } [Fact] public async Task AdaptiveTimeout_GetAdaptiveTimeout_TimeSpikeSafetyValve_IgnoreHiccup() { + var completedObservations = new List(); + var timeoutObservations = new List(); var tasks = new List(); var maxTimeout = TimeSpan.FromSeconds(1); var numSamples = 5; @@ -80,23 +133,33 @@ public async Task AdaptiveTimeout_GetAdaptiveTimeout_TimeSpikeSafetyValve_Ignore // Prepare our successful samples for (int i = 0; i < numSamples; i++) - tasks.Add(adaptiveTimeout.ExecuteWithTimeout((_) => Task.CompletedTask)); + tasks.Add(adaptiveTimeout.ExecuteWithTimeout((_) => Task.CompletedTask, latencyObservation: LatencyCompletedObservation)); await Task.WhenAll(tasks); // Add some timeout tasks that will resolve last for (int i = 0; i < 5; i++) - tasks.Add(adaptiveTimeout.ExecuteWithTimeout(async (_) => await Task.Delay(2000))); + tasks.Add(adaptiveTimeout.ExecuteWithTimeout(async (_) => await Task.Delay(2000), latencyObservation: LatencyTimeoutObservation)); // These tasks are added later but will resolve first for (int i = 0; i < 4; i++) - 
tasks.Add(adaptiveTimeout.ExecuteWithTimeout((_) => Task.CompletedTask)); + tasks.Add(adaptiveTimeout.ExecuteWithTimeout((_) => Task.CompletedTask, latencyObservation: LatencyCompletedObservation)); await Task.WhenAll(tasks); var adaptiveTimeoutResult = adaptiveTimeout.GetAdaptiveTimeout(); // So our time spike safety valve should ignore the hiccup, since later tasks have resolved // by the time the safety valve has triggered by the timeout tasks Assert.True(adaptiveTimeoutResult < maxTimeout); + foreach (var t in completedObservations) { + Assert.InRange(t, 0.0, 50.0); + } + foreach (var t in timeoutObservations) { + Assert.InRange(t, 0.0, 1000.1); + } + return; + + void LatencyCompletedObservation(double latency) => completedObservations.Add(latency); + void LatencyTimeoutObservation(double latency) => timeoutObservations.Add(latency); } [Fact] From 454af11e136174da7bd66343bf4817539c7df96d Mon Sep 17 00:00:00 2001 From: Michael Cuomo Date: Mon, 5 Jan 2026 16:26:40 -0500 Subject: [PATCH 10/17] test: fix AdaptiveTimeout LatencyObservation, Add MetricDefinitionTests.cs and DefaultLabelValuesCacheTests.cs --- .../Services/DefaultLabelValuesCache.cs | 9 +- test/unit/AdaptiveTimeoutTest.cs | 14 +-- test/unit/DefaultLabelValuesCacheTests.cs | 90 ++++++++++++++++ test/unit/MetricDefinitionTests.cs | 102 ++++++++++++++++++ 4 files changed, 202 insertions(+), 13 deletions(-) create mode 100644 test/unit/DefaultLabelValuesCacheTests.cs create mode 100644 test/unit/MetricDefinitionTests.cs diff --git a/src/CommonLib/Services/DefaultLabelValuesCache.cs b/src/CommonLib/Services/DefaultLabelValuesCache.cs index 11248042..cbb407a5 100644 --- a/src/CommonLib/Services/DefaultLabelValuesCache.cs +++ b/src/CommonLib/Services/DefaultLabelValuesCache.cs @@ -5,7 +5,7 @@ namespace SharpHoundCommonLib.Services; public sealed class DefaultLabelValuesCache : ILabelValuesCache { - private readonly Dictionary _cache = new(); + internal readonly Dictionary _cache = new(); private readonly object _lock = new(); private const char Separator = '\u001F'; // ascii unit separator @@ -28,11 +28,8 @@ public string[] Intern(string[] values) { } } - private static string MakeKey(string[] values) { - if (values.Length == 1) - return values[0]; - - return string.Join(Separator.ToString(), values); + internal static string MakeKey(string[] values) { + return values.Length == 1 ? 
values[0] : string.Join(Separator.ToString(), values); } diff --git a/test/unit/AdaptiveTimeoutTest.cs b/test/unit/AdaptiveTimeoutTest.cs index e4c81dfb..072de1c4 100644 --- a/test/unit/AdaptiveTimeoutTest.cs +++ b/test/unit/AdaptiveTimeoutTest.cs @@ -26,7 +26,7 @@ public async Task AdaptiveTimeout_GetAdaptiveTimeout_NotEnoughSamplesAsync() { var adaptiveTimeoutResult = adaptiveTimeout.GetAdaptiveTimeout(); Assert.Equal(maxTimeout, adaptiveTimeoutResult); - Assert.InRange(observedLatency, 0.0, 55); + Assert.InRange(observedLatency, 0.0, 60); return; void LatencyObservation(double latency) { @@ -49,9 +49,9 @@ public async Task AdaptiveTimeout_GetAdaptiveTimeout_AdaptiveDisabled() { var adaptiveTimeoutResult = adaptiveTimeout.GetAdaptiveTimeout(); Assert.Equal(maxTimeout, adaptiveTimeoutResult); - Assert.InRange(observedLatency1, 0.0, 55); - Assert.InRange(observedLatency2, 0.0, 55); - Assert.InRange(observedLatency3, 0.0, 55); + Assert.InRange(observedLatency1, 0.0, 60); + Assert.InRange(observedLatency2, 0.0, 60); + Assert.InRange(observedLatency3, 0.0, 60); return; @@ -80,9 +80,9 @@ public async Task AdaptiveTimeout_GetAdaptiveTimeout() { var adaptiveTimeoutResult = adaptiveTimeout.GetAdaptiveTimeout(); Assert.True(adaptiveTimeoutResult < maxTimeout); - Assert.InRange(observedLatency1, 0.0, 45); - Assert.InRange(observedLatency2, 0.0, 55); - Assert.InRange(observedLatency3, 0.0, 65); + Assert.InRange(observedLatency1, 0.0, 55); + Assert.InRange(observedLatency2, 0.0, 65); + Assert.InRange(observedLatency3, 0.0, 75); return; void LatencyObservation1(double latency) { diff --git a/test/unit/DefaultLabelValuesCacheTests.cs b/test/unit/DefaultLabelValuesCacheTests.cs new file mode 100644 index 00000000..d8634d6d --- /dev/null +++ b/test/unit/DefaultLabelValuesCacheTests.cs @@ -0,0 +1,90 @@ +using System.Collections.Concurrent; +using System.Linq; +using System.Threading.Tasks; +using SharpHoundCommonLib.Services; +using Xunit; + +namespace CommonLibTest; + +public class DefaultLabelValuesCacheTests { + + [Theory] + [InlineData(new[] {"value"}, "value")] + [InlineData(new string[] {}, "")] + [InlineData(new[] {"value1", "value2"}, "value1\u001Fvalue2")] + [InlineData(new[] {"value1", "value2", "value3"}, "value1\u001Fvalue2\u001Fvalue3")] + public void MakeKey_Returns_Proper_Key(string[] labelValues, string expectedKey) { + // act + var key = DefaultLabelValuesCache.MakeKey(labelValues); + + // assert + Assert.Equal(expectedKey, key); + } + + [Fact] + public void Intern_Retrieves_Existing_LabelValues() { + // setup + string[] values1 = ["value1", "value2"]; + string[] values2 = ["value1", "value2"]; + var cache = new DefaultLabelValuesCache(); + + // act + cache.Intern(values1); + cache.Intern(values2); + + // assert + Assert.NotEmpty(cache._cache); + Assert.Single(cache._cache); + } + + [Fact] + public void Empty_Intern_Returns_Empty_Array() { + // setup + var cache = new DefaultLabelValuesCache(); + + // act + var ret = cache.Intern([]); + + // assert + Assert.Empty(ret); + } + + [Fact] + public void LabelValuesCache_ReturnsSameReference_UnderConcurrency() + { + // setup + var cache = new DefaultLabelValuesCache(); + const int threadCount = 16; + const int iterationsPerThread = 10_000; + var results = new ConcurrentBag(); + var tasks = new Task[threadCount]; + + for (var t = 0; t < threadCount; t++) + { + tasks[t] = Task.Run(() => + { + for (var i = 0; i < iterationsPerThread; i++) + { + var labels = cache.Intern(["GET", "200"]); + results.Add(labels); + } + }); + } + + // act + 
Task.WaitAll(tasks); + + // assert + // Take the first reference + var first = results.First(); + + // Assert all references are identical + foreach (var arr in results) + { + Assert.True( + object.ReferenceEquals(first, arr), + "Different label array instances were returned"); + } + } + +} \ No newline at end of file diff --git a/test/unit/MetricDefinitionTests.cs b/test/unit/MetricDefinitionTests.cs new file mode 100644 index 00000000..b4017e66 --- /dev/null +++ b/test/unit/MetricDefinitionTests.cs @@ -0,0 +1,102 @@ +using System; +using SharpHoundCommonLib.Models; +using Xunit; + +namespace CommonLibTest; + +public class MetricDefinitionTests { + + [Fact] + public void LabelValues_EmptyLabelNames_Returns_Empty() { + // setup + var labelValues = new LabelValues(["value1", "value2"]); + string[] labelNames = []; + + // act + var output = labelValues.ToDisplayString(labelNames); + + // assert + Assert.Empty(output); + } + + [Fact] + public void LabelValues_MoreLabelNames_Returns_Error() { + // setup + var labelValues = new LabelValues(["value1", "value2"]); + string[] labelNames = ["value1", "value2", "value3"]; + + // act + var output = labelValues.ToDisplayString(labelNames); + + // assert + Assert.Equal($"{{Improper Observation Labels, LabelNamesCount: {labelNames.Length}, LabelValuesCount: {labelValues.Values.Length}}}", output); + } + + [Fact] + public void LabelValues_MoreLabelValues_Returns_Error() { + // setup + var labelValues = new LabelValues(["value1", "value2", "value3"]); + string[] labelNames = ["value1", "value2"]; + + // act + var output = labelValues.ToDisplayString(labelNames); + + // assert + Assert.Equal($"{{Improper Observation Labels, LabelNamesCount: {labelNames.Length}, LabelValuesCount: {labelValues.Values.Length}}}", output); + } + + [Fact] + public void LabelValues_ToDisplayString() { + // setup + var labelValues = new LabelValues(["value1", "value2", "value3"]); + string[] labelNames = ["name1", "name2", "name3"]; + + // act + var output = labelValues.ToDisplayString(labelNames); + + // assert + Assert.Equal("{name1:value1,name2:value2,name3:value3}", output); + } + + [Fact] + public void Definitions_Properly_Assign_Values() { + // setup + const string name = "definitionName"; + string[] labelNames = ["name1", "name2", "name3" ]; + double[] buckets = [0, 1, 2, 3]; + + // act + var counter = new CounterDefinition(name, labelNames); + var gauge = new GaugeDefinition(name, labelNames); + var histogram = new CumulativeHistogramDefinition(name, buckets, labelNames); + + // assert + Assert.Equal(name, counter.Name); + Assert.Equal(name, gauge.Name); + Assert.Equal(name, histogram.Name); + Assert.Equal(labelNames, counter.LabelNames); + Assert.Equal(labelNames, gauge.LabelNames); + Assert.Equal(labelNames, histogram.LabelNames); + Assert.Equal(buckets.Length, histogram.Buckets.Length); + for (var i = 0; i < buckets.Length; ++i) { + Assert.Equal(buckets[i], histogram.Buckets[i]); + } + + } + + [Fact] + public void CumulativeHistogramDefinition_NormalizesBuckets() { + // setup + double[] initBuckets = [5, 4, 3, 2, 1]; + + // act + var definition = new CumulativeHistogramDefinition("name", initBuckets, []); + Array.Sort(initBuckets); + + // assert + Assert.Equal(initBuckets.Length, definition.Buckets.Length); + for (var i = 0; i < definition.Buckets.Length; ++i) { + Assert.Equal(initBuckets[i], definition.Buckets[i]); + } + } +} \ No newline at end of file From eaaa04026625bef20b8294249e2b7419e2f09604 Mon Sep 17 00:00:00 2001 From: Michael Cuomo Date: Mon, 5 Jan 2026 
19:01:24 -0500 Subject: [PATCH 11/17] test: add FileMetricSinkTests --- test/unit/FileMetricSinkTests.cs | 143 +++++++++++++++++++++++++++++++ 1 file changed, 143 insertions(+) create mode 100644 test/unit/FileMetricSinkTests.cs diff --git a/test/unit/FileMetricSinkTests.cs b/test/unit/FileMetricSinkTests.cs new file mode 100644 index 00000000..7b27e0a7 --- /dev/null +++ b/test/unit/FileMetricSinkTests.cs @@ -0,0 +1,143 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Text; +using Moq; +using SharpHoundCommonLib.Interfaces; +using SharpHoundCommonLib.Models; +using SharpHoundCommonLib.Services; +using Xunit; + +namespace CommonLibTest; + +public class SimpleMetricWriter : IMetricWriter { + public void StringBuilderAppendMetric(StringBuilder builder, MetricDefinition definition, LabelValues labelValues, + MetricAggregator aggregator, DateTimeOffset timestamp, string timestampOutputString = "yyyy-MM-dd HH:mm:ss.fff") => + builder.AppendFormat( + "DefinitionType: {0}, DefinitionName: {1}, AggregatorType: {2}, AggregatorSnapshotType: {3}\n", + definition.GetType(), definition.Name, aggregator.GetType(), aggregator.Snapshot().GetType()); +} + +public class FileMetricSinkTests { + [Theory] + [MemberData(nameof(FileMetricSinkTestData.FlushStringCases), MemberType = typeof(FileMetricSinkTestData))] + public void FileMetricSink_Returns_Expected_Flush_String( + MetricDefinition[] definitions, + MetricObservation.DoubleMetricObservation[] observations, + string[] expectedOutputs, + string[] unexpectedOutputs) { + // setup + var sinkOptions = new FileMetricSinkOptions { + FlushWriter = true, + }; + var textWriter = new StringWriter(); + var metricWriter = new SimpleMetricWriter(); + var sink = new FileMetricSink(definitions, textWriter, metricWriter, sinkOptions); + + // act + foreach (var observation in observations) { + sink.Observe(observation); + } + sink.Flush(); + var output = textWriter.ToString(); + + // assert + foreach (var expectedOutput in expectedOutputs) { + Assert.Contains(expectedOutput, output); + } + + foreach (var unexpectedOutput in unexpectedOutputs) { + Assert.DoesNotContain(unexpectedOutput, output); + } + } + + [Theory] + [InlineData(false)] + [InlineData(true)] + public void FileMetricSink_Does_Not_Flush_Writer_With_AutoFlush_False(bool autoFlush) { + // setup + var sinkOptions = new FileMetricSinkOptions { + FlushWriter = autoFlush, + }; + var writerMoq = new Mock(MockBehavior.Strict); + writerMoq.Setup(w => w.Write(It.IsAny())).Verifiable(); + writerMoq.Setup(w => w.Flush()).Verifiable(); + var metricWriter = new SimpleMetricWriter(); + MetricDefinition[] definitions = [new CounterDefinition("counter_definition", ["name"])]; + var observation = new MetricObservation.DoubleMetricObservation(0, 1, ["value"]); + var sink = new FileMetricSink(definitions, writerMoq.Object, metricWriter, sinkOptions); + + // act + sink.Observe(observation); + sink.Flush(); + + // assert + writerMoq.Verify(w => w.Write(It.IsAny()), Times.Once); + if (autoFlush) + writerMoq.Verify(w => w.Flush(), Times.Once); + else + writerMoq.Verify(w => w.Flush(), Times.Never); + } +} + +public static class FileMetricSinkTestData { + public static IEnumerable FlushStringCases => [ + // Observations are flushed + [ + new MetricDefinition[] { + new CounterDefinition("counter_definition", ["value"]), + new GaugeDefinition("gauge_definition", ["value"]), + }, + new[] { + new MetricObservation.DoubleMetricObservation(0, 1, []), + new MetricObservation.DoubleMetricObservation(1, 1, 
[]), + }, + new[] { + "Metric Flush: ", + "========================================", + "DefinitionType: SharpHoundCommonLib.Models.CounterDefinition, DefinitionName: counter_definition, AggregatorType: SharpHoundCommonLib.Services.CounterAggregator, AggregatorSnapshotType: System.Int64\n", + "DefinitionType: SharpHoundCommonLib.Models.GaugeDefinition, DefinitionName: gauge_definition, AggregatorType: SharpHoundCommonLib.Services.GaugeAggregator, AggregatorSnapshotType: System.Double\n", + }, + Array.Empty(), + ], + // Unobserved Metrics are not flushed + [ + new MetricDefinition[] { + new CounterDefinition("counter_definition", ["value"]), + new GaugeDefinition("gauge_definition", ["value"]), + }, + new[] { + new MetricObservation.DoubleMetricObservation(0, 1, []), + }, + new[] { + "Metric Flush: ", + "========================================", + "DefinitionType: SharpHoundCommonLib.Models.CounterDefinition, DefinitionName: counter_definition, AggregatorType: SharpHoundCommonLib.Services.CounterAggregator, AggregatorSnapshotType: System.Int64\n", + }, + new[] { + "DefinitionType: SharpHoundCommonLib.Models.GaugeDefinition, DefinitionName: gauge_definition, AggregatorType: SharpHoundCommonLib.Services.GaugeAggregator, AggregatorSnapshotType: System.Double\n", + }, + ], + // Cumulative Histogram Returns HistogramSnapshot + [ + new MetricDefinition[] { + new CounterDefinition("counter_definition", ["value"]), + new GaugeDefinition("gauge_definition", ["value"]), + new CumulativeHistogramDefinition("cumulative_histogram_definition", [1, 2, 3], ["value"]), + }, + new[] { + new MetricObservation.DoubleMetricObservation(0, 1, []), + new MetricObservation.DoubleMetricObservation(1, 1, []), + new MetricObservation.DoubleMetricObservation(2, 1, []), + }, + new[] { + "Metric Flush: ", + "========================================", + "DefinitionType: SharpHoundCommonLib.Models.CounterDefinition, DefinitionName: counter_definition, AggregatorType: SharpHoundCommonLib.Services.CounterAggregator, AggregatorSnapshotType: System.Int64\n", + "DefinitionType: SharpHoundCommonLib.Models.GaugeDefinition, DefinitionName: gauge_definition, AggregatorType: SharpHoundCommonLib.Services.GaugeAggregator, AggregatorSnapshotType: System.Double\n", + "DefinitionType: SharpHoundCommonLib.Models.CumulativeHistogramDefinition, DefinitionName: cumulative_histogram_definition, AggregatorType: SharpHoundCommonLib.Services.CumulativeHistogramAggregator, AggregatorSnapshotType: SharpHoundCommonLib.Services.HistogramSnapshot\n", + }, + Array.Empty(), + ], + ]; +} \ No newline at end of file From 1b2dc0a087156221fe30213d2deff00dab997db5 Mon Sep 17 00:00:00 2001 From: Michael Cuomo Date: Tue, 6 Jan 2026 10:29:23 -0500 Subject: [PATCH 12/17] chore: add notes for IsExternalInit.cs --- src/CommonLib/Models/IsExternalInit.cs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/CommonLib/Models/IsExternalInit.cs b/src/CommonLib/Models/IsExternalInit.cs index 8feede59..c0627212 100644 --- a/src/CommonLib/Models/IsExternalInit.cs +++ b/src/CommonLib/Models/IsExternalInit.cs @@ -1,5 +1,9 @@ using System.ComponentModel; +// This class while it looks unused, is used to be able to build the records for a .Net Framework Target. 
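+// (C# 9 records and init-only setters compile against System.Runtime.CompilerServices.IsExternalInit, which is not shipped with .NET Framework or netstandard2.0, so it is declared here.)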
+// see: https://stackoverflow.com/questions/64749385/predefined-type-system-runtime-compilerservices-isexternalinit-is-not-defined +// see: https://developercommunity.visualstudio.com/t/error-cs0518-predefined-type-systemruntimecompiler/1244809 + namespace System.Runtime.CompilerServices { [EditorBrowsable(EditorBrowsableState.Never)] From 4b0402776f605a3bc04de0c04172d1c1199336e7 Mon Sep 17 00:00:00 2001 From: Michael Cuomo Date: Tue, 6 Jan 2026 14:39:12 -0500 Subject: [PATCH 13/17] tests: Add MetricAggregatorTests.cs, MetricRegistryTests.cs, and MetricRouterTests.cs --- test/unit/AdaptiveTimeoutTest.cs | 6 +- test/unit/MetricAggregatorTests.cs | 138 +++++++++++++++++++++++++++++ test/unit/MetricRegistryTests.cs | 65 ++++++++++++++ test/unit/MetricRouterTests.cs | 86 ++++++++++++++++++ 4 files changed, 292 insertions(+), 3 deletions(-) create mode 100644 test/unit/MetricAggregatorTests.cs create mode 100644 test/unit/MetricRegistryTests.cs create mode 100644 test/unit/MetricRouterTests.cs diff --git a/test/unit/AdaptiveTimeoutTest.cs b/test/unit/AdaptiveTimeoutTest.cs index 072de1c4..168d7443 100644 --- a/test/unit/AdaptiveTimeoutTest.cs +++ b/test/unit/AdaptiveTimeoutTest.cs @@ -49,9 +49,9 @@ public async Task AdaptiveTimeout_GetAdaptiveTimeout_AdaptiveDisabled() { var adaptiveTimeoutResult = adaptiveTimeout.GetAdaptiveTimeout(); Assert.Equal(maxTimeout, adaptiveTimeoutResult); - Assert.InRange(observedLatency1, 0.0, 60); - Assert.InRange(observedLatency2, 0.0, 60); - Assert.InRange(observedLatency3, 0.0, 60); + Assert.InRange(observedLatency1, 0.0, 100); + Assert.InRange(observedLatency2, 0.0, 100); + Assert.InRange(observedLatency3, 0.0, 100); return; diff --git a/test/unit/MetricAggregatorTests.cs b/test/unit/MetricAggregatorTests.cs new file mode 100644 index 00000000..654debf9 --- /dev/null +++ b/test/unit/MetricAggregatorTests.cs @@ -0,0 +1,138 @@ +using System; +using System.Collections.Generic; +using System.Text; +using SharpHoundCommonLib.Models; +using SharpHoundCommonLib.Services; +using Xunit; +using Xunit.Abstractions; + +namespace CommonLibTest; + + +public class MetricAggregatorTests(ITestOutputHelper output) { + + [Theory] + [MemberData(nameof(MetricAggregatorTestData.CreateTestData), MemberType = typeof(MetricAggregatorTestData))] + public void MetricAggregatorExtensions_Create_Creates_Proper_Aggregator(MetricDefinition definition, + MetricAggregator expectedAggregator) { + // setup + // act + var aggregator = MetricAggregatorExtensions.Create(definition); + + // assert + Assert.IsType(expectedAggregator.GetType(), aggregator); + } + + [Fact] + public void MetricAggregatorExtensions_Creates_Throws_Exception_For_Unimplemented_MetricDefinition() { + // setup + var newMetricDefinition = new UnimplementedMetricDefinition("unimplemented", ["value1"]); + + // act and assert + Assert.Throws(() => MetricAggregatorExtensions.Create(newMetricDefinition)); + } + + [Theory] + [MemberData(nameof(MetricAggregatorTestData.ObserveAndSnapshotTests), + MemberType = typeof(MetricAggregatorTestData))] + public void MetricAggregator_Observe_and_Snapshot_Tests(MetricAggregator aggregator, + double[] observations, object expectedSnapshot) { + // setup + foreach (var observation in observations) { + aggregator.Observe(observation); + } + + // act + var snapshot = aggregator.Snapshot(); + + // assert + if (expectedSnapshot is HistogramSnapshot ehs && snapshot is HistogramSnapshot ahs) { + Assert.Equal(ehs.TotalCount, ehs.TotalCount); + Assert.Equal(ehs.Sum, ahs.Sum); + Assert.Equal(ehs.Bounds, 
ahs.Bounds); + Assert.Equal(ehs.Counts, ahs.Counts); + } else { + Assert.Equal(expectedSnapshot, snapshot); + } + + } + + private string snapShotArrays(double[] bounds, long[] counts) { + var builder = new StringBuilder(); + builder.Append("bounds: [ "); + Iterate(builder, bounds); + builder.Append(" ], counts: [ "); + Iterate(builder, counts); + builder.Append(" ]"); + return builder.ToString(); + + + void Iterate(StringBuilder sb, T[] os) { + var first = true; + + for (var i = 0; i < os.Length; i++) { + if (!first) + builder.Append(", "); + + builder.Append(os[i]); + first = false; + } + } + } + + + private record UnimplementedMetricDefinition(string Name, IReadOnlyList LabelNames) : MetricDefinition(Name, LabelNames) {} + +} + +public static class MetricAggregatorTestData { + public static IEnumerable CreateTestData => [ + [ + new CounterDefinition("counter_name", ["value"]), + new CounterAggregator(), + ], + [ + new GaugeDefinition("gauge_name", ["value"]), + new GaugeAggregator(), + ], + [ + new CumulativeHistogramDefinition("cumulative_histogram_name", [1, 2, 3], ["value"]), + new CumulativeHistogramAggregator([1, 2, 3]) + ], + ]; + + public static IEnumerable ObserveAndSnapshotTests => [ + [ + new CounterAggregator(), + new[] { + 1.0, + 2.0, + 1.0, + 4.0, + }, + 8L + ], + [ + new GaugeAggregator(), + new[] { + 1.0, + 2.0, + 1.0, + + }, + 1.0 + ], + [ + new CumulativeHistogramAggregator([1, 2, 3, 4]), + new[] { + 1.0, + 1.0, + 3.0, + 3.0, + 2.0, + }, + // Ensure Aggregation does not happen on observation or snapshot + new HistogramSnapshot([1, 2, 3, 4], [2, 1, 2, 0, 0], 5, 10) + ], + ]; +} \ No newline at end of file diff --git a/test/unit/MetricRegistryTests.cs b/test/unit/MetricRegistryTests.cs new file mode 100644 index 00000000..4ab3630f --- /dev/null +++ b/test/unit/MetricRegistryTests.cs @@ -0,0 +1,65 @@ +using SharpHoundCommonLib.Models; +using SharpHoundCommonLib.Services; +using SharpHoundCommonLib.Static; +using Xunit; + +namespace CommonLibTest; + +public class MetricRegistryTests { + [Fact] + public void TryRegister_Returns_definitionID_if_Success() { + // setup + var registry = new MetricRegistry(); + var counter = new CounterDefinition("counter_name", ["value"]); + var gauge = new GaugeDefinition("gauge_name", ["value"]); + + // act + var registered1 = registry.TryRegister(counter, out var counterDefinitionId); + var registered2 = registry.TryRegister(gauge, out var gaugeDefinitionId); + + // assert + Assert.True(registered1); + Assert.True(registered2); + Assert.Equal(0, counterDefinitionId); + Assert.Equal(1, gaugeDefinitionId); + } + + [Fact] + public void TryRegister_Gets_Preregistered_Definition_by_Name() { + // setup + var registry = new MetricRegistry(); + var counter1 = new CounterDefinition("counter_name", ["value"]); + var counter2 = new CounterDefinition("counter_name", ["value"]); + + // act + var registered1 = registry.TryRegister(counter1, out var counterDefinitionId1); + var registered2 = registry.TryRegister(counter2, out var counterDefinitionId2); + + // assert + Assert.True(registered1); + Assert.True(registered2); + Assert.Equal(0, counterDefinitionId1); + Assert.Equal(counterDefinitionId1, counterDefinitionId2); + Assert.Single(registry.Definitions); + } + + [Fact] + public void TryRegister_After_Sealing_Returns_false_and_InvalidId() { + // setup + var registry = new MetricRegistry(); + var counter = new CounterDefinition("counter_name", ["value"]); + var gauge = new GaugeDefinition("gauge_name", ["value"]); + + // act + var registered1 = 
registry.TryRegister(counter, out var counterDefinitionId); + registry.Seal(); + var registered2 = registry.TryRegister(gauge, out var gaugeDefinitionId); + + // assert + Assert.True(registered1); + Assert.False(registered2); + Assert.Equal(0, counterDefinitionId); + Assert.Equal(MetricId.InvalidId, gaugeDefinitionId); + Assert.Single(registry.Definitions); + } +} \ No newline at end of file diff --git a/test/unit/MetricRouterTests.cs b/test/unit/MetricRouterTests.cs new file mode 100644 index 00000000..2b1bcb67 --- /dev/null +++ b/test/unit/MetricRouterTests.cs @@ -0,0 +1,86 @@ +using Moq; +using SharpHoundCommonLib.Interfaces; +using SharpHoundCommonLib.Models; +using SharpHoundCommonLib.Services; +using Xunit; + +namespace CommonLibTest; + +public class MetricRouterTests { + + [Theory] + [InlineData(-1)] + [InlineData(5)] + [InlineData(2)] + public void InvalidIds_Are_Not_Cached_or_Observed(int definitionId) { + // setup + MetricDefinition[] definitions = [ + new CounterDefinition("counter_name_1", ["name"]), + new CounterDefinition("counter_name_2", ["name"]), + ]; + var labelCacheMoq = new Mock(); + var sinkMoq = new Mock(); + var router = new MetricRouter( + definitions, + [sinkMoq.Object], + labelCacheMoq.Object + ); + + // act + router.Observe(definitionId, 1.0, new LabelValues(["value"])); + + // assert + sinkMoq.Verify(s => s.Observe(in It.Ref.IsAny), Times.Never()); + labelCacheMoq.Verify(c => c.Intern(It.IsAny()), Times.Never()); + } + + [Fact] + public void LabelValues_Are_Interned_And_Each_Sink_Is_Observed() { + // setup + MetricDefinition[] definitions = [ + new CounterDefinition("counter_name_1", ["name"]), + new CounterDefinition("counter_name_2", ["name"]), + ]; + var labelCacheMoq = new Mock(); + var sinkMoq1 = new Mock(); + var sinkMoq2 = new Mock(); + var router = new MetricRouter( + definitions, + [sinkMoq1.Object, sinkMoq2.Object], + labelCacheMoq.Object + ); + + // act + router.Observe(0, 1.0, new LabelValues(["value"])); + + // assert + labelCacheMoq.Verify(c => c.Intern(It.IsAny()), Times.Once()); + sinkMoq1.Verify(s => s.Observe(in It.Ref.IsAny), Times.Once()); + sinkMoq2.Verify(s => s.Observe(in It.Ref.IsAny), Times.Once()); + } + + [Fact] + public void Flush_Calls_Flush_on_Each_Sink() { + // setup + MetricDefinition[] definitions = [ + ]; + var labelCacheMoq = new Mock(); + var sinkMoq1 = new Mock(); + var sinkMoq2 = new Mock(); + var router = new MetricRouter( + definitions, + [sinkMoq1.Object, sinkMoq2.Object], + labelCacheMoq.Object + ); + + // act + router.Flush(); + + // assert + sinkMoq1.Verify(s => s.Flush(), Times.Once()); + sinkMoq2.Verify(s => s.Flush(), Times.Once()); + sinkMoq1.Verify(s => s.Observe(in It.Ref.IsAny), Times.Never()); + sinkMoq2.Verify(s => s.Observe(in It.Ref.IsAny), Times.Never()); + labelCacheMoq.Verify(c => c.Intern(It.IsAny()), Times.Never()); + } +} \ No newline at end of file From 6bba59025041d79370f0e3a8292089e8d8b1488a Mon Sep 17 00:00:00 2001 From: Michael Cuomo Date: Tue, 6 Jan 2026 14:44:43 -0500 Subject: [PATCH 14/17] tests: adjusting to relax ranges on AdaptiveTimeoutTests --- test/unit/AdaptiveTimeoutTest.cs | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/test/unit/AdaptiveTimeoutTest.cs b/test/unit/AdaptiveTimeoutTest.cs index 168d7443..dc43e36f 100644 --- a/test/unit/AdaptiveTimeoutTest.cs +++ b/test/unit/AdaptiveTimeoutTest.cs @@ -26,7 +26,8 @@ public async Task AdaptiveTimeout_GetAdaptiveTimeout_NotEnoughSamplesAsync() { var adaptiveTimeoutResult = adaptiveTimeout.GetAdaptiveTimeout(); 
Assert.Equal(maxTimeout, adaptiveTimeoutResult); - Assert.InRange(observedLatency, 0.0, 60); + + Assert.InRange(observedLatency, 0.0, 100); return; void LatencyObservation(double latency) { @@ -80,9 +81,9 @@ public async Task AdaptiveTimeout_GetAdaptiveTimeout() { var adaptiveTimeoutResult = adaptiveTimeout.GetAdaptiveTimeout(); Assert.True(adaptiveTimeoutResult < maxTimeout); - Assert.InRange(observedLatency1, 0.0, 55); - Assert.InRange(observedLatency2, 0.0, 65); - Assert.InRange(observedLatency3, 0.0, 75); + Assert.InRange(observedLatency1, 0.0, 150); + Assert.InRange(observedLatency2, 0.0, 160); + Assert.InRange(observedLatency3, 0.0, 170); return; void LatencyObservation1(double latency) { From 76719a8bd004c54bd17f757ed25efb7da0914569 Mon Sep 17 00:00:00 2001 From: Michael Cuomo Date: Tue, 6 Jan 2026 15:14:50 -0500 Subject: [PATCH 15/17] chore: coderabbit suggestions --- src/CommonLib/LdapConnectionPool.cs | 4 +- src/CommonLib/LdapUtils.cs | 2 +- src/CommonLib/Models/MetricDefinition.cs | 6 ++- src/CommonLib/Services/FileMetricSink.cs | 1 + src/CommonLib/Services/MetricAggregator.cs | 16 ++++--- src/CommonLib/Services/MetricWriter.cs | 51 +++++++++++++++------- src/CommonLib/Static/Metrics.cs | 9 +++- test/unit/MetricAggregatorTests.cs | 2 +- 8 files changed, 64 insertions(+), 27 deletions(-) diff --git a/src/CommonLib/LdapConnectionPool.cs b/src/CommonLib/LdapConnectionPool.cs index ac7f10cc..5a094e86 100644 --- a/src/CommonLib/LdapConnectionPool.cs +++ b/src/CommonLib/LdapConnectionPool.cs @@ -1096,7 +1096,7 @@ private SearchRequest CreateSearchRequest(string distinguishedName, string ldapF private async Task SendRequestWithTimeout(LdapConnection connection, SearchRequest request, AdaptiveTimeout adaptiveTimeout) { // Prerequest metrics - var concurrentRequests = Interlocked.Increment(ref LdapMetrics.InFlightRequests); + var concurrentRequests = LdapMetrics.IncrementInFlight(); _metric.Observe(LdapMetricDefinitions.ConcurrentRequests, concurrentRequests, new LabelValues([nameof(LdapConnectionPool), _poolIdentifier])); @@ -1107,7 +1107,7 @@ private async Task SendRequestWithTimeout(LdapConnection connect var result = await adaptiveTimeout.ExecuteWithTimeout((_) => connection.SendRequestAsync(request, timeoutWithPadding), latencyObservation: LatencyObservation); // Postrequest metrics - concurrentRequests = Interlocked.Decrement(ref LdapMetrics.InFlightRequests); + concurrentRequests = LdapMetrics.DecrementInFlight(); _metric.Observe(LdapMetricDefinitions.ConcurrentRequests, concurrentRequests, new LabelValues([nameof(LdapConnectionPool), _poolIdentifier])); _metric.Observe(LdapMetricDefinitions.RequestsTotal, 1, diff --git a/src/CommonLib/LdapUtils.cs b/src/CommonLib/LdapUtils.cs index 12c199e5..03cfdc90 100644 --- a/src/CommonLib/LdapUtils.cs +++ b/src/CommonLib/LdapUtils.cs @@ -1142,7 +1142,7 @@ public void ResetUtils() { _connectionPool = new ConnectionPoolManager(_ldapConfig, scanner: _portScanner); // Metrics - LdapMetrics.InFlightRequests = 0; + LdapMetrics.ResetInFlight(); } private IDirectoryObject CreateDirectoryEntry(string path) { diff --git a/src/CommonLib/Models/MetricDefinition.cs b/src/CommonLib/Models/MetricDefinition.cs index 69c6c945..7c78f526 100644 --- a/src/CommonLib/Models/MetricDefinition.cs +++ b/src/CommonLib/Models/MetricDefinition.cs @@ -5,7 +5,7 @@ namespace SharpHoundCommonLib.Models; public readonly record struct LabelValues(string[] Values) { - public string ToDisplayString(IReadOnlyList labelNames) { + public string ToDisplayString(IReadOnlyList 
labelNames, string additionalName = null, string additionalValue = null) { if (labelNames.Count == 0) return string.Empty; @@ -22,6 +22,10 @@ public string ToDisplayString(IReadOnlyList labelNames) { .Append(':') .Append(Values[i]); } + + if (!string.IsNullOrEmpty(additionalName) && !string.IsNullOrEmpty(additionalValue)) { + sb.Append(',').Append(additionalName).Append(':').Append(additionalValue); + } sb.Append('}'); return sb.ToString(); diff --git a/src/CommonLib/Services/FileMetricSink.cs b/src/CommonLib/Services/FileMetricSink.cs index a12b87ec..fbfd4e2c 100644 --- a/src/CommonLib/Services/FileMetricSink.cs +++ b/src/CommonLib/Services/FileMetricSink.cs @@ -89,6 +89,7 @@ public void Flush() { } public void Dispose() { + _textWriter.Flush(); _textWriter.Dispose(); } } \ No newline at end of file diff --git a/src/CommonLib/Services/MetricAggregator.cs b/src/CommonLib/Services/MetricAggregator.cs index 02c30cdb..e6d491f9 100644 --- a/src/CommonLib/Services/MetricAggregator.cs +++ b/src/CommonLib/Services/MetricAggregator.cs @@ -41,6 +41,7 @@ public sealed class CumulativeHistogramAggregator(double[] bounds) : MetricAggre private readonly long[] _bucketCounts = new long[bounds.Length + 1]; // Includes the Inf+ bucket private long _count; private double _sum; + private readonly object _lock = new(); public override void Observe(double value) { // this along with the following line, finds the correct bucket the value should be placed in. @@ -49,13 +50,18 @@ public override void Observe(double value) { // that compliment if it is what is found. var idx = Array.BinarySearch(bounds, value); if (idx < 0) idx = ~idx; - _bucketCounts[idx]++; - _count++; - _sum += value; + lock (_lock) { + _bucketCounts[idx]++; + _count++; + _sum += value; + } } public override object Snapshot() => SnapshotHistogram(); - public HistogramSnapshot SnapshotHistogram() => - new(bounds, _bucketCounts, _count, _sum); + public HistogramSnapshot SnapshotHistogram() { + lock (_lock) { + return new HistogramSnapshot(bounds, (long[])_bucketCounts.Clone(), _count, _sum); + } + } } \ No newline at end of file diff --git a/src/CommonLib/Services/MetricWriter.cs b/src/CommonLib/Services/MetricWriter.cs index 6c26531c..b909a94b 100644 --- a/src/CommonLib/Services/MetricWriter.cs +++ b/src/CommonLib/Services/MetricWriter.cs @@ -1,4 +1,5 @@ using System; +using System.Globalization; using System.Text; using SharpHoundCommonLib.Interfaces; using SharpHoundCommonLib.Models; @@ -10,50 +11,68 @@ public void StringBuilderAppendMetric(StringBuilder builder, MetricDefinition de MetricAggregator aggregator, DateTimeOffset timestamp, string timestampOutputString = "yyyy-MM-dd HH:mm:ss.fff") { var labelText = labelValues.ToDisplayString(definition.LabelNames); if (aggregator is CumulativeHistogramAggregator cha) { - CumulativeHistogramAppend(builder, definition, labelText, cha, timestamp, timestampOutputString); + CumulativeHistogramAppend(builder, definition, labelValues, cha, timestamp, timestampOutputString); } else { - DefaultAppend(builder, definition, labelText, aggregator, timestamp, timestampOutputString); + DefaultAppend(builder, definition, labelValues.ToDisplayString(definition.LabelNames), aggregator, timestamp, timestampOutputString); } } private static void CumulativeHistogramAppend( StringBuilder builder, MetricDefinition definition, - string labelText, + LabelValues labelValues, CumulativeHistogramAggregator aggregator, DateTimeOffset timestamp, string timestampOutputString) { long cumulativeValue = 0; + var defaultLabelText 
= labelValues.ToDisplayString(definition.LabelNames); var snapshot = aggregator.SnapshotHistogram(); for (var i = 0; i < snapshot.Bounds.Length; i++) { cumulativeValue += snapshot.Counts[i]; - builder.AppendFormat("{0} {1}{2}{{le=\"{3}\"}} = {4}\n", - timestamp.ToString(timestampOutputString), - definition.Name + "_bucket", - labelText, - snapshot.Bounds[i], - cumulativeValue); + if (labelValues.Values.Length > 0) { + builder.AppendFormat("{0} {1}{2} = {3}\n", + timestamp.ToString(timestampOutputString), + definition.Name + "_bucket", + labelValues.ToDisplayString(definition.LabelNames, "le", snapshot.Bounds[i].ToString(CultureInfo.InvariantCulture)), + cumulativeValue); + } else { + builder.AppendFormat("{0} {1}{2}{{le=\"{3}\"}} = {4}\n", + timestamp.ToString(timestampOutputString), + definition.Name + "_bucket", + defaultLabelText, + snapshot.Bounds[i], + cumulativeValue); + } } - builder.AppendFormat("{0} {1}{2}{{le=\"+Inf\"}} = {3}\n", - timestamp.ToString(timestampOutputString), - definition.Name + "_bucket", - labelText, - snapshot.TotalCount); + if (labelValues.Values.Length > 0) { + builder.AppendFormat("{0} {1}{2} = {3}\n", + timestamp.ToString(timestampOutputString), + definition.Name + "_bucket", + labelValues.ToDisplayString(definition.LabelNames, "le", "+Inf"), + snapshot.TotalCount); + + } else { + builder.AppendFormat("{0} {1}{2}{{le=\"+Inf\"}} = {3}\n", + timestamp.ToString(timestampOutputString), + definition.Name + "_bucket", + defaultLabelText, + snapshot.TotalCount); + } builder.AppendFormat("{0} {1}{2} = {3}\n", timestamp.ToString(timestampOutputString), definition.Name + "_sum", - labelText, + defaultLabelText, snapshot.Sum); builder.AppendFormat("{0} {1}{2} = {3}\n", timestamp.ToString(timestampOutputString), definition.Name + "_count", - labelText, + defaultLabelText, snapshot.TotalCount); } diff --git a/src/CommonLib/Static/Metrics.cs b/src/CommonLib/Static/Metrics.cs index 59d996cf..a0a75d54 100644 --- a/src/CommonLib/Static/Metrics.cs +++ b/src/CommonLib/Static/Metrics.cs @@ -1,3 +1,4 @@ +using System.Threading; using SharpHoundCommonLib.Interfaces; using SharpHoundCommonLib.Services; @@ -13,7 +14,13 @@ public static IMetricFactory Factory { } public static class LdapMetrics { - public static int InFlightRequests; + private static int _inFlightRequests; + + public static int InFlightRequest => _inFlightRequests; + + public static int IncrementInFlight() => Interlocked.Increment(ref _inFlightRequests); + public static int DecrementInFlight() => Interlocked.Decrement(ref _inFlightRequests); + public static void ResetInFlight() => Interlocked.Exchange(ref _inFlightRequests, 0); } diff --git a/test/unit/MetricAggregatorTests.cs b/test/unit/MetricAggregatorTests.cs index 654debf9..3f77c505 100644 --- a/test/unit/MetricAggregatorTests.cs +++ b/test/unit/MetricAggregatorTests.cs @@ -47,7 +47,7 @@ public void MetricAggregator_Observe_and_Snapshot_Tests(MetricAggregator aggrega // assert if (expectedSnapshot is HistogramSnapshot ehs && snapshot is HistogramSnapshot ahs) { - Assert.Equal(ehs.TotalCount, ehs.TotalCount); + Assert.Equal(ehs.TotalCount, ahs.TotalCount); Assert.Equal(ehs.Sum, ahs.Sum); Assert.Equal(ehs.Bounds, ahs.Bounds); Assert.Equal(ehs.Counts, ahs.Counts); From ef03459f2a3ca62d0bd8c723c91f7947af2c99e3 Mon Sep 17 00:00:00 2001 From: Michael Cuomo Date: Tue, 6 Jan 2026 15:28:21 -0500 Subject: [PATCH 16/17] chore: more coderabbit suggestions --- src/CommonLib/Models/MetricDefinition.cs | 9 ++++---- src/CommonLib/Services/FileMetricSink.cs | 2 +- 
test/unit/MetricDefinitionTests.cs | 27 +++++++++++++++++++++++- 3 files changed, 32 insertions(+), 6 deletions(-) diff --git a/src/CommonLib/Models/MetricDefinition.cs b/src/CommonLib/Models/MetricDefinition.cs index 7c78f526..31520cdc 100644 --- a/src/CommonLib/Models/MetricDefinition.cs +++ b/src/CommonLib/Models/MetricDefinition.cs @@ -17,14 +17,15 @@ public string ToDisplayString(IReadOnlyList labelNames, string additiona for (var i = 0; i < labelNames.Count; i++) { if (i > 0) sb.Append(','); - + sb.Append(labelNames[i]) - .Append(':') - .Append(Values[i]); + .Append("=\"") + .Append(Values[i]) + .Append('"'); } if (!string.IsNullOrEmpty(additionalName) && !string.IsNullOrEmpty(additionalValue)) { - sb.Append(',').Append(additionalName).Append(':').Append(additionalValue); + sb.Append(',').Append(additionalName).Append("=\"").Append(additionalValue).Append('"'); } sb.Append('}'); diff --git a/src/CommonLib/Services/FileMetricSink.cs b/src/CommonLib/Services/FileMetricSink.cs index fbfd4e2c..73bc1f08 100644 --- a/src/CommonLib/Services/FileMetricSink.cs +++ b/src/CommonLib/Services/FileMetricSink.cs @@ -89,7 +89,7 @@ public void Flush() { } public void Dispose() { - _textWriter.Flush(); + Flush(); _textWriter.Dispose(); } } \ No newline at end of file diff --git a/test/unit/MetricDefinitionTests.cs b/test/unit/MetricDefinitionTests.cs index b4017e66..49ba2d51 100644 --- a/test/unit/MetricDefinitionTests.cs +++ b/test/unit/MetricDefinitionTests.cs @@ -55,7 +55,32 @@ public void LabelValues_ToDisplayString() { var output = labelValues.ToDisplayString(labelNames); // assert - Assert.Equal("{name1:value1,name2:value2,name3:value3}", output); + Assert.Equal("{name1=\"value1\",name2=\"value2\",name3=\"value3\"}", output); + } + + [Fact] + public void LabelValues_ToDisplayString_Additional_Values_Requires_Both() { + // setup + var labelValues = new LabelValues(["value1", "value2", "value3"]); + string[] labelNames = ["name1", "name2", "name3"]; + + // act + var output1 = labelValues.ToDisplayString(labelNames, string.Empty); + var output2 = labelValues.ToDisplayString(labelNames, ""); + var output3 = labelValues.ToDisplayString(labelNames, "additional_name"); + var output4 = labelValues.ToDisplayString(labelNames, additionalValue: string.Empty); + var output5 = labelValues.ToDisplayString(labelNames, additionalValue: ""); + var output6 = labelValues.ToDisplayString(labelNames, additionalValue: "additional_value"); + var output7 = labelValues.ToDisplayString(labelNames, "additional_name", "additional_value"); + + // assert + Assert.Equal("{name1=\"value1\",name2=\"value2\",name3=\"value3\"}", output1); + Assert.Equal("{name1=\"value1\",name2=\"value2\",name3=\"value3\"}", output2); + Assert.Equal("{name1=\"value1\",name2=\"value2\",name3=\"value3\"}", output3); + Assert.Equal("{name1=\"value1\",name2=\"value2\",name3=\"value3\"}", output4); + Assert.Equal("{name1=\"value1\",name2=\"value2\",name3=\"value3\"}", output5); + Assert.Equal("{name1=\"value1\",name2=\"value2\",name3=\"value3\"}", output6); + Assert.Equal("{name1=\"value1\",name2=\"value2\",name3=\"value3\",additional_name=\"additional_value\"}", output7); } [Fact] From 3333ef6e7043d57098fac220f56c9f44f75c84bd Mon Sep 17 00:00:00 2001 From: Michael Cuomo Date: Tue, 6 Jan 2026 15:31:28 -0500 Subject: [PATCH 17/17] chore: more coderabbit suggestions --- test/unit/AdaptiveTimeoutTest.cs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/test/unit/AdaptiveTimeoutTest.cs b/test/unit/AdaptiveTimeoutTest.cs index 
dc43e36f..044ce758 100644
--- a/test/unit/AdaptiveTimeoutTest.cs
+++ b/test/unit/AdaptiveTimeoutTest.cs
@@ -1,4 +1,5 @@
 using System;
+using System.Collections.Concurrent;
 using System.Collections.Generic;
 using System.Threading.Tasks;
 using SharpHoundCommonLib;
@@ -99,7 +100,7 @@ void LatencyObservation3(double latency) {
 
     [Fact]
     public async Task AdaptiveTimeout_GetAdaptiveTimeout_TimeSpikeSafetyValve() {
-        var observations = new List<double>();
+        var observations = new ConcurrentBag<double>();
         var tasks = new List<Task>();
         var maxTimeout = TimeSpan.FromSeconds(1);
         var numSamples = 30;
@@ -125,8 +126,8 @@ public async Task AdaptiveTimeout_GetAdaptiveTimeout_TimeSpikeSafetyValve() {
 
     [Fact]
     public async Task AdaptiveTimeout_GetAdaptiveTimeout_TimeSpikeSafetyValve_IgnoreHiccup() {
-        var completedObservations = new List<double>();
-        var timeoutObservations = new List<double>();
+        var completedObservations = new ConcurrentBag<double>();
+        var timeoutObservations = new ConcurrentBag<double>();
         var tasks = new List<Task>();
         var maxTimeout = TimeSpan.FromSeconds(1);
         var numSamples = 5;
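
The bucket-selection comment added to CumulativeHistogramAggregator in PATCH 15 is easiest to see in isolation. The sketch below is illustrative only (BucketSketch is not a type in this library); it relies on the documented Array.BinarySearch contract, where an exact match returns that bound's index and a miss returns the bitwise complement of the insertion point, so ~idx picks the first bucket whose upper bound is greater than or equal to the value and anything beyond the last bound lands in the trailing +Inf slot.

    using System;

    // Standalone sketch of the bucket rule used by CumulativeHistogramAggregator (PATCH 15).
    internal static class BucketSketch {
        private static int BucketIndex(double[] bounds, double value) {
            var idx = Array.BinarySearch(bounds, value);
            return idx < 0 ? ~idx : idx;   // complement = first bound >= value
        }

        public static void Main() {
            double[] bounds = [1, 2, 3, 4];
            var counts = new long[bounds.Length + 1];             // extra slot is the +Inf bucket
            foreach (var v in new[] { 1.0, 1.0, 3.0, 3.0, 2.0 })  // same data as MetricAggregatorTests
                counts[BucketIndex(bounds, v)]++;
            Console.WriteLine(string.Join(",", counts));          // prints 2,1,2,0,0
        }
    }

This matches the HistogramSnapshot([1, 2, 3, 4], [2, 1, 2, 0, 0], 5, 10) expected by the aggregator tests; in the real aggregator the buckets, total and sum are updated and cloned under a lock, so SnapshotHistogram hands back a consistent copy while other threads keep observing.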
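
For the flushed text itself, the CumulativeHistogramAppend path rewritten in PATCH 15, combined with the quoted label format from PATCH 16, emits one cumulative line per bound plus a +Inf line, then _sum and _count lines carrying the plain label set. Assuming an illustrative definition named request_latency with a single pool label, bounds [1, 2, 3, 4] and the five observations 1, 1, 3, 3, 2 (count 5, sum 10), the output is shaped like this (the timestamp is just a sample of the yyyy-MM-dd HH:mm:ss.fff format):

    2026-01-06 15:14:50.000 request_latency_bucket{pool="primary",le="1"} = 2
    2026-01-06 15:14:50.000 request_latency_bucket{pool="primary",le="2"} = 3
    2026-01-06 15:14:50.000 request_latency_bucket{pool="primary",le="3"} = 5
    2026-01-06 15:14:50.000 request_latency_bucket{pool="primary",le="4"} = 5
    2026-01-06 15:14:50.000 request_latency_bucket{pool="primary",le="+Inf"} = 5
    2026-01-06 15:14:50.000 request_latency_sum{pool="primary"} = 10
    2026-01-06 15:14:50.000 request_latency_count{pool="primary"} = 5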
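
PATCH 15 also replaces the public LdapMetrics.InFlightRequests field with IncrementInFlight, DecrementInFlight and ResetInFlight wrappers over Interlocked. The snippet below is illustrative only (InFlightDemo is not part of the library); it shows why: a plain ++ is a read-modify-write that can lose updates when connection pools touch the counter concurrently, while Interlocked keeps the gauge exact.

    using System;
    using System.Threading;
    using System.Threading.Tasks;

    // Illustrative only: lost updates with ++ versus Interlocked on a shared counter.
    internal static class InFlightDemo {
        private static int _plain;
        private static int _interlocked;

        public static async Task Main() {
            var tasks = new Task[10_000];
            for (var i = 0; i < tasks.Length; i++)
                tasks[i] = Task.Run(() => {
                    _plain++;                                 // not atomic: read, add, write
                    Interlocked.Increment(ref _interlocked);  // atomic
                });
            await Task.WhenAll(tasks);
            // _interlocked is always 10000; _plain can come up short on a multi-core machine.
            Console.WriteLine($"plain={_plain}, interlocked={_interlocked}");
        }
    }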