diff --git a/src/benchmarks/gc/GC.Infrastructure/Notebooks/BenchmarkAnalysis-Obsolete.ipynb b/src/benchmarks/gc/GC.Infrastructure/Notebooks/BenchmarkAnalysis-Obsolete.ipynb new file mode 100644 index 00000000000..87abcd1dab0 --- /dev/null +++ b/src/benchmarks/gc/GC.Infrastructure/Notebooks/BenchmarkAnalysis-Obsolete.ipynb @@ -0,0 +1,2061 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Obsolete stuff - for temporary reference" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "// Old comparison/summary code (commented out)\n", + " /*\n", + " public LoadInfo GetComparison(LoadInfo baseline, LoadInfo comparand)\n", + " {\n", + " return new LoadInfo\n", + " {\n", + " MaxWorkingSetMB = DeltaPercent(baseline.MaxWorkingSetMB, comparand.MaxWorkingSetMB),\n", + " P99WorkingSetMB = DeltaPercent(baseline.P99WorkingSetMB, comparand.P99WorkingSetMB),\n", + " P95WorkingSetMB = DeltaPercent(baseline.P95WorkingSetMB, comparand.P95WorkingSetMB),\n", + " P90WorkingSetMB = DeltaPercent(baseline.P90WorkingSetMB, comparand.P90WorkingSetMB),\n", + " P75WorkingSetMB = DeltaPercent(baseline.P75WorkingSetMB, comparand.P75WorkingSetMB),\n", + " P50WorkingSetMB = DeltaPercent(baseline.P50WorkingSetMB, comparand.P50WorkingSetMB),\n", + "\n", + " MaxPrivateMemoryMB = DeltaPercent(baseline.MaxPrivateMemoryMB, comparand.MaxPrivateMemoryMB),\n", + " P99PrivateMemoryMB = DeltaPercent(baseline.P99PrivateMemoryMB, comparand.P99PrivateMemoryMB),\n", + " P95PrivateMemoryMB = DeltaPercent(baseline.P95PrivateMemoryMB, comparand.P95PrivateMemoryMB),\n", + " P90PrivateMemoryMB = DeltaPercent(baseline.P90PrivateMemoryMB, comparand.P90PrivateMemoryMB),\n", + " P75PrivateMemoryMB = DeltaPercent(baseline.P75PrivateMemoryMB, comparand.P75PrivateMemoryMB),\n", + " P50PrivateMemoryMB = DeltaPercent(baseline.P50PrivateMemoryMB, comparand.P50PrivateMemoryMB),\n", + " \n", + " Latency50thMS = DeltaPercent(baseline.Latency50thMS, comparand.Latency50thMS),\n", + " Latency75thMS = DeltaPercent(baseline.Latency75thMS, comparand.Latency75thMS),\n", + " Latency90thMS = DeltaPercent(baseline.Latency90thMS, comparand.Latency90thMS), \n", + " Latency99thMS = DeltaPercent(baseline.Latency99thMS, comparand.Latency99thMS), \n", + " MeanLatencyMS = DeltaPercent(baseline.MeanLatencyMS, comparand.MeanLatencyMS),\n", + " RequestsPerMSec = DeltaPercent(baseline.RequestsPerMSec, comparand.RequestsPerMSec),\n", + " TotalSuspensionTimeMSec = DeltaPercent(baseline.TotalSuspensionTimeMSec, comparand.TotalSuspensionTimeMSec),\n", + " PercentPauseTimeInGC = DeltaPercent(baseline.PercentPauseTimeInGC, comparand.PercentPauseTimeInGC),\n", + " PercentTimeInGC = DeltaPercent(baseline.PercentTimeInGC, comparand.PercentTimeInGC),\n", + " MeanHeapSizeBeforeMB = DeltaPercent(baseline.MeanHeapSizeBeforeMB, comparand.MeanHeapSizeBeforeMB),\n", + " MaxHeapSizeMB = DeltaPercent(baseline.MaxHeapSizeMB, comparand.MaxHeapSizeMB),\n", + " TotalAllocationsMB = DeltaPercent(baseline.TotalAllocationsMB, comparand.TotalAllocationsMB),\n", + " GCScore = DeltaPercent(baseline.GCScore, comparand.GCScore),\n", + " MaxHeapCount = DeltaPercent(baseline.MaxHeapCount, comparand.MaxHeapCount),\n", + " NumberOfHeapCountSwitches = DeltaPercent(baseline.NumberOfHeapCountSwitches, comparand.NumberOfHeapCountSwitches),\n", + " 
NumberOfHeapCountDirectionChanges = DeltaPercent(baseline.NumberOfHeapCountDirectionChanges, comparand.NumberOfHeapCountDirectionChanges),\n", + " Data = baseline.Data,\n", + " Data2 = comparand.Data,\n", + " Run = $\"{baseline.Run} vs. {comparand.Run}\",\n", + " Benchmark = baseline.Benchmark,\n", + " Id = $\"{baseline.Run} vs. {comparand.Run} for {baseline.Benchmark}\"\n", + " };\n", + " }\n", + "\n", + " public Dictionary? GetAllBenchmarksForRun(string run)\n", + " {\n", + " if (!_runToBenchmarkData.TryGetValue(run, out var benchmarksForRun))\n", + " {\n", + " Console.WriteLine($\"No benchmarks found for run: {run}\");\n", + " return null;\n", + " }\n", + "\n", + " return benchmarksForRun;\n", + " }\n", + "\n", + " public void SaveBenchmarkData(string outputPath = \"\")\n", + " {\n", + " if (string.IsNullOrEmpty(outputPath))\n", + " {\n", + " outputPath = _basePath;\n", + " }\n", + "\n", + " StringBuilder sb = new();\n", + " sb.AppendLine($\"Run,Benchmark,Max Working Set (MB), Max Private Memory (MB), Request/MSec, Mean Latency (MSec), Latency 50th Percentile MSec, Latency 75th Percentile MSec, Latency 90th Percentile MSec, Latency 99th Percentile MSec\");\n", + " foreach (var b in _data)\n", + " {\n", + " var val = b.Value; \n", + " sb.AppendLine($\"{val.Run},{val.Benchmark},{val.MaxWorkingSetMB},{val.MaxPrivateMemoryMB},{val.RequestsPerMSec},{val.MeanLatencyMS},{val.Latency50thMS},{val.Latency75thMS},{val.Latency90thMS},{val.Latency99thMS}\");\n", + " }\n", + "\n", + " File.WriteAllText(Path.Combine(outputPath, \"AllBenchmarks.csv\"), sb.ToString());\n", + " }\n", + "\n", + " public Dictionary? GetAllRunsForBenchmark(string benchmark)\n", + " {\n", + " if (!_benchmarkToRunData.TryGetValue(benchmark, out var runsForBenchmark))\n", + " {\n", + " Console.WriteLine($\"No runs found for benchmark: {benchmark}\");\n", + " return null;\n", + " }\n", + "\n", + " return runsForBenchmark;\n", + " }\n", + "*/\n", + "\n", + "/*\n", + " public LoadInfo? 
GetBenchmarkData(string benchmark, string run)\n", + " {\n", + " if (!_benchmarkToRunData.TryGetValue(benchmark, out var runData))\n", + " {\n", + " Console.WriteLine($\"Benchmark: {benchmark} not found!\");\n", + " return null;\n", + " }\n", + "\n", + " if (!runData.TryGetValue(run, out var loadInfo))\n", + " {\n", + " Console.WriteLine($\"Run: {run} not found!\");\n", + " return null;\n", + " }\n", + "\n", + " return loadInfo;\n", + " }\n", + "*/\n", + "\n", + " /*\n", + " public Dictionary GetBenchmarkToComparison(string baselineRun, string comparandRun)\n", + " {\n", + " Dictionary comparisons = new();\n", + "\n", + " Dictionary baselineData = new();\n", + " Dictionary comparandData = new();\n", + " HashSet allBenchmarks = new();\n", + "\n", + " foreach (var d in _data)\n", + " {\n", + " allBenchmarks.Add(d.Value.Benchmark);\n", + "\n", + " string run = d.Key.Split(\"|\", StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)[0];\n", + "\n", + " if (string.CompareOrdinal(run, baselineRun) == 0 && !baselineData.TryGetValue(d.Key, out var baselineInfo))\n", + " {\n", + " baselineInfo = baselineData[d.Value.Benchmark] = d.Value;\n", + " }\n", + "\n", + " else if (string.CompareOrdinal(run, comparandRun) == 0 && !comparandData.TryGetValue(d.Key, out var comparandInfo))\n", + " {\n", + " comparandInfo = comparandData[d.Value.Benchmark] = d.Value;\n", + " }\n", + " }\n", + "\n", + " foreach (var benchmark in allBenchmarks)\n", + " {\n", + " if (!baselineData.TryGetValue(benchmark, out var baselineBenchmarkInfo))\n", + " {\n", + " Console.WriteLine($\"Benchmark: {benchmark} not found on the baseline: {baselineRun}\");\n", + " continue;\n", + " }\n", + "\n", + " if (!comparandData.TryGetValue(benchmark, out var comparandBenchmarkInfo))\n", + " {\n", + " Console.WriteLine($\"Benchmark: {benchmark} not found on the comparand: {comparandRun}\");\n", + " continue;\n", + " }\n", + "\n", + " LoadInfo comparison = GetComparison(baselineBenchmarkInfo, comparandBenchmarkInfo);\n", + " comparisons[benchmark] = comparison;\n", + " }\n", + " \n", + " return comparisons;\n", + " }\n", + " */\n", + "\n", + " // Haven't used this in a while - writes a summary file to disk\n", + "\n", + "/*\n", + " public void SummarizeResults(DataManager dataManager, string outFile, Dictionary info = null)\n", + " {\n", + " if (info == null)\n", + " {\n", + " info = dataManager._data;\n", + " }\n", + "\n", + " using (StreamWriter sw = new StreamWriter(outFile))\n", + " {\n", + " sw.WriteLine(\"{0,12} | {1,35} | {2, 5} | {3, 5:0.00} | {4, 5} | {5, 5:0.00} | {6, 5} | {7, 5:0.00} | {8, 5} | {9, 5:0.00} | {10, 10:0.00} | {11, 10:0.00} | {12, 5:0.00} | {13, 10:0.00} | {14, 10:0.00} |\", \n", + " \"run\", \"benchmark\", \"gen0\", \"pause\", \"gen1\", \"pause\", \"ngc2\", \"pause\", \"bgc\", \"pause\", \"allocMB\", \"alloc/gc\", \"pct\", \"peakMB\", \"meanMB\");\n", + " sw.WriteLine(\"{0,12} | {1,35} | {2, 5} | {3, 5:0.00} | {4, 5} | {5, 5:0.00} | {6, 5} | {7, 5:0.00} | {8, 5} | {9, 5:0.00} | {10, 10:0.00} | {11, 10:0.00} | {12, 5:0.00} | {13, 10:0.00} | {14, 10:0.00} |\", \n", + " \"\", \"\", \"\", \"susp\", \"\", \"susp\", \"\", \"susp\", \"\", \"susp\", \"\", \"\", \"\", \"totalcpu\", \"meancpu\");\n", + " sw.WriteLine(\"{0}\", new String('-', 174));\n", + " foreach (var kvp in info)\n", + " {\n", + " List gcs = kvp.Value?.Data?.GCs;\n", + " if (gcs == null || gcs.Count == 0)\n", + " {\n", + " continue;\n", + " }\n", + "\n", + " int[] gc_counts = new int[4];\n", + " double[] gc_pauses = new double[4];\n", + " 
double[] gc_susps = new double[4];\n", + " for (int i = 0; i < gcs.Count; i++)\n", + " {\n", + " TraceGC gc = gcs[i];\n", + " //if (gc.SuspendDurationMSec > 5) sw.WriteLine($\"i={gc.Number} gen={gc.Generation} suspension={gc.SuspendDurationMSec} totalpause={gc.PauseDurationMSec}\");\n", + " if (gc.Generation < 2)\n", + " {\n", + " gc_counts[gc.Generation]++;\n", + " gc_pauses[gc.Generation] += gc.PauseDurationMSec;\n", + " gc_susps[gc.Generation] += gc.SuspendDurationMSec;\n", + " }\n", + " else\n", + " {\n", + " if (gc.Type == GCType.BackgroundGC)\n", + " {\n", + " gc_counts[3]++;\n", + " gc_pauses[3] += gc.PauseDurationMSec;\n", + " gc_susps[3] += gc.SuspendDurationMSec;\n", + " }\n", + " else\n", + " {\n", + " gc_counts[2]++;\n", + " gc_pauses[2] += gc.PauseDurationMSec;\n", + " gc_susps[2] += gc.SuspendDurationMSec;\n", + " }\n", + " }\n", + " }\n", + " \n", + " for (int i = 0; i < 4; i++)\n", + " {\n", + " if (gc_counts[i] > 0)\n", + " {\n", + " gc_pauses[i] /= gc_counts[i];\n", + " gc_susps[i] /= gc_counts[i];\n", + " }\n", + " }\n", + " \n", + " sw.WriteLine(\"{0,12} | {1,35} | {2, 5} | {3, 5:0.00} | {4, 5} | {5, 5:0.00} | {6, 5} | {7, 5:0.00} | {8, 5} | {9, 5:0.00} | {10, 10:0.00} | {11, 10:0.00} | {12, 5:0.00} | {13, 10:0.00} | {14, 10:0.00} |\",\n", + " kvp.Value.Run, kvp.Value.Benchmark, gc_counts[0], gc_pauses[0], gc_counts[1], gc_pauses[1], gc_counts[2], gc_pauses[2], gc_counts[3], gc_pauses[3],\n", + " kvp.Value.Data.Stats.TotalAllocatedMB, (kvp.Value.Data.Stats.TotalAllocatedMB / gcs.Count), kvp.Value.Data.Stats.GetGCPauseTimePercentage(), kvp.Value.Data.Stats.MaxSizePeakMB, kvp.Value.Data.Stats.MeanSizePeakMB);\n", + " sw.WriteLine(\"{0,12} | {1,35} | {2, 5} | {3, 5:0.00} | {4, 5} | {5, 5:0.00} | {6, 5} | {7, 5:0.00} | {8, 5} | {9, 5:0.00} | {10, 10:0.00} | {11, 10:0.00} | {12, 5:0.00} | {13, 10:0.00} | {14, 10:0.00} |\",\n", + " \"\", \"\", \"\", gc_susps[0], \"\", gc_susps[1], \"\", gc_susps[2], \"\", gc_susps[3],\n", + " \"\", \"\", \"\", kvp.Value.Data.Stats.TotalCpuMSec, kvp.Value.Data.Stats.MeanCpuMSec);\n", + " }\n", + " }}\n", + "\n", + "class MeanDataComparison\n", + "{\n", + " public string bench { get; set; }\n", + " public double baselineMaxPrivateMemoryMB { get; set; }\n", + " public double baselineP50PrivateMemoryMB { get; set; }\n", + " public double baselineRequestsPerMSec { get; set; }\n", + " public double avgMaxPrivateMemoryMBDiff { get; set; }\n", + " public double avgP50PrivateMemoryMBDiff { get; set; }\n", + " public double avgRequestsPerMSecDiff { get; set; }\n", + " public double baselineCVMaxPrivateMemoryMB { get; set; }\n", + " public double baselineCVP50PrivateMemoryMB { get; set; }\n", + " public double baselineCVRequestsPerMSec { get; set; }\n", + " public double fixCVMaxPrivateMemoryMB { get; set; }\n", + " public double fixCVP50PrivateMemoryMB { get; set; }\n", + " public double fixCVRequestsPerMSec { get; set; }\n", + " public double cvMaxPrivateMemoryMBDiff { get; set; }\n", + " public double cvP50PrivateMemoryMBDiff { get; set; }\n", + " public double cvRequestsPerMSecDiff { get; set; }\n", + "}\n", + "\n", + "double GetCV(List dataPoints, out double avg)\n", + "{\n", + " // for (int i = 0; i < dataPoints.Count; i++)\n", + " // {\n", + " // Console.WriteLine(\"item {0}: {1}\", i, dataPoints[i]);\n", + " // }\n", + " double mean = dataPoints.Average();\n", + " avg = mean;\n", + " double sumOfSquaredDifferences = dataPoints.Sum(val => Math.Pow(val - mean, 2));\n", + " double populationStandardDeviation = Math.Sqrt(sumOfSquaredDifferences / 
dataPoints.Count);\n", + " double coefficientOfVariation = (populationStandardDeviation / mean) * 100;\n", + " return coefficientOfVariation;\n", + "}\n", + "\n", + "// accommodates when there are different numbers of iterations in first and second run.\n", + "// returns a list of benchmarks we added to the comparison data\n", + "List SummarizeResultsByBench(DataManager dataManager, List runNames, string benchName = null)\n", + "{\n", + " Dictionary> benchmarkToRunData = dataManager._benchmarkToRunData;\n", + " Console.WriteLine(\"benchmarkToRunData has {0} tests\\n\", benchmarkToRunData.Count);\n", + "\n", + " //bool fLogDetail = false;\n", + " bool fLogDetail = true;\n", + "\n", + " string strSeparator = new String('-', 223);\n", + " Console.WriteLine(\"{0}\", strSeparator);\n", + "\n", + " // key is the name of the run, eg, \"baseline\" or \"fix\". For each run, we add its summary data to a list.\n", + " Dictionary> summaryDataForRuns = new Dictionary>(2);\n", + " List comparisonData = new List(51);\n", + "\n", + " foreach (var benchmarkData in benchmarkToRunData)\n", + " {\n", + " // // Console.WriteLine(\"benchmark is {0}\", benchmarkData.Key);\n", + "\n", + " if ((benchName == null) || benchmarkData.Key.Equals(benchName, StringComparison.OrdinalIgnoreCase))\n", + " {\n", + " summaryDataForRuns.Clear();\n", + "\n", + " if (fLogDetail)\n", + " {\n", + " Console.WriteLine(\"{0,25} | {1,35} | {2, 5} | {3, 5:0.00} | {4, 5} | {5, 5:0.00} | {6, 5} | {7, 5:0.00} | {8, 5} | {9, 5:0.00} | {10, 10:0.00} | {11, 10:0.00} | {12, 5:0.00} | {13, 8:0.00} | {14, 8:0.00} | {15, 8:0.00} | {16, 8:0.00} | {17, 8:0.00} | {18, 4:0.00} | {19, 10} |\",\n", + " \"run\", \"benchmark\", \"gen0\", \"pause\", \"gen1\", \"pause\", \"ngc2\", \"pause\", \"bgc\", \"pause\", \"allocMB\", \"alloc/gc\", \"pct\", \"peakMB\", \"meanMB\", \"max mem\", \"rps\", \"latency\", \"hc\", \"gc count\");\n", + " Console.WriteLine(\"{0}\", strSeparator); \n", + " }\n", + "\n", + " // if no runs observed an hc change, we don't keep it in the summary data.\n", + " int totalHCChanges = 0;\n", + "\n", + " foreach (var kvp in benchmarkData.Value)\n", + " {\n", + " List gcs = kvp.Value?.Data?.GCs;\n", + " // We don't look at benchmarks that did very few GCs\n", + " if ((gcs == null) || (gcs.Count == 0))\n", + " {\n", + " continue;\n", + " }\n", + "\n", + " int[] gc_counts = new int[4];\n", + " double[] gc_pauses = new double[4];\n", + " for (int i = 0; i < gcs.Count; i++)\n", + " {\n", + " TraceGC gc = gcs[i];\n", + " if (gc.Generation < 2)\n", + " {\n", + " gc_counts[gc.Generation]++;\n", + " gc_pauses[gc.Generation] += gc.PauseDurationMSec;\n", + " }\n", + " else\n", + " {\n", + " if (gc.Type == GCType.BackgroundGC)\n", + " {\n", + " gc_counts[3]++;\n", + " gc_pauses[3] += gc.PauseDurationMSec;\n", + " }\n", + " else\n", + " {\n", + " gc_counts[2]++;\n", + " gc_pauses[2] += gc.PauseDurationMSec;\n", + " }\n", + " }\n", + " }\n", + "\n", + " for (int i = 0; i < 4; i++)\n", + " {\n", + " if (gc_counts[i] > 0)\n", + " {\n", + " gc_pauses[i] /= gc_counts[i];\n", + " }\n", + " }\n", + "\n", + " if (fLogDetail)\n", + " {\n", + " Console.WriteLine(\"{0,25} | {1,35} | {2, 5} | {3, 5:0.00} | {4, 5} | {5, 5:0.00} | {6, 5} | {7, 5:0.00} | {8, 5} | {9, 5:0.00} | {10, 10:0.00} | {11, 10:0.00} | {12, 5:0.00} | {13, 8:0.00} | {14, 8:0.00} | {15, 8:0.00} | {16, 8:0.00} | {17, 8:0.00} | {18, 4} | {19, 10} |\",\n", + " kvp.Value.Run, kvp.Value.Benchmark, gc_counts[0], gc_pauses[0], gc_counts[1], gc_pauses[1], gc_counts[2], gc_pauses[2], gc_counts[3], 
gc_pauses[3],\n", + " kvp.Value.Data.Stats.TotalAllocatedMB, (kvp.Value.Data.Stats.TotalAllocatedMB / gcs.Count), kvp.Value.Data.Stats.GetGCPauseTimePercentage(), kvp.Value.Data.Stats.MaxSizePeakMB, kvp.Value.Data.Stats.MeanSizePeakMB,\n", + " kvp.Value.MaxPrivateMemoryMB, kvp.Value.RequestsPerMSec, kvp.Value.MeanLatencyMS, kvp.Value.NumberOfHeapCountSwitches, kvp.Value.Data.Stats.Count);\n", + " }\n", + "\n", + " totalHCChanges += (int)kvp.Value.NumberOfHeapCountSwitches;\n", + "\n", + " for (int runIdx = 0; runIdx < runNames.Count; runIdx++)\n", + " {\n", + " if (kvp.Value.Run.StartsWith(runNames[runIdx]))\n", + " {\n", + " BenchmarkSummaryData data = new BenchmarkSummaryData \n", + " {\n", + " MaxPrivateMemoryMB = kvp.Value.MaxPrivateMemoryMB,\n", + " P50PrivateMemoryMB = kvp.Value.P50PrivateMemoryMB,\n", + " RequestsPerMSec = kvp.Value.RequestsPerMSec,\n", + " };\n", + "\n", + " if (summaryDataForRuns.ContainsKey(runNames[runIdx]))\n", + " {\n", + " summaryDataForRuns[runNames[runIdx]].Add(data);\n", + " }\n", + " else\n", + " {\n", + " List listData = new List(3);\n", + " listData.Add(data);\n", + " summaryDataForRuns.Add(runNames[runIdx], listData);\n", + " }\n", + " break;\n", + " }\n", + " }\n", + " }\n", + "\n", + " if (fLogDetail)\n", + " {\n", + " Console.WriteLine(\"{0}\", strSeparator);\n", + " }\n", + "\n", + " if (totalHCChanges == 0)\n", + " {\n", + " //Console.WriteLine(\"don't do comparison for bench {0}! no HC changes\", benchmarkData.Key);\n", + " continue;\n", + " }\n", + "\n", + " // Now write some summary stuff\n", + " //Console.WriteLine(\"summary dictionary has {0} elements\", summaryDataForRuns.Count);\n", + "\n", + " if (fLogDetail)\n", + " {\n", + " Console.WriteLine(\"{0,45} | {1,10} | {2,10} | {3,10} | {4,10} | {5,10} | {6,10} |\", \"data\", \"max mem\", \"CV%\", \"p50 mem\", \"CV%\", \"rps\", \"CV%\");\n", + " }\n", + "\n", + " int numRuns = summaryDataForRuns.Count;\n", + " double[] avgMaxPrivateMemoryMBForRuns = new double [numRuns];\n", + " double[] avgP50PrivateMemoryMBForRuns = new double [numRuns];\n", + " double[] avgRequestsPerMSecForRuns = new double [numRuns];\n", + " double[] cvMaxPrivateMemoryMBForRuns = new double [numRuns];\n", + " double[] cvP50PrivateMemoryMBForRuns = new double [numRuns];\n", + " double[] cvRequestsPerMSecForRuns = new double [numRuns];\n", + "\n", + " for (int i = 0; i < summaryDataForRuns.Count; i++)\n", + " {\n", + " //Console.WriteLine(\"bench {0} has {1} iteration in run {2}\", benchmarkData.Key, summaryDataForRuns.ElementAt(i).Value.Count, summaryDataForRuns.ElementAt(i).Key);\n", + " List listData = summaryDataForRuns.ElementAt(i).Value;\n", + "\n", + " // for (int runIdx = 0; runIdx < listData.Count; runIdx++)\n", + " // {\n", + " // Console.WriteLine(\"run {0} iter {1} max mem {2}, rps {3}\", summaryDataForRuns.ElementAt(i).Key, runIdx, listData[runIdx].MaxPrivateMemoryMB, listData[runIdx].RequestsPerMSec);\n", + " // }\n", + " List listMaxPrivateMemoryMB = listData.Select(s => s.MaxPrivateMemoryMB).ToList();\n", + " double avgMaxPrivateMemoryMB, avgP50PrivateMemoryMB, avgRequestsPerMSec;\n", + " double cvMaxPrivateMemoryMB = GetCV(listMaxPrivateMemoryMB, out avgMaxPrivateMemoryMB);\n", + " List listP50PrivateMemoryMB = listData.Select(s => s.P50PrivateMemoryMB).ToList();\n", + " double cvP50PrivateMemoryMB = GetCV(listP50PrivateMemoryMB, out avgP50PrivateMemoryMB);\n", + " List listRequestsPerMSec = listData.Select(s => s.RequestsPerMSec).ToList();\n", + " double cvRequestsPerMSec = GetCV(listRequestsPerMSec, out 
avgRequestsPerMSec);\n", + "\n", + " avgMaxPrivateMemoryMBForRuns[i] = avgMaxPrivateMemoryMB;\n", + " avgP50PrivateMemoryMBForRuns[i] = avgP50PrivateMemoryMB;\n", + " avgRequestsPerMSecForRuns[i] = avgRequestsPerMSec;\n", + " cvMaxPrivateMemoryMBForRuns[i] = cvMaxPrivateMemoryMB;\n", + " cvP50PrivateMemoryMBForRuns[i] = cvP50PrivateMemoryMB;\n", + " cvRequestsPerMSecForRuns[i] = cvRequestsPerMSec;\n", + " \n", + " if (fLogDetail)\n", + " {\n", + " Console.WriteLine(\"{0,45} | {1,10:0.00} | {2,10:0.00} | {3,10:0.00} | {4,10:0.00} | {5,10:0.00} | {6,10:0.00} |\", (\"-\" + benchmarkData.Key + \"-\" + summaryDataForRuns.ElementAt(i).Key), \n", + " avgMaxPrivateMemoryMB, cvMaxPrivateMemoryMB, avgP50PrivateMemoryMB, cvP50PrivateMemoryMB, avgRequestsPerMSec, cvRequestsPerMSec);\n", + " }\n", + " }\n", + "\n", + " // I'm just assuming we only have 2 runs.\n", + " MeanDataComparison comp = new MeanDataComparison\n", + " {\n", + " bench = benchmarkData.Key,\n", + " baselineMaxPrivateMemoryMB = avgMaxPrivateMemoryMBForRuns[0],\n", + " baselineP50PrivateMemoryMB = avgP50PrivateMemoryMBForRuns[0],\n", + " baselineRequestsPerMSec = avgRequestsPerMSecForRuns[0],\n", + " avgMaxPrivateMemoryMBDiff = (avgMaxPrivateMemoryMBForRuns[1] - avgMaxPrivateMemoryMBForRuns[0]) * 100.0 / avgMaxPrivateMemoryMBForRuns[0],\n", + " avgP50PrivateMemoryMBDiff = (avgP50PrivateMemoryMBForRuns[1] - avgP50PrivateMemoryMBForRuns[0]) * 100.0 / avgP50PrivateMemoryMBForRuns[0],\n", + " avgRequestsPerMSecDiff = (avgRequestsPerMSecForRuns[1] - avgRequestsPerMSecForRuns[0]) * 100.0 / avgRequestsPerMSecForRuns[0],\n", + " baselineCVMaxPrivateMemoryMB = cvMaxPrivateMemoryMBForRuns[0],\n", + " baselineCVP50PrivateMemoryMB = cvP50PrivateMemoryMBForRuns[0],\n", + " baselineCVRequestsPerMSec = cvRequestsPerMSecForRuns[0],\n", + " fixCVMaxPrivateMemoryMB = cvMaxPrivateMemoryMBForRuns[1],\n", + " fixCVP50PrivateMemoryMB = cvP50PrivateMemoryMBForRuns[1],\n", + " fixCVRequestsPerMSec = cvRequestsPerMSecForRuns[1],\n", + " cvMaxPrivateMemoryMBDiff = (cvMaxPrivateMemoryMBForRuns[1] - cvMaxPrivateMemoryMBForRuns[0]) * 100.0 / cvMaxPrivateMemoryMBForRuns[0],\n", + " cvP50PrivateMemoryMBDiff = (cvP50PrivateMemoryMBForRuns[1] - cvP50PrivateMemoryMBForRuns[0]) * 100.0 / cvP50PrivateMemoryMBForRuns[0],\n", + " cvRequestsPerMSecDiff = (cvRequestsPerMSecForRuns[1] - cvRequestsPerMSecForRuns[0]) * 100.0 / cvRequestsPerMSecForRuns[0], \n", + " };\n", + " comparisonData.Add(comp);\n", + "\n", + " if (fLogDetail)\n", + " {\n", + " Console.WriteLine(\"{0}\\n\", strSeparator);\n", + " }\n", + "\n", + " if (benchName != null)\n", + " {\n", + " break;\n", + " }\n", + " }\n", + " }\n", + "\n", + " if (true)\n", + " {\n", + " Console.WriteLine(\"displaying {0} benches that observed HC changes\", comparisonData.Count);\n", + "\n", + " Console.WriteLine(\"{0,35} | {1, 9} | {2,9} | {3,9} | {4,9} | {5,9} | {6,9} | {7,9} | {8,9} | {9,9} | {10,9} | {11,9} | {12,9} | {13,9} | {14,9} | {15,9} |\",\n", + " \"bench\", \"b max mem\", \"max mem %\", \"b cv%\", \"f cv%\", \"CV% %\", \"b p50 mem\", \"p50 mem %\", \"b cv%\", \"f cv%\", \"CV% %\", \"b rps\", \"rps %\", \"b cv%\", \"f cv%\", \"CV% %\");\n", + "\n", + " var sortedComparisonData = comparisonData.OrderByDescending(a => a.cvMaxPrivateMemoryMBDiff).ToList();\n", + " //var sortedComparisonData = comparisonData.OrderBy(a => a.avgMaxPrivateMemoryMBDiff).ToList();\n", + " for (int benchIdx = 0; benchIdx < sortedComparisonData.Count; benchIdx++)\n", + " {\n", + " MeanDataComparison currentComp = 
sortedComparisonData[benchIdx];\n", + " Console.WriteLine(\"{0,35} | {1,9:0.00} | {2,9:0.00} | {3,9:0.00} | {4,9:0.00} | {5,9:0.00} | {6,9:0.00} | {7,9:0.00} | {8,9:0.00} | {9,9:0.00} | {10,9:0.00} | {11,9:0.00} | {12,9:0.00} | {13,9:0.00} | {14,9:0.00} | {15,9:0.00} |\",\n", + " currentComp.bench,\n", + " currentComp.baselineMaxPrivateMemoryMB, currentComp.avgMaxPrivateMemoryMBDiff, currentComp.baselineCVMaxPrivateMemoryMB, currentComp.fixCVMaxPrivateMemoryMB, currentComp.cvMaxPrivateMemoryMBDiff,\n", + " currentComp.baselineP50PrivateMemoryMB, currentComp.avgP50PrivateMemoryMBDiff, currentComp.baselineCVP50PrivateMemoryMB, currentComp.fixCVP50PrivateMemoryMB, currentComp.cvP50PrivateMemoryMBDiff,\n", + " currentComp.baselineRequestsPerMSec, currentComp.avgRequestsPerMSecDiff, currentComp.baselineCVRequestsPerMSec, currentComp.fixCVRequestsPerMSec, currentComp.cvRequestsPerMSecDiff);\n", + " }\n", + " }\n", + "\n", + " return comparisonData;\n", + "}\n", + "*/\n", + "\n", + "// I haven't used this in a while. I'm not sure if it works.\n", + "/*\n", + " public void SaveDifferences(DataManager dataManager, string baseline, string comparand, List sortingCriteria = null)\n", + " {\n", + " // This function assumes the runs are all in:\n", + " // {build}_{iteration} form.\n", + " // Else, it will except.\n", + "using (StreamWriter sw = new StreamWriter(@\"c:\\home\\repro\\hc\\hc-savediff.txt\")) {\n", + " sw.WriteLine(\"start\");\n", + " // Iteration -> LoadInfos\n", + " Dictionary> iterationData = new();\n", + "\n", + " // Get the max iteration.\n", + " int maxIteration = -1;\n", + " foreach (var run in dataManager._runToBenchmarkData)\n", + " {\n", + " string runName = run.Key;\n", + " int iteration = 0;\n", + " if (run.Key.Contains(\"_\"))\n", + " {\n", + " string[] split = run.Key.Split(\"_\");\n", + " Debug.Assert(split.Length == 2);\n", + " string build = split[0];\n", + " string iterationAsString = split[1];\n", + " iteration = Convert.ToInt32(iterationAsString);\n", + " }\n", + " maxIteration = System.Math.Max(iteration, maxIteration);\n", + " }\n", + " sw.WriteLine(maxIteration);\n", + " // Compute Average Diff\n", + " // Build to Benchmark -> Data\n", + " Dictionary> averageData = new();\n", + "\n", + " for (int i = 0; i <= maxIteration; i++)\n", + " {\n", + " sw.WriteLine(i);\n", + " sw.WriteLine(maxIteration);\n", + " string baselineIteration;\n", + " string comparandIteration;\n", + " if (maxIteration == 0)\n", + " {\n", + " baselineIteration = baseline;\n", + " comparandIteration = comparand;\n", + " }\n", + " else\n", + " {\n", + " baselineIteration = baseline + \"_\" + i.ToString();\n", + " comparandIteration = comparand + \"_\" + i.ToString();\n", + " }\n", + " foreach (var x in dataManager._runToBenchmarkData.Keys) { sw.WriteLine(x); }\n", + " Dictionary baselineIterationRuns = dataManager._runToBenchmarkData[baselineIteration];\n", + " Dictionary comparandIterationRuns = dataManager._runToBenchmarkData[comparandIteration];\n", + "\n", + " foreach (var b in baselineIterationRuns)\n", + " {\n", + " if (!iterationData.TryGetValue(i, out var benchmarks))\n", + " {\n", + " iterationData[i] = benchmarks = new();\n", + " }\n", + "\n", + " benchmarks.Add(dataManager.GetComparison(baselineIterationRuns[b.Key], comparandIterationRuns[b.Key]));\n", + " }\n", + "\n", + " if (!averageData.TryGetValue(baseline, out var bVal))\n", + " {\n", + " averageData[baseline] = bVal = new();\n", + " foreach (var benchmark in baselineIterationRuns)\n", + " {\n", + " bVal[benchmark.Key] = new 
LoadInfo\n", + " {\n", + " Benchmark = benchmark.Key,\n", + " MaxWorkingSetMB = benchmark.Value.MaxWorkingSetMB,\n", + " MaxPrivateMemoryMB = benchmark.Value.MaxPrivateMemoryMB,\n", + " P99PrivateMemoryMB = benchmark.Value.P99PrivateMemoryMB,\n", + " P95PrivateMemoryMB = benchmark.Value.P95PrivateMemoryMB,\n", + " P90PrivateMemoryMB = benchmark.Value.P90PrivateMemoryMB,\n", + " P75PrivateMemoryMB = benchmark.Value.P75PrivateMemoryMB,\n", + " P50PrivateMemoryMB = benchmark.Value.P50PrivateMemoryMB,\n", + " RequestsPerMSec = benchmark.Value.RequestsPerMSec,\n", + " MeanLatencyMS = benchmark.Value.MeanLatencyMS,\n", + " Latency50thMS = benchmark.Value.Latency50thMS, \n", + " Latency75thMS = benchmark.Value.Latency75thMS,\n", + " Latency90thMS = benchmark.Value.Latency90thMS,\n", + " Latency99thMS = benchmark.Value.Latency99thMS,\n", + " MaxHeapCount = benchmark.Value.MaxHeapCount,\n", + " NumberOfHeapCountSwitches = benchmark.Value.NumberOfHeapCountSwitches,\n", + " NumberOfHeapCountDirectionChanges = benchmark.Value.NumberOfHeapCountDirectionChanges,\n", + " };\n", + " }\n", + " }\n", + "\n", + " else\n", + " {\n", + " foreach (var benchmark in baselineIterationRuns)\n", + " {\n", + " var data = bVal[benchmark.Key];\n", + " data.Benchmark = benchmark.Key;\n", + " data.MaxWorkingSetMB += benchmark.Value.MaxWorkingSetMB;\n", + " data.MaxPrivateMemoryMB += benchmark.Value.MaxPrivateMemoryMB;\n", + " data.P99PrivateMemoryMB += benchmark.Value.P99PrivateMemoryMB;\n", + " data.P95PrivateMemoryMB += benchmark.Value.P95PrivateMemoryMB;\n", + " data.P90PrivateMemoryMB += benchmark.Value.P90PrivateMemoryMB;\n", + " data.P75PrivateMemoryMB += benchmark.Value.P75PrivateMemoryMB;\n", + " data.P50PrivateMemoryMB += benchmark.Value.P50PrivateMemoryMB;\n", + " data.RequestsPerMSec += benchmark.Value.RequestsPerMSec;\n", + " data.MeanLatencyMS += benchmark.Value.MeanLatencyMS;\n", + " data.Latency50thMS += benchmark.Value.Latency50thMS; \n", + " data.Latency75thMS += benchmark.Value.Latency75thMS;\n", + " data.Latency90thMS += benchmark.Value.Latency90thMS;\n", + " data.Latency99thMS += benchmark.Value.Latency99thMS;\n", + " data.MaxHeapCount += benchmark.Value.MaxHeapCount;\n", + " data.NumberOfHeapCountSwitches += benchmark.Value.NumberOfHeapCountSwitches;\n", + " data.NumberOfHeapCountDirectionChanges += benchmark.Value.NumberOfHeapCountDirectionChanges;\n", + " }\n", + " }\n", + "\n", + " if (!averageData.TryGetValue(comparand, out var cVal))\n", + " {\n", + " averageData[comparand] = cVal = new();\n", + " foreach (var benchmark in comparandIterationRuns)\n", + " {\n", + " cVal[benchmark.Key] = new LoadInfo\n", + " {\n", + " Benchmark = benchmark.Key,\n", + " MaxWorkingSetMB = benchmark.Value.MaxWorkingSetMB,\n", + " MaxPrivateMemoryMB = benchmark.Value.MaxPrivateMemoryMB,\n", + " P99PrivateMemoryMB = benchmark.Value.P99PrivateMemoryMB,\n", + " P95PrivateMemoryMB = benchmark.Value.P95PrivateMemoryMB,\n", + " P90PrivateMemoryMB = benchmark.Value.P90PrivateMemoryMB,\n", + " P75PrivateMemoryMB = benchmark.Value.P75PrivateMemoryMB,\n", + " P50PrivateMemoryMB = benchmark.Value.P50PrivateMemoryMB,\n", + " RequestsPerMSec = benchmark.Value.RequestsPerMSec,\n", + " MeanLatencyMS = benchmark.Value.MeanLatencyMS,\n", + " Latency50thMS = benchmark.Value.Latency50thMS, \n", + " Latency75thMS = benchmark.Value.Latency75thMS,\n", + " Latency90thMS = benchmark.Value.Latency90thMS,\n", + " Latency99thMS = benchmark.Value.Latency99thMS,\n", + " MaxHeapCount = benchmark.Value.MaxHeapCount,\n", + " 
NumberOfHeapCountSwitches = benchmark.Value.NumberOfHeapCountSwitches,\n", + " NumberOfHeapCountDirectionChanges = benchmark.Value.NumberOfHeapCountDirectionChanges,\n", + " };\n", + " }\n", + " }\n", + "\n", + " else\n", + " {\n", + " foreach (var benchmark in comparandIterationRuns)\n", + " {\n", + " var data = cVal[benchmark.Key];\n", + " data.Benchmark = benchmark.Key;\n", + " data.MaxWorkingSetMB += benchmark.Value.MaxWorkingSetMB;\n", + " data.MaxPrivateMemoryMB += benchmark.Value.MaxPrivateMemoryMB;\n", + " data.P99PrivateMemoryMB += benchmark.Value.P99PrivateMemoryMB;\n", + " data.P95PrivateMemoryMB += benchmark.Value.P95PrivateMemoryMB;\n", + " data.P90PrivateMemoryMB += benchmark.Value.P90PrivateMemoryMB;\n", + " data.P75PrivateMemoryMB += benchmark.Value.P75PrivateMemoryMB;\n", + " data.P50PrivateMemoryMB += benchmark.Value.P50PrivateMemoryMB;\n", + " data.RequestsPerMSec += benchmark.Value.RequestsPerMSec;\n", + " data.MeanLatencyMS += benchmark.Value.MeanLatencyMS;\n", + " data.Latency50thMS += benchmark.Value.Latency50thMS; \n", + " data.Latency75thMS += benchmark.Value.Latency75thMS;\n", + " data.Latency90thMS += benchmark.Value.Latency90thMS;\n", + " data.Latency99thMS += benchmark.Value.Latency99thMS;\n", + " data.MaxHeapCount += benchmark.Value.MaxHeapCount;\n", + " data.NumberOfHeapCountSwitches += benchmark.Value.NumberOfHeapCountSwitches;\n", + " data.NumberOfHeapCountDirectionChanges += benchmark.Value.NumberOfHeapCountDirectionChanges;\n", + " }\n", + " }\n", + " }\n", + "\n", + " foreach (var benchmark in dataManager._benchmarkToRunData)\n", + " {\n", + " foreach (var build in averageData)\n", + " {\n", + " var data = build.Value[benchmark.Key];\n", + " data.Benchmark = benchmark.Key;\n", + " data.MaxWorkingSetMB /= (maxIteration + 1); \n", + " data.MaxPrivateMemoryMB /= (maxIteration + 1);\n", + " data.P99PrivateMemoryMB /= (maxIteration + 1);\n", + " data.P95PrivateMemoryMB /= (maxIteration + 1);\n", + " data.P90PrivateMemoryMB /= (maxIteration + 1);\n", + " data.P75PrivateMemoryMB /= (maxIteration + 1);\n", + " data.P50PrivateMemoryMB /= (maxIteration + 1);\n", + " data.RequestsPerMSec /= (maxIteration + 1);\n", + " data.MeanLatencyMS /= (maxIteration + 1);\n", + " data.Latency50thMS /= (maxIteration + 1);\n", + " data.Latency75thMS /= (maxIteration + 1);\n", + " data.Latency90thMS /= (maxIteration + 1);\n", + " data.Latency99thMS /= (maxIteration + 1);\n", + " data.MaxHeapCount /= (maxIteration + 1);\n", + " data.NumberOfHeapCountSwitches /= (maxIteration + 1);\n", + " data.NumberOfHeapCountDirectionChanges /= (maxIteration + 1);\n", + " }\n", + " }\n", + "\n", + " string DisplayDetailsForABenchmark(LoadInfo val) =>\n", + " $\"{val.Benchmark},{val.MaxWorkingSetMB},{val.MaxPrivateMemoryMB},{val.RequestsPerMSec},{val.MeanLatencyMS},{val.Latency50thMS},{val.Latency75thMS},{val.Latency90thMS},{val.Latency99thMS},{val.NumberOfHeapCountSwitches},{val.MaxHeapCount}\";\n", + "\n", + " if (sortingCriteria == null)\n", + " {\n", + " sortingCriteria = new() { nameof(LoadInfo.MaxPrivateMemoryMB) };\n", + " }\n", + "\n", + " foreach (var s in sortingCriteria)\n", + " {\n", + " Func sortingFunctor = null;\n", + " Func, double> selectionFunctor = null;\n", + "\n", + " switch (s)\n", + " {\n", + " case nameof(LoadInfo.MaxWorkingSetMB):\n", + " sortingFunctor = (data) => data.MaxWorkingSetMB;\n", + " selectionFunctor = (data) => data.Value.MaxWorkingSetMB;\n", + " break;\n", + " case nameof(LoadInfo.MaxPrivateMemoryMB):\n", + " sortingFunctor = (data) => 
data.MaxPrivateMemoryMB;\n", + " selectionFunctor = (data) => data.Value.MaxPrivateMemoryMB;\n", + " break;\n", + " case nameof(LoadInfo.P99PrivateMemoryMB):\n", + " sortingFunctor = (data) => data.P99PrivateMemoryMB;\n", + " selectionFunctor = (data) => data.Value.P99PrivateMemoryMB;\n", + " break;\n", + " case nameof(LoadInfo.P95PrivateMemoryMB):\n", + " sortingFunctor = (data) => data.P95PrivateMemoryMB;\n", + " selectionFunctor = (data) => data.Value.P95PrivateMemoryMB;\n", + " break;\n", + " case nameof(LoadInfo.P90PrivateMemoryMB):\n", + " sortingFunctor = (data) => data.P90PrivateMemoryMB;\n", + " selectionFunctor = (data) => data.Value.P90PrivateMemoryMB;\n", + " break;\n", + " case nameof(LoadInfo.P75PrivateMemoryMB):\n", + " sortingFunctor = (data) => data.P75PrivateMemoryMB;\n", + " selectionFunctor = (data) => data.Value.P75PrivateMemoryMB;\n", + " break;\n", + " case nameof(LoadInfo.P50PrivateMemoryMB):\n", + " sortingFunctor = (data) => data.P50PrivateMemoryMB;\n", + " selectionFunctor = (data) => data.Value.P50PrivateMemoryMB;\n", + " break;\n", + " case nameof(LoadInfo.RequestsPerMSec):\n", + " sortingFunctor = (data) => data.RequestsPerMSec;\n", + " selectionFunctor = (data) => data.Value.RequestsPerMSec;\n", + " break;\n", + " case nameof(LoadInfo.MeanLatencyMS):\n", + " sortingFunctor = (data) => data.MeanLatencyMS;\n", + " selectionFunctor = (data) => data.Value.MeanLatencyMS;\n", + " break;\n", + " case nameof(LoadInfo.Latency50thMS):\n", + " sortingFunctor = (data) => data.Latency50thMS;\n", + " selectionFunctor = (data) => data.Value.Latency50thMS;\n", + " break;\n", + " case nameof(LoadInfo.Latency75thMS):\n", + " sortingFunctor = (data) => data.Latency75thMS;\n", + " selectionFunctor = (data) => data.Value.Latency75thMS;\n", + " break;\n", + " case nameof(LoadInfo.Latency90thMS):\n", + " sortingFunctor = (data) => data.Latency90thMS;\n", + " selectionFunctor = (data) => data.Value.Latency90thMS;\n", + " break;\n", + " case nameof(LoadInfo.Latency99thMS):\n", + " sortingFunctor = (data) => data.Latency99thMS;\n", + " selectionFunctor = (data) => data.Value.Latency99thMS;\n", + " break;\n", + " case nameof(LoadInfo.MaxHeapCount):\n", + " sortingFunctor = (data) => data.MaxHeapCount;\n", + " selectionFunctor = (data) => data.Value.MaxHeapCount;\n", + " break;\n", + " case nameof(LoadInfo.NumberOfHeapCountSwitches):\n", + " sortingFunctor = (data) => data.NumberOfHeapCountSwitches;\n", + " selectionFunctor = (data) => data.Value.NumberOfHeapCountSwitches;\n", + " break;\n", + " case nameof(LoadInfo.NumberOfHeapCountDirectionChanges):\n", + " sortingFunctor = (data) => data.NumberOfHeapCountDirectionChanges;\n", + " selectionFunctor = (data) => data.Value.NumberOfHeapCountDirectionChanges;\n", + " break;\n", + "\n", + " case nameof(BenchmarkSummaryData.TotalSuspensionTimeMSec):\n", + " sortingFunctor = (data) => data.TotalSuspensionTimeMSec;\n", + " selectionFunctor = (data) => data.Value.TotalSuspensionTimeMSec;\n", + " break;\n", + " case nameof(BenchmarkSummaryData.PercentPauseTimeInGC):\n", + " sortingFunctor = (data) => data.PercentPauseTimeInGC;\n", + " selectionFunctor = (data) => data.Value.PercentPauseTimeInGC;\n", + " break;\n", + " case nameof(BenchmarkSummaryData.PercentTimeInGC):\n", + " sortingFunctor = (data) => data.PercentTimeInGC;\n", + " selectionFunctor = (data) => data.Value.PercentTimeInGC;\n", + " break;\n", + " case nameof(BenchmarkSummaryData.MeanHeapSizeBeforeMB):\n", + " sortingFunctor = (data) => data.MeanHeapSizeBeforeMB;\n", + " 
selectionFunctor = (data) => data.Value.MeanHeapSizeBeforeMB;\n", + " break;\n", + " case nameof(BenchmarkSummaryData.MaxHeapSizeMB):\n", + " sortingFunctor = (data) => data.MaxHeapSizeMB;\n", + " selectionFunctor = (data) => data.Value.MaxHeapSizeMB;\n", + " break;\n", + " case nameof(BenchmarkSummaryData.TotalAllocationsMB):\n", + " sortingFunctor = (data) => data.TotalAllocationsMB;\n", + " selectionFunctor = (data) => data.Value.TotalAllocationsMB;\n", + " break;\n", + " case nameof(BenchmarkSummaryData.GCScore):\n", + " sortingFunctor = (data) => data.GCScore;\n", + " selectionFunctor = (data) => data.Value.GCScore;\n", + " break;\n", + "\n", + " default:\n", + " throw new Exception($\"unexpected {s}\");\n", + " }\n", + "\n", + " List> sortedLoadInfo = new(); \n", + " foreach (var iteration in iterationData)\n", + " {\n", + " sortedLoadInfo.Add(iteration.Value.OrderByDescending(sortingFunctor).ToList());\n", + " }\n", + "\n", + " List sortedAverages = new();\n", + "\n", + " foreach (var benchmark in averageData[baseline])\n", + " {\n", + " LoadInfo baselineInfo = benchmark.Value;\n", + " LoadInfo comparandInfo = averageData[comparand][benchmark.Key];\n", + " LoadInfo comparisonInfo = dataManager.GetComparison(baselineInfo, comparandInfo);\n", + " sortedAverages.Add(comparisonInfo);\n", + " }\n", + " sortedAverages = sortedAverages.OrderByDescending(sortingFunctor).ToList();\n", + "\n", + " // Create CSV.\n", + " StringBuilder top = new();\n", + "\n", + " // Iterate over each of the runs.\n", + " const int singleBuildColumnSize = 11;\n", + " int numberOfIterations = maxIteration + 1;\n", + " string columnHeader = \"Benchmark Name,WorkingSetMB,PrivateMemoryMB,RequestsPerMSec,MeanLatencyMS,Latency50thMS,Latency75thMS,Latency90thMS,Latency99thMS,# HC Switches\";\n", + "\n", + " int totalCountOfBenchmarks = sortedLoadInfo.First().Count;\n", + "\n", + " string first = string.Join(\"\", Enumerable.Range(0, numberOfIterations).Select(build => build + string.Join(\"\", Enumerable.Repeat(\",\", singleBuildColumnSize))));\n", + " string second = string.Join(\",,\", Enumerable.Repeat(columnHeader, numberOfIterations));\n", + "\n", + " // Add the average diff.\n", + " first += \"Average Diff %\" + string.Join(\"\", Enumerable.Repeat(\",\", singleBuildColumnSize));\n", + " second += \",,\" + string.Join(\",,\", columnHeader);\n", + "\n", + " top.AppendLine(first);\n", + " top.AppendLine(second);\n", + "\n", + " for (int benchmarkIdx = 0; benchmarkIdx < totalCountOfBenchmarks; benchmarkIdx++)\n", + " {\n", + " string benchmarkData = string.Join(\",,\", Enumerable.Range(0, numberOfIterations).Select(iteration => DisplayDetailsForABenchmark(sortedLoadInfo[iteration][benchmarkIdx])));\n", + " benchmarkData += $\",,{DisplayDetailsForABenchmark(sortedAverages[benchmarkIdx])}\";\n", + "\n", + " top.AppendLine(benchmarkData);\n", + " }\n", + "\n", + " File.WriteAllText(Path.Combine(dataManager._basePath, $\"Difference_{s}.csv\"), top.ToString());\n", + "\n", + " var layout = new Layout.Layout\n", + " {\n", + " xaxis = new Xaxis { title = \"Benchmark Name\" },\n", + " yaxis = new Yaxis { title = $\"{s}\" },\n", + " width = 1500,\n", + " title = $\"Raw values of {s} for Runs\"\n", + " };\n", + "\n", + " List scatters = new();\n", + "\n", + " const int baseColor = 150;\n", + "\n", + " for (int iterationIdx = 0; iterationIdx <= maxIteration; iterationIdx++)\n", + " {\n", + " string baselineIteration;\n", + " string comparandIteration;\n", + " if (maxIteration == 0)\n", + " {\n", + " baselineIteration = 
baseline;\n", + " comparandIteration = comparand;\n", + " }\n", + " else\n", + " {\n", + " baselineIteration = baseline + \"_\" + iterationIdx.ToString();\n", + " comparandIteration = comparand + \"_\" + iterationIdx.ToString();\n", + " }\n", + "\n", + " Dictionary baselineData = dataManager._runToBenchmarkData[baselineIteration];\n", + " Dictionary comparandData = dataManager._runToBenchmarkData[comparandIteration];\n", + "\n", + " if (iterationIdx == 0)\n", + " {\n", + " var sortedBaseline = baselineData.Values.OrderByDescending(sortingFunctor);\n", + " baselineData = sortedBaseline.ToDictionary(d => d.Benchmark);\n", + " }\n", + "\n", + " Scatter baselineScatter = new()\n", + " {\n", + " x = baselineData.Select(b => b.Key),\n", + " y = baselineData.Select(selectionFunctor),\n", + " name = $\"{baselineIteration} - {s}\",\n", + " mode = \"markers\",\n", + " marker = new Marker { color = $\"rgb({baseColor + iterationIdx * 50}, 0, 0)\" } \n", + " };\n", + "\n", + " Scatter comparandScatter = new()\n", + " {\n", + " x = comparandData.Select(b => b.Key),\n", + " y = comparandData.Select(selectionFunctor),\n", + " name = $\"{comparandIteration} - {s}\",\n", + " mode = \"markers\",\n", + " marker = new Marker { color = $\"rgb(0, 0, {baseColor + iterationIdx * 50})\" } \n", + " };\n", + "\n", + " scatters.Add(baselineScatter);\n", + " scatters.Add(comparandScatter);\n", + " }\n", + "\n", + " Chart.Plot(scatters, layout).Display();\n", + " }\n", + " }\n", + " }\n", + " */\n", + "\n", + " /*\n", + " public class BuildNameComparer : IEqualityComparer\n", + " {\n", + " public bool Equals(BuildName b1, BuildName b2) => b1.InData == b2.InData;\n", + " public int GetHashCode(BuildName b) => b.InData.GetHashCode();\n", + " }\n", + " public record PerBuildData((DataType, string) Criteria, string Unit, BuildName BuildName, Func Selector, List Data);\n", + " \n", + " public Func, double>[] summarizers = new Func, double>[] { ComputeVolatility, ComputeMin, ComputeMax, ComputeAverage, ComputeRange, ComputeGeoMean };\n", + " public void SaveData(DataManager dataManager, List builds, List<(DataType, string)> chartCriteria = null)\n", + " => SaveData(dataManager, builds, chartCriteria?.Select(s => new List<(DataType, string)> {s}).ToList());\n", + "\n", + " public void SaveData(DataManager dataManager, List builds, DataType dataType, List chartCriteria = null)\n", + " => SaveData(dataManager, builds, chartCriteria?.Select(s => (dataType, s)).ToList());\n", + " public void SaveData(DataManager dataManager, List builds, DataType dataType, List> chartCriteria = null)\n", + " => SaveData(dataManager, builds, chartCriteria?.Select(s => s.Select(s2 => (dataType, s2)).ToList()).ToList());\n", + " public void SaveDataOne(DataManager dataManager, List builds, DataType dataType, List chartCriteria = null)\n", + " => SaveData(dataManager, builds, new List>() { chartCriteria?.Select(s => (dataType, s)).ToList() });\n", + "\n", + " public void SaveData(DataManager dataManager, List builds, List> chartCriteria = null)\n", + " {\n", + " // Build Parent -> < Run -> < Benchmark -> Data >>>\n", + " Dictionary>> listOfData = new(new BuildNameComparer());\n", + "\n", + " foreach (var build in builds)\n", + " {\n", + " if (!listOfData.TryGetValue(build, out var b))\n", + " {\n", + " listOfData[build] = b = new();\n", + " }\n", + "\n", + " foreach (var run in dataManager._runToBenchmarkData)\n", + " {\n", + " if (run.Key.Contains(build.InData))\n", + " {\n", + " b.Add(run.Key, run.Value);\n", + " }\n", + " }\n", + " }\n", + "\n", + 
" // At this point all the data has been categorized.\n", + "\n", + " // Build Parent -> < DataType -> < Benchmark -> BenchmarkSummaryData >>\n", + " Dictionary[]> buildToBenchmarkSummaryData = new(new BuildNameComparer());\n", + " //summarizers.Select(_ => new Dictionary>()).ToArray();\n", + "\n", + " // Get the Summary Data Per Build.\n", + " foreach (var b in listOfData)\n", + " {\n", + " if (!buildToBenchmarkSummaryData.TryGetValue(b.Key, out var data))\n", + " {\n", + " buildToBenchmarkSummaryData[b.Key] = data = summarizers.Select(_ => new Dictionary()).ToArray();\n", + " }\n", + "\n", + " foreach (var br in dataManager._benchmarkToRunData)\n", + " {\n", + " for (DataType type = DataType.MIN_VALUE; type < DataType.COUNT; ++type)\n", + " {\n", + " data[(int) type][br.Key] = new();\n", + " }\n", + " }\n", + "\n", + " Dictionary> benchmarkToData = new();\n", + " foreach (var run in b.Value)\n", + " {\n", + " foreach (var benchmark in run.Value)\n", + " {\n", + " if (!benchmarkToData.TryGetValue(benchmark.Key, out var d))\n", + " {\n", + " benchmarkToData[benchmark.Key] = d = new();\n", + " }\n", + "\n", + " d.Add(benchmark.Value);\n", + " }\n", + " }\n", + " }\n", + "\n", + " //string DisplayDetailsForABenchmark(BenchmarkSummaryData val) =>\n", + " // $\"{val.Benchmark},{val.MaxWorkingSetMB},{val.MaxPrivateMemoryMB},{val.RequestsPerMSec},{val.MeanLatencyMS},{val.Latency50thMS},{val.Latency75thMS},{val.Latency90thMS},{val.Latency99thMS},{val.NumberOfHeapCountSwitches},{val.MaxHeapCount}\";\n", + " if (chartCriteria == null)\n", + " {\n", + " chartCriteria = new() { new() { (DataType.Volatility, nameof(LoadInfo.MaxPrivateMemoryMB)) } };\n", + " }\n", + "\n", + " foreach (var (group, criteriaIndex) in chartCriteria.WithIndex())\n", + " {\n", + " Func, double> sortingFunctor = null;\n", + " List> selectionFunctors = new();\n", + " List units = new();\n", + "\n", + " foreach (var ((type, s), index) in group.WithIndex())\n", + " {\n", + " Func, double> thisSortingFunctor = null;\n", + " Func selectionFunctor = null;\n", + " string unit = null;\n", + " switch (s)\n", + " {\n", + " //case nameof()\n", + " case nameof(BenchmarkSummaryData.MaxWorkingSetMB):\n", + " thisSortingFunctor = (data) => data.Value.MaxWorkingSetMB;\n", + " selectionFunctor = (data) => data.MaxWorkingSetMB;\n", + " unit = \"MB\";\n", + " break;\n", + " case nameof(BenchmarkSummaryData.MaxPrivateMemoryMB):\n", + " thisSortingFunctor = (data) => data.Value.MaxPrivateMemoryMB;\n", + " selectionFunctor = (data) => data.MaxPrivateMemoryMB;\n", + " unit = \"MB\";\n", + " break;\n", + " case nameof(BenchmarkSummaryData.P99PrivateMemoryMB):\n", + " thisSortingFunctor = (data) => data.Value.P99PrivateMemoryMB;\n", + " selectionFunctor = (data) => data.P99PrivateMemoryMB;\n", + " unit = \"MB\";\n", + " break;\n", + " case nameof(BenchmarkSummaryData.P95PrivateMemoryMB):\n", + " thisSortingFunctor = (data) => data.Value.P95PrivateMemoryMB;\n", + " selectionFunctor = (data) => data.P95PrivateMemoryMB;\n", + " unit = \"MB\";\n", + " break;\n", + " case nameof(BenchmarkSummaryData.P90PrivateMemoryMB):\n", + " thisSortingFunctor = (data) => data.Value.P90PrivateMemoryMB;\n", + " selectionFunctor = (data) => data.P90PrivateMemoryMB;\n", + " unit = \"MB\";\n", + " break;\n", + " case nameof(BenchmarkSummaryData.P75PrivateMemoryMB):\n", + " thisSortingFunctor = (data) => data.Value.P75PrivateMemoryMB;\n", + " selectionFunctor = (data) => data.P75PrivateMemoryMB;\n", + " unit = \"MB\";\n", + " break;\n", + " case 
nameof(BenchmarkSummaryData.P50PrivateMemoryMB):\n", + " thisSortingFunctor = (data) => data.Value.P50PrivateMemoryMB;\n", + " selectionFunctor = (data) => data.P50PrivateMemoryMB;\n", + " unit = \"MB\";\n", + " break;\n", + " case nameof(BenchmarkSummaryData.RequestsPerMSec):\n", + " thisSortingFunctor = (data) => data.Value.RequestsPerMSec;\n", + " selectionFunctor = (data) => data.RequestsPerMSec;\n", + " unit = \"Req/sec\";\n", + " break;\n", + " case nameof(BenchmarkSummaryData.MeanLatencyMS):\n", + " thisSortingFunctor = (data) => data.Value.MeanLatencyMS;\n", + " selectionFunctor = (data) => data.MeanLatencyMS;\n", + " unit = \"ms\";\n", + " break;\n", + " case nameof(BenchmarkSummaryData.Latency50thMS):\n", + " thisSortingFunctor = (data) => data.Value.Latency50thMS;\n", + " selectionFunctor = (data) => data.Latency50thMS;\n", + " unit = \"ms\";\n", + " break;\n", + " case nameof(BenchmarkSummaryData.Latency75thMS):\n", + " thisSortingFunctor = (data) => data.Value.Latency75thMS;\n", + " selectionFunctor = (data) => data.Latency75thMS;\n", + " unit = \"ms\";\n", + " break;\n", + " case nameof(BenchmarkSummaryData.Latency90thMS):\n", + " thisSortingFunctor = (data) => data.Value.Latency90thMS;\n", + " selectionFunctor = (data) => data.Latency90thMS;\n", + " unit = \"ms\";\n", + " break;\n", + " case nameof(BenchmarkSummaryData.Latency99thMS):\n", + " thisSortingFunctor = (data) => data.Value.Latency99thMS;\n", + " selectionFunctor = (data) => data.Latency99thMS;\n", + " unit = \"ms\";\n", + " break;\n", + " case nameof(BenchmarkSummaryData.MaxHeapCount):\n", + " thisSortingFunctor = (data) => data.Value.MaxHeapCount;\n", + " selectionFunctor = (data) => data.MaxHeapCount;\n", + " unit = \"heap count\";\n", + " break;\n", + " case nameof(BenchmarkSummaryData.NumberOfHeapCountSwitches):\n", + " thisSortingFunctor = (data) => data.Value.NumberOfHeapCountSwitches;\n", + " selectionFunctor = (data) => data.NumberOfHeapCountSwitches;\n", + " unit = \"hc switches\";\n", + " break;\n", + " case nameof(BenchmarkSummaryData.NumberOfHeapCountDirectionChanges):\n", + " thisSortingFunctor = (data) => data.Value.NumberOfHeapCountDirectionChanges;\n", + " selectionFunctor = (data) => data.NumberOfHeapCountDirectionChanges;\n", + " unit = \"hc dir changes\";\n", + " break;\n", + " case nameof(BenchmarkSummaryData.TotalSuspensionTimeMSec):\n", + " thisSortingFunctor = (data) => data.Value.TotalSuspensionTimeMSec;\n", + " selectionFunctor = (data) => data.TotalSuspensionTimeMSec;\n", + " unit = \"ms\";\n", + " break;\n", + " case nameof(BenchmarkSummaryData.PercentPauseTimeInGC):\n", + " thisSortingFunctor = (data) => data.Value.PercentPauseTimeInGC;\n", + " selectionFunctor = (data) => data.PercentPauseTimeInGC;\n", + " unit = \"%\";\n", + " break;\n", + " case nameof(BenchmarkSummaryData.PercentTimeInGC):\n", + " thisSortingFunctor = (data) => data.Value.PercentTimeInGC;\n", + " selectionFunctor = (data) => data.PercentTimeInGC;\n", + " unit = \"%\";\n", + " break;\n", + " case nameof(BenchmarkSummaryData.MeanHeapSizeBeforeMB):\n", + " thisSortingFunctor = (data) => data.Value.MeanHeapSizeBeforeMB;\n", + " selectionFunctor = (data) => data.MeanHeapSizeBeforeMB;\n", + " unit = \"MB\";\n", + " break;\n", + " case nameof(BenchmarkSummaryData.MaxHeapSizeMB):\n", + " thisSortingFunctor = (data) => data.Value.MaxHeapSizeMB;\n", + " selectionFunctor = (data) => data.MaxHeapSizeMB;\n", + " unit = \"MB\";\n", + " break;\n", + " case nameof(BenchmarkSummaryData.TotalAllocationsMB):\n", + " thisSortingFunctor 
= (data) => data.Value.TotalAllocationsMB;\n", + " selectionFunctor = (data) => data.TotalAllocationsMB;\n", + " unit = \"MB\";\n", + " break;\n", + " case nameof(BenchmarkSummaryData.GCScore):\n", + " thisSortingFunctor = (data) => data.Value.GCScore;\n", + " selectionFunctor = (data) => data.GCScore;\n", + " unit = \"score\";\n", + " break;\n", + "\n", + " default:\n", + " throw new Exception($\"unexpected {s}\");\n", + " //thisSortingFunctor = (data) => data.Value.MaxPrivateMemoryMB;\n", + " //selectionFunctor = (data) => data.MaxPrivateMemoryMB;\n", + " //unit = \"MB\";\n", + " //break;\n", + " }\n", + " sortingFunctor = sortingFunctor ?? thisSortingFunctor; // keep first one\n", + " selectionFunctors.Add(selectionFunctor);\n", + " units.Add(unit);\n", + " }\n", + "\n", + " var uniqueUnits = units.Zip(group.Select(t => t.Item1)).Select(p => p.Item2 == DataType.Volatility ? \"Volatility Score\" : p.Item1).Distinct();\n", + " if (uniqueUnits.Count() > 2) throw new Exception(\"More than two units in chart\");\n", + "\n", + " List pairedPerBuildData = new();\n", + " List sortedPerBuildData = new();\n", + "\n", + " foreach (BuildName build in buildToBenchmarkSummaryData.Keys)\n", + " {\n", + " for (int groupIndex = 0; groupIndex < group.Count; ++groupIndex)\n", + " {\n", + " var b = buildToBenchmarkSummaryData[build][(int) group[groupIndex].Item1];\n", + " var pairedData = b.Zip(buildToBenchmarkSummaryData[buildToBenchmarkSummaryData.Keys.First()][(int) group[0].Item1]).OrderByDescending(pair => sortingFunctor(pair.Second)).Select(pair => pair.First.Value);\n", + " //pairedPerBuildData.Add(new PerBuildData(group[groupIndex], units[groupIndex], build, selectionFunctors[groupIndex], b.OrderByDescending(sortingFunctor).Select(k => k.Value).ToList()));\n", + " sortedPerBuildData.Add(new PerBuildData(group[groupIndex], units[groupIndex], build, selectionFunctors[groupIndex], pairedData)); // b.OrderByDescending(sortingFunctor).Select(k => k.Value).ToList() ));\n", + " //sortedPerBuildData.Add(new PerBuildData(group[groupIndex], units[groupIndex], build, selectionFunctors[groupIndex], b.OrderByDescending(sortingFunctor).Select(k => k.Value).ToList()));\n", + " }\n", + " }\n", + "\n", + "\n", + "// // Create CSV.\n", + "// StringBuilder top = new();\n", + "//\n", + "// // Iterate over each of the runs.\n", + "// const int singleBuildColumnSize = 10;\n", + "// int numberOfBuilds = buildToBenchmarkSummaryData.Count;\n", + "// string columnHeader = \"Benchmark Name,MaxWorkingSetMB,MaxPrivateMemoryMB,RequestsPerMSec,MeanLatencyMS,Latency50thMS,Latency75thMS,Latency90thMS,Latency99thMS,# HC Switches\";\n", + "//\n", + "// // Assumption: the same benchmarks are present for all runs.\n", + "// int totalCountOfBenchmarks = buildToBenchmarkSummaryData.First().Value.Count;\n", + "//\n", + "// string first = string.Join(\",\", namesOfBuilds.Select(build => build + string.Join(\"\", Enumerable.Repeat(\",\", singleBuildColumnSize))));\n", + "// string second = string.Join(\",,\", Enumerable.Repeat(columnHeader, numberOfBuilds));\n", + "//\n", + "// top.AppendLine(first);\n", + "// top.AppendLine(second);\n", + "//\n", + "// for (int benchmarkIdx = 0; benchmarkIdx < totalCountOfBenchmarks; benchmarkIdx++)\n", + "// {\n", + "// top.AppendLine(string.Join(\",,\", namesOfBuilds.Select(buildName => DisplayDetailsForABenchmark(sortedPerBuildVolatility[buildName][benchmarkIdx]))));\n", + "// }\n", + "//\n", + "// File.WriteAllText(Path.Combine(dataManager._basePath, $\"Volatility_{group[0]}.csv\"), 
top.ToString());\n", + "\n", + " // Chart the sorted % Vol Results.\n", + "\n", + " ColorProvider colorProvider = new();\n", + " //colorProvider.StartColors(builds.Select(build => build.InData));\n", + " List scatters = new();\n", + " //string mode = \"markers\";\n", + " string mode = \"lines+markers\";\n", + " string firstUnit = sortedPerBuildData[0].Unit;\n", + "\n", + " var layout = new Layout.Layout\n", + " {\n", + " xaxis = new Xaxis { title = \"Benchmark Name\" },\n", + " yaxis = new Yaxis { title = firstUnit },\n", + " width = 1200,\n", + " title = $\"GCMetrcs Sorted by {group[0].Item1} of {group[0].Item2} for {builds[0].ToDisplay} (by test)\"\n", + " };\n", + "\n", + " foreach (var (b, index) in sortedPerBuildData.WithIndex())\n", + " {\n", + " var scatter = new Scatter\n", + " {\n", + " x = b.Data.Select(s => s.Benchmark),\n", + " y = b.Data.Select(v => b.Selector(v)),// + 0.1 * index),\n", + " mode = mode,\n", + " name = $\"{b.BuildName.ToDisplay}: {b.Criteria.Item1.ToString()} of {b.Criteria.Item2}\",\n", + " };\n", + "\n", + " if (b.Unit != firstUnit)\n", + " {\n", + " layout.yaxis2 = new Yaxis { title = b.Unit, side = \"right\", overlaying = \"y\" };\n", + " scatter.yaxis = \"y2\";\n", + " }\n", + "\n", + " colorProvider.SetMarker(scatter, b.BuildName.InData, sortedPerBuildData.Count());\n", + " scatters.Add(scatter);\n", + " }\n", + "\n", + " Chart.Plot(scatters, layout).Display();\n", + "\n", + "// scatters.Clear();\n", + "// layout = new Layout.Layout\n", + "// {\n", + "// xaxis = new Xaxis { title = \"Benchmark Index\" },\n", + "// yaxis = new Yaxis { title = firstUnit },\n", + "// width = 1200,\n", + "// title = $\"GCMetrcs Sorted by {group[0].Item1} of {group[0].Item2} for {builds[0].ToDisplay} (by index)\"\n", + "// };\n", + "\n", + "// //colorProvider.StartColors(builds.Select(build => build.InData));\n", + "// foreach (var b in sortedPerBuildData)\n", + "// {\n", + "// var sortedData = b.Data.OrderByDescending(b.Selector);\n", + "// var scatter = new Scatter\n", + "// {\n", + "// x = Enumerable.Range(0, sortedData.Count()),\n", + "// y = sortedData.Select(b.Selector),\n", + "// mode = mode,\n", + "// name = $\"{b.BuildName.ToDisplay}: {b.Criteria.Item1.ToString()} of {b.Criteria.Item2}\",\n", + "// text = sortedData.Select(ss => ss.Benchmark),\n", + "// };\n", + "\n", + "// if (b.Unit != firstUnit)\n", + "// {\n", + "// layout.yaxis2 = new Yaxis { title = b.Unit, side = \"right\", overlaying = \"y\" };\n", + "// scatter.yaxis = \"y2\";\n", + "// }\n", + "\n", + "// colorProvider.SetMarker(scatter, b.BuildName.InData, sortedPerBuildData.Count());\n", + "// scatters.Add(scatter);\n", + "// }\n", + " \n", + "// Chart.Plot(scatters, layout).Display();\n", + " }\n", + " }\n", + "*/\n", + "\n", + "// CompareFull is used to compare different builds.\n", + "\n", + "/*\n", + "public class CollectedBenchmarkData\n", + "{\n", + " public List Data = new();\n", + " public double Sum => Data.Sum(x => x);\n", + " public double Prod => Data.Aggregate(1.0, (prod, next) => prod * next);\n", + " public double Average => Sum / Data.Count();\n", + " public double GeoMean => Math.Pow(Prod, 1.0 / Data.Count());\n", + "\n", + " public void Add(double value) => Data.Add(value);\n", + "}\n", + "\n", + "public class Blob // rename this...\n", + "{\n", + " public CollectedBenchmarkData Baseline = new();\n", + " public List Diffs = new();\n", + " public double Ratio(int i) => Diffs[i].GeoMean / Baseline.GeoMean;\n", + "}\n", + "\n", + "void CheckAdd(string benchmark, CollectedBenchmarkData data, 
Func selector, string includeRE, string excludeRE)\n", + "{\n", + " if ((includeRE != null) && !Regex.Match(benchmark, includeRE).Success) return;\n", + " if ((excludeRE != null) && Regex.Match(benchmark, excludeRE).Success) return;\n", + " if (!data.TryGetValue(benchmark, out var blob)) data[benchmark] = blob = new Blob();\n", + "\n", + "}\n", + "\n", + "// selector -> to extract the data to CompareFull\n", + "// includeRE -> which benchmarks to include (regex), all if null\n", + "// excludeRE -> which benchmarks to exclude (regex), none if null\n", + "// baseline/diffs -> names of builds to compare\n", + "// includeIndiv -> whether to include the individual benchmark comparisons (probably use true)\n", + "void CompareFull(DataManager dataManager, Func selector, string includeRE, string excludeRE, string baseline, List diffs, bool includeIndiv)\n", + "{\n", + " HashSet seen = new();\n", + " // benchmark -> Blob\n", + " Dictionary data = new();\n", + " foreach (var (run, benchmarkData) in dataManager._runToBenchmarkData)\n", + " {\n", + " var build = run.Substring(0, run.LastIndexOf('_'));\n", + " //if (!seen.Contains(build)) { Console.WriteLine(build); seen.Add(build); }\n", + " if (build != baseline && build != diff) continue;\n", + " foreach (var (benchmark, loadInfo) in benchmarkData)\n", + " {\n", + " if ((includeRE != null) && !Regex.Match(benchmark, includeRE).Success) continue;\n", + " if ((excludeRE != null) && Regex.Match(benchmark, excludeRE).Success) continue;\n", + " if (!data.TryGetValue(benchmark, out var blob)) data[benchmark] = blob = new Blob();\n", + " if (build == baseline) blob.Baseline.Add(selector(loadInfo));\n", + " else \n", + " blob.GetData(build == baseline).Add(selector(loadInfo));\n", + " }\n", + " }\n", + "\n", + " List ratios = new();\n", + " Console.WriteLine($\"Baseline: {baseline}\");\n", + " foreach (var (d, i) in diffs.WithIndex())\n", + " {\n", + " Console.WriteLine($\"Diff{i}: {d}\");\n", + " }\n", + " {\n", + " Console.WriteLine($\"{\"Benchmark\",35} | {\"D/B\",5} | {\"Base\",8} | {\"Diff\",8}\");\n", + " Console.WriteLine($\"{new string('-', 35)}-+-{new string('-', 5)}-+-{new string('-', 8)}-+-{new string('-', 8)}\");\n", + " foreach (var (benchmark, value) in data.OrderByDescending(kvp => kvp.Value.Ratio))\n", + " {\n", + " if (includeIndiv)\n", + " {\n", + " Console.WriteLine($\"{benchmark,35} | {value.Ratio,5:N3} | {value.Baseline.GeoMean,8:N2} | {value.Diff.GeoMean,8:N2}\");\n", + " }\n", + " ratios.Add(value.Ratio);\n", + " }\n", + " }\n", + "\n", + " Console.WriteLine($\"{new string('-', 35)}-+-{new string('-', 5)}-+-{new string('-', 8)}-+-{new string('-', 8)}\");\n", + " double baseGeoMean = ComputeGeoMean(data.Select(kvp => kvp.Value.Baseline.GeoMean));\n", + " double diffGeoMean = ComputeGeoMean(data.Select(kvp => kvp.Value.Diff.GeoMean));\n", + " Console.WriteLine($\"{\"GeoMean\",35} | {diffGeoMean / baseGeoMean,5:N3} | {baseGeoMean,8:N2} | {diffGeoMean,8:N2}\");\n", + " Console.WriteLine($\"{\"ArithMean\",35} | {ComputeAverage(ratios),5:N3} | {\"\",8} | {\"\",8}\");\n", + " Console.WriteLine();\n", + "}\n", + "*/\n", + "\n", + "// Display individual benchmark runs\n", + "\n", + "// extract -> to extract the data to CompareFull\n", + "// benchmark -> benchmarks(s) to include\n", + "// exactMatch -> impact matching of benchmark - odd behavior.. 
see code\n", + "\n", + "/*\n", + "void ProcessDataMean(DataManager dataManager, Func extract, string benchmarkName, bool exactMatch = false)\n", + "{\n", + " Console.WriteLine(\"Benchmark {0}\", benchmarkName);\n", + " var names = dataManager.Data.Keys;\n", + " // build -> (sum of GCMetrc, count)\n", + " Dictionary GCMetrcByBuild = new(2);\n", + " foreach (var name in names)\n", + " {\n", + " bool matched = (exactMatch ? name.EndsWith(benchmarkName) : name.Contains(benchmarkName));\n", + " if (matched)\n", + " {\n", + " string[] fields = name.Split(new Char[] { '_' }, StringSplitOptions.RemoveEmptyEntries);\n", + " string buildname = fields[0];\n", + " LoadInfo info = dataManager.Data[name];\n", + " //Console.WriteLine(\"build name is {0}\", buildname);\n", + " if (GCMetrcByBuild.TryGetValue(buildname, out (double sum, int count) p))\n", + " {\n", + " metricByBuild[buildname] = (p.sum + extract(info), p.count + 1);\n", + " }\n", + " else\n", + " {\n", + " metricByBuild.Add(buildname, (extract(info), 1));\n", + " }\n", + "\n", + " //Console.WriteLine(\"metric is now {0:0.00}\", metricByBuild[buildname]);\n", + "\n", + " Console.WriteLine(\"{0,60}: mean latency {1:0.00} ms, time in GC {2:0.00}%, heap switches {3}, max HC {4,2}, maxHeapMB {5:0.0}\",\n", + " name, info.MeanLatencyMS, info.PercentPauseTimeInGC,\n", + " info.NumberOfHeapCountSwitches, info.MaxHeapCount, info.MaxHeapSizeMB);\n", + " }\n", + " }\n", + "\n", + " foreach (var (build, (sum, count)) in metricByBuild)\n", + " {\n", + " Console.WriteLine(\"build {0, 10}: {1:0.00}\", build, sum / count);\n", + " }\n", + "}\n", + "\n", + "void ProcessDataMeanLatency(DataManager dataManager, string benchmarkName, bool exactMatch = false)\n", + " => ProcessDataMean(dataManager, loadInfo => loadInfo.MeanLatencyMS, benchmarkName, exactMatch);\n", + "*/\n", + "\n", + "// Shows benchmark runs that increment the metric two times in a row\n", + "\n", + "// builds -> the build name (with trailing _, like \"fix_\")\n", + "// extract -> data to examine\n", + "// benchmarkFilterRE -> benchmarks to include\n", + "// listAll -> show all data points instead of just the changes - usually too much\n", + "/*\n", + "public void DisplayDoubleIncrement(DataManager dataManager, IEnumerable builds, Func extract, string benchmarkFilterRE, bool listAll = false)\n", + "{\n", + " foreach ((string benchmark, var allRuns) in dataManager._benchmarkToRunData)\n", + " {\n", + " bool firstForBenchmark = true;\n", + " if (!Regex.IsMatch(benchmark, benchmarkFilterRE)) continue;\n", + " foreach ((string run, var results) in allRuns)\n", + " {\n", + " if (!builds.Any(b => run.StartsWith(b))) continue;\n", + " if (results.Data == null)\n", + " {\n", + " Console.WriteLine($\"No data for {benchmark} {run}\");\n", + " continue;\n", + " }\n", + " var doubleIncr =\n", + " results.Data.GCs\n", + " .Where(gc => gc.GlobalHeapHistory != null)\n", + " .Select(extract)\n", + " .SlidingRange(10)\n", + " .SkipWhile(window => ((window[0] + 1) != window[3]) || ((window[3] + 1) != window[6])|| ((window[6] + 1) != window[9]));\n", + " bool anyDouble = doubleIncr.Any(x => true);\n", + " if (!anyDouble) continue;\n", + "\n", + " if (firstForBenchmark)\n", + " {\n", + " Console.WriteLine($\"{benchmark}:\");\n", + " firstForBenchmark = false;\n", + " }\n", + " Console.Write($\" {run,12}:\");\n", + " if (listAll)\n", + " {\n", + " foreach (int num in\n", + " results.Data.GCs\n", + " .Where(gc => gc.GlobalHeapHistory != null)\n", + " .Select(extract)\n", + " .SlidingWindow(-1)\n", + " .Where(window 
=> window.PrevItem != window.CurrentItem)\n", + " .Select(window => window.CurrentItem))\n", + " {\n", + " Console.Write($\" {num}\");\n", + " }\n", + " }\n", + " Console.WriteLine();\n", + " }\n", + " }\n", + "}\n", + "\n", + "\n", + "// Shows benchmark runs that decrease a metric (ever)\n", + "\n", + "// builds -> the build name (with trailing _, like \"fix_\")\n", + "// extract -> data to examine\n", + "// amount -> threshold of decrease to include\n", + "// benchmarkFilterRE -> benchmarks to include\n", + "// listAll -> show all data points instead of just the changes - usually too much\n", + "\n", + "public void DisplayChangesDown(DataManager dataManager, IEnumerable builds, Func extract, double amount, string benchmarkFilterRE, bool listAll = false)\n", + "{\n", + " foreach ((string benchmark, var allRuns) in dataManager._benchmarkToRunData)\n", + " {\n", + " bool firstForBenchmark = true;\n", + " if (!Regex.IsMatch(benchmark, benchmarkFilterRE)) continue;\n", + " foreach ((string run, var results) in allRuns)\n", + " {\n", + " if (!builds.Any(b => run.StartsWith(b))) continue;\n", + " if (results.Data == null)\n", + " {\n", + " Console.WriteLine($\"No data for {benchmark} {run}\");\n", + " continue;\n", + " }\n", + " var dec =\n", + " results.Data.GCs\n", + " .Where(gc => gc.GlobalHeapHistory != null)\n", + " .Select(extract)\n", + " .SlidingWindow(-1)\n", + " .SkipWhile(window => window.PrevItem <= window.CurrentItem + amount);\n", + " bool anyDecrease = dec.Any(x => true);\n", + " if (!anyDecrease) continue;\n", + "\n", + " var incAfterDec = dec.SkipWhile(window => window.PrevItem >= window.CurrentItem - amount);\n", + " var anyIncAfterDec = incAfterDec.Any(x => true);\n", + " if (!anyIncAfterDec) continue;\n", + "\n", + " if (firstForBenchmark)\n", + " {\n", + " Console.WriteLine($\"{benchmark}:\");\n", + " firstForBenchmark = false;\n", + " }\n", + " Console.Write($\" {run,12}:\");\n", + " if (listAll)\n", + " {\n", + " foreach (int num in\n", + " results.Data.GCs\n", + " .Where(gc => gc.GlobalHeapHistory != null)\n", + " .Select(extract)\n", + " .SlidingWindow(-1)\n", + " .Where(window => window.PrevItem != window.CurrentItem)\n", + " .Select(window => window.CurrentItem))\n", + " {\n", + " Console.Write($\" {num}\");\n", + " }\n", + " }\n", + " Console.WriteLine();\n", + " }\n", + " }\n", + "}\n", + "\n", + "public void DisplayHeapChangesDown(DataManager dataManager, IEnumerable builds, string benchmarkFilterRE)\n", + " => DisplayChangesDown(dataManager, builds, gc => gc.GlobalHeapHistory.NumHeaps, 0, benchmarkFilterRE, listAll: true);\n", + "\n", + "// This is used to reduce a list of charts into a small enough number for the notebook to fully display.\n", + "// I have a habit of calling GetPage(0) for small lists, but this isn't needed - just display the whole list.\n", + "// (When displaying all benchmarks, having that and changing the 0 to 1, 2, etc., can be useful)\n", + "static IEnumerable GetPage(this IEnumerable groups, int page, int numPerPage = 18)\n", + " => groups.Skip(numPerPage * page).Take(numPerPage);\n", + "*/\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "// Old examples - use as resources then delete\n", + "\n", + "/*\n", + "\n", + "SummarizeResultsByBench(low4DM, ML(\"v2-fixrearranged-all\", 
\"v2-fixrearranged-all-svr\"));\n", + "SummarizeResultsByBench(low4DM, ML(\"v2-rc3\", \"v2-fixrearranged\"));\n", + "\n", + "SummarizeResults(diffDataManager, latestPath + @\"\\summarize.txt\");\n", + "\n", + "// The specific values are busted here, but more paths can be added to an existing DataManager.\n", + "// Note: Adding/overwriting more benchmarks to an existing loaded directory is untested/etc.\n", + "// This is intended for adding a new run when you already have a baseline or previous run\n", + "// loaded and don't want to wait to read it again.\n", + "\n", + "dataManager.AddData(new[] { slopePath, evalDecrPath }, scoutList.ToList());\n", + "\n", + "// Again, the values are busted, but you can speed up loading if you only want to look at\n", + "// certain benchmarks.\n", + "\n", + "var x = new DataManager(new[] { evalDecrPath }, filter: debugList);\n", + "\n", + "var low4BaseRun = ML(new BuildName(\"v2-fixrearranged-mult-max_\", \"base\"));\n", + "var low4Run = ML(new BuildName(\"v2-fixrearranged-mult-max-h4_\", \"max4\"));\n", + "var svrRun = ML(new BuildName(\"v2-fixrearranged-mult-max-svr_\", \"svr\"));\n", + "var svr4Run = ML(new BuildName(\"v2-fixrearranged-mult-max-svr4_\", \"svr4\"));\n", + "var mult8Run = ML(new BuildName(\"v2-fixrearranged-mult-max-mult8_\", \"mult8\"));\n", + "var mult32Run = ML(new BuildName(\"v2-fixrearranged-mult-max-mult32_\", \"mult32\"));\n", + "var mult8max10Run = ML(new BuildName(\"v2-fixrearranged-mult-max-mult8x10_\", \"m8x10\"));\n", + "var max10Run = ML(new BuildName(\"v2-fixrearranged-mult-max-x10_\", \"x10\"));\n", + "var low4CompRuns = Concat(low4BaseRun, low4Run, svrRun, svr4Run, mult8Run, mult32Run, mult8max10Run, max10Run);\n", + "\n", + "string compareBase = \"v2-fixrearranged-mult-max-svr4\";\n", + "string compareDiff = \"v2-fixrearranged-mult-max-mult8\";\n", + "\n", + "//var extract = (LoadInfo loadInfo) => loadInfo.RequestsPerMSec;\n", + "//var extract = (LoadInfo loadInfo) => loadInfo.Latency50thMS;\n", + "var extract = (LoadInfo loadInfo) => loadInfo.MeanLatencyMS;\n", + "//var extract = (LoadInfo loadInfo) => loadInfo.MaxPrivateMemoryMB;\n", + "//var extract = (LoadInfo loadInfo) => loadInfo.P50PrivateMemoryMB;\n", + "//var extract = (LoadInfo loadInfo) => loadInfo.PercentPauseTimeInGC;\n", + "\n", + "string includeRE = null; // scoutREListShort2;\n", + "string excludeRE = null; // \"ConnectionClose\";\n", + "CompareFull(low4DM, extract, includeRE, excludeRE, compareBase, compareDiff, true);\n", + "//CompareFull(rc3DataManager, (LoadInfo loadInfo) => Math.Max(5, loadInfo.PercentPauseTimeInGC), scoutREListShort, null, compareBase, compareDiff, true);\n", + "\n", + "foreach (string benchmark in new[] { \"ConnectionClose\", \"SingleQueryPlatform\" })\n", + "{\n", + " ProcessDataMeanLatency(rc2DataManager, benchmark, true);\n", + "}\n", + "\n", + "// Not tested for a while.\n", + "\n", + "DisplayDoubleIncrement(rc2DataManager, ML(\"fix_\"), gc => gc.GlobalHeapHistory.NumHeaps, \"\", true)\n", + "\n", + "// Not tested for a while\n", + "\n", + "DisplayChangesDown(rc2DataManager, ML(\"fix_\"), gc => gc.HeapSizeAfterMB, 3, \"\")\n", + "\n", + "// Leftover code - manually displays heap changes\n", + "\n", + "foreach (string build in ML(\"v2-fixrearranged-all_\"))\n", + "{\n", + " for (int i = 0; i < 3; ++i)\n", + " {\n", + " Console.Write($\"{build}{i}:\");\n", + " foreach (int num in\n", + " low4DM.Data[$\"{build}{i} | Stage1\"].Data.GCs\n", + " .Where(gc => gc.GlobalHeapHistory != null)\n", + " .Select(gc => 
gc.GlobalHeapHistory.NumHeaps)\n", + " .SlidingWindow(-1)\n", + " .Where(window => window.PrevItem != window.CurrentItem)\n", + " .Select(window => window.CurrentItem))\n", + " {\n", + " Console.Write($\" {num}\");\n", + " }\n", + " Console.WriteLine();\n", + " }\n", + "}\n", + "\n", + "*/" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "// Old charting examples\n", + "/*\n", + "SaveData(low4DM, low4CompRuns, ML((DataType.Average, nameof(LoadInfo.P90PrivateMemoryMB)), (DataType.Max, nameof(LoadInfo.MaxPrivateMemoryMB)), (DataType.Max, nameof(LoadInfo.MaxHeapCount))));\n", + "SaveData(low4DM, low4CompRuns, DataType.Average, ML(nameof(LoadInfo.RequestsPerMSec)));\n", + "SaveData(dataManager, allRuns, DataType.Average, ML(nameof(LoadInfo.RequestsPerMSec), nameof(LoadInfo.Latency50thMS)));\n", + "SaveData(rc3DataManager, rc3RearrangedRun,\n", + " ML(ML((DataType.Max, nameof(LoadInfo.MaxPrivateMemoryMB)), (DataType.Min, nameof(LoadInfo.MaxPrivateMemoryMB))),\n", + " ML((DataType.Max, nameof(LoadInfo.P99PrivateMemoryMB)), (DataType.Min, nameof(LoadInfo.P99PrivateMemoryMB))),\n", + " ML((DataType.Max, nameof(LoadInfo.P50PrivateMemoryMB)), (DataType.Min, nameof(LoadInfo.P50PrivateMemoryMB)))));\n", + "SaveData(rc3DataManager, rc3Runs, DataType.Average, priMemList);\n", + "SaveData(diffDataManager, vsBaseRuns, priMemList.Select(m => ML((DataType.Min, m), (DataType.Max, m))));\n", + "SaveDataOne(v2DataManager, v2Runs, DataType.Average, priMemList);\n", + "SaveDataOne(diffDataManager, allRuns, DataType.Volatility, volList);\n", + "\n", + "// Using the DataManager - I haven't been using this section.\n", + "\n", + "// The following cells demonstrates how to make use of the ``DataManager``. 
\n", + "\n", + "// The name of the run from the yaml file for which the ASP.NET run is created for.\n", + "string runName = \"base_0\";\n", + "\n", + "Dictionary run = dataManager.GetAllBenchmarksForRun(runName);\n", + "dataManager.Data.Display();\n", + "List> runsWithGCData = dataManager.GetAllBenchmarksForRun(runName).Where(gc => gc.Value.Data != null);\n", + "\n", + "string benchmarkName = \"Stage2\";\n", + "LoadInfo benchmarkData = dataManager.GetBenchmarkData(benchmark: benchmarkName, run: runName);\n", + "benchmarkData.Id\n", + "\n", + "Dictionary allRunsForBenchmark = dataManager.GetAllRunsForBenchmark(benchmark: benchmarkName);\n", + "allRunsForBenchmark.Keys\n", + "\n", + "dataManager.SaveBenchmarkData()\n", + "\n", + "// ## Build to Build Comparison and Volatility Analysis\n", + "\n", + "// I haven't been using this section, but it is an obvious one to start using again.\n", + "\n", + "var run1_vs_run2 = diffDataManager.GetBenchmarkToComparison(\"tp3-m_0\", \"tp3-m_1\");\n", + "\n", + "static bool IsNotInvalidDouble(double val) => \n", + " !double.IsNaN(val) && \n", + " !double.IsInfinity(val) && \n", + " !double.IsPositiveInfinity(val) && \n", + " !double.IsNegativeInfinity(val);\n", + "\n", + "public class SummaryTable\n", + "{\n", + " public SummaryTable(Dictionary> comparisons)\n", + " {\n", + " Comparisons = comparisons;\n", + " }\n", + "\n", + " private string GenerateSummaryForComparison(string comparisonKey, Dictionary comparison)\n", + " {\n", + " double averageWorkingSet = comparison.Where(a => IsNotInvalidDouble(a.Value.MaxWorkingSetMB)).Average(a => a.Value.MaxWorkingSetMB);\n", + " double privateMemory = comparison.Where(a => IsNotInvalidDouble(a.Value.MaxPrivateMemoryMB)).Average(a => a.Value.MaxPrivateMemoryMB);\n", + " double throughput = comparison.Where(a => IsNotInvalidDouble(a.Value.RequestsPerMSec)).Average(a => a.Value.RequestsPerMSec);\n", + " double meanLatency = comparison.Where(a => IsNotInvalidDouble(a.Value.MeanLatencyMS)).Average(a => a.Value.MeanLatencyMS);\n", + "\n", + " double p50Latency = comparison.Where(a => IsNotInvalidDouble(a.Value.Latency50thMS)).Average(a => a.Value.Latency50thMS);\n", + " double p75Latency = comparison.Where(a => IsNotInvalidDouble(a.Value.Latency75thMS)).Average(a => a.Value.Latency75thMS);\n", + " double p90Latency = comparison.Where(a => IsNotInvalidDouble(a.Value.Latency90thMS)).Average(a => a.Value.Latency90thMS);\n", + " double p99Latency = comparison.Where(a => IsNotInvalidDouble(a.Value.Latency99thMS)).Average(a => a.Value.Latency99thMS);\n", + "\n", + " return $\"{comparisonKey},{averageWorkingSet},{privateMemory},{throughput},{meanLatency},{p50Latency},{p75Latency},{p90Latency},{p99Latency}\";\n", + " }\n", + "\n", + " public string GenerateSummaryForComparisons()\n", + " {\n", + " StringBuilder sb = new();\n", + " sb.AppendLine(\"Build to Build,Average Max Working Set (MB) %, Average Max Private Memory (MB) %, Average Request/MSec %, Average Mean Latency (MSec), Average P50 Latency (MSec) %, Average P75 Latency (MSec) %, Average P90 Latency (MSec) %, Average P99 Latency (MSec) %\");\n", + " foreach (var comparison in Comparisons)\n", + " {\n", + " sb.AppendLine(GenerateSummaryForComparison(comparison.Key, comparison.Value));\n", + " }\n", + "\n", + " return sb.ToString();\n", + " }\n", + "\n", + " private int GetCountOfRegressions(List selected, double thresholdPercentage, bool lessIsBetter = true)\n", + " {\n", + " // If throughput, less is worse => threshold <= -5%.\n", + " var comparison = selected.Where(d 
=> IsNotInvalidDouble(d) && ( (lessIsBetter) ? (d >= thresholdPercentage) : (d <= -thresholdPercentage)));\n", + " return comparison.Count;\n", + " }\n", + "\n", + " private int GetCountOfAbsRegressions(List selected, double thresholdPercentage)\n", + " {\n", + " var comparison = selected.Where(d => IsNotInvalidDouble(d) && Math.Abs(d) >= thresholdPercentage);\n", + " return comparison.Count;\n", + " }\n", + "\n", + " // # of benchmarks with throughput regressed by >= 5% and 10%\n", + " private string GenerateRegressionSummary(string comparisonKey, Dictionary comparison)\n", + " {\n", + " List workingSet = comparison.Select(c => c.Value.MaxWorkingSetMB);\n", + " int workingSetCountGT_5 = GetCountOfRegressions(workingSet, 5);\n", + " int workingSetCountGT_10 = GetCountOfRegressions(workingSet, 10);\n", + "\n", + " List privateMemory = comparison.Select(c => c.Value.MaxPrivateMemoryMB);\n", + " int privateMemoryCountGT_5 = GetCountOfRegressions(privateMemory, 5);\n", + " int privateMemoryCountGT_10 = GetCountOfRegressions(privateMemory, 10);\n", + "\n", + " List throughput = comparison.Select(a => a.Value.RequestsPerMSec);\n", + " int throughputCountGT_5 = GetCountOfRegressions(throughput, 5, false);\n", + " int throughputCountGT_10 = GetCountOfRegressions(throughput, 10, false);\n", + "\n", + " List meanLatency = comparison.Select(a => a.Value.MeanLatencyMS);\n", + " int meanLatencyCountGT_5 = GetCountOfRegressions(meanLatency, 5);\n", + " int meanLatencyCountGT_10 = GetCountOfRegressions(meanLatency, 10);\n", + "\n", + " List p50Latency = comparison.Select(a => a.Value.Latency50thMS);\n", + " int p50LatencyCountGT_5 = GetCountOfRegressions(p50Latency, 5);\n", + " int p50LatencyCountGT_10 = GetCountOfRegressions(p50Latency, 10);\n", + "\n", + " List p75Latency = comparison.Select(a => a.Value.Latency75thMS);\n", + " int p75LatencyCountGT_5 = GetCountOfRegressions(p75Latency, 5);\n", + " int p75LatencyCountGT_10 = GetCountOfRegressions(p75Latency, 10);\n", + "\n", + " List p90Latency = comparison.Select(a => a.Value.Latency90thMS);\n", + " int p90LatencyCountGT_5 = GetCountOfRegressions(p90Latency, 5);\n", + " int p90LatencyCountGT_10 = GetCountOfRegressions(p90Latency, 10);\n", + " \n", + " List p99Latency = comparison.Select(a => a.Value.Latency99thMS);\n", + " int p99LatencyCountGT_5 = GetCountOfRegressions(p99Latency, 5);\n", + " int p99LatencyCountGT_10 = GetCountOfRegressions(p99Latency, 10);\n", + "\n", + " return $\"{comparisonKey},{workingSetCountGT_5},{workingSetCountGT_10},{privateMemoryCountGT_5},{privateMemoryCountGT_10},{throughputCountGT_5},{throughputCountGT_10},{meanLatencyCountGT_5},{meanLatencyCountGT_10},{p50LatencyCountGT_5},{p50LatencyCountGT_10},{p75LatencyCountGT_5},{p75LatencyCountGT_10},{p90LatencyCountGT_5},{p90LatencyCountGT_10},{p99LatencyCountGT_5},{p99LatencyCountGT_10}\";\n", + " }\n", + "\n", + " public string GenerateRegressionSummaryForComparisons()\n", + " {\n", + " StringBuilder sb = new();\n", + " sb.AppendLine(\"Build to Build,Reg. Count - Working Set (MB),Large Reg. Count - Working Set (MB),Reg. Count - Max Private Memory (MB),Large Reg. Count - Max Private Memory (MB),Reg. Count - Throughput, Large Reg. Count - Throughput,Reg. Count - Mean Latency,Large Reg. Count - Mean Latency,Reg. Count - P50 Latency, Large Reg. Count - P50 Latency, Reg. Count - P75 Latency, Large Reg. Count - P75 Latency,Reg. Count - P90 Latency, Large Reg. Count - P90 Latency,Reg. Count - P99 Latency, Large Reg. 
Count - P99 Latency\");\n", + " foreach (var comparison in Comparisons)\n", + " {\n", + " sb.AppendLine(GenerateRegressionSummary(comparison.Key, comparison.Value));\n", + " }\n", + "\n", + " return sb.ToString();\n", + " }\n", + "\n", + " public Dictionary GenerateRegressionAnalysisForComparison(string comparisonKey)\n", + " {\n", + " StringBuilder sb = new();\n", + " Dictionary csvData = new();\n", + " Dictionary comparison = Comparisons[comparisonKey];\n", + "\n", + " string header = \"Benchmark,MaxWorkingSetMB,MaxPrivateMemoryMB,RequestsPerMSec,MeanLatencyMS,Latency50thMS,Latency75thMS,Latency90thMS,Latency99thMS\";\n", + "\n", + " // Generate Memory Regressions.\n", + " StringBuilder memRegressions = new();\n", + " memRegressions.AppendLine(header);\n", + " foreach (var benchmark in comparison.Where(c => c.Value.MaxWorkingSetMB >= 10 || c.Value.MaxPrivateMemoryMB >= 10 ))\n", + " {\n", + " memRegressions.AppendLine($\"{benchmark.Key},{benchmark.Value.MaxWorkingSetMB},{benchmark.Value.MaxPrivateMemoryMB},{benchmark.Value.RequestsPerMSec},{benchmark.Value.MeanLatencyMS},{benchmark.Value.Latency50thMS},{benchmark.Value.Latency75thMS},{benchmark.Value.Latency90thMS},{benchmark.Value.Latency99thMS}\");\n", + " }\n", + " csvData[\"memory\"] = memRegressions.ToString();\n", + "\n", + " // Generate Throughput Regressions.\n", + " StringBuilder throughputRegressions = new();\n", + " throughputRegressions.AppendLine(header);\n", + " foreach (var benchmark in comparison.Where(c => c.Value.RequestsPerMSec <= -10))\n", + " {\n", + " throughputRegressions.AppendLine($\"{benchmark.Key},{benchmark.Value.MaxWorkingSetMB},{benchmark.Value.MaxPrivateMemoryMB},{benchmark.Value.RequestsPerMSec},{benchmark.Value.MeanLatencyMS},{benchmark.Value.Latency50thMS},{benchmark.Value.Latency75thMS},{benchmark.Value.Latency90thMS},{benchmark.Value.Latency99thMS}\");\n", + " }\n", + " csvData[\"throughput\"] = throughputRegressions.ToString();\n", + "\n", + " // Generate Latency Regressions.\n", + " StringBuilder latencyRegressions = new();\n", + " latencyRegressions.AppendLine(header);\n", + " foreach (var benchmark in comparison.Where(c => c.Value.MeanLatencyMS >= 10 || \n", + " c.Value.Latency50thMS >= 10 || \n", + " c.Value.Latency75thMS >= 10 || \n", + " c.Value.Latency90thMS >= 10 || \n", + " c.Value.Latency99thMS >= 10 ))\n", + " {\n", + " latencyRegressions.AppendLine($\"{benchmark.Key},{benchmark.Value.MaxWorkingSetMB},{benchmark.Value.MaxPrivateMemoryMB},{benchmark.Value.RequestsPerMSec},{benchmark.Value.MeanLatencyMS},{benchmark.Value.Latency50thMS},{benchmark.Value.Latency75thMS},{benchmark.Value.Latency90thMS},{benchmark.Value.Latency99thMS}\");\n", + " }\n", + " csvData[\"latency\"] = latencyRegressions.ToString();\n", + "\n", + " // All.\n", + " StringBuilder all = new();\n", + " all.AppendLine(header);\n", + " foreach (var benchmark in comparison)\n", + " {\n", + " all.AppendLine($\"{benchmark.Key},{benchmark.Value.MaxWorkingSetMB},{benchmark.Value.MaxPrivateMemoryMB},{benchmark.Value.RequestsPerMSec},{benchmark.Value.MeanLatencyMS},{benchmark.Value.Latency50thMS},{benchmark.Value.Latency75thMS},{benchmark.Value.Latency90thMS},{benchmark.Value.Latency99thMS}\");\n", + " }\n", + " csvData[\"all\"] = all.ToString();\n", + "\n", + " return csvData;\n", + " }\n", + "\n", + " public void SaveComparisons(string basePath)\n", + " {\n", + " // Add Summary for Comparisons.\n", + " string summaryOfComparisons = GenerateSummaryForComparisons();\n", + " File.WriteAllText(Path.Combine(basePath, 
\"SummaryOfComparisons.csv\"), summaryOfComparisons);\n", + "\n", + " // Add Regression Summary for Comparisons.\n", + " string regressionSummary = GenerateRegressionSummaryForComparisons();\n", + " File.WriteAllText(Path.Combine(basePath, \"RegressionSummary.csv\"), regressionSummary);\n", + "\n", + " // Add Large Regression Analysis for Comparison.\n", + " string perComparisonDataPath = Path.Combine(basePath, \"PerComparisonData\");\n", + " if (!Directory.Exists(perComparisonDataPath))\n", + " {\n", + " Directory.CreateDirectory(perComparisonDataPath);\n", + " }\n", + "\n", + " foreach (var comparison in Comparisons)\n", + " {\n", + " string comparisonPath = Path.Combine(perComparisonDataPath, comparison.Key);\n", + " Directory.CreateDirectory(comparisonPath);\n", + "\n", + " Dictionary regressionComparisons = GenerateRegressionAnalysisForComparison(comparison.Key);\n", + "\n", + " // Memory\n", + " File.WriteAllText(Path.Combine(comparisonPath, \"MemoryRegressions.csv\"), regressionComparisons[\"memory\"]);\n", + "\n", + " // Throughput\n", + " File.WriteAllText(Path.Combine(comparisonPath, \"ThroughputRegressions.csv\"), regressionComparisons[\"throughput\"]);\n", + "\n", + " // Latency\n", + " File.WriteAllText(Path.Combine(comparisonPath, \"LatencyRegressions.csv\"), regressionComparisons[\"latency\"]);\n", + "\n", + " // All\n", + " File.WriteAllText(Path.Combine(comparisonPath, \"All.csv\"), regressionComparisons[\"all\"]);\n", + " }\n", + " }\n", + "\n", + " public Dictionary> Comparisons { get; }\n", + "}\n", + "\n", + "\n", + "//var datas3_vs_datas_4 = baseDataManager.GetBenchmarkToComparison(\"base_0\", \"base_1\");\n", + "\n", + "Dictionary> comparisons = new()\n", + "{\n", + " { nameof(run1_vs_run2), run1_vs_run2 },\n", + "};\n", + "\n", + "SummaryTable summaryTable = new(comparisons);\n", + "summaryTable.SaveComparisons(diffPath);\n", + "\n", + "\n", + "// I don't use this anymore (or GCCharting at all)\n", + "\n", + "void ChartProperty(LoadInfo baseline, LoadInfo comparand, string nameOfProperty)\n", + "{\n", + " GCProcessData baselineGC = baseline.Data;\n", + " GCProcessData comparandGC = comparand.Data;\n", + "\n", + " List<(string scatterName, List gcs)> gcData = \n", + " new()\n", + " {\n", + " { ( scatterName : $\"{nameOfProperty} for {baseline.Id}\" , gcs : baselineGC.GCs )},\n", + " { ( scatterName : $\"{nameOfProperty} for {comparand.Id}\" , gcs : comparandGC.GCs )}\n", + " };\n", + "\n", + " GCCharting.ChartGCData(gcData : gcData, \n", + " title : $\"{nameOfProperty} Comparison Between {baseline.Run} and {comparand.Run}\", \n", + " isXAxisRelative : false,\n", + " fieldName : nameOfProperty).Display();\n", + "\n", + "}\n", + "\n", + "void ChartProperty(LoadInfo comparison, string nameOfProperty)\n", + "{\n", + " GCProcessData baselineGC = comparison.Data;\n", + " GCProcessData comparandGC = comparison.Data2;\n", + "\n", + " List<(string scatterName, List gcs)> gcData = \n", + " new()\n", + " {\n", + " { ( scatterName : $\"{nameOfProperty} for Baseline\" , gcs : baselineGC.GCs )},\n", + " { ( scatterName : $\"{nameOfProperty} for Comparand\" , gcs : comparandGC.GCs )}\n", + " };\n", + "\n", + " GCCharting.ChartGCData(gcData : gcData, \n", + " title : $\"{nameOfProperty} Comparison\", \n", + " isXAxisRelative : false,\n", + " fieldName : nameOfProperty).Display();\n", + "\n", + "}\n", + "\n", + "void ChartProperty(IEnumerable info, string nameOfProperty)\n", + "{\n", + " List<(string scatterName, List gcs)> gcData =\n", + " info.Select(li => (scatterName: 
$\"{nameOfProperty}\", gcs: li.Data.GCs)).ToList();\n", + " GCCharting.ChartGCData(gcData: gcData, title: \"${nameOfProperty} Comparison\", isXAxisRelative: false, fieldName: nameOfProperty).Display();\n", + "}\n", + "\n", + "\n", + "var run1_Benchmark = diffDataManager.GetBenchmarkData(benchmark: \"CachingPlatform\", \"tp3-m_0\");\n", + "var run2_Benchmark = diffDataManager.GetBenchmarkData(benchmark: \"CachingPlatform\", \"tp3-m_1\");\n", + "\n", + "// Chart the PauseDurationMSec for the run1 vs. run2.\n", + "ChartProperty(baseline: run1_Benchmark, comparand: run2_Benchmark, nameof(TraceGC.HeapCount))\n", + "\n", + "\n", + "// Leftover code that bucketed ranges of values for metrics and displayed them in columns\n", + "// - probably out-of-date (and very hardwired to the data I was looking at) - probably ignore this\n", + "\n", + "int[] ranges = {1,11,12,13};\n", + "\n", + "for (int i = 0; i < 4; ++i)\n", + "{\n", + " string trace = \"fixed-newlinear-nosmooth_\" + i;\n", + " Console.Write($\"{trace}: \");\n", + " //var cpData = diffDataManager.GetBenchmarkData(\"MultipleQueriesPlatform\", trace);\n", + " var cpData = noDataManager.GetBenchmarkData(\"Fortunes\", trace);\n", + "\n", + " int prevNumHeaps = -1;\n", + " int count = 0;\n", + " int nextRangeIndex = 0;\n", + " foreach (int numHeaps in cpData.Data.GCs.Select(gc => gc.GlobalHeapHistory?.NumHeaps).Where(x => x.HasValue).Append(-1))\n", + " {\n", + " if (numHeaps == prevNumHeaps)\n", + " {\n", + " count++;\n", + " continue;\n", + " }\n", + "\n", + " if (count != 0)\n", + " {\n", + " int skip = ranges.Skip(nextRangeIndex).TakeWhile(r => prevNumHeaps > r).Count();\n", + " Console.Write(new string(' ', skip * 13));\n", + " nextRangeIndex += skip + 1;\n", + " Console.Write($\"{count,5} @ {prevNumHeaps,2} {(numHeaps == -1 ? ' ' : (numHeaps > prevNumHeaps ? 
'^' : 'v'))} \");\n", + " }\n", + " prevNumHeaps = numHeaps;\n", + " count = 1;\n", + " }\n", + " Console.WriteLine();\n", + "}\n", + "*/" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Debugging" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "System.Diagnostics.Process.GetCurrentProcess().Id" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "#!about" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".NET (C#)", + "language": "C#", + "name": ".net-csharp" + }, + "language_info": { + "name": "python" + }, + "orig_nbformat": 4, + "polyglot_notebook": { + "kernelInfo": { + "defaultKernelName": "csharp", + "items": [ + { + "aliases": [], + "name": "csharp" + } + ] + } + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/src/benchmarks/gc/GC.Infrastructure/Notebooks/BenchmarkAnalysis.dib b/src/benchmarks/gc/GC.Infrastructure/Notebooks/BenchmarkAnalysis.dib new file mode 100644 index 00000000000..731846083e5 --- /dev/null +++ b/src/benchmarks/gc/GC.Infrastructure/Notebooks/BenchmarkAnalysis.dib @@ -0,0 +1,12 @@ +#!meta + +{"kernelInfo":{"defaultKernelName":"csharp","items":[{"aliases":[],"languageName":"csharp","name":"csharp"}]}} + +#!pwsh + +dotnet build -c Release "..\GC.Analysis.API" + +#!csharp + +#!import DataManager.dib +#!import Reports.dib diff --git a/src/benchmarks/gc/GC.Infrastructure/Notebooks/BenchmarkAnalysis.ipynb b/src/benchmarks/gc/GC.Infrastructure/Notebooks/BenchmarkAnalysis.ipynb deleted file mode 100644 index e4b4d4e48e9..00000000000 --- a/src/benchmarks/gc/GC.Infrastructure/Notebooks/BenchmarkAnalysis.ipynb +++ /dev/null @@ -1,5580 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n", - "# Benchmark Analysis \n", - "\n", - "This notebook contains code for producing charts (and soon, tables) for GC benchmarks. It can currently process data\n", - "from the ASP.NET benchmarks obtained using crank as well as ETL data. One of the design points of this notebook is\n", - "that the different operations have a similar \"feel\"; they have many optional parameters that build on default settings.\n", - "The parameters are intended to be identical (or at least similar) across operations.\n", - "\n", - "The data is organized in a hierarchy. (See `TopLevelData`.)\n", - "- A \"run\" consists of multiple \"configurations\". (See `RunData`.)\n", - "- A \"configuration\" consists of multiple \"benchmarks\". (See `ConfigData`.)\n", - "- A \"benchmark\" consists of multiple \"iterations\". (See `BenchmarkData`.)\n", - "- An \"iteration\" consists of multiple GCs. (See `IterationData`.)\n", - "\n", - "In addition to multiple instances of the next lower level, each level contains data appropriate for that level.\n", - "For example, an iteration of an ASP.NET benchmark will have an RPS (requests per second) score. 
The overall\n", - "benchmark could have the average RPS score across the iterations (though this can also be computed at presentation-time -\n", - "more on that later).\n", - "\n", - "Data is stored in a `DataManager` object. This class has a number of `Create...` and `Add...` methods. They process\n", - "data identically; a `Create` method is simply shorthand for `new` and `Add` and is the common usage.\n", - "\n", - "`CreateAspNetData` expects the directory structure that is produced by the GC infrastructure for ASP.NET. For example:\n", - "``` xml\n", - "\\_0\\._0.log\n", - " \\.gc.etl\n", - " \\._0.log\n", - " \\.gc.etl\n", - " \\_1\\...\n", - " \\_2\\...\n", - " \\_3\\...\n", - " \\_0\\...\n", - " \\_1\\...\n", - " \\_2\\...\n", - " \\_3\\...\n", - "```\n", - "Because of the way these names are generated, do not put `.` in any name or `_` in configuration names. The `_0`, `_1`,\n", - "etc., are the iterations.\n", - "\n", - "Many operations including `CreateAspNetData` use the `Filter` class. It is a consistent way to specify names to\n", - "include or exclude and can be done by listing names or by regular expression. `CreateAspNetData` can filter by\n", - "config or benchmark. (To filter by run, simply don't pass that directory to the method.) By default, it has a list\n", - "of process names that it will look for in the ETL data, but the optional parameter `pertinentProcesses` can override\n", - "that.\n", - "\n", - "`CreateGCTrace(s)` only loads ETL files. Since there is no context for a default value, `pertinentProcesses` must be\n", - "specified. GC traces can be loaded in two ways. The first expects one relevant process per trace and defaults to\n", - "setting the run as blank, the config as the enclosing directory name, and the iteration as zero. The\n", - "benchmark name is extracted from the ETL filename but can be overridden or filtered. The second allows multiple\n", - "processes per trace. It uses the process as the benchmark name and promotes the other values up one level (e.g.,\n", - "the ETL filename becomes the config). This behavior is controlled by the `loadMultipleProcesses` parameter.\n", - "\n", - "The data is stored in nested dictionaries that can be directly modified or accessed through a number of `Get...`\n", - "helpers. However, typically charting (and soon tabling) methods will be called next. There are charting methods\n", - "for each of the three levels (the \"run\" level is not included since aggregating across configurations is not\n", - "expected), and at each level there are two overloads that only differ based on whether they expect one metric or\n", - "a list of metrics.\n", - "- `ChartBenchmarks` will chart benchmarks across the x-axis using aggregation of data from the iterations. Each\n", - " run/configuration will be a data series.\n", - "- `ChartIterations` will chart benchmarks across the x-axis using data from each iteration. Each\n", - " run/configuration/iteration will be a data series.\n", - "- `ChartGCData` will chart GCs across the x-axis using data from each iteration. Each run/configuration/iteration\n", - " will be a data series, and by default each benchmark will be on a different chart.\n", - "\n", - "Each charting method requires one or more metrics to include in the chart. These are represented by the `Metric`\n", - "class, which encapsulates a way to extract the metric from the data source, a label for that data, and the unit\n", - "for that data. Many examples of metrics are provided in the `Metrics` class. 
Data from one level can be\n", - "aggregated to the next level via the `Metrics.Promote` methods and the `Aggregation` class. For example, the\n", - "average GC pause time for the execution of a single iteration can be extracted using\n", - "`Metrics.Promote(Metrics.G.PauseDuration, Aggregation.Max)`, though this particular example is already available as\n", - "`Metrics.I.MaxPauseDuration`. Sample GC metrics are in `Metrics.G`. Sample iteration metrics are in `Metrics.I`.\n", - "Sample benchmark metrics are in `Metrics.B`.\n", - "\n", - "For typical cases, x-axis values are handled automatically (the GC index or the benchmark name as appropriate), but\n", - "the start time of the GC can be used instead by passing `Metrics.X.StartRelativeMSec` as the optional `xMetric`\n", - "argument. (See the class `BaseMetric` for more details on how this works.)\n", - "\n", - "Each charting method accepts `Filter`s for the runs, configs, and benchmarks and a predicate `dataFilter` for the\n", - "data itself (`BenchmarkData`, `IterationData`, or `TraceGC`).\n", - "\n", - "In addition, some more advanced arguments are available:\n", - "- `xArrangement` - controls how the x-axis is arranged\n", - " - `XArrangements.Default` - normal sorting by x values\n", - " - `XArrangements.Sorted` - each series is sorted (highest-to-lowest), and the x-axis values are changed to ranks\n", - " - `XArrangements.CombinedSorted` - the first series is sorted (highest-to-lowest), then other series are updated\n", - " to match the resulting ordering of x values found from that sort\n", - " - `XArrangements.Percentile` - similar to sorted except lower-to-highest, and the x-axis values are the\n", - " percentiles of the data within that series - `Sorted` is useful for a small number of items where the x values\n", - " have specific meanings (such as benchmark names), whereas `Percentile` is useful when considering the x values\n", - " as a distribution.\n", - " - Alternatively, create a new subclass of the `XArrangement` class\n", - "- `configNameSimplifier` - XPlot has trouble if the series' names (and thus the chart legend) get too large. The\n", - " configuration names can be long and repetitive, so this option can be used to display shorter values.\n", - " - `NameSimplifier.PrefixDashed` - a predefined strategy that considers configurations as a series of names\n", - " separated by dashes. Common prefixes are removed. For example, `a`, `a-b-d`, `a-b-e`, and `a-c` will be\n", - " simplified to `<>`, `b-d`, `b-e`, and `c`. The blank value and delimiter can be adjusted by creating a new\n", - " `PrefixSimplifier`.\n", - " - `ListSimplifier` - applies key-value pairs to the names\n", - " - Alternatively, create a new subclass of the `NameSimplifier` class\n", - "- `includeRunName` - By default, the run name is discarded when charting under the assumption that the typical\n", - " case is multiple configurations under the same run. Setting this parameter concatenates the run and configuration\n", - " together.\n", - "- `display` - By default, generated chart(s) will be displayed. 
Clearing this parameters prevents that behavior.\n", - " Charts are always returned to the caller for possible further processing.\n", - "- `debug` - Enables a bit of debug spew.\n", - "\n", - "Upcoming:\n", - "- Add the ability to specify a primary data series and add metrics that compare against it.\n", - "- Fill out the predefined metrics.\n", - "- Add requested features (specify width of chart).\n", - "- Add more aggregations, including adding the aggregation of iterations to an iteration-level chart/table.\n", - " (e.g., b1_1, b1_2, b1_3, b1_max, b1_avg, b2_1, b2_2, b2_3, b2_max, b2_avg)\n", - "- Consider splitting `SeriesInfo` into level-specific versions and make methods such as `ChartInternal` generic\n", - " on the series information." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Building and Using The GC Analysis API" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "pwsh" - }, - "polyglot_notebook": { - "kernelName": "pwsh" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "dotnet build -c Release \"..\\GC.Analysis.API\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "#i \"nuget: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/index.json\"\n", - "\n", - "#r \"nuget: Microsoft.Diagnostics.Tracing.TraceEvent\"\n", - "#r \"nuget: YamlDotnet\"\n", - "#r \"nuget: XPlot.Plotly\"\n", - "#r \"nuget: XPlot.Plotly.Interactive\"\n", - "#r \"nuget: Microsoft.Data.Analysis, 0.19.1\"\n", - "#r \"nuget: Newtonsoft.Json\"\n", - "\n", - "// TODO: Ensure you are pointing to the right artifacts folder.\n", - "#r \"..\\..\\..\\..\\..\\artifacts\\bin\\GC.Analysis.API\\Release\\net7.0\\GC.Analysis.API.dll\"\n", - "\n", - "using Etlx = Microsoft.Diagnostics.Tracing.Etlx;\n", - "using GC.Analysis.API;\n", - "using Microsoft.Data.Analysis;\n", - "using Microsoft.Diagnostics.Tracing.Analysis.GC;\n", - "using Microsoft.Diagnostics.Tracing.Analysis;\n", - "using Microsoft.Diagnostics.Tracing.Parsers.Clr;\n", - "using Microsoft.Diagnostics.Tracing;\n", - "using System.Diagnostics;\n", - "using XPlot.Plotly;\n", - "\n", - "using System.IO;\n", - "using System.Text.RegularExpressions;\n", - "using Newtonsoft.Json;\n", - "\n", - "// Very basic utilities\n", - "\n", - "// ML and MA are convenience syntax for making lists and arrays.\n", - "public static List ML(params T[] elems) => new List(elems);\n", - "public static T[] MA(params T[] elems) => elems;\n", - "\n", - "public static V GetOrAdd(this Dictionary dict, K key, V value)\n", - " => dict.TryAdd(key, value) ? 
value : dict[key];\n", - "\n", - "public static void SetWithExtend(this List list, int index, T value)\n", - "{\n", - " int count = list.Count;\n", - " int needed = index + 1;\n", - " for (int i = 0; i < (needed - count); ++i)\n", - " {\n", - " list.Add(default(T));\n", - " }\n", - " list[index] = value;\n", - "}\n", - "\n", - "public static IEnumerable<(T, int)> WithIndex(this IEnumerable list) => list.Select((value, index) => (value, index));\n", - "public static bool NotNull(T x) => x != null;\n" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Data Acquisition\n", - "\n", - "The next few cells detail how to retrieve the data from a base path. The run name below is the name of the folder generated from running the ``aspnetbenchmarks`` command from the GC.Infrastructure API. " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "// The LoadInfo class consists of all the pertinent fields needed to represent both the result from a particular benchmark\n", - "// as well as the the comparison between two runs where the Data2 represents the GCProcessData of the comparand.\n", - "public sealed class LoadInfo\n", - "{\n", - " public double MaxWorkingSetMB {get;set;} = double.NaN;\n", - " public double P99WorkingSetMB {get;set;} = double.NaN;\n", - " public double P95WorkingSetMB {get;set;} = double.NaN;\n", - " public double P90WorkingSetMB {get;set;} = double.NaN;\n", - " public double P75WorkingSetMB {get;set;} = double.NaN;\n", - " public double P50WorkingSetMB {get;set;} = double.NaN;\n", - "\n", - " public double MaxPrivateMemoryMB {get;set;} = double.NaN;\n", - " public double P99PrivateMemoryMB {get;set;} = double.NaN;\n", - " public double P95PrivateMemoryMB {get;set;} = double.NaN;\n", - " public double P90PrivateMemoryMB {get;set;} = double.NaN;\n", - " public double P75PrivateMemoryMB {get;set;} = double.NaN;\n", - " public double P50PrivateMemoryMB {get;set;} = double.NaN;\n", - "\n", - " public double RequestsPerMSec {get; set;} = double.NaN;\n", - " public double MeanLatencyMS {get; set;} = double.NaN;\n", - " public double Latency99thMS {get; set;} = double.NaN;\n", - " public double Latency90thMS {get; set;} = double.NaN;\n", - " public double Latency75thMS {get; set;} = double.NaN;\n", - " public double Latency50thMS {get; set;} = double.NaN;\n", - "\n", - " // Do these need to be stored on the LoadInfo? 
Context should already have this information.\n", - " public string Run {get; set;}\n", - " public string Config {get; set;}\n", - " public string Benchmark {get; set;}\n", - " public int Iteration {get; set;} = -1;\n", - "}\n", - "\n", - "public class GCSummaryInfo\n", - "{\n", - " public double TotalSuspensionTimeMSec {get;set;} = double.NaN;\n", - " public double PercentPauseTimeInGC {get; set;} = double.NaN;\n", - " public double PercentTimeInGC {get; set;} = double.NaN;\n", - " public double MeanHeapSizeBeforeMB {get; set;} = double.NaN;\n", - " public double MaxHeapSizeMB {get; set;} = double.NaN;\n", - " public double TotalAllocationsMB {get;set;} = double.NaN;\n", - " public double GCScore {get;set;} = double.NaN;\n", - "\n", - " public double MaxHeapCount {get;set;} = double.NaN;\n", - " public double NumberOfHeapCountSwitches {get;set;} = double.NaN;\n", - " public double NumberOfHeapCountDirectionChanges {get;set;} = double.NaN;\n", - "\n", - " // Consider removing\n", - " public GCProcessData Data {get;set;}\n", - " public GCProcessData? Data2 {get;set;}\n", - "\n", - " public int ProcessId {get;set;}\n", - " public string CommandLine {get;set;}\n", - " public string TracePath {get; set;}\n", - " public string ProcessName {get;set;}\n", - "}\n", - "\n", - "public class BenchmarkSummaryData\n", - "{\n", - " public double MaxWorkingSetMB {get;set;} = double.NaN;\n", - " public double P99WorkingSetMB {get;set;} = double.NaN;\n", - " public double P95WorkingSetMB {get;set;} = double.NaN;\n", - " public double P90WorkingSetMB {get;set;} = double.NaN;\n", - " public double P75WorkingSetMB {get;set;} = double.NaN;\n", - " public double P50WorkingSetMB {get;set;} = double.NaN;\n", - "\n", - " public double MaxPrivateMemoryMB {get;set;} = double.NaN;\n", - " public double P99PrivateMemoryMB {get;set;} = double.NaN;\n", - " public double P95PrivateMemoryMB {get;set;} = double.NaN;\n", - " public double P90PrivateMemoryMB {get;set;} = double.NaN;\n", - " public double P75PrivateMemoryMB {get;set;} = double.NaN;\n", - " public double P50PrivateMemoryMB {get;set;} = double.NaN;\n", - "\n", - " public double RequestsPerMSec {get;set;} = double.NaN;\n", - " public double MeanLatencyMS {get; set;} = double.NaN;\n", - " public double Latency50thMS {get; set;} = double.NaN;\n", - " public double Latency75thMS {get; set;} = double.NaN;\n", - " public double Latency90thMS {get; set;} = double.NaN;\n", - " public double Latency99thMS {get; set;} = double.NaN;\n", - "\n", - " public string Benchmark {get; set;}\n", - "}\n", - "\n", - "// XXXData is the Data for an XXX, not a mapping from XXX to data.\n", - "// For example, BenchmarkData is a mapping from iterations to data because a benchmark can have multiple iterations.\n", - "public record IterationData(LoadInfo LoadInfo, GCSummaryInfo GCSummaryInfo, GCProcessData GCProcessData)\n", - "{\n", - " public LoadInfo LoadInfo { get; set; } = LoadInfo;\n", - " public GCSummaryInfo GCSummaryInfo { get; set; } = GCSummaryInfo;\n", - " public GCProcessData GCProcessData { get; set; } = GCProcessData;\n", - " // GCLogInfo GCLogInfo;\n", - " // Dictionary Other;\n", - "}\n", - "public record BenchmarkData(LoadInfo SummaryLoadInfo, List Iterations); // Iteration # -> data\n", - "public record ConfigData(Dictionary Benchmarks); // Benchmark name -> data\n", - "public record RunData(Dictionary Configs); // Config name -> data\n", - "public record TopLevelData(Dictionary Runs); // Run name -> data\n", - "\n", - "public class Filter // abstraction used whenever 
names should be filtered\n", - "{\n", - " private string[] _includeNames;\n", - " private string[] _excludeNames;\n", - " private Regex _includeRE;\n", - " private Regex _excludeRE;\n", - "\n", - " public Filter(params string[] includeNames) : this(includeNames: includeNames, excludeNames: null) {}\n", - " public Filter(IEnumerable includeNames = null, IEnumerable excludeNames = null,\n", - " string includeRE = null, string excludeRE = null)\n", - " : this(\n", - " includeNames: includeNames?.ToArray(),\n", - " excludeNames: excludeNames?.ToArray(),\n", - " includeRE: (includeRE != null) ? (new Regex(includeRE)) : null,\n", - " excludeRE: (excludeRE != null) ? (new Regex(excludeRE)) : null\n", - " )\n", - " {}\n", - "\n", - " private Filter(string[] includeNames = null, string[] excludeNames = null,\n", - " Regex includeRE = null, Regex excludeRE = null)\n", - " {\n", - " _includeNames = includeNames;\n", - " _excludeNames = excludeNames;\n", - " _includeRE = includeRE;\n", - " _excludeRE = excludeRE;\n", - " }\n", - "\n", - " public static Filter Names(params string[] includeNames) => new(includeNames: includeNames);\n", - " public static Filter ExcludeNames(params string[] includeNames) => new(excludeNames: includeNames);\n", - " public static Filter RE(string includeRE) => new(includeRE: includeRE);\n", - " public static Filter ExcludeRE(string includeRE) => new(excludeRE: includeRE);\n", - " public static Filter All { get; } = new(null);\n", - "\n", - " public bool Include(string candidate)\n", - " => (((_includeNames != null) || (_includeRE != null))\n", - " ? ((_includeNames?.Contains(candidate) ?? false) || ((_includeRE?.Match(candidate).Success ?? false)))\n", - " : true)\n", - " && (!_excludeNames?.Contains(candidate) ?? true)\n", - " && (!_excludeRE?.Match(candidate).Success ?? true);\n", - "}\n", - "\n", - "public class IntFilter\n", - "{\n", - " private (int min, int max)[] _includeRanges;\n", - " private (int min, int max)[] _excludeRanges;\n", - "\n", - " private static IEnumerable EmptyIfNull(IEnumerable enumerable)\n", - " => enumerable ?? Enumerable.Empty();\n", - "\n", - " public IntFilter(params int[] includeValues) : this(includeValues: includeValues, excludeRanges: null) {}\n", - " public IntFilter(params (int min, int max)[] includeRanges) : this(includeRanges: includeRanges, excludeRanges: null) {}\n", - " public IntFilter(IEnumerable includeValues = null, IEnumerable excludeValues = null,\n", - " IEnumerable<(int min, int max)> includeRanges = null, IEnumerable<(int min, int max)> excludeRanges = null)\n", - " : this(\n", - " includeRanges:\n", - " (includeValues != null || includeRanges != null)\n", - " ? (EmptyIfNull(includeValues).Select(v => (v,v))).Concat(EmptyIfNull(includeRanges)).ToArray()\n", - " : null,\n", - " excludeRanges:\n", - " (excludeValues != null || excludeRanges != null)\n", - " ? 
(EmptyIfNull(excludeValues).Select(v => (v,v))).Concat(EmptyIfNull(excludeRanges)).ToArray()\n", - " : null\n", - " )\n", - " {}\n", - "\n", - " private IntFilter((int min, int max)[] includeRanges = null, (int min, int max)[] excludeRanges = null)\n", - " {\n", - " _includeRanges = includeRanges;\n", - " _excludeRanges = excludeRanges;\n", - " }\n", - "\n", - " public static IntFilter Values(params int[] includeValues) => new(includeValues: includeValues);\n", - " public static IntFilter Ranges(params (int min, int max)[] includeRanges) => new(includeRanges: includeRanges);\n", - " public static IntFilter ExcludeValues(params int[] excludeValues) => new(excludeValues: excludeValues);\n", - " public static IntFilter ExcludeRanges(params (int min, int max)[] excludeRanges) => new(excludeRanges: excludeRanges);\n", - " public static IntFilter All { get; } = new(includeValues: null);\n", - "\n", - " public bool Include(int candidate)\n", - " => (_includeRanges?.Any(pair => pair.min <= candidate && candidate <= pair.max) ?? true)\n", - " && (!_excludeRanges?.Any(pair => pair.min <= candidate && candidate <= pair.max) ?? true);\n", - "}" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "// Filter tests\n", - "int failed = 0;\n", - "void Assert(bool b, string message)\n", - "{\n", - " if (!b)\n", - " {\n", - " failed++;\n", - " Console.WriteLine($\"Failed: {message}\");\n", - " }\n", - "}\n", - "\n", - "{\n", - " foreach (Filter fa in ML(new(\"a\"), new (includeNames: ML(\"a\")), Filter.Names(\"a\"), new(includeRE: \"a\"), Filter.RE(\"a\")))\n", - " {\n", - " Assert(fa.Include(\"a\"), \"a~a\");\n", - " Assert(!fa.Include(\"b\"), \"a~!b\");\n", - " }\n", - "\n", - " foreach (Filter fab in ML(new(\"a\", \"b\"), new(includeNames: ML(\"a\", \"b\")), Filter.Names(\"a\", \"b\"), new(includeRE: \"a|b\"), Filter.RE(\"a|b\"),\n", - " new(includeNames: ML(\"a\"), includeRE: \"b\")))\n", - " {\n", - " Assert(fab.Include(\"a\"), \"ab~a\");\n", - " Assert(fab.Include(\"b\"), \"ab~b\");\n", - " Assert(!fab.Include(\"c\"), \"ab~!c\");\n", - " }\n", - "\n", - " foreach (Filter fna in ML(new(excludeNames: ML(\"a\")), Filter.ExcludeNames(\"a\"), new(excludeRE: \"a\"), Filter.ExcludeRE(\"a\")))\n", - " {\n", - " Assert(!fna.Include(\"a\"), \"!a~!a\");\n", - " Assert(fna.Include(\"b\"), \"!a~b\");\n", - " }\n", - "\n", - " foreach (Filter fnab in ML(new(excludeNames: ML(\"a\", \"b\")), Filter.ExcludeNames(\"a\", \"b\"), new(excludeRE: \"a|b\"), Filter.ExcludeRE(\"a|b\"),\n", - " new(excludeNames: ML(\"a\"), excludeRE: \"b\")))\n", - " {\n", - " Assert(!fnab.Include(\"a\"), \"!ab~!a\");\n", - " Assert(!fnab.Include(\"b\"), \"!ab~!b\");\n", - " Assert(fnab.Include(\"c\"), \"!ab~c\");\n", - " }\n", - "\n", - " foreach (Filter fanb in ML(new(includeNames: ML(\"a\", \"b\"), excludeNames: ML(\"b\")), new(includeRE: \"a|b\", excludeRE: \"b\")))\n", - " {\n", - " Assert(fanb.Include(\"a\"), \"a!b~a\");\n", - " Assert(!fanb.Include(\"b\"), \"a!b~!b\");\n", - " }\n", - "\n", - " Assert(Filter.All.Include(\"a\"), \"all~a\");\n", - "\n", - " foreach (IntFilter f1 in ML(new(1), new((1,1)), new (includeValues: ML(1)), new (includeRanges: ML((1,1))),\n", - " IntFilter.Values(1), IntFilter.Ranges((1,1))))\n", - " {\n", - " Assert(f1.Include(1), \"1~1\");\n", - " Assert(!f1.Include(2), \"1~!2\");\n", - " 
}\n", - "\n", - " foreach (IntFilter f12 in ML(new((1,2)), new((1,1), (2,2)),\n", - " new (includeValues: ML(1,2)), new (includeRanges: ML((1,1), (2,2))), new (includeRanges: ML((1,2))),\n", - " IntFilter.Values(1, 2), IntFilter.Ranges((1,2)), IntFilter.Ranges((1,1), (2,2))))\n", - " {\n", - " Assert(f12.Include(1), \"1~1\");\n", - " Assert(f12.Include(2), \"1~!2\");\n", - " }\n", - "\n", - " foreach (IntFilter fn1 in ML(new (excludeValues: ML(1)), new (excludeRanges: ML((1,1))),\n", - " IntFilter.ExcludeValues(1), IntFilter.ExcludeRanges((1,1))))\n", - " {\n", - " Assert(!fn1.Include(1), \"!1~!1\");\n", - " Assert(fn1.Include(2), \"!1~2\");\n", - " }\n", - "\n", - " foreach (IntFilter fn12 in ML(new(excludeValues: ML(1,2)), IntFilter.ExcludeValues(1,2),\n", - " new(excludeRanges: ML((1,1),(2,2))), new(excludeRanges: ML((1,2))), IntFilter.ExcludeRanges((1,2)), IntFilter.ExcludeRanges((1,2))))\n", - " {\n", - " Assert(!fn12.Include(1), \"!12~!1\");\n", - " Assert(!fn12.Include(2), \"!12~!2\");\n", - " Assert(fn12.Include(3), \"!12~3\");\n", - " }\n", - "\n", - " foreach (IntFilter f1n2 in ML(new(includeValues: ML(1,2), excludeValues: ML(2)), new(includeRanges: ML((1,2)), excludeRanges: ML((2,2)))))\n", - " {\n", - " Assert(f1n2.Include(1), \"1!2~1\");\n", - " Assert(!f1n2.Include(2), \"1!2~!2\");\n", - " }\n", - "\n", - " Assert(IntFilter.All.Include(1), \"all~1\");\n", - "}\n", - "if (failed > 0) throw new Exception($\"Failed {failed} test(s)\");" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "using ConfigIterationFilter = System.Collections.Generic.IReadOnlyDictionary;\n", - "\n", - "public static bool MightInclude(this ConfigIterationFilter configIterationFilter, string config)\n", - " => (configIterationFilter == null) || configIterationFilter.ContainsKey(config);\n", - "\n", - "public static bool Include(this ConfigIterationFilter configIterationFilter, string config, int iteration)\n", - " => (configIterationFilter == null) || (configIterationFilter.GetValueOrDefault(config)?.Include(iteration) ?? 
true);\n", - "\n", - "public class DataManager\n", - "{\n", - " public readonly TopLevelData _data;\n", - "\n", - " public DataManager() => _data = new(new());\n", - "\n", - " public static DataManager CreateAspNetData(string basePath,\n", - " Filter configFilter = null, Filter benchmarkFilter = null, IntFilter iterationFilter = null, ConfigIterationFilter configIterationFilter = null,\n", - " List pertinentProcesses = null)\n", - " => CreateAspNetData(MA(basePath),\n", - " configFilter: configFilter, benchmarkFilter: benchmarkFilter, iterationFilter: iterationFilter, configIterationFilter: configIterationFilter,\n", - " pertinentProcesses: pertinentProcesses);\n", - "\n", - " public static DataManager CreateAspNetData(IEnumerable basePaths,\n", - " Filter configFilter = null, Filter benchmarkFilter = null, IntFilter iterationFilter = null, ConfigIterationFilter configIterationFilter = null,\n", - " List pertinentProcesses = null)\n", - " {\n", - " DataManager dataManager = new();\n", - " dataManager.AddAspNetData(basePaths: basePaths,\n", - " configFilter: configFilter, benchmarkFilter: benchmarkFilter, iterationFilter: iterationFilter, configIterationFilter: configIterationFilter,\n", - " pertinentProcesses: pertinentProcesses);\n", - " return dataManager;\n", - " }\n", - "\n", - " public static DataManager CreateGCTrace(string file, List pertinentProcesses, string run = null, string config = null, int? iteration = null,\n", - " bool loadMultipleProcesses = true)\n", - " {\n", - " DataManager dataManager = new();\n", - " dataManager.AddGCTrace(file: file, pertinentProcesses: pertinentProcesses, run: run, config: config, iteration: iteration,\n", - " loadMultipleProcesses: loadMultipleProcesses);\n", - " return dataManager;\n", - " }\n", - "\n", - " public static DataManager CreateGCTraces(string basePath, List pertinentProcesses, SearchOption searchOption = SearchOption.TopDirectoryOnly,\n", - " Filter benchmarkFilter = null, string run = null, string config = null, int? iteration = null, bool loadMultipleProcesses = true)\n", - " {\n", - " DataManager dataManager = new();\n", - " dataManager.AddGCTraces(basePath: basePath, pertinentProcesses: pertinentProcesses, searchOption: searchOption,\n", - " benchmarkFilter: benchmarkFilter, run: run, config: config, iteration: iteration, loadMultipleProcesses: loadMultipleProcesses);\n", - " return dataManager;\n", - "\n", - " }\n", - "\n", - " public void AddAspNetData(string basePath,\n", - " Filter configFilter = null, Filter benchmarkFilter = null, IntFilter iterationFilter = null, ConfigIterationFilter configIterationFilter = null,\n", - " List pertinentProcesses = null)\n", - " => AddAspNetData(basePaths: MA(basePath),\n", - " configFilter: configFilter, benchmarkFilter: benchmarkFilter, iterationFilter: iterationFilter, configIterationFilter: configIterationFilter,\n", - " pertinentProcesses: pertinentProcesses);\n", - "\n", - " public void AddAspNetData(IEnumerable basePaths,\n", - " Filter configFilter = null, Filter benchmarkFilter = null, IntFilter iterationFilter = null, ConfigIterationFilter configIterationFilter = null,\n", - " List pertinentProcesses = null)\n", - " {\n", - " configFilter = configFilter ?? Filter.All;\n", - " benchmarkFilter = benchmarkFilter ?? Filter.All;\n", - " iterationFilter = iterationFilter ?? 
IntFilter.All;\n", - " // configIterationFilter is not set to an empty dictionary as that would exclude everything\n", - "\n", - " foreach (var basePath in basePaths)\n", - " {\n", - " LoadAspNetDataFromBasePath(basePath: basePath,\n", - " configFilter: configFilter, benchmarkFilter: benchmarkFilter, iterationFilter: iterationFilter, configIterationFilter: configIterationFilter,\n", - " pertinentProcesses: pertinentProcesses);\n", - " }\n", - " }\n", - "\n", - " public void AddGCTrace(string file, List pertinentProcesses, string run = null, string config = null, int? iteration = null, bool loadMultipleProcesses = true)\n", - " {\n", - " LoadGCTrace(file: file, configFilter: Filter.All, benchmarkFilter: Filter.All, run: run, config: config, iteration: iteration, pertinentProcesses: pertinentProcesses,\n", - " expectAspNetData: false, loadMultipleProcesses: loadMultipleProcesses);\n", - " }\n", - "\n", - " public void AddGCTraces(string basePath, List pertinentProcesses, SearchOption searchOption = SearchOption.TopDirectoryOnly, Filter configFilter = null, Filter benchmarkFilter = null,\n", - " string run = null, string config = null, int? iteration = null, bool loadMultipleProcesses = true)\n", - " {\n", - " configFilter = configFilter ?? Filter.All;\n", - " benchmarkFilter = benchmarkFilter ?? Filter.All;\n", - "\n", - " LoadGCTracesFromPath(path: basePath, searchOption: searchOption, configFilter: configFilter, benchmarkFilter: benchmarkFilter,\n", - " run: run, config: config, iteration: iteration, pertinentProcesses: pertinentProcesses,\n", - " expectAspNetData: false, loadMultipleProcesses: loadMultipleProcesses);\n", - " }\n", - "\n", - " public static double DeltaPercent (double baseline, double comparand) => Math.Round((comparand - baseline) / baseline * 100, 2);\n", - "\n", - " public TopLevelData Data => _data; \n", - "\n", - " //public static LoadInfo LoadLogFile(string file)\n", - " //{\n", - " // \n", - " //}\n", - "\n", - " // Consider generalizing the error reporting here\n", - " private (string, int) ParseConfigIterName(string dir)\n", - " {\n", - " int lastUnderscore = dir.LastIndexOf(\"_\");\n", - " string config;\n", - " int iteration;\n", - " if ((lastUnderscore != -1)\n", - " && int.TryParse(dir.AsSpan(lastUnderscore + 1), out iteration))\n", - " {\n", - " config = dir.Substring(0, lastUnderscore);\n", - " }\n", - " else\n", - " {\n", - " Console.WriteLine($\"{dir} is not in the form _\");\n", - " config = dir;\n", - " iteration = 0;\n", - " }\n", - "\n", - " return (config, iteration);\n", - " }\n", - "\n", - " private (string, string, int) ParseBenchmarkLogFileName(string logName)\n", - " {\n", - " string[] split = Path.GetFileName(logName).Split(\".\");\n", - " if ((split.Length != 3) || (split[2] != \"log\"))\n", - " {\n", - " Console.WriteLine($\"{logName} is not in the form ._.log\");\n", - " }\n", - " // TODO: Store these suffixes\n", - " string benchmark = Path.GetFileName( split[0] ).Replace(\"_Windows\", \"\").Replace(\"_Linux\", \"\").Replace(\".gc\", \"\").Replace(\".nettrace\", \"\");\n", - " (string config, int iteration) = ParseConfigIterName(split[1]);\n", - " return (config, benchmark, iteration);\n", - " }\n", - "\n", - " private List AspNetProcesses = new()\n", - " {\n", - " \"PlatformBenchmarks\",\n", - " \"Benchmarks\",\n", - " \"MapAction\",\n", - " \"TodosApi\",\n", - " \"BasicGrpc\",\n", - " \"BasicMinimalApi\",\n", - " };\n", - "\n", - " private void LoadAspNetDataFromBasePath(string basePath,\n", - " Filter configFilter, Filter benchmarkFilter, 
IntFilter iterationFilter, ConfigIterationFilter configIterationFilter,\n", - " List pertinentProcesses)\n", - " {\n", - " pertinentProcesses = pertinentProcesses ?? AspNetProcesses;\n", - "\n", - " string run = Path.GetFileName(basePath);\n", - "\n", - " foreach (string fullDir in Directory.GetDirectories(basePath))\n", - " {\n", - " string subDir = Path.GetFileName(fullDir);\n", - " (string config, int iteration) = ParseConfigIterName(subDir);\n", - " if (configFilter.Include(config) && iterationFilter.Include(iteration) && configIterationFilter.Include(config, iteration))\n", - " {\n", - " LoadAspNetDataFromPath(fullDir, benchmarkFilter, run, config, iteration);\n", - " // configFilter has alreay been done- LoadGCTracesFromPath needs it for the case where loadMultipleProcesses is true\n", - " // and the filenames become the configs\n", - " LoadGCTracesFromPath(fullDir, SearchOption.TopDirectoryOnly, configFilter: Filter.All, benchmarkFilter: benchmarkFilter,\n", - " run: run, config: config, iteration: iteration,\n", - " pertinentProcesses: pertinentProcesses, expectAspNetData: true, loadMultipleProcesses: false);\n", - " }\n", - " }\n", - " }\n", - "\n", - " // Returns a LoadInfo with information extracted from the log file.\n", - " // Does not populate the Benchmark, etc., fields.\n", - " private LoadInfo LoadAspNetLogFile(string file)\n", - " {\n", - " LoadInfo info = new();\n", - "\n", - " int idxOfApplication = Int32.MaxValue;\n", - " int idxOfLoad = Int32.MaxValue;\n", - " int idx = 0;\n", - "\n", - " foreach (var line in File.ReadLines(file))\n", - " {\n", - " string[] sp = line.Split(\"|\", StringSplitOptions.TrimEntries);\n", - " if (line.Contains(\"| application\"))\n", - " {\n", - " idxOfApplication = idx;\n", - " }\n", - " else if (line.Contains(\"| load\"))\n", - " {\n", - " idxOfLoad = idx;\n", - " }\n", - " else if (line.Contains(\"| Latency 50th\"))\n", - " {\n", - " info.Latency50thMS = double.Parse(sp[2]);\n", - " }\n", - " else if (line.Contains(\"| Latency 75th\"))\n", - " {\n", - " info.Latency75thMS = double.Parse(sp[2]);\n", - " }\n", - " else if (line.Contains(\"| Latency 90th\"))\n", - " {\n", - " info.Latency90thMS = double.Parse(sp[2]);\n", - " }\n", - " else if (line.Contains(\"| Latency 99th\"))\n", - " {\n", - " info.Latency99thMS = double.Parse(sp[2]);\n", - " }\n", - " else if (line.Contains(\"Requests/sec\"))\n", - " {\n", - " info.RequestsPerMSec = double.Parse(sp[2]) / 1000;\n", - " }\n", - " else if (line.Contains(\"Mean latency\"))\n", - " {\n", - " info.MeanLatencyMS = double.Parse(sp[2]);\n", - " }\n", - " else if (line.Contains(\"Max Working Set\") && (idxOfApplication < idx && idx < idxOfLoad)) \n", - " {\n", - " info.MaxWorkingSetMB = double.Parse(sp[2]);\n", - " }\n", - " else if (line.Contains(\"Working Set P99\") && (idxOfApplication < idx && idx < idxOfLoad)) \n", - " {\n", - " info.P99WorkingSetMB = double.Parse(sp[2]);\n", - " }\n", - " else if (line.Contains(\"Working Set P95\") && (idxOfApplication < idx && idx < idxOfLoad)) \n", - " {\n", - " info.P95WorkingSetMB = double.Parse(sp[2]);\n", - " }\n", - " else if (line.Contains(\"Working Set P90\") && (idxOfApplication < idx && idx < idxOfLoad)) \n", - " {\n", - " info.P90WorkingSetMB = double.Parse(sp[2]);\n", - " } \n", - " else if (line.Contains(\"Working Set P75\") && (idxOfApplication < idx && idx < idxOfLoad)) \n", - " {\n", - " info.P75WorkingSetMB = double.Parse(sp[2]);\n", - " }\n", - " else if (line.Contains(\"Working Set P50\") && (idxOfApplication < idx && idx < idxOfLoad)) 
\n", - " {\n", - " info.P50WorkingSetMB = double.Parse(sp[2]);\n", - " } \n", - " else if (line.Contains(\"Max Private Memory\") && (idxOfApplication < idx && idx < idxOfLoad)) \n", - " {\n", - " info.MaxPrivateMemoryMB = double.Parse(sp[2]);\n", - " }\n", - " else if (line.Contains(\"Private Memory P99\") && (idxOfApplication < idx && idx < idxOfLoad)) \n", - " {\n", - " info.P99PrivateMemoryMB = double.Parse(sp[2]);\n", - " }\n", - " else if (line.Contains(\"Private Memory P95\") && (idxOfApplication < idx && idx < idxOfLoad)) \n", - " {\n", - " info.P95PrivateMemoryMB = double.Parse(sp[2]);\n", - " }\n", - " else if (line.Contains(\"Private Memory P90\") && (idxOfApplication < idx && idx < idxOfLoad)) \n", - " {\n", - " info.P90PrivateMemoryMB = double.Parse(sp[2]);\n", - " } \n", - " else if (line.Contains(\"Private Memory P75\") && (idxOfApplication < idx && idx < idxOfLoad)) \n", - " {\n", - " info.P75PrivateMemoryMB = double.Parse(sp[2]);\n", - " }\n", - " else if (line.Contains(\"Private Memory P50\") && (idxOfApplication < idx && idx < idxOfLoad)) \n", - " {\n", - " info.P50PrivateMemoryMB = double.Parse(sp[2]);\n", - " }\n", - "\n", - " ++idx;\n", - " }\n", - "\n", - " return info;\n", - " }\n", - "\n", - " private void LoadAspNetDataFromPath(string path, Filter benchmarkFilter, string run, string config, int iteration)\n", - " {\n", - " var files = Directory.GetFiles(path, \"*.log\", SearchOption.AllDirectories);\n", - "\n", - " foreach (var file in files)\n", - " {\n", - " if (file.Contains(\"build.log\") || file.Contains(\"output.log\") || file.Contains(\"_GCLog\"))\n", - " {\n", - " continue;\n", - " }\n", - "\n", - " (string logConfig, string benchmark, int logIteration) = ParseBenchmarkLogFileName(file);\n", - "\n", - " if (!benchmarkFilter.Include(benchmark))\n", - " {\n", - " continue;\n", - " }\n", - "\n", - " if ((config != logConfig) || (iteration != logIteration))\n", - " {\n", - " Console.WriteLine($\"Directory name and log filename in {file} disagree on config/iteration\");\n", - " }\n", - "\n", - " LoadInfo info = LoadAspNetLogFile(file);\n", - "\n", - " info.Run = run;\n", - " info.Config = config;\n", - " info.Benchmark = benchmark;\n", - " info.Iteration = iteration;\n", - "\n", - " RunData runData = _data.Runs.GetOrAdd(run, new(new()));\n", - " ConfigData configData = runData.Configs.GetOrAdd(config, new(new()));\n", - " BenchmarkData benchmarkData = configData.Benchmarks.GetOrAdd(benchmark, new(null, new()));\n", - " if ((benchmarkData.Iterations.Count > iteration)\n", - " && (benchmarkData.Iterations[iteration] != null))\n", - " {\n", - " Console.WriteLine($\"WARNING: Duplicate iteration '{run} / {config} / {benchmark} / {iteration}' found\");\n", - " benchmarkData.Iterations[iteration].LoadInfo = info;\n", - " }\n", - " else\n", - " {\n", - " benchmarkData.Iterations.SetWithExtend(iteration, new(info, null, null));\n", - " }\n", - " }\n", - " }\n", - "\n", - " private void LoadGCTracesFromPath(string path, SearchOption searchOption, Filter configFilter, Filter benchmarkFilter, string run, string config, int? 
iteration, List pertinentProcesses,\n", - " bool expectAspNetData, bool loadMultipleProcesses)\n", - " {\n", - " var traceFiles = Directory.GetFiles(path, \"*.etl.zip\", searchOption).ToList();\n", - " var nettraceFiles = Directory.GetFiles(path, \"*.nettrace\", searchOption);\n", - " traceFiles.AddRange(nettraceFiles);\n", - "\n", - " Parallel.ForEach(traceFiles,\n", - " file => LoadGCTrace(file: file, configFilter: configFilter, benchmarkFilter: benchmarkFilter, run: run, config: config, iteration: iteration,\n", - " pertinentProcesses: pertinentProcesses, expectAspNetData: expectAspNetData, loadMultipleProcesses: loadMultipleProcesses));\n", - " }\n", - "\n", - " private void LoadGCTrace(string file, Filter configFilter, Filter benchmarkFilter, string run, string config, string benchmark, int? iteration, List pertinentProcesses, bool expectAspNetData, bool loadMultipleProcesses)\n", - " {\n", - " string dir = Path.GetFileName(Path.GetDirectoryName(file));\n", - " //string[] sp = file.Split(\"\\\\\");\n", - " //sp[sp.Length - 1]\n", - " string fileBaseName = Path.GetFileNameWithoutExtension(file)\n", - " .Replace(\"_Windows\", \"\")\n", - " .Replace(\".gc.etl\", \"\")\n", - " .Replace(\"_Linux\", \"\")\n", - " .Replace(\".nettrace\", \"\")\n", - " .Replace(\".gc\", \"\")\n", - " .Replace(\".etl\", \"\");\n", - "\n", - " if (loadMultipleProcesses && (benchmark != null))\n", - " {\n", - " throw new ArgumentException(\"LoadGCTrace: loadMultipleProcesses and setting a benchmark name are not compatible\");\n", - " }\n", - "\n", - " run = run ?? (loadMultipleProcesses ? dir : \"\");\n", - " config = config ?? (loadMultipleProcesses ? fileBaseName : dir);\n", - " if (!configFilter.Include(config)) return;\n", - "\n", - " Analyzer analyzer = AnalyzerManager.GetAnalyzer(file);\n", - " List allData;\n", - "\n", - " //foreach (var pair in analyzer.AllGCProcessData)\n", - " //{\n", - " // Console.WriteLine($\"{pair.Key}: {pair.Value.Count}\");\n", - " //}\n", - " //if (file.Contains(\".nettrace\"))\n", - " //{\n", - " // data = analyzer.AllGCProcessData.First().Value.First();\n", - " //}\n", - " //else\n", - " {\n", - " allData = pertinentProcesses.SelectMany(p => analyzer.GetProcessGCData(p)).ToList(); //.Where(NotNull).FirstOrDefault();\n", - " }\n", - "\n", - " if (allData.Count == 0)\n", - " {\n", - " Console.WriteLine($\"The following trace doesn't have a pertinent process: {file}\");\n", - " Console.WriteLine($\"Processes: {string.Join(\", \", analyzer.AllGCProcessData.Keys)}\");\n", - " Console.WriteLine($\"Check: {string.Join(\", \", analyzer.AllGCProcessData.Keys.Select(k => k == pertinentProcesses[0]))}\");\n", - " return;\n", - " }\n", - " if (!loadMultipleProcesses && (allData.Count > 1))\n", - " {\n", - " Console.WriteLine($\"The following trace has more than one pertinent process: {file}\");\n", - " Console.WriteLine($\"Found processes: {string.Join(\", \", allData.Select(d => d.ProcessName))}'\");\n", - " return;\n", - " }\n", - "\n", - " foreach (GCProcessData data in allData)\n", - " {\n", - " benchmark = benchmark ?? (loadMultipleProcesses ? data.ProcessName : fileBaseName);\n", - " if (!benchmarkFilter.Include(benchmark)) continue;\n", - " LoadGCTraceOneProcess(file, data, run, config, benchmark, iteration, expectAspNetData);\n", - " }\n", - " }\n", - "\n", - " private void LoadGCTraceOneProcess(string file, GCProcessData data, string run, string config, string benchmark, int? 
iteration, bool expectAspNetData)\n", - " {\n", - " GCSummaryInfo gcSummaryInfo = new();\n", - " gcSummaryInfo.MeanHeapSizeBeforeMB = data.Stats.MeanSizePeakMB;\n", - " gcSummaryInfo.MaxHeapSizeMB = data.Stats.MaxSizePeakMB;\n", - " gcSummaryInfo.PercentTimeInGC = (data.GCs.Sum(gc => gc.PauseDurationMSec - gc.SuspendDurationMSec) / (data.Stats.ProcessDuration) ) * 100;\n", - " gcSummaryInfo.TracePath = data.Parent.TraceLogPath;\n", - " gcSummaryInfo.TotalAllocationsMB = data.Stats.TotalAllocatedMB;\n", - " gcSummaryInfo.CommandLine = data.CommandLine;\n", - " gcSummaryInfo.PercentPauseTimeInGC = data.Stats.GetGCPauseTimePercentage();\n", - " gcSummaryInfo.GCScore = (gcSummaryInfo.MaxHeapSizeMB * gcSummaryInfo.PercentPauseTimeInGC);\n", - " gcSummaryInfo.ProcessId = data.ProcessID;\n", - " gcSummaryInfo.Data = data;\n", - " gcSummaryInfo.ProcessName = data.ProcessName;\n", - " gcSummaryInfo.TotalSuspensionTimeMSec = data.GCs.Sum(gc => gc.SuspendDurationMSec);\n", - "\n", - " gcSummaryInfo.MaxHeapCount = 0;\n", - " gcSummaryInfo.NumberOfHeapCountSwitches = 0;\n", - " gcSummaryInfo.NumberOfHeapCountDirectionChanges = 0;\n", - "\n", - " int? prevNumHeapsOption = null;\n", - " bool prevChangeUp = true; // don't want to count the initial 1->n change as a change in direction\n", - " for (int i = 0; i < data.GCs.Count; i++)\n", - " {\n", - " if (data.GCs[i].GlobalHeapHistory == null) continue;\n", - " int thisNumHeaps = data.GCs[i].GlobalHeapHistory.NumHeaps;\n", - " gcSummaryInfo.MaxHeapCount = Math.Max(gcSummaryInfo.MaxHeapCount, thisNumHeaps);\n", - " if (prevNumHeapsOption.HasValue)\n", - " {\n", - " int prevNumHeaps = prevNumHeapsOption.Value;\n", - " if (prevNumHeaps != thisNumHeaps)\n", - " {\n", - " gcSummaryInfo.NumberOfHeapCountSwitches++;\n", - " bool thisChangeUp = thisNumHeaps > prevNumHeaps;\n", - " if (prevChangeUp != thisChangeUp)\n", - " {\n", - " gcSummaryInfo.NumberOfHeapCountDirectionChanges++;\n", - " }\n", - " prevChangeUp = thisChangeUp;\n", - " }\n", - " }\n", - " prevNumHeapsOption = thisNumHeaps;\n", - " }\n", - "\n", - " lock (_data)\n", - " {\n", - " RunData runData = _data.Runs.GetOrAdd(run, new(new()));\n", - " ConfigData configData = runData.Configs.GetOrAdd(config, new(new()));\n", - " BenchmarkData benchmarkData = configData.Benchmarks.GetOrAdd(benchmark, new(null, new()));\n", - "\n", - " int iterationToUse = iteration ?? 
benchmarkData.Iterations.FindIndex(iterationData => iterationData == null);\n", - " if (iterationToUse == -1) iterationToUse = benchmarkData.Iterations.Count;\n", - "\n", - " if ((benchmarkData.Iterations.Count > iterationToUse)\n", - " && (benchmarkData.Iterations[iterationToUse] != null))\n", - " {\n", - " if (benchmarkData.Iterations[iterationToUse].GCSummaryInfo != null)\n", - " {\n", - " Console.WriteLine($\"Replacing existing GC information for '{run} / {config} / {benchmark} / {iterationToUse}' - {file}\");\n", - " }\n", - " benchmarkData.Iterations[iterationToUse].GCSummaryInfo = gcSummaryInfo;\n", - " benchmarkData.Iterations[iterationToUse].GCProcessData = data;\n", - " }\n", - " else\n", - " {\n", - " if (expectAspNetData)\n", - " {\n", - " Console.WriteLine($\"The following trace doesn't have a corresponding ASP.NET log '{run} / {config} / {benchmark} / {iterationToUse}' - {file}\");\n", - " }\n", - "\n", - " benchmarkData.Iterations.SetWithExtend(iterationToUse, new(null, gcSummaryInfo, data));\n", - " }\n", - " }\n", - " }\n", - "}" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "// Huge block of code that operates on DataManager\n", - "// -----------------------------------------------\n", - "\n", - "// Notebook cells are already in implicit classes, so this isn't needed (and doesn't work):\n", - "// public static class DataManagerExtensions\n", - "\n", - "public static IEnumerable<(string run, string config, ConfigData configData)> GetConfigsWithData(this DataManager dataManager, Filter runFilter, Filter configFilter)\n", - "{\n", - " foreach ((string run, RunData runData) in dataManager.Data.Runs)\n", - " {\n", - " if (!runFilter.Include(run)) continue;\n", - " foreach ((string config, ConfigData configData) in runData.Configs)\n", - " {\n", - " if (!configFilter.Include(config)) continue;\n", - " yield return (run, config, configData);\n", - " }\n", - " }\n", - "}\n", - "\n", - "public static IEnumerable<(string run, string config)> GetConfigs(this DataManager dataManager, Filter runFilter, Filter configFilter)\n", - " => dataManager.GetConfigsWithData(runFilter, configFilter).Select(tuple => (tuple.run, tuple.config));\n", - "\n", - "public static IEnumerable<(string run, string config, string benchmark, BenchmarkData benchmarkData)> GetBenchmarksWithData(\n", - " this DataManager dataManager, Filter runFilter, Filter configFilter, Filter benchmarkFilter, IntFilter iterationFilter, ConfigIterationFilter configIterationFilter)\n", - "{\n", - " foreach ((string run, string config, ConfigData configData) in dataManager.GetConfigsWithData(runFilter, configFilter))\n", - " {\n", - " if (!configIterationFilter.MightInclude(config)) continue;\n", - "\n", - " foreach ((string benchmark, BenchmarkData benchmarkData) in configData.Benchmarks)\n", - " {\n", - " if (!benchmarkFilter.Include(benchmark)) continue;\n", - " if (!benchmarkData.Iterations.WithIndex()\n", - " .Where(pair => pair.Item1 != null)\n", - " .Select(pair => pair.Item2)\n", - " .Any(iteration => iterationFilter.Include(iteration) && configIterationFilter.Include(config, iteration))) continue;\n", - " yield return (run, config, benchmark, benchmarkData);\n", - " }\n", - " }\n", - "}\n", - "\n", - "public static IEnumerable<(string run, string config, string benchmark)> 
GetBenchmarks(this DataManager dataManager, Filter runFilter, Filter configFilter,\n", - " Filter benchmarkFilter, IntFilter iterationFilter, ConfigIterationFilter configIterationFilter)\n", - " => dataManager.GetBenchmarksWithData(runFilter, configFilter, benchmarkFilter, iterationFilter, configIterationFilter)\n", - " .Select(tuple => (tuple.run, tuple.config, tuple.benchmark));\n", - "\n", - "public static IEnumerable<(string run, string config, int iteration, IterationData data)> GetIterationsForBenchmark(this DataManager dataManager,\n", - " Filter runFilter, Filter configFilter, IntFilter iterationFilter, ConfigIterationFilter configIterationFilter, string benchmark)\n", - "{\n", - " foreach ((string run, string config, ConfigData configData) in dataManager.GetConfigsWithData(runFilter, configFilter))\n", - " {\n", - " if (!configIterationFilter.MightInclude(config)) continue;\n", - " if (!configData.Benchmarks.TryGetValue(benchmark, out BenchmarkData benchmarkData)) continue;\n", - "\n", - " foreach ((IterationData iterationData, int iteration) in benchmarkData.Iterations.WithIndex())\n", - " {\n", - " if (!iterationFilter.Include(iteration)) continue;\n", - " if (!configIterationFilter.Include(config, iteration)) continue;\n", - " if (iterationData == null) continue;\n", - " yield return (run, config, iteration, iterationData);\n", - " }\n", - " }\n", - "}\n", - "\n", - "public static IEnumerable GetIterations(this ConfigData data, string config,\n", - " Filter benchmarkFilter, IntFilter iterationFilter, ConfigIterationFilter configIterationFilter)\n", - " // May need to improve efficiency here\n", - " => data.Benchmarks\n", - " .Where((b, _) => benchmarkFilter.Include(b.Key))\n", - " .SelectMany(b =>\n", - " b.Value.Iterations\n", - " .WithIndex()\n", - " .Where(pair => pair.Item1 != null)\n", - " .Select(pair => pair.Item2)\n", - " .Where(iteration => iterationFilter.Include(iteration) && configIterationFilter.Include(config, iteration)))\n", - " .Distinct()\n", - " .OrderBy(x => x);\n", - "\n", - "// Utilities\n", - "\n", - "// https://stackoverflow.com/a/49058506 \n", - "public static IEnumerable<(T PrevItem, T CurrentItem, T NextItem)>\n", - " SlidingWindow(this IEnumerable source, T emptyValue = default)\n", - "{\n", - " using (var iter = source.GetEnumerator())\n", - " {\n", - " if (!iter.MoveNext())\n", - " yield break;\n", - " var prevItem = emptyValue;\n", - " var currentItem = iter.Current;\n", - " while (iter.MoveNext())\n", - " {\n", - " var nextItem = iter.Current;\n", - " yield return (prevItem, currentItem, nextItem);\n", - " prevItem = currentItem;\n", - " currentItem = nextItem;\n", - " }\n", - " yield return (prevItem, currentItem, emptyValue);\n", - " }\n", - "}\n", - "\n", - "// overkill for what is needed now but leftover\n", - "\n", - "public struct CircularListAccess : IReadOnlyList\n", - "{\n", - " private IList _list;\n", - " private int _start;\n", - " private int _length;\n", - "\n", - " public CircularListAccess(IList list, int start, int length)\n", - " {\n", - " if (list == null) throw new ArgumentException(\"list\");\n", - " if (start < 0 || start >= list.Count) throw new ArgumentException(\"start\");\n", - " if (length < 0 || length > list.Count) throw new ArgumentException(\"length\");\n", - "\n", - " _list = list;\n", - " _start = start;\n", - " _length = length;\n", - " }\n", - "\n", - " public T this[int index]\n", - " {\n", - " get\n", - " {\n", - " if (index >= _length) throw new IndexOutOfRangeException();\n", - " return _list[(_start + 
index) % _list.Count];\n", - " }\n", - " }\n", - "\n", - " public int Count => _length;\n", - "\n", - " public struct Enumerator : IEnumerator\n", - " {\n", - " private CircularListAccess _list;\n", - " private int _index;\n", - " private T _current;\n", - "\n", - " public Enumerator(CircularListAccess list)\n", - " {\n", - " _list = list;\n", - " _index = 0;\n", - " _current = default;\n", - " }\n", - " public T Current => _current;\n", - " object IEnumerator.Current => Current;\n", - " public bool MoveNext()\n", - " {\n", - " int count = _list.Count;\n", - " if (_index < count)\n", - " {\n", - " _current = _list[_index++];\n", - " return true;\n", - " }\n", - " else\n", - " {\n", - " _current = default;\n", - " return false;\n", - " }\n", - " }\n", - " public void Reset() { _index = 0; _current = default; }\n", - " public void Dispose() {}\n", - " }\n", - "\n", - " public IEnumerator GetEnumerator() => new Enumerator(this);\n", - " IEnumerator IEnumerable.GetEnumerator() => new Enumerator(this);\n", - "}\n", - "\n", - "public static IEnumerable>\n", - " SlidingRange(this List source, int size)\n", - "{\n", - " for (int i = 0; i <= source.Count - size; ++i)\n", - " {\n", - " // don't actually need CircularListAccess - was from an earlier idea\n", - " yield return new CircularListAccess(source, i, size);\n", - " }\n", - "}\n", - "\n", - "public class ColorProvider\n", - "{\n", - " // Families of gradients\n", - " // 80 00 00 -> ff 00 00 -> ff 80 80 (3)\n", - " // 80 80 00 -> ff ff 00 -> ff ff 80 (3)\n", - " // 80 40 00 -> ff 80 00 -> ff c0 80 (6)\n", - " // 40 40 40 -> 80 80 80 -> c0 c0 c0 (1)\n", - " // 80 2A 00 -> ff 55 00 -> ff aa 80 (6)\n", - " // 80 55 00 -> ff aa 00 -> ff d4 80 (6)\n", - " enum Scale\n", - " {\n", - " Zero,\n", - " Full,\n", - " Half,\n", - " OneThird,\n", - " TwoThird,\n", - " }\n", - " \n", - " static (int first, int mid, int last) GetScale(Scale scale)\n", - " => scale switch\n", - " {\n", - " Scale.Zero => (0, 0, 0x80),\n", - " Scale.Full => (0x80, 0xFF, 0xFF),\n", - " Scale.Half => (0x40, 0x80, 0xC0),\n", - " Scale.OneThird => (0x2A, 0x55, 0xAA),\n", - " Scale.TwoThird => (0x55, 0xAA, 0xD4),\n", - " _ => throw new Exception(\"Unknown Scale\")\n", - " };\n", - "\n", - " public record RGB(int R, int G, int B);\n", - " record ScaleRGB(Scale R, Scale G, Scale B);\n", - "\n", - " static ScaleRGB[] _colorFamilies =\n", - " {\n", - " new ScaleRGB(Scale.Full, Scale.Zero, Scale.Zero),\n", - " new ScaleRGB(Scale.Zero, Scale.Full, Scale.Zero),\n", - " new ScaleRGB(Scale.Zero, Scale.Zero, Scale.Full),\n", - "\n", - " new ScaleRGB(Scale.Half, Scale.Half, Scale.Half),\n", - "\n", - " //new ScaleRGB(Scale.Full, Scale.Full, Scale.Zero), // yellow isn't scaling very well\n", - " new ScaleRGB(Scale.Full, Scale.Zero, Scale.Full),\n", - " new ScaleRGB(Scale.Zero, Scale.Full, Scale.Full),\n", - " \n", - " new ScaleRGB(Scale.Full, Scale.Half, Scale.Zero),\n", - " new ScaleRGB(Scale.Zero, Scale.Full, Scale.Half),\n", - " new ScaleRGB(Scale.Half, Scale.Zero, Scale.Full),\n", - "\n", - " new ScaleRGB(Scale.Full, Scale.Zero, Scale.Half),\n", - " new ScaleRGB(Scale.Half, Scale.Full, Scale.Zero),\n", - " new ScaleRGB(Scale.Zero, Scale.Half, Scale.Full),\n", - "\n", - " new ScaleRGB(Scale.Full, Scale.OneThird, Scale.Zero),\n", - " new ScaleRGB(Scale.Zero, Scale.Full, Scale.OneThird),\n", - " new ScaleRGB(Scale.OneThird, Scale.Zero, Scale.Full),\n", - "\n", - " new ScaleRGB(Scale.Full, Scale.Zero, Scale.OneThird),\n", - " new ScaleRGB(Scale.OneThird, Scale.Full, Scale.Zero),\n", - " new 
ScaleRGB(Scale.Zero, Scale.OneThird, Scale.Full),\n", - "\n", - " new ScaleRGB(Scale.Full, Scale.TwoThird, Scale.Zero),\n", - " new ScaleRGB(Scale.Zero, Scale.Full, Scale.TwoThird),\n", - " new ScaleRGB(Scale.TwoThird, Scale.Zero, Scale.Full),\n", - "\n", - " new ScaleRGB(Scale.Full, Scale.Zero, Scale.TwoThird),\n", - " new ScaleRGB(Scale.TwoThird, Scale.Full, Scale.Zero),\n", - " new ScaleRGB(Scale.Zero, Scale.TwoThird, Scale.Full),\n", - " };\n", - "\n", - " int GetComponent(Scale scale, int index, int count)\n", - " {\n", - " int max = count - 1;\n", - " float half = max / 2.0f;\n", - " var scaleValue = GetScale(scale);\n", - " if (max == 0) return scaleValue.first;\n", - " (int baseValue, int topValue, float fraction) =\n", - " (index > half)\n", - " ? (scaleValue.mid, scaleValue.last, (index - half) / half)\n", - " : (scaleValue.first, scaleValue.mid, (index / half));\n", - " return (int)(baseValue + fraction * (topValue - baseValue));\n", - " }\n", - "\n", - " public static Marker GetMarker(RGB rgb) => (rgb != null) ? (new Marker { color = $\"rgb({rgb.R}, {rgb.G}, {rgb.B})\" }) : null;\n", - "\n", - " RGB GetColor(int colorIndex, int groupIndex, int numInBuild)\n", - " {\n", - " if (colorIndex >= _colorFamilies.Length) return null;\n", - "\n", - " var RGB = _colorFamilies[colorIndex];\n", - " var R = GetComponent(RGB.R, groupIndex, numInBuild);\n", - " var G = GetComponent(RGB.G, groupIndex, numInBuild);\n", - " var B = GetComponent(RGB.B, groupIndex, numInBuild);\n", - " return new RGB(R, G, B);\n", - " }\n", - "\n", - " record ColorGroup(int FamilyIndex, int GroupIndex, int GroupSize, Dictionary GroupColorMap)\n", - " {\n", - " public int GroupIndex { get; set; } = GroupIndex;\n", - " }\n", - "\n", - " Dictionary? _groups; // name of build -> (color index, next index in group)\n", - "\n", - " public ColorProvider(Dictionary groups)\n", - " {\n", - " if (groups.Count <= 1) return;\n", - "\n", - " _groups = groups\n", - " .Take(_colorFamilies.Length)\n", - " .Select((kvp, index) => (kvp.Key, new ColorGroup(index, 0, kvp.Value, new())))\n", - " .ToDictionary();\n", - " }\n", - "\n", - " public RGB GetColor(string buildName, string id = null)\n", - " {\n", - " //Console.WriteLine($\"- '{buildName}' '{id}'\");\n", - " if (_groups == null) return null;\n", - " ColorGroup group = _groups[buildName];\n", - " if (group.FamilyIndex >= _colorFamilies.Length) return null;\n", - "\n", - " if ((id != null) && group.GroupColorMap.TryGetValue(id, out RGB color))\n", - " {\n", - " return color;\n", - " }\n", - " //Console.WriteLine($\"--- '{group}'\");\n", - " color = GetColor(group.FamilyIndex, group.GroupIndex++, group.GroupSize);\n", - " //Console.WriteLine($\"----- '{color}'\");\n", - " if (id != null) group.GroupColorMap[id] = color;\n", - " return color;\n", - " }\n", - "\n", - " public void SetMarker(Scatter scatter, string buildName, string id = null)\n", - " {\n", - " Marker marker = GetMarker(GetColor(buildName, id));\n", - " if (marker != null) scatter.marker = marker;\n", - " }\n", - "\n", - " public void DumpColorGroups()\n", - " {\n", - " if (_groups == null)\n", - " {\n", - " Console.WriteLine(\"No groups\");\n", - " return;\n", - " }\n", - " Console.WriteLine($\"Number of groups: {_groups.Count}\");\n", - " foreach (var (name, group) in _groups)\n", - " {\n", - " Console.WriteLine($\" '{name}': {group.FamilyIndex}, {group.GroupIndex}/{group.GroupSize}\");\n", - " }\n", - " }\n", - "}\n", - "\n", - "public class Aggregation\n", - "{\n", - " public Func, double> Func;\n", - " public 
string Title;\n", - " public string UnitOverride;\n", - "\n", - " public Aggregation(Func, double> func, string title, string unitOverride)\n", - " {\n", - " Func = func;\n", - " Title = title;\n", - " UnitOverride = unitOverride;\n", - " }\n", - "\n", - " public static class Funcs\n", - " {\n", - " public static double Min(IEnumerable data) => data.Min();\n", - " public static double Max(IEnumerable data) => data.Max();\n", - "\n", - " public static double Volatility(IEnumerable data)\n", - " {\n", - " var max = data.Max();\n", - " var min = data.Min();\n", - " return Math.Round(((max - min) / min) * 100, 2);\n", - " }\n", - "\n", - " public static double Average(IEnumerable data) => data.Average();\n", - " public static double Range(IEnumerable data) => data.Max() - data.Min();\n", - "\n", - " public static double GeoMean(IEnumerable data)\n", - " {\n", - " double mult = 1;\n", - " int count = 0;\n", - " foreach (double value in data)\n", - " {\n", - " mult *= value;\n", - " count++;\n", - " }\n", - " return Math.Pow(mult, 1.0 / count);\n", - " }\n", - " }\n", - "\n", - " public static Aggregation Min { get; } = new Aggregation(Funcs.Min, \"Min\", null);\n", - " public static Aggregation Max { get; } = new Aggregation(Funcs.Max, \"Max\", null);\n", - " public static Aggregation Volatility { get; } = new Aggregation(Funcs.Volatility, \"Volatility\", \"?\");\n", - " public static Aggregation Average { get; } = new Aggregation(Funcs.Average, \"Average\", null);\n", - " public static Aggregation Range { get; } = new Aggregation(Funcs.Range, \"Range\", null);\n", - " public static Aggregation GeoMean { get; } = new Aggregation(Funcs.GeoMean, \"GeoMean\", null);\n", - "}\n", - "\n", - "public class BaseMetric\n", - "{\n", - " protected Func ExtractFunc;\n", - " public string Title;\n", - "\n", - " public BaseMetric(Func extract, string title)\n", - " {\n", - " ExtractFunc = extract;\n", - " Title = title;\n", - " }\n", - "\n", - " public TValue? DoExtract(TSource gc)\n", - " {\n", - " TValue? value;\n", - " try\n", - " {\n", - " value = ExtractFunc(gc);\n", - " }\n", - " catch (Exception e)\n", - " {\n", - " //Console.WriteLine($\"Exception processing {Title}\");\n", - " //Console.WriteLine($\" {e}\");\n", - " value = default;\n", - " }\n", - " return value;\n", - " }\n", - "}\n", - "\n", - "public class Metric : BaseMetric\n", - "{\n", - " public string Unit;\n", - " public double? Cap;\n", - " private int _capExceededCount;\n", - " private double _capExceededMin;\n", - " private double _capExceededMax;\n", - " public double? AxisCountOffset;\n", - "\n", - " public Metric(Func extract, string title, string unit, double? cap = null, double? axisCountOffset = null)\n", - " : base((s => extract(s)), title)\n", - " {\n", - " Unit = unit;\n", - " Cap = cap;\n", - " AxisCountOffset = axisCountOffset;\n", - " }\n", - "\n", - " public Metric(Func extract, string title, string unit, double? cap = null, double? axisCountOffset = null)\n", - " : base(extract, title)\n", - " {\n", - " Unit = unit;\n", - " Cap = cap;\n", - " AxisCountOffset = axisCountOffset;\n", - " }\n", - "\n", - " public double? DoExtract(TSource gc, int count)\n", - " {\n", - " double? 
value = base.DoExtract(gc);\n", - " if (value.HasValue)\n", - " {\n", - " if (value > Cap)\n", - " {\n", - " _capExceededCount++;\n", - " _capExceededMin = Math.Min(_capExceededMin, value.Value);\n", - " _capExceededMax = Math.Max(_capExceededMax, value.Value);\n", - " value = Cap;\n", - " }\n", - " if (AxisCountOffset.HasValue) value += AxisCountOffset * count;\n", - " }\n", - " return value;\n", - " }\n", - "\n", - " private Metric Copy() => new(ExtractFunc, Title, Unit, Cap);\n", - " public Metric WithCap(double cap) => new(ExtractFunc, Title, Unit, cap, AxisCountOffset);\n", - " public Metric WithOffset(double offset) => new(ExtractFunc, Title, Unit, Cap, offset);\n", - "\n", - " public void ResetDiagnostics()\n", - " {\n", - " _capExceededCount = 0;\n", - " _capExceededMin = double.MaxValue;\n", - " _capExceededMax = double.MinValue;\n", - " }\n", - "\n", - " public void DisplayDiagnostics(string context)\n", - " {\n", - " if (_capExceededCount > 0)\n", - " {\n", - " Console.WriteLine($\"Cap ({Cap.Value}) exceeded {_capExceededCount} times (min={_capExceededMin:N2}, max={_capExceededMax:N2}) for {context}\");\n", - " }\n", - " }\n", - "\n", - " public static Metric Promote(Metric metric, Func> oldExtract, Aggregation aggregation)\n", - " => new(extract: source => aggregation.Func(oldExtract(source).Select(metric.ExtractFunc).Where(NotNull).Select(value => value.Value)),\n", - " title: $\"{aggregation.Title} of {metric.Title}\",\n", - " unit: aggregation.UnitOverride ?? metric.Unit);\n", - "}\n", - "\n", - "public static class Metrics\n", - "{\n", - " public static Metric Promote(Metric metric, Aggregation aggregation)\n", - " => Metric.Promote(metric, iterationData => iterationData.GCProcessData.GCs, aggregation);\n", - " public static Metric Promote(Metric metric, Aggregation aggregation)\n", - " => Metric.Promote(metric, benchmarkData => benchmarkData.Iterations, aggregation);\n", - " public static Metric Promote(Metric metric, Aggregation aggregation)\n", - " => Metric.Promote(metric, configData => configData.Benchmarks.Values, aggregation);\n", - " public static Metric Promote(Metric metric, Aggregation aggregation)\n", - " => Metric.Promote(metric, runData => runData.Configs.Values, aggregation);\n", - " public static Metric Promote(Metric metric, Aggregation aggregation)\n", - " => Metric.Promote(metric, data => data.Runs.Values, aggregation);\n", - "\n", - " public static class X\n", - " {\n", - " public static BaseMetric<(string, TraceGC), XValue> GCIndex { get; } = new(pair => new XValue(pair.Item2.Number), \"GC Index\");\n", - " public static BaseMetric<(string, TraceGC), XValue> StartRelativeMSec { get; } = new(pair => new XValue(pair.Item2.StartRelativeMSec), \"GC Start\");\n", - " public static BaseMetric<(string, BenchmarkData), XValue> BenchmarkName { get; } = new(pair => new XValue(pair.Item1), \"Benchmark Name\");\n", - " public static BaseMetric<(string, IterationData), XValue> IterationBenchmarkName { get; } = new(pair => new XValue(pair.Item1), \"Benchmark Name\");\n", - " }\n", - "\n", - " public static class G\n", - " {\n", - " public static Metric AllocedSinceLastGCMB = new(gc => gc.AllocedSinceLastGCMB, title: \"Allocated\", unit: \"MB\");\n", - " // AllocRateMBSec is MB/s but this puts it on same y-axis as plain MB\n", - " public static Metric AllocRateMBSec = new(gc => gc.AllocRateMBSec, title: \"Allocation rate\", unit: \"MB\");\n", - " public static Metric CommittedAfterTotalBookkeeping = new(gc => gc.CommittedUsageAfter.TotalBookkeepingCommitted, title: 
\"Committed Book (after)\", unit: \"MB\");\n", - " public static Metric CommittedAfterInFree = new(gc => gc.CommittedUsageAfter.TotalCommittedInFree, title: \"Committed In Free (after)\", unit: \"MB\");\n", - " public static Metric CommittedAfterInGlobalDecommit = new(gc => gc.CommittedUsageAfter.TotalCommittedInGlobalDecommit, title: \"Committed In Global Decommit (after)\", unit: \"MB\");\n", - " public static Metric CommittedAfterInGlobalFree = new(gc => gc.CommittedUsageAfter.TotalCommittedInGlobalFree, title: \"Committed In Global Free (after)\", unit: \"MB\");\n", - " public static Metric CommittedAfterInUse = new(gc => gc.CommittedUsageAfter.TotalCommittedInUse, title: \"Committed In Use (after)\", unit: \"MB\");\n", - " public static List> CommittedAfterMetrics = ML(CommittedAfterTotalBookkeeping, CommittedAfterInFree, CommittedAfterInGlobalDecommit, CommittedAfterInGlobalFree, CommittedAfterInUse);\n", - " public static Metric CommittedBeforeTotalBookkeeping = new(gc => gc.CommittedUsageBefore.TotalBookkeepingCommitted, title: \"Committed Book (before)\", unit: \"MB\");\n", - " public static Metric CommittedBeforeInFree = new(gc => gc.CommittedUsageBefore.TotalCommittedInFree, title: \"Committed In Free (before)\", unit: \"MB\");\n", - " public static Metric CommittedBeforeInGlobalDecommit = new(gc => gc.CommittedUsageBefore.TotalCommittedInGlobalDecommit, title: \"Committed In Global Decommit (before)\", unit: \"MB\");\n", - " public static Metric CommittedBeforeInGlobalFree = new(gc => gc.CommittedUsageBefore.TotalCommittedInGlobalFree, title: \"Committed In Global Free (before)\", unit: \"MB\");\n", - " public static Metric CommittedBeforeInUse = new(gc => gc.CommittedUsageBefore.TotalCommittedInUse, title: \"Committed In Use (before)\", unit: \"MB\");\n", - " public static List> CommittedBeforeMetrics = ML(CommittedBeforeTotalBookkeeping, CommittedBeforeInFree, CommittedBeforeInGlobalDecommit, CommittedBeforeInGlobalFree, CommittedBeforeInUse);\n", - " public static Metric DurationMSec = new(gc => gc.DurationMSec, \"Duration\", \"ms\");\n", - " public static Metric GCCpuMSec = new(gc => gc.GCCpuMSec, \"GC CPU\", \"ms\");\n", - " public static Metric Gen0Budget = new(gc => gc.GenBudgetMB(Gens.Gen0), \"Gen0 budget\", \"MB\");\n", - " public static Metric Gen1Budget = new(gc => gc.GenBudgetMB(Gens.Gen1), \"Gen1 budget\", \"MB\");\n", - " public static Metric Gen2Budget = new(gc => gc.GenBudgetMB(Gens.Gen2), \"Gen2 budget\", \"MB\");\n", - " public static Metric GenLargeBudget = new(gc => gc.GenBudgetMB(Gens.GenLargeObj), \"GenLarge budget\", \"MB\");\n", - " public static Metric GenPinBudget = new(gc => gc.GenBudgetMB(Gens.GenPinObj), \"GenPin budget\", \"MB\");\n", - " public static Metric Generation = new(gc => gc.Generation, \"Generation\", \"gen\");\n", - " public static Metric Gen0Fragmentation = new(gc => gc.GenFragmentationMB(Gens.Gen0), \"Gen0 fragmentation\", \"MB\");\n", - " public static Metric Gen1Fragmentation = new(gc => gc.GenFragmentationMB(Gens.Gen1), \"Gen1 fragmentation\", \"MB\");\n", - " public static Metric Gen2Fragmentation = new(gc => gc.GenFragmentationMB(Gens.Gen2), \"Gen2 fragmentation\", \"MB\");\n", - " public static Metric GenLargeFragmentation = new(gc => gc.GenFragmentationMB(Gens.GenLargeObj), \"GenLarge fragmentation\", \"MB\");\n", - " public static Metric GenPinFragmentation = new(gc => gc.GenFragmentationMB(Gens.GenPinObj), \"GenPin fragmentation\", \"MB\");\n", - " public static Metric Gen0FragmentationPercent = new(gc => 
gc.GenFragmentationPercent(Gens.Gen0), \"Gen0 fragmentation %\", \"%\");\n", - " public static Metric Gen1FragmentationPercent = new(gc => gc.GenFragmentationPercent(Gens.Gen1), \"Gen1 fragmentation %\", \"%\");\n", - " public static Metric Gen2FragmentationPercent = new(gc => gc.GenFragmentationPercent(Gens.Gen2), \"Gen2 fragmentation %\", \"%\");\n", - " public static Metric GenLargeFragmentationPercent = new(gc => gc.GenFragmentationPercent(Gens.GenLargeObj), \"GenLarge fragmentation %\", \"%\");\n", - " public static Metric GenPinFragmentationPercent = new(gc => gc.GenFragmentationPercent(Gens.GenPinObj), \"GenPin fragmentation %\", \"%\");\n", - " public static Metric Gen0In = new(gc => gc.GenInMB(Gens.Gen0), \"Gen0 Memory (in)\", \"MB\");\n", - " public static Metric Gen1In = new(gc => gc.GenInMB(Gens.Gen1), \"Gen1 Memory (in)\", \"MB\");\n", - " public static Metric Gen2In = new(gc => gc.GenInMB(Gens.Gen2), \"Gen2 Memory (in)\", \"MB\");\n", - " public static Metric GenLargeIn = new(gc => gc.GenInMB(Gens.GenLargeObj), \"GenLarge Memory (in)\", \"MB\");\n", - " public static Metric GenPinIn = new(gc => gc.GenInMB(Gens.GenPinObj), \"GenPin Memory (in)\", \"MB\");\n", - " public static Metric Gen0ObjSizeAfter = new(gc => gc.GenObjSizeAfterMB(Gens.Gen0), \"Gen0 object size (after)\", \"MB\");\n", - " public static Metric Gen1ObjSizeAfter = new(gc => gc.GenObjSizeAfterMB(Gens.Gen1), \"Gen1 object size (after)\", \"MB\");\n", - " public static Metric Gen2ObjSizeAfter = new(gc => gc.GenObjSizeAfterMB(Gens.Gen2), \"Gen2 object size (after)\", \"MB\");\n", - " public static Metric GenLargeObjSizeAfter = new(gc => gc.GenObjSizeAfterMB(Gens.GenLargeObj), \"GenLarge object size (after)\", \"MB\");\n", - " public static Metric GenPinObjSizeAfter = new(gc => gc.GenObjSizeAfterMB(Gens.GenPinObj), \"GenPin object size (after)\", \"MB\");\n", - " public static Metric Gen0Out = new(gc => gc.GenOutMB(Gens.Gen0), \"Gen0 Memory (out)\", \"MB\");\n", - " public static Metric Gen1Out = new(gc => gc.GenOutMB(Gens.Gen1), \"Gen1 Memory (out)\", \"MB\");\n", - " public static Metric Gen2Out = new(gc => gc.GenOutMB(Gens.Gen2), \"Gen2 Memory (out)\", \"MB\");\n", - " public static Metric GenLargeOut = new(gc => gc.GenOutMB(Gens.GenLargeObj), \"GenLarge Memory (out)\", \"MB\");\n", - " public static Metric GenPinOut = new(gc => gc.GenOutMB(Gens.GenPinObj), \"GenPin Memory (out)\", \"MB\");\n", - " public static Metric Gen0Promoted = new(gc => gc.GenPromotedMB(Gens.Gen0), \"Gen0 Promoted\", \"MB\");\n", - " public static Metric Gen1Promoted = new(gc => gc.GenPromotedMB(Gens.Gen1), \"Gen1 Promoted\", \"MB\");\n", - " public static Metric Gen2Promoted = new(gc => gc.GenPromotedMB(Gens.Gen2), \"Gen2 Promoted\", \"MB\");\n", - " public static Metric GenLargePromoted = new(gc => gc.GenPromotedMB(Gens.GenLargeObj), \"GenLarge Promoted\", \"MB\");\n", - " public static Metric GenPinPromoted = new(gc => gc.GenPromotedMB(Gens.GenPinObj), \"GenPin Promoted\", \"MB\");\n", - " public static Metric Gen0SizeAfter = new(gc => gc.GenSizeAfterMB(Gens.Gen0), \"Gen0 size (after)\", \"MB\");\n", - " public static Metric Gen1SizeAfter = new(gc => gc.GenSizeAfterMB(Gens.Gen1), \"Gen1 size (after)\", \"MB\");\n", - " public static Metric Gen2SizeAfter = new(gc => gc.GenSizeAfterMB(Gens.Gen2), \"Gen2 size (after)\", \"MB\");\n", - " public static Metric GenLargeSizeAfter = new(gc => gc.GenSizeAfterMB(Gens.GenLargeObj), \"GenLarge size (after)\", \"MB\");\n", - " public static Metric GenPinSizeAfter = new(gc => 
gc.GenSizeAfterMB(Gens.GenPinObj), \"GenPin size (after)\", \"MB\");\n", - " public static Metric Gen0SizeBefore = new(gc => gc.GenSizeBeforeMB[(int) Gens.Gen0], \"Gen0 size (before)\", \"MB\");\n", - " public static Metric Gen1SizeBefore = new(gc => gc.GenSizeBeforeMB[(int) Gens.Gen1], \"Gen1 size (before)\", \"MB\");\n", - " public static Metric Gen2SizeBefore = new(gc => gc.GenSizeBeforeMB[(int) Gens.Gen2], \"Gen2 size (before)\", \"MB\");\n", - " public static Metric GenLargeSizeBefore = new(gc => gc.GenSizeBeforeMB[(int) Gens.GenLargeObj], \"GenLarge size (before)\", \"MB\");\n", - " public static Metric GenPinSizeBefore = new(gc => gc.GenSizeBeforeMB[(int) Gens.GenPinObj], \"GenPin size (before)\", \"MB\");\n", - " //public static Metric Condemned = new(gc => gc.GetCondemnedReasons());\n", - "\n", - " // TODO: GlobalHeapHistory.*\n", - " //public static Metric Ghh = new(gc => gc.GlobalHeapHistory., \"\", \"\");\n", - " public static Metric IsConcurrent = new (gc => Convert.ToDouble((gc.GlobalHeapHistory.GlobalMechanisms & GCGlobalMechanisms.Concurrent) != 0), \"Is concurrent\", \"Y/N\");\n", - " public static Metric IsCompaction = new (gc => Convert.ToDouble((gc.GlobalHeapHistory.GlobalMechanisms & GCGlobalMechanisms.Compaction) != 0), \"Is compaction\", \"Y/N\");\n", - " public static Metric IsPromotion = new (gc => Convert.ToDouble((gc.GlobalHeapHistory.GlobalMechanisms & GCGlobalMechanisms.Promotion) != 0), \"Is promotion\", \"Y/N\");\n", - " public static Metric IsDemotion = new (gc => Convert.ToDouble((gc.GlobalHeapHistory.GlobalMechanisms & GCGlobalMechanisms.Demotion) != 0), \"Is demotion\", \"Y/N\");\n", - " public static Metric IsCardBundles = new (gc => Convert.ToDouble((gc.GlobalHeapHistory.GlobalMechanisms & GCGlobalMechanisms.CardBundles) != 0), \"Is cardbundles\", \"Y/N\");\n", - " public static Metric NumHeaps = new((gc => gc.GlobalHeapHistory.NumHeaps), \"GC Heaps\", \"#\");\n", - " public static Metric NumHeapsWithOffset = NumHeaps.WithOffset(0.05);\n", - "\n", - " public static Metric HeapCount = new(gc => gc.HeapCount, \"Heap count\", \"#\");\n", - "\n", - " // HeapCountSample\n", - " public static Metric HcsElapsedTimeBetweenGCs = new(gc => gc.HeapCountSample.ElapsedTimeBetweenGCsMSec, \"HCSampleElapsed\", \"ms\");\n", - " public static Metric HcsGCIndex = new(gc => gc.HeapCountSample.GCIndex, \"HCSampleGCIndex\", \"#\");\n", - " public static Metric HcsGCPauseTime = new(gc => gc.HeapCountSample.GCPauseTimeMSec, \"HCSampleGCPause\", \"ms\");\n", - " public static Metric HcsMslWaitTime = new(gc => gc.HeapCountSample.MslWaitTimeMSec, \"HCSampleGCMslWait\", \"ms\");\n", - "\n", - " // HeapCountTuning\n", - " public static Metric HctGCIndex = new(gc => gc.HeapCountTuning?.GCIndex, \"HCTuningGCIndex\", \"#\");\n", - " public static Metric HctMtcp = new((gc => gc.HeapCountTuning?.MedianThroughputCostPercent), \"Median TCP\", \"%\");\n", - " public static Metric HctMtcpCap15 = HctMtcp.WithCap(15);\n", - " public static Metric HctNewHeapCount = new(gc => gc.HeapCountTuning?.NewHeapCount, \"HCTuningNewHeapCount\", \"#\");\n", - " public static Metric HctSmtcp = new(gc => gc.HeapCountTuning?.SmoothedMedianThroughputCostPercent, \"Smoothed MTCP\", \"%\");\n", - " public static Metric HctSpaceCostDown = new(gc => gc.HeapCountTuning?.SpaceCostPercentDecreasePerStepDown, \"Space cost (down)\", \"%\");\n", - " public static Metric HctSpaceCostUp = new(gc => gc.HeapCountTuning?.SpaceCostPercentIncreasePerStepUp, \"Space cost (up)TCP\", \"%\");\n", - " public static Metric 
HctTPCostDown = new(gc => gc.HeapCountTuning?.ThroughputCostPercentIncreasePerStepDown, \"TP cost (down)\", \"%\");\n", - " public static Metric HctTPCostUp = new(gc => gc.HeapCountTuning?.ThroughputCostPercentReductionPerStepUp, \"TP cost (up)\", \"%\");\n", - "\n", - " public static Metric HeapSizeAfter = new(gc => gc.HeapSizeAfterMB, \"Heap size (after)\", \"MB\");\n", - " public static Metric HeapSizeBefore = new(gc => gc.HeapSizeBeforeMB, \"Heap size (before)\", \"MB\");\n", - " public static Metric HeapSizePeak = new(gc => gc.HeapSizePeakMB, \"Heap size (peak)\", \"MB\");\n", - " \n", - " // TODO: HeapStats.*\n", - " //public static Metric Hs = new(gc => gc.HeapStats., \"\", \"\");\n", - "\n", - " // TODO: Remaining are less comprehensive\n", - " public static Metric PauseDuration = new((gc => gc.PauseDurationMSec), \"GC pause\", \"ms\");\n", - " public static Metric PausePercent = new((gc => gc.PauseTimePercentageSinceLastGC), \"GC pause %\", \"%\");\n", - " public static Metric EndOfSegAllocated = new(gc => gc.PerHeapHistories.Sum(p => p.EndOfSegAllocated), title: \"EndOfSegAllocated\", unit: \"?\");\n", - " public static Metric PauseStack = new(gc => gc.PerHeapMarkTimes.Values.Select(mi => mi.MarkTimes[(int) MarkRootType.MarkStack]).Sum(), \"Pause (stack)\", \"ms\");\n", - " public static Metric PauseFQ = new(gc => gc.PerHeapMarkTimes.Values.Select(mi => mi.MarkTimes[(int) MarkRootType.MarkFQ]).Sum(), \"Pause (FQ)\", \"ms\");\n", - " public static Metric PauseHandles = new(gc => gc.PerHeapMarkTimes.Values.Select(mi => mi.MarkTimes[(int) MarkRootType.MarkHandles]).Sum(), \"Pause (handles)\", \"ms\");\n", - " public static Metric PauseCards = new(gc => gc.PerHeapMarkTimes.Values.Select(mi => mi.MarkTimes[(int) MarkRootType.MarkOlder]).Sum(), \"Pause (cards)\", \"ms\");\n", - " public static Metric ObjectSpaceStack = new(gc => gc.PerHeapMarkTimes.Values.Select(mi => mi.MarkPromoted[(int) MarkRootType.MarkStack]).Sum(), \"Obj space (stack)\", \"bytes\");\n", - " public static Metric ObjectSpaceFQ = new(gc => gc.PerHeapMarkTimes.Values.Select(mi => mi.MarkPromoted[(int) MarkRootType.MarkFQ]).Sum(), \"Obj space (FQ)\", \"bytes\");\n", - " public static Metric ObjectSpaceHandles = new(gc => gc.PerHeapMarkTimes.Values.Select(mi => mi.MarkPromoted[(int) MarkRootType.MarkHandles]).Sum(), \"Obj space (handles)\", \"bytes\");\n", - " public static Metric ObjectSpaceCards = new(gc => gc.PerHeapMarkTimes.Values.Select(mi => mi.MarkPromoted[(int) MarkRootType.MarkOlder]).Sum(), \"Obj space (cards)\", \"bytes\");\n", - " public static Metric Suspend = new(gc => gc.SuspendDurationMSec, \"Suspend\", \"ms\");\n", - " public static Metric UserAllocated = new(gc => gc.UserAllocated.Sum(), \"UserAllocated\", \"bytes\");\n", - " }\n", - "\n", - " public static class I\n", - " {\n", - " public static Metric MaxNumHeaps = Promote(Metrics.G.NumHeaps, Aggregation.Max);\n", - " public static Metric MaxPauseDuration = Promote(Metrics.G.PauseDuration, Aggregation.Max);\n", - "\n", - " public static Metric TotalSuspensionTime = new (iterationData => iterationData.GCSummaryInfo.TotalSuspensionTimeMSec, \"Total suspension time\", \"ms\");\n", - " public static Metric PercentPauseTimeInGC = new (iterationData => iterationData.GCSummaryInfo.PercentPauseTimeInGC, \"% pause GC\", \"%\");\n", - " public static Metric PercentTimeInGC = new (iterationData => iterationData.GCSummaryInfo.PercentTimeInGC, \"% GC\", \"%\");\n", - " public static Metric MeanHeapSizeBeforeMB = new (iterationData => 
iterationData.GCSummaryInfo.MeanHeapSizeBeforeMB, \"Mean heap size (before)\", \"MB\");\n", - "        public static Metric MaxHeapSizeMB = new (iterationData => iterationData.GCSummaryInfo.MaxHeapSizeMB, \"Max heap size\", \"MB\");\n", - "        public static Metric TotalAllocationsMB = new (iterationData => iterationData.GCSummaryInfo.TotalAllocationsMB, \"Total allocations\", \"MB\");\n", - "        public static Metric GCScore = new (iterationData => iterationData.GCSummaryInfo.GCScore, \"GC score\", \"score\"); // MB * %\n", - "\n", - "        public static Metric MaxHeapCount = new (iterationData => iterationData.GCSummaryInfo.MaxHeapCount, \"Max heap count\", \"#\");\n", - "        public static Metric NumberOfHeapCountSwitches = new (iterationData => iterationData.GCSummaryInfo.NumberOfHeapCountSwitches, \"# hc changes\", \"#\");\n", - "        public static Metric NumberOfHeapCountDirectionChanges = new (iterationData => iterationData.GCSummaryInfo.NumberOfHeapCountDirectionChanges, \"# hc dir changes\", \"#\");\n", - "\n", - "        public static Metric MaxWorkingSetMB = new (iterationData => iterationData.LoadInfo.MaxWorkingSetMB, \"Max working set\", \"MB\");\n", - "        public static Metric P99WorkingSetMB = new (iterationData => iterationData.LoadInfo.P99WorkingSetMB, \"P99 working set\", \"MB\");\n", - "        public static Metric P95WorkingSetMB = new (iterationData => iterationData.LoadInfo.P95WorkingSetMB, \"P95 working set\", \"MB\");\n", - "        public static Metric P90WorkingSetMB = new (iterationData => iterationData.LoadInfo.P90WorkingSetMB, \"P90 working set\", \"MB\");\n", - "        public static Metric P75WorkingSetMB = new (iterationData => iterationData.LoadInfo.P75WorkingSetMB, \"P75 working set\", \"MB\");\n", - "        public static Metric P50WorkingSetMB = new (iterationData => iterationData.LoadInfo.P50WorkingSetMB, \"P50 working set\", \"MB\");\n", - "        public static List> WorkingSetMBList = ML(MaxWorkingSetMB, P99WorkingSetMB, P95WorkingSetMB, P90WorkingSetMB, P75WorkingSetMB, P50WorkingSetMB);\n", - "\n", - "        public static Metric MaxPrivateMemoryMB = new (iterationData => iterationData.LoadInfo.MaxPrivateMemoryMB, \"Max private memory\", \"MB\");\n", - "        public static Metric P99PrivateMemoryMB = new (iterationData => iterationData.LoadInfo.P99PrivateMemoryMB, \"P99 private memory\", \"MB\");\n", - "        public static Metric P95PrivateMemoryMB = new (iterationData => iterationData.LoadInfo.P95PrivateMemoryMB, \"P95 private memory\", \"MB\");\n", - "        public static Metric P90PrivateMemoryMB = new (iterationData => iterationData.LoadInfo.P90PrivateMemoryMB, \"P90 private memory\", \"MB\");\n", - "        public static Metric P75PrivateMemoryMB = new (iterationData => iterationData.LoadInfo.P75PrivateMemoryMB, \"P75 private memory\", \"MB\");\n", - "        public static Metric P50PrivateMemoryMB = new (iterationData => iterationData.LoadInfo.P50PrivateMemoryMB, \"P50 private memory\", \"MB\");\n", - "        public static List> PrivateMemoryMBList = ML(MaxPrivateMemoryMB, P99PrivateMemoryMB, P95PrivateMemoryMB, P90PrivateMemoryMB, P75PrivateMemoryMB, P50PrivateMemoryMB);\n", - "\n", - "        public static Metric RequestsPerMSec = new (iterationData => iterationData.LoadInfo.RequestsPerMSec, \"RPS\", \"RPS\");\n", - "        public static Metric MeanLatencyMS = new (iterationData => iterationData.LoadInfo.MeanLatencyMS, \"Mean latency\", \"ms\");\n", - "        public static Metric Latency99thMS = new (iterationData => iterationData.LoadInfo.Latency99thMS, \"Latency 99th\", \"ms\");\n", - "        public static Metric Latency90thMS = new (iterationData => 
iterationData.LoadInfo.Latency90thMS, \"Latency 90th\", \"ms\");\n", - " public static Metric Latency75thMS = new (iterationData => iterationData.LoadInfo.Latency75thMS, \"Latency 75th\", \"ms\");\n", - " public static Metric Latency50thMS = new (iterationData => iterationData.LoadInfo.Latency50thMS, \"Latency 50th\", \"ms\");\n", - " public static List> LatencyMSList = ML(MeanLatencyMS, Latency99thMS, Latency90thMS, Latency75thMS, Latency50thMS);\n", - " }\n", - "\n", - " public static class B\n", - " {\n", - " public static Metric MaxHeapCount = Promote(Metrics.I.MaxHeapCount, Aggregation.Max);\n", - " public static Metric MaxPauseDurationBenchmark = Promote(Metrics.I.MaxPauseDuration, Aggregation.Max);\n", - " public static Metric MaxPercentPauseTimeInGC = Promote(Metrics.I.PercentPauseTimeInGC, Aggregation.Max);\n", - " public static Metric AveragePercentPauseTimeInGC = Promote(Metrics.I.PercentPauseTimeInGC, Aggregation.Average);\n", - " }\n", - "\n", - "}\n", - "\n", - "// Exploratory\n", - "public abstract class NameSimplifier\n", - "{\n", - " public abstract (string title, Dictionary) Simplify(List names);\n", - "\n", - " public static PrefixSimplifier PrefixDashed { get; } = new PrefixSimplifier('-');\n", - "}\n", - "\n", - "public class ListSimplifier : NameSimplifier\n", - "{\n", - " private Dictionary _nameMap;\n", - "\n", - " public ListSimplifier(params (string inData, string toDisplay)[] names)\n", - " : this((IEnumerable<(string, string)>) names) {}\n", - "\n", - " public ListSimplifier(IEnumerable<(string inData, string toDisplay)> names)\n", - " => _nameMap = names.ToDictionary();\n", - "\n", - " public override (string title, Dictionary) Simplify(List names) => (null, _nameMap);\n", - "}\n", - "\n", - "public class PrefixSimplifier : NameSimplifier\n", - "{\n", - " private char _delimiter;\n", - " private string _emptyResult;\n", - "\n", - " public PrefixSimplifier(char delimiter, string emptyResult = \"<>\")\n", - " {\n", - " _delimiter = delimiter;\n", - " _emptyResult = emptyResult;\n", - " }\n", - "\n", - " public override (string title, Dictionary) Simplify(List names)\n", - " {\n", - " if (names.Count == 0) return (null, null);\n", - " List namesToScan = names;\n", - " int longestMatch = namesToScan.Select(n => n.Length).Min();\n", - " bool allContinueWithDelimiter = namesToScan.All(n => (n.Length == longestMatch) || (n[longestMatch] == _delimiter));\n", - " if (allContinueWithDelimiter)\n", - " {\n", - " namesToScan = namesToScan.Select(n => ((allContinueWithDelimiter && (n.Length == longestMatch)) ? (n + _delimiter) : n)).ToList();\n", - " longestMatch++;\n", - " }\n", - " foreach (string name in namesToScan)\n", - " {\n", - " int overlap = name.TakeWhile((ch, i) => (i < longestMatch) && (ch == namesToScan[0][i])).Count();\n", - " longestMatch = (overlap == 0) ? 0 : name.LastIndexOf(_delimiter, overlap - 1) + 1;\n", - " if (longestMatch == 0) break;\n", - " }\n", - " if (longestMatch > 0)\n", - " {\n", - " return (\n", - " names[0].Substring(0, longestMatch - 1),\n", - " names.Select(config => (config, (longestMatch >= config.Length) ? _emptyResult : config.Substring(longestMatch)))\n", - " .ToDictionary()\n", - " );\n", - " }\n", - " return (null, null);\n", - " }\n", - "}\n", - "\n", - "// Some will be null depending on the chart type\n", - "record SeriesInfo(Metric Metric, string Run, string Config, ConfigData ConfigData, string Benchmark, int? 
Iteration, IterationData IterationData);\n", - "\n", - "abstract class ChartType\n", - "{\n", - " public abstract BaseMetric<(string, TData), XValue> DefaultXMetric { get; }\n", - " public abstract string DefaultBenchmarkMap(string benchmark);\n", - "\n", - " public abstract IEnumerable> GetSeries(DataManager dataManager, List> metrics, Filter runFilter, Filter configFilter,\n", - " Filter benchmarkFilter, IntFilter iterationFilter, ConfigIterationFilter configIterationFilter, IEnumerable benchmarkList);\n", - " public abstract string GetColorFamilyKey(SeriesInfo info, bool multipleMetrics, bool includeRunName, bool multipleConfigs,\n", - " Dictionary configDisplayNames, bool multipleBenchmarks);\n", - " public abstract string GetColorFamilyId(SeriesInfo info, bool multipleMetrics);\n", - " public abstract string GetSeriesTitle(SeriesInfo info, string colorFamilyKey, bool multipleMetrics);\n", - " public abstract string GetChartTitle();\n", - " public abstract List> GetDataSource(SeriesInfo info,\n", - " Filter benchmarkFilter, IntFilter iterationFilter, ConfigIterationFilter configIterationFilter, Func dataFilter);\n", - "}\n", - "\n", - "class BenchmarksChartType : ChartType\n", - "{\n", - " public override BaseMetric<(string, BenchmarkData), XValue> DefaultXMetric { get; } = Metrics.X.BenchmarkName;\n", - " public override string DefaultBenchmarkMap(string benchmark) => \"\";\n", - "\n", - " public override IEnumerable> GetSeries(DataManager dataManager, List> metrics, Filter runFilter, Filter configFilter,\n", - " Filter benchmarkFilter, IntFilter iterationFilter, ConfigIterationFilter configIterationFilter, IEnumerable benchmarkList)\n", - " {\n", - " foreach (var metric in metrics)\n", - " {\n", - " foreach ((string run, string config, ConfigData configData) in dataManager.GetConfigsWithData(runFilter, configFilter))\n", - " {\n", - " if (!configIterationFilter.MightInclude((config))) continue;\n", - "\n", - " // Note - could filter out configs that don't have a relevant benchmark/iteration\n", - " yield return new (metric, run, config, configData, null, null, null);\n", - " }\n", - " }\n", - " }\n", - "\n", - " public override string GetColorFamilyKey(SeriesInfo info, bool multipleMetrics, bool includeRunName, bool multipleConfigs,\n", - " Dictionary configDisplayNames, bool multipleBenchmarks)\n", - " {\n", - " string runDisplay = includeRunName ? $\"{info.Run}, \" : \"\";\n", - " string configDisplay = multipleConfigs ? (configDisplayNames?.GetValueOrDefault(info.Config) ?? info.Config) : \"\";\n", - " string colorFamilyKey = $\"{runDisplay}{configDisplay}\";\n", - " return colorFamilyKey;\n", - " }\n", - "\n", - " public override string GetColorFamilyId(SeriesInfo info, bool multipleMetrics) => multipleMetrics ? 
$\"{info.Metric.Title} / \" : \"\";\n", - " public override string GetSeriesTitle(SeriesInfo info, string colorFamilyKey, bool multipleMetrics) => $\"{GetColorFamilyId(info, multipleMetrics)}{colorFamilyKey}\";\n", - "\n", - " public override string GetChartTitle() => \"Per-benchmark behavior\";\n", - "\n", - " public override List> GetDataSource(SeriesInfo info,\n", - " Filter benchmarkFilter, IntFilter iterationFilter, ConfigIterationFilter configIterationFilter, Func dataFilter)\n", - " => info.ConfigData.Benchmarks\n", - " .Where(benchmark => benchmarkFilter.Include(benchmark.Key)\n", - " && benchmark.Value.Iterations.WithIndex()\n", - " .Any(pair => (pair.Item1 != null)\n", - " && iterationFilter.Include(pair.Item2)\n", - " && configIterationFilter.Include(info.Config, pair.Item2)));\n", - "}\n", - "\n", - "class IterationsChartType : ChartType\n", - "{\n", - " public override BaseMetric<(string, IterationData), XValue> DefaultXMetric { get; } = Metrics.X.IterationBenchmarkName;\n", - " public override string DefaultBenchmarkMap(string benchmark) => \"\";\n", - "\n", - " public override IEnumerable> GetSeries(DataManager dataManager, List> metrics, Filter runFilter, Filter configFilter,\n", - " Filter benchmarkFilter, IntFilter iterationFilter, ConfigIterationFilter configIterationFilter, IEnumerable benchmarkList)\n", - " {\n", - " foreach (var metric in metrics)\n", - " {\n", - " foreach ((string run, string config, ConfigData configData) in dataManager.GetConfigsWithData(runFilter, configFilter))\n", - " {\n", - " foreach (int iteration in configData.GetIterations(config, benchmarkFilter, iterationFilter, configIterationFilter))\n", - " {\n", - " yield return new (metric, run, config, configData, null, iteration, null);\n", - " }\n", - " }\n", - " }\n", - " }\n", - " \n", - " public override string GetColorFamilyKey(SeriesInfo info, bool multipleMetrics, bool includeRunName, bool multipleConfigs,\n", - " Dictionary configDisplayNames, bool multipleBenchmarks)\n", - " {\n", - " string metricDisplay = multipleMetrics ? $\"{info.Metric.Title}, \" : \"\";\n", - " string runDisplay = includeRunName ? $\"{info.Run}, \" : \"\";\n", - " string configDisplay = multipleConfigs ? (configDisplayNames?.GetValueOrDefault(info.Config) ?? 
info.Config) : \"\";\n", - " string colorFamilyKey = $\"{metricDisplay}{runDisplay}{configDisplay}\";\n", - "\n", - " return colorFamilyKey;\n", - " }\n", - "\n", - " public override string GetColorFamilyId(SeriesInfo info, bool multipleMetrics) => $\"_{info.Iteration}\";\n", - " public override string GetSeriesTitle(SeriesInfo info, string colorFamilyKey, bool multipleMetrics) => $\"{colorFamilyKey}{GetColorFamilyId(info, multipleMetrics)}\";\n", - "\n", - " public override string GetChartTitle() => \"Per-iteration behavior\";\n", - "\n", - " public override List> GetDataSource(SeriesInfo info,\n", - " Filter benchmarkFilter, IntFilter iterationFilter, ConfigIterationFilter configIterationFilter, Func dataFilter)\n", - " {\n", - " if (!iterationFilter.Include(info.Iteration.Value)\n", - " || !configIterationFilter.Include(info.Config, info.Iteration.Value))\n", - " {\n", - " throw new Exception(\"IterationsChartType.GetDataSource expected GetSeries to filter iterations\");\n", - " }\n", - "\n", - " return info.ConfigData.Benchmarks\n", - " .Where(benchmark => benchmarkFilter.Include(benchmark.Key))\n", - " .Where(benchmark => info.Iteration < benchmark.Value.Iterations.Count)\n", - " .Select(benchmark => KeyValuePair.Create(benchmark.Key, benchmark.Value.Iterations[info.Iteration.Value]))\n", - " .Where(kvp => kvp.Value != null);\n", - " }\n", - "}\n", - "\n", - "class TraceGCChartType : ChartType\n", - "{\n", - " public override BaseMetric<(string, TraceGC), XValue> DefaultXMetric { get; } = Metrics.X.GCIndex;\n", - " public override string DefaultBenchmarkMap(string benchmark) => benchmark;\n", - " \n", - " public override IEnumerable> GetSeries(DataManager dataManager, List> metrics, Filter runFilter, Filter configFilter,\n", - " Filter benchmarkFilter, IntFilter iterationFilter, ConfigIterationFilter configIterationFilter, IEnumerable benchmarkList)\n", - " {\n", - " foreach (var metric in metrics)\n", - " {\n", - " foreach (string benchmark in benchmarkList)\n", - " {\n", - " foreach ((string run, string config, int iteration, IterationData iterationData) in\n", - " dataManager.GetIterationsForBenchmark(runFilter, configFilter, iterationFilter, configIterationFilter, benchmark))\n", - " {\n", - " yield return new (metric, run, config, null, benchmark, iteration, iterationData);\n", - " }\n", - " }\n", - " }\n", - " }\n", - "\n", - " public override string GetColorFamilyKey(SeriesInfo info, bool multipleMetrics, bool includeRunName, bool multipleConfigs,\n", - " Dictionary configDisplayNames, bool multipleBenchmarks)\n", - " {\n", - " string benchmarkDisplay = multipleBenchmarks ? $\"{info.Benchmark}, \" : \"\";\n", - " string metricDisplay = multipleMetrics ? $\"{info.Metric.Title}, \" : \"\";\n", - " string runDisplay = includeRunName ? $\"{info.Run}, \" : \"\";\n", - " string configDisplay = multipleConfigs ? (configDisplayNames?.GetValueOrDefault(info.Config) ?? 
info.Config) : \"\";\n", - " string colorFamilyKey = $\"{benchmarkDisplay}{metricDisplay}{runDisplay}{configDisplay}\";\n", - "\n", - " return colorFamilyKey;\n", - " }\n", - "\n", - " public override string GetColorFamilyId(SeriesInfo info, bool multipleMetrics) => $\"_{info.Iteration}\";\n", - " public override string GetSeriesTitle(SeriesInfo info, string colorFamilyKey, bool multipleMetrics) => $\"{colorFamilyKey}{GetColorFamilyId(info, multipleMetrics)}\";\n", - "\n", - " public override string GetChartTitle() => \"Per-run behavior\";\n", - "\n", - " public override List> GetDataSource(SeriesInfo info,\n", - " Filter benchmarkFilter, IntFilter iterationFilter, ConfigIterationFilter configIterationFilter, Func dataFilter)\n", - " => info.IterationData.GCProcessData?.GCs.Where(gc => gc.GlobalHeapHistory != null).Where(dataFilter).Select(gc => KeyValuePair.Create(\"\", gc));\n", - "}\n", - "\n", - "public struct XValue : IComparable, IEquatable\n", - "{\n", - " private double _value;\n", - " private string _name;\n", - "\n", - " public XValue(double value) { _value = value; _name = null; }\n", - " public XValue(string name) { _value = 0; _name = name; }\n", - "\n", - " public bool HasValue => _name == null;\n", - " public bool HasName => _name != null;\n", - "\n", - " public double GetValue() => HasValue ? _value : throw new Exception(\"XValue.GetValue on a named value\");\n", - " public string GetName() => HasName ? _name : throw new Exception(\"XValue.GetName on a numerical value\");\n", - "\n", - " public override int GetHashCode() => HasValue ? GetValue().GetHashCode() : GetName().GetHashCode();\n", - " public bool Equals(XValue other) => HasValue ? (other.HasValue && (GetValue() == other.GetValue())) : (other.HasName && (GetName() == other.GetName()));\n", - " public override bool Equals(object other) => other is XValue otherX && Equals(otherX);\n", - "\n", - " public int CompareTo(XValue other)\n", - " => (HasValue && other.HasName) ? 1\n", - " : (HasName && other.HasValue) ? -1\n", - " : HasValue ? GetValue().CompareTo(other.GetValue())\n", - " : GetName().CompareTo(other.GetName());\n", - "\n", - " public override string ToString() => HasValue ? _value.ToString() : _name;\n", - " public string ToString(string format) => HasValue ? _value.ToString(format) : _name;\n", - "}\n", - "\n", - "public abstract class XArrangement\n", - "{\n", - " private string _titleOverride;\n", - "\n", - " public XArrangement(string titleOverride) { _titleOverride = titleOverride; }\n", - "\n", - " public string GetNewTitle(string oldTitle) => _titleOverride ?? oldTitle;\n", - " public abstract List<(XValue x, double? y)> Arrange(List<(XValue x, double? y)> data, List<(XValue x, double? y)> firstDataPreSorted);\n", - " // This interface probably needs some work. The idea is that, given the next xvalue in each series, this selects which one\n", - " // should be next overall.\n", - " public abstract XValue? ChooseNext(IEnumerable xavlues);\n", - "\n", - " public class DefaultXArrangement : XArrangement\n", - " {\n", - " public DefaultXArrangement() : base(null) {}\n", - " public override List<(XValue x, double? y)> Arrange(List<(XValue x, double? y)> data, List<(XValue x, double? y)> firstDataPreSorted) => data;\n", - " public override XValue? 
ChooseNext(IEnumerable xvalues) => xvalues.FirstOrDefault(x => x.HasValue);\n", - " }\n", - "\n", - " public class PercentileXArrangement : XArrangement\n", - " {\n", - " private bool _descending;\n", - " public PercentileXArrangement(bool descending = false) : base(\"Percentile\") { _descending = descending; }\n", - " public override List<(XValue x, double? y)> Arrange(List<(XValue x, double? y)> data, List<(XValue x, double? y)> firstDataPreSorted)\n", - " {\n", - " var sortedData = _descending\n", - " ? data.Select(d => d.y).OrderByDescending(y => y)\n", - " : data.Select(d => d.y).OrderBy(y => y);\n", - " return sortedData.Select((d, i) => (new XValue(i / (double) data.Count), d)).ToList();\n", - " }\n", - " public override XValue? ChooseNext(IEnumerable xvalues) => xvalues.Min();\n", - " }\n", - "\n", - " public class SortedXArrangement : XArrangement\n", - " {\n", - " private bool _descending;\n", - " public SortedXArrangement(bool descending = true) : base(\"Metric Rank\") { _descending = descending; }\n", - " public override List<(XValue x, double? y)> Arrange(List<(XValue x, double? y)> data, List<(XValue x, double? y)> firstDataPreSorted)\n", - " {\n", - " var sortedData = _descending\n", - " ? data.Select(d => d.y).OrderByDescending(y => y)\n", - " : data.Select(d => d.y).OrderBy(y => y);\n", - " return sortedData.Select((d, i) => (new XValue(i), d)).ToList();\n", - " }\n", - " public override XValue? ChooseNext(IEnumerable xvalues) => xvalues.Min();\n", - " }\n", - "\n", - " public class CombinedSortedXArrangement : XArrangement\n", - " {\n", - " public CombinedSortedXArrangement() : base(null) {}\n", - " public override List<(XValue x, double? y)> Arrange(List<(XValue x, double? y)> data, List<(XValue x, double? y)> firstDataPreSorted)\n", - " => data.Join(firstDataPreSorted, d => d.x, d => d.x, ((d, sortedEntry) => (d.x, d.y, sortedEntry.y)))\n", - " .OrderByDescending(triple => triple.Item3)\n", - " .Select(triple => (triple.x, triple.Item2));\n", - " public override XValue? ChooseNext(IEnumerable xvalues) => xvalues.FirstOrDefault(x => x.HasValue);\n", - " }\n", - "\n", - " public class RelativeXArrangement : XArrangement\n", - " {\n", - " public RelativeXArrangement() : base(null) {}\n", - " public override List<(XValue x, double? y)> Arrange(List<(XValue x, double? y)> data, List<(XValue x, double? y)> firstDataPreSorted)\n", - " {\n", - " if (data.Count == 0) return data;\n", - " if (data[0].x.HasName)\n", - " {\n", - " Console.WriteLine($\"Applying {nameof(RelativeXArrangement)} on non-numeric x values (first is {data[0].x})\");\n", - " return data;\n", - " }\n", - " double firstValue = data[0].x.GetValue();\n", - " return data.Select(d => (new XValue(d.x.GetValue() - firstValue), d.y));\n", - " }\n", - " public override XValue? 
ChooseNext(IEnumerable xvalues) => xvalues.Min(); // not necessarily?\n", - " }\n", - "}\n", - "\n", - "public static class XArrangements\n", - "{\n", - " public static XArrangement.DefaultXArrangement Default { get; } = new ();\n", - " public static XArrangement.PercentileXArrangement Percentile { get; } = new();\n", - " public static XArrangement.SortedXArrangement Sorted { get; } = new();\n", - " public static XArrangement.CombinedSortedXArrangement CombinedSorted { get; } = new();\n", - " public static XArrangement.RelativeXArrangement Relative { get; } = new();\n", - "}\n", - "\n", - "public abstract class DataPresenter\n", - "{\n", - " public bool Debug;\n", - "\n", - " public abstract void Clear();\n", - "\n", - " // true if ok\n", - " public abstract bool PrepareUnits(IEnumerable units);\n", - " public abstract void SetColorGroups(Dictionary colorGroups);\n", - " public abstract void Display();\n", - " public abstract TResult Result { get; }\n", - "\n", - " // Below members are per-chart\n", - "\n", - " public abstract void Start(string title, string xlabel);\n", - " public abstract void AddSeries(string title, string unit, string colorFamilyKey, string colorFamilyId, List<(XValue x, double? y)> data);\n", - " public abstract void Finish(XArrangement xArrangement);\n", - "}\n", - "\n", - "public abstract class TextPresenter : DataPresenter>>\n", - "{\n", - " public static RawTextPresenter RawText { get; } = new RawTextPresenter();\n", - " public static MarkdownPresenter Markdown { get; } = new MarkdownPresenter();\n", - " public static HtmlPresenter Html { get; } = new HtmlPresenter();\n", - " public static CsvPresenter Csv { get; } = new CsvPresenter();\n", - "\n", - " protected record struct DataPoint(XValue x, double? y);\n", - " private record Series(string title, string unit, List data);\n", - " private record Table(string title, string xlabel, List series);\n", - "\n", - " private List> _result = new();\n", - " public override void Clear() => _result.Clear();\n", - "\n", - " // true if ok\n", - " public override bool PrepareUnits(IEnumerable units) => true;\n", - " public override void SetColorGroups(Dictionary colorGroups) {}\n", - "\n", - " public override List> Result => _result;\n", - " \n", - " // Below members are per-table\n", - "\n", - " private Table _current;\n", - "\n", - " public override void Start(string title, string xlabel) { _current = new(title: title, xlabel: xlabel, series: new()); }\n", - " public override void AddSeries(string title, string unit, string colorFamilyKey, string colorFamilyId, List<(XValue x, double? y)> data)\n", - " => _current.series.Add(new(title: title, unit: unit, data: data.Select(pair => new DataPoint(pair.x, pair.y)).ToList()));\n", - "\n", - " private int MaxTokenLength(string phrase) => phrase.Split(' ').Select(s => s.Length).Max();\n", - " protected string NDashes(int n) => new string('-', n);\n", - " protected string NSpaces(int n) => new string(' ', n);\n", - " protected void PadLeft(StringBuilder sb, int width) => sb.Insert(0, NSpaces(width - sb.Length));\n", - "\n", - " protected abstract string MakeTitle(string title);\n", - " protected abstract string? StartTable();\n", - " protected abstract IEnumerable HeaderLines(IEnumerable headerValues, IEnumerable widths);\n", - " protected abstract string? HeaderBorder(IEnumerable widths);\n", - " protected abstract IEnumerable DataLine(XValue xvalue, IEnumerable values, IEnumerable widths);\n", - " protected abstract string? 
EndTable();\n", - "\n", - " protected const string lineStart = \"| \";\n", - " protected const string lineDelim = \" | \";\n", - " protected const string lineEnd = \" |\";\n", - "\n", - " private static void AddIfNotNull(List list, string? value)\n", - " {\n", - " if (value != null) list.Add(value);\n", - " }\n", - "\n", - " public override void Finish(XArrangement xArrangement)\n", - " {\n", - "\n", - " int xWidth = _current.series.SelectMany(series => series.data.Select(d => d.x.ToString().Length))\n", - " .Append(MaxTokenLength(_current.xlabel))\n", - " .Max();\n", - " List seriesWidths = _current.series.Select(\n", - " series => series.data.Select(d => d.y?.ToString(\"N3\").Length ?? 0)\n", - " .Append((this is RawTextPresenter) ? MaxTokenLength(series.title) : series.title.Length)\n", - " .Append(series.unit.Length + 2) // \"()\"\n", - " .Max());\n", - " var allWidths = seriesWidths.Prepend(xWidth);\n", - " List tableText = new();\n", - " tableText.Add(MakeTitle(_current.title));\n", - " tableText.Add(\"\");\n", - " AddIfNotNull(tableText, StartTable());\n", - "\n", - " var headerValues = _current.series.Select(series => series.title).Prepend(_current.xlabel);\n", - " tableText.AddRange(HeaderLines(headerValues, allWidths));\n", - " AddIfNotNull(tableText, HeaderBorder(allWidths));\n", - "\n", - " int numSeries = _current.series.Count;\n", - " int[] nextIndices = new int[numSeries]; // all zeroes\n", - " DataPoint?[] candidates = new DataPoint?[numSeries];\n", - " string[] elements = new string[numSeries + 1]; // includes X\n", - " while (true)\n", - " {\n", - " // Find next xvalue, if it exists.\n", - " for (int i = 0; i < numSeries; ++i)\n", - " {\n", - " while ((nextIndices[i] < _current.series[i].data.Count)\n", - " && !_current.series[i].data[nextIndices[i]].y.HasValue)\n", - " {\n", - " nextIndices[i]++;\n", - " }\n", - "\n", - " candidates[i] = nextIndices[i] < _current.series[i].data.Count\n", - " ? _current.series[i].data[nextIndices[i]]\n", - " : null;\n", - " }\n", - " XValue? next = xArrangement.ChooseNext(candidates.Select(p => p?.x));\n", - " if (!next.HasValue) break;\n", - "\n", - " // Get values\n", - " for (int i = 0; i < numSeries; ++i)\n", - " {\n", - " if (!candidates[i].HasValue) continue;\n", - " if (!next.Value.Equals(candidates[i].Value.x))\n", - " {\n", - " candidates[i] = null;\n", - " continue;\n", - " }\n", - " nextIndices[i]++;\n", - " }\n", - " if (!candidates.Any(NotNull)) throw new Exception(\"internal error - no candidate used\");\n", - "\n", - " tableText.AddRange(DataLine(next.Value, candidates, allWidths));\n", - " }\n", - "\n", - " AddIfNotNull(tableText, EndTable());\n", - " _result.Add(tableText);\n", - " }\n", - "}\n", - "\n", - "public class RawTextPresenter : TextPresenter\n", - "{\n", - " private const string borderStart = \"+ \";\n", - " private const string borderDelim = \"-+-\";\n", - " private const string borderEnd = \" +\";\n", - "\n", - " protected override string MakeTitle(string title) => title;\n", - " protected override string? 
StartTable() => null;\n", - " \n", - " private List MakeLines(string phrase, int width)\n", - " {\n", - " List result = new();\n", - "\n", - " string[] tokens = phrase.Split(' ');\n", - " StringBuilder current = new();\n", - " foreach (string token in tokens)\n", - " {\n", - " if (token.Length > width) throw new Exception(\"Tokenization inconsistent\");\n", - " if ((current.Length + token.Length + 1) > width)\n", - " {\n", - " PadLeft(current, width);\n", - " result.Add(current.ToString());\n", - " current = new();\n", - " }\n", - "\n", - " if (current.Length > 0) current.Append(' ');\n", - " current.Append(token);\n", - " }\n", - " PadLeft(current, width);\n", - " result.Add(current.ToString());\n", - "\n", - " return result;\n", - " }\n", - "\n", - " protected override IEnumerable HeaderLines(IEnumerable headerValues, IEnumerable widths)\n", - " {\n", - " var headerCells = headerValues.Zip(widths).Select(headerAndWidth => MakeLines(headerAndWidth.First, headerAndWidth.Second));\n", - " int maxHeaderLines = headerCells.Select(lines => lines.Count).Max();\n", - " foreach ((List cell, int width) in headerCells.Zip(widths))\n", - " {\n", - " while (cell.Count < maxHeaderLines) cell.Insert(0, NSpaces(width));\n", - " }\n", - " for (int i = 0; i < maxHeaderLines; ++i)\n", - " {\n", - " yield return (lineStart + string.Join(lineDelim, headerCells.Select(cell => cell[i])) + lineEnd);\n", - " }\n", - " }\n", - "\n", - " protected override string? HeaderBorder(IEnumerable widths)\n", - " => borderStart + string.Join(borderDelim, widths.Select(n => NDashes(n))) + borderEnd;\n", - "\n", - " protected override IEnumerable DataLine(XValue xvalue, IEnumerable values, IEnumerable widths)\n", - " {\n", - " var cells = values.Select(p => p?.y?.ToString(\"N3\")).Prepend(xvalue.ToString())\n", - " .Zip(widths).Select(p => (p.First ?? \"\").PadLeft(p.Second));\n", - " yield return lineStart + string.Join(lineDelim, cells) + lineEnd;\n", - " }\n", - "\n", - " protected override string? EndTable() => null;\n", - "\n", - " public override void Display()\n", - " {\n", - " foreach (List table in Result)\n", - " {\n", - "\n", - " Console.WriteLine();\n", - " foreach (string line in table)\n", - " {\n", - " Console.WriteLine(line);\n", - " }\n", - " }\n", - " }\n", - "}\n", - "\n", - "public class MarkdownPresenter : TextPresenter\n", - "{\n", - " protected override string MakeTitle(string title) => $\"### {title}\";\n", - " protected override string? StartTable() => null;\n", - "\n", - " protected override IEnumerable HeaderLines(IEnumerable headerValues, IEnumerable widths)\n", - " {\n", - " yield return lineStart + string.Join(lineDelim, headerValues.Zip(widths).Select(pair => pair.First.PadLeft(pair.Second))) + lineEnd;\n", - " }\n", - "\n", - " protected override string? HeaderBorder(IEnumerable widths)\n", - " => lineStart + string.Join(lineDelim, widths.Select(n => NDashes(n-1) + \":\")) + lineEnd;\n", - "\n", - " protected override IEnumerable DataLine(XValue xvalue, IEnumerable values, IEnumerable widths)\n", - " {\n", - " var cells = values.Select(p => p?.y?.ToString(\"N3\")).Prepend(xvalue.ToString())\n", - " .Zip(widths).Select(p => (p.First ?? \"\").PadLeft(p.Second));\n", - " yield return lineStart + string.Join(lineDelim, cells) + lineEnd;\n", - " }\n", - "\n", - " protected override string? 
EndTable() => null;\n", - "\n", - " public override void Display()\n", - " {\n", - " foreach (List table in Result)\n", - " {\n", - " string.Join(\"\\n\", table).DisplayAs(\"text/markdown\");\n", - " }\n", - " }\n", - "}\n", - "\n", - "public class HtmlPresenter : TextPresenter\n", - "{\n", - " protected override string MakeTitle(string title) => $\"
<h3>{title}</h3>\";\n", - "    protected override string? StartTable() => \"<table>\";\n", - "    protected override IEnumerable HeaderLines(IEnumerable headerValues, IEnumerable widths)\n", - "    {\n", - "        yield return \"<tr>\";\n", - "        foreach (string value in headerValues)\n", - "        {\n", - "            yield return $\"    <th>{value}</th>\";\n", - "        }\n", - "        yield return \"</tr>\";\n", - "    }\n", - "\n", - "    protected override string? HeaderBorder(IEnumerable widths) => null;\n", - "\n", - "    protected override IEnumerable DataLine(XValue xvalue, IEnumerable values, IEnumerable widths)\n", - "    {\n", - "        var cells = values.Select(p => p?.y?.ToString(\"N3\")).Prepend(xvalue.ToString());\n", - "        yield return \"<tr>\";\n", - "        foreach (string value in cells)\n", - "        {\n", - "            yield return $\"    <td>{value}</td>\";\n", - "        }\n", - "        yield return \"</tr>\";\n", - "    }\n", - "\n", - "    protected override string? EndTable() => \"</table>
\";\n", - "\n", - " public override void Display()\n", - " {\n", - " foreach (List table in Result)\n", - " {\n", - " string.Join(\"\\n\", table).DisplayAs(\"text/html\");\n", - " }\n", - " }\n", - "}\n", - "\n", - "public class CsvPresenter : TextPresenter\n", - "{\n", - " protected override string MakeTitle(string title) => $\"# {title}\";\n", - " protected override string? StartTable() => null;\n", - " protected override IEnumerable HeaderLines(IEnumerable headerValues, IEnumerable widths)\n", - " {\n", - " yield return string.Join(\",\", headerValues);\n", - " }\n", - "\n", - " protected override string? HeaderBorder(IEnumerable widths) => null;\n", - "\n", - " protected override IEnumerable DataLine(XValue xvalue, IEnumerable values, IEnumerable widths)\n", - " {\n", - " var cells = values.Select(p => p?.y?.ToString(\"N3\")).Prepend(xvalue.ToString());\n", - " yield return string.Join(\",\", cells);\n", - " }\n", - "\n", - " protected override string? EndTable() => null;\n", - "\n", - " public override void Display()\n", - " {\n", - " foreach (List table in Result)\n", - " {\n", - " string.Join(\"\\n\", table).DisplayAs(\"text/csv\");\n", - " }\n", - " }\n", - "}\n", - "\n", - "public class ChartPresenter : DataPresenter>\n", - "{\n", - " private string _scatterMode;\n", - " private List _uniqueUnits;\n", - " private ColorProvider _colorProvider;\n", - " private List _charts = new();\n", - "\n", - " public ChartPresenter(string scatterMode = null)\n", - " {\n", - " _scatterMode = scatterMode;\n", - " }\n", - "\n", - " public override void Clear() => _charts.Clear();\n", - "\n", - " public override bool PrepareUnits(IEnumerable units)\n", - " {\n", - " _uniqueUnits = new();\n", - " foreach (string unit in units)\n", - " {\n", - " if (!_uniqueUnits.Contains(unit)) _uniqueUnits.Add(unit);\n", - " }\n", - " if (_uniqueUnits.Count > 2)\n", - " {\n", - " Console.WriteLine($\"Too many units: {string.Join(\", \", _uniqueUnits)}\");\n", - " return false;\n", - " }\n", - " return true;\n", - " }\n", - "\n", - " private int yaxis(string unit) => _uniqueUnits.IndexOf(unit);\n", - "\n", - " public override void SetColorGroups(Dictionary colorGroups)\n", - " {\n", - " _colorProvider = new(colorGroups);\n", - " if (Debug) _colorProvider.DumpColorGroups();\n", - " }\n", - "\n", - " public override void Display()\n", - " {\n", - " foreach (PlotlyChart chart in Result) chart.Display();\n", - " }\n", - "\n", - " public override List Result => _charts;\n", - "\n", - " // Below members are per-chart\n", - "\n", - " private Layout.Layout _layout;\n", - " private List _scatters;\n", - "\n", - " public override void Start(string title, string xlabel)\n", - " {\n", - " _layout = new Layout.Layout\n", - " {\n", - " xaxis = new Xaxis { title = xlabel },\n", - " yaxis = new Yaxis { title = _uniqueUnits[0] },\n", - " title = title,\n", - " // margin = new Margin() { r = 123 },\n", - " };\n", - "\n", - " if (_uniqueUnits.Count > 1)\n", - " {\n", - " _layout.yaxis2 = new Yaxis { title = _uniqueUnits[1], side = \"right\", overlaying = \"y\" };\n", - " }\n", - "\n", - " _scatters = new();\n", - " }\n", - "\n", - " public override void AddSeries(string title, string unit, string colorFamilyKey, string colorFamilyId, List<(XValue x, double? y)> data)\n", - " {\n", - " Scatter scatter =\n", - " new Scatter {\n", - " name = title,\n", - " x = data[0].x.HasName ? 
data.Select(d => d.x.GetName()) : data.Select(d => d.x.GetValue()),\n", - " y = data.Select(d => d.y),\n", - " };\n", - " if (_scatterMode != null) scatter.mode = _scatterMode;\n", - " if (yaxis(unit) == 1) scatter.yaxis = \"y2\";\n", - " _colorProvider.SetMarker(scatter, colorFamilyKey, colorFamilyId);\n", - " // scatter.marker will throw if marker hasn't been set.\n", - " // ShouldSerializemarker appears to check if it has been set.\n", - " if (Debug) Console.WriteLine($\"color '{colorFamilyKey}': '{(scatter.ShouldSerializemarker() ? scatter.marker.color : \"\")}'\");\n", - " _scatters.Add(scatter);\n", - " }\n", - "\n", - " public override void Finish(XArrangement xArrangement) => _charts.Add(Chart.Plot(_scatters, _layout));\n", - "}\n", - "\n", - "TResult ChartInternal(DataPresenter presenter, ChartType chartType,\n", - " DataManager dataManager, List> metrics,\n", - " Filter runFilter = null, Filter configFilter = null, Filter benchmarkFilter = null, IntFilter iterationFilter = null,\n", - " ConfigIterationFilter configIterationFilter = null, Func dataFilter = null, Func benchmarkMap = null,\n", - " BaseMetric<(string, TData), XValue> xMetric = null, XArrangement xArrangement = null,\n", - " NameSimplifier configNameSimplifier = null, bool includeRunName = false,\n", - " bool display = true, bool debug = false)\n", - "{\n", - " runFilter = runFilter ?? Filter.All;\n", - " configFilter = configFilter ?? Filter.All;\n", - " benchmarkFilter = benchmarkFilter ?? Filter.All;\n", - " iterationFilter = iterationFilter ?? IntFilter.All;\n", - " // configIterationFilter is not set to an empty dictionary as that would exclude everything\n", - " dataFilter = dataFilter ?? (data => true);\n", - " benchmarkMap = benchmarkMap ?? chartType.DefaultBenchmarkMap;\n", - " xMetric = xMetric ?? chartType.DefaultXMetric;\n", - " xArrangement = xArrangement ?? XArrangements.Default;\n", - "\n", - " presenter.Clear();\n", - " presenter.Debug = debug;\n", - "\n", - " if (metrics.Count == 0)\n", - " {\n", - " Console.WriteLine(\"No metrics\");\n", - " return default(TResult);\n", - " }\n", - "\n", - " List configs = dataManager.GetConfigs(runFilter: runFilter, configFilter: configFilter).Select(tuple => tuple.config).Distinct().ToList();\n", - " if (configs.Count == 0)\n", - " {\n", - " Console.WriteLine(\"No configs afer filtering\");\n", - " return default(TResult);\n", - " }\n", - "\n", - " if (debug) Console.WriteLine(\"Simplify config names\");\n", - " Dictionary configDisplayNames = null;\n", - " string configPrefix = null;\n", - " if (configNameSimplifier != null)\n", - " {\n", - " (configPrefix, configDisplayNames) = configNameSimplifier.Simplify(configs);\n", - " }\n", - " \n", - " if (debug) Console.WriteLine(\"Prepare units\");\n", - " presenter.PrepareUnits(metrics.Select(metric => metric.Unit));\n", - "\n", - " Dictionary> benchmarkGroups = new();\n", - " HashSet benchmarkSet = new();\n", - " foreach ((string run, string config, string benchmark) in\n", - " dataManager.GetBenchmarks(runFilter: runFilter, configFilter: configFilter, benchmarkFilter: benchmarkFilter, iterationFilter: iterationFilter,\n", - " configIterationFilter: configIterationFilter))\n", - " {\n", - " if (!benchmarkSet.Add(benchmark)) continue;\n", - "\n", - " string benchmarkGroup = (benchmarkMap != null) ? 
benchmarkMap(benchmark) : benchmark;\n", - " benchmarkGroups.GetOrAdd(benchmarkGroup, new());\n", - " benchmarkGroups[benchmarkGroup].Add(benchmark);\n", - " }\n", - "\n", - " foreach (var (groupName, benchmarkList) in benchmarkGroups)\n", - " {\n", - " benchmarkList.Sort();\n", - "\n", - " if (debug)\n", - " {\n", - " Console.Write($\"{groupName}:\");\n", - " foreach (var benchmark in benchmarkList)\n", - " {\n", - " Console.Write($\" {benchmark}\");\n", - " }\n", - " Console.WriteLine();\n", - " }\n", - " }\n", - "\n", - " foreach (var (benchmarkGroup, benchmarkList) in benchmarkGroups)\n", - " {\n", - " if (debug) Console.WriteLine(\"Initialize colors\");\n", - "\n", - " // Consider moving 'colorGroups' to the presenter\n", - " Dictionary colorGroups = new();\n", - " foreach (SeriesInfo info in\n", - " chartType.GetSeries(dataManager, metrics, runFilter: runFilter, configFilter: configFilter, benchmarkFilter: benchmarkFilter,\n", - " iterationFilter: iterationFilter, configIterationFilter: configIterationFilter, benchmarkList: benchmarkList))\n", - " {\n", - " string colorFamilyKey = chartType.GetColorFamilyKey(info, multipleMetrics: metrics.Count > 1, includeRunName: includeRunName, multipleConfigs: configs.Count > 1,\n", - " configDisplayNames: configDisplayNames, multipleBenchmarks: benchmarkList.Count > 1);\n", - "\n", - " colorGroups[colorFamilyKey] = colorGroups.GetValueOrDefault(colorFamilyKey, 0) + 1;\n", - " }\n", - "\n", - " presenter.SetColorGroups(colorGroups);\n", - "\n", - " {\n", - " List scatters = new();\n", - "\n", - " string xlabel = xArrangement.GetNewTitle(xMetric.Title);\n", - "\n", - " string titlePrefix = chartType.GetChartTitle();\n", - " List titleParts = new();\n", - " if (!string.IsNullOrWhiteSpace(benchmarkGroup)) titleParts.Add(benchmarkGroup);\n", - " if (metrics.Count == 1) titleParts.Add(metrics[0].Title);\n", - " if (configPrefix != null) titleParts.Add(configPrefix);\n", - " else if (configs.Count == 1) titleParts.Add(configDisplayNames?.GetValueOrDefault(configs[0]) ?? configs[0]);\n", - " string titleWithoutPrefix = string.Join(\" / \", titleParts);\n", - " string title = string.Join(\" / \", titleParts.Prepend(titlePrefix));\n", - " presenter.Start(title: title, xlabel: xlabel);\n", - "\n", - " List<(XValue x, double? 
y)> firstDataPreSorted = null;\n", - " double firstDataMin = 0;\n", - " HashSet firstDataSet = new();\n", - "\n", - " foreach ((SeriesInfo info, int indexForOffsetting) in\n", - " chartType.GetSeries(dataManager, metrics, runFilter: runFilter, configFilter: configFilter, benchmarkFilter: benchmarkFilter,\n", - " iterationFilter: iterationFilter, configIterationFilter: configIterationFilter, benchmarkList: benchmarkList).WithIndex())\n", - " {\n", - " string colorFamilyKey = chartType.GetColorFamilyKey(info, multipleMetrics: metrics.Count > 1, includeRunName: includeRunName, multipleConfigs: configs.Count > 1,\n", - " configDisplayNames: configDisplayNames, multipleBenchmarks: benchmarkList.Count > 1);\n", - " string seriesTitle = chartType.GetSeriesTitle(info, colorFamilyKey, metrics.Count > 1);\n", - " if (debug) Console.Write($\"series title: {seriesTitle}, \");\n", - "\n", - " List> dataSource;\n", - " try { dataSource = chartType.GetDataSource(info, benchmarkFilter: benchmarkFilter, iterationFilter: iterationFilter,\n", - " configIterationFilter: configIterationFilter, dataFilter: dataFilter); }\n", - " catch (Exception e) { Console.WriteLine($\"Exception {e} processing data source for {title} / {seriesTitle}\"); dataSource = null; }\n", - " if (dataSource == null)\n", - " {\n", - " Console.WriteLine($\"No data for {titleWithoutPrefix} / {seriesTitle}\");\n", - " continue;\n", - " }\n", - " int dataSourceCount = dataSource.Count;\n", - " if (debug) Console.Write($\"source count = {dataSourceCount}, \");\n", - "\n", - " List<(XValue x, double? y)> data;\n", - " // Theory: For numeric x values, null y values need to be filtered or \"mode==lines\" won't show\n", - " // values that have null neighbors.\n", - " // Theory: For non-numeric x values, null y values are needed to avoid shuffling of the x values\n", - " // (Example: if series 1 has \"a\" \"c\" and series 2 has \"a\" \"b\" \"c\", then 2 will be displayed\n", - " // \"a\" \"c\" \"b\" -AND- \"mode==lines\" will connect the \"a\" to the \"b\" to the \"c\")\n", - " // TODO: We probably need to add fake entries to the first (?) series if the different\n", - " // series have different sets of x values. The existing code will work if the x value\n", - " // exists in the DataManager but the metrics don't. (Example: we have ASP.NET metrics but\n", - " // no GC trace for a benchmark, but the chart contains GC metrics)\n", - " info.Metric.ResetDiagnostics();\n", - " try { data = dataSource.Select(b => (x: xMetric.DoExtract((b.Key, b.Value)), y: info.Metric.DoExtract(b.Value, indexForOffsetting))).ToList(); }\n", - " catch { Console.WriteLine($\"Exception processing data items for {title} / {seriesTitle}\"); data = null; }\n", - " info.Metric.DisplayDiagnostics($\"{titleWithoutPrefix} / {seriesTitle}\");\n", - " if (debug) Console.Write($\"data count = {data.Count}, \");\n", - " if (!data.Any(d => d.y != null))\n", - " {\n", - " Console.WriteLine($\"No data items for {titleWithoutPrefix} / {seriesTitle}\");\n", - " continue;\n", - " }\n", - "\n", - " // This should probably be factored into CombinedSortedXArrangement. The idea is that firstDataPreSorted\n", - " // contains the first series' data so that each series can be merged into it, sorted the same way, and\n", - " // then all displayed in the same order of x values. 
However, the first series might not have all of the\n", - " // values, so this tacks them on the end arbitrarily.\n", - " if (firstDataPreSorted == null)\n", - " {\n", - " firstDataPreSorted = new(data); // make a copy so that edits don't change the original\n", - " firstDataMin = firstDataPreSorted.Select(pair => pair.y).Where(NotNull).Min(y => y.Value);\n", - " firstDataSet = new(firstDataPreSorted.Select(pair => pair.x));\n", - " }\n", - " foreach (var d in data)\n", - " {\n", - " if (firstDataSet.Add(d.x))\n", - " {\n", - " // The \"--\" is a hack to produce lower values. This should be fixed to be clearer.\n", - " firstDataPreSorted.Add((d.x, --firstDataMin));\n", - " }\n", - " }\n", - "\n", - " data = xArrangement.Arrange(data, firstDataPreSorted);\n", - "\n", - " // See above comment. If x values are numeric, remove ones without y values.\n", - " // Note that xarrangement can change the x value type.\n", - " if (data[0].x.HasValue)\n", - " {\n", - " data = data.Where(d => d.y != null);\n", - " }\n", - "\n", - " if (debug) Console.Write($\"data count = {data.Count}, \");\n", - " if (data.Count == 0)\n", - " {\n", - " Console.WriteLine($\"No data items after filtering nulls for {titleWithoutPrefix} / {seriesTitle}\");\n", - " continue;\n", - " }\n", - "\n", - " string colorFamilyId = chartType.GetColorFamilyId(info, multipleMetrics: metrics.Count > 1);\n", - " presenter.AddSeries(title: seriesTitle, unit: info.Metric.Unit, colorFamilyKey: colorFamilyKey, colorFamilyId: colorFamilyId, data: data);\n", - " }\n", - "\n", - " presenter.Finish(xArrangement);\n", - " }\n", - " }\n", - "\n", - " if (display)\n", - " {\n", - " presenter.Display();\n", - " }\n", - "\n", - " return presenter.Result;\n", - "}\n", - "\n", - "List> TableBenchmarks(DataManager dataManager, List> metrics, TextPresenter textPresenter = null,\n", - " Filter runFilter = null, Filter configFilter = null, Filter benchmarkFilter = null, IntFilter iterationFilter = null,\n", - " ConfigIterationFilter configIterationFilter = null, Func dataFilter = null,\n", - " Func benchmarkMap = null, BaseMetric<(string, BenchmarkData), XValue> xMetric = null, XArrangement xArrangement = null,\n", - " NameSimplifier configNameSimplifier = null, bool includeRunName = false,\n", - " bool display = true, bool debug = false)\n", - " => ChartInternal(textPresenter ?? 
TextPresenter.RawText, new BenchmarksChartType(),\n", - " dataManager, metrics,\n", - " runFilter: runFilter, configFilter: configFilter, benchmarkFilter: benchmarkFilter, iterationFilter: iterationFilter,\n", - " configIterationFilter: configIterationFilter, dataFilter: dataFilter,\n", - " benchmarkMap: benchmarkMap, xMetric: xMetric, xArrangement: xArrangement,\n", - " configNameSimplifier: configNameSimplifier, includeRunName: includeRunName,\n", - " display: display, debug: debug);\n", - "\n", - "List> TableBenchmarks(DataManager dataManager, Metric metric, TextPresenter textPresenter = null,\n", - " Filter runFilter = null, Filter configFilter = null, Filter benchmarkFilter = null, IntFilter iterationFilter = null,\n", - " ConfigIterationFilter configIterationFilter = null, Func dataFilter = null,\n", - " Func benchmarkMap = null, BaseMetric<(string, BenchmarkData), XValue> xMetric = null, XArrangement xArrangement = null,\n", - " NameSimplifier configNameSimplifier = null, bool includeRunName = false,\n", - " bool display = true, bool debug = false)\n", - " => TableBenchmarks(dataManager, ML(metric), textPresenter,\n", - " runFilter: runFilter, configFilter: configFilter, benchmarkFilter: benchmarkFilter, iterationFilter: iterationFilter,\n", - " configIterationFilter: configIterationFilter, dataFilter: dataFilter,\n", - " benchmarkMap: benchmarkMap, xMetric: xMetric, xArrangement: xArrangement,\n", - " configNameSimplifier: configNameSimplifier, includeRunName: includeRunName,\n", - " display: display, debug: debug);\n", - "\n", - "List> TableIterations(DataManager dataManager, List> metrics, TextPresenter textPresenter = null,\n", - " Filter runFilter = null, Filter configFilter = null, Filter benchmarkFilter = null, IntFilter iterationFilter = null,\n", - " ConfigIterationFilter configIterationFilter = null, Func dataFilter = null,\n", - " Func benchmarkMap = null, BaseMetric<(string, IterationData), XValue> xMetric = null, XArrangement xArrangement = null,\n", - " NameSimplifier configNameSimplifier = null, bool includeRunName = false,\n", - " bool display = true, bool debug = false)\n", - " => ChartInternal(textPresenter ?? 
TextPresenter.RawText, new IterationsChartType(),\n", - " dataManager, metrics,\n", - " runFilter: runFilter, configFilter: configFilter, benchmarkFilter: benchmarkFilter, iterationFilter: iterationFilter,\n", - " configIterationFilter: configIterationFilter, dataFilter: dataFilter,\n", - " benchmarkMap: benchmarkMap, xMetric: xMetric, xArrangement: xArrangement,\n", - " configNameSimplifier: configNameSimplifier, includeRunName: includeRunName,\n", - " display: display, debug: debug);\n", - "\n", - "List> TableIterations(DataManager dataManager, Metric metric, TextPresenter textPresenter = null,\n", - " Filter runFilter = null, Filter configFilter = null, Filter benchmarkFilter = null, IntFilter iterationFilter = null,\n", - " ConfigIterationFilter configIterationFilter = null, Func dataFilter = null,\n", - " Func benchmarkMap = null, BaseMetric<(string, IterationData), XValue> xMetric = null, XArrangement xArrangement = null,\n", - " NameSimplifier configNameSimplifier = null, bool includeRunName = false,\n", - " bool display = true, bool debug = false)\n", - " => TableIterations(dataManager, ML(metric), textPresenter,\n", - " runFilter: runFilter, configFilter: configFilter, benchmarkFilter: benchmarkFilter, iterationFilter: iterationFilter,\n", - " configIterationFilter: configIterationFilter, dataFilter: dataFilter,\n", - " benchmarkMap: benchmarkMap, xMetric: xMetric, xArrangement: xArrangement,\n", - " configNameSimplifier: configNameSimplifier, includeRunName: includeRunName,\n", - " display: display, debug: debug);\n", - "\n", - "List> TableGCData(DataManager dataManager, List> metrics, TextPresenter textPresenter = null,\n", - " Filter runFilter = null, Filter configFilter = null, Filter benchmarkFilter = null, IntFilter iterationFilter = null,\n", - " ConfigIterationFilter configIterationFilter = null, Func dataFilter = null,\n", - " Func benchmarkMap = null, BaseMetric<(string, TraceGC), XValue> xMetric = null, XArrangement xArrangement = null,\n", - " NameSimplifier configNameSimplifier = null, bool includeRunName = false,\n", - " bool display = true, bool debug = false)\n", - " => ChartInternal(textPresenter ?? 
TextPresenter.RawText, new TraceGCChartType(),\n", - " dataManager, metrics,\n", - " runFilter: runFilter, configFilter: configFilter, benchmarkFilter: benchmarkFilter, iterationFilter: iterationFilter,\n", - " configIterationFilter: configIterationFilter, dataFilter: dataFilter,\n", - " benchmarkMap: benchmarkMap, xMetric: xMetric, xArrangement: xArrangement,\n", - " configNameSimplifier: configNameSimplifier, includeRunName: includeRunName,\n", - " display: display, debug: debug);\n", - "\n", - "List> TableGCData(DataManager dataManager, Metric metric, TextPresenter textPresenter = null,\n", - " Filter runFilter = null, Filter configFilter = null, Filter benchmarkFilter = null, IntFilter iterationFilter = null,\n", - " ConfigIterationFilter configIterationFilter = null, Func dataFilter = null,\n", - " Func benchmarkMap = null, BaseMetric<(string, TraceGC), XValue> xMetric = null, XArrangement xArrangement = null,\n", - " NameSimplifier configNameSimplifier = null, bool includeRunName = false,\n", - " bool display = true, bool debug = false)\n", - " => TableGCData(dataManager, ML(metric), textPresenter,\n", - " runFilter: runFilter, configFilter: configFilter, benchmarkFilter: benchmarkFilter, iterationFilter: iterationFilter,\n", - " configIterationFilter: configIterationFilter, dataFilter: dataFilter,\n", - " benchmarkMap: benchmarkMap, xMetric: xMetric, xArrangement: xArrangement,\n", - " configNameSimplifier: configNameSimplifier, includeRunName: includeRunName,\n", - " display: display, debug: debug);\n", - "\n", - "List ChartBenchmarks(DataManager dataManager, List> metrics,\n", - " Filter runFilter = null, Filter configFilter = null, Filter benchmarkFilter = null, IntFilter iterationFilter = null,\n", - " ConfigIterationFilter configIterationFilter = null, Func dataFilter = null,\n", - " Func benchmarkMap = null, BaseMetric<(string, BenchmarkData), XValue> xMetric = null, XArrangement xArrangement = null,\n", - " NameSimplifier configNameSimplifier = null, bool includeRunName = false,\n", - " bool display = true, bool debug = false)\n", - " => ChartInternal(new ChartPresenter(scatterMode: null), new BenchmarksChartType(),\n", - " dataManager, metrics,\n", - " runFilter: runFilter, configFilter: configFilter, benchmarkFilter: benchmarkFilter, iterationFilter: iterationFilter,\n", - " configIterationFilter: configIterationFilter, dataFilter: dataFilter,\n", - " benchmarkMap: benchmarkMap, xMetric: xMetric, xArrangement: xArrangement,\n", - " configNameSimplifier: configNameSimplifier, includeRunName: includeRunName,\n", - " display: display, debug: debug);\n", - "\n", - "List ChartBenchmarks(DataManager dataManager, Metric metric,\n", - " Filter runFilter = null, Filter configFilter = null, Filter benchmarkFilter = null, IntFilter iterationFilter = null,\n", - " ConfigIterationFilter configIterationFilter = null, Func dataFilter = null,\n", - " Func benchmarkMap = null, BaseMetric<(string, BenchmarkData), XValue> xMetric = null, XArrangement xArrangement = null,\n", - " NameSimplifier configNameSimplifier = null, bool includeRunName = false,\n", - " bool display = true, bool debug = false)\n", - " => ChartBenchmarks(dataManager, ML(metric),\n", - " runFilter: runFilter, configFilter: configFilter, benchmarkFilter: benchmarkFilter, iterationFilter: iterationFilter,\n", - " configIterationFilter: configIterationFilter, dataFilter: dataFilter,\n", - " benchmarkMap: benchmarkMap, xMetric: xMetric, xArrangement: xArrangement,\n", - " configNameSimplifier: configNameSimplifier, 
includeRunName: includeRunName,\n", - " display: display, debug: debug);\n", - "\n", - "List ChartIterations(DataManager dataManager, List> metrics,\n", - " Filter runFilter = null, Filter configFilter = null, Filter benchmarkFilter = null, IntFilter iterationFilter = null,\n", - " ConfigIterationFilter configIterationFilter = null, Func dataFilter = null,\n", - " Func benchmarkMap = null, BaseMetric<(string, IterationData), XValue> xMetric = null, XArrangement xArrangement = null,\n", - " NameSimplifier configNameSimplifier = null, bool includeRunName = false,\n", - " bool display = true, bool debug = false)\n", - " => ChartInternal(new ChartPresenter(scatterMode: \"markers\"), new IterationsChartType(),\n", - " dataManager, metrics,\n", - " runFilter: runFilter, configFilter: configFilter, benchmarkFilter: benchmarkFilter, iterationFilter: iterationFilter,\n", - " configIterationFilter: configIterationFilter, dataFilter: dataFilter,\n", - " benchmarkMap: benchmarkMap, xMetric: xMetric, xArrangement: xArrangement,\n", - " configNameSimplifier: configNameSimplifier, includeRunName: includeRunName,\n", - " display: display, debug: debug);\n", - "\n", - "List ChartIterations(DataManager dataManager, Metric metric,\n", - " Filter runFilter = null, Filter configFilter = null, Filter benchmarkFilter = null, IntFilter iterationFilter = null,\n", - " ConfigIterationFilter configIterationFilter = null, Func dataFilter = null,\n", - " Func benchmarkMap = null, BaseMetric<(string, IterationData), XValue> xMetric = null, XArrangement xArrangement = null,\n", - " NameSimplifier configNameSimplifier = null, bool includeRunName = false,\n", - " bool display = true, bool debug = false)\n", - " => ChartIterations(dataManager, ML(metric),\n", - " runFilter: runFilter, configFilter: configFilter, benchmarkFilter: benchmarkFilter, iterationFilter: iterationFilter,\n", - " configIterationFilter: configIterationFilter, dataFilter: dataFilter,\n", - " benchmarkMap: benchmarkMap, xMetric: xMetric, xArrangement: xArrangement,\n", - " configNameSimplifier: configNameSimplifier, includeRunName: includeRunName,\n", - " display: display, debug: debug);\n", - "\n", - "List ChartGCData(DataManager dataManager, List> metrics,\n", - " Filter runFilter = null, Filter configFilter = null, Filter benchmarkFilter = null, IntFilter iterationFilter = null,\n", - " ConfigIterationFilter configIterationFilter = null, Func dataFilter = null,\n", - " Func benchmarkMap = null, BaseMetric<(string, TraceGC), XValue> xMetric = null, XArrangement xArrangement = null,\n", - " NameSimplifier configNameSimplifier = null, bool includeRunName = false,\n", - " bool display = true, bool debug = false)\n", - " => ChartInternal(new ChartPresenter(scatterMode: null), new TraceGCChartType(),\n", - " dataManager, metrics,\n", - " runFilter: runFilter, configFilter: configFilter, benchmarkFilter: benchmarkFilter, iterationFilter: iterationFilter,\n", - " configIterationFilter: configIterationFilter, dataFilter: dataFilter,\n", - " benchmarkMap: benchmarkMap, xMetric: xMetric, xArrangement: xArrangement,\n", - " configNameSimplifier: configNameSimplifier, includeRunName: includeRunName,\n", - " display: display, debug: debug);\n", - "\n", - "List ChartGCData(DataManager dataManager, Metric metric,\n", - " Filter runFilter = null, Filter configFilter = null, Filter benchmarkFilter = null, IntFilter iterationFilter = null,\n", - " ConfigIterationFilter configIterationFilter = null, Func dataFilter = null,\n", - " Func benchmarkMap = null, 
BaseMetric<(string, TraceGC), XValue> xMetric = null, XArrangement xArrangement = null,\n", - " NameSimplifier configNameSimplifier = null, bool includeRunName = false,\n", - " bool display = true, bool debug = false)\n", - " => ChartGCData(dataManager, ML(metric),\n", - " runFilter: runFilter, configFilter: configFilter, benchmarkFilter: benchmarkFilter, iterationFilter: iterationFilter,\n", - " configIterationFilter: configIterationFilter, dataFilter: dataFilter,\n", - " benchmarkMap: benchmarkMap, xMetric: xMetric, xArrangement: xArrangement,\n", - " configNameSimplifier: configNameSimplifier, includeRunName: includeRunName,\n", - " display: display, debug: debug);" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "// Benchmark lists\n", - "\n", - "// scoutList is a list of ASP.NET benchmarks identified by looking at allocation rates.\n", - "// scoutList2 adds some tests that Maoni identified.\n", - "// smallList is for very quick looks.\n", - "\n", - "// Often a test infra run will have been limited to a smaller set of tests when desired,\n", - "// in which case these aren't necessary. However, these predefined lists can be used to\n", - "// help load (or chart after loading) a subset of a run when desired.\n", - "\n", - "List scoutList = ML(\n", - " \"ConnectionClose\",\n", - " \"ConnectionCloseHttps\",\n", - " \"ConnectionCloseHttpsHttpSys\",\n", - " \"ConnectionCloseHttpSys\",\n", - " \"Fortunes\",\n", - " \"FortunesDapper\",\n", - " \"FortunesEf\",\n", - " \"FortunesPlatform\",\n", - " \"FortunesPlatformDapper\",\n", - " \"FortunesPlatformEF\",\n", - " \"Json\",\n", - " \"JsonHttps\",\n", - " \"JsonHttpsHttpSys\",\n", - " \"JsonMin\",\n", - " \"JsonMvc\",\n", - " \"MultipleQueriesPlatform\",\n", - " \"PlaintextMvc\",\n", - " \"PlaintextQueryString\",\n", - " \"PlaintextWithParametersEmptyFilter\",\n", - " \"PlaintextWithParametersNoFilter\",\n", - " \"SingleQueryPlatform\",\n", - " \"Stage1\",\n", - " \"Stage1Grpc\",\n", - " \"Stage2\",\n", - " \"UpdatesPlatform\"\n", - ");\n", - "\n", - "List scoutList2 = scoutList.Concat(ML(\"CachingPlatform\", \"JsonMapAction\", \"Stage1TrimR2RSingleFile\")).ToList();\n", - "List smallList = ML(\"Fortunes\", \"JsonHttpsHttpSys\", \"PlaintextQueryString\", \"Stage2\", \"PlaintextMvc\");" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Examples" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "string rootDir = @\"C:\\home\\repro\\decommit\";\n", - "var hugeDM = new DataManager();\n", - "foreach (string config in ML(\"rcbase\", \"rc1\", \"alt-draft-log3\"))\n", - "{\n", - " string configDir = $\"gcperfsim_{config}_gc\";\n", - " foreach (string benchBase in ML(\"100mb\", \"100mb-10mb\", \"10mb-100mb\"))\n", - " {\n", - " foreach (string benchSuffix in ML(\"\", \"-low\"))\n", - " {\n", - " string bench = benchBase + benchSuffix;\n", - " string etl = $@\"{rootDir}\\{configDir}\\{bench}\\{bench}.{config}.0.etl\";\n", - " if (!File.Exists(etl))\n", - " {\n", - " Console.WriteLine($\"{etl} does not exist\");\n", - " continue;\n", - " }\n", - " hugeDM.AddGCTrace(etl, 
ML(\"corerun\"), config: config, loadMultipleProcesses: false);\n", - " }\n", - " }\n", - "}" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "hugeDM" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "string sds_process_name = \"Microsoft.M365.Core.Sds.Service\";\n", - "string store_worker_process_name = \"Microsoft.Exchange.Store.Worker\";\n", - "var store_work_process_list = ML(sds_process_name, store_worker_process_name);\n", - "var sdsDM = DataManager.CreateGCTraces(@\"c:\\home\\repro\\notebook-gcapi\", store_work_process_list);\n", - "\n", - "(sdsDM.Data.Runs.First().Value.Configs.First().Value.Benchmarks.Skip(1).First().Value.Iterations[0].GCProcessData.ProcessID,\n", - "sdsDM.Data.Runs.First().Value.Configs.First().Value.Benchmarks.Skip(1).First().Value.Iterations[0].GCProcessData.CommandLine)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "var diffDataManager = DataManager.CreateAspNetData(ML(\n", - " @\"C:\\home\\repro\\hc\\asp_traceplus3_gc\",\n", - " @\"C:\\home\\repro\\hc\\asp_tp3-m4_gc\",\n", - " @\"C:\\home\\repro\\hc\\asp_slope_gc\",\n", - " @\"C:\\home\\repro\\hc\\asp_evaldecr_gc\"\n", - "));" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "var cardsDM = DataManager.CreateGCTraces(@\"c:\\home\\repro\\2401310010004275\", pertinentProcesses: ML(\"EXCEL\"));" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "var low4DM = DataManager.CreateAspNetData(@\"c:\\home\\repro\\hc\\asp_v2-fixrearranged-mult-max_gc\"\n", - " //, benchmarkFilter: Filter.RE(\"Stage.*|Json.*\");\n", - " //, iterationFilter: IntFilter.Values(1)\n", - ");\n", - "\n", - "(low4DM.Data.Runs[\"asp_v2-fixrearranged-mult-max_gc\"].Configs.Keys,\n", - " low4DM.Data.Runs[\"asp_v2-fixrearranged-mult-max_gc\"].Configs[\"v2-fixrearranged-mult-max-h4\"].Benchmarks[\"Stage2\"].Iterations[1].GCSummaryInfo.MaxHeapCount,\n", - " low4DM.Data.Runs[\"asp_v2-fixrearranged-mult-max_gc\"].Configs[\"v2-fixrearranged-mult-max\"].Benchmarks[\"Stage2\"].Iterations[1].GCSummaryInfo.MaxHeapCount,\n", - " low4DM.GetConfigs(Filter.All, Filter.RE(\"max\")))\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "var rc3DataManager = 
DataManager.CreateAspNetData(@\"C:\\home\\repro\\hc\\asp_v2-fixrearranged_gc\");\n", - "rc3DataManager.AddAspNetData(@\"C:\\home\\repro\\hc\\asp_v2-tune_gc\");\n", - "\n", - "rc3DataManager.GetConfigs(Filter.All, Filter.All)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "var net6dm = DataManager.CreateGCTrace(@\"C:\\home\\repro\\2046032\\Microsoft.MWC.Workload.OneLake.Service.EntryPoint.exe_WithGCon.nettrace\", ML(\"Microsoft.MWC.Workload.OneLake.Service.EntryPoint\"));\n", - "net6dm.AddGCTrace(@\"C:\\home\\repro\\2046032\\TESTenvCST350 with perf.nettrace\", ML(\"Microsoft.MWC.Workload.OneLake.Service.EntryPoint\"));" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Charting examples" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "ChartBenchmarks(sdsDM, ML(Metrics.B.AveragePercentPauseTimeInGC, Metrics.B.MaxHeapCount)\n", - " //, benchmarkFilter: Filter.Names(\"JsonMapAction\")\n", - " , configNameSimplifier: NameSimplifier.PrefixDashed\n", - " );" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "TableBenchmarks(low4DM, ML(Metrics.B.AveragePercentPauseTimeInGC, Metrics.B.MaxHeapCount)\n", - " //, benchmarkFilter: Filter.Names(\"JsonMapAction\")\n", - " , configNameSimplifier: NameSimplifier.PrefixDashed\n", - " );" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "hugeDM.Data.Runs.First().Value.Configs.First().Value.Benchmarks.First().Value.Iterations[0].GCProcessData.GCs" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "ChartGCData(hugeDM, ML(Metrics.G.CommittedBeforeInUse, Metrics.G.CommittedBeforeInFree, Metrics.G.CommittedBeforeInGlobalFree, Metrics.G.CommittedBeforeInGlobalDecommit)\n", - " , configNameSimplifier: NameSimplifier.PrefixDashed\n", - " , debug: true\n", - " );" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "foreach (var xarr in new[] { (XArrangement) XArrangements.Default, XArrangements.Sorted, XArrangements.CombinedSorted })\n", - "{\n", - "TableBenchmarks(low4DM,\n", - " ML(Metrics.B.MaxHeapCount,\n", - " Metrics.Promote(Metrics.I.MaxHeapCount, Aggregation.Min),\n", - " Metrics.Promote(Metrics.I.MaxHeapCount, Aggregation.Range),\n", - " 
Metrics.Promote(Metrics.I.NumberOfHeapCountSwitches, Aggregation.Range),\n", - " Metrics.Promote(Metrics.I.NumberOfHeapCountDirectionChanges, Aggregation.Range)),\n", - " configNameSimplifier: NameSimplifier.PrefixDashed,\n", - " xArrangement: xarr,\n", - " configFilter: new Filter(excludeRE: \"h4\")\n", - " //, debug: true\n", - " );\n", - "}" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "foreach (var xarr in new[] { (XArrangement) XArrangements.Default, XArrangements.Sorted, XArrangements.CombinedSorted })\n", - "{\n", - "TableBenchmarks(diffDataManager,\n", - " ML((Metrics.B.MaxHeapCount),\n", - " Metrics.Promote(Metrics.I.MaxHeapCount, Aggregation.Min),\n", - " Metrics.Promote(Metrics.I.MaxHeapCount, Aggregation.Range),\n", - " Metrics.Promote(Metrics.I.NumberOfHeapCountSwitches, Aggregation.Range),\n", - " Metrics.Promote(Metrics.I.NumberOfHeapCountDirectionChanges, Aggregation.Range)),\n", - " //configNameSimplifier: NameSimplifier.PrefixDashed,\n", - " xArrangement: xarr);\n", - "}" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "TableIterations(diffDataManager, Metrics.I.MaxHeapCount\n", - " , configFilter: Filter.Names(\"traceplus3\", \"tp3-m4\")\n", - ");" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "foreach (var lat in Metrics.I.LatencyMSList)\n", - "{\n", - " TableBenchmarks(diffDataManager, Metrics.Promote(lat, Aggregation.Average)\n", - " // , benchmarkFilter: Filter.RE(\"Stage.*\")\n", - " // configNameSimplifier: NameSimplifier.PrefixDashed,\n", - " // types: B_XType.All,\n", - " //configFilter: new Filter(excludeRE: \".*h4\")\n", - " );\n", - "}" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "ChartIterations(diffDataManager, ML(/*Metrics.I.GCScore,*/ Metrics.I.RequestsPerMSec));" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "TableGCData(net6dm,\n", - " ML(\n", - " Metrics.G.HeapSizeAfter,\n", - " Metrics.G.HeapSizeBefore,\n", - " new Metric(gc => gc.HeapStats.GCHandleCount, \"GC Handles\", \"#\"),\n", - " Metrics.G.GenLargeSizeBefore,\n", - " Metrics.G.GenLargeSizeAfter,\n", - " Metrics.G.GenLargeObjSizeAfter,\n", - " new Metric(gc => gc.HeapStats.TotalHeapSize / 1000000, \"Total heap size\", \"MB\")\n", - " )\n", - " , textPresenter: TextPresenter.Html\n", - " , configNameSimplifier: new ListSimplifier((\"Microsoft.MWC.Workload.OneLake.Service.EntryPoint.exe_WithGCon\", \"Entry\"), (\"TESTenvCST350 with perf\", 
\"TES\"))\n", - " , dataFilter: gc => gc.Number > 300 && gc.Number < 310\n", - " );" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "ChartGCData(low4DM\n", - " , metrics: ML(Metrics.G.PauseDuration)\n", - " , benchmarkFilter: Filter.Names(\"Stage2\")\n", - " , configNameSimplifier: NameSimplifier.PrefixDashed\n", - " , xMetric: Metrics.X.StartRelativeMSec\n", - " , dataFilter: gc => gc.StartRelativeMSec > 2000\n", - " , iterationFilter: IntFilter.Values(0)\n", - " , xArrangement: XArrangements.Relative\n", - ");" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "ChartGCData(low4DM\n", - " , metrics: ML(Metrics.G.HctMtcp, Metrics.G.NumHeaps)\n", - " , benchmarkFilter: Filter.RE(\"Stage2$\")\n", - " , configNameSimplifier: NameSimplifier.PrefixDashed\n", - " , iterationFilter: IntFilter.Values(0)\n", - ");" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "ChartGCData(low4DM, Metrics.G.NumHeaps, configNameSimplifier: NameSimplifier.PrefixDashed, debug: false);" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "var low4CompRuns = ML((\"v2-fixrearranged-mult-max\", \"base\"), (\"v2-fixrearranged-mult-max-h4\", \"max4\"),\n", - " (\"v2-fixrearranged-mult-max-svr\", \"svr\"), (\"v2-fixrearranged-mult-max-svr4\", \"svr4\"),\n", - " (\"v2-fixrearranged-mult-max-mult8\", \"mult8\"), (\"v2-fixrearranged-mult-max-mult32\", \"mult32\"),\n", - " (\"v2-fixrearranged-mult-max-mult8x10\", \"m8x10\"), (\"v2-fixrearranged-mult-max-mult32x10\", \"m32x10\"),\n", - " (\"v2-fixrearranged-mult-max-x10\", \"x10\"));\n", - "\n", - "ChartGCData(low4DM, Metrics.G.HctMtcp\n", - " , configFilter: Filter.ExcludeRE(\"svr\")\n", - " , configNameSimplifier: new ListSimplifier(low4CompRuns)\n", - " );" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "ChartGCData(\n", - " sdsDM\n", - " , metrics: ML(Metrics.G.AllocRateMBSec, Metrics.G.PauseDuration)\n", - " //, benchmarkFilter: Filter.RE(\"Run32\")\n", - " , configNameSimplifier: NameSimplifier.PrefixDashed\n", - ");" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "ChartGCData(\n", - " cardsDM\n", - " , metrics: ML(Metrics.G.AllocRateMBSec, Metrics.G.PauseDuration)\n", - " , configFilter: 
Filter.RE(\"Run32\")\n", - ");" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "ChartGCData(\n", - " cardsDM,\n", - " metrics: ML(Metrics.G.PauseDuration.WithCap(100), Metrics.G.PauseStack.WithCap(100), Metrics.G.PauseFQ, Metrics.G.PauseHandles.WithCap(100), Metrics.G.PauseCards, Metrics.G.Suspend,\n", - " new Metric(gc => gc.HeapStats.GCHandleCount, \"GC Handles\", \"#\"),\n", - " new Metric(gc => gc.HeapStats.FinalizationPromotedCount, \"F promoted\", \"#\"))\n", - " //, dataFilter: gc => gc.Generation == 0\n", - " , configFilter: Filter.RE(\"Only\")\n", - " , xMetric: Metrics.X.GCIndex\n", - " , configNameSimplifier: new ListSimplifier((\"2401310010004275\", \"a\"))\n", - " );" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "ChartGCData(rc3DataManager, Metrics.G.HeapSizeBefore, benchmarkFilter: Filter.RE(\"Stage2$\"));" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "ChartGCData(rc3DataManager, Metrics.G.NumHeaps\n", - " , benchmarkFilter: Filter.Names(\"Fortunes\", \"FortunesDapper\", \"JsonHttpsHttpSys\", \"PlaintextQueryString\", \"Stage1\", \"Stage2\", \"PlaintextMvc\")\n", - " , benchmarkMap: x => (x == \"Stage1\" || x == \"Stage2\" ? 
\"S1/2\" : x));" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "var rc3RearrNoBaseRuns = ML((\"v2-rc3\", \"rc3\"), (\"v2-fixrearranged\", \"rc3rearr\"), (\"v2-tune\", \"rc3tune\"));\n", - "\n", - "ChartGCData(rc3DataManager, Metrics.G.NumHeaps\n", - " , configNameSimplifier: new ListSimplifier(rc3RearrNoBaseRuns)\n", - " , benchmarkFilter: new Filter(includeNames: scoutList2)\n", - ");" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Obsolete stuff - for temporary reference" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "// Old comparison/summary code (commented out)\n", - " /*\n", - " public LoadInfo GetComparison(LoadInfo baseline, LoadInfo comparand)\n", - " {\n", - " return new LoadInfo\n", - " {\n", - " MaxWorkingSetMB = DeltaPercent(baseline.MaxWorkingSetMB, comparand.MaxWorkingSetMB),\n", - " P99WorkingSetMB = DeltaPercent(baseline.P99WorkingSetMB, comparand.P99WorkingSetMB),\n", - " P95WorkingSetMB = DeltaPercent(baseline.P95WorkingSetMB, comparand.P95WorkingSetMB),\n", - " P90WorkingSetMB = DeltaPercent(baseline.P90WorkingSetMB, comparand.P90WorkingSetMB),\n", - " P75WorkingSetMB = DeltaPercent(baseline.P75WorkingSetMB, comparand.P75WorkingSetMB),\n", - " P50WorkingSetMB = DeltaPercent(baseline.P50WorkingSetMB, comparand.P50WorkingSetMB),\n", - "\n", - " MaxPrivateMemoryMB = DeltaPercent(baseline.MaxPrivateMemoryMB, comparand.MaxPrivateMemoryMB),\n", - " P99PrivateMemoryMB = DeltaPercent(baseline.P99PrivateMemoryMB, comparand.P99PrivateMemoryMB),\n", - " P95PrivateMemoryMB = DeltaPercent(baseline.P95PrivateMemoryMB, comparand.P95PrivateMemoryMB),\n", - " P90PrivateMemoryMB = DeltaPercent(baseline.P90PrivateMemoryMB, comparand.P90PrivateMemoryMB),\n", - " P75PrivateMemoryMB = DeltaPercent(baseline.P75PrivateMemoryMB, comparand.P75PrivateMemoryMB),\n", - " P50PrivateMemoryMB = DeltaPercent(baseline.P50PrivateMemoryMB, comparand.P50PrivateMemoryMB),\n", - " \n", - " Latency50thMS = DeltaPercent(baseline.Latency50thMS, comparand.Latency50thMS),\n", - " Latency75thMS = DeltaPercent(baseline.Latency75thMS, comparand.Latency75thMS),\n", - " Latency90thMS = DeltaPercent(baseline.Latency90thMS, comparand.Latency90thMS), \n", - " Latency99thMS = DeltaPercent(baseline.Latency99thMS, comparand.Latency99thMS), \n", - " MeanLatencyMS = DeltaPercent(baseline.MeanLatencyMS, comparand.MeanLatencyMS),\n", - " RequestsPerMSec = DeltaPercent(baseline.RequestsPerMSec, comparand.RequestsPerMSec),\n", - " TotalSuspensionTimeMSec = DeltaPercent(baseline.TotalSuspensionTimeMSec, comparand.TotalSuspensionTimeMSec),\n", - " PercentPauseTimeInGC = DeltaPercent(baseline.PercentPauseTimeInGC, comparand.PercentPauseTimeInGC),\n", - " PercentTimeInGC = DeltaPercent(baseline.PercentTimeInGC, comparand.PercentTimeInGC),\n", - " MeanHeapSizeBeforeMB = DeltaPercent(baseline.MeanHeapSizeBeforeMB, comparand.MeanHeapSizeBeforeMB),\n", - " MaxHeapSizeMB = DeltaPercent(baseline.MaxHeapSizeMB, comparand.MaxHeapSizeMB),\n", - " TotalAllocationsMB = DeltaPercent(baseline.TotalAllocationsMB, comparand.TotalAllocationsMB),\n", - " 
GCScore = DeltaPercent(baseline.GCScore, comparand.GCScore),\n", - " MaxHeapCount = DeltaPercent(baseline.MaxHeapCount, comparand.MaxHeapCount),\n", - " NumberOfHeapCountSwitches = DeltaPercent(baseline.NumberOfHeapCountSwitches, comparand.NumberOfHeapCountSwitches),\n", - " NumberOfHeapCountDirectionChanges = DeltaPercent(baseline.NumberOfHeapCountDirectionChanges, comparand.NumberOfHeapCountDirectionChanges),\n", - " Data = baseline.Data,\n", - " Data2 = comparand.Data,\n", - " Run = $\"{baseline.Run} vs. {comparand.Run}\",\n", - " Benchmark = baseline.Benchmark,\n", - " Id = $\"{baseline.Run} vs. {comparand.Run} for {baseline.Benchmark}\"\n", - " };\n", - " }\n", - "\n", - " public Dictionary? GetAllBenchmarksForRun(string run)\n", - " {\n", - " if (!_runToBenchmarkData.TryGetValue(run, out var benchmarksForRun))\n", - " {\n", - " Console.WriteLine($\"No benchmarks found for run: {run}\");\n", - " return null;\n", - " }\n", - "\n", - " return benchmarksForRun;\n", - " }\n", - "\n", - " public void SaveBenchmarkData(string outputPath = \"\")\n", - " {\n", - " if (string.IsNullOrEmpty(outputPath))\n", - " {\n", - " outputPath = _basePath;\n", - " }\n", - "\n", - " StringBuilder sb = new();\n", - " sb.AppendLine($\"Run,Benchmark,Max Working Set (MB), Max Private Memory (MB), Request/MSec, Mean Latency (MSec), Latency 50th Percentile MSec, Latency 75th Percentile MSec, Latency 90th Percentile MSec, Latency 99th Percentile MSec\");\n", - " foreach (var b in _data)\n", - " {\n", - " var val = b.Value; \n", - " sb.AppendLine($\"{val.Run},{val.Benchmark},{val.MaxWorkingSetMB},{val.MaxPrivateMemoryMB},{val.RequestsPerMSec},{val.MeanLatencyMS},{val.Latency50thMS},{val.Latency75thMS},{val.Latency90thMS},{val.Latency99thMS}\");\n", - " }\n", - "\n", - " File.WriteAllText(Path.Combine(outputPath, \"AllBenchmarks.csv\"), sb.ToString());\n", - " }\n", - "\n", - " public Dictionary? GetAllRunsForBenchmark(string benchmark)\n", - " {\n", - " if (!_benchmarkToRunData.TryGetValue(benchmark, out var runsForBenchmark))\n", - " {\n", - " Console.WriteLine($\"No runs found for benchmark: {benchmark}\");\n", - " return null;\n", - " }\n", - "\n", - " return runsForBenchmark;\n", - " }\n", - "*/\n", - "\n", - "/*\n", - " public LoadInfo? 
GetBenchmarkData(string benchmark, string run)\n", - " {\n", - " if (!_benchmarkToRunData.TryGetValue(benchmark, out var runData))\n", - " {\n", - " Console.WriteLine($\"Benchmark: {benchmark} not found!\");\n", - " return null;\n", - " }\n", - "\n", - " if (!runData.TryGetValue(run, out var loadInfo))\n", - " {\n", - " Console.WriteLine($\"Run: {run} not found!\");\n", - " return null;\n", - " }\n", - "\n", - " return loadInfo;\n", - " }\n", - "*/\n", - "\n", - " /*\n", - " public Dictionary GetBenchmarkToComparison(string baselineRun, string comparandRun)\n", - " {\n", - " Dictionary comparisons = new();\n", - "\n", - " Dictionary baselineData = new();\n", - " Dictionary comparandData = new();\n", - " HashSet allBenchmarks = new();\n", - "\n", - " foreach (var d in _data)\n", - " {\n", - " allBenchmarks.Add(d.Value.Benchmark);\n", - "\n", - " string run = d.Key.Split(\"|\", StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)[0];\n", - "\n", - " if (string.CompareOrdinal(run, baselineRun) == 0 && !baselineData.TryGetValue(d.Key, out var baselineInfo))\n", - " {\n", - " baselineInfo = baselineData[d.Value.Benchmark] = d.Value;\n", - " }\n", - "\n", - " else if (string.CompareOrdinal(run, comparandRun) == 0 && !comparandData.TryGetValue(d.Key, out var comparandInfo))\n", - " {\n", - " comparandInfo = comparandData[d.Value.Benchmark] = d.Value;\n", - " }\n", - " }\n", - "\n", - " foreach (var benchmark in allBenchmarks)\n", - " {\n", - " if (!baselineData.TryGetValue(benchmark, out var baselineBenchmarkInfo))\n", - " {\n", - " Console.WriteLine($\"Benchmark: {benchmark} not found on the baseline: {baselineRun}\");\n", - " continue;\n", - " }\n", - "\n", - " if (!comparandData.TryGetValue(benchmark, out var comparandBenchmarkInfo))\n", - " {\n", - " Console.WriteLine($\"Benchmark: {benchmark} not found on the comparand: {comparandRun}\");\n", - " continue;\n", - " }\n", - "\n", - " LoadInfo comparison = GetComparison(baselineBenchmarkInfo, comparandBenchmarkInfo);\n", - " comparisons[benchmark] = comparison;\n", - " }\n", - " \n", - " return comparisons;\n", - " }\n", - " */\n", - "\n", - " // Haven't used this in a while - writes a summary file to disk\n", - "\n", - "/*\n", - " public void SummarizeResults(DataManager dataManager, string outFile, Dictionary info = null)\n", - " {\n", - " if (info == null)\n", - " {\n", - " info = dataManager._data;\n", - " }\n", - "\n", - " using (StreamWriter sw = new StreamWriter(outFile))\n", - " {\n", - " sw.WriteLine(\"{0,12} | {1,35} | {2, 5} | {3, 5:0.00} | {4, 5} | {5, 5:0.00} | {6, 5} | {7, 5:0.00} | {8, 5} | {9, 5:0.00} | {10, 10:0.00} | {11, 10:0.00} | {12, 5:0.00} | {13, 10:0.00} | {14, 10:0.00} |\", \n", - " \"run\", \"benchmark\", \"gen0\", \"pause\", \"gen1\", \"pause\", \"ngc2\", \"pause\", \"bgc\", \"pause\", \"allocMB\", \"alloc/gc\", \"pct\", \"peakMB\", \"meanMB\");\n", - " sw.WriteLine(\"{0,12} | {1,35} | {2, 5} | {3, 5:0.00} | {4, 5} | {5, 5:0.00} | {6, 5} | {7, 5:0.00} | {8, 5} | {9, 5:0.00} | {10, 10:0.00} | {11, 10:0.00} | {12, 5:0.00} | {13, 10:0.00} | {14, 10:0.00} |\", \n", - " \"\", \"\", \"\", \"susp\", \"\", \"susp\", \"\", \"susp\", \"\", \"susp\", \"\", \"\", \"\", \"totalcpu\", \"meancpu\");\n", - " sw.WriteLine(\"{0}\", new String('-', 174));\n", - " foreach (var kvp in info)\n", - " {\n", - " List gcs = kvp.Value?.Data?.GCs;\n", - " if (gcs == null || gcs.Count == 0)\n", - " {\n", - " continue;\n", - " }\n", - "\n", - " int[] gc_counts = new int[4];\n", - " double[] gc_pauses = new double[4];\n", - " 
double[] gc_susps = new double[4];\n", - " for (int i = 0; i < gcs.Count; i++)\n", - " {\n", - " TraceGC gc = gcs[i];\n", - " //if (gc.SuspendDurationMSec > 5) sw.WriteLine($\"i={gc.Number} gen={gc.Generation} suspension={gc.SuspendDurationMSec} totalpause={gc.PauseDurationMSec}\");\n", - " if (gc.Generation < 2)\n", - " {\n", - " gc_counts[gc.Generation]++;\n", - " gc_pauses[gc.Generation] += gc.PauseDurationMSec;\n", - " gc_susps[gc.Generation] += gc.SuspendDurationMSec;\n", - " }\n", - " else\n", - " {\n", - " if (gc.Type == GCType.BackgroundGC)\n", - " {\n", - " gc_counts[3]++;\n", - " gc_pauses[3] += gc.PauseDurationMSec;\n", - " gc_susps[3] += gc.SuspendDurationMSec;\n", - " }\n", - " else\n", - " {\n", - " gc_counts[2]++;\n", - " gc_pauses[2] += gc.PauseDurationMSec;\n", - " gc_susps[2] += gc.SuspendDurationMSec;\n", - " }\n", - " }\n", - " }\n", - " \n", - " for (int i = 0; i < 4; i++)\n", - " {\n", - " if (gc_counts[i] > 0)\n", - " {\n", - " gc_pauses[i] /= gc_counts[i];\n", - " gc_susps[i] /= gc_counts[i];\n", - " }\n", - " }\n", - " \n", - " sw.WriteLine(\"{0,12} | {1,35} | {2, 5} | {3, 5:0.00} | {4, 5} | {5, 5:0.00} | {6, 5} | {7, 5:0.00} | {8, 5} | {9, 5:0.00} | {10, 10:0.00} | {11, 10:0.00} | {12, 5:0.00} | {13, 10:0.00} | {14, 10:0.00} |\",\n", - " kvp.Value.Run, kvp.Value.Benchmark, gc_counts[0], gc_pauses[0], gc_counts[1], gc_pauses[1], gc_counts[2], gc_pauses[2], gc_counts[3], gc_pauses[3],\n", - " kvp.Value.Data.Stats.TotalAllocatedMB, (kvp.Value.Data.Stats.TotalAllocatedMB / gcs.Count), kvp.Value.Data.Stats.GetGCPauseTimePercentage(), kvp.Value.Data.Stats.MaxSizePeakMB, kvp.Value.Data.Stats.MeanSizePeakMB);\n", - " sw.WriteLine(\"{0,12} | {1,35} | {2, 5} | {3, 5:0.00} | {4, 5} | {5, 5:0.00} | {6, 5} | {7, 5:0.00} | {8, 5} | {9, 5:0.00} | {10, 10:0.00} | {11, 10:0.00} | {12, 5:0.00} | {13, 10:0.00} | {14, 10:0.00} |\",\n", - " \"\", \"\", \"\", gc_susps[0], \"\", gc_susps[1], \"\", gc_susps[2], \"\", gc_susps[3],\n", - " \"\", \"\", \"\", kvp.Value.Data.Stats.TotalCpuMSec, kvp.Value.Data.Stats.MeanCpuMSec);\n", - " }\n", - " }}\n", - "\n", - "class MeanDataComparison\n", - "{\n", - " public string bench { get; set; }\n", - " public double baselineMaxPrivateMemoryMB { get; set; }\n", - " public double baselineP50PrivateMemoryMB { get; set; }\n", - " public double baselineRequestsPerMSec { get; set; }\n", - " public double avgMaxPrivateMemoryMBDiff { get; set; }\n", - " public double avgP50PrivateMemoryMBDiff { get; set; }\n", - " public double avgRequestsPerMSecDiff { get; set; }\n", - " public double baselineCVMaxPrivateMemoryMB { get; set; }\n", - " public double baselineCVP50PrivateMemoryMB { get; set; }\n", - " public double baselineCVRequestsPerMSec { get; set; }\n", - " public double fixCVMaxPrivateMemoryMB { get; set; }\n", - " public double fixCVP50PrivateMemoryMB { get; set; }\n", - " public double fixCVRequestsPerMSec { get; set; }\n", - " public double cvMaxPrivateMemoryMBDiff { get; set; }\n", - " public double cvP50PrivateMemoryMBDiff { get; set; }\n", - " public double cvRequestsPerMSecDiff { get; set; }\n", - "}\n", - "\n", - "double GetCV(List dataPoints, out double avg)\n", - "{\n", - " // for (int i = 0; i < dataPoints.Count; i++)\n", - " // {\n", - " // Console.WriteLine(\"item {0}: {1}\", i, dataPoints[i]);\n", - " // }\n", - " double mean = dataPoints.Average();\n", - " avg = mean;\n", - " double sumOfSquaredDifferences = dataPoints.Sum(val => Math.Pow(val - mean, 2));\n", - " double populationStandardDeviation = Math.Sqrt(sumOfSquaredDifferences / 
dataPoints.Count);\n", - " double coefficientOfVariation = (populationStandardDeviation / mean) * 100;\n", - " return coefficientOfVariation;\n", - "}\n", - "\n", - "// accommodates when there are different numbers of iterations in first and second run.\n", - "// returns a list of benchmarks we added to the comparison data\n", - "List SummarizeResultsByBench(DataManager dataManager, List runNames, string benchName = null)\n", - "{\n", - " Dictionary> benchmarkToRunData = dataManager._benchmarkToRunData;\n", - " Console.WriteLine(\"benchmarkToRunData has {0} tests\\n\", benchmarkToRunData.Count);\n", - "\n", - " //bool fLogDetail = false;\n", - " bool fLogDetail = true;\n", - "\n", - " string strSeparator = new String('-', 223);\n", - " Console.WriteLine(\"{0}\", strSeparator);\n", - "\n", - " // key is the name of the run, eg, \"baseline\" or \"fix\". For each run, we add its summary data to a list.\n", - " Dictionary> summaryDataForRuns = new Dictionary>(2);\n", - " List comparisonData = new List(51);\n", - "\n", - " foreach (var benchmarkData in benchmarkToRunData)\n", - " {\n", - " // // Console.WriteLine(\"benchmark is {0}\", benchmarkData.Key);\n", - "\n", - " if ((benchName == null) || benchmarkData.Key.Equals(benchName, StringComparison.OrdinalIgnoreCase))\n", - " {\n", - " summaryDataForRuns.Clear();\n", - "\n", - " if (fLogDetail)\n", - " {\n", - " Console.WriteLine(\"{0,25} | {1,35} | {2, 5} | {3, 5:0.00} | {4, 5} | {5, 5:0.00} | {6, 5} | {7, 5:0.00} | {8, 5} | {9, 5:0.00} | {10, 10:0.00} | {11, 10:0.00} | {12, 5:0.00} | {13, 8:0.00} | {14, 8:0.00} | {15, 8:0.00} | {16, 8:0.00} | {17, 8:0.00} | {18, 4:0.00} | {19, 10} |\",\n", - " \"run\", \"benchmark\", \"gen0\", \"pause\", \"gen1\", \"pause\", \"ngc2\", \"pause\", \"bgc\", \"pause\", \"allocMB\", \"alloc/gc\", \"pct\", \"peakMB\", \"meanMB\", \"max mem\", \"rps\", \"latency\", \"hc\", \"gc count\");\n", - " Console.WriteLine(\"{0}\", strSeparator); \n", - " }\n", - "\n", - " // if no runs observed an hc change, we don't keep it in the summary data.\n", - " int totalHCChanges = 0;\n", - "\n", - " foreach (var kvp in benchmarkData.Value)\n", - " {\n", - " List gcs = kvp.Value?.Data?.GCs;\n", - " // We don't look at benchmarks that did very few GCs\n", - " if ((gcs == null) || (gcs.Count == 0))\n", - " {\n", - " continue;\n", - " }\n", - "\n", - " int[] gc_counts = new int[4];\n", - " double[] gc_pauses = new double[4];\n", - " for (int i = 0; i < gcs.Count; i++)\n", - " {\n", - " TraceGC gc = gcs[i];\n", - " if (gc.Generation < 2)\n", - " {\n", - " gc_counts[gc.Generation]++;\n", - " gc_pauses[gc.Generation] += gc.PauseDurationMSec;\n", - " }\n", - " else\n", - " {\n", - " if (gc.Type == GCType.BackgroundGC)\n", - " {\n", - " gc_counts[3]++;\n", - " gc_pauses[3] += gc.PauseDurationMSec;\n", - " }\n", - " else\n", - " {\n", - " gc_counts[2]++;\n", - " gc_pauses[2] += gc.PauseDurationMSec;\n", - " }\n", - " }\n", - " }\n", - "\n", - " for (int i = 0; i < 4; i++)\n", - " {\n", - " if (gc_counts[i] > 0)\n", - " {\n", - " gc_pauses[i] /= gc_counts[i];\n", - " }\n", - " }\n", - "\n", - " if (fLogDetail)\n", - " {\n", - " Console.WriteLine(\"{0,25} | {1,35} | {2, 5} | {3, 5:0.00} | {4, 5} | {5, 5:0.00} | {6, 5} | {7, 5:0.00} | {8, 5} | {9, 5:0.00} | {10, 10:0.00} | {11, 10:0.00} | {12, 5:0.00} | {13, 8:0.00} | {14, 8:0.00} | {15, 8:0.00} | {16, 8:0.00} | {17, 8:0.00} | {18, 4} | {19, 10} |\",\n", - " kvp.Value.Run, kvp.Value.Benchmark, gc_counts[0], gc_pauses[0], gc_counts[1], gc_pauses[1], gc_counts[2], gc_pauses[2], gc_counts[3], 
gc_pauses[3],\n", - " kvp.Value.Data.Stats.TotalAllocatedMB, (kvp.Value.Data.Stats.TotalAllocatedMB / gcs.Count), kvp.Value.Data.Stats.GetGCPauseTimePercentage(), kvp.Value.Data.Stats.MaxSizePeakMB, kvp.Value.Data.Stats.MeanSizePeakMB,\n", - " kvp.Value.MaxPrivateMemoryMB, kvp.Value.RequestsPerMSec, kvp.Value.MeanLatencyMS, kvp.Value.NumberOfHeapCountSwitches, kvp.Value.Data.Stats.Count);\n", - " }\n", - "\n", - " totalHCChanges += (int)kvp.Value.NumberOfHeapCountSwitches;\n", - "\n", - " for (int runIdx = 0; runIdx < runNames.Count; runIdx++)\n", - " {\n", - " if (kvp.Value.Run.StartsWith(runNames[runIdx]))\n", - " {\n", - " BenchmarkSummaryData data = new BenchmarkSummaryData \n", - " {\n", - " MaxPrivateMemoryMB = kvp.Value.MaxPrivateMemoryMB,\n", - " P50PrivateMemoryMB = kvp.Value.P50PrivateMemoryMB,\n", - " RequestsPerMSec = kvp.Value.RequestsPerMSec,\n", - " };\n", - "\n", - " if (summaryDataForRuns.ContainsKey(runNames[runIdx]))\n", - " {\n", - " summaryDataForRuns[runNames[runIdx]].Add(data);\n", - " }\n", - " else\n", - " {\n", - " List listData = new List(3);\n", - " listData.Add(data);\n", - " summaryDataForRuns.Add(runNames[runIdx], listData);\n", - " }\n", - " break;\n", - " }\n", - " }\n", - " }\n", - "\n", - " if (fLogDetail)\n", - " {\n", - " Console.WriteLine(\"{0}\", strSeparator);\n", - " }\n", - "\n", - " if (totalHCChanges == 0)\n", - " {\n", - " //Console.WriteLine(\"don't do comparison for bench {0}! no HC changes\", benchmarkData.Key);\n", - " continue;\n", - " }\n", - "\n", - " // Now write some summary stuff\n", - " //Console.WriteLine(\"summary dictionary has {0} elements\", summaryDataForRuns.Count);\n", - "\n", - " if (fLogDetail)\n", - " {\n", - " Console.WriteLine(\"{0,45} | {1,10} | {2,10} | {3,10} | {4,10} | {5,10} | {6,10} |\", \"data\", \"max mem\", \"CV%\", \"p50 mem\", \"CV%\", \"rps\", \"CV%\");\n", - " }\n", - "\n", - " int numRuns = summaryDataForRuns.Count;\n", - " double[] avgMaxPrivateMemoryMBForRuns = new double [numRuns];\n", - " double[] avgP50PrivateMemoryMBForRuns = new double [numRuns];\n", - " double[] avgRequestsPerMSecForRuns = new double [numRuns];\n", - " double[] cvMaxPrivateMemoryMBForRuns = new double [numRuns];\n", - " double[] cvP50PrivateMemoryMBForRuns = new double [numRuns];\n", - " double[] cvRequestsPerMSecForRuns = new double [numRuns];\n", - "\n", - " for (int i = 0; i < summaryDataForRuns.Count; i++)\n", - " {\n", - " //Console.WriteLine(\"bench {0} has {1} iteration in run {2}\", benchmarkData.Key, summaryDataForRuns.ElementAt(i).Value.Count, summaryDataForRuns.ElementAt(i).Key);\n", - " List listData = summaryDataForRuns.ElementAt(i).Value;\n", - "\n", - " // for (int runIdx = 0; runIdx < listData.Count; runIdx++)\n", - " // {\n", - " // Console.WriteLine(\"run {0} iter {1} max mem {2}, rps {3}\", summaryDataForRuns.ElementAt(i).Key, runIdx, listData[runIdx].MaxPrivateMemoryMB, listData[runIdx].RequestsPerMSec);\n", - " // }\n", - " List listMaxPrivateMemoryMB = listData.Select(s => s.MaxPrivateMemoryMB).ToList();\n", - " double avgMaxPrivateMemoryMB, avgP50PrivateMemoryMB, avgRequestsPerMSec;\n", - " double cvMaxPrivateMemoryMB = GetCV(listMaxPrivateMemoryMB, out avgMaxPrivateMemoryMB);\n", - " List listP50PrivateMemoryMB = listData.Select(s => s.P50PrivateMemoryMB).ToList();\n", - " double cvP50PrivateMemoryMB = GetCV(listP50PrivateMemoryMB, out avgP50PrivateMemoryMB);\n", - " List listRequestsPerMSec = listData.Select(s => s.RequestsPerMSec).ToList();\n", - " double cvRequestsPerMSec = GetCV(listRequestsPerMSec, out 
avgRequestsPerMSec);\n", - "\n", - " avgMaxPrivateMemoryMBForRuns[i] = avgMaxPrivateMemoryMB;\n", - " avgP50PrivateMemoryMBForRuns[i] = avgP50PrivateMemoryMB;\n", - " avgRequestsPerMSecForRuns[i] = avgRequestsPerMSec;\n", - " cvMaxPrivateMemoryMBForRuns[i] = cvMaxPrivateMemoryMB;\n", - " cvP50PrivateMemoryMBForRuns[i] = cvP50PrivateMemoryMB;\n", - " cvRequestsPerMSecForRuns[i] = cvRequestsPerMSec;\n", - " \n", - " if (fLogDetail)\n", - " {\n", - " Console.WriteLine(\"{0,45} | {1,10:0.00} | {2,10:0.00} | {3,10:0.00} | {4,10:0.00} | {5,10:0.00} | {6,10:0.00} |\", (\"-\" + benchmarkData.Key + \"-\" + summaryDataForRuns.ElementAt(i).Key), \n", - " avgMaxPrivateMemoryMB, cvMaxPrivateMemoryMB, avgP50PrivateMemoryMB, cvP50PrivateMemoryMB, avgRequestsPerMSec, cvRequestsPerMSec);\n", - " }\n", - " }\n", - "\n", - " // I'm just assuming we only have 2 runs.\n", - " MeanDataComparison comp = new MeanDataComparison\n", - " {\n", - " bench = benchmarkData.Key,\n", - " baselineMaxPrivateMemoryMB = avgMaxPrivateMemoryMBForRuns[0],\n", - " baselineP50PrivateMemoryMB = avgP50PrivateMemoryMBForRuns[0],\n", - " baselineRequestsPerMSec = avgRequestsPerMSecForRuns[0],\n", - " avgMaxPrivateMemoryMBDiff = (avgMaxPrivateMemoryMBForRuns[1] - avgMaxPrivateMemoryMBForRuns[0]) * 100.0 / avgMaxPrivateMemoryMBForRuns[0],\n", - " avgP50PrivateMemoryMBDiff = (avgP50PrivateMemoryMBForRuns[1] - avgP50PrivateMemoryMBForRuns[0]) * 100.0 / avgP50PrivateMemoryMBForRuns[0],\n", - " avgRequestsPerMSecDiff = (avgRequestsPerMSecForRuns[1] - avgRequestsPerMSecForRuns[0]) * 100.0 / avgRequestsPerMSecForRuns[0],\n", - " baselineCVMaxPrivateMemoryMB = cvMaxPrivateMemoryMBForRuns[0],\n", - " baselineCVP50PrivateMemoryMB = cvP50PrivateMemoryMBForRuns[0],\n", - " baselineCVRequestsPerMSec = cvRequestsPerMSecForRuns[0],\n", - " fixCVMaxPrivateMemoryMB = cvMaxPrivateMemoryMBForRuns[1],\n", - " fixCVP50PrivateMemoryMB = cvP50PrivateMemoryMBForRuns[1],\n", - " fixCVRequestsPerMSec = cvRequestsPerMSecForRuns[1],\n", - " cvMaxPrivateMemoryMBDiff = (cvMaxPrivateMemoryMBForRuns[1] - cvMaxPrivateMemoryMBForRuns[0]) * 100.0 / cvMaxPrivateMemoryMBForRuns[0],\n", - " cvP50PrivateMemoryMBDiff = (cvP50PrivateMemoryMBForRuns[1] - cvP50PrivateMemoryMBForRuns[0]) * 100.0 / cvP50PrivateMemoryMBForRuns[0],\n", - " cvRequestsPerMSecDiff = (cvRequestsPerMSecForRuns[1] - cvRequestsPerMSecForRuns[0]) * 100.0 / cvRequestsPerMSecForRuns[0], \n", - " };\n", - " comparisonData.Add(comp);\n", - "\n", - " if (fLogDetail)\n", - " {\n", - " Console.WriteLine(\"{0}\\n\", strSeparator);\n", - " }\n", - "\n", - " if (benchName != null)\n", - " {\n", - " break;\n", - " }\n", - " }\n", - " }\n", - "\n", - " if (true)\n", - " {\n", - " Console.WriteLine(\"displaying {0} benches that observed HC changes\", comparisonData.Count);\n", - "\n", - " Console.WriteLine(\"{0,35} | {1, 9} | {2,9} | {3,9} | {4,9} | {5,9} | {6,9} | {7,9} | {8,9} | {9,9} | {10,9} | {11,9} | {12,9} | {13,9} | {14,9} | {15,9} |\",\n", - " \"bench\", \"b max mem\", \"max mem %\", \"b cv%\", \"f cv%\", \"CV% %\", \"b p50 mem\", \"p50 mem %\", \"b cv%\", \"f cv%\", \"CV% %\", \"b rps\", \"rps %\", \"b cv%\", \"f cv%\", \"CV% %\");\n", - "\n", - " var sortedComparisonData = comparisonData.OrderByDescending(a => a.cvMaxPrivateMemoryMBDiff).ToList();\n", - " //var sortedComparisonData = comparisonData.OrderBy(a => a.avgMaxPrivateMemoryMBDiff).ToList();\n", - " for (int benchIdx = 0; benchIdx < sortedComparisonData.Count; benchIdx++)\n", - " {\n", - " MeanDataComparison currentComp = 
sortedComparisonData[benchIdx];\n", - " Console.WriteLine(\"{0,35} | {1,9:0.00} | {2,9:0.00} | {3,9:0.00} | {4,9:0.00} | {5,9:0.00} | {6,9:0.00} | {7,9:0.00} | {8,9:0.00} | {9,9:0.00} | {10,9:0.00} | {11,9:0.00} | {12,9:0.00} | {13,9:0.00} | {14,9:0.00} | {15,9:0.00} |\",\n", - " currentComp.bench,\n", - " currentComp.baselineMaxPrivateMemoryMB, currentComp.avgMaxPrivateMemoryMBDiff, currentComp.baselineCVMaxPrivateMemoryMB, currentComp.fixCVMaxPrivateMemoryMB, currentComp.cvMaxPrivateMemoryMBDiff,\n", - " currentComp.baselineP50PrivateMemoryMB, currentComp.avgP50PrivateMemoryMBDiff, currentComp.baselineCVP50PrivateMemoryMB, currentComp.fixCVP50PrivateMemoryMB, currentComp.cvP50PrivateMemoryMBDiff,\n", - " currentComp.baselineRequestsPerMSec, currentComp.avgRequestsPerMSecDiff, currentComp.baselineCVRequestsPerMSec, currentComp.fixCVRequestsPerMSec, currentComp.cvRequestsPerMSecDiff);\n", - " }\n", - " }\n", - "\n", - " return comparisonData;\n", - "}\n", - "*/\n", - "\n", - "// I haven't used this in a while. I'm not sure if it works.\n", - "/*\n", - " public void SaveDifferences(DataManager dataManager, string baseline, string comparand, List sortingCriteria = null)\n", - " {\n", - " // This function assumes the runs are all in:\n", - " // {build}_{iteration} form.\n", - " // Else, it will except.\n", - "using (StreamWriter sw = new StreamWriter(@\"c:\\home\\repro\\hc\\hc-savediff.txt\")) {\n", - " sw.WriteLine(\"start\");\n", - " // Iteration -> LoadInfos\n", - " Dictionary> iterationData = new();\n", - "\n", - " // Get the max iteration.\n", - " int maxIteration = -1;\n", - " foreach (var run in dataManager._runToBenchmarkData)\n", - " {\n", - " string runName = run.Key;\n", - " int iteration = 0;\n", - " if (run.Key.Contains(\"_\"))\n", - " {\n", - " string[] split = run.Key.Split(\"_\");\n", - " Debug.Assert(split.Length == 2);\n", - " string build = split[0];\n", - " string iterationAsString = split[1];\n", - " iteration = Convert.ToInt32(iterationAsString);\n", - " }\n", - " maxIteration = System.Math.Max(iteration, maxIteration);\n", - " }\n", - " sw.WriteLine(maxIteration);\n", - " // Compute Average Diff\n", - " // Build to Benchmark -> Data\n", - " Dictionary> averageData = new();\n", - "\n", - " for (int i = 0; i <= maxIteration; i++)\n", - " {\n", - " sw.WriteLine(i);\n", - " sw.WriteLine(maxIteration);\n", - " string baselineIteration;\n", - " string comparandIteration;\n", - " if (maxIteration == 0)\n", - " {\n", - " baselineIteration = baseline;\n", - " comparandIteration = comparand;\n", - " }\n", - " else\n", - " {\n", - " baselineIteration = baseline + \"_\" + i.ToString();\n", - " comparandIteration = comparand + \"_\" + i.ToString();\n", - " }\n", - " foreach (var x in dataManager._runToBenchmarkData.Keys) { sw.WriteLine(x); }\n", - " Dictionary baselineIterationRuns = dataManager._runToBenchmarkData[baselineIteration];\n", - " Dictionary comparandIterationRuns = dataManager._runToBenchmarkData[comparandIteration];\n", - "\n", - " foreach (var b in baselineIterationRuns)\n", - " {\n", - " if (!iterationData.TryGetValue(i, out var benchmarks))\n", - " {\n", - " iterationData[i] = benchmarks = new();\n", - " }\n", - "\n", - " benchmarks.Add(dataManager.GetComparison(baselineIterationRuns[b.Key], comparandIterationRuns[b.Key]));\n", - " }\n", - "\n", - " if (!averageData.TryGetValue(baseline, out var bVal))\n", - " {\n", - " averageData[baseline] = bVal = new();\n", - " foreach (var benchmark in baselineIterationRuns)\n", - " {\n", - " bVal[benchmark.Key] = new 
LoadInfo\n", - " {\n", - " Benchmark = benchmark.Key,\n", - " MaxWorkingSetMB = benchmark.Value.MaxWorkingSetMB,\n", - " MaxPrivateMemoryMB = benchmark.Value.MaxPrivateMemoryMB,\n", - " P99PrivateMemoryMB = benchmark.Value.P99PrivateMemoryMB,\n", - " P95PrivateMemoryMB = benchmark.Value.P95PrivateMemoryMB,\n", - " P90PrivateMemoryMB = benchmark.Value.P90PrivateMemoryMB,\n", - " P75PrivateMemoryMB = benchmark.Value.P75PrivateMemoryMB,\n", - " P50PrivateMemoryMB = benchmark.Value.P50PrivateMemoryMB,\n", - " RequestsPerMSec = benchmark.Value.RequestsPerMSec,\n", - " MeanLatencyMS = benchmark.Value.MeanLatencyMS,\n", - " Latency50thMS = benchmark.Value.Latency50thMS, \n", - " Latency75thMS = benchmark.Value.Latency75thMS,\n", - " Latency90thMS = benchmark.Value.Latency90thMS,\n", - " Latency99thMS = benchmark.Value.Latency99thMS,\n", - " MaxHeapCount = benchmark.Value.MaxHeapCount,\n", - " NumberOfHeapCountSwitches = benchmark.Value.NumberOfHeapCountSwitches,\n", - " NumberOfHeapCountDirectionChanges = benchmark.Value.NumberOfHeapCountDirectionChanges,\n", - " };\n", - " }\n", - " }\n", - "\n", - " else\n", - " {\n", - " foreach (var benchmark in baselineIterationRuns)\n", - " {\n", - " var data = bVal[benchmark.Key];\n", - " data.Benchmark = benchmark.Key;\n", - " data.MaxWorkingSetMB += benchmark.Value.MaxWorkingSetMB;\n", - " data.MaxPrivateMemoryMB += benchmark.Value.MaxPrivateMemoryMB;\n", - " data.P99PrivateMemoryMB += benchmark.Value.P99PrivateMemoryMB;\n", - " data.P95PrivateMemoryMB += benchmark.Value.P95PrivateMemoryMB;\n", - " data.P90PrivateMemoryMB += benchmark.Value.P90PrivateMemoryMB;\n", - " data.P75PrivateMemoryMB += benchmark.Value.P75PrivateMemoryMB;\n", - " data.P50PrivateMemoryMB += benchmark.Value.P50PrivateMemoryMB;\n", - " data.RequestsPerMSec += benchmark.Value.RequestsPerMSec;\n", - " data.MeanLatencyMS += benchmark.Value.MeanLatencyMS;\n", - " data.Latency50thMS += benchmark.Value.Latency50thMS; \n", - " data.Latency75thMS += benchmark.Value.Latency75thMS;\n", - " data.Latency90thMS += benchmark.Value.Latency90thMS;\n", - " data.Latency99thMS += benchmark.Value.Latency99thMS;\n", - " data.MaxHeapCount += benchmark.Value.MaxHeapCount;\n", - " data.NumberOfHeapCountSwitches += benchmark.Value.NumberOfHeapCountSwitches;\n", - " data.NumberOfHeapCountDirectionChanges += benchmark.Value.NumberOfHeapCountDirectionChanges;\n", - " }\n", - " }\n", - "\n", - " if (!averageData.TryGetValue(comparand, out var cVal))\n", - " {\n", - " averageData[comparand] = cVal = new();\n", - " foreach (var benchmark in comparandIterationRuns)\n", - " {\n", - " cVal[benchmark.Key] = new LoadInfo\n", - " {\n", - " Benchmark = benchmark.Key,\n", - " MaxWorkingSetMB = benchmark.Value.MaxWorkingSetMB,\n", - " MaxPrivateMemoryMB = benchmark.Value.MaxPrivateMemoryMB,\n", - " P99PrivateMemoryMB = benchmark.Value.P99PrivateMemoryMB,\n", - " P95PrivateMemoryMB = benchmark.Value.P95PrivateMemoryMB,\n", - " P90PrivateMemoryMB = benchmark.Value.P90PrivateMemoryMB,\n", - " P75PrivateMemoryMB = benchmark.Value.P75PrivateMemoryMB,\n", - " P50PrivateMemoryMB = benchmark.Value.P50PrivateMemoryMB,\n", - " RequestsPerMSec = benchmark.Value.RequestsPerMSec,\n", - " MeanLatencyMS = benchmark.Value.MeanLatencyMS,\n", - " Latency50thMS = benchmark.Value.Latency50thMS, \n", - " Latency75thMS = benchmark.Value.Latency75thMS,\n", - " Latency90thMS = benchmark.Value.Latency90thMS,\n", - " Latency99thMS = benchmark.Value.Latency99thMS,\n", - " MaxHeapCount = benchmark.Value.MaxHeapCount,\n", - " 
NumberOfHeapCountSwitches = benchmark.Value.NumberOfHeapCountSwitches,\n", - " NumberOfHeapCountDirectionChanges = benchmark.Value.NumberOfHeapCountDirectionChanges,\n", - " };\n", - " }\n", - " }\n", - "\n", - " else\n", - " {\n", - " foreach (var benchmark in comparandIterationRuns)\n", - " {\n", - " var data = cVal[benchmark.Key];\n", - " data.Benchmark = benchmark.Key;\n", - " data.MaxWorkingSetMB += benchmark.Value.MaxWorkingSetMB;\n", - " data.MaxPrivateMemoryMB += benchmark.Value.MaxPrivateMemoryMB;\n", - " data.P99PrivateMemoryMB += benchmark.Value.P99PrivateMemoryMB;\n", - " data.P95PrivateMemoryMB += benchmark.Value.P95PrivateMemoryMB;\n", - " data.P90PrivateMemoryMB += benchmark.Value.P90PrivateMemoryMB;\n", - " data.P75PrivateMemoryMB += benchmark.Value.P75PrivateMemoryMB;\n", - " data.P50PrivateMemoryMB += benchmark.Value.P50PrivateMemoryMB;\n", - " data.RequestsPerMSec += benchmark.Value.RequestsPerMSec;\n", - " data.MeanLatencyMS += benchmark.Value.MeanLatencyMS;\n", - " data.Latency50thMS += benchmark.Value.Latency50thMS; \n", - " data.Latency75thMS += benchmark.Value.Latency75thMS;\n", - " data.Latency90thMS += benchmark.Value.Latency90thMS;\n", - " data.Latency99thMS += benchmark.Value.Latency99thMS;\n", - " data.MaxHeapCount += benchmark.Value.MaxHeapCount;\n", - " data.NumberOfHeapCountSwitches += benchmark.Value.NumberOfHeapCountSwitches;\n", - " data.NumberOfHeapCountDirectionChanges += benchmark.Value.NumberOfHeapCountDirectionChanges;\n", - " }\n", - " }\n", - " }\n", - "\n", - " foreach (var benchmark in dataManager._benchmarkToRunData)\n", - " {\n", - " foreach (var build in averageData)\n", - " {\n", - " var data = build.Value[benchmark.Key];\n", - " data.Benchmark = benchmark.Key;\n", - " data.MaxWorkingSetMB /= (maxIteration + 1); \n", - " data.MaxPrivateMemoryMB /= (maxIteration + 1);\n", - " data.P99PrivateMemoryMB /= (maxIteration + 1);\n", - " data.P95PrivateMemoryMB /= (maxIteration + 1);\n", - " data.P90PrivateMemoryMB /= (maxIteration + 1);\n", - " data.P75PrivateMemoryMB /= (maxIteration + 1);\n", - " data.P50PrivateMemoryMB /= (maxIteration + 1);\n", - " data.RequestsPerMSec /= (maxIteration + 1);\n", - " data.MeanLatencyMS /= (maxIteration + 1);\n", - " data.Latency50thMS /= (maxIteration + 1);\n", - " data.Latency75thMS /= (maxIteration + 1);\n", - " data.Latency90thMS /= (maxIteration + 1);\n", - " data.Latency99thMS /= (maxIteration + 1);\n", - " data.MaxHeapCount /= (maxIteration + 1);\n", - " data.NumberOfHeapCountSwitches /= (maxIteration + 1);\n", - " data.NumberOfHeapCountDirectionChanges /= (maxIteration + 1);\n", - " }\n", - " }\n", - "\n", - " string DisplayDetailsForABenchmark(LoadInfo val) =>\n", - " $\"{val.Benchmark},{val.MaxWorkingSetMB},{val.MaxPrivateMemoryMB},{val.RequestsPerMSec},{val.MeanLatencyMS},{val.Latency50thMS},{val.Latency75thMS},{val.Latency90thMS},{val.Latency99thMS},{val.NumberOfHeapCountSwitches},{val.MaxHeapCount}\";\n", - "\n", - " if (sortingCriteria == null)\n", - " {\n", - " sortingCriteria = new() { nameof(LoadInfo.MaxPrivateMemoryMB) };\n", - " }\n", - "\n", - " foreach (var s in sortingCriteria)\n", - " {\n", - " Func sortingFunctor = null;\n", - " Func, double> selectionFunctor = null;\n", - "\n", - " switch (s)\n", - " {\n", - " case nameof(LoadInfo.MaxWorkingSetMB):\n", - " sortingFunctor = (data) => data.MaxWorkingSetMB;\n", - " selectionFunctor = (data) => data.Value.MaxWorkingSetMB;\n", - " break;\n", - " case nameof(LoadInfo.MaxPrivateMemoryMB):\n", - " sortingFunctor = (data) => 
data.MaxPrivateMemoryMB;\n", - " selectionFunctor = (data) => data.Value.MaxPrivateMemoryMB;\n", - " break;\n", - " case nameof(LoadInfo.P99PrivateMemoryMB):\n", - " sortingFunctor = (data) => data.P99PrivateMemoryMB;\n", - " selectionFunctor = (data) => data.Value.P99PrivateMemoryMB;\n", - " break;\n", - " case nameof(LoadInfo.P95PrivateMemoryMB):\n", - " sortingFunctor = (data) => data.P95PrivateMemoryMB;\n", - " selectionFunctor = (data) => data.Value.P95PrivateMemoryMB;\n", - " break;\n", - " case nameof(LoadInfo.P90PrivateMemoryMB):\n", - " sortingFunctor = (data) => data.P90PrivateMemoryMB;\n", - " selectionFunctor = (data) => data.Value.P90PrivateMemoryMB;\n", - " break;\n", - " case nameof(LoadInfo.P75PrivateMemoryMB):\n", - " sortingFunctor = (data) => data.P75PrivateMemoryMB;\n", - " selectionFunctor = (data) => data.Value.P75PrivateMemoryMB;\n", - " break;\n", - " case nameof(LoadInfo.P50PrivateMemoryMB):\n", - " sortingFunctor = (data) => data.P50PrivateMemoryMB;\n", - " selectionFunctor = (data) => data.Value.P50PrivateMemoryMB;\n", - " break;\n", - " case nameof(LoadInfo.RequestsPerMSec):\n", - " sortingFunctor = (data) => data.RequestsPerMSec;\n", - " selectionFunctor = (data) => data.Value.RequestsPerMSec;\n", - " break;\n", - " case nameof(LoadInfo.MeanLatencyMS):\n", - " sortingFunctor = (data) => data.MeanLatencyMS;\n", - " selectionFunctor = (data) => data.Value.MeanLatencyMS;\n", - " break;\n", - " case nameof(LoadInfo.Latency50thMS):\n", - " sortingFunctor = (data) => data.Latency50thMS;\n", - " selectionFunctor = (data) => data.Value.Latency50thMS;\n", - " break;\n", - " case nameof(LoadInfo.Latency75thMS):\n", - " sortingFunctor = (data) => data.Latency75thMS;\n", - " selectionFunctor = (data) => data.Value.Latency75thMS;\n", - " break;\n", - " case nameof(LoadInfo.Latency90thMS):\n", - " sortingFunctor = (data) => data.Latency90thMS;\n", - " selectionFunctor = (data) => data.Value.Latency90thMS;\n", - " break;\n", - " case nameof(LoadInfo.Latency99thMS):\n", - " sortingFunctor = (data) => data.Latency99thMS;\n", - " selectionFunctor = (data) => data.Value.Latency99thMS;\n", - " break;\n", - " case nameof(LoadInfo.MaxHeapCount):\n", - " sortingFunctor = (data) => data.MaxHeapCount;\n", - " selectionFunctor = (data) => data.Value.MaxHeapCount;\n", - " break;\n", - " case nameof(LoadInfo.NumberOfHeapCountSwitches):\n", - " sortingFunctor = (data) => data.NumberOfHeapCountSwitches;\n", - " selectionFunctor = (data) => data.Value.NumberOfHeapCountSwitches;\n", - " break;\n", - " case nameof(LoadInfo.NumberOfHeapCountDirectionChanges):\n", - " sortingFunctor = (data) => data.NumberOfHeapCountDirectionChanges;\n", - " selectionFunctor = (data) => data.Value.NumberOfHeapCountDirectionChanges;\n", - " break;\n", - "\n", - " case nameof(BenchmarkSummaryData.TotalSuspensionTimeMSec):\n", - " sortingFunctor = (data) => data.TotalSuspensionTimeMSec;\n", - " selectionFunctor = (data) => data.Value.TotalSuspensionTimeMSec;\n", - " break;\n", - " case nameof(BenchmarkSummaryData.PercentPauseTimeInGC):\n", - " sortingFunctor = (data) => data.PercentPauseTimeInGC;\n", - " selectionFunctor = (data) => data.Value.PercentPauseTimeInGC;\n", - " break;\n", - " case nameof(BenchmarkSummaryData.PercentTimeInGC):\n", - " sortingFunctor = (data) => data.PercentTimeInGC;\n", - " selectionFunctor = (data) => data.Value.PercentTimeInGC;\n", - " break;\n", - " case nameof(BenchmarkSummaryData.MeanHeapSizeBeforeMB):\n", - " sortingFunctor = (data) => data.MeanHeapSizeBeforeMB;\n", - " 
selectionFunctor = (data) => data.Value.MeanHeapSizeBeforeMB;\n", - " break;\n", - " case nameof(BenchmarkSummaryData.MaxHeapSizeMB):\n", - " sortingFunctor = (data) => data.MaxHeapSizeMB;\n", - " selectionFunctor = (data) => data.Value.MaxHeapSizeMB;\n", - " break;\n", - " case nameof(BenchmarkSummaryData.TotalAllocationsMB):\n", - " sortingFunctor = (data) => data.TotalAllocationsMB;\n", - " selectionFunctor = (data) => data.Value.TotalAllocationsMB;\n", - " break;\n", - " case nameof(BenchmarkSummaryData.GCScore):\n", - " sortingFunctor = (data) => data.GCScore;\n", - " selectionFunctor = (data) => data.Value.GCScore;\n", - " break;\n", - "\n", - " default:\n", - " throw new Exception($\"unexpected {s}\");\n", - " }\n", - "\n", - " List> sortedLoadInfo = new(); \n", - " foreach (var iteration in iterationData)\n", - " {\n", - " sortedLoadInfo.Add(iteration.Value.OrderByDescending(sortingFunctor).ToList());\n", - " }\n", - "\n", - " List sortedAverages = new();\n", - "\n", - " foreach (var benchmark in averageData[baseline])\n", - " {\n", - " LoadInfo baselineInfo = benchmark.Value;\n", - " LoadInfo comparandInfo = averageData[comparand][benchmark.Key];\n", - " LoadInfo comparisonInfo = dataManager.GetComparison(baselineInfo, comparandInfo);\n", - " sortedAverages.Add(comparisonInfo);\n", - " }\n", - " sortedAverages = sortedAverages.OrderByDescending(sortingFunctor).ToList();\n", - "\n", - " // Create CSV.\n", - " StringBuilder top = new();\n", - "\n", - " // Iterate over each of the runs.\n", - " const int singleBuildColumnSize = 11;\n", - " int numberOfIterations = maxIteration + 1;\n", - " string columnHeader = \"Benchmark Name,WorkingSetMB,PrivateMemoryMB,RequestsPerMSec,MeanLatencyMS,Latency50thMS,Latency75thMS,Latency90thMS,Latency99thMS,# HC Switches\";\n", - "\n", - " int totalCountOfBenchmarks = sortedLoadInfo.First().Count;\n", - "\n", - " string first = string.Join(\"\", Enumerable.Range(0, numberOfIterations).Select(build => build + string.Join(\"\", Enumerable.Repeat(\",\", singleBuildColumnSize))));\n", - " string second = string.Join(\",,\", Enumerable.Repeat(columnHeader, numberOfIterations));\n", - "\n", - " // Add the average diff.\n", - " first += \"Average Diff %\" + string.Join(\"\", Enumerable.Repeat(\",\", singleBuildColumnSize));\n", - " second += \",,\" + string.Join(\",,\", columnHeader);\n", - "\n", - " top.AppendLine(first);\n", - " top.AppendLine(second);\n", - "\n", - " for (int benchmarkIdx = 0; benchmarkIdx < totalCountOfBenchmarks; benchmarkIdx++)\n", - " {\n", - " string benchmarkData = string.Join(\",,\", Enumerable.Range(0, numberOfIterations).Select(iteration => DisplayDetailsForABenchmark(sortedLoadInfo[iteration][benchmarkIdx])));\n", - " benchmarkData += $\",,{DisplayDetailsForABenchmark(sortedAverages[benchmarkIdx])}\";\n", - "\n", - " top.AppendLine(benchmarkData);\n", - " }\n", - "\n", - " File.WriteAllText(Path.Combine(dataManager._basePath, $\"Difference_{s}.csv\"), top.ToString());\n", - "\n", - " var layout = new Layout.Layout\n", - " {\n", - " xaxis = new Xaxis { title = \"Benchmark Name\" },\n", - " yaxis = new Yaxis { title = $\"{s}\" },\n", - " width = 1500,\n", - " title = $\"Raw values of {s} for Runs\"\n", - " };\n", - "\n", - " List scatters = new();\n", - "\n", - " const int baseColor = 150;\n", - "\n", - " for (int iterationIdx = 0; iterationIdx <= maxIteration; iterationIdx++)\n", - " {\n", - " string baselineIteration;\n", - " string comparandIteration;\n", - " if (maxIteration == 0)\n", - " {\n", - " baselineIteration = 
baseline;\n", - " comparandIteration = comparand;\n", - " }\n", - " else\n", - " {\n", - " baselineIteration = baseline + \"_\" + iterationIdx.ToString();\n", - " comparandIteration = comparand + \"_\" + iterationIdx.ToString();\n", - " }\n", - "\n", - " Dictionary baselineData = dataManager._runToBenchmarkData[baselineIteration];\n", - " Dictionary comparandData = dataManager._runToBenchmarkData[comparandIteration];\n", - "\n", - " if (iterationIdx == 0)\n", - " {\n", - " var sortedBaseline = baselineData.Values.OrderByDescending(sortingFunctor);\n", - " baselineData = sortedBaseline.ToDictionary(d => d.Benchmark);\n", - " }\n", - "\n", - " Scatter baselineScatter = new()\n", - " {\n", - " x = baselineData.Select(b => b.Key),\n", - " y = baselineData.Select(selectionFunctor),\n", - " name = $\"{baselineIteration} - {s}\",\n", - " mode = \"markers\",\n", - " marker = new Marker { color = $\"rgb({baseColor + iterationIdx * 50}, 0, 0)\" } \n", - " };\n", - "\n", - " Scatter comparandScatter = new()\n", - " {\n", - " x = comparandData.Select(b => b.Key),\n", - " y = comparandData.Select(selectionFunctor),\n", - " name = $\"{comparandIteration} - {s}\",\n", - " mode = \"markers\",\n", - " marker = new Marker { color = $\"rgb(0, 0, {baseColor + iterationIdx * 50})\" } \n", - " };\n", - "\n", - " scatters.Add(baselineScatter);\n", - " scatters.Add(comparandScatter);\n", - " }\n", - "\n", - " Chart.Plot(scatters, layout).Display();\n", - " }\n", - " }\n", - " }\n", - " */\n", - "\n", - " /*\n", - " public class BuildNameComparer : IEqualityComparer\n", - " {\n", - " public bool Equals(BuildName b1, BuildName b2) => b1.InData == b2.InData;\n", - " public int GetHashCode(BuildName b) => b.InData.GetHashCode();\n", - " }\n", - " public record PerBuildData((DataType, string) Criteria, string Unit, BuildName BuildName, Func Selector, List Data);\n", - " \n", - " public Func, double>[] summarizers = new Func, double>[] { ComputeVolatility, ComputeMin, ComputeMax, ComputeAverage, ComputeRange, ComputeGeoMean };\n", - " public void SaveData(DataManager dataManager, List builds, List<(DataType, string)> chartCriteria = null)\n", - " => SaveData(dataManager, builds, chartCriteria?.Select(s => new List<(DataType, string)> {s}).ToList());\n", - "\n", - " public void SaveData(DataManager dataManager, List builds, DataType dataType, List chartCriteria = null)\n", - " => SaveData(dataManager, builds, chartCriteria?.Select(s => (dataType, s)).ToList());\n", - " public void SaveData(DataManager dataManager, List builds, DataType dataType, List> chartCriteria = null)\n", - " => SaveData(dataManager, builds, chartCriteria?.Select(s => s.Select(s2 => (dataType, s2)).ToList()).ToList());\n", - " public void SaveDataOne(DataManager dataManager, List builds, DataType dataType, List chartCriteria = null)\n", - " => SaveData(dataManager, builds, new List>() { chartCriteria?.Select(s => (dataType, s)).ToList() });\n", - "\n", - " public void SaveData(DataManager dataManager, List builds, List> chartCriteria = null)\n", - " {\n", - " // Build Parent -> < Run -> < Benchmark -> Data >>>\n", - " Dictionary>> listOfData = new(new BuildNameComparer());\n", - "\n", - " foreach (var build in builds)\n", - " {\n", - " if (!listOfData.TryGetValue(build, out var b))\n", - " {\n", - " listOfData[build] = b = new();\n", - " }\n", - "\n", - " foreach (var run in dataManager._runToBenchmarkData)\n", - " {\n", - " if (run.Key.Contains(build.InData))\n", - " {\n", - " b.Add(run.Key, run.Value);\n", - " }\n", - " }\n", - " }\n", - "\n", - 
" // At this point all the data has been categorized.\n", - "\n", - " // Build Parent -> < DataType -> < Benchmark -> BenchmarkSummaryData >>\n", - " Dictionary[]> buildToBenchmarkSummaryData = new(new BuildNameComparer());\n", - " //summarizers.Select(_ => new Dictionary>()).ToArray();\n", - "\n", - " // Get the Summary Data Per Build.\n", - " foreach (var b in listOfData)\n", - " {\n", - " if (!buildToBenchmarkSummaryData.TryGetValue(b.Key, out var data))\n", - " {\n", - " buildToBenchmarkSummaryData[b.Key] = data = summarizers.Select(_ => new Dictionary()).ToArray();\n", - " }\n", - "\n", - " foreach (var br in dataManager._benchmarkToRunData)\n", - " {\n", - " for (DataType type = DataType.MIN_VALUE; type < DataType.COUNT; ++type)\n", - " {\n", - " data[(int) type][br.Key] = new();\n", - " }\n", - " }\n", - "\n", - " Dictionary> benchmarkToData = new();\n", - " foreach (var run in b.Value)\n", - " {\n", - " foreach (var benchmark in run.Value)\n", - " {\n", - " if (!benchmarkToData.TryGetValue(benchmark.Key, out var d))\n", - " {\n", - " benchmarkToData[benchmark.Key] = d = new();\n", - " }\n", - "\n", - " d.Add(benchmark.Value);\n", - " }\n", - " }\n", - " }\n", - "\n", - " //string DisplayDetailsForABenchmark(BenchmarkSummaryData val) =>\n", - " // $\"{val.Benchmark},{val.MaxWorkingSetMB},{val.MaxPrivateMemoryMB},{val.RequestsPerMSec},{val.MeanLatencyMS},{val.Latency50thMS},{val.Latency75thMS},{val.Latency90thMS},{val.Latency99thMS},{val.NumberOfHeapCountSwitches},{val.MaxHeapCount}\";\n", - " if (chartCriteria == null)\n", - " {\n", - " chartCriteria = new() { new() { (DataType.Volatility, nameof(LoadInfo.MaxPrivateMemoryMB)) } };\n", - " }\n", - "\n", - " foreach (var (group, criteriaIndex) in chartCriteria.WithIndex())\n", - " {\n", - " Func, double> sortingFunctor = null;\n", - " List> selectionFunctors = new();\n", - " List units = new();\n", - "\n", - " foreach (var ((type, s), index) in group.WithIndex())\n", - " {\n", - " Func, double> thisSortingFunctor = null;\n", - " Func selectionFunctor = null;\n", - " string unit = null;\n", - " switch (s)\n", - " {\n", - " //case nameof()\n", - " case nameof(BenchmarkSummaryData.MaxWorkingSetMB):\n", - " thisSortingFunctor = (data) => data.Value.MaxWorkingSetMB;\n", - " selectionFunctor = (data) => data.MaxWorkingSetMB;\n", - " unit = \"MB\";\n", - " break;\n", - " case nameof(BenchmarkSummaryData.MaxPrivateMemoryMB):\n", - " thisSortingFunctor = (data) => data.Value.MaxPrivateMemoryMB;\n", - " selectionFunctor = (data) => data.MaxPrivateMemoryMB;\n", - " unit = \"MB\";\n", - " break;\n", - " case nameof(BenchmarkSummaryData.P99PrivateMemoryMB):\n", - " thisSortingFunctor = (data) => data.Value.P99PrivateMemoryMB;\n", - " selectionFunctor = (data) => data.P99PrivateMemoryMB;\n", - " unit = \"MB\";\n", - " break;\n", - " case nameof(BenchmarkSummaryData.P95PrivateMemoryMB):\n", - " thisSortingFunctor = (data) => data.Value.P95PrivateMemoryMB;\n", - " selectionFunctor = (data) => data.P95PrivateMemoryMB;\n", - " unit = \"MB\";\n", - " break;\n", - " case nameof(BenchmarkSummaryData.P90PrivateMemoryMB):\n", - " thisSortingFunctor = (data) => data.Value.P90PrivateMemoryMB;\n", - " selectionFunctor = (data) => data.P90PrivateMemoryMB;\n", - " unit = \"MB\";\n", - " break;\n", - " case nameof(BenchmarkSummaryData.P75PrivateMemoryMB):\n", - " thisSortingFunctor = (data) => data.Value.P75PrivateMemoryMB;\n", - " selectionFunctor = (data) => data.P75PrivateMemoryMB;\n", - " unit = \"MB\";\n", - " break;\n", - " case 
nameof(BenchmarkSummaryData.P50PrivateMemoryMB):\n", - " thisSortingFunctor = (data) => data.Value.P50PrivateMemoryMB;\n", - " selectionFunctor = (data) => data.P50PrivateMemoryMB;\n", - " unit = \"MB\";\n", - " break;\n", - " case nameof(BenchmarkSummaryData.RequestsPerMSec):\n", - " thisSortingFunctor = (data) => data.Value.RequestsPerMSec;\n", - " selectionFunctor = (data) => data.RequestsPerMSec;\n", - " unit = \"Req/sec\";\n", - " break;\n", - " case nameof(BenchmarkSummaryData.MeanLatencyMS):\n", - " thisSortingFunctor = (data) => data.Value.MeanLatencyMS;\n", - " selectionFunctor = (data) => data.MeanLatencyMS;\n", - " unit = \"ms\";\n", - " break;\n", - " case nameof(BenchmarkSummaryData.Latency50thMS):\n", - " thisSortingFunctor = (data) => data.Value.Latency50thMS;\n", - " selectionFunctor = (data) => data.Latency50thMS;\n", - " unit = \"ms\";\n", - " break;\n", - " case nameof(BenchmarkSummaryData.Latency75thMS):\n", - " thisSortingFunctor = (data) => data.Value.Latency75thMS;\n", - " selectionFunctor = (data) => data.Latency75thMS;\n", - " unit = \"ms\";\n", - " break;\n", - " case nameof(BenchmarkSummaryData.Latency90thMS):\n", - " thisSortingFunctor = (data) => data.Value.Latency90thMS;\n", - " selectionFunctor = (data) => data.Latency90thMS;\n", - " unit = \"ms\";\n", - " break;\n", - " case nameof(BenchmarkSummaryData.Latency99thMS):\n", - " thisSortingFunctor = (data) => data.Value.Latency99thMS;\n", - " selectionFunctor = (data) => data.Latency99thMS;\n", - " unit = \"ms\";\n", - " break;\n", - " case nameof(BenchmarkSummaryData.MaxHeapCount):\n", - " thisSortingFunctor = (data) => data.Value.MaxHeapCount;\n", - " selectionFunctor = (data) => data.MaxHeapCount;\n", - " unit = \"heap count\";\n", - " break;\n", - " case nameof(BenchmarkSummaryData.NumberOfHeapCountSwitches):\n", - " thisSortingFunctor = (data) => data.Value.NumberOfHeapCountSwitches;\n", - " selectionFunctor = (data) => data.NumberOfHeapCountSwitches;\n", - " unit = \"hc switches\";\n", - " break;\n", - " case nameof(BenchmarkSummaryData.NumberOfHeapCountDirectionChanges):\n", - " thisSortingFunctor = (data) => data.Value.NumberOfHeapCountDirectionChanges;\n", - " selectionFunctor = (data) => data.NumberOfHeapCountDirectionChanges;\n", - " unit = \"hc dir changes\";\n", - " break;\n", - " case nameof(BenchmarkSummaryData.TotalSuspensionTimeMSec):\n", - " thisSortingFunctor = (data) => data.Value.TotalSuspensionTimeMSec;\n", - " selectionFunctor = (data) => data.TotalSuspensionTimeMSec;\n", - " unit = \"ms\";\n", - " break;\n", - " case nameof(BenchmarkSummaryData.PercentPauseTimeInGC):\n", - " thisSortingFunctor = (data) => data.Value.PercentPauseTimeInGC;\n", - " selectionFunctor = (data) => data.PercentPauseTimeInGC;\n", - " unit = \"%\";\n", - " break;\n", - " case nameof(BenchmarkSummaryData.PercentTimeInGC):\n", - " thisSortingFunctor = (data) => data.Value.PercentTimeInGC;\n", - " selectionFunctor = (data) => data.PercentTimeInGC;\n", - " unit = \"%\";\n", - " break;\n", - " case nameof(BenchmarkSummaryData.MeanHeapSizeBeforeMB):\n", - " thisSortingFunctor = (data) => data.Value.MeanHeapSizeBeforeMB;\n", - " selectionFunctor = (data) => data.MeanHeapSizeBeforeMB;\n", - " unit = \"MB\";\n", - " break;\n", - " case nameof(BenchmarkSummaryData.MaxHeapSizeMB):\n", - " thisSortingFunctor = (data) => data.Value.MaxHeapSizeMB;\n", - " selectionFunctor = (data) => data.MaxHeapSizeMB;\n", - " unit = \"MB\";\n", - " break;\n", - " case nameof(BenchmarkSummaryData.TotalAllocationsMB):\n", - " thisSortingFunctor 
= (data) => data.Value.TotalAllocationsMB;\n", - " selectionFunctor = (data) => data.TotalAllocationsMB;\n", - " unit = \"MB\";\n", - " break;\n", - " case nameof(BenchmarkSummaryData.GCScore):\n", - " thisSortingFunctor = (data) => data.Value.GCScore;\n", - " selectionFunctor = (data) => data.GCScore;\n", - " unit = \"score\";\n", - " break;\n", - "\n", - " default:\n", - " throw new Exception($\"unexpected {s}\");\n", - " //thisSortingFunctor = (data) => data.Value.MaxPrivateMemoryMB;\n", - " //selectionFunctor = (data) => data.MaxPrivateMemoryMB;\n", - " //unit = \"MB\";\n", - " //break;\n", - " }\n", - " sortingFunctor = sortingFunctor ?? thisSortingFunctor; // keep first one\n", - " selectionFunctors.Add(selectionFunctor);\n", - " units.Add(unit);\n", - " }\n", - "\n", - " var uniqueUnits = units.Zip(group.Select(t => t.Item1)).Select(p => p.Item2 == DataType.Volatility ? \"Volatility Score\" : p.Item1).Distinct();\n", - " if (uniqueUnits.Count() > 2) throw new Exception(\"More than two units in chart\");\n", - "\n", - " List pairedPerBuildData = new();\n", - " List sortedPerBuildData = new();\n", - "\n", - " foreach (BuildName build in buildToBenchmarkSummaryData.Keys)\n", - " {\n", - " for (int groupIndex = 0; groupIndex < group.Count; ++groupIndex)\n", - " {\n", - " var b = buildToBenchmarkSummaryData[build][(int) group[groupIndex].Item1];\n", - " var pairedData = b.Zip(buildToBenchmarkSummaryData[buildToBenchmarkSummaryData.Keys.First()][(int) group[0].Item1]).OrderByDescending(pair => sortingFunctor(pair.Second)).Select(pair => pair.First.Value);\n", - " //pairedPerBuildData.Add(new PerBuildData(group[groupIndex], units[groupIndex], build, selectionFunctors[groupIndex], b.OrderByDescending(sortingFunctor).Select(k => k.Value).ToList()));\n", - " sortedPerBuildData.Add(new PerBuildData(group[groupIndex], units[groupIndex], build, selectionFunctors[groupIndex], pairedData)); // b.OrderByDescending(sortingFunctor).Select(k => k.Value).ToList() ));\n", - " //sortedPerBuildData.Add(new PerBuildData(group[groupIndex], units[groupIndex], build, selectionFunctors[groupIndex], b.OrderByDescending(sortingFunctor).Select(k => k.Value).ToList()));\n", - " }\n", - " }\n", - "\n", - "\n", - "// // Create CSV.\n", - "// StringBuilder top = new();\n", - "//\n", - "// // Iterate over each of the runs.\n", - "// const int singleBuildColumnSize = 10;\n", - "// int numberOfBuilds = buildToBenchmarkSummaryData.Count;\n", - "// string columnHeader = \"Benchmark Name,MaxWorkingSetMB,MaxPrivateMemoryMB,RequestsPerMSec,MeanLatencyMS,Latency50thMS,Latency75thMS,Latency90thMS,Latency99thMS,# HC Switches\";\n", - "//\n", - "// // Assumption: the same benchmarks are present for all runs.\n", - "// int totalCountOfBenchmarks = buildToBenchmarkSummaryData.First().Value.Count;\n", - "//\n", - "// string first = string.Join(\",\", namesOfBuilds.Select(build => build + string.Join(\"\", Enumerable.Repeat(\",\", singleBuildColumnSize))));\n", - "// string second = string.Join(\",,\", Enumerable.Repeat(columnHeader, numberOfBuilds));\n", - "//\n", - "// top.AppendLine(first);\n", - "// top.AppendLine(second);\n", - "//\n", - "// for (int benchmarkIdx = 0; benchmarkIdx < totalCountOfBenchmarks; benchmarkIdx++)\n", - "// {\n", - "// top.AppendLine(string.Join(\",,\", namesOfBuilds.Select(buildName => DisplayDetailsForABenchmark(sortedPerBuildVolatility[buildName][benchmarkIdx]))));\n", - "// }\n", - "//\n", - "// File.WriteAllText(Path.Combine(dataManager._basePath, $\"Volatility_{group[0]}.csv\"), 
top.ToString());\n", - "\n", - " // Chart the sorted % Vol Results.\n", - "\n", - " ColorProvider colorProvider = new();\n", - " //colorProvider.StartColors(builds.Select(build => build.InData));\n", - " List scatters = new();\n", - " //string mode = \"markers\";\n", - " string mode = \"lines+markers\";\n", - " string firstUnit = sortedPerBuildData[0].Unit;\n", - "\n", - " var layout = new Layout.Layout\n", - " {\n", - " xaxis = new Xaxis { title = \"Benchmark Name\" },\n", - " yaxis = new Yaxis { title = firstUnit },\n", - " width = 1200,\n", - " title = $\"GCMetrcs Sorted by {group[0].Item1} of {group[0].Item2} for {builds[0].ToDisplay} (by test)\"\n", - " };\n", - "\n", - " foreach (var (b, index) in sortedPerBuildData.WithIndex())\n", - " {\n", - " var scatter = new Scatter\n", - " {\n", - " x = b.Data.Select(s => s.Benchmark),\n", - " y = b.Data.Select(v => b.Selector(v)),// + 0.1 * index),\n", - " mode = mode,\n", - " name = $\"{b.BuildName.ToDisplay}: {b.Criteria.Item1.ToString()} of {b.Criteria.Item2}\",\n", - " };\n", - "\n", - " if (b.Unit != firstUnit)\n", - " {\n", - " layout.yaxis2 = new Yaxis { title = b.Unit, side = \"right\", overlaying = \"y\" };\n", - " scatter.yaxis = \"y2\";\n", - " }\n", - "\n", - " colorProvider.SetMarker(scatter, b.BuildName.InData, sortedPerBuildData.Count());\n", - " scatters.Add(scatter);\n", - " }\n", - "\n", - " Chart.Plot(scatters, layout).Display();\n", - "\n", - "// scatters.Clear();\n", - "// layout = new Layout.Layout\n", - "// {\n", - "// xaxis = new Xaxis { title = \"Benchmark Index\" },\n", - "// yaxis = new Yaxis { title = firstUnit },\n", - "// width = 1200,\n", - "// title = $\"GCMetrcs Sorted by {group[0].Item1} of {group[0].Item2} for {builds[0].ToDisplay} (by index)\"\n", - "// };\n", - "\n", - "// //colorProvider.StartColors(builds.Select(build => build.InData));\n", - "// foreach (var b in sortedPerBuildData)\n", - "// {\n", - "// var sortedData = b.Data.OrderByDescending(b.Selector);\n", - "// var scatter = new Scatter\n", - "// {\n", - "// x = Enumerable.Range(0, sortedData.Count()),\n", - "// y = sortedData.Select(b.Selector),\n", - "// mode = mode,\n", - "// name = $\"{b.BuildName.ToDisplay}: {b.Criteria.Item1.ToString()} of {b.Criteria.Item2}\",\n", - "// text = sortedData.Select(ss => ss.Benchmark),\n", - "// };\n", - "\n", - "// if (b.Unit != firstUnit)\n", - "// {\n", - "// layout.yaxis2 = new Yaxis { title = b.Unit, side = \"right\", overlaying = \"y\" };\n", - "// scatter.yaxis = \"y2\";\n", - "// }\n", - "\n", - "// colorProvider.SetMarker(scatter, b.BuildName.InData, sortedPerBuildData.Count());\n", - "// scatters.Add(scatter);\n", - "// }\n", - " \n", - "// Chart.Plot(scatters, layout).Display();\n", - " }\n", - " }\n", - "*/\n", - "\n", - "// CompareFull is used to compare different builds.\n", - "\n", - "/*\n", - "public class CollectedBenchmarkData\n", - "{\n", - " public List Data = new();\n", - " public double Sum => Data.Sum(x => x);\n", - " public double Prod => Data.Aggregate(1.0, (prod, next) => prod * next);\n", - " public double Average => Sum / Data.Count();\n", - " public double GeoMean => Math.Pow(Prod, 1.0 / Data.Count());\n", - "\n", - " public void Add(double value) => Data.Add(value);\n", - "}\n", - "\n", - "public class Blob // rename this...\n", - "{\n", - " public CollectedBenchmarkData Baseline = new();\n", - " public List Diffs = new();\n", - " public double Ratio(int i) => Diffs[i].GeoMean / Baseline.GeoMean;\n", - "}\n", - "\n", - "void CheckAdd(string benchmark, CollectedBenchmarkData data, 
Func selector, string includeRE, string excludeRE)\n", - "{\n", - " if ((includeRE != null) && !Regex.Match(benchmark, includeRE).Success) return;\n", - " if ((excludeRE != null) && Regex.Match(benchmark, excludeRE).Success) return;\n", - " if (!data.TryGetValue(benchmark, out var blob)) data[benchmark] = blob = new Blob();\n", - "\n", - "}\n", - "\n", - "// selector -> to extract the data to CompareFull\n", - "// includeRE -> which benchmarks to include (regex), all if null\n", - "// excludeRE -> which benchmarks to exclude (regex), none if null\n", - "// baseline/diffs -> names of builds to compare\n", - "// includeIndiv -> whether to include the individual benchmark comparisons (probably use true)\n", - "void CompareFull(DataManager dataManager, Func selector, string includeRE, string excludeRE, string baseline, List diffs, bool includeIndiv)\n", - "{\n", - " HashSet seen = new();\n", - " // benchmark -> Blob\n", - " Dictionary data = new();\n", - " foreach (var (run, benchmarkData) in dataManager._runToBenchmarkData)\n", - " {\n", - " var build = run.Substring(0, run.LastIndexOf('_'));\n", - " //if (!seen.Contains(build)) { Console.WriteLine(build); seen.Add(build); }\n", - " if (build != baseline && build != diff) continue;\n", - " foreach (var (benchmark, loadInfo) in benchmarkData)\n", - " {\n", - " if ((includeRE != null) && !Regex.Match(benchmark, includeRE).Success) continue;\n", - " if ((excludeRE != null) && Regex.Match(benchmark, excludeRE).Success) continue;\n", - " if (!data.TryGetValue(benchmark, out var blob)) data[benchmark] = blob = new Blob();\n", - " if (build == baseline) blob.Baseline.Add(selector(loadInfo));\n", - " else \n", - " blob.GetData(build == baseline).Add(selector(loadInfo));\n", - " }\n", - " }\n", - "\n", - " List ratios = new();\n", - " Console.WriteLine($\"Baseline: {baseline}\");\n", - " foreach (var (d, i) in diffs.WithIndex())\n", - " {\n", - " Console.WriteLine($\"Diff{i}: {d}\");\n", - " }\n", - " {\n", - " Console.WriteLine($\"{\"Benchmark\",35} | {\"D/B\",5} | {\"Base\",8} | {\"Diff\",8}\");\n", - " Console.WriteLine($\"{new string('-', 35)}-+-{new string('-', 5)}-+-{new string('-', 8)}-+-{new string('-', 8)}\");\n", - " foreach (var (benchmark, value) in data.OrderByDescending(kvp => kvp.Value.Ratio))\n", - " {\n", - " if (includeIndiv)\n", - " {\n", - " Console.WriteLine($\"{benchmark,35} | {value.Ratio,5:N3} | {value.Baseline.GeoMean,8:N2} | {value.Diff.GeoMean,8:N2}\");\n", - " }\n", - " ratios.Add(value.Ratio);\n", - " }\n", - " }\n", - "\n", - " Console.WriteLine($\"{new string('-', 35)}-+-{new string('-', 5)}-+-{new string('-', 8)}-+-{new string('-', 8)}\");\n", - " double baseGeoMean = ComputeGeoMean(data.Select(kvp => kvp.Value.Baseline.GeoMean));\n", - " double diffGeoMean = ComputeGeoMean(data.Select(kvp => kvp.Value.Diff.GeoMean));\n", - " Console.WriteLine($\"{\"GeoMean\",35} | {diffGeoMean / baseGeoMean,5:N3} | {baseGeoMean,8:N2} | {diffGeoMean,8:N2}\");\n", - " Console.WriteLine($\"{\"ArithMean\",35} | {ComputeAverage(ratios),5:N3} | {\"\",8} | {\"\",8}\");\n", - " Console.WriteLine();\n", - "}\n", - "*/\n", - "\n", - "// Display individual benchmark runs\n", - "\n", - "// extract -> to extract the data to CompareFull\n", - "// benchmark -> benchmarks(s) to include\n", - "// exactMatch -> impact matching of benchmark - odd behavior.. 
see code\n", - "\n", - "/*\n", - "void ProcessDataMean(DataManager dataManager, Func extract, string benchmarkName, bool exactMatch = false)\n", - "{\n", - " Console.WriteLine(\"Benchmark {0}\", benchmarkName);\n", - " var names = dataManager.Data.Keys;\n", - " // build -> (sum of GCMetrc, count)\n", - " Dictionary GCMetrcByBuild = new(2);\n", - " foreach (var name in names)\n", - " {\n", - " bool matched = (exactMatch ? name.EndsWith(benchmarkName) : name.Contains(benchmarkName));\n", - " if (matched)\n", - " {\n", - " string[] fields = name.Split(new Char[] { '_' }, StringSplitOptions.RemoveEmptyEntries);\n", - " string buildname = fields[0];\n", - " LoadInfo info = dataManager.Data[name];\n", - " //Console.WriteLine(\"build name is {0}\", buildname);\n", - " if (GCMetrcByBuild.TryGetValue(buildname, out (double sum, int count) p))\n", - " {\n", - " metricByBuild[buildname] = (p.sum + extract(info), p.count + 1);\n", - " }\n", - " else\n", - " {\n", - " metricByBuild.Add(buildname, (extract(info), 1));\n", - " }\n", - "\n", - " //Console.WriteLine(\"metric is now {0:0.00}\", metricByBuild[buildname]);\n", - "\n", - " Console.WriteLine(\"{0,60}: mean latency {1:0.00} ms, time in GC {2:0.00}%, heap switches {3}, max HC {4,2}, maxHeapMB {5:0.0}\",\n", - " name, info.MeanLatencyMS, info.PercentPauseTimeInGC,\n", - " info.NumberOfHeapCountSwitches, info.MaxHeapCount, info.MaxHeapSizeMB);\n", - " }\n", - " }\n", - "\n", - " foreach (var (build, (sum, count)) in metricByBuild)\n", - " {\n", - " Console.WriteLine(\"build {0, 10}: {1:0.00}\", build, sum / count);\n", - " }\n", - "}\n", - "\n", - "void ProcessDataMeanLatency(DataManager dataManager, string benchmarkName, bool exactMatch = false)\n", - " => ProcessDataMean(dataManager, loadInfo => loadInfo.MeanLatencyMS, benchmarkName, exactMatch);\n", - "*/\n", - "\n", - "// Shows benchmark runs that increment the metric two times in a row\n", - "\n", - "// builds -> the build name (with trailing _, like \"fix_\")\n", - "// extract -> data to examine\n", - "// benchmarkFilterRE -> benchmarks to include\n", - "// listAll -> show all data points instead of just the changes - usually too much\n", - "/*\n", - "public void DisplayDoubleIncrement(DataManager dataManager, IEnumerable builds, Func extract, string benchmarkFilterRE, bool listAll = false)\n", - "{\n", - " foreach ((string benchmark, var allRuns) in dataManager._benchmarkToRunData)\n", - " {\n", - " bool firstForBenchmark = true;\n", - " if (!Regex.IsMatch(benchmark, benchmarkFilterRE)) continue;\n", - " foreach ((string run, var results) in allRuns)\n", - " {\n", - " if (!builds.Any(b => run.StartsWith(b))) continue;\n", - " if (results.Data == null)\n", - " {\n", - " Console.WriteLine($\"No data for {benchmark} {run}\");\n", - " continue;\n", - " }\n", - " var doubleIncr =\n", - " results.Data.GCs\n", - " .Where(gc => gc.GlobalHeapHistory != null)\n", - " .Select(extract)\n", - " .SlidingRange(10)\n", - " .SkipWhile(window => ((window[0] + 1) != window[3]) || ((window[3] + 1) != window[6])|| ((window[6] + 1) != window[9]));\n", - " bool anyDouble = doubleIncr.Any(x => true);\n", - " if (!anyDouble) continue;\n", - "\n", - " if (firstForBenchmark)\n", - " {\n", - " Console.WriteLine($\"{benchmark}:\");\n", - " firstForBenchmark = false;\n", - " }\n", - " Console.Write($\" {run,12}:\");\n", - " if (listAll)\n", - " {\n", - " foreach (int num in\n", - " results.Data.GCs\n", - " .Where(gc => gc.GlobalHeapHistory != null)\n", - " .Select(extract)\n", - " .SlidingWindow(-1)\n", - " .Where(window 
=> window.PrevItem != window.CurrentItem)\n", - " .Select(window => window.CurrentItem))\n", - " {\n", - " Console.Write($\" {num}\");\n", - " }\n", - " }\n", - " Console.WriteLine();\n", - " }\n", - " }\n", - "}\n", - "\n", - "\n", - "// Shows benchmark runs that decrease a metric (ever)\n", - "\n", - "// builds -> the build name (with trailing _, like \"fix_\")\n", - "// extract -> data to examine\n", - "// amount -> threshold of decrease to include\n", - "// benchmarkFilterRE -> benchmarks to include\n", - "// listAll -> show all data points instead of just the changes - usually too much\n", - "\n", - "public void DisplayChangesDown(DataManager dataManager, IEnumerable builds, Func extract, double amount, string benchmarkFilterRE, bool listAll = false)\n", - "{\n", - " foreach ((string benchmark, var allRuns) in dataManager._benchmarkToRunData)\n", - " {\n", - " bool firstForBenchmark = true;\n", - " if (!Regex.IsMatch(benchmark, benchmarkFilterRE)) continue;\n", - " foreach ((string run, var results) in allRuns)\n", - " {\n", - " if (!builds.Any(b => run.StartsWith(b))) continue;\n", - " if (results.Data == null)\n", - " {\n", - " Console.WriteLine($\"No data for {benchmark} {run}\");\n", - " continue;\n", - " }\n", - " var dec =\n", - " results.Data.GCs\n", - " .Where(gc => gc.GlobalHeapHistory != null)\n", - " .Select(extract)\n", - " .SlidingWindow(-1)\n", - " .SkipWhile(window => window.PrevItem <= window.CurrentItem + amount);\n", - " bool anyDecrease = dec.Any(x => true);\n", - " if (!anyDecrease) continue;\n", - "\n", - " var incAfterDec = dec.SkipWhile(window => window.PrevItem >= window.CurrentItem - amount);\n", - " var anyIncAfterDec = incAfterDec.Any(x => true);\n", - " if (!anyIncAfterDec) continue;\n", - "\n", - " if (firstForBenchmark)\n", - " {\n", - " Console.WriteLine($\"{benchmark}:\");\n", - " firstForBenchmark = false;\n", - " }\n", - " Console.Write($\" {run,12}:\");\n", - " if (listAll)\n", - " {\n", - " foreach (int num in\n", - " results.Data.GCs\n", - " .Where(gc => gc.GlobalHeapHistory != null)\n", - " .Select(extract)\n", - " .SlidingWindow(-1)\n", - " .Where(window => window.PrevItem != window.CurrentItem)\n", - " .Select(window => window.CurrentItem))\n", - " {\n", - " Console.Write($\" {num}\");\n", - " }\n", - " }\n", - " Console.WriteLine();\n", - " }\n", - " }\n", - "}\n", - "\n", - "public void DisplayHeapChangesDown(DataManager dataManager, IEnumerable builds, string benchmarkFilterRE)\n", - " => DisplayChangesDown(dataManager, builds, gc => gc.GlobalHeapHistory.NumHeaps, 0, benchmarkFilterRE, listAll: true);\n", - "\n", - "// This is used to reduce a list of charts into a small enough number for the notebook to fully display.\n", - "// I have a habit of calling GetPage(0) for small lists, but this isn't needed - just display the whole list.\n", - "// (When displaying all benchmarks, having that and changing the 0 to 1, 2, etc., can be useful)\n", - "static IEnumerable GetPage(this IEnumerable groups, int page, int numPerPage = 18)\n", - " => groups.Skip(numPerPage * page).Take(numPerPage);\n", - "*/\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "// Old examples - use as resources then delete\n", - "\n", - "/*\n", - "\n", - "SummarizeResultsByBench(low4DM, ML(\"v2-fixrearranged-all\", 
\"v2-fixrearranged-all-svr\"));\n", - "SummarizeResultsByBench(low4DM, ML(\"v2-rc3\", \"v2-fixrearranged\"));\n", - "\n", - "SummarizeResults(diffDataManager, latestPath + @\"\\summarize.txt\");\n", - "\n", - "// The specific values are busted here, but more paths can be added to an existing DataManager.\n", - "// Note: Adding/overwriting more benchmarks to an existing loaded directory is untested/etc.\n", - "// This is intended for adding a new run when you already have a baseline or previous run\n", - "// loaded and don't want to wait to read it again.\n", - "\n", - "dataManager.AddData(new[] { slopePath, evalDecrPath }, scoutList.ToList());\n", - "\n", - "// Again, the values are busted, but you can speed up loading if you only want to look at\n", - "// certain benchmarks.\n", - "\n", - "var x = new DataManager(new[] { evalDecrPath }, filter: debugList);\n", - "\n", - "var low4BaseRun = ML(new BuildName(\"v2-fixrearranged-mult-max_\", \"base\"));\n", - "var low4Run = ML(new BuildName(\"v2-fixrearranged-mult-max-h4_\", \"max4\"));\n", - "var svrRun = ML(new BuildName(\"v2-fixrearranged-mult-max-svr_\", \"svr\"));\n", - "var svr4Run = ML(new BuildName(\"v2-fixrearranged-mult-max-svr4_\", \"svr4\"));\n", - "var mult8Run = ML(new BuildName(\"v2-fixrearranged-mult-max-mult8_\", \"mult8\"));\n", - "var mult32Run = ML(new BuildName(\"v2-fixrearranged-mult-max-mult32_\", \"mult32\"));\n", - "var mult8max10Run = ML(new BuildName(\"v2-fixrearranged-mult-max-mult8x10_\", \"m8x10\"));\n", - "var max10Run = ML(new BuildName(\"v2-fixrearranged-mult-max-x10_\", \"x10\"));\n", - "var low4CompRuns = Concat(low4BaseRun, low4Run, svrRun, svr4Run, mult8Run, mult32Run, mult8max10Run, max10Run);\n", - "\n", - "string compareBase = \"v2-fixrearranged-mult-max-svr4\";\n", - "string compareDiff = \"v2-fixrearranged-mult-max-mult8\";\n", - "\n", - "//var extract = (LoadInfo loadInfo) => loadInfo.RequestsPerMSec;\n", - "//var extract = (LoadInfo loadInfo) => loadInfo.Latency50thMS;\n", - "var extract = (LoadInfo loadInfo) => loadInfo.MeanLatencyMS;\n", - "//var extract = (LoadInfo loadInfo) => loadInfo.MaxPrivateMemoryMB;\n", - "//var extract = (LoadInfo loadInfo) => loadInfo.P50PrivateMemoryMB;\n", - "//var extract = (LoadInfo loadInfo) => loadInfo.PercentPauseTimeInGC;\n", - "\n", - "string includeRE = null; // scoutREListShort2;\n", - "string excludeRE = null; // \"ConnectionClose\";\n", - "CompareFull(low4DM, extract, includeRE, excludeRE, compareBase, compareDiff, true);\n", - "//CompareFull(rc3DataManager, (LoadInfo loadInfo) => Math.Max(5, loadInfo.PercentPauseTimeInGC), scoutREListShort, null, compareBase, compareDiff, true);\n", - "\n", - "foreach (string benchmark in new[] { \"ConnectionClose\", \"SingleQueryPlatform\" })\n", - "{\n", - " ProcessDataMeanLatency(rc2DataManager, benchmark, true);\n", - "}\n", - "\n", - "// Not tested for a while.\n", - "\n", - "DisplayDoubleIncrement(rc2DataManager, ML(\"fix_\"), gc => gc.GlobalHeapHistory.NumHeaps, \"\", true)\n", - "\n", - "// Not tested for a while\n", - "\n", - "DisplayChangesDown(rc2DataManager, ML(\"fix_\"), gc => gc.HeapSizeAfterMB, 3, \"\")\n", - "\n", - "// Leftover code - manually displays heap changes\n", - "\n", - "foreach (string build in ML(\"v2-fixrearranged-all_\"))\n", - "{\n", - " for (int i = 0; i < 3; ++i)\n", - " {\n", - " Console.Write($\"{build}{i}:\");\n", - " foreach (int num in\n", - " low4DM.Data[$\"{build}{i} | Stage1\"].Data.GCs\n", - " .Where(gc => gc.GlobalHeapHistory != null)\n", - " .Select(gc => 
gc.GlobalHeapHistory.NumHeaps)\n", - " .SlidingWindow(-1)\n", - " .Where(window => window.PrevItem != window.CurrentItem)\n", - " .Select(window => window.CurrentItem))\n", - " {\n", - " Console.Write($\" {num}\");\n", - " }\n", - " Console.WriteLine();\n", - " }\n", - "}\n", - "\n", - "*/" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "// Old charting examples\n", - "/*\n", - "SaveData(low4DM, low4CompRuns, ML((DataType.Average, nameof(LoadInfo.P90PrivateMemoryMB)), (DataType.Max, nameof(LoadInfo.MaxPrivateMemoryMB)), (DataType.Max, nameof(LoadInfo.MaxHeapCount))));\n", - "SaveData(low4DM, low4CompRuns, DataType.Average, ML(nameof(LoadInfo.RequestsPerMSec)));\n", - "SaveData(dataManager, allRuns, DataType.Average, ML(nameof(LoadInfo.RequestsPerMSec), nameof(LoadInfo.Latency50thMS)));\n", - "SaveData(rc3DataManager, rc3RearrangedRun,\n", - " ML(ML((DataType.Max, nameof(LoadInfo.MaxPrivateMemoryMB)), (DataType.Min, nameof(LoadInfo.MaxPrivateMemoryMB))),\n", - " ML((DataType.Max, nameof(LoadInfo.P99PrivateMemoryMB)), (DataType.Min, nameof(LoadInfo.P99PrivateMemoryMB))),\n", - " ML((DataType.Max, nameof(LoadInfo.P50PrivateMemoryMB)), (DataType.Min, nameof(LoadInfo.P50PrivateMemoryMB)))));\n", - "SaveData(rc3DataManager, rc3Runs, DataType.Average, priMemList);\n", - "SaveData(diffDataManager, vsBaseRuns, priMemList.Select(m => ML((DataType.Min, m), (DataType.Max, m))));\n", - "SaveDataOne(v2DataManager, v2Runs, DataType.Average, priMemList);\n", - "SaveDataOne(diffDataManager, allRuns, DataType.Volatility, volList);\n", - "\n", - "// Using the DataManager - I haven't been using this section.\n", - "\n", - "// The following cells demonstrates how to make use of the ``DataManager``. 
\n", - "\n", - "// The name of the run from the yaml file for which the ASP.NET run is created for.\n", - "string runName = \"base_0\";\n", - "\n", - "Dictionary run = dataManager.GetAllBenchmarksForRun(runName);\n", - "dataManager.Data.Display();\n", - "List> runsWithGCData = dataManager.GetAllBenchmarksForRun(runName).Where(gc => gc.Value.Data != null);\n", - "\n", - "string benchmarkName = \"Stage2\";\n", - "LoadInfo benchmarkData = dataManager.GetBenchmarkData(benchmark: benchmarkName, run: runName);\n", - "benchmarkData.Id\n", - "\n", - "Dictionary allRunsForBenchmark = dataManager.GetAllRunsForBenchmark(benchmark: benchmarkName);\n", - "allRunsForBenchmark.Keys\n", - "\n", - "dataManager.SaveBenchmarkData()\n", - "\n", - "// ## Build to Build Comparison and Volatility Analysis\n", - "\n", - "// I haven't been using this section, but it is an obvious one to start using again.\n", - "\n", - "var run1_vs_run2 = diffDataManager.GetBenchmarkToComparison(\"tp3-m_0\", \"tp3-m_1\");\n", - "\n", - "static bool IsNotInvalidDouble(double val) => \n", - " !double.IsNaN(val) && \n", - " !double.IsInfinity(val) && \n", - " !double.IsPositiveInfinity(val) && \n", - " !double.IsNegativeInfinity(val);\n", - "\n", - "public class SummaryTable\n", - "{\n", - " public SummaryTable(Dictionary> comparisons)\n", - " {\n", - " Comparisons = comparisons;\n", - " }\n", - "\n", - " private string GenerateSummaryForComparison(string comparisonKey, Dictionary comparison)\n", - " {\n", - " double averageWorkingSet = comparison.Where(a => IsNotInvalidDouble(a.Value.MaxWorkingSetMB)).Average(a => a.Value.MaxWorkingSetMB);\n", - " double privateMemory = comparison.Where(a => IsNotInvalidDouble(a.Value.MaxPrivateMemoryMB)).Average(a => a.Value.MaxPrivateMemoryMB);\n", - " double throughput = comparison.Where(a => IsNotInvalidDouble(a.Value.RequestsPerMSec)).Average(a => a.Value.RequestsPerMSec);\n", - " double meanLatency = comparison.Where(a => IsNotInvalidDouble(a.Value.MeanLatencyMS)).Average(a => a.Value.MeanLatencyMS);\n", - "\n", - " double p50Latency = comparison.Where(a => IsNotInvalidDouble(a.Value.Latency50thMS)).Average(a => a.Value.Latency50thMS);\n", - " double p75Latency = comparison.Where(a => IsNotInvalidDouble(a.Value.Latency75thMS)).Average(a => a.Value.Latency75thMS);\n", - " double p90Latency = comparison.Where(a => IsNotInvalidDouble(a.Value.Latency90thMS)).Average(a => a.Value.Latency90thMS);\n", - " double p99Latency = comparison.Where(a => IsNotInvalidDouble(a.Value.Latency99thMS)).Average(a => a.Value.Latency99thMS);\n", - "\n", - " return $\"{comparisonKey},{averageWorkingSet},{privateMemory},{throughput},{meanLatency},{p50Latency},{p75Latency},{p90Latency},{p99Latency}\";\n", - " }\n", - "\n", - " public string GenerateSummaryForComparisons()\n", - " {\n", - " StringBuilder sb = new();\n", - " sb.AppendLine(\"Build to Build,Average Max Working Set (MB) %, Average Max Private Memory (MB) %, Average Request/MSec %, Average Mean Latency (MSec), Average P50 Latency (MSec) %, Average P75 Latency (MSec) %, Average P90 Latency (MSec) %, Average P99 Latency (MSec) %\");\n", - " foreach (var comparison in Comparisons)\n", - " {\n", - " sb.AppendLine(GenerateSummaryForComparison(comparison.Key, comparison.Value));\n", - " }\n", - "\n", - " return sb.ToString();\n", - " }\n", - "\n", - " private int GetCountOfRegressions(List selected, double thresholdPercentage, bool lessIsBetter = true)\n", - " {\n", - " // If throughput, less is worse => threshold <= -5%.\n", - " var comparison = selected.Where(d 
=> IsNotInvalidDouble(d) && ( (lessIsBetter) ? (d >= thresholdPercentage) : (d <= -thresholdPercentage)));\n", - " return comparison.Count;\n", - " }\n", - "\n", - " private int GetCountOfAbsRegressions(List selected, double thresholdPercentage)\n", - " {\n", - " var comparison = selected.Where(d => IsNotInvalidDouble(d) && Math.Abs(d) >= thresholdPercentage);\n", - " return comparison.Count;\n", - " }\n", - "\n", - " // # of benchmarks with throughput regressed by >= 5% and 10%\n", - " private string GenerateRegressionSummary(string comparisonKey, Dictionary comparison)\n", - " {\n", - " List workingSet = comparison.Select(c => c.Value.MaxWorkingSetMB);\n", - " int workingSetCountGT_5 = GetCountOfRegressions(workingSet, 5);\n", - " int workingSetCountGT_10 = GetCountOfRegressions(workingSet, 10);\n", - "\n", - " List privateMemory = comparison.Select(c => c.Value.MaxPrivateMemoryMB);\n", - " int privateMemoryCountGT_5 = GetCountOfRegressions(privateMemory, 5);\n", - " int privateMemoryCountGT_10 = GetCountOfRegressions(privateMemory, 10);\n", - "\n", - " List throughput = comparison.Select(a => a.Value.RequestsPerMSec);\n", - " int throughputCountGT_5 = GetCountOfRegressions(throughput, 5, false);\n", - " int throughputCountGT_10 = GetCountOfRegressions(throughput, 10, false);\n", - "\n", - " List meanLatency = comparison.Select(a => a.Value.MeanLatencyMS);\n", - " int meanLatencyCountGT_5 = GetCountOfRegressions(meanLatency, 5);\n", - " int meanLatencyCountGT_10 = GetCountOfRegressions(meanLatency, 10);\n", - "\n", - " List p50Latency = comparison.Select(a => a.Value.Latency50thMS);\n", - " int p50LatencyCountGT_5 = GetCountOfRegressions(p50Latency, 5);\n", - " int p50LatencyCountGT_10 = GetCountOfRegressions(p50Latency, 10);\n", - "\n", - " List p75Latency = comparison.Select(a => a.Value.Latency75thMS);\n", - " int p75LatencyCountGT_5 = GetCountOfRegressions(p75Latency, 5);\n", - " int p75LatencyCountGT_10 = GetCountOfRegressions(p75Latency, 10);\n", - "\n", - " List p90Latency = comparison.Select(a => a.Value.Latency90thMS);\n", - " int p90LatencyCountGT_5 = GetCountOfRegressions(p90Latency, 5);\n", - " int p90LatencyCountGT_10 = GetCountOfRegressions(p90Latency, 10);\n", - " \n", - " List p99Latency = comparison.Select(a => a.Value.Latency99thMS);\n", - " int p99LatencyCountGT_5 = GetCountOfRegressions(p99Latency, 5);\n", - " int p99LatencyCountGT_10 = GetCountOfRegressions(p99Latency, 10);\n", - "\n", - " return $\"{comparisonKey},{workingSetCountGT_5},{workingSetCountGT_10},{privateMemoryCountGT_5},{privateMemoryCountGT_10},{throughputCountGT_5},{throughputCountGT_10},{meanLatencyCountGT_5},{meanLatencyCountGT_10},{p50LatencyCountGT_5},{p50LatencyCountGT_10},{p75LatencyCountGT_5},{p75LatencyCountGT_10},{p90LatencyCountGT_5},{p90LatencyCountGT_10},{p99LatencyCountGT_5},{p99LatencyCountGT_10}\";\n", - " }\n", - "\n", - " public string GenerateRegressionSummaryForComparisons()\n", - " {\n", - " StringBuilder sb = new();\n", - " sb.AppendLine(\"Build to Build,Reg. Count - Working Set (MB),Large Reg. Count - Working Set (MB),Reg. Count - Max Private Memory (MB),Large Reg. Count - Max Private Memory (MB),Reg. Count - Throughput, Large Reg. Count - Throughput,Reg. Count - Mean Latency,Large Reg. Count - Mean Latency,Reg. Count - P50 Latency, Large Reg. Count - P50 Latency, Reg. Count - P75 Latency, Large Reg. Count - P75 Latency,Reg. Count - P90 Latency, Large Reg. Count - P90 Latency,Reg. Count - P99 Latency, Large Reg. 
Count - P99 Latency\");\n", - " foreach (var comparison in Comparisons)\n", - " {\n", - " sb.AppendLine(GenerateRegressionSummary(comparison.Key, comparison.Value));\n", - " }\n", - "\n", - " return sb.ToString();\n", - " }\n", - "\n", - " public Dictionary GenerateRegressionAnalysisForComparison(string comparisonKey)\n", - " {\n", - " StringBuilder sb = new();\n", - " Dictionary csvData = new();\n", - " Dictionary comparison = Comparisons[comparisonKey];\n", - "\n", - " string header = \"Benchmark,MaxWorkingSetMB,MaxPrivateMemoryMB,RequestsPerMSec,MeanLatencyMS,Latency50thMS,Latency75thMS,Latency90thMS,Latency99thMS\";\n", - "\n", - " // Generate Memory Regressions.\n", - " StringBuilder memRegressions = new();\n", - " memRegressions.AppendLine(header);\n", - " foreach (var benchmark in comparison.Where(c => c.Value.MaxWorkingSetMB >= 10 || c.Value.MaxPrivateMemoryMB >= 10 ))\n", - " {\n", - " memRegressions.AppendLine($\"{benchmark.Key},{benchmark.Value.MaxWorkingSetMB},{benchmark.Value.MaxPrivateMemoryMB},{benchmark.Value.RequestsPerMSec},{benchmark.Value.MeanLatencyMS},{benchmark.Value.Latency50thMS},{benchmark.Value.Latency75thMS},{benchmark.Value.Latency90thMS},{benchmark.Value.Latency99thMS}\");\n", - " }\n", - " csvData[\"memory\"] = memRegressions.ToString();\n", - "\n", - " // Generate Throughput Regressions.\n", - " StringBuilder throughputRegressions = new();\n", - " throughputRegressions.AppendLine(header);\n", - " foreach (var benchmark in comparison.Where(c => c.Value.RequestsPerMSec <= -10))\n", - " {\n", - " throughputRegressions.AppendLine($\"{benchmark.Key},{benchmark.Value.MaxWorkingSetMB},{benchmark.Value.MaxPrivateMemoryMB},{benchmark.Value.RequestsPerMSec},{benchmark.Value.MeanLatencyMS},{benchmark.Value.Latency50thMS},{benchmark.Value.Latency75thMS},{benchmark.Value.Latency90thMS},{benchmark.Value.Latency99thMS}\");\n", - " }\n", - " csvData[\"throughput\"] = throughputRegressions.ToString();\n", - "\n", - " // Generate Latency Regressions.\n", - " StringBuilder latencyRegressions = new();\n", - " latencyRegressions.AppendLine(header);\n", - " foreach (var benchmark in comparison.Where(c => c.Value.MeanLatencyMS >= 10 || \n", - " c.Value.Latency50thMS >= 10 || \n", - " c.Value.Latency75thMS >= 10 || \n", - " c.Value.Latency90thMS >= 10 || \n", - " c.Value.Latency99thMS >= 10 ))\n", - " {\n", - " latencyRegressions.AppendLine($\"{benchmark.Key},{benchmark.Value.MaxWorkingSetMB},{benchmark.Value.MaxPrivateMemoryMB},{benchmark.Value.RequestsPerMSec},{benchmark.Value.MeanLatencyMS},{benchmark.Value.Latency50thMS},{benchmark.Value.Latency75thMS},{benchmark.Value.Latency90thMS},{benchmark.Value.Latency99thMS}\");\n", - " }\n", - " csvData[\"latency\"] = latencyRegressions.ToString();\n", - "\n", - " // All.\n", - " StringBuilder all = new();\n", - " all.AppendLine(header);\n", - " foreach (var benchmark in comparison)\n", - " {\n", - " all.AppendLine($\"{benchmark.Key},{benchmark.Value.MaxWorkingSetMB},{benchmark.Value.MaxPrivateMemoryMB},{benchmark.Value.RequestsPerMSec},{benchmark.Value.MeanLatencyMS},{benchmark.Value.Latency50thMS},{benchmark.Value.Latency75thMS},{benchmark.Value.Latency90thMS},{benchmark.Value.Latency99thMS}\");\n", - " }\n", - " csvData[\"all\"] = all.ToString();\n", - "\n", - " return csvData;\n", - " }\n", - "\n", - " public void SaveComparisons(string basePath)\n", - " {\n", - " // Add Summary for Comparisons.\n", - " string summaryOfComparisons = GenerateSummaryForComparisons();\n", - " File.WriteAllText(Path.Combine(basePath, 
\"SummaryOfComparisons.csv\"), summaryOfComparisons);\n", - "\n", - " // Add Regression Summary for Comparisons.\n", - " string regressionSummary = GenerateRegressionSummaryForComparisons();\n", - " File.WriteAllText(Path.Combine(basePath, \"RegressionSummary.csv\"), regressionSummary);\n", - "\n", - " // Add Large Regression Analysis for Comparison.\n", - " string perComparisonDataPath = Path.Combine(basePath, \"PerComparisonData\");\n", - " if (!Directory.Exists(perComparisonDataPath))\n", - " {\n", - " Directory.CreateDirectory(perComparisonDataPath);\n", - " }\n", - "\n", - " foreach (var comparison in Comparisons)\n", - " {\n", - " string comparisonPath = Path.Combine(perComparisonDataPath, comparison.Key);\n", - " Directory.CreateDirectory(comparisonPath);\n", - "\n", - " Dictionary regressionComparisons = GenerateRegressionAnalysisForComparison(comparison.Key);\n", - "\n", - " // Memory\n", - " File.WriteAllText(Path.Combine(comparisonPath, \"MemoryRegressions.csv\"), regressionComparisons[\"memory\"]);\n", - "\n", - " // Throughput\n", - " File.WriteAllText(Path.Combine(comparisonPath, \"ThroughputRegressions.csv\"), regressionComparisons[\"throughput\"]);\n", - "\n", - " // Latency\n", - " File.WriteAllText(Path.Combine(comparisonPath, \"LatencyRegressions.csv\"), regressionComparisons[\"latency\"]);\n", - "\n", - " // All\n", - " File.WriteAllText(Path.Combine(comparisonPath, \"All.csv\"), regressionComparisons[\"all\"]);\n", - " }\n", - " }\n", - "\n", - " public Dictionary> Comparisons { get; }\n", - "}\n", - "\n", - "\n", - "//var datas3_vs_datas_4 = baseDataManager.GetBenchmarkToComparison(\"base_0\", \"base_1\");\n", - "\n", - "Dictionary> comparisons = new()\n", - "{\n", - " { nameof(run1_vs_run2), run1_vs_run2 },\n", - "};\n", - "\n", - "SummaryTable summaryTable = new(comparisons);\n", - "summaryTable.SaveComparisons(diffPath);\n", - "\n", - "\n", - "// I don't use this anymore (or GCCharting at all)\n", - "\n", - "void ChartProperty(LoadInfo baseline, LoadInfo comparand, string nameOfProperty)\n", - "{\n", - " GCProcessData baselineGC = baseline.Data;\n", - " GCProcessData comparandGC = comparand.Data;\n", - "\n", - " List<(string scatterName, List gcs)> gcData = \n", - " new()\n", - " {\n", - " { ( scatterName : $\"{nameOfProperty} for {baseline.Id}\" , gcs : baselineGC.GCs )},\n", - " { ( scatterName : $\"{nameOfProperty} for {comparand.Id}\" , gcs : comparandGC.GCs )}\n", - " };\n", - "\n", - " GCCharting.ChartGCData(gcData : gcData, \n", - " title : $\"{nameOfProperty} Comparison Between {baseline.Run} and {comparand.Run}\", \n", - " isXAxisRelative : false,\n", - " fieldName : nameOfProperty).Display();\n", - "\n", - "}\n", - "\n", - "void ChartProperty(LoadInfo comparison, string nameOfProperty)\n", - "{\n", - " GCProcessData baselineGC = comparison.Data;\n", - " GCProcessData comparandGC = comparison.Data2;\n", - "\n", - " List<(string scatterName, List gcs)> gcData = \n", - " new()\n", - " {\n", - " { ( scatterName : $\"{nameOfProperty} for Baseline\" , gcs : baselineGC.GCs )},\n", - " { ( scatterName : $\"{nameOfProperty} for Comparand\" , gcs : comparandGC.GCs )}\n", - " };\n", - "\n", - " GCCharting.ChartGCData(gcData : gcData, \n", - " title : $\"{nameOfProperty} Comparison\", \n", - " isXAxisRelative : false,\n", - " fieldName : nameOfProperty).Display();\n", - "\n", - "}\n", - "\n", - "void ChartProperty(IEnumerable info, string nameOfProperty)\n", - "{\n", - " List<(string scatterName, List gcs)> gcData =\n", - " info.Select(li => (scatterName: 
$\"{nameOfProperty}\", gcs: li.Data.GCs)).ToList();\n", - " GCCharting.ChartGCData(gcData: gcData, title: \"${nameOfProperty} Comparison\", isXAxisRelative: false, fieldName: nameOfProperty).Display();\n", - "}\n", - "\n", - "\n", - "var run1_Benchmark = diffDataManager.GetBenchmarkData(benchmark: \"CachingPlatform\", \"tp3-m_0\");\n", - "var run2_Benchmark = diffDataManager.GetBenchmarkData(benchmark: \"CachingPlatform\", \"tp3-m_1\");\n", - "\n", - "// Chart the PauseDurationMSec for the run1 vs. run2.\n", - "ChartProperty(baseline: run1_Benchmark, comparand: run2_Benchmark, nameof(TraceGC.HeapCount))\n", - "\n", - "\n", - "// Leftover code that bucketed ranges of values for metrics and displayed them in columns\n", - "// - probably out-of-date (and very hardwired to the data I was looking at) - probably ignore this\n", - "\n", - "int[] ranges = {1,11,12,13};\n", - "\n", - "for (int i = 0; i < 4; ++i)\n", - "{\n", - " string trace = \"fixed-newlinear-nosmooth_\" + i;\n", - " Console.Write($\"{trace}: \");\n", - " //var cpData = diffDataManager.GetBenchmarkData(\"MultipleQueriesPlatform\", trace);\n", - " var cpData = noDataManager.GetBenchmarkData(\"Fortunes\", trace);\n", - "\n", - " int prevNumHeaps = -1;\n", - " int count = 0;\n", - " int nextRangeIndex = 0;\n", - " foreach (int numHeaps in cpData.Data.GCs.Select(gc => gc.GlobalHeapHistory?.NumHeaps).Where(x => x.HasValue).Append(-1))\n", - " {\n", - " if (numHeaps == prevNumHeaps)\n", - " {\n", - " count++;\n", - " continue;\n", - " }\n", - "\n", - " if (count != 0)\n", - " {\n", - " int skip = ranges.Skip(nextRangeIndex).TakeWhile(r => prevNumHeaps > r).Count();\n", - " Console.Write(new string(' ', skip * 13));\n", - " nextRangeIndex += skip + 1;\n", - " Console.Write($\"{count,5} @ {prevNumHeaps,2} {(numHeaps == -1 ? ' ' : (numHeaps > prevNumHeaps ? '^' : 'v'))} \");\n", - " }\n", - " prevNumHeaps = numHeaps;\n", - " count = 1;\n", - " }\n", - " Console.WriteLine();\n", - "}\n", - "*/" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Debugging" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "System.Diagnostics.Process.GetCurrentProcess().Id" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "dotnet_interactive": { - "language": "csharp" - }, - "polyglot_notebook": { - "kernelName": "csharp" - }, - "vscode": { - "languageId": "polyglot-notebook" - } - }, - "outputs": [], - "source": [ - "#!about" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": ".NET (C#)", - "language": "C#", - "name": ".net-csharp" - }, - "language_info": { - "name": "python" - }, - "orig_nbformat": 4, - "polyglot_notebook": { - "kernelInfo": { - "defaultKernelName": "csharp", - "items": [ - { - "aliases": [], - "name": "csharp" - } - ] - } - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/src/benchmarks/gc/GC.Infrastructure/Notebooks/BenchmarkAnalysis.md b/src/benchmarks/gc/GC.Infrastructure/Notebooks/BenchmarkAnalysis.md new file mode 100644 index 00000000000..33baecb3ce5 --- /dev/null +++ b/src/benchmarks/gc/GC.Infrastructure/Notebooks/BenchmarkAnalysis.md @@ -0,0 +1,120 @@ +# Benchmark Analysis + +This notebook contains code for producing charts (and soon, tables) for GC benchmarks. 
It can currently process data +from the ASP.NET benchmarks obtained using crank as well as ETL data. One of the design points of this notebook is +that the different operations have a similar "feel"; they have many optional parameters that build on default settings. +The parameters are intended to be identical (or at least similar) across operations. + +The data is organized in a hierarchy. (See `TopLevelData`.) + +- A "run" consists of multiple "configurations". (See `RunData`.) +- A "configuration" consists of multiple "benchmarks". (See `ConfigData`.) +- A "benchmark" consists of multiple "iterations". (See `BenchmarkData`.) +- An "iteration" consists of multiple GCs. (See `IterationData`.) + +In addition to multiple instances of the next lower level, each level contains data appropriate for that level. +For example, an iteration of an ASP.NET benchmark will have an RPS (requests per second) score. The overall +benchmark could have the average RPS score across the iterations (though this can also be computed at presentation-time - +more on that later). + +Data is stored in a `DataManager` object. This class has a number of `Create...` and `Add...` methods. They process +data identically; a `Create` method is simply shorthand for `new` and `Add` and is the common usage. + +`CreateAspNetData` expects the directory structure that is produced by the GC infrastructure for ASP.NET. For example: + +``` xml +\_0\._0.log + \.gc.etl + \._0.log + \.gc.etl + \_1\... + \_2\... + \_3\... + \_0\... + \_1\... + \_2\... + \_3\... +``` + +Because of the way these names are generated, do not put `.` in any name or `_` in configuration names. The `_0`, `_1`, +etc., are the iterations. + +Many operations including `CreateAspNetData` use the `Filter` class. It is a consistent way to specify names to +include or exclude and can be done by listing names or by regular expression. `CreateAspNetData` can filter by +config or benchmark. (To filter by run, simply don't pass that directory to the method.) By default, it has a list +of process names that it will look for in the ETL data, but the optional parameter `pertinentProcesses` can override +that. + +`CreateGCTrace(s)` only loads ETL files. Since there is no context for a default value, `pertinentProcesses` must be +specified. GC traces can be loaded in two ways. The first expects one relevant process per trace and defaults to +setting the run as blank, the config as the enclosing directory name, and the iteration as zero. The +benchmark name is extracted from the ETL filename but can be overridden or filtered. The second allows multiple +processes per trace. It uses the process as the benchmark name and promotes the other values up one level (e.g., +the ETL filename becomes the config). This behavior is controlled by the `loadMultipleProcesses` parameter. + +The data is stored in nested dictionaries that can be directly modified or accessed through a number of `Get...` +helpers. However, typically charting (and soon tabling) methods will be called next. There are charting methods +for each of the three levels (the "run" level is not included since aggregating across configurations is not +expected), and at each level there are two overloads that only differ based on whether they expect one metric or +a list of metrics. + +- `ChartBenchmarks` will chart benchmarks across the x-axis using aggregation of data from the iterations. Each + run/configuration will be a data series. 
+- `ChartIterations` will chart benchmarks across the x-axis using data from each iteration. Each + run/configuration/iteration will be a data series. +- `ChartGCData` will chart GCs across the x-axis using data from each iteration. Each run/configuration/iteration + will be a data series, and by default each benchmark will be on a different chart. + +Each charting method requires one or more metrics to include in the chart. These are represented by the `Metric` +class, which encapsulates a way to extract the metric from the data source, a label for that data, and the unit +for that data. Many examples of metrics are provided in the `Metrics` class. Data from one level can be +aggregated to the next level via the `Metrics.Promote` methods and the `Aggregation` class. For example, the +average GC pause time for the execution of a single iteration can be extracted using +`Metrics.Promote(Metrics.G.PauseDuration, Aggregation.Max)`, though this particular example is already available as +`Metrics.I.MaxPauseDuration`. Sample GC metrics are in `Metrics.G`. Sample iteration metrics are in `Metrics.I`. +Sample benchmark metrics are in `Metrics.B`. + +For typical cases, x-axis values are handled automatically (the GC index or the benchmark name as appropriate), but +the start time of the GC can be used instead by passing `Metrics.X.StartRelativeMSec` as the optional `xMetric` +argument. (See the class `BaseMetric` for more details on how this works.) + +Each charting method accepts `Filter`s for the runs, configs, and benchmarks and a predicate `dataFilter` for the +data itself (`BenchmarkData`, `IterationData`, or `TraceGC`). + +In addition, some more advanced arguments are available: + +- `xArrangement` - controls how the x-axis is arranged + - `XArrangements.Default` - normal sorting by x values + - `XArrangements.Sorted` - each series is sorted (highest-to-lowest), and the x-axis values are changed to ranks + - `XArrangements.CombinedSorted` - the first series is sorted (highest-to-lowest), then other series are updated + to match the resulting ordering of x values found from that sort + - `XArrangements.Percentile` - similar to sorted except lower-to-highest, and the x-axis values are the + percentiles of the data within that series - `Sorted` is useful for a small number of items where the x values + have specific meanings (such as benchmark names), whereas `Percentile` is useful when considering the x values + as a distribution. + - Alternatively, create a new subclass of the `XArrangement` class +- `configNameSimplifier` - XPlot has trouble if the series' names (and thus the chart legend) get too large. The + configuration names can be long and repetitive, so this option can be used to display shorter values. + - `NameSimplifier.PrefixDashed` - a predefined strategy that considers configurations as a series of names + separated by dashes. Common prefixes are removed. For example, `a`, `a-b-d`, `a-b-e`, and `a-c` will be + simplified to `<>`, `b-d`, `b-e`, and `c`. The blank value and delimiter can be adjusted by creating a new + `PrefixSimplifier`. + - `ListSimplifier` - applies key-value pairs to the names + - Alternatively, create a new subclass of the `NameSimplifier` class +- `includeRunName` - By default, the run name is discarded when charting under the assumption that the typical + case is multiple configurations under the same run. Setting this parameter concatenates the run and configuration + together. +- `display` - By default, generated chart(s) will be displayed. 
+
+Upcoming:
+
+- Add the ability to specify a primary data series and add metrics that compare against it.
+- Fill out the predefined metrics.
+- Add requested features (specify width of chart).
+- Add more aggregations, including adding the aggregation of iterations to an iteration-level chart/table.
+  (e.g., b1_1, b1_2, b1_3, b1_max, b1_avg, b2_1, b2_2, b2_3, b2_max, b2_avg)
+- Consider splitting `SeriesInfo` into level-specific versions and make methods such as `ChartInternal` generic
+  on the series information.
\ No newline at end of file
diff --git a/src/benchmarks/gc/GC.Infrastructure/Notebooks/DataManager.dib b/src/benchmarks/gc/GC.Infrastructure/Notebooks/DataManager.dib
new file mode 100644
index 00000000000..3c20162e09c
--- /dev/null
+++ b/src/benchmarks/gc/GC.Infrastructure/Notebooks/DataManager.dib
@@ -0,0 +1,797 @@
+#!meta
+
+{"kernelInfo":{"defaultKernelName":"csharp","items":[{"aliases":[],"languageName":"csharp","name":"csharp"}]}}
+
+#!csharp
+
+#i "nuget: https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/index.json"
+
+#r "nuget: Microsoft.Diagnostics.Tracing.TraceEvent"
+#r "nuget: Microsoft.Data.Analysis, 0.19.1"
+//#r "nuget: Newtonsoft.Json"
+#r "nuget: XPlot.Plotly"
+#r "nuget: XPlot.Plotly.Interactive"
+//#r "nuget: YamlDotnet"
+
+// TODO: Ensure you are pointing to the right artifacts folder.
+#r "..\..\..\..\..\artifacts\bin\GC.Analysis.API\Release\net7.0\GC.Analysis.API.dll"
+
+using System.IO;
+using System.Text.RegularExpressions;
+
+using GC.Analysis.API;
+
+//using Etlx = Microsoft.Diagnostics.Tracing.Etlx;
+//using Microsoft.Data.Analysis;
+using Microsoft.Diagnostics.Tracing.Analysis.GC;
+//using Microsoft.Diagnostics.Tracing.Analysis;
+using Microsoft.Diagnostics.Tracing.Parsers.Clr;
+//using Microsoft.Diagnostics.Tracing;
+//using Newtonsoft.Json;
+//using System.Diagnostics;
+using XPlot.Plotly;
+
+#!csharp
+
+// ML and MA are convenience syntax for making lists and arrays.
+public static List<T> ML<T>(params T[] elems) => new List<T>(elems);
+public static T[] MA<T>(params T[] elems) => elems;
+
+// Returns the existing value for the key if present; otherwise adds the provided value and returns it.
+public static V GetOrAdd<K, V>(this Dictionary<K, V> dict, K key, V value)
+    => dict.TryAdd(key, value) ? value : dict[key];
+
+// Sets list[index], growing the list with default values if it is too short.
+public static void SetWithExtend<T>(this List<T> list, int index, T value)
+{
+    int count = list.Count;
+    int needed = index + 1;
+    for (int i = 0; i < (needed - count); ++i)
+    {
+        list.Add(default(T));
+    }
+    list[index] = value;
+}
+
+public static IEnumerable<(T, int)> WithIndex<T>(this IEnumerable<T> list) => list.Select((value, index) => (value, index));
+public static bool NotNull<T>(T x) => x != null;
+
+#!csharp
+
+// Data Acquisition
+//
+// The next few cells detail how to retrieve the data from a base path. The run name below is the name of the folder generated from running the ``aspnetbenchmarks`` command from the GC.Infrastructure API.
+
+#!csharp
+
+// The LoadInfo class consists of all the pertinent fields needed to represent both the result from a particular benchmark
+// as well as the comparison between two runs, where Data2 represents the GCProcessData of the comparand.
+public sealed class LoadInfo +{ + public double MaxWorkingSetMB {get;set;} = double.NaN; + public double P99WorkingSetMB {get;set;} = double.NaN; + public double P95WorkingSetMB {get;set;} = double.NaN; + public double P90WorkingSetMB {get;set;} = double.NaN; + public double P75WorkingSetMB {get;set;} = double.NaN; + public double P50WorkingSetMB {get;set;} = double.NaN; + + public double MaxPrivateMemoryMB {get;set;} = double.NaN; + public double P99PrivateMemoryMB {get;set;} = double.NaN; + public double P95PrivateMemoryMB {get;set;} = double.NaN; + public double P90PrivateMemoryMB {get;set;} = double.NaN; + public double P75PrivateMemoryMB {get;set;} = double.NaN; + public double P50PrivateMemoryMB {get;set;} = double.NaN; + + public double RequestsPerMSec {get; set;} = double.NaN; + public double MeanLatencyMS {get; set;} = double.NaN; + public double Latency99thMS {get; set;} = double.NaN; + public double Latency90thMS {get; set;} = double.NaN; + public double Latency75thMS {get; set;} = double.NaN; + public double Latency50thMS {get; set;} = double.NaN; + + // Do these need to be stored on the LoadInfo? Context should already have this information. + public string Run {get; set;} + public string Config {get; set;} + public string Benchmark {get; set;} + public int Iteration {get; set;} = -1; +} + +public class GCSummaryInfo +{ + public double TotalSuspensionTimeMSec {get;set;} = double.NaN; + public double PercentPauseTimeInGC {get; set;} = double.NaN; + public double PercentTimeInGC {get; set;} = double.NaN; + public double MeanHeapSizeBeforeMB {get; set;} = double.NaN; + public double MaxHeapSizeMB {get; set;} = double.NaN; + public double TotalAllocationsMB {get;set;} = double.NaN; + public double GCScore {get;set;} = double.NaN; + + public double MaxHeapCount {get;set;} = double.NaN; + public double NumberOfHeapCountSwitches {get;set;} = double.NaN; + public double NumberOfHeapCountDirectionChanges {get;set;} = double.NaN; + + // Consider removing + public GCProcessData Data {get;set;} + public GCProcessData? Data2 {get;set;} + + public int ProcessId {get;set;} + public string CommandLine {get;set;} + public string TracePath {get; set;} + public string ProcessName {get;set;} +} + +public class BenchmarkSummaryData +{ + public double MaxWorkingSetMB {get;set;} = double.NaN; + public double P99WorkingSetMB {get;set;} = double.NaN; + public double P95WorkingSetMB {get;set;} = double.NaN; + public double P90WorkingSetMB {get;set;} = double.NaN; + public double P75WorkingSetMB {get;set;} = double.NaN; + public double P50WorkingSetMB {get;set;} = double.NaN; + + public double MaxPrivateMemoryMB {get;set;} = double.NaN; + public double P99PrivateMemoryMB {get;set;} = double.NaN; + public double P95PrivateMemoryMB {get;set;} = double.NaN; + public double P90PrivateMemoryMB {get;set;} = double.NaN; + public double P75PrivateMemoryMB {get;set;} = double.NaN; + public double P50PrivateMemoryMB {get;set;} = double.NaN; + + public double RequestsPerMSec {get;set;} = double.NaN; + public double MeanLatencyMS {get; set;} = double.NaN; + public double Latency50thMS {get; set;} = double.NaN; + public double Latency75thMS {get; set;} = double.NaN; + public double Latency90thMS {get; set;} = double.NaN; + public double Latency99thMS {get; set;} = double.NaN; + + public string Benchmark {get; set;} +} + +// XXXData is the Data for an XXX, not a mapping from XXX to data. +// For example, BenchmarkData is a mapping from iterations to data because a benchmark can have multiple iterations. 
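+// Illustrative drill-down through the hierarchy (hypothetical run/config/benchmark names):
+//   dataManager.Data.Runs["run"].Configs["config"].Benchmarks["benchmark"].Iterations[0].GCProcessData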
+public record IterationData(LoadInfo LoadInfo, GCSummaryInfo GCSummaryInfo, GCProcessData GCProcessData)
+{
+    public LoadInfo LoadInfo { get; set; } = LoadInfo;
+    public GCSummaryInfo GCSummaryInfo { get; set; } = GCSummaryInfo;
+    public GCProcessData GCProcessData { get; set; } = GCProcessData;
+    // GCLogInfo GCLogInfo;
+    // Dictionary Other;
+}
+public record BenchmarkData(LoadInfo SummaryLoadInfo, List<IterationData> Iterations); // Iteration # -> data
+public record ConfigData(Dictionary<string, BenchmarkData> Benchmarks); // Benchmark name -> data
+public record RunData(Dictionary<string, ConfigData> Configs); // Config name -> data
+public record TopLevelData(Dictionary<string, RunData> Runs); // Run name -> data
+
+public class Filter // abstraction used whenever names should be filtered
+{
+    private string[] _includeNames;
+    private string[] _excludeNames;
+    private Regex _includeRE;
+    private Regex _excludeRE;
+
+    public Filter(params string[] includeNames) : this(includeNames: includeNames, excludeNames: null) {}
+    public Filter(IEnumerable<string> includeNames = null, IEnumerable<string> excludeNames = null,
+                  string includeRE = null, string excludeRE = null)
+        : this(
+            includeNames: includeNames?.ToArray(),
+            excludeNames: excludeNames?.ToArray(),
+            includeRE: (includeRE != null) ? (new Regex(includeRE)) : null,
+            excludeRE: (excludeRE != null) ? (new Regex(excludeRE)) : null
+        )
+    {}
+
+    private Filter(string[] includeNames = null, string[] excludeNames = null,
+                   Regex includeRE = null, Regex excludeRE = null)
+    {
+        _includeNames = includeNames;
+        _excludeNames = excludeNames;
+        _includeRE = includeRE;
+        _excludeRE = excludeRE;
+    }
+
+    public static Filter Names(params string[] includeNames) => new(includeNames: includeNames);
+    public static Filter ExcludeNames(params string[] includeNames) => new(excludeNames: includeNames);
+    public static Filter RE(string includeRE) => new(includeRE: includeRE);
+    public static Filter ExcludeRE(string includeRE) => new(excludeRE: includeRE);
+    public static Filter All { get; } = new(null);
+
+    public bool Include(string candidate)
+        => (((_includeNames != null) || (_includeRE != null))
+            ? ((_includeNames?.Contains(candidate) ?? false) || ((_includeRE?.Match(candidate).Success ?? false)))
+            : true)
+        && (!_excludeNames?.Contains(candidate) ?? true)
+        && (!_excludeRE?.Match(candidate).Success ?? true);
+}
+
+public class IntFilter
+{
+    private (int min, int max)[] _includeRanges;
+    private (int min, int max)[] _excludeRanges;
+
+    private static IEnumerable<T> EmptyIfNull<T>(IEnumerable<T> enumerable)
+        => enumerable ?? Enumerable.Empty<T>();
+
+    public IntFilter(params int[] includeValues) : this(includeValues: includeValues, excludeRanges: null) {}
+    public IntFilter(params (int min, int max)[] includeRanges) : this(includeRanges: includeRanges, excludeRanges: null) {}
+    public IntFilter(IEnumerable<int> includeValues = null, IEnumerable<int> excludeValues = null,
+                     IEnumerable<(int min, int max)> includeRanges = null, IEnumerable<(int min, int max)> excludeRanges = null)
+        : this(
+            includeRanges:
+                (includeValues != null || includeRanges != null)
+                ? (EmptyIfNull(includeValues).Select(v => (v,v))).Concat(EmptyIfNull(includeRanges)).ToArray()
+                : null,
+            excludeRanges:
+                (excludeValues != null || excludeRanges != null)
+                ?
(EmptyIfNull(excludeValues).Select(v => (v,v))).Concat(EmptyIfNull(excludeRanges)).ToArray() + : null + ) + {} + + private IntFilter((int min, int max)[] includeRanges = null, (int min, int max)[] excludeRanges = null) + { + _includeRanges = includeRanges; + _excludeRanges = excludeRanges; + } + + public static IntFilter Values(params int[] includeValues) => new(includeValues: includeValues); + public static IntFilter Ranges(params (int min, int max)[] includeRanges) => new(includeRanges: includeRanges); + public static IntFilter ExcludeValues(params int[] excludeValues) => new(excludeValues: excludeValues); + public static IntFilter ExcludeRanges(params (int min, int max)[] excludeRanges) => new(excludeRanges: excludeRanges); + public static IntFilter All { get; } = new(includeValues: null); + + public bool Include(int candidate) + => (_includeRanges?.Any(pair => pair.min <= candidate && candidate <= pair.max) ?? true) + && (!_excludeRanges?.Any(pair => pair.min <= candidate && candidate <= pair.max) ?? true); +} + +#!csharp + +// Filter tests +int failed = 0; +void Assert(bool b, string message) +{ + if (!b) + { + failed++; + Console.WriteLine($"Failed: {message}"); + } +} + +{ + foreach (Filter fa in ML(new("a"), new (includeNames: ML("a")), Filter.Names("a"), new(includeRE: "a"), Filter.RE("a"))) + { + Assert(fa.Include("a"), "a~a"); + Assert(!fa.Include("b"), "a~!b"); + } + + foreach (Filter fab in ML(new("a", "b"), new(includeNames: ML("a", "b")), Filter.Names("a", "b"), new(includeRE: "a|b"), Filter.RE("a|b"), + new(includeNames: ML("a"), includeRE: "b"))) + { + Assert(fab.Include("a"), "ab~a"); + Assert(fab.Include("b"), "ab~b"); + Assert(!fab.Include("c"), "ab~!c"); + } + + foreach (Filter fna in ML(new(excludeNames: ML("a")), Filter.ExcludeNames("a"), new(excludeRE: "a"), Filter.ExcludeRE("a"))) + { + Assert(!fna.Include("a"), "!a~!a"); + Assert(fna.Include("b"), "!a~b"); + } + + foreach (Filter fnab in ML(new(excludeNames: ML("a", "b")), Filter.ExcludeNames("a", "b"), new(excludeRE: "a|b"), Filter.ExcludeRE("a|b"), + new(excludeNames: ML("a"), excludeRE: "b"))) + { + Assert(!fnab.Include("a"), "!ab~!a"); + Assert(!fnab.Include("b"), "!ab~!b"); + Assert(fnab.Include("c"), "!ab~c"); + } + + foreach (Filter fanb in ML(new(includeNames: ML("a", "b"), excludeNames: ML("b")), new(includeRE: "a|b", excludeRE: "b"))) + { + Assert(fanb.Include("a"), "a!b~a"); + Assert(!fanb.Include("b"), "a!b~!b"); + } + + Assert(Filter.All.Include("a"), "all~a"); + + foreach (IntFilter f1 in ML(new(1), new((1,1)), new (includeValues: ML(1)), new (includeRanges: ML((1,1))), + IntFilter.Values(1), IntFilter.Ranges((1,1)))) + { + Assert(f1.Include(1), "1~1"); + Assert(!f1.Include(2), "1~!2"); + } + + foreach (IntFilter f12 in ML(new((1,2)), new((1,1), (2,2)), + new (includeValues: ML(1,2)), new (includeRanges: ML((1,1), (2,2))), new (includeRanges: ML((1,2))), + IntFilter.Values(1, 2), IntFilter.Ranges((1,2)), IntFilter.Ranges((1,1), (2,2)))) + { + Assert(f12.Include(1), "1~1"); + Assert(f12.Include(2), "1~!2"); + } + + foreach (IntFilter fn1 in ML(new (excludeValues: ML(1)), new (excludeRanges: ML((1,1))), + IntFilter.ExcludeValues(1), IntFilter.ExcludeRanges((1,1)))) + { + Assert(!fn1.Include(1), "!1~!1"); + Assert(fn1.Include(2), "!1~2"); + } + + foreach (IntFilter fn12 in ML(new(excludeValues: ML(1,2)), IntFilter.ExcludeValues(1,2), + new(excludeRanges: ML((1,1),(2,2))), new(excludeRanges: ML((1,2))), IntFilter.ExcludeRanges((1,2)), IntFilter.ExcludeRanges((1,2)))) + { + Assert(!fn12.Include(1), 
"!12~!1"); + Assert(!fn12.Include(2), "!12~!2"); + Assert(fn12.Include(3), "!12~3"); + } + + foreach (IntFilter f1n2 in ML(new(includeValues: ML(1,2), excludeValues: ML(2)), new(includeRanges: ML((1,2)), excludeRanges: ML((2,2))))) + { + Assert(f1n2.Include(1), "1!2~1"); + Assert(!f1n2.Include(2), "1!2~!2"); + } + + Assert(IntFilter.All.Include(1), "all~1"); +} +if (failed > 0) throw new Exception($"Failed {failed} test(s)"); + +#!csharp + +using ConfigIterationFilter = System.Collections.Generic.IReadOnlyDictionary; + +public static bool MightInclude(this ConfigIterationFilter configIterationFilter, string config) + => (configIterationFilter == null) || configIterationFilter.ContainsKey(config); + +public static bool Include(this ConfigIterationFilter configIterationFilter, string config, int iteration) + => (configIterationFilter == null) || (configIterationFilter.GetValueOrDefault(config)?.Include(iteration) ?? true); + +public class DataManager +{ + public readonly TopLevelData _data; + + public DataManager() => _data = new(new()); + + public static DataManager CreateAspNetData(string basePath, + Filter configFilter = null, Filter benchmarkFilter = null, IntFilter iterationFilter = null, ConfigIterationFilter configIterationFilter = null, + List pertinentProcesses = null) + => CreateAspNetData(MA(basePath), + configFilter: configFilter, benchmarkFilter: benchmarkFilter, iterationFilter: iterationFilter, configIterationFilter: configIterationFilter, + pertinentProcesses: pertinentProcesses); + + public static DataManager CreateAspNetData(IEnumerable basePaths, + Filter configFilter = null, Filter benchmarkFilter = null, IntFilter iterationFilter = null, ConfigIterationFilter configIterationFilter = null, + List pertinentProcesses = null) + { + DataManager dataManager = new(); + dataManager.AddAspNetData(basePaths: basePaths, + configFilter: configFilter, benchmarkFilter: benchmarkFilter, iterationFilter: iterationFilter, configIterationFilter: configIterationFilter, + pertinentProcesses: pertinentProcesses); + return dataManager; + } + + public static DataManager CreateGCTrace(string file, List pertinentProcesses, string run = null, string config = null, int? iteration = null, + bool loadMultipleProcesses = true) + { + DataManager dataManager = new(); + dataManager.AddGCTrace(file: file, pertinentProcesses: pertinentProcesses, run: run, config: config, iteration: iteration, + loadMultipleProcesses: loadMultipleProcesses); + return dataManager; + } + + public static DataManager CreateGCTraces(string basePath, List pertinentProcesses, SearchOption searchOption = SearchOption.TopDirectoryOnly, + Filter benchmarkFilter = null, string run = null, string config = null, int? 
iteration = null, bool loadMultipleProcesses = true) + { + DataManager dataManager = new(); + dataManager.AddGCTraces(basePath: basePath, pertinentProcesses: pertinentProcesses, searchOption: searchOption, + benchmarkFilter: benchmarkFilter, run: run, config: config, iteration: iteration, loadMultipleProcesses: loadMultipleProcesses); + return dataManager; + + } + + public void AddAspNetData(string basePath, + Filter configFilter = null, Filter benchmarkFilter = null, IntFilter iterationFilter = null, ConfigIterationFilter configIterationFilter = null, + List pertinentProcesses = null) + => AddAspNetData(basePaths: MA(basePath), + configFilter: configFilter, benchmarkFilter: benchmarkFilter, iterationFilter: iterationFilter, configIterationFilter: configIterationFilter, + pertinentProcesses: pertinentProcesses); + + public void AddAspNetData(IEnumerable basePaths, + Filter configFilter = null, Filter benchmarkFilter = null, IntFilter iterationFilter = null, ConfigIterationFilter configIterationFilter = null, + List pertinentProcesses = null) + { + configFilter = configFilter ?? Filter.All; + benchmarkFilter = benchmarkFilter ?? Filter.All; + iterationFilter = iterationFilter ?? IntFilter.All; + // configIterationFilter is not set to an empty dictionary as that would exclude everything + + foreach (var basePath in basePaths) + { + LoadAspNetDataFromBasePath(basePath: basePath, + configFilter: configFilter, benchmarkFilter: benchmarkFilter, iterationFilter: iterationFilter, configIterationFilter: configIterationFilter, + pertinentProcesses: pertinentProcesses); + } + } + + public void AddGCTrace(string file, List pertinentProcesses, string run = null, string config = null, string benchmark = null, int? iteration = null, bool loadMultipleProcesses = true) + { + LoadGCTrace(file: file, configFilter: Filter.All, benchmarkFilter: Filter.All, run: run, config: config, benchmark: benchmark, iteration: iteration, pertinentProcesses: pertinentProcesses, + expectAspNetData: false, loadMultipleProcesses: loadMultipleProcesses); + } + + public void AddGCTraces(string basePath, List pertinentProcesses, SearchOption searchOption = SearchOption.TopDirectoryOnly, Filter configFilter = null, Filter benchmarkFilter = null, + string run = null, string config = null, string benchmark = null, int? iteration = null, bool loadMultipleProcesses = true) + { + configFilter = configFilter ?? Filter.All; + benchmarkFilter = benchmarkFilter ?? 
Filter.All; + + LoadGCTracesFromPath(path: basePath, searchOption: searchOption, configFilter: configFilter, benchmarkFilter: benchmarkFilter, + run: run, config: config, benchmark: benchmark, iteration: iteration, pertinentProcesses: pertinentProcesses, + expectAspNetData: false, loadMultipleProcesses: loadMultipleProcesses); + } + + public static double DeltaPercent (double baseline, double comparand) => Math.Round((comparand - baseline) / baseline * 100, 2); + + public TopLevelData Data => _data; + + //public static LoadInfo LoadLogFile(string file) + //{ + // + //} + + // Consider generalizing the error reporting here + private (string, int) ParseConfigIterName(string dir) + { + int lastUnderscore = dir.LastIndexOf("_"); + string config; + int iteration; + if ((lastUnderscore != -1) + && int.TryParse(dir.AsSpan(lastUnderscore + 1), out iteration)) + { + config = dir.Substring(0, lastUnderscore); + } + else + { + Console.WriteLine($"{dir} is not in the form _"); + config = dir; + iteration = 0; + } + + return (config, iteration); + } + + private (string, string, int) ParseBenchmarkLogFileName(string logName) + { + string[] split = Path.GetFileName(logName).Split("."); + if ((split.Length != 3) || (split[2] != "log")) + { + Console.WriteLine($"{logName} is not in the form ._.log"); + } + // TODO: Store these suffixes + string benchmark = Path.GetFileName( split[0] ).Replace("_Windows", "").Replace("_Linux", "").Replace(".gc", "").Replace(".nettrace", ""); + (string config, int iteration) = ParseConfigIterName(split[1]); + return (config, benchmark, iteration); + } + + private List AspNetProcesses = new() + { + "PlatformBenchmarks", + "Benchmarks", + "MapAction", + "TodosApi", + "BasicGrpc", + "BasicMinimalApi", + }; + + private void LoadAspNetDataFromBasePath(string basePath, + Filter configFilter, Filter benchmarkFilter, IntFilter iterationFilter, ConfigIterationFilter configIterationFilter, + List pertinentProcesses) + { + pertinentProcesses = pertinentProcesses ?? AspNetProcesses; + + string run = Path.GetFileName(basePath); + + foreach (string fullDir in Directory.GetDirectories(basePath)) + { + string subDir = Path.GetFileName(fullDir); + (string config, int iteration) = ParseConfigIterName(subDir); + if (configFilter.Include(config) && iterationFilter.Include(iteration) && configIterationFilter.Include(config, iteration)) + { + LoadAspNetDataFromPath(fullDir, benchmarkFilter, run, config, iteration); + // configFilter has alreay been done- LoadGCTracesFromPath needs it for the case where loadMultipleProcesses is true + // and the filenames become the configs + LoadGCTracesFromPath(fullDir, SearchOption.TopDirectoryOnly, configFilter: Filter.All, benchmarkFilter: benchmarkFilter, + run: run, config: config, benchmark: null, iteration: iteration, + pertinentProcesses: pertinentProcesses, expectAspNetData: true, loadMultipleProcesses: false); + } + } + } + + // Returns a LoadInfo with information extracted from the log file. + // Does not populate the Benchmark, etc., fields. 
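+    // Assumes the crank/benchmark log's table format: rows such as "| Latency 50th (ms) | 1.23 |" are split on '|'
+    // and the value is read from the third field; working-set and private-memory rows are only taken from the
+    // "application" section (i.e., between the "| application" and "| load" header rows).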
+ private LoadInfo LoadAspNetLogFile(string file) + { + LoadInfo info = new(); + + int idxOfApplication = Int32.MaxValue; + int idxOfLoad = Int32.MaxValue; + int idx = 0; + + foreach (var line in File.ReadLines(file)) + { + string[] sp = line.Split("|", StringSplitOptions.TrimEntries); + if (line.Contains("| application")) + { + idxOfApplication = idx; + } + else if (line.Contains("| load")) + { + idxOfLoad = idx; + } + else if (line.Contains("| Latency 50th")) + { + info.Latency50thMS = double.Parse(sp[2]); + } + else if (line.Contains("| Latency 75th")) + { + info.Latency75thMS = double.Parse(sp[2]); + } + else if (line.Contains("| Latency 90th")) + { + info.Latency90thMS = double.Parse(sp[2]); + } + else if (line.Contains("| Latency 99th")) + { + info.Latency99thMS = double.Parse(sp[2]); + } + else if (line.Contains("Requests/sec")) + { + info.RequestsPerMSec = double.Parse(sp[2]) / 1000; + } + else if (line.Contains("Mean latency")) + { + info.MeanLatencyMS = double.Parse(sp[2]); + } + else if (line.Contains("Max Working Set") && (idxOfApplication < idx && idx < idxOfLoad)) + { + info.MaxWorkingSetMB = double.Parse(sp[2]); + } + else if (line.Contains("Working Set P99") && (idxOfApplication < idx && idx < idxOfLoad)) + { + info.P99WorkingSetMB = double.Parse(sp[2]); + } + else if (line.Contains("Working Set P95") && (idxOfApplication < idx && idx < idxOfLoad)) + { + info.P95WorkingSetMB = double.Parse(sp[2]); + } + else if (line.Contains("Working Set P90") && (idxOfApplication < idx && idx < idxOfLoad)) + { + info.P90WorkingSetMB = double.Parse(sp[2]); + } + else if (line.Contains("Working Set P75") && (idxOfApplication < idx && idx < idxOfLoad)) + { + info.P75WorkingSetMB = double.Parse(sp[2]); + } + else if (line.Contains("Working Set P50") && (idxOfApplication < idx && idx < idxOfLoad)) + { + info.P50WorkingSetMB = double.Parse(sp[2]); + } + else if (line.Contains("Max Private Memory") && (idxOfApplication < idx && idx < idxOfLoad)) + { + info.MaxPrivateMemoryMB = double.Parse(sp[2]); + } + else if (line.Contains("Private Memory P99") && (idxOfApplication < idx && idx < idxOfLoad)) + { + info.P99PrivateMemoryMB = double.Parse(sp[2]); + } + else if (line.Contains("Private Memory P95") && (idxOfApplication < idx && idx < idxOfLoad)) + { + info.P95PrivateMemoryMB = double.Parse(sp[2]); + } + else if (line.Contains("Private Memory P90") && (idxOfApplication < idx && idx < idxOfLoad)) + { + info.P90PrivateMemoryMB = double.Parse(sp[2]); + } + else if (line.Contains("Private Memory P75") && (idxOfApplication < idx && idx < idxOfLoad)) + { + info.P75PrivateMemoryMB = double.Parse(sp[2]); + } + else if (line.Contains("Private Memory P50") && (idxOfApplication < idx && idx < idxOfLoad)) + { + info.P50PrivateMemoryMB = double.Parse(sp[2]); + } + + ++idx; + } + + return info; + } + + private void LoadAspNetDataFromPath(string path, Filter benchmarkFilter, string run, string config, int iteration) + { + var files = Directory.GetFiles(path, "*.log", SearchOption.AllDirectories); + + foreach (var file in files) + { + if (file.Contains("build.log") || file.Contains("output.log") || file.Contains("_GCLog")) + { + continue; + } + + (string logConfig, string benchmark, int logIteration) = ParseBenchmarkLogFileName(file); + + if (!benchmarkFilter.Include(benchmark)) + { + continue; + } + + if ((config != logConfig) || (iteration != logIteration)) + { + Console.WriteLine($"Directory name and log filename in {file} disagree on config/iteration"); + } + + LoadInfo info = LoadAspNetLogFile(file); + + 
info.Run = run; + info.Config = config; + info.Benchmark = benchmark; + info.Iteration = iteration; + + RunData runData = _data.Runs.GetOrAdd(run, new(new())); + ConfigData configData = runData.Configs.GetOrAdd(config, new(new())); + BenchmarkData benchmarkData = configData.Benchmarks.GetOrAdd(benchmark, new(null, new())); + if ((benchmarkData.Iterations.Count > iteration) + && (benchmarkData.Iterations[iteration] != null)) + { + Console.WriteLine($"WARNING: Duplicate iteration '{run} / {config} / {benchmark} / {iteration}' found"); + benchmarkData.Iterations[iteration].LoadInfo = info; + } + else + { + benchmarkData.Iterations.SetWithExtend(iteration, new(info, null, null)); + } + } + } + + private void LoadGCTracesFromPath(string path, SearchOption searchOption, Filter configFilter, Filter benchmarkFilter, string run, string config, string benchmark, int? iteration, List pertinentProcesses, + bool expectAspNetData, bool loadMultipleProcesses) + { + var traceFiles = Directory.GetFiles(path, "*.etl.zip", searchOption).ToList(); + var nettraceFiles = Directory.GetFiles(path, "*.nettrace", searchOption); + traceFiles.AddRange(nettraceFiles); + + Parallel.ForEach(traceFiles, + file => LoadGCTrace(file: file, configFilter: configFilter, benchmarkFilter: benchmarkFilter, run: run, config: config, benchmark: benchmark, iteration: iteration, + pertinentProcesses: pertinentProcesses, expectAspNetData: expectAspNetData, loadMultipleProcesses: loadMultipleProcesses)); + } + + private void LoadGCTrace(string file, Filter configFilter, Filter benchmarkFilter, string run, string config, string benchmark, int? iteration, List pertinentProcesses, bool expectAspNetData, bool loadMultipleProcesses) + { + string dir = Path.GetFileName(Path.GetDirectoryName(file)); + //string[] sp = file.Split("\\"); + //sp[sp.Length - 1] + string fileBaseName = Path.GetFileNameWithoutExtension(file) + .Replace("_Windows", "") + .Replace(".gc.etl", "") + .Replace("_Linux", "") + .Replace(".nettrace", "") + .Replace(".gc", "") + .Replace(".etl", ""); + + if (loadMultipleProcesses && (benchmark != null)) + { + throw new ArgumentException("LoadGCTrace: loadMultipleProcesses and setting a benchmark name are not compatible"); + } + + run = run ?? (loadMultipleProcesses ? dir : ""); + config = config ?? (loadMultipleProcesses ? fileBaseName : dir); + if (!configFilter.Include(config)) return; + + Analyzer analyzer = AnalyzerManager.GetAnalyzer(file); + List allData; + + //foreach (var pair in analyzer.AllGCProcessData) + //{ + // Console.WriteLine($"{pair.Key}: {pair.Value.Count}"); + //} + //if (file.Contains(".nettrace")) + //{ + // data = analyzer.AllGCProcessData.First().Value.First(); + //} + //else + { + allData = pertinentProcesses.SelectMany(p => analyzer.GetProcessGCData(p)).ToList(); //.Where(NotNull).FirstOrDefault(); + } + + if (allData.Count == 0) + { + Console.WriteLine($"The following trace doesn't have a pertinent process: {file}"); + Console.WriteLine($"Processes: {string.Join(", ", analyzer.AllGCProcessData.Keys)}"); + Console.WriteLine($"Check: {string.Join(", ", analyzer.AllGCProcessData.Keys.Select(k => k == pertinentProcesses[0]))}"); + return; + } + if (!loadMultipleProcesses && (allData.Count > 1)) + { + Console.WriteLine($"The following trace has more than one pertinent process: {file}"); + Console.WriteLine($"Found processes: {string.Join(", ", allData.Select(d => d.ProcessName))}'"); + return; + } + + foreach (GCProcessData data in allData) + { + benchmark = benchmark ?? (loadMultipleProcesses ? 
data.ProcessName : fileBaseName); + if (!benchmarkFilter.Include(benchmark)) continue; + LoadGCTraceOneProcess(file, data, run, config, benchmark, iteration, expectAspNetData); + } + } + + private void LoadGCTraceOneProcess(string file, GCProcessData data, string run, string config, string benchmark, int? iteration, bool expectAspNetData) + { + GCSummaryInfo gcSummaryInfo = new(); + gcSummaryInfo.MeanHeapSizeBeforeMB = data.Stats.MeanSizePeakMB; + gcSummaryInfo.MaxHeapSizeMB = data.Stats.MaxSizePeakMB; + gcSummaryInfo.PercentTimeInGC = (data.GCs.Sum(gc => gc.PauseDurationMSec - gc.SuspendDurationMSec) / (data.Stats.ProcessDuration) ) * 100; + gcSummaryInfo.TracePath = data.Parent.TraceLogPath; + gcSummaryInfo.TotalAllocationsMB = data.Stats.TotalAllocatedMB; + gcSummaryInfo.CommandLine = data.CommandLine; + gcSummaryInfo.PercentPauseTimeInGC = data.Stats.GetGCPauseTimePercentage(); + gcSummaryInfo.GCScore = (gcSummaryInfo.MaxHeapSizeMB * gcSummaryInfo.PercentPauseTimeInGC); + gcSummaryInfo.ProcessId = data.ProcessID; + gcSummaryInfo.Data = data; + gcSummaryInfo.ProcessName = data.ProcessName; + gcSummaryInfo.TotalSuspensionTimeMSec = data.GCs.Sum(gc => gc.SuspendDurationMSec); + + gcSummaryInfo.MaxHeapCount = 0; + gcSummaryInfo.NumberOfHeapCountSwitches = 0; + gcSummaryInfo.NumberOfHeapCountDirectionChanges = 0; + + int? prevNumHeapsOption = null; + bool prevChangeUp = true; // don't want to count the initial 1->n change as a change in direction + for (int i = 0; i < data.GCs.Count; i++) + { + if (data.GCs[i].GlobalHeapHistory == null) continue; + int thisNumHeaps = data.GCs[i].GlobalHeapHistory.NumHeaps; + gcSummaryInfo.MaxHeapCount = Math.Max(gcSummaryInfo.MaxHeapCount, thisNumHeaps); + if (prevNumHeapsOption.HasValue) + { + int prevNumHeaps = prevNumHeapsOption.Value; + if (prevNumHeaps != thisNumHeaps) + { + gcSummaryInfo.NumberOfHeapCountSwitches++; + bool thisChangeUp = thisNumHeaps > prevNumHeaps; + if (prevChangeUp != thisChangeUp) + { + gcSummaryInfo.NumberOfHeapCountDirectionChanges++; + } + prevChangeUp = thisChangeUp; + } + } + prevNumHeapsOption = thisNumHeaps; + } + + lock (_data) + { + RunData runData = _data.Runs.GetOrAdd(run, new(new())); + ConfigData configData = runData.Configs.GetOrAdd(config, new(new())); + BenchmarkData benchmarkData = configData.Benchmarks.GetOrAdd(benchmark, new(null, new())); + + int iterationToUse = iteration ?? 
benchmarkData.Iterations.FindIndex(iterationData => iterationData == null); + if (iterationToUse == -1) iterationToUse = benchmarkData.Iterations.Count; + + if ((benchmarkData.Iterations.Count > iterationToUse) + && (benchmarkData.Iterations[iterationToUse] != null)) + { + if (benchmarkData.Iterations[iterationToUse].GCSummaryInfo != null) + { + Console.WriteLine($"Replacing existing GC information for '{run} / {config} / {benchmark} / {iterationToUse}' - {file}"); + } + benchmarkData.Iterations[iterationToUse].GCSummaryInfo = gcSummaryInfo; + benchmarkData.Iterations[iterationToUse].GCProcessData = data; + } + else + { + if (expectAspNetData) + { + Console.WriteLine($"The following trace doesn't have a corresponding ASP.NET log '{run} / {config} / {benchmark} / {iterationToUse}' - {file}"); + } + + benchmarkData.Iterations.SetWithExtend(iterationToUse, new(null, gcSummaryInfo, data)); + } + } + } +} diff --git a/src/benchmarks/gc/GC.Infrastructure/Notebooks/Reports-Examples.ipynb b/src/benchmarks/gc/GC.Infrastructure/Notebooks/Reports-Examples.ipynb new file mode 100644 index 00000000000..49fbb492fb8 --- /dev/null +++ b/src/benchmarks/gc/GC.Infrastructure/Notebooks/Reports-Examples.ipynb @@ -0,0 +1,804 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "#!import BenchmarkAnalysis.dib" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "// This only needs to be evaluated when iterating on the Reports code itself.\n", + "\n", + "#!import Reports.dib" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Note that this is just the state of my working notebook, not a comprehensive set of examples." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "string rootDir = @\"C:\\home\\repro\\decommit\";\n", + "var hugeDM = new DataManager();\n", + "foreach (string config in ML(\"rcbase\", \"rc1\", \"alt-draft-log3\"))\n", + "{\n", + " string configDir = $\"gcperfsim_{config}_gc\";\n", + " foreach (string benchBase in ML(\"100mb\", \"100mb-10mb\", \"10mb-100mb\"))\n", + " {\n", + " foreach (string benchSuffix in ML(\"\", \"-low\"))\n", + " {\n", + " string bench = benchBase + benchSuffix;\n", + " string etl = $@\"{rootDir}\\{configDir}\\{bench}\\{bench}.{config}.0.etl\";\n", + " if (!File.Exists(etl))\n", + " {\n", + " Console.WriteLine($\"{etl} does not exist\");\n", + " continue;\n", + " }\n", + " hugeDM.AddGCTrace(etl, ML(\"corerun\"), config: config, loadMultipleProcesses: false);\n", + " }\n", + " }\n", + "}" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "hugeDM" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "string sds_process_name = \"Microsoft.M365.Core.Sds.Service\";\n", + "string store_worker_process_name = \"Microsoft.Exchange.Store.Worker\";\n", + "var store_work_process_list = ML(sds_process_name, store_worker_process_name);\n", + "var sdsDM = DataManager.CreateGCTraces(@\"c:\\home\\repro\\notebook-gcapi\", store_work_process_list);\n", + "\n", + "(sdsDM.Data.Runs.First().Value.Configs.First().Value.Benchmarks.First().Value.Iterations[0].GCProcessData.ProcessID,\n", + "sdsDM.Data.Runs.First().Value.Configs.First().Value.Benchmarks.First().Value.Iterations[0].GCProcessData.CommandLine)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "var diffDataManager = DataManager.CreateAspNetData(ML(\n", + " @\"C:\\home\\repro\\hc\\asp_traceplus3_gc\",\n", + " @\"C:\\home\\repro\\hc\\asp_tp3-m4_gc\",\n", + " @\"C:\\home\\repro\\hc\\asp_slope_gc\",\n", + " @\"C:\\home\\repro\\hc\\asp_evaldecr_gc\"\n", + "));" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "var cardsDM = DataManager.CreateGCTraces(@\"c:\\home\\repro\\2401310010004275\", pertinentProcesses: ML(\"EXCEL\"));" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "var low4DM = DataManager.CreateAspNetData(@\"c:\\home\\repro\\hc\\asp_v2-fixrearranged-mult-max_gc\"\n", + 
" //, benchmarkFilter: Filter.RE(\"Stage.*|Json.*\");\n", + " //, iterationFilter: IntFilter.Values(1)\n", + ");\n", + "\n", + "(low4DM.Data.Runs[\"asp_v2-fixrearranged-mult-max_gc\"].Configs.Keys,\n", + " low4DM.Data.Runs[\"asp_v2-fixrearranged-mult-max_gc\"].Configs[\"v2-fixrearranged-mult-max-h4\"].Benchmarks[\"Stage2\"].Iterations[1].GCSummaryInfo.MaxHeapCount,\n", + " low4DM.Data.Runs[\"asp_v2-fixrearranged-mult-max_gc\"].Configs[\"v2-fixrearranged-mult-max\"].Benchmarks[\"Stage2\"].Iterations[1].GCSummaryInfo.MaxHeapCount,\n", + " low4DM.GetConfigs(Filter.All, Filter.RE(\"max\")))\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "var rc3DataManager = DataManager.CreateAspNetData(@\"C:\\home\\repro\\hc\\asp_v2-fixrearranged_gc\");\n", + "rc3DataManager.AddAspNetData(@\"C:\\home\\repro\\hc\\asp_v2-tune_gc\");\n", + "\n", + "rc3DataManager.GetConfigs(Filter.All, Filter.All)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "var net6dm = DataManager.CreateGCTrace(@\"C:\\home\\repro\\2046032\\Microsoft.MWC.Workload.OneLake.Service.EntryPoint.exe_WithGCon.nettrace\", ML(\"Microsoft.MWC.Workload.OneLake.Service.EntryPoint\"));\n", + "net6dm.AddGCTrace(@\"C:\\home\\repro\\2046032\\TESTenvCST350 with perf.nettrace\", ML(\"Microsoft.MWC.Workload.OneLake.Service.EntryPoint\"));" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Charting examples" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "ChartBenchmarks(sdsDM, ML(Metrics.B.AveragePercentPauseTimeInGC, Metrics.B.MaxHeapCount)\n", + " //, benchmarkFilter: Filter.Names(\"JsonMapAction\")\n", + " , configNameSimplifier: NameSimplifier.PrefixDashed\n", + " );" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "TableBenchmarks(low4DM, ML(Metrics.B.AveragePercentPauseTimeInGC, Metrics.B.MaxHeapCount)\n", + " //, benchmarkFilter: Filter.Names(\"JsonMapAction\")\n", + " , configNameSimplifier: NameSimplifier.PrefixDashed\n", + " );" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "hugeDM.Data.Runs.First().Value.Configs.First().Value.Benchmarks.First().Value.Iterations[0].GCProcessData.GCs" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "ChartGCData(hugeDM, 
ML(Metrics.G.CommittedBeforeInUse, Metrics.G.CommittedBeforeInFree, Metrics.G.CommittedBeforeInGlobalFree, Metrics.G.CommittedBeforeInGlobalDecommit)\n", + " , configNameSimplifier: NameSimplifier.PrefixDashed\n", + " , debug: true\n", + " );" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "foreach (var xarr in new[] { (XArrangement) XArrangements.Default, XArrangements.Sorted, XArrangements.CombinedSorted })\n", + "{\n", + "TableBenchmarks(low4DM,\n", + " ML(Metrics.B.MaxHeapCount,\n", + " Metrics.Promote(Metrics.I.MaxHeapCount, Aggregation.Min),\n", + " Metrics.Promote(Metrics.I.MaxHeapCount, Aggregation.Range),\n", + " Metrics.Promote(Metrics.I.NumberOfHeapCountSwitches, Aggregation.Range),\n", + " Metrics.Promote(Metrics.I.NumberOfHeapCountDirectionChanges, Aggregation.Range)),\n", + " configNameSimplifier: NameSimplifier.PrefixDashed,\n", + " xArrangement: xarr,\n", + " configFilter: new Filter(excludeRE: \"h4\")\n", + " //, debug: true\n", + " );\n", + "}" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "foreach (var xarr in new[] { (XArrangement) XArrangements.Default, XArrangements.Sorted, XArrangements.CombinedSorted })\n", + "{\n", + "TableBenchmarks(diffDataManager,\n", + " ML((Metrics.B.MaxHeapCount),\n", + " Metrics.Promote(Metrics.I.MaxHeapCount, Aggregation.Min),\n", + " Metrics.Promote(Metrics.I.MaxHeapCount, Aggregation.Range),\n", + " Metrics.Promote(Metrics.I.NumberOfHeapCountSwitches, Aggregation.Range),\n", + " Metrics.Promote(Metrics.I.NumberOfHeapCountDirectionChanges, Aggregation.Range)),\n", + " //configNameSimplifier: NameSimplifier.PrefixDashed,\n", + " xArrangement: xarr);\n", + "}" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "TableIterations(diffDataManager, Metrics.I.MaxHeapCount\n", + " , configFilter: Filter.Names(\"traceplus3\", \"tp3-m4\")\n", + ");" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "foreach (var lat in Metrics.I.LatencyMSList)\n", + "{\n", + " TableBenchmarks(diffDataManager, Metrics.Promote(lat, Aggregation.Average)\n", + " // , benchmarkFilter: Filter.RE(\"Stage.*\")\n", + " // configNameSimplifier: NameSimplifier.PrefixDashed,\n", + " // types: B_XType.All,\n", + " //configFilter: new Filter(excludeRE: \".*h4\")\n", + " );\n", + "}" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "ChartIterations(diffDataManager, ML(/*Metrics.I.GCScore,*/ Metrics.I.RequestsPerMSec));" + ] + }, + { + "cell_type": 
"code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "TableGCData(net6dm,\n", + " ML(\n", + " Metrics.G.HeapSizeAfter,\n", + " Metrics.G.HeapSizeBefore,\n", + " new Metric(gc => gc.HeapStats.GCHandleCount, \"GC Handles\", \"#\"),\n", + " Metrics.G.GenLargeSizeBefore,\n", + " Metrics.G.GenLargeSizeAfter,\n", + " Metrics.G.GenLargeObjSizeAfter,\n", + " new Metric(gc => gc.HeapStats.TotalHeapSize / 1000000, \"Total heap size\", \"MB\")\n", + " )\n", + " , textPresenter: TextPresenter.Html\n", + " , configNameSimplifier: new ListSimplifier((\"Microsoft.MWC.Workload.OneLake.Service.EntryPoint.exe_WithGCon\", \"Entry\"), (\"TESTenvCST350 with perf\", \"TES\"))\n", + " , dataFilter: gc => gc.Number > 300 && gc.Number < 310\n", + " );" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "ChartGCData(low4DM\n", + " , metrics: ML(Metrics.G.PauseDuration)\n", + " , benchmarkFilter: Filter.Names(\"Stage2\")\n", + " , configNameSimplifier: NameSimplifier.PrefixDashed\n", + " , xMetric: Metrics.X.StartRelativeMSec\n", + " , dataFilter: gc => gc.StartRelativeMSec > 2000\n", + " , iterationFilter: IntFilter.Values(0)\n", + " , xArrangement: XArrangements.Relative\n", + ");" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "ChartGCData(low4DM\n", + " , metrics: ML(Metrics.G.HctMtcp, Metrics.G.NumHeaps)\n", + " , benchmarkFilter: Filter.RE(\"Stage2$\")\n", + " , configNameSimplifier: NameSimplifier.PrefixDashed\n", + " , iterationFilter: IntFilter.Values(0)\n", + ");" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "ChartGCData(low4DM, Metrics.G.NumHeaps, configNameSimplifier: NameSimplifier.PrefixDashed, debug: false);" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "var low4CompRuns = ML((\"v2-fixrearranged-mult-max\", \"base\"), (\"v2-fixrearranged-mult-max-h4\", \"max4\"),\n", + " (\"v2-fixrearranged-mult-max-svr\", \"svr\"), (\"v2-fixrearranged-mult-max-svr4\", \"svr4\"),\n", + " (\"v2-fixrearranged-mult-max-mult8\", \"mult8\"), (\"v2-fixrearranged-mult-max-mult32\", \"mult32\"),\n", + " (\"v2-fixrearranged-mult-max-mult8x10\", \"m8x10\"), (\"v2-fixrearranged-mult-max-mult32x10\", \"m32x10\"),\n", + " (\"v2-fixrearranged-mult-max-x10\", \"x10\"));\n", + "\n", + "ChartGCData(low4DM, Metrics.G.HctMtcp\n", + " , configFilter: Filter.ExcludeRE(\"svr\")\n", + " , configNameSimplifier: new ListSimplifier(low4CompRuns)\n", + " );" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { 
+ "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "ChartGCData(\n", + " sdsDM\n", + " , metrics: ML(Metrics.G.AllocRateMBSec, Metrics.G.PauseDuration)\n", + " //, benchmarkFilter: Filter.RE(\"Run32\")\n", + " , configNameSimplifier: NameSimplifier.PrefixDashed\n", + ");" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "ChartGCData(\n", + " cardsDM\n", + " , metrics: ML(Metrics.G.AllocRateMBSec, Metrics.G.PauseDuration)\n", + " , configFilter: Filter.RE(\"Run32\")\n", + ");" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "ChartGCData(\n", + " cardsDM,\n", + " metrics: ML(Metrics.G.PauseDuration.WithCap(100), Metrics.G.PauseStack.WithCap(100), Metrics.G.PauseFQ, Metrics.G.PauseHandles.WithCap(100), Metrics.G.PauseCards, Metrics.G.Suspend,\n", + " new Metric(gc => gc.HeapStats.GCHandleCount, \"GC Handles\", \"#\"),\n", + " new Metric(gc => gc.HeapStats.FinalizationPromotedCount, \"F promoted\", \"#\"))\n", + " //, dataFilter: gc => gc.Generation == 0\n", + " , configFilter: Filter.RE(\"Only\")\n", + " , xMetric: Metrics.X.GCIndex\n", + " , configNameSimplifier: new ListSimplifier((\"2401310010004275\", \"a\"))\n", + " );" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "ChartGCData(rc3DataManager, Metrics.G.HeapSizeBefore, benchmarkFilter: Filter.RE(\"Stage2$\"));" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "ChartGCData(rc3DataManager, Metrics.G.NumHeaps\n", + " , benchmarkFilter: Filter.Names(\"Fortunes\", \"FortunesDapper\", \"JsonHttpsHttpSys\", \"PlaintextQueryString\", \"Stage1\", \"Stage2\", \"PlaintextMvc\")\n", + " , benchmarkMap: x => (x == \"Stage1\" || x == \"Stage2\" ? 
\"S1/2\" : x));" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "var rc3RearrNoBaseRuns = ML((\"v2-rc3\", \"rc3\"), (\"v2-fixrearranged\", \"rc3rearr\"), (\"v2-tune\", \"rc3tune\"));\n", + "\n", + "ChartGCData(rc3DataManager, Metrics.G.NumHeaps\n", + " , configNameSimplifier: new ListSimplifier(rc3RearrNoBaseRuns)\n", + " , benchmarkFilter: new Filter(includeNames: scoutList2)\n", + ");" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Debugging" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "System.Diagnostics.Process.GetCurrentProcess().Id" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "dotnet_interactive": { + "language": "csharp" + }, + "polyglot_notebook": { + "kernelName": "csharp" + }, + "vscode": { + "languageId": "polyglot-notebook" + } + }, + "outputs": [], + "source": [ + "#!about" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".NET (C#)", + "language": "C#", + "name": ".net-csharp" + }, + "language_info": { + "name": "python" + }, + "orig_nbformat": 4, + "polyglot_notebook": { + "kernelInfo": { + "defaultKernelName": "csharp", + "items": [ + { + "aliases": [], + "name": "csharp" + } + ] + } + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/src/benchmarks/gc/GC.Infrastructure/Notebooks/Reports.dib b/src/benchmarks/gc/GC.Infrastructure/Notebooks/Reports.dib new file mode 100644 index 00000000000..e2b952d9fb1 --- /dev/null +++ b/src/benchmarks/gc/GC.Infrastructure/Notebooks/Reports.dib @@ -0,0 +1,1810 @@ +#!meta + +{"kernelInfo":{"defaultKernelName":"csharp","items":[{"aliases":[],"languageName":"csharp","name":"csharp"}]}} + +#!csharp + +// Instructions to get Intellisense, etc., in this file: +// +// Normally this file is #!import-ed into an environment (BenchmarkAnalysis.dib) that already has types such as DataManager defined. +// However, to work on this file, we need that context available here. To get that, uncomment the following #!import, execute this +// cell, and then comment the line again. This will provide an editing environment. Keeping it commented it necessary because +// splitting the imports/usings in the BenchmarkAnalysis case can cause strange name resolution and type conversion errors. 
+ +//#!import DataManager.dib + +#!csharp + +// Huge block of code that operates on DataManager +// ----------------------------------------------- + +// Notebook cells are already in implicit classes, so this isn't needed (and doesn't work): +// public static class DataManagerExtensions + +public static IEnumerable<(string run, string config, ConfigData configData)> GetConfigsWithData(this DataManager dataManager, Filter runFilter, Filter configFilter) +{ + foreach ((string run, RunData runData) in dataManager.Data.Runs) + { + if (!runFilter.Include(run)) continue; + foreach ((string config, ConfigData configData) in runData.Configs) + { + if (!configFilter.Include(config)) continue; + yield return (run, config, configData); + } + } +} + +public static IEnumerable<(string run, string config)> GetConfigs(this DataManager dataManager, Filter runFilter, Filter configFilter) + => dataManager.GetConfigsWithData(runFilter, configFilter).Select(tuple => (tuple.run, tuple.config)); + +public static IEnumerable<(string run, string config, string benchmark, BenchmarkData benchmarkData)> GetBenchmarksWithData( + this DataManager dataManager, Filter runFilter, Filter configFilter, Filter benchmarkFilter, IntFilter iterationFilter, ConfigIterationFilter configIterationFilter) +{ + foreach ((string run, string config, ConfigData configData) in dataManager.GetConfigsWithData(runFilter, configFilter)) + { + if (!configIterationFilter.MightInclude(config)) continue; + + foreach ((string benchmark, BenchmarkData benchmarkData) in configData.Benchmarks) + { + if (!benchmarkFilter.Include(benchmark)) continue; + if (!benchmarkData.Iterations.WithIndex() + .Where(pair => pair.Item1 != null) + .Select(pair => pair.Item2) + .Any(iteration => iterationFilter.Include(iteration) && configIterationFilter.Include(config, iteration))) continue; + yield return (run, config, benchmark, benchmarkData); + } + } +} + +public static IEnumerable<(string run, string config, string benchmark)> GetBenchmarks(this DataManager dataManager, Filter runFilter, Filter configFilter, + Filter benchmarkFilter, IntFilter iterationFilter, ConfigIterationFilter configIterationFilter) + => dataManager.GetBenchmarksWithData(runFilter, configFilter, benchmarkFilter, iterationFilter, configIterationFilter) + .Select(tuple => (tuple.run, tuple.config, tuple.benchmark)); + +public static IEnumerable<(string run, string config, int iteration, IterationData data)> GetIterationsForBenchmark(this DataManager dataManager, + Filter runFilter, Filter configFilter, IntFilter iterationFilter, ConfigIterationFilter configIterationFilter, string benchmark) +{ + foreach ((string run, string config, ConfigData configData) in dataManager.GetConfigsWithData(runFilter, configFilter)) + { + if (!configIterationFilter.MightInclude(config)) continue; + if (!configData.Benchmarks.TryGetValue(benchmark, out BenchmarkData benchmarkData)) continue; + + foreach ((IterationData iterationData, int iteration) in benchmarkData.Iterations.WithIndex()) + { + if (!iterationFilter.Include(iteration)) continue; + if (!configIterationFilter.Include(config, iteration)) continue; + if (iterationData == null) continue; + yield return (run, config, iteration, iterationData); + } + } +} + +public static IEnumerable GetIterations(this ConfigData data, string config, + Filter benchmarkFilter, IntFilter iterationFilter, ConfigIterationFilter configIterationFilter) + // May need to improve efficiency here + => data.Benchmarks + .Where((b, _) => benchmarkFilter.Include(b.Key)) + 
.SelectMany(b => + b.Value.Iterations + .WithIndex() + .Where(pair => pair.Item1 != null) + .Select(pair => pair.Item2) + .Where(iteration => iterationFilter.Include(iteration) && configIterationFilter.Include(config, iteration))) + .Distinct() + .OrderBy(x => x); + +// Utilities + +// https://stackoverflow.com/a/49058506 +public static IEnumerable<(T PrevItem, T CurrentItem, T NextItem)> + SlidingWindow(this IEnumerable source, T emptyValue = default) +{ + using (var iter = source.GetEnumerator()) + { + if (!iter.MoveNext()) + yield break; + var prevItem = emptyValue; + var currentItem = iter.Current; + while (iter.MoveNext()) + { + var nextItem = iter.Current; + yield return (prevItem, currentItem, nextItem); + prevItem = currentItem; + currentItem = nextItem; + } + yield return (prevItem, currentItem, emptyValue); + } +} + +// overkill for what is needed now but leftover + +public struct CircularListAccess : IReadOnlyList +{ + private IList _list; + private int _start; + private int _length; + + public CircularListAccess(IList list, int start, int length) + { + if (list == null) throw new ArgumentException("list"); + if (start < 0 || start >= list.Count) throw new ArgumentException("start"); + if (length < 0 || length > list.Count) throw new ArgumentException("length"); + + _list = list; + _start = start; + _length = length; + } + + public T this[int index] + { + get + { + if (index >= _length) throw new IndexOutOfRangeException(); + return _list[(_start + index) % _list.Count]; + } + } + + public int Count => _length; + + public struct Enumerator : IEnumerator + { + private CircularListAccess _list; + private int _index; + private T _current; + + public Enumerator(CircularListAccess list) + { + _list = list; + _index = 0; + _current = default; + } + public T Current => _current; + object IEnumerator.Current => Current; + public bool MoveNext() + { + int count = _list.Count; + if (_index < count) + { + _current = _list[_index++]; + return true; + } + else + { + _current = default; + return false; + } + } + public void Reset() { _index = 0; _current = default; } + public void Dispose() {} + } + + public IEnumerator GetEnumerator() => new Enumerator(this); + IEnumerator IEnumerable.GetEnumerator() => new Enumerator(this); +} + +public static IEnumerable> + SlidingRange(this List source, int size) +{ + for (int i = 0; i <= source.Count - size; ++i) + { + // don't actually need CircularListAccess - was from an earlier idea + yield return new CircularListAccess(source, i, size); + } +} + +public class ColorProvider +{ + // Families of gradients + // 80 00 00 -> ff 00 00 -> ff 80 80 (3) + // 80 80 00 -> ff ff 00 -> ff ff 80 (3) + // 80 40 00 -> ff 80 00 -> ff c0 80 (6) + // 40 40 40 -> 80 80 80 -> c0 c0 c0 (1) + // 80 2A 00 -> ff 55 00 -> ff aa 80 (6) + // 80 55 00 -> ff aa 00 -> ff d4 80 (6) + enum Scale + { + Zero, + Full, + Half, + OneThird, + TwoThird, + } + + static (int first, int mid, int last) GetScale(Scale scale) + => scale switch + { + Scale.Zero => (0, 0, 0x80), + Scale.Full => (0x80, 0xFF, 0xFF), + Scale.Half => (0x40, 0x80, 0xC0), + Scale.OneThird => (0x2A, 0x55, 0xAA), + Scale.TwoThird => (0x55, 0xAA, 0xD4), + _ => throw new Exception("Unknown Scale") + }; + + public record RGB(int R, int G, int B); + record ScaleRGB(Scale R, Scale G, Scale B); + + static ScaleRGB[] _colorFamilies = + { + new ScaleRGB(Scale.Full, Scale.Zero, Scale.Zero), + new ScaleRGB(Scale.Zero, Scale.Full, Scale.Zero), + new ScaleRGB(Scale.Zero, Scale.Zero, Scale.Full), + + new ScaleRGB(Scale.Half, 
Scale.Half, Scale.Half), + + //new ScaleRGB(Scale.Full, Scale.Full, Scale.Zero), // yellow isn't scaling very well + new ScaleRGB(Scale.Full, Scale.Zero, Scale.Full), + new ScaleRGB(Scale.Zero, Scale.Full, Scale.Full), + + new ScaleRGB(Scale.Full, Scale.Half, Scale.Zero), + new ScaleRGB(Scale.Zero, Scale.Full, Scale.Half), + new ScaleRGB(Scale.Half, Scale.Zero, Scale.Full), + + new ScaleRGB(Scale.Full, Scale.Zero, Scale.Half), + new ScaleRGB(Scale.Half, Scale.Full, Scale.Zero), + new ScaleRGB(Scale.Zero, Scale.Half, Scale.Full), + + new ScaleRGB(Scale.Full, Scale.OneThird, Scale.Zero), + new ScaleRGB(Scale.Zero, Scale.Full, Scale.OneThird), + new ScaleRGB(Scale.OneThird, Scale.Zero, Scale.Full), + + new ScaleRGB(Scale.Full, Scale.Zero, Scale.OneThird), + new ScaleRGB(Scale.OneThird, Scale.Full, Scale.Zero), + new ScaleRGB(Scale.Zero, Scale.OneThird, Scale.Full), + + new ScaleRGB(Scale.Full, Scale.TwoThird, Scale.Zero), + new ScaleRGB(Scale.Zero, Scale.Full, Scale.TwoThird), + new ScaleRGB(Scale.TwoThird, Scale.Zero, Scale.Full), + + new ScaleRGB(Scale.Full, Scale.Zero, Scale.TwoThird), + new ScaleRGB(Scale.TwoThird, Scale.Full, Scale.Zero), + new ScaleRGB(Scale.Zero, Scale.TwoThird, Scale.Full), + }; + + int GetComponent(Scale scale, int index, int count) + { + int max = count - 1; + float half = max / 2.0f; + var scaleValue = GetScale(scale); + if (max == 0) return scaleValue.first; + (int baseValue, int topValue, float fraction) = + (index > half) + ? (scaleValue.mid, scaleValue.last, (index - half) / half) + : (scaleValue.first, scaleValue.mid, (index / half)); + return (int)(baseValue + fraction * (topValue - baseValue)); + } + + public static Marker GetMarker(RGB rgb) => (rgb != null) ? (new Marker { color = $"rgb({rgb.R}, {rgb.G}, {rgb.B})" }) : null; + + RGB GetColor(int colorIndex, int groupIndex, int numInBuild) + { + if (colorIndex >= _colorFamilies.Length) return null; + + var RGB = _colorFamilies[colorIndex]; + var R = GetComponent(RGB.R, groupIndex, numInBuild); + var G = GetComponent(RGB.G, groupIndex, numInBuild); + var B = GetComponent(RGB.B, groupIndex, numInBuild); + return new RGB(R, G, B); + } + + record ColorGroup(int FamilyIndex, int GroupIndex, int GroupSize, Dictionary GroupColorMap) + { + public int GroupIndex { get; set; } = GroupIndex; + } + + Dictionary? 
_groups; // name of build -> (color index, next index in group) + + public ColorProvider(Dictionary groups) + { + if (groups.Count <= 1) return; + + _groups = groups + .Take(_colorFamilies.Length) + .Select((kvp, index) => (kvp.Key, new ColorGroup(index, 0, kvp.Value, new()))) + .ToDictionary(); + } + + public RGB GetColor(string buildName, string id = null) + { + //Console.WriteLine($"- '{buildName}' '{id}'"); + if (_groups == null) return null; + ColorGroup group = _groups[buildName]; + if (group.FamilyIndex >= _colorFamilies.Length) return null; + + if ((id != null) && group.GroupColorMap.TryGetValue(id, out RGB color)) + { + return color; + } + //Console.WriteLine($"--- '{group}'"); + color = GetColor(group.FamilyIndex, group.GroupIndex++, group.GroupSize); + //Console.WriteLine($"----- '{color}'"); + if (id != null) group.GroupColorMap[id] = color; + return color; + } + + public void SetMarker(Scatter scatter, string buildName, string id = null) + { + Marker marker = GetMarker(GetColor(buildName, id)); + if (marker != null) scatter.marker = marker; + } + + public void DumpColorGroups() + { + if (_groups == null) + { + Console.WriteLine("No groups"); + return; + } + Console.WriteLine($"Number of groups: {_groups.Count}"); + foreach (var (name, group) in _groups) + { + Console.WriteLine($" '{name}': {group.FamilyIndex}, {group.GroupIndex}/{group.GroupSize}"); + } + } +} + +public class Aggregation +{ + public Func, double> Func; + public string Title; + public string UnitOverride; + + public Aggregation(Func, double> func, string title, string unitOverride) + { + Func = func; + Title = title; + UnitOverride = unitOverride; + } + + public static class Funcs + { + public static double Min(IEnumerable data) => data.Min(); + public static double Max(IEnumerable data) => data.Max(); + + public static double Volatility(IEnumerable data) + { + var max = data.Max(); + var min = data.Min(); + return Math.Round(((max - min) / min) * 100, 2); + } + + public static double Average(IEnumerable data) => data.Average(); + public static double Range(IEnumerable data) => data.Max() - data.Min(); + + public static double GeoMean(IEnumerable data) + { + double mult = 1; + int count = 0; + foreach (double value in data) + { + mult *= value; + count++; + } + return Math.Pow(mult, 1.0 / count); + } + } + + public static Aggregation Min { get; } = new Aggregation(Funcs.Min, "Min", null); + public static Aggregation Max { get; } = new Aggregation(Funcs.Max, "Max", null); + public static Aggregation Volatility { get; } = new Aggregation(Funcs.Volatility, "Volatility", "?"); + public static Aggregation Average { get; } = new Aggregation(Funcs.Average, "Average", null); + public static Aggregation Range { get; } = new Aggregation(Funcs.Range, "Range", null); + public static Aggregation GeoMean { get; } = new Aggregation(Funcs.GeoMean, "GeoMean", null); +} + +public class BaseMetric +{ + protected Func ExtractFunc; + public string Title; + + public BaseMetric(Func extract, string title) + { + ExtractFunc = extract; + Title = title; + } + + public TValue? DoExtract(TSource gc) + { + TValue? value; + try + { + value = ExtractFunc(gc); + } + catch (Exception e) + { + //Console.WriteLine($"Exception processing {Title}"); + //Console.WriteLine($" {e}"); + value = default; + } + return value; + } +} + +public class Metric : BaseMetric +{ + public string Unit; + public double? Cap; + private int _capExceededCount; + private double _capExceededMin; + private double _capExceededMax; + public double? 
AxisCountOffset; + + public Metric(Func extract, string title, string unit, double? cap = null, double? axisCountOffset = null) + : base((s => extract(s)), title) + { + Unit = unit; + Cap = cap; + AxisCountOffset = axisCountOffset; + } + + public Metric(Func extract, string title, string unit, double? cap = null, double? axisCountOffset = null) + : base(extract, title) + { + Unit = unit; + Cap = cap; + AxisCountOffset = axisCountOffset; + } + + public double? DoExtract(TSource gc, int count) + { + double? value = base.DoExtract(gc); + if (value.HasValue) + { + if (value > Cap) + { + _capExceededCount++; + _capExceededMin = Math.Min(_capExceededMin, value.Value); + _capExceededMax = Math.Max(_capExceededMax, value.Value); + value = Cap; + } + if (AxisCountOffset.HasValue) value += AxisCountOffset * count; + } + return value; + } + + private Metric Copy() => new(ExtractFunc, Title, Unit, Cap); + public Metric WithCap(double cap) => new(ExtractFunc, Title, Unit, cap, AxisCountOffset); + public Metric WithOffset(double offset) => new(ExtractFunc, Title, Unit, Cap, offset); + + public void ResetDiagnostics() + { + _capExceededCount = 0; + _capExceededMin = double.MaxValue; + _capExceededMax = double.MinValue; + } + + public void DisplayDiagnostics(string context) + { + if (_capExceededCount > 0) + { + Console.WriteLine($"Cap ({Cap.Value}) exceeded {_capExceededCount} times (min={_capExceededMin:N2}, max={_capExceededMax:N2}) for {context}"); + } + } + + public static Metric Promote(Metric metric, Func> oldExtract, Aggregation aggregation) + => new(extract: source => aggregation.Func(oldExtract(source).Select(metric.ExtractFunc).Where(NotNull).Select(value => value.Value)), + title: $"{aggregation.Title} of {metric.Title}", + unit: aggregation.UnitOverride ?? 
metric.Unit); +} + +public static class Metrics +{ + public static Metric Promote(Metric metric, Aggregation aggregation) + => Metric.Promote(metric, iterationData => iterationData.GCProcessData.GCs, aggregation); + public static Metric Promote(Metric metric, Aggregation aggregation) + => Metric.Promote(metric, benchmarkData => benchmarkData.Iterations, aggregation); + public static Metric Promote(Metric metric, Aggregation aggregation) + => Metric.Promote(metric, configData => configData.Benchmarks.Values, aggregation); + public static Metric Promote(Metric metric, Aggregation aggregation) + => Metric.Promote(metric, runData => runData.Configs.Values, aggregation); + public static Metric Promote(Metric metric, Aggregation aggregation) + => Metric.Promote(metric, data => data.Runs.Values, aggregation); + + public static class X + { + public static BaseMetric<(string, TraceGC), XValue> GCIndex { get; } = new(pair => new XValue(pair.Item2.Number), "GC Index"); + public static BaseMetric<(string, TraceGC), XValue> StartRelativeMSec { get; } = new(pair => new XValue(pair.Item2.StartRelativeMSec), "GC Start"); + public static BaseMetric<(string, BenchmarkData), XValue> BenchmarkName { get; } = new(pair => new XValue(pair.Item1), "Benchmark Name"); + public static BaseMetric<(string, IterationData), XValue> IterationBenchmarkName { get; } = new(pair => new XValue(pair.Item1), "Benchmark Name"); + } + + public static class G + { + public static Metric AllocedSinceLastGCMB = new(gc => gc.AllocedSinceLastGCMB, title: "Allocated", unit: "MB"); + // AllocRateMBSec is MB/s but this puts it on same y-axis as plain MB + public static Metric AllocRateMBSec = new(gc => gc.AllocRateMBSec, title: "Allocation rate", unit: "MB"); + public static Metric CommittedAfterTotalBookkeeping = new(gc => gc.CommittedUsageAfter.TotalBookkeepingCommitted, title: "Committed Book (after)", unit: "MB"); + public static Metric CommittedAfterInFree = new(gc => gc.CommittedUsageAfter.TotalCommittedInFree, title: "Committed In Free (after)", unit: "MB"); + public static Metric CommittedAfterInGlobalDecommit = new(gc => gc.CommittedUsageAfter.TotalCommittedInGlobalDecommit, title: "Committed In Global Decommit (after)", unit: "MB"); + public static Metric CommittedAfterInGlobalFree = new(gc => gc.CommittedUsageAfter.TotalCommittedInGlobalFree, title: "Committed In Global Free (after)", unit: "MB"); + public static Metric CommittedAfterInUse = new(gc => gc.CommittedUsageAfter.TotalCommittedInUse, title: "Committed In Use (after)", unit: "MB"); + public static List> CommittedAfterMetrics = ML(CommittedAfterTotalBookkeeping, CommittedAfterInFree, CommittedAfterInGlobalDecommit, CommittedAfterInGlobalFree, CommittedAfterInUse); + public static Metric CommittedBeforeTotalBookkeeping = new(gc => gc.CommittedUsageBefore.TotalBookkeepingCommitted, title: "Committed Book (before)", unit: "MB"); + public static Metric CommittedBeforeInFree = new(gc => gc.CommittedUsageBefore.TotalCommittedInFree, title: "Committed In Free (before)", unit: "MB"); + public static Metric CommittedBeforeInGlobalDecommit = new(gc => gc.CommittedUsageBefore.TotalCommittedInGlobalDecommit, title: "Committed In Global Decommit (before)", unit: "MB"); + public static Metric CommittedBeforeInGlobalFree = new(gc => gc.CommittedUsageBefore.TotalCommittedInGlobalFree, title: "Committed In Global Free (before)", unit: "MB"); + public static Metric CommittedBeforeInUse = new(gc => gc.CommittedUsageBefore.TotalCommittedInUse, title: "Committed In Use (before)", unit: 
"MB"); + public static List> CommittedBeforeMetrics = ML(CommittedBeforeTotalBookkeeping, CommittedBeforeInFree, CommittedBeforeInGlobalDecommit, CommittedBeforeInGlobalFree, CommittedBeforeInUse); + public static Metric DurationMSec = new(gc => gc.DurationMSec, "Duration", "ms"); + public static Metric GCCpuMSec = new(gc => gc.GCCpuMSec, "GC CPU", "ms"); + public static Metric Gen0Budget = new(gc => gc.GenBudgetMB(Gens.Gen0), "Gen0 budget", "MB"); + public static Metric Gen1Budget = new(gc => gc.GenBudgetMB(Gens.Gen1), "Gen1 budget", "MB"); + public static Metric Gen2Budget = new(gc => gc.GenBudgetMB(Gens.Gen2), "Gen2 budget", "MB"); + public static Metric GenLargeBudget = new(gc => gc.GenBudgetMB(Gens.GenLargeObj), "GenLarge budget", "MB"); + public static Metric GenPinBudget = new(gc => gc.GenBudgetMB(Gens.GenPinObj), "GenPin budget", "MB"); + public static Metric Generation = new(gc => gc.Generation, "Generation", "gen"); + public static Metric Gen0Fragmentation = new(gc => gc.GenFragmentationMB(Gens.Gen0), "Gen0 fragmentation", "MB"); + public static Metric Gen1Fragmentation = new(gc => gc.GenFragmentationMB(Gens.Gen1), "Gen1 fragmentation", "MB"); + public static Metric Gen2Fragmentation = new(gc => gc.GenFragmentationMB(Gens.Gen2), "Gen2 fragmentation", "MB"); + public static Metric GenLargeFragmentation = new(gc => gc.GenFragmentationMB(Gens.GenLargeObj), "GenLarge fragmentation", "MB"); + public static Metric GenPinFragmentation = new(gc => gc.GenFragmentationMB(Gens.GenPinObj), "GenPin fragmentation", "MB"); + public static Metric Gen0FragmentationPercent = new(gc => gc.GenFragmentationPercent(Gens.Gen0), "Gen0 fragmentation %", "%"); + public static Metric Gen1FragmentationPercent = new(gc => gc.GenFragmentationPercent(Gens.Gen1), "Gen1 fragmentation %", "%"); + public static Metric Gen2FragmentationPercent = new(gc => gc.GenFragmentationPercent(Gens.Gen2), "Gen2 fragmentation %", "%"); + public static Metric GenLargeFragmentationPercent = new(gc => gc.GenFragmentationPercent(Gens.GenLargeObj), "GenLarge fragmentation %", "%"); + public static Metric GenPinFragmentationPercent = new(gc => gc.GenFragmentationPercent(Gens.GenPinObj), "GenPin fragmentation %", "%"); + public static Metric Gen0In = new(gc => gc.GenInMB(Gens.Gen0), "Gen0 Memory (in)", "MB"); + public static Metric Gen1In = new(gc => gc.GenInMB(Gens.Gen1), "Gen1 Memory (in)", "MB"); + public static Metric Gen2In = new(gc => gc.GenInMB(Gens.Gen2), "Gen2 Memory (in)", "MB"); + public static Metric GenLargeIn = new(gc => gc.GenInMB(Gens.GenLargeObj), "GenLarge Memory (in)", "MB"); + public static Metric GenPinIn = new(gc => gc.GenInMB(Gens.GenPinObj), "GenPin Memory (in)", "MB"); + public static Metric Gen0ObjSizeAfter = new(gc => gc.GenObjSizeAfterMB(Gens.Gen0), "Gen0 object size (after)", "MB"); + public static Metric Gen1ObjSizeAfter = new(gc => gc.GenObjSizeAfterMB(Gens.Gen1), "Gen1 object size (after)", "MB"); + public static Metric Gen2ObjSizeAfter = new(gc => gc.GenObjSizeAfterMB(Gens.Gen2), "Gen2 object size (after)", "MB"); + public static Metric GenLargeObjSizeAfter = new(gc => gc.GenObjSizeAfterMB(Gens.GenLargeObj), "GenLarge object size (after)", "MB"); + public static Metric GenPinObjSizeAfter = new(gc => gc.GenObjSizeAfterMB(Gens.GenPinObj), "GenPin object size (after)", "MB"); + public static Metric Gen0Out = new(gc => gc.GenOutMB(Gens.Gen0), "Gen0 Memory (out)", "MB"); + public static Metric Gen1Out = new(gc => gc.GenOutMB(Gens.Gen1), "Gen1 Memory (out)", "MB"); + public static Metric Gen2Out = new(gc => 
gc.GenOutMB(Gens.Gen2), "Gen2 Memory (out)", "MB"); + public static Metric GenLargeOut = new(gc => gc.GenOutMB(Gens.GenLargeObj), "GenLarge Memory (out)", "MB"); + public static Metric GenPinOut = new(gc => gc.GenOutMB(Gens.GenPinObj), "GenPin Memory (out)", "MB"); + public static Metric Gen0Promoted = new(gc => gc.GenPromotedMB(Gens.Gen0), "Gen0 Promoted", "MB"); + public static Metric Gen1Promoted = new(gc => gc.GenPromotedMB(Gens.Gen1), "Gen1 Promoted", "MB"); + public static Metric Gen2Promoted = new(gc => gc.GenPromotedMB(Gens.Gen2), "Gen2 Promoted", "MB"); + public static Metric GenLargePromoted = new(gc => gc.GenPromotedMB(Gens.GenLargeObj), "GenLarge Promoted", "MB"); + public static Metric GenPinPromoted = new(gc => gc.GenPromotedMB(Gens.GenPinObj), "GenPin Promoted", "MB"); + public static Metric Gen0SizeAfter = new(gc => gc.GenSizeAfterMB(Gens.Gen0), "Gen0 size (after)", "MB"); + public static Metric Gen1SizeAfter = new(gc => gc.GenSizeAfterMB(Gens.Gen1), "Gen1 size (after)", "MB"); + public static Metric Gen2SizeAfter = new(gc => gc.GenSizeAfterMB(Gens.Gen2), "Gen2 size (after)", "MB"); + public static Metric GenLargeSizeAfter = new(gc => gc.GenSizeAfterMB(Gens.GenLargeObj), "GenLarge size (after)", "MB"); + public static Metric GenPinSizeAfter = new(gc => gc.GenSizeAfterMB(Gens.GenPinObj), "GenPin size (after)", "MB"); + public static Metric Gen0SizeBefore = new(gc => gc.GenSizeBeforeMB[(int) Gens.Gen0], "Gen0 size (before)", "MB"); + public static Metric Gen1SizeBefore = new(gc => gc.GenSizeBeforeMB[(int) Gens.Gen1], "Gen1 size (before)", "MB"); + public static Metric Gen2SizeBefore = new(gc => gc.GenSizeBeforeMB[(int) Gens.Gen2], "Gen2 size (before)", "MB"); + public static Metric GenLargeSizeBefore = new(gc => gc.GenSizeBeforeMB[(int) Gens.GenLargeObj], "GenLarge size (before)", "MB"); + public static Metric GenPinSizeBefore = new(gc => gc.GenSizeBeforeMB[(int) Gens.GenPinObj], "GenPin size (before)", "MB"); + //public static Metric Condemned = new(gc => gc.GetCondemnedReasons()); + + // TODO: GlobalHeapHistory.* + //public static Metric Ghh = new(gc => gc.GlobalHeapHistory., "", ""); + public static Metric IsConcurrent = new (gc => Convert.ToDouble((gc.GlobalHeapHistory.GlobalMechanisms & GCGlobalMechanisms.Concurrent) != 0), "Is concurrent", "Y/N"); + public static Metric IsCompaction = new (gc => Convert.ToDouble((gc.GlobalHeapHistory.GlobalMechanisms & GCGlobalMechanisms.Compaction) != 0), "Is compaction", "Y/N"); + public static Metric IsPromotion = new (gc => Convert.ToDouble((gc.GlobalHeapHistory.GlobalMechanisms & GCGlobalMechanisms.Promotion) != 0), "Is promotion", "Y/N"); + public static Metric IsDemotion = new (gc => Convert.ToDouble((gc.GlobalHeapHistory.GlobalMechanisms & GCGlobalMechanisms.Demotion) != 0), "Is demotion", "Y/N"); + public static Metric IsCardBundles = new (gc => Convert.ToDouble((gc.GlobalHeapHistory.GlobalMechanisms & GCGlobalMechanisms.CardBundles) != 0), "Is cardbundles", "Y/N"); + public static Metric NumHeaps = new((gc => gc.GlobalHeapHistory.NumHeaps), "GC Heaps", "#"); + public static Metric NumHeapsWithOffset = NumHeaps.WithOffset(0.05); + + public static Metric HeapCount = new(gc => gc.HeapCount, "Heap count", "#"); + + // HeapCountSample + public static Metric HcsElapsedTimeBetweenGCs = new(gc => gc.HeapCountSample.ElapsedTimeBetweenGCsMSec, "HCSampleElapsed", "ms"); + public static Metric HcsGCIndex = new(gc => gc.HeapCountSample.GCIndex, "HCSampleGCIndex", "#"); + public static Metric HcsGCPauseTime = new(gc => 
gc.HeapCountSample.GCPauseTimeMSec, "HCSampleGCPause", "ms"); + public static Metric HcsMslWaitTime = new(gc => gc.HeapCountSample.MslWaitTimeMSec, "HCSampleGCMslWait", "ms"); + + // HeapCountTuning + public static Metric HctGCIndex = new(gc => gc.HeapCountTuning?.GCIndex, "HCTuningGCIndex", "#"); + public static Metric HctMtcp = new((gc => gc.HeapCountTuning?.MedianThroughputCostPercent), "Median TCP", "%"); + public static Metric HctMtcpCap15 = HctMtcp.WithCap(15); + public static Metric HctNewHeapCount = new(gc => gc.HeapCountTuning?.NewHeapCount, "HCTuningNewHeapCount", "#"); + public static Metric HctSmtcp = new(gc => gc.HeapCountTuning?.SmoothedMedianThroughputCostPercent, "Smoothed MTCP", "%"); + public static Metric HctSpaceCostDown = new(gc => gc.HeapCountTuning?.SpaceCostPercentDecreasePerStepDown, "Space cost (down)", "%"); + public static Metric HctSpaceCostUp = new(gc => gc.HeapCountTuning?.SpaceCostPercentIncreasePerStepUp, "Space cost (up)TCP", "%"); + public static Metric HctTPCostDown = new(gc => gc.HeapCountTuning?.ThroughputCostPercentIncreasePerStepDown, "TP cost (down)", "%"); + public static Metric HctTPCostUp = new(gc => gc.HeapCountTuning?.ThroughputCostPercentReductionPerStepUp, "TP cost (up)", "%"); + + public static Metric HeapSizeAfter = new(gc => gc.HeapSizeAfterMB, "Heap size (after)", "MB"); + public static Metric HeapSizeBefore = new(gc => gc.HeapSizeBeforeMB, "Heap size (before)", "MB"); + public static Metric HeapSizePeak = new(gc => gc.HeapSizePeakMB, "Heap size (peak)", "MB"); + + // TODO: HeapStats.* + //public static Metric Hs = new(gc => gc.HeapStats., "", ""); + + // TODO: Remaining are less comprehensive + public static Metric PauseDuration = new((gc => gc.PauseDurationMSec), "GC pause", "ms"); + public static Metric PausePercent = new((gc => gc.PauseTimePercentageSinceLastGC), "GC pause %", "%"); + public static Metric EndOfSegAllocated = new(gc => gc.PerHeapHistories.Sum(p => p.EndOfSegAllocated), title: "EndOfSegAllocated", unit: "?"); + public static Metric PauseStack = new(gc => gc.PerHeapMarkTimes.Values.Select(mi => mi.MarkTimes[(int) MarkRootType.MarkStack]).Sum(), "Pause (stack)", "ms"); + public static Metric PauseFQ = new(gc => gc.PerHeapMarkTimes.Values.Select(mi => mi.MarkTimes[(int) MarkRootType.MarkFQ]).Sum(), "Pause (FQ)", "ms"); + public static Metric PauseHandles = new(gc => gc.PerHeapMarkTimes.Values.Select(mi => mi.MarkTimes[(int) MarkRootType.MarkHandles]).Sum(), "Pause (handles)", "ms"); + public static Metric PauseCards = new(gc => gc.PerHeapMarkTimes.Values.Select(mi => mi.MarkTimes[(int) MarkRootType.MarkOlder]).Sum(), "Pause (cards)", "ms"); + public static Metric ObjectSpaceStack = new(gc => gc.PerHeapMarkTimes.Values.Select(mi => mi.MarkPromoted[(int) MarkRootType.MarkStack]).Sum(), "Obj space (stack)", "bytes"); + public static Metric ObjectSpaceFQ = new(gc => gc.PerHeapMarkTimes.Values.Select(mi => mi.MarkPromoted[(int) MarkRootType.MarkFQ]).Sum(), "Obj space (FQ)", "bytes"); + public static Metric ObjectSpaceHandles = new(gc => gc.PerHeapMarkTimes.Values.Select(mi => mi.MarkPromoted[(int) MarkRootType.MarkHandles]).Sum(), "Obj space (handles)", "bytes"); + public static Metric ObjectSpaceCards = new(gc => gc.PerHeapMarkTimes.Values.Select(mi => mi.MarkPromoted[(int) MarkRootType.MarkOlder]).Sum(), "Obj space (cards)", "bytes"); + public static Metric Suspend = new(gc => gc.SuspendDurationMSec, "Suspend", "ms"); + public static Metric UserAllocated = new(gc => gc.UserAllocated.Sum(), "UserAllocated", "bytes"); + } + + 
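    // Illustrative sketch, not part of the original metric set: a per-GC metric from G can be lifted to an
    // iteration-level metric by aggregating it over every GC in the iteration, using the Promote overloads and
    // the Aggregation helpers defined above. The two example fields below are hypothetical and assume only
    // those existing helpers plus the G metrics they reference (mirroring how I.MaxPauseDuration is built).
    public static Metric<IterationData> ExampleAveragePauseDuration = Promote(Metrics.G.PauseDuration, Aggregation.Average);
    public static Metric<IterationData> ExampleGeoMeanHeapSizeAfter = Promote(Metrics.G.HeapSizeAfter, Aggregation.GeoMean);
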
public static class I
    {
        public static Metric<IterationData> MaxNumHeaps = Promote(Metrics.G.NumHeaps, Aggregation.Max);
        public static Metric<IterationData> MaxPauseDuration = Promote(Metrics.G.PauseDuration, Aggregation.Max);

        public static Metric<IterationData> TotalSuspensionTime = new (iterationData => iterationData.GCSummaryInfo.TotalSuspensionTimeMSec, "Total suspension time", "ms");
        public static Metric<IterationData> PercentPauseTimeInGC = new (iterationData => iterationData.GCSummaryInfo.PercentPauseTimeInGC, "% pause GC", "%");
        public static Metric<IterationData> PercentTimeInGC = new (iterationData => iterationData.GCSummaryInfo.PercentTimeInGC, "% GC", "%");
        public static Metric<IterationData> MeanHeapSizeBeforeMB = new (iterationData => iterationData.GCSummaryInfo.MeanHeapSizeBeforeMB, "Mean heap size (before)", "MB");
        public static Metric<IterationData> MaxHeapSizeMB = new (iterationData => iterationData.GCSummaryInfo.MaxHeapSizeMB, "Max heap size", "MB");
        public static Metric<IterationData> TotalAllocationsMB = new (iterationData => iterationData.GCSummaryInfo.TotalAllocationsMB, "Total allocations", "MB");
        public static Metric<IterationData> GCScore = new (iterationData => iterationData.GCSummaryInfo.GCScore, "GC score", "score"); // MB * %

        public static Metric<IterationData> MaxHeapCount = new (iterationData => iterationData.GCSummaryInfo.MaxHeapCount, "Max heap count", "#");
        public static Metric<IterationData> NumberOfHeapCountSwitches = new (iterationData => iterationData.GCSummaryInfo.NumberOfHeapCountSwitches, "# hc changes", "#");
        public static Metric<IterationData> NumberOfHeapCountDirectionChanges = new (iterationData => iterationData.GCSummaryInfo.NumberOfHeapCountDirectionChanges, "# hc dir changes", "#");

        public static Metric<IterationData> MaxWorkingSetMB = new (iterationData => iterationData.LoadInfo.MaxWorkingSetMB, "Max working set", "MB");
        public static Metric<IterationData> P99WorkingSetMB = new (iterationData => iterationData.LoadInfo.P99WorkingSetMB, "P99 working set", "MB");
        public static Metric<IterationData> P95WorkingSetMB = new (iterationData => iterationData.LoadInfo.P95WorkingSetMB, "P95 working set", "MB");
        public static Metric<IterationData> P90WorkingSetMB = new (iterationData => iterationData.LoadInfo.P90WorkingSetMB, "P90 working set", "MB");
        public static Metric<IterationData> P75WorkingSetMB = new (iterationData => iterationData.LoadInfo.P75WorkingSetMB, "P75 working set", "MB");
        public static Metric<IterationData> P50WorkingSetMB = new (iterationData => iterationData.LoadInfo.P50WorkingSetMB, "P50 working set", "MB");
        public static List<Metric<IterationData>> WorkingSetMBList = ML(MaxWorkingSetMB, P99WorkingSetMB, P95WorkingSetMB, P90WorkingSetMB, P75WorkingSetMB, P50WorkingSetMB);

        public static Metric<IterationData> MaxPrivateMemoryMB = new (iterationData => iterationData.LoadInfo.MaxPrivateMemoryMB, "Max private memory", "MB");
        public static Metric<IterationData> P99PrivateMemoryMB = new (iterationData => iterationData.LoadInfo.P99PrivateMemoryMB, "P99 private memory", "MB");
        public static Metric<IterationData> P95PrivateMemoryMB = new (iterationData => iterationData.LoadInfo.P95PrivateMemoryMB, "P95 private memory", "MB");
        public static Metric<IterationData> P90PrivateMemoryMB = new (iterationData => iterationData.LoadInfo.P90PrivateMemoryMB, "P90 private memory", "MB");
        public static Metric<IterationData> P75PrivateMemoryMB = new (iterationData => iterationData.LoadInfo.P75PrivateMemoryMB, "P75 private memory", "MB");
        public static Metric<IterationData> P50PrivateMemoryMB = new (iterationData => iterationData.LoadInfo.P50PrivateMemoryMB, "P50 private memory", "MB");
        public static List<Metric<IterationData>> PrivateMemoryMBList = ML(MaxPrivateMemoryMB, P99PrivateMemoryMB, P95PrivateMemoryMB, P90PrivateMemoryMB, P75PrivateMemoryMB, P50PrivateMemoryMB);

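        // Illustrative sketch, not part of the original notebook: metric lists like WorkingSetMBList above are
        // what the table/chart helpers later in this cell take as their 'metrics' argument, so related series
        // share one chart and y-axis unit. The hypothetical list below groups just the memory ceilings; it
        // assumes only the ML(...) helper already used in this cell and the metrics defined above.
        public static List<Metric<IterationData>> ExampleMemoryCeilingList = ML(MaxWorkingSetMB, MaxPrivateMemoryMB);
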
public static Metric RequestsPerMSec = new (iterationData => iterationData.LoadInfo.RequestsPerMSec, "RPS", "RPS"); + public static Metric MeanLatencyMS = new (iterationData => iterationData.LoadInfo.MeanLatencyMS, "Mean latency", "ms"); + public static Metric Latency99thMS = new (iterationData => iterationData.LoadInfo.Latency99thMS, "Latency 99th", "ms"); + public static Metric Latency90thMS = new (iterationData => iterationData.LoadInfo.Latency90thMS, "Latency 90th", "ms"); + public static Metric Latency75thMS = new (iterationData => iterationData.LoadInfo.Latency75thMS, "Latency 75th", "ms"); + public static Metric Latency50thMS = new (iterationData => iterationData.LoadInfo.Latency50thMS, "Latency 50th", "ms"); + public static List> LatencyMSList = ML(MeanLatencyMS, Latency99thMS, Latency90thMS, Latency75thMS, Latency50thMS); + } + + public static class B + { + public static Metric MaxHeapCount = Promote(Metrics.I.MaxHeapCount, Aggregation.Max); + public static Metric MaxPauseDurationBenchmark = Promote(Metrics.I.MaxPauseDuration, Aggregation.Max); + public static Metric MaxPercentPauseTimeInGC = Promote(Metrics.I.PercentPauseTimeInGC, Aggregation.Max); + public static Metric AveragePercentPauseTimeInGC = Promote(Metrics.I.PercentPauseTimeInGC, Aggregation.Average); + } + +} + +// Exploratory +public abstract class NameSimplifier +{ + public abstract (string title, Dictionary) Simplify(List names); + + public static PrefixSimplifier PrefixDashed { get; } = new PrefixSimplifier('-'); +} + +public class ListSimplifier : NameSimplifier +{ + private Dictionary _nameMap; + + public ListSimplifier(params (string inData, string toDisplay)[] names) + : this((IEnumerable<(string, string)>) names) {} + + public ListSimplifier(IEnumerable<(string inData, string toDisplay)> names) + => _nameMap = names.ToDictionary(); + + public override (string title, Dictionary) Simplify(List names) => (null, _nameMap); +} + +public class PrefixSimplifier : NameSimplifier +{ + private char _delimiter; + private string _emptyResult; + + public PrefixSimplifier(char delimiter, string emptyResult = "<>") + { + _delimiter = delimiter; + _emptyResult = emptyResult; + } + + public override (string title, Dictionary) Simplify(List names) + { + if (names.Count == 0) return (null, null); + List namesToScan = names; + int longestMatch = namesToScan.Select(n => n.Length).Min(); + bool allContinueWithDelimiter = namesToScan.All(n => (n.Length == longestMatch) || (n[longestMatch] == _delimiter)); + if (allContinueWithDelimiter) + { + namesToScan = namesToScan.Select(n => ((allContinueWithDelimiter && (n.Length == longestMatch)) ? (n + _delimiter) : n)).ToList(); + longestMatch++; + } + foreach (string name in namesToScan) + { + int overlap = name.TakeWhile((ch, i) => (i < longestMatch) && (ch == namesToScan[0][i])).Count(); + longestMatch = (overlap == 0) ? 0 : name.LastIndexOf(_delimiter, overlap - 1) + 1; + if (longestMatch == 0) break; + } + if (longestMatch > 0) + { + return ( + names[0].Substring(0, longestMatch - 1), + names.Select(config => (config, (longestMatch >= config.Length) ? _emptyResult : config.Substring(longestMatch))) + .ToDictionary() + ); + } + return (null, null); + } +} + +// Some will be null depending on the chart type +record SeriesInfo(Metric Metric, string Run, string Config, ConfigData ConfigData, string Benchmark, int? 
Iteration, IterationData IterationData); + +abstract class ChartType +{ + public abstract BaseMetric<(string, TData), XValue> DefaultXMetric { get; } + public abstract string DefaultBenchmarkMap(string benchmark); + + public abstract IEnumerable> GetSeries(DataManager dataManager, List> metrics, Filter runFilter, Filter configFilter, + Filter benchmarkFilter, IntFilter iterationFilter, ConfigIterationFilter configIterationFilter, IEnumerable benchmarkList); + public abstract string GetColorFamilyKey(SeriesInfo info, bool multipleMetrics, bool includeRunName, bool multipleConfigs, + Dictionary configDisplayNames, bool multipleBenchmarks); + public abstract string GetColorFamilyId(SeriesInfo info, bool multipleMetrics); + public abstract string GetSeriesTitle(SeriesInfo info, string colorFamilyKey, bool multipleMetrics); + public abstract string GetChartTitle(); + public abstract List> GetDataSource(SeriesInfo info, + Filter benchmarkFilter, IntFilter iterationFilter, ConfigIterationFilter configIterationFilter, Func dataFilter); +} + +class BenchmarksChartType : ChartType +{ + public override BaseMetric<(string, BenchmarkData), XValue> DefaultXMetric { get; } = Metrics.X.BenchmarkName; + public override string DefaultBenchmarkMap(string benchmark) => ""; + + public override IEnumerable> GetSeries(DataManager dataManager, List> metrics, Filter runFilter, Filter configFilter, + Filter benchmarkFilter, IntFilter iterationFilter, ConfigIterationFilter configIterationFilter, IEnumerable benchmarkList) + { + foreach (var metric in metrics) + { + foreach ((string run, string config, ConfigData configData) in dataManager.GetConfigsWithData(runFilter, configFilter)) + { + if (!configIterationFilter.MightInclude((config))) continue; + + // Note - could filter out configs that don't have a relevant benchmark/iteration + yield return new (metric, run, config, configData, null, null, null); + } + } + } + + public override string GetColorFamilyKey(SeriesInfo info, bool multipleMetrics, bool includeRunName, bool multipleConfigs, + Dictionary configDisplayNames, bool multipleBenchmarks) + { + string runDisplay = includeRunName ? $"{info.Run}, " : ""; + string configDisplay = multipleConfigs ? (configDisplayNames?.GetValueOrDefault(info.Config) ?? info.Config) : ""; + string colorFamilyKey = $"{runDisplay}{configDisplay}"; + return colorFamilyKey; + } + + public override string GetColorFamilyId(SeriesInfo info, bool multipleMetrics) => multipleMetrics ? 
$"{info.Metric.Title} / " : ""; + public override string GetSeriesTitle(SeriesInfo info, string colorFamilyKey, bool multipleMetrics) => $"{GetColorFamilyId(info, multipleMetrics)}{colorFamilyKey}"; + + public override string GetChartTitle() => "Per-benchmark behavior"; + + public override List> GetDataSource(SeriesInfo info, + Filter benchmarkFilter, IntFilter iterationFilter, ConfigIterationFilter configIterationFilter, Func dataFilter) + => info.ConfigData.Benchmarks + .Where(benchmark => benchmarkFilter.Include(benchmark.Key) + && benchmark.Value.Iterations.WithIndex() + .Any(pair => (pair.Item1 != null) + && iterationFilter.Include(pair.Item2) + && configIterationFilter.Include(info.Config, pair.Item2))); +} + +class IterationsChartType : ChartType +{ + public override BaseMetric<(string, IterationData), XValue> DefaultXMetric { get; } = Metrics.X.IterationBenchmarkName; + public override string DefaultBenchmarkMap(string benchmark) => ""; + + public override IEnumerable> GetSeries(DataManager dataManager, List> metrics, Filter runFilter, Filter configFilter, + Filter benchmarkFilter, IntFilter iterationFilter, ConfigIterationFilter configIterationFilter, IEnumerable benchmarkList) + { + foreach (var metric in metrics) + { + foreach ((string run, string config, ConfigData configData) in dataManager.GetConfigsWithData(runFilter, configFilter)) + { + foreach (int iteration in configData.GetIterations(config, benchmarkFilter, iterationFilter, configIterationFilter)) + { + yield return new (metric, run, config, configData, null, iteration, null); + } + } + } + } + + public override string GetColorFamilyKey(SeriesInfo info, bool multipleMetrics, bool includeRunName, bool multipleConfigs, + Dictionary configDisplayNames, bool multipleBenchmarks) + { + string metricDisplay = multipleMetrics ? $"{info.Metric.Title}, " : ""; + string runDisplay = includeRunName ? $"{info.Run}, " : ""; + string configDisplay = multipleConfigs ? (configDisplayNames?.GetValueOrDefault(info.Config) ?? 
info.Config) : ""; + string colorFamilyKey = $"{metricDisplay}{runDisplay}{configDisplay}"; + + return colorFamilyKey; + } + + public override string GetColorFamilyId(SeriesInfo info, bool multipleMetrics) => $"_{info.Iteration}"; + public override string GetSeriesTitle(SeriesInfo info, string colorFamilyKey, bool multipleMetrics) => $"{colorFamilyKey}{GetColorFamilyId(info, multipleMetrics)}"; + + public override string GetChartTitle() => "Per-iteration behavior"; + + public override List> GetDataSource(SeriesInfo info, + Filter benchmarkFilter, IntFilter iterationFilter, ConfigIterationFilter configIterationFilter, Func dataFilter) + { + if (!iterationFilter.Include(info.Iteration.Value) + || !configIterationFilter.Include(info.Config, info.Iteration.Value)) + { + throw new Exception("IterationsChartType.GetDataSource expected GetSeries to filter iterations"); + } + + return info.ConfigData.Benchmarks + .Where(benchmark => benchmarkFilter.Include(benchmark.Key)) + .Where(benchmark => info.Iteration < benchmark.Value.Iterations.Count) + .Select(benchmark => KeyValuePair.Create(benchmark.Key, benchmark.Value.Iterations[info.Iteration.Value])) + .Where(kvp => kvp.Value != null); + } +} + +class TraceGCChartType : ChartType +{ + public override BaseMetric<(string, TraceGC), XValue> DefaultXMetric { get; } = Metrics.X.GCIndex; + public override string DefaultBenchmarkMap(string benchmark) => benchmark; + + public override IEnumerable> GetSeries(DataManager dataManager, List> metrics, Filter runFilter, Filter configFilter, + Filter benchmarkFilter, IntFilter iterationFilter, ConfigIterationFilter configIterationFilter, IEnumerable benchmarkList) + { + foreach (var metric in metrics) + { + foreach (string benchmark in benchmarkList) + { + foreach ((string run, string config, int iteration, IterationData iterationData) in + dataManager.GetIterationsForBenchmark(runFilter, configFilter, iterationFilter, configIterationFilter, benchmark)) + { + yield return new (metric, run, config, null, benchmark, iteration, iterationData); + } + } + } + } + + public override string GetColorFamilyKey(SeriesInfo info, bool multipleMetrics, bool includeRunName, bool multipleConfigs, + Dictionary configDisplayNames, bool multipleBenchmarks) + { + string benchmarkDisplay = multipleBenchmarks ? $"{info.Benchmark}, " : ""; + string metricDisplay = multipleMetrics ? $"{info.Metric.Title}, " : ""; + string runDisplay = includeRunName ? $"{info.Run}, " : ""; + string configDisplay = multipleConfigs ? (configDisplayNames?.GetValueOrDefault(info.Config) ?? 
info.Config) : ""; + string colorFamilyKey = $"{benchmarkDisplay}{metricDisplay}{runDisplay}{configDisplay}"; + + return colorFamilyKey; + } + + public override string GetColorFamilyId(SeriesInfo info, bool multipleMetrics) => $"_{info.Iteration}"; + public override string GetSeriesTitle(SeriesInfo info, string colorFamilyKey, bool multipleMetrics) => $"{colorFamilyKey}{GetColorFamilyId(info, multipleMetrics)}"; + + public override string GetChartTitle() => "Per-run behavior"; + + public override List> GetDataSource(SeriesInfo info, + Filter benchmarkFilter, IntFilter iterationFilter, ConfigIterationFilter configIterationFilter, Func dataFilter) + => info.IterationData.GCProcessData?.GCs.Where(gc => gc.GlobalHeapHistory != null).Where(dataFilter).Select(gc => KeyValuePair.Create("", gc)); +} + +public struct XValue : IComparable, IEquatable +{ + private double _value; + private string _name; + + public XValue(double value) { _value = value; _name = null; } + public XValue(string name) { _value = 0; _name = name; } + + public bool HasValue => _name == null; + public bool HasName => _name != null; + + public double GetValue() => HasValue ? _value : throw new Exception("XValue.GetValue on a named value"); + public string GetName() => HasName ? _name : throw new Exception("XValue.GetName on a numerical value"); + + public override int GetHashCode() => HasValue ? GetValue().GetHashCode() : GetName().GetHashCode(); + public bool Equals(XValue other) => HasValue ? (other.HasValue && (GetValue() == other.GetValue())) : (other.HasName && (GetName() == other.GetName())); + public override bool Equals(object other) => other is XValue otherX && Equals(otherX); + + public int CompareTo(XValue other) + => (HasValue && other.HasName) ? 1 + : (HasName && other.HasValue) ? -1 + : HasValue ? GetValue().CompareTo(other.GetValue()) + : GetName().CompareTo(other.GetName()); + + public override string ToString() => HasValue ? _value.ToString() : _name; + public string ToString(string format) => HasValue ? _value.ToString(format) : _name; +} + +public abstract class XArrangement +{ + private string _titleOverride; + + public XArrangement(string titleOverride) { _titleOverride = titleOverride; } + + public string GetNewTitle(string oldTitle) => _titleOverride ?? oldTitle; + public abstract List<(XValue x, double? y)> Arrange(List<(XValue x, double? y)> data, List<(XValue x, double? y)> firstDataPreSorted); + // This interface probably needs some work. The idea is that, given the next xvalue in each series, this selects which one + // should be next overall. + public abstract XValue? ChooseNext(IEnumerable xavlues); + + public class DefaultXArrangement : XArrangement + { + public DefaultXArrangement() : base(null) {} + public override List<(XValue x, double? y)> Arrange(List<(XValue x, double? y)> data, List<(XValue x, double? y)> firstDataPreSorted) => data; + public override XValue? ChooseNext(IEnumerable xvalues) => xvalues.FirstOrDefault(x => x.HasValue); + } + + public class PercentileXArrangement : XArrangement + { + private bool _descending; + public PercentileXArrangement(bool descending = false) : base("Percentile") { _descending = descending; } + public override List<(XValue x, double? y)> Arrange(List<(XValue x, double? y)> data, List<(XValue x, double? y)> firstDataPreSorted) + { + var sortedData = _descending + ? 
data.Select(d => d.y).OrderByDescending(y => y) + : data.Select(d => d.y).OrderBy(y => y); + return sortedData.Select((d, i) => (new XValue(i / (double) data.Count), d)).ToList(); + } + public override XValue? ChooseNext(IEnumerable xvalues) => xvalues.Min(); + } + + public class SortedXArrangement : XArrangement + { + private bool _descending; + public SortedXArrangement(bool descending = true) : base("Metric Rank") { _descending = descending; } + public override List<(XValue x, double? y)> Arrange(List<(XValue x, double? y)> data, List<(XValue x, double? y)> firstDataPreSorted) + { + var sortedData = _descending + ? data.Select(d => d.y).OrderByDescending(y => y) + : data.Select(d => d.y).OrderBy(y => y); + return sortedData.Select((d, i) => (new XValue(i), d)).ToList(); + } + public override XValue? ChooseNext(IEnumerable xvalues) => xvalues.Min(); + } + + public class CombinedSortedXArrangement : XArrangement + { + public CombinedSortedXArrangement() : base(null) {} + public override List<(XValue x, double? y)> Arrange(List<(XValue x, double? y)> data, List<(XValue x, double? y)> firstDataPreSorted) + => data.Join(firstDataPreSorted, d => d.x, d => d.x, ((d, sortedEntry) => (d.x, d.y, sortedEntry.y))) + .OrderByDescending(triple => triple.Item3) + .Select(triple => (triple.x, triple.Item2)); + public override XValue? ChooseNext(IEnumerable xvalues) => xvalues.FirstOrDefault(x => x.HasValue); + } + + public class RelativeXArrangement : XArrangement + { + public RelativeXArrangement() : base(null) {} + public override List<(XValue x, double? y)> Arrange(List<(XValue x, double? y)> data, List<(XValue x, double? y)> firstDataPreSorted) + { + if (data.Count == 0) return data; + if (data[0].x.HasName) + { + Console.WriteLine($"Applying {nameof(RelativeXArrangement)} on non-numeric x values (first is {data[0].x})"); + return data; + } + double firstValue = data[0].x.GetValue(); + return data.Select(d => (new XValue(d.x.GetValue() - firstValue), d.y)); + } + public override XValue? ChooseNext(IEnumerable xvalues) => xvalues.Min(); // not necessarily? + } +} + +public static class XArrangements +{ + public static XArrangement.DefaultXArrangement Default { get; } = new (); + public static XArrangement.PercentileXArrangement Percentile { get; } = new(); + public static XArrangement.SortedXArrangement Sorted { get; } = new(); + public static XArrangement.CombinedSortedXArrangement CombinedSorted { get; } = new(); + public static XArrangement.RelativeXArrangement Relative { get; } = new(); +} + +public abstract class DataPresenter +{ + public bool Debug; + + public abstract void Clear(); + + // true if ok + public abstract bool PrepareUnits(IEnumerable units); + public abstract void SetColorGroups(Dictionary colorGroups); + public abstract void Display(); + public abstract TResult Result { get; } + + // Below members are per-chart + + public abstract void Start(string title, string xlabel); + public abstract void AddSeries(string title, string unit, string colorFamilyKey, string colorFamilyId, List<(XValue x, double? y)> data); + public abstract void Finish(XArrangement xArrangement); +} + +public abstract class TextPresenter : DataPresenter>> +{ + public static RawTextPresenter RawText { get; } = new RawTextPresenter(); + public static MarkdownPresenter Markdown { get; } = new MarkdownPresenter(); + public static HtmlPresenter Html { get; } = new HtmlPresenter(); + public static CsvPresenter Csv { get; } = new CsvPresenter(); + + protected record struct DataPoint(XValue x, double? 
y); + private record Series(string title, string unit, List data); + private record Table(string title, string xlabel, List series); + + private List> _result = new(); + public override void Clear() => _result.Clear(); + + // true if ok + public override bool PrepareUnits(IEnumerable units) => true; + public override void SetColorGroups(Dictionary colorGroups) {} + + public override List> Result => _result; + + // Below members are per-table + + private Table _current; + + public override void Start(string title, string xlabel) { _current = new(title: title, xlabel: xlabel, series: new()); } + public override void AddSeries(string title, string unit, string colorFamilyKey, string colorFamilyId, List<(XValue x, double? y)> data) + => _current.series.Add(new(title: title, unit: unit, data: data.Select(pair => new DataPoint(pair.x, pair.y)).ToList())); + + private int MaxTokenLength(string phrase) => phrase.Split(' ').Select(s => s.Length).Max(); + protected string NDashes(int n) => new string('-', n); + protected string NSpaces(int n) => new string(' ', n); + protected void PadLeft(StringBuilder sb, int width) => sb.Insert(0, NSpaces(width - sb.Length)); + + protected abstract string MakeTitle(string title); + protected abstract string? StartTable(); + protected abstract IEnumerable HeaderLines(IEnumerable headerValues, IEnumerable widths); + protected abstract string? HeaderBorder(IEnumerable widths); + protected abstract IEnumerable DataLine(XValue xvalue, IEnumerable values, IEnumerable widths); + protected abstract string? EndTable(); + + protected const string lineStart = "| "; + protected const string lineDelim = " | "; + protected const string lineEnd = " |"; + + private static void AddIfNotNull(List list, string? value) + { + if (value != null) list.Add(value); + } + + public override void Finish(XArrangement xArrangement) + { + + int xWidth = _current.series.SelectMany(series => series.data.Select(d => d.x.ToString().Length)) + .Append(MaxTokenLength(_current.xlabel)) + .Max(); + List seriesWidths = _current.series.Select( + series => series.data.Select(d => d.y?.ToString("N3").Length ?? 0) + .Append((this is RawTextPresenter) ? MaxTokenLength(series.title) : series.title.Length) + .Append(series.unit.Length + 2) // "()" + .Max()); + var allWidths = seriesWidths.Prepend(xWidth); + List tableText = new(); + tableText.Add(MakeTitle(_current.title)); + tableText.Add(""); + AddIfNotNull(tableText, StartTable()); + + var headerValues = _current.series.Select(series => series.title).Prepend(_current.xlabel); + tableText.AddRange(HeaderLines(headerValues, allWidths)); + AddIfNotNull(tableText, HeaderBorder(allWidths)); + + int numSeries = _current.series.Count; + int[] nextIndices = new int[numSeries]; // all zeroes + DataPoint?[] candidates = new DataPoint?[numSeries]; + string[] elements = new string[numSeries + 1]; // includes X + while (true) + { + // Find next xvalue, if it exists. + for (int i = 0; i < numSeries; ++i) + { + while ((nextIndices[i] < _current.series[i].data.Count) + && !_current.series[i].data[nextIndices[i]].y.HasValue) + { + nextIndices[i]++; + } + + candidates[i] = nextIndices[i] < _current.series[i].data.Count + ? _current.series[i].data[nextIndices[i]] + : null; + } + XValue? 
next = xArrangement.ChooseNext(candidates.Select(p => p?.x)); + if (!next.HasValue) break; + + // Get values + for (int i = 0; i < numSeries; ++i) + { + if (!candidates[i].HasValue) continue; + if (!next.Value.Equals(candidates[i].Value.x)) + { + candidates[i] = null; + continue; + } + nextIndices[i]++; + } + if (!candidates.Any(NotNull)) throw new Exception("internal error - no candidate used"); + + tableText.AddRange(DataLine(next.Value, candidates, allWidths)); + } + + AddIfNotNull(tableText, EndTable()); + _result.Add(tableText); + } +} + +public class RawTextPresenter : TextPresenter +{ + private const string borderStart = "+ "; + private const string borderDelim = "-+-"; + private const string borderEnd = " +"; + + protected override string MakeTitle(string title) => title; + protected override string? StartTable() => null; + + private List MakeLines(string phrase, int width) + { + List result = new(); + + string[] tokens = phrase.Split(' '); + StringBuilder current = new(); + foreach (string token in tokens) + { + if (token.Length > width) throw new Exception("Tokenization inconsistent"); + if ((current.Length + token.Length + 1) > width) + { + PadLeft(current, width); + result.Add(current.ToString()); + current = new(); + } + + if (current.Length > 0) current.Append(' '); + current.Append(token); + } + PadLeft(current, width); + result.Add(current.ToString()); + + return result; + } + + protected override IEnumerable HeaderLines(IEnumerable headerValues, IEnumerable widths) + { + var headerCells = headerValues.Zip(widths).Select(headerAndWidth => MakeLines(headerAndWidth.First, headerAndWidth.Second)); + int maxHeaderLines = headerCells.Select(lines => lines.Count).Max(); + foreach ((List cell, int width) in headerCells.Zip(widths)) + { + while (cell.Count < maxHeaderLines) cell.Insert(0, NSpaces(width)); + } + for (int i = 0; i < maxHeaderLines; ++i) + { + yield return (lineStart + string.Join(lineDelim, headerCells.Select(cell => cell[i])) + lineEnd); + } + } + + protected override string? HeaderBorder(IEnumerable widths) + => borderStart + string.Join(borderDelim, widths.Select(n => NDashes(n))) + borderEnd; + + protected override IEnumerable DataLine(XValue xvalue, IEnumerable values, IEnumerable widths) + { + var cells = values.Select(p => p?.y?.ToString("N3")).Prepend(xvalue.ToString()) + .Zip(widths).Select(p => (p.First ?? "").PadLeft(p.Second)); + yield return lineStart + string.Join(lineDelim, cells) + lineEnd; + } + + protected override string? EndTable() => null; + + public override void Display() + { + foreach (List table in Result) + { + + Console.WriteLine(); + foreach (string line in table) + { + Console.WriteLine(line); + } + } + } +} + +public class MarkdownPresenter : TextPresenter +{ + protected override string MakeTitle(string title) => $"### {title}"; + protected override string? StartTable() => null; + + protected override IEnumerable HeaderLines(IEnumerable headerValues, IEnumerable widths) + { + yield return lineStart + string.Join(lineDelim, headerValues.Zip(widths).Select(pair => pair.First.PadLeft(pair.Second))) + lineEnd; + } + + protected override string? HeaderBorder(IEnumerable widths) + => lineStart + string.Join(lineDelim, widths.Select(n => NDashes(n-1) + ":")) + lineEnd; + + protected override IEnumerable DataLine(XValue xvalue, IEnumerable values, IEnumerable widths) + { + var cells = values.Select(p => p?.y?.ToString("N3")).Prepend(xvalue.ToString()) + .Zip(widths).Select(p => (p.First ?? 
"").PadLeft(p.Second)); + yield return lineStart + string.Join(lineDelim, cells) + lineEnd; + } + + protected override string? EndTable() => null; + + public override void Display() + { + foreach (List table in Result) + { + string.Join("\n", table).DisplayAs("text/markdown"); + } + } +} + +public class HtmlPresenter : TextPresenter +{ + protected override string MakeTitle(string title) => $"

{title}

"; + protected override string? StartTable() => ""; + protected override IEnumerable HeaderLines(IEnumerable headerValues, IEnumerable widths) + { + yield return ""; + foreach (string value in headerValues) + { + yield return $" "; + } + yield return ""; + } + + protected override string? HeaderBorder(IEnumerable widths) => null; + + protected override IEnumerable DataLine(XValue xvalue, IEnumerable values, IEnumerable widths) + { + var cells = values.Select(p => p?.y?.ToString("N3")).Prepend(xvalue.ToString()); + yield return ""; + foreach (string value in cells) + { + yield return $" "; + } + yield return ""; + } + + protected override string? EndTable() => "
{value}
{value}
"; + + public override void Display() + { + foreach (List table in Result) + { + string.Join("\n", table).DisplayAs("text/html"); + } + } +} + +public class CsvPresenter : TextPresenter +{ + protected override string MakeTitle(string title) => $"# {title}"; + protected override string? StartTable() => null; + protected override IEnumerable HeaderLines(IEnumerable headerValues, IEnumerable widths) + { + yield return string.Join(",", headerValues); + } + + protected override string? HeaderBorder(IEnumerable widths) => null; + + protected override IEnumerable DataLine(XValue xvalue, IEnumerable values, IEnumerable widths) + { + var cells = values.Select(p => p?.y?.ToString("N3")).Prepend(xvalue.ToString()); + yield return string.Join(",", cells); + } + + protected override string? EndTable() => null; + + public override void Display() + { + foreach (List table in Result) + { + string.Join("\n", table).DisplayAs("text/csv"); + } + } +} + +public class ChartPresenter : DataPresenter> +{ + private string _scatterMode; + private List _uniqueUnits; + private ColorProvider _colorProvider; + private List _charts = new(); + + public ChartPresenter(string scatterMode = null) + { + _scatterMode = scatterMode; + } + + public override void Clear() => _charts.Clear(); + + public override bool PrepareUnits(IEnumerable units) + { + _uniqueUnits = new(); + foreach (string unit in units) + { + if (!_uniqueUnits.Contains(unit)) _uniqueUnits.Add(unit); + } + if (_uniqueUnits.Count > 2) + { + Console.WriteLine($"Too many units: {string.Join(", ", _uniqueUnits)}"); + return false; + } + return true; + } + + private int yaxis(string unit) => _uniqueUnits.IndexOf(unit); + + public override void SetColorGroups(Dictionary colorGroups) + { + _colorProvider = new(colorGroups); + if (Debug) _colorProvider.DumpColorGroups(); + } + + public override void Display() + { + foreach (PlotlyChart chart in Result) chart.Display(); + } + + public override List Result => _charts; + + // Below members are per-chart + + private Layout.Layout _layout; + private List _scatters; + + public override void Start(string title, string xlabel) + { + _layout = new Layout.Layout + { + xaxis = new Xaxis { title = xlabel }, + yaxis = new Yaxis { title = _uniqueUnits[0] }, + title = title, + // margin = new Margin() { r = 123 }, + }; + + if (_uniqueUnits.Count > 1) + { + _layout.yaxis2 = new Yaxis { title = _uniqueUnits[1], side = "right", overlaying = "y" }; + } + + _scatters = new(); + } + + public override void AddSeries(string title, string unit, string colorFamilyKey, string colorFamilyId, List<(XValue x, double? y)> data) + { + Scatter scatter = + new Scatter { + name = title, + x = data[0].x.HasName ? data.Select(d => d.x.GetName()) : data.Select(d => d.x.GetValue()), + y = data.Select(d => d.y), + }; + if (_scatterMode != null) scatter.mode = _scatterMode; + if (yaxis(unit) == 1) scatter.yaxis = "y2"; + _colorProvider.SetMarker(scatter, colorFamilyKey, colorFamilyId); + // scatter.marker will throw if marker hasn't been set. + // ShouldSerializemarker appears to check if it has been set. + if (Debug) Console.WriteLine($"color '{colorFamilyKey}': '{(scatter.ShouldSerializemarker() ? 
scatter.marker.color : "")}'"); + _scatters.Add(scatter); + } + + public override void Finish(XArrangement xArrangement) => _charts.Add(Chart.Plot(_scatters, _layout)); +} + +TResult ChartInternal(DataPresenter presenter, ChartType chartType, + DataManager dataManager, List> metrics, + Filter runFilter = null, Filter configFilter = null, Filter benchmarkFilter = null, IntFilter iterationFilter = null, + ConfigIterationFilter configIterationFilter = null, Func dataFilter = null, Func benchmarkMap = null, + BaseMetric<(string, TData), XValue> xMetric = null, XArrangement xArrangement = null, + NameSimplifier configNameSimplifier = null, bool includeRunName = false, + bool display = true, bool debug = false) +{ + runFilter = runFilter ?? Filter.All; + configFilter = configFilter ?? Filter.All; + benchmarkFilter = benchmarkFilter ?? Filter.All; + iterationFilter = iterationFilter ?? IntFilter.All; + // configIterationFilter is not set to an empty dictionary as that would exclude everything + dataFilter = dataFilter ?? (data => true); + benchmarkMap = benchmarkMap ?? chartType.DefaultBenchmarkMap; + xMetric = xMetric ?? chartType.DefaultXMetric; + xArrangement = xArrangement ?? XArrangements.Default; + + presenter.Clear(); + presenter.Debug = debug; + + if (metrics.Count == 0) + { + Console.WriteLine("No metrics"); + return default(TResult); + } + + List configs = dataManager.GetConfigs(runFilter: runFilter, configFilter: configFilter).Select(tuple => tuple.config).Distinct().ToList(); + if (configs.Count == 0) + { + Console.WriteLine("No configs afer filtering"); + return default(TResult); + } + + if (debug) Console.WriteLine("Simplify config names"); + Dictionary configDisplayNames = null; + string configPrefix = null; + if (configNameSimplifier != null) + { + (configPrefix, configDisplayNames) = configNameSimplifier.Simplify(configs); + } + + if (debug) Console.WriteLine("Prepare units"); + presenter.PrepareUnits(metrics.Select(metric => metric.Unit)); + + Dictionary> benchmarkGroups = new(); + HashSet benchmarkSet = new(); + foreach ((string run, string config, string benchmark) in + dataManager.GetBenchmarks(runFilter: runFilter, configFilter: configFilter, benchmarkFilter: benchmarkFilter, iterationFilter: iterationFilter, + configIterationFilter: configIterationFilter)) + { + if (!benchmarkSet.Add(benchmark)) continue; + + string benchmarkGroup = (benchmarkMap != null) ? 
benchmarkMap(benchmark) : benchmark; + benchmarkGroups.GetOrAdd(benchmarkGroup, new()); + benchmarkGroups[benchmarkGroup].Add(benchmark); + } + + foreach (var (groupName, benchmarkList) in benchmarkGroups) + { + benchmarkList.Sort(); + + if (debug) + { + Console.Write($"{groupName}:"); + foreach (var benchmark in benchmarkList) + { + Console.Write($" {benchmark}"); + } + Console.WriteLine(); + } + } + + foreach (var (benchmarkGroup, benchmarkList) in benchmarkGroups) + { + if (debug) Console.WriteLine("Initialize colors"); + + // Consider moving 'colorGroups' to the presenter + Dictionary colorGroups = new(); + foreach (SeriesInfo info in + chartType.GetSeries(dataManager, metrics, runFilter: runFilter, configFilter: configFilter, benchmarkFilter: benchmarkFilter, + iterationFilter: iterationFilter, configIterationFilter: configIterationFilter, benchmarkList: benchmarkList)) + { + string colorFamilyKey = chartType.GetColorFamilyKey(info, multipleMetrics: metrics.Count > 1, includeRunName: includeRunName, multipleConfigs: configs.Count > 1, + configDisplayNames: configDisplayNames, multipleBenchmarks: benchmarkList.Count > 1); + + colorGroups[colorFamilyKey] = colorGroups.GetValueOrDefault(colorFamilyKey, 0) + 1; + } + + presenter.SetColorGroups(colorGroups); + + { + List scatters = new(); + + string xlabel = xArrangement.GetNewTitle(xMetric.Title); + + string titlePrefix = chartType.GetChartTitle(); + List titleParts = new(); + if (!string.IsNullOrWhiteSpace(benchmarkGroup)) titleParts.Add(benchmarkGroup); + if (metrics.Count == 1) titleParts.Add(metrics[0].Title); + if (configPrefix != null) titleParts.Add(configPrefix); + else if (configs.Count == 1) titleParts.Add(configDisplayNames?.GetValueOrDefault(configs[0]) ?? configs[0]); + string titleWithoutPrefix = string.Join(" / ", titleParts); + string title = string.Join(" / ", titleParts.Prepend(titlePrefix)); + presenter.Start(title: title, xlabel: xlabel); + + List<(XValue x, double? y)> firstDataPreSorted = null; + double firstDataMin = 0; + HashSet firstDataSet = new(); + + foreach ((SeriesInfo info, int indexForOffsetting) in + chartType.GetSeries(dataManager, metrics, runFilter: runFilter, configFilter: configFilter, benchmarkFilter: benchmarkFilter, + iterationFilter: iterationFilter, configIterationFilter: configIterationFilter, benchmarkList: benchmarkList).WithIndex()) + { + string colorFamilyKey = chartType.GetColorFamilyKey(info, multipleMetrics: metrics.Count > 1, includeRunName: includeRunName, multipleConfigs: configs.Count > 1, + configDisplayNames: configDisplayNames, multipleBenchmarks: benchmarkList.Count > 1); + string seriesTitle = chartType.GetSeriesTitle(info, colorFamilyKey, metrics.Count > 1); + if (debug) Console.Write($"series title: {seriesTitle}, "); + + List> dataSource; + try { dataSource = chartType.GetDataSource(info, benchmarkFilter: benchmarkFilter, iterationFilter: iterationFilter, + configIterationFilter: configIterationFilter, dataFilter: dataFilter); } + catch (Exception e) { Console.WriteLine($"Exception {e} processing data source for {title} / {seriesTitle}"); dataSource = null; } + if (dataSource == null) + { + Console.WriteLine($"No data for {titleWithoutPrefix} / {seriesTitle}"); + continue; + } + int dataSourceCount = dataSource.Count; + if (debug) Console.Write($"source count = {dataSourceCount}, "); + + List<(XValue x, double? y)> data; + // Theory: For numeric x values, null y values need to be filtered or "mode==lines" won't show + // values that have null neighbors. 
+                // Theory: For non-numeric x values, null y values are needed to avoid shuffling of the x values
+                // (Example: if series 1 has "a" "c" and series 2 has "a" "b" "c", then 2 will be displayed
+                // "a" "c" "b" -AND- "mode==lines" will connect the "a" to the "b" to the "c")
+                // TODO: We probably need to add fake entries to the first (?) series if the different
+                // series have different sets of x values. The existing code will work if the x value
+                // exists in the DataManager but the metrics don't. (Example: we have ASP.NET metrics but
+                // no GC trace for a benchmark, but the chart contains GC metrics)
+                info.Metric.ResetDiagnostics();
+                try { data = dataSource.Select(b => (x: xMetric.DoExtract((b.Key, b.Value)), y: info.Metric.DoExtract(b.Value, indexForOffsetting))).ToList(); }
+                catch { Console.WriteLine($"Exception processing data items for {title} / {seriesTitle}"); data = null; }
+                if (data == null) continue; // extraction failed; skip this series rather than dereferencing null below
+                info.Metric.DisplayDiagnostics($"{titleWithoutPrefix} / {seriesTitle}");
+                if (debug) Console.Write($"data count = {data.Count}, ");
+                if (!data.Any(d => d.y != null))
+                {
+                    Console.WriteLine($"No data items for {titleWithoutPrefix} / {seriesTitle}");
+                    continue;
+                }
+
+                // This should probably be factored into CombinedSortedXArrangement. The idea is that firstDataPreSorted
+                // contains the first series' data so that each series can be merged into it, sorted the same way, and
+                // then all displayed in the same order of x values. However, the first series might not have all of the
+                // values, so this tacks them on the end arbitrarily.
+                if (firstDataPreSorted == null)
+                {
+                    firstDataPreSorted = new(data); // make a copy so that edits don't change the original
+                    firstDataMin = firstDataPreSorted.Select(pair => pair.y).Where(NotNull).Min(y => y.Value);
+                    firstDataSet = new(firstDataPreSorted.Select(pair => pair.x));
+                }
+                foreach (var d in data)
+                {
+                    if (firstDataSet.Add(d.x))
+                    {
+                        // The "--" is a hack to produce lower values. This should be fixed to be clearer.
+                        firstDataPreSorted.Add((d.x, --firstDataMin));
+                    }
+                }
+
+                data = xArrangement.Arrange(data, firstDataPreSorted);
+
+                // See above comment. If x values are numeric, remove ones without y values.
+                // Note that the x arrangement can change the x value type.
+                if (data[0].x.HasValue)
+                {
+                    data = data.Where(d => d.y != null).ToList();
+                }
+
+                if (debug) Console.Write($"data count = {data.Count}, ");
+                if (data.Count == 0)
+                {
+                    Console.WriteLine($"No data items after filtering nulls for {titleWithoutPrefix} / {seriesTitle}");
+                    continue;
+                }
+
+                string colorFamilyId = chartType.GetColorFamilyId(info, multipleMetrics: metrics.Count > 1);
+                presenter.AddSeries(title: seriesTitle, unit: info.Metric.Unit, colorFamilyKey: colorFamilyKey, colorFamilyId: colorFamilyId, data: data);
+            }
+
+            presenter.Finish(xArrangement);
+        }
+    }
+
+    if (display)
+    {
+        presenter.Display();
+    }
+
+    return presenter.Result;
+}
+
+List> TableBenchmarks(DataManager dataManager, List> metrics, TextPresenter textPresenter = null,
+    Filter runFilter = null, Filter configFilter = null, Filter benchmarkFilter = null, IntFilter iterationFilter = null,
+    ConfigIterationFilter configIterationFilter = null, Func dataFilter = null,
+    Func benchmarkMap = null, BaseMetric<(string, BenchmarkData), XValue> xMetric = null, XArrangement xArrangement = null,
+    NameSimplifier configNameSimplifier = null, bool includeRunName = false,
+    bool display = true, bool debug = false)
+    => ChartInternal(textPresenter ??
TextPresenter.RawText, new BenchmarksChartType(), + dataManager, metrics, + runFilter: runFilter, configFilter: configFilter, benchmarkFilter: benchmarkFilter, iterationFilter: iterationFilter, + configIterationFilter: configIterationFilter, dataFilter: dataFilter, + benchmarkMap: benchmarkMap, xMetric: xMetric, xArrangement: xArrangement, + configNameSimplifier: configNameSimplifier, includeRunName: includeRunName, + display: display, debug: debug); + +List> TableBenchmarks(DataManager dataManager, Metric metric, TextPresenter textPresenter = null, + Filter runFilter = null, Filter configFilter = null, Filter benchmarkFilter = null, IntFilter iterationFilter = null, + ConfigIterationFilter configIterationFilter = null, Func dataFilter = null, + Func benchmarkMap = null, BaseMetric<(string, BenchmarkData), XValue> xMetric = null, XArrangement xArrangement = null, + NameSimplifier configNameSimplifier = null, bool includeRunName = false, + bool display = true, bool debug = false) + => TableBenchmarks(dataManager, ML(metric), textPresenter, + runFilter: runFilter, configFilter: configFilter, benchmarkFilter: benchmarkFilter, iterationFilter: iterationFilter, + configIterationFilter: configIterationFilter, dataFilter: dataFilter, + benchmarkMap: benchmarkMap, xMetric: xMetric, xArrangement: xArrangement, + configNameSimplifier: configNameSimplifier, includeRunName: includeRunName, + display: display, debug: debug); + +List> TableIterations(DataManager dataManager, List> metrics, TextPresenter textPresenter = null, + Filter runFilter = null, Filter configFilter = null, Filter benchmarkFilter = null, IntFilter iterationFilter = null, + ConfigIterationFilter configIterationFilter = null, Func dataFilter = null, + Func benchmarkMap = null, BaseMetric<(string, IterationData), XValue> xMetric = null, XArrangement xArrangement = null, + NameSimplifier configNameSimplifier = null, bool includeRunName = false, + bool display = true, bool debug = false) + => ChartInternal(textPresenter ?? 
TextPresenter.RawText, new IterationsChartType(), + dataManager, metrics, + runFilter: runFilter, configFilter: configFilter, benchmarkFilter: benchmarkFilter, iterationFilter: iterationFilter, + configIterationFilter: configIterationFilter, dataFilter: dataFilter, + benchmarkMap: benchmarkMap, xMetric: xMetric, xArrangement: xArrangement, + configNameSimplifier: configNameSimplifier, includeRunName: includeRunName, + display: display, debug: debug); + +List> TableIterations(DataManager dataManager, Metric metric, TextPresenter textPresenter = null, + Filter runFilter = null, Filter configFilter = null, Filter benchmarkFilter = null, IntFilter iterationFilter = null, + ConfigIterationFilter configIterationFilter = null, Func dataFilter = null, + Func benchmarkMap = null, BaseMetric<(string, IterationData), XValue> xMetric = null, XArrangement xArrangement = null, + NameSimplifier configNameSimplifier = null, bool includeRunName = false, + bool display = true, bool debug = false) + => TableIterations(dataManager, ML(metric), textPresenter, + runFilter: runFilter, configFilter: configFilter, benchmarkFilter: benchmarkFilter, iterationFilter: iterationFilter, + configIterationFilter: configIterationFilter, dataFilter: dataFilter, + benchmarkMap: benchmarkMap, xMetric: xMetric, xArrangement: xArrangement, + configNameSimplifier: configNameSimplifier, includeRunName: includeRunName, + display: display, debug: debug); + +List> TableGCData(DataManager dataManager, List> metrics, TextPresenter textPresenter = null, + Filter runFilter = null, Filter configFilter = null, Filter benchmarkFilter = null, IntFilter iterationFilter = null, + ConfigIterationFilter configIterationFilter = null, Func dataFilter = null, + Func benchmarkMap = null, BaseMetric<(string, TraceGC), XValue> xMetric = null, XArrangement xArrangement = null, + NameSimplifier configNameSimplifier = null, bool includeRunName = false, + bool display = true, bool debug = false) + => ChartInternal(textPresenter ?? 
TextPresenter.RawText, new TraceGCChartType(), + dataManager, metrics, + runFilter: runFilter, configFilter: configFilter, benchmarkFilter: benchmarkFilter, iterationFilter: iterationFilter, + configIterationFilter: configIterationFilter, dataFilter: dataFilter, + benchmarkMap: benchmarkMap, xMetric: xMetric, xArrangement: xArrangement, + configNameSimplifier: configNameSimplifier, includeRunName: includeRunName, + display: display, debug: debug); + +List> TableGCData(DataManager dataManager, Metric metric, TextPresenter textPresenter = null, + Filter runFilter = null, Filter configFilter = null, Filter benchmarkFilter = null, IntFilter iterationFilter = null, + ConfigIterationFilter configIterationFilter = null, Func dataFilter = null, + Func benchmarkMap = null, BaseMetric<(string, TraceGC), XValue> xMetric = null, XArrangement xArrangement = null, + NameSimplifier configNameSimplifier = null, bool includeRunName = false, + bool display = true, bool debug = false) + => TableGCData(dataManager, ML(metric), textPresenter, + runFilter: runFilter, configFilter: configFilter, benchmarkFilter: benchmarkFilter, iterationFilter: iterationFilter, + configIterationFilter: configIterationFilter, dataFilter: dataFilter, + benchmarkMap: benchmarkMap, xMetric: xMetric, xArrangement: xArrangement, + configNameSimplifier: configNameSimplifier, includeRunName: includeRunName, + display: display, debug: debug); + +List ChartBenchmarks(DataManager dataManager, List> metrics, + Filter runFilter = null, Filter configFilter = null, Filter benchmarkFilter = null, IntFilter iterationFilter = null, + ConfigIterationFilter configIterationFilter = null, Func dataFilter = null, + Func benchmarkMap = null, BaseMetric<(string, BenchmarkData), XValue> xMetric = null, XArrangement xArrangement = null, + NameSimplifier configNameSimplifier = null, bool includeRunName = false, + bool display = true, bool debug = false) + => ChartInternal(new ChartPresenter(scatterMode: null), new BenchmarksChartType(), + dataManager, metrics, + runFilter: runFilter, configFilter: configFilter, benchmarkFilter: benchmarkFilter, iterationFilter: iterationFilter, + configIterationFilter: configIterationFilter, dataFilter: dataFilter, + benchmarkMap: benchmarkMap, xMetric: xMetric, xArrangement: xArrangement, + configNameSimplifier: configNameSimplifier, includeRunName: includeRunName, + display: display, debug: debug); + +List ChartBenchmarks(DataManager dataManager, Metric metric, + Filter runFilter = null, Filter configFilter = null, Filter benchmarkFilter = null, IntFilter iterationFilter = null, + ConfigIterationFilter configIterationFilter = null, Func dataFilter = null, + Func benchmarkMap = null, BaseMetric<(string, BenchmarkData), XValue> xMetric = null, XArrangement xArrangement = null, + NameSimplifier configNameSimplifier = null, bool includeRunName = false, + bool display = true, bool debug = false) + => ChartBenchmarks(dataManager, ML(metric), + runFilter: runFilter, configFilter: configFilter, benchmarkFilter: benchmarkFilter, iterationFilter: iterationFilter, + configIterationFilter: configIterationFilter, dataFilter: dataFilter, + benchmarkMap: benchmarkMap, xMetric: xMetric, xArrangement: xArrangement, + configNameSimplifier: configNameSimplifier, includeRunName: includeRunName, + display: display, debug: debug); + +List ChartIterations(DataManager dataManager, List> metrics, + Filter runFilter = null, Filter configFilter = null, Filter benchmarkFilter = null, IntFilter iterationFilter = null, + ConfigIterationFilter 
configIterationFilter = null, Func dataFilter = null, + Func benchmarkMap = null, BaseMetric<(string, IterationData), XValue> xMetric = null, XArrangement xArrangement = null, + NameSimplifier configNameSimplifier = null, bool includeRunName = false, + bool display = true, bool debug = false) + => ChartInternal(new ChartPresenter(scatterMode: "markers"), new IterationsChartType(), + dataManager, metrics, + runFilter: runFilter, configFilter: configFilter, benchmarkFilter: benchmarkFilter, iterationFilter: iterationFilter, + configIterationFilter: configIterationFilter, dataFilter: dataFilter, + benchmarkMap: benchmarkMap, xMetric: xMetric, xArrangement: xArrangement, + configNameSimplifier: configNameSimplifier, includeRunName: includeRunName, + display: display, debug: debug); + +List ChartIterations(DataManager dataManager, Metric metric, + Filter runFilter = null, Filter configFilter = null, Filter benchmarkFilter = null, IntFilter iterationFilter = null, + ConfigIterationFilter configIterationFilter = null, Func dataFilter = null, + Func benchmarkMap = null, BaseMetric<(string, IterationData), XValue> xMetric = null, XArrangement xArrangement = null, + NameSimplifier configNameSimplifier = null, bool includeRunName = false, + bool display = true, bool debug = false) + => ChartIterations(dataManager, ML(metric), + runFilter: runFilter, configFilter: configFilter, benchmarkFilter: benchmarkFilter, iterationFilter: iterationFilter, + configIterationFilter: configIterationFilter, dataFilter: dataFilter, + benchmarkMap: benchmarkMap, xMetric: xMetric, xArrangement: xArrangement, + configNameSimplifier: configNameSimplifier, includeRunName: includeRunName, + display: display, debug: debug); + +List ChartGCData(DataManager dataManager, List> metrics, + Filter runFilter = null, Filter configFilter = null, Filter benchmarkFilter = null, IntFilter iterationFilter = null, + ConfigIterationFilter configIterationFilter = null, Func dataFilter = null, + Func benchmarkMap = null, BaseMetric<(string, TraceGC), XValue> xMetric = null, XArrangement xArrangement = null, + NameSimplifier configNameSimplifier = null, bool includeRunName = false, + bool display = true, bool debug = false) + => ChartInternal(new ChartPresenter(scatterMode: null), new TraceGCChartType(), + dataManager, metrics, + runFilter: runFilter, configFilter: configFilter, benchmarkFilter: benchmarkFilter, iterationFilter: iterationFilter, + configIterationFilter: configIterationFilter, dataFilter: dataFilter, + benchmarkMap: benchmarkMap, xMetric: xMetric, xArrangement: xArrangement, + configNameSimplifier: configNameSimplifier, includeRunName: includeRunName, + display: display, debug: debug); + +List ChartGCData(DataManager dataManager, Metric metric, + Filter runFilter = null, Filter configFilter = null, Filter benchmarkFilter = null, IntFilter iterationFilter = null, + ConfigIterationFilter configIterationFilter = null, Func dataFilter = null, + Func benchmarkMap = null, BaseMetric<(string, TraceGC), XValue> xMetric = null, XArrangement xArrangement = null, + NameSimplifier configNameSimplifier = null, bool includeRunName = false, + bool display = true, bool debug = false) + => ChartGCData(dataManager, ML(metric), + runFilter: runFilter, configFilter: configFilter, benchmarkFilter: benchmarkFilter, iterationFilter: iterationFilter, + configIterationFilter: configIterationFilter, dataFilter: dataFilter, + benchmarkMap: benchmarkMap, xMetric: xMetric, xArrangement: xArrangement, + configNameSimplifier: configNameSimplifier, 
includeRunName: includeRunName,
+        display: display, debug: debug);
+
+#!csharp
+
+// Benchmark lists
+
+// scoutList is a list of ASP.NET benchmarks identified by looking at allocation rates.
+// scoutList2 adds some tests that Maoni identified.
+// smallList is for very quick looks.
+
+// Often a test infra run will already have been limited to a smaller set of tests, in which
+// case these lists aren't necessary. However, they can be used to load (or chart, after
+// loading) a subset of a run.
+
+List<string> scoutList = ML(
+    "ConnectionClose",
+    "ConnectionCloseHttps",
+    "ConnectionCloseHttpsHttpSys",
+    "ConnectionCloseHttpSys",
+    "Fortunes",
+    "FortunesDapper",
+    "FortunesEf",
+    "FortunesPlatform",
+    "FortunesPlatformDapper",
+    "FortunesPlatformEF",
+    "Json",
+    "JsonHttps",
+    "JsonHttpsHttpSys",
+    "JsonMin",
+    "JsonMvc",
+    "MultipleQueriesPlatform",
+    "PlaintextMvc",
+    "PlaintextQueryString",
+    "PlaintextWithParametersEmptyFilter",
+    "PlaintextWithParametersNoFilter",
+    "SingleQueryPlatform",
+    "Stage1",
+    "Stage1Grpc",
+    "Stage2",
+    "UpdatesPlatform"
+);
+
+List<string> scoutList2 = scoutList.Concat(ML("CachingPlatform", "JsonMapAction", "Stage1TrimR2RSingleFile")).ToList();
+List<string> smallList = ML("Fortunes", "JsonHttpsHttpSys", "PlaintextQueryString", "Stage2", "PlaintextMvc");
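+
+#!csharp
+
+// Usage sketch: the lists above are intended to be passed as the 'benchmarkFilter' of the
+// Chart*/Table* helpers defined earlier. How a Filter is built from a list of names is not
+// shown in this cell, so 'MakeNameFilter' below is a hypothetical adapter -- substitute
+// whatever Filter factory this notebook actually provides; 'dataManager', 'someMetric', and
+// 'someMetrics' are placeholders for values created in earlier cells.
+//
+//     Filter MakeNameFilter(List<string> names) => /* a Filter that matches exactly these benchmark names */;
+//
+//     // Chart a single metric for just the quick-look benchmarks:
+//     ChartBenchmarks(dataManager, someMetric, benchmarkFilter: MakeNameFilter(smallList));
+//
+//     // Tabulate several metrics for the scouting set, keeping run names in the series titles:
+//     TableBenchmarks(dataManager, someMetrics, benchmarkFilter: MakeNameFilter(scoutList2), includeRunName: true);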