Compare commits

...

15 Commits

Author SHA1 Message Date
Adam Hathcock
87cab8e16a updating baseline to be what github actions do 2026-02-06 12:28:02 +00:00
Adam Hathcock
e85752ca1d reget results and change threshold to 25 % 2026-02-06 11:58:43 +00:00
Adam Hathcock
2860896640 Merge remote-tracking branch 'origin/master' into copilot/add-performance-benchmarks 2026-02-06 11:51:29 +00:00
Adam Hathcock
c96acf18dc updated results? 2026-02-05 15:21:19 +00:00
Adam Hathcock
6eec6faff2 simplify the results to compare 2026-02-05 15:08:00 +00:00
copilot-swe-agent[bot]
118fbbea64 Implement actual benchmark comparison logic with regression detection
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-02-05 14:28:41 +00:00
copilot-swe-agent[bot]
bbc664ddcc Add generate-baseline build target and JetBrains profiler support
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-02-05 14:14:54 +00:00
copilot-swe-agent[bot]
2fa8196105 Replace bash scripts with C# build targets for benchmark display and comparison
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-02-05 13:56:48 +00:00
Adam Hathcock
d4f1dafabb Merge remote-tracking branch 'origin/copilot/add-performance-benchmarks' into copilot/add-performance-benchmarks
# Conflicts:
#	tests/SharpCompress.Performance/Program.cs
2026-02-05 13:45:03 +00:00
Adam Hathcock
f2483be1da update benchmarkdotnet 2026-02-05 13:43:44 +00:00
copilot-swe-agent[bot]
7914b7ddaf Add implementation summary for performance benchmarks
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-02-05 13:34:10 +00:00
copilot-swe-agent[bot]
0848a1b940 Apply CSharpier formatting to performance benchmarks
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-02-05 13:33:01 +00:00
copilot-swe-agent[bot]
ca262920c8 Add GitHub Actions workflow, baseline results, and documentation for performance benchmarks
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-02-05 13:32:23 +00:00
copilot-swe-agent[bot]
2541c09973 Add BenchmarkDotNet performance benchmarks for all major formats
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-02-05 13:30:57 +00:00
copilot-swe-agent[bot]
2f9b6422c3 Initial plan 2026-02-05 13:24:04 +00:00
16 changed files with 1204 additions and 49 deletions


@@ -0,0 +1,50 @@
name: Performance Benchmarks

on:
  push:
    branches:
      - 'master'
      - 'release'
  pull_request:
    branches:
      - 'master'
      - 'release'
  workflow_dispatch:

permissions:
  contents: read

jobs:
  benchmark:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v6
        with:
          fetch-depth: 0
      - uses: actions/setup-dotnet@v5
        with:
          dotnet-version: 10.0.x
      - name: Build Performance Project
        run: dotnet build tests/SharpCompress.Performance/SharpCompress.Performance.csproj --configuration Release
      - name: Run Benchmarks
        run: dotnet run --project tests/SharpCompress.Performance/SharpCompress.Performance.csproj --configuration Release --no-build -- --filter "*" --exporters json markdown --artifacts benchmark-results
        continue-on-error: true
      - name: Display Benchmark Results
        if: always()
        run: dotnet run --project build/build.csproj -- display-benchmark-results
      - name: Compare with Baseline
        if: always()
        run: dotnet run --project build/build.csproj -- compare-benchmark-results
      - name: Upload Benchmark Results
        if: always()
        uses: actions/upload-artifact@v6
        with:
          name: benchmark-results
          path: benchmark-results/

.gitignore

@@ -17,6 +17,10 @@ tests/TestArchives/*/Scratch2
tools
.idea/
artifacts/
BenchmarkDotNet.Artifacts/
baseline-artifacts/
profiler-snapshots/
.DS_Store
*.snupkg
benchmark-results/


@@ -1,5 +1,6 @@
<Project>
<ItemGroup>
<PackageVersion Include="BenchmarkDotNet" Version="0.15.8" />
<PackageVersion Include="Bullseye" Version="6.1.0" />
<PackageVersion Include="AwesomeAssertions" Version="9.3.0" />
<PackageVersion Include="Glob" Version="1.1.9" />


@@ -19,6 +19,9 @@ const string Publish = "publish";
const string DetermineVersion = "determine-version";
const string UpdateVersion = "update-version";
const string PushToNuGet = "push-to-nuget";
const string DisplayBenchmarkResults = "display-benchmark-results";
const string CompareBenchmarkResults = "compare-benchmark-results";
const string GenerateBaseline = "generate-baseline";
Target(
Clean,
@@ -210,6 +213,249 @@ Target(
}
);
Target(
DisplayBenchmarkResults,
() =>
{
var githubStepSummary = Environment.GetEnvironmentVariable("GITHUB_STEP_SUMMARY");
var resultsDir = "benchmark-results/results";
if (!Directory.Exists(resultsDir))
{
Console.WriteLine("No benchmark results found.");
return;
}
var markdownFiles = Directory
.GetFiles(resultsDir, "*-report-github.md")
.OrderBy(f => f)
.ToList();
if (markdownFiles.Count == 0)
{
Console.WriteLine("No benchmark markdown reports found.");
return;
}
var output = new List<string> { "## Benchmark Results", "" };
foreach (var file in markdownFiles)
{
Console.WriteLine($"Processing {Path.GetFileName(file)}");
var content = File.ReadAllText(file);
output.Add(content);
output.Add("");
}
// Write to GitHub Step Summary if available
if (!string.IsNullOrEmpty(githubStepSummary))
{
File.AppendAllLines(githubStepSummary, output);
Console.WriteLine($"Benchmark results written to GitHub Step Summary");
}
else
{
// Write to console if not in GitHub Actions
foreach (var line in output)
{
Console.WriteLine(line);
}
}
}
);
Target(
CompareBenchmarkResults,
() =>
{
var githubStepSummary = Environment.GetEnvironmentVariable("GITHUB_STEP_SUMMARY");
var baselinePath = "tests/SharpCompress.Performance/baseline-results.md";
var resultsDir = "benchmark-results/results";
var output = new List<string> { "## Comparison with Baseline", "" };
if (!File.Exists(baselinePath))
{
Console.WriteLine("Baseline file not found");
output.Add("⚠️ Baseline file not found. Run `generate-baseline` to create it.");
WriteOutput(output, githubStepSummary);
return;
}
if (!Directory.Exists(resultsDir))
{
Console.WriteLine("No current benchmark results found.");
output.Add("⚠️ No current benchmark results found. Showing baseline only.");
output.Add("");
output.Add("### Baseline Results");
output.AddRange(File.ReadAllLines(baselinePath));
WriteOutput(output, githubStepSummary);
return;
}
var markdownFiles = Directory
.GetFiles(resultsDir, "*-report-github.md")
.OrderBy(f => f)
.ToList();
if (markdownFiles.Count == 0)
{
Console.WriteLine("No current benchmark markdown reports found.");
output.Add("⚠️ No current benchmark results found. Showing baseline only.");
output.Add("");
output.Add("### Baseline Results");
output.AddRange(File.ReadAllLines(baselinePath));
WriteOutput(output, githubStepSummary);
return;
}
Console.WriteLine("Parsing baseline results...");
var baselineMetrics = ParseBenchmarkResults(File.ReadAllText(baselinePath));
Console.WriteLine("Parsing current results...");
var currentText = string.Join("\n", markdownFiles.Select(f => File.ReadAllText(f)));
var currentMetrics = ParseBenchmarkResults(currentText);
Console.WriteLine("Comparing results...");
output.Add("### Performance Comparison");
output.Add("");
output.Add(
"| Benchmark | Baseline Mean | Current Mean | Change | Baseline Memory | Current Memory | Change |"
);
output.Add(
"|-----------|---------------|--------------|--------|-----------------|----------------|--------|"
);
var hasRegressions = false;
var hasImprovements = false;
foreach (var method in currentMetrics.Keys.Union(baselineMetrics.Keys).OrderBy(k => k))
{
var hasCurrent = currentMetrics.TryGetValue(method, out var current);
var hasBaseline = baselineMetrics.TryGetValue(method, out var baseline);
if (!hasCurrent)
{
output.Add(
$"| {method} | {baseline!.Mean} | ❌ Missing | N/A | {baseline.Memory} | N/A | N/A |"
);
continue;
}
if (!hasBaseline)
{
output.Add(
$"| {method} | ❌ New | {current!.Mean} | N/A | N/A | {current.Memory} | N/A |"
);
continue;
}
var timeChange = CalculateChange(baseline!.MeanValue, current!.MeanValue);
var memChange = CalculateChange(baseline.MemoryValue, current.MemoryValue);
var timeIcon =
timeChange > 25 ? "🔴"
: timeChange < -25 ? "🟢"
: "⚪";
var memIcon =
memChange > 25 ? "🔴"
: memChange < -25 ? "🟢"
: "⚪";
if (timeChange > 25 || memChange > 25)
hasRegressions = true;
if (timeChange < -25 || memChange < -25)
hasImprovements = true;
output.Add(
$"| {method} | {baseline.Mean} | {current.Mean} | {timeIcon} {timeChange:+0.0;-0.0;0}% | {baseline.Memory} | {current.Memory} | {memIcon} {memChange:+0.0;-0.0;0}% |"
);
}
output.Add("");
output.Add("**Legend:**");
output.Add("- 🔴 Regression (>25% slower/more memory)");
output.Add("- 🟢 Improvement (>25% faster/less memory)");
output.Add("- ⚪ No significant change");
if (hasRegressions)
{
output.Add("");
output.Add(
"⚠️ **Warning**: Performance regressions detected. Review the changes carefully."
);
}
else if (hasImprovements)
{
output.Add("");
output.Add("✅ Performance improvements detected!");
}
else
{
output.Add("");
output.Add("✅ Performance is stable compared to baseline.");
}
WriteOutput(output, githubStepSummary);
}
);
Target(
GenerateBaseline,
() =>
{
var perfProject = "tests/SharpCompress.Performance/SharpCompress.Performance.csproj";
var baselinePath = "tests/SharpCompress.Performance/baseline-results.md";
var artifactsDir = "baseline-artifacts";
Console.WriteLine("Building performance project...");
Run("dotnet", $"build {perfProject} --configuration Release");
Console.WriteLine("Running benchmarks to generate baseline...");
Run(
"dotnet",
$"run --project {perfProject} --configuration Release --no-build -- --filter \"*\" --exporters markdown --artifacts {artifactsDir}"
);
var resultsDir = Path.Combine(artifactsDir, "results");
if (!Directory.Exists(resultsDir))
{
Console.WriteLine("ERROR: No benchmark results generated.");
return;
}
var markdownFiles = Directory
.GetFiles(resultsDir, "*-report-github.md")
.OrderBy(f => f)
.ToList();
if (markdownFiles.Count == 0)
{
Console.WriteLine("ERROR: No markdown reports found.");
return;
}
Console.WriteLine($"Combining {markdownFiles.Count} benchmark reports...");
var baselineContent = new List<string>();
foreach (var file in markdownFiles)
{
var lines = File.ReadAllLines(file);
baselineContent.AddRange(lines.Select(l => l.Trim()).Where(l => l.StartsWith('|')));
}
File.WriteAllText(baselinePath, string.Join(Environment.NewLine, baselineContent));
Console.WriteLine($"Baseline written to {baselinePath}");
// Clean up artifacts directory
if (Directory.Exists(artifactsDir))
{
Directory.Delete(artifactsDir, true);
Console.WriteLine("Cleaned up artifacts directory.");
}
}
);
Target("default", [Publish], () => Console.WriteLine("Done!"));
await RunTargetsAndExitAsync(args);
@@ -302,3 +548,142 @@ static async Task<string> GetGitOutput(string command, string args)
throw new Exception($"Git command failed: git {command} {args}\n{ex.Message}", ex);
}
}
static void WriteOutput(List<string> output, string? githubStepSummary)
{
if (!string.IsNullOrEmpty(githubStepSummary))
{
File.AppendAllLines(githubStepSummary, output);
Console.WriteLine("Comparison written to GitHub Step Summary");
}
else
{
foreach (var line in output)
{
Console.WriteLine(line);
}
}
}
static Dictionary<string, BenchmarkMetric> ParseBenchmarkResults(string markdown)
{
var metrics = new Dictionary<string, BenchmarkMetric>();
var lines = markdown.Split('\n');
for (int i = 0; i < lines.Length; i++)
{
var line = lines[i].Trim();
// Look for table rows with benchmark data
if (line.StartsWith("|") && line.Contains("&#39;") && i > 0)
{
var parts = line.Split('|', StringSplitOptions.TrimEntries);
if (parts.Length >= 5)
{
var method = parts[1].Replace("&#39;", "'");
var meanStr = parts[2];
// Find Allocated column - it's usually the last column or labeled "Allocated"
string memoryStr = "N/A";
for (int j = parts.Length - 2; j >= 2; j--)
{
if (
parts[j].Contains("KB")
|| parts[j].Contains("MB")
|| parts[j].Contains("GB")
|| parts[j].Contains("B")
)
{
memoryStr = parts[j];
break;
}
}
if (
!method.Equals("Method", StringComparison.OrdinalIgnoreCase)
&& !string.IsNullOrWhiteSpace(method)
)
{
var metric = new BenchmarkMetric
{
Method = method,
Mean = meanStr,
MeanValue = ParseTimeValue(meanStr),
Memory = memoryStr,
MemoryValue = ParseMemoryValue(memoryStr),
};
metrics[method] = metric;
}
}
}
}
return metrics;
}
static double ParseTimeValue(string timeStr)
{
if (string.IsNullOrWhiteSpace(timeStr) || timeStr == "N/A" || timeStr == "NA")
return 0;
// Remove thousands separators and parse
timeStr = timeStr.Replace(",", "").Trim();
var match = Regex.Match(timeStr, @"([\d.]+)\s*(\w+)");
if (!match.Success)
return 0;
var value = double.Parse(match.Groups[1].Value);
var unit = match.Groups[2].Value.ToLower();
// Convert to microseconds for comparison
return unit switch
{
"s" => value * 1_000_000,
"ms" => value * 1_000,
"μs" or "us" => value,
"ns" => value / 1_000,
_ => value,
};
}
static double ParseMemoryValue(string memStr)
{
if (string.IsNullOrWhiteSpace(memStr) || memStr == "N/A" || memStr == "NA")
return 0;
memStr = memStr.Replace(",", "").Trim();
var match = Regex.Match(memStr, @"([\d.]+)\s*(\w+)");
if (!match.Success)
return 0;
var value = double.Parse(match.Groups[1].Value);
var unit = match.Groups[2].Value.ToUpper();
// Convert to KB for comparison
return unit switch
{
"GB" => value * 1_024 * 1_024,
"MB" => value * 1_024,
"KB" => value,
"B" => value / 1_024,
_ => value,
};
}
static double CalculateChange(double baseline, double current)
{
if (baseline == 0)
return 0;
return ((current - baseline) / baseline) * 100;
}
record BenchmarkMetric
{
public string Method { get; init; } = "";
public string Mean { get; init; } = "";
public double MeanValue { get; init; }
public string Memory { get; init; } = "";
public double MemoryValue { get; init; }
}


@@ -216,9 +216,9 @@
"net10.0": {
"Microsoft.NET.ILLink.Tasks": {
"type": "Direct",
"requested": "[10.0.0, )",
"resolved": "10.0.0",
"contentHash": "kICGrGYEzCNI3wPzfEXcwNHgTvlvVn9yJDhSdRK+oZQy4jvYH529u7O0xf5ocQKzOMjfS07+3z9PKRIjrFMJDA=="
"requested": "[10.0.2, )",
"resolved": "10.0.2",
"contentHash": "sXdDtMf2qcnbygw9OdE535c2lxSxrZP8gO4UhDJ0xiJbl1wIqXS1OTcTDFTIJPOFd6Mhcm8gPEthqWGUxBsTqw=="
},
"Microsoft.NETFramework.ReferenceAssemblies": {
"type": "Direct",
@@ -264,9 +264,9 @@
"net8.0": {
"Microsoft.NET.ILLink.Tasks": {
"type": "Direct",
"requested": "[8.0.22, )",
"resolved": "8.0.22",
"contentHash": "MhcMithKEiyyNkD2ZfbDZPmcOdi0GheGfg8saEIIEfD/fol3iHmcV8TsZkD4ZYz5gdUuoX4YtlVySUU7Sxl9SQ=="
"requested": "[8.0.23, )",
"resolved": "8.0.23",
"contentHash": "GqHiB1HbbODWPbY/lc5xLQH8siEEhNA0ptpJCC6X6adtAYNEzu5ZlqV3YHA3Gh7fuEwgA8XqVwMtH2KNtuQM1Q=="
},
"Microsoft.NETFramework.ReferenceAssemblies": {
"type": "Direct",


@@ -0,0 +1,39 @@
using System;
using System.IO;
namespace SharpCompress.Performance.Benchmarks;
public abstract class ArchiveBenchmarkBase
{
protected static readonly string TEST_ARCHIVES_PATH;
static ArchiveBenchmarkBase()
{
var baseDirectory = AppDomain.CurrentDomain.BaseDirectory;
var index = baseDirectory.IndexOf(
"SharpCompress.Performance",
StringComparison.OrdinalIgnoreCase
);
if (index == -1)
{
throw new InvalidOperationException(
"Could not find SharpCompress.Performance in the base directory path"
);
}
var path = baseDirectory.Substring(0, index);
var solutionBasePath = Path.GetDirectoryName(path) ?? throw new InvalidOperationException();
TEST_ARCHIVES_PATH = Path.Combine(solutionBasePath, "TestArchives", "Archives");
if (!Directory.Exists(TEST_ARCHIVES_PATH))
{
throw new InvalidOperationException(
$"Test archives directory not found: {TEST_ARCHIVES_PATH}"
);
}
}
protected static string GetArchivePath(string fileName) =>
Path.Combine(TEST_ARCHIVES_PATH, fileName);
}


@@ -0,0 +1,46 @@
using System;
using System.IO;
using BenchmarkDotNet.Attributes;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;
namespace SharpCompress.Performance.Benchmarks;
[MemoryDiagnoser]
public class GZipBenchmarks
{
private byte[] _sourceData = null!;
private byte[] _compressedData = null!;
[GlobalSetup]
public void Setup()
{
// Create 100KB of test data
_sourceData = new byte[100 * 1024];
new Random(42).NextBytes(_sourceData);
// Pre-compress for decompression benchmark
using var compressStream = new MemoryStream();
using (var gzipStream = new GZipStream(compressStream, CompressionMode.Compress))
{
gzipStream.Write(_sourceData, 0, _sourceData.Length);
}
_compressedData = compressStream.ToArray();
}
[Benchmark(Description = "GZip: Compress 100KB")]
public void GZipCompress()
{
using var outputStream = new MemoryStream();
using var gzipStream = new GZipStream(outputStream, CompressionMode.Compress);
gzipStream.Write(_sourceData, 0, _sourceData.Length);
}
[Benchmark(Description = "GZip: Decompress 100KB")]
public void GZipDecompress()
{
using var inputStream = new MemoryStream(_compressedData);
using var gzipStream = new GZipStream(inputStream, CompressionMode.Decompress);
gzipStream.CopyTo(Stream.Null);
}
}


@@ -0,0 +1,46 @@
using System;
using System.IO;
using System.Linq;
using BenchmarkDotNet.Attributes;
using SharpCompress.Archives.Rar;
using SharpCompress.Readers;
namespace SharpCompress.Performance.Benchmarks;
[MemoryDiagnoser]
public class RarBenchmarks : ArchiveBenchmarkBase
{
private byte[] _rarBytes = null!;
[GlobalSetup]
public void Setup()
{
_rarBytes = File.ReadAllBytes(GetArchivePath("Rar.rar"));
}
[Benchmark(Description = "Rar: Extract all entries (Archive API)")]
public void RarExtractArchiveApi()
{
using var stream = new MemoryStream(_rarBytes);
using var archive = RarArchive.OpenArchive(stream);
foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
{
using var entryStream = entry.OpenEntryStream();
entryStream.CopyTo(Stream.Null);
}
}
[Benchmark(Description = "Rar: Extract all entries (Reader API)")]
public void RarExtractReaderApi()
{
using var stream = new MemoryStream(_rarBytes);
using var reader = ReaderFactory.OpenReader(stream);
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
reader.WriteEntryTo(Stream.Null);
}
}
}
}


@@ -0,0 +1,45 @@
using System;
using System.IO;
using System.Linq;
using BenchmarkDotNet.Attributes;
using SharpCompress.Archives.SevenZip;
namespace SharpCompress.Performance.Benchmarks;
[MemoryDiagnoser]
public class SevenZipBenchmarks : ArchiveBenchmarkBase
{
private byte[] _lzmaBytes = null!;
private byte[] _lzma2Bytes = null!;
[GlobalSetup]
public void Setup()
{
_lzmaBytes = File.ReadAllBytes(GetArchivePath("7Zip.LZMA.7z"));
_lzma2Bytes = File.ReadAllBytes(GetArchivePath("7Zip.LZMA2.7z"));
}
[Benchmark(Description = "7Zip LZMA: Extract all entries")]
public void SevenZipLzmaExtract()
{
using var stream = new MemoryStream(_lzmaBytes);
using var archive = SevenZipArchive.OpenArchive(stream);
foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
{
using var entryStream = entry.OpenEntryStream();
entryStream.CopyTo(Stream.Null);
}
}
[Benchmark(Description = "7Zip LZMA2: Extract all entries")]
public void SevenZipLzma2Extract()
{
using var stream = new MemoryStream(_lzma2Bytes);
using var archive = SevenZipArchive.OpenArchive(stream);
foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
{
using var entryStream = entry.OpenEntryStream();
entryStream.CopyTo(Stream.Null);
}
}
}


@@ -0,0 +1,81 @@
using System;
using System.IO;
using System.Linq;
using BenchmarkDotNet.Attributes;
using SharpCompress.Archives.Tar;
using SharpCompress.Common;
using SharpCompress.Readers;
using SharpCompress.Writers;
namespace SharpCompress.Performance.Benchmarks;
[MemoryDiagnoser]
public class TarBenchmarks : ArchiveBenchmarkBase
{
private byte[] _tarBytes = null!;
private byte[] _tarGzBytes = null!;
[GlobalSetup]
public void Setup()
{
_tarBytes = File.ReadAllBytes(GetArchivePath("Tar.tar"));
_tarGzBytes = File.ReadAllBytes(GetArchivePath("Tar.tar.gz"));
}
[Benchmark(Description = "Tar: Extract all entries (Archive API)")]
public void TarExtractArchiveApi()
{
using var stream = new MemoryStream(_tarBytes);
using var archive = TarArchive.OpenArchive(stream);
foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
{
using var entryStream = entry.OpenEntryStream();
entryStream.CopyTo(Stream.Null);
}
}
[Benchmark(Description = "Tar: Extract all entries (Reader API)")]
public void TarExtractReaderApi()
{
using var stream = new MemoryStream(_tarBytes);
using var reader = ReaderFactory.OpenReader(stream);
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
reader.WriteEntryTo(Stream.Null);
}
}
}
[Benchmark(Description = "Tar.GZip: Extract all entries")]
public void TarGzipExtract()
{
using var stream = new MemoryStream(_tarGzBytes);
using var archive = TarArchive.OpenArchive(stream);
foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
{
using var entryStream = entry.OpenEntryStream();
entryStream.CopyTo(Stream.Null);
}
}
[Benchmark(Description = "Tar: Create archive with small files")]
public void TarCreateSmallFiles()
{
using var outputStream = new MemoryStream();
using var writer = WriterFactory.OpenWriter(
outputStream,
ArchiveType.Tar,
new WriterOptions(CompressionType.None) { LeaveStreamOpen = true }
);
// Create 10 small files
for (int i = 0; i < 10; i++)
{
var data = new byte[1024]; // 1KB each
using var entryStream = new MemoryStream(data);
writer.Write($"file{i}.txt", entryStream);
}
}
}


@@ -0,0 +1,69 @@
using System;
using System.IO;
using System.Linq;
using BenchmarkDotNet.Attributes;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.Readers;
using SharpCompress.Writers;
namespace SharpCompress.Performance.Benchmarks;
[MemoryDiagnoser]
public class ZipBenchmarks : ArchiveBenchmarkBase
{
private string _archivePath = null!;
private byte[] _archiveBytes = null!;
[GlobalSetup]
public void Setup()
{
_archivePath = GetArchivePath("Zip.deflate.zip");
_archiveBytes = File.ReadAllBytes(_archivePath);
}
[Benchmark(Description = "Zip: Extract all entries (Archive API)")]
public void ZipExtractArchiveApi()
{
using var stream = new MemoryStream(_archiveBytes);
using var archive = ZipArchive.OpenArchive(stream);
foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
{
using var entryStream = entry.OpenEntryStream();
entryStream.CopyTo(Stream.Null);
}
}
[Benchmark(Description = "Zip: Extract all entries (Reader API)")]
public void ZipExtractReaderApi()
{
using var stream = new MemoryStream(_archiveBytes);
using var reader = ReaderFactory.OpenReader(stream);
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
reader.WriteEntryTo(Stream.Null);
}
}
}
[Benchmark(Description = "Zip: Create archive with small files")]
public void ZipCreateSmallFiles()
{
using var outputStream = new MemoryStream();
using var writer = WriterFactory.OpenWriter(
outputStream,
ArchiveType.Zip,
new WriterOptions(CompressionType.Deflate) { LeaveStreamOpen = true }
);
// Create 10 small files
for (int i = 0; i < 10; i++)
{
var data = new byte[1024]; // 1KB each
using var entryStream = new MemoryStream(data);
writer.Write($"file{i}.txt", entryStream);
}
}
}


@@ -1,54 +1,112 @@
 using System;
 using System.IO;
 using System.Linq;
-using System.Threading.Tasks;
-using SharpCompress.Archives;
-using SharpCompress.Performance;
-using SharpCompress.Readers;
-using SharpCompress.Test;
+using BenchmarkDotNet.Configs;
+using BenchmarkDotNet.Jobs;
+using BenchmarkDotNet.Running;
+using BenchmarkDotNet.Toolchains.InProcess.Emit;
 
-var index = AppDomain.CurrentDomain.BaseDirectory.IndexOf(
-    "SharpCompress.Performance",
-    StringComparison.OrdinalIgnoreCase
-);
-var path = AppDomain.CurrentDomain.BaseDirectory.Substring(0, index);
-var SOLUTION_BASE_PATH = Path.GetDirectoryName(path) ?? throw new ArgumentNullException();
-var TEST_ARCHIVES_PATH = Path.Combine(SOLUTION_BASE_PATH, "TestArchives", "Archives");
+namespace SharpCompress.Performance;
 
-//using var _ = JetbrainsProfiler.Memory($"/Users/adam/temp/");
-using (var __ = JetbrainsProfiler.Cpu($"/Users/adam/temp/"))
+public class Program
 {
-    var testArchives = new[]
+    public static void Main(string[] args)
     {
-        "Rar.Audio_program.rar",
-        //"64bitstream.zip.7z",
-        //"TarWithSymlink.tar.gz"
-    };
-    var arcs = testArchives.Select(a => Path.Combine(TEST_ARCHIVES_PATH, a)).ToArray();
-    for (int i = 0; i < 50; i++)
-    {
-        using var found = ArchiveFactory.OpenArchive(arcs[0]);
-        foreach (var entry in found.Entries.Where(entry => !entry.IsDirectory))
+        // Check if profiling mode is requested
+        if (args.Length > 0 && args[0].Equals("--profile", StringComparison.OrdinalIgnoreCase))
         {
-            Console.WriteLine($"Extracting {entry.Key}");
-            using var entryStream = entry.OpenEntryStream();
-            entryStream.CopyTo(Stream.Null);
+            RunWithProfiler(args);
+            return;
         }
-        /*using var found = ReaderFactory.OpenReader(arcs[0]);
-        while (found.MoveToNextEntry())
-        {
-            var entry = found.Entry;
-            if (entry.IsDirectory)
-                continue;
-            Console.WriteLine($"Extracting {entry.Key}");
-            found.WriteEntryTo(Stream.Null);
-        }*/
+
+        // Default: Run BenchmarkDotNet
+        var config = DefaultConfig.Instance.AddJob(
+            Job.Default.WithToolchain(InProcessEmitToolchain.Instance)
+                .WithWarmupCount(3) // Minimal warmup iterations for CI
+                .WithIterationCount(10) // Minimal measurement iterations for CI
+                .WithInvocationCount(10)
+                .WithUnrollFactor(1)
+        );
+        BenchmarkSwitcher.FromAssembly(typeof(Program).Assembly).Run(args, config);
     }
-    Console.WriteLine("Still running...");
+
+    private static void RunWithProfiler(string[] args)
+    {
+        var profileType = "cpu"; // Default to CPU profiling
+        var outputPath = "./profiler-snapshots";
+
+        // Parse arguments
+        for (int i = 1; i < args.Length; i++)
+        {
+            if (args[i].Equals("--type", StringComparison.OrdinalIgnoreCase) && i + 1 < args.Length)
+            {
+                profileType = args[++i].ToLowerInvariant();
+            }
+            else if (
+                args[i].Equals("--output", StringComparison.OrdinalIgnoreCase)
+                && i + 1 < args.Length
+            )
+            {
+                outputPath = args[++i];
+            }
+        }
+
+        Console.WriteLine($"Running with JetBrains Profiler ({profileType} mode)");
+        Console.WriteLine($"Output path: {outputPath}");
+        Console.WriteLine();
+        Console.WriteLine(
+            "Usage: dotnet run --project SharpCompress.Performance.csproj -c Release -- --profile [--type cpu|memory] [--output <path>]"
+        );
+        Console.WriteLine();
+
+        // Run a sample benchmark with profiling
+        RunSampleBenchmarkWithProfiler(profileType, outputPath);
+    }
+
+    private static void RunSampleBenchmarkWithProfiler(string profileType, string outputPath)
+    {
+        Console.WriteLine("Running sample benchmark with profiler...");
+        Console.WriteLine("Note: JetBrains profiler requires the profiler tools to be installed.");
+        Console.WriteLine("Install from: https://www.jetbrains.com/profiler/");
+        Console.WriteLine();
+
+        try
+        {
+            IDisposable? profiler = null;
+            if (profileType == "cpu")
+            {
+                profiler = Test.JetbrainsProfiler.Cpu(outputPath);
+            }
+            else if (profileType == "memory")
+            {
+                profiler = Test.JetbrainsProfiler.Memory(outputPath);
+            }
+
+            using (profiler)
+            {
+                // Run a simple benchmark iteration
+                var zipBenchmark = new Benchmarks.ZipBenchmarks();
+                zipBenchmark.Setup();
+
+                Console.WriteLine("Running benchmark iterations...");
+                for (int i = 0; i < 10; i++)
+                {
+                    zipBenchmark.ZipExtractArchiveApi();
+                    if (i % 3 == 0)
+                    {
+                        Console.Write(".");
+                    }
+                }
+                Console.WriteLine();
+                Console.WriteLine("Benchmark iterations completed.");
+            }
+
+            Console.WriteLine($"Profiler snapshot saved to: {outputPath}");
+        }
+        catch (Exception ex)
+        {
+            Console.WriteLine($"Error running profiler: {ex.Message}");
+            Console.WriteLine("Make sure JetBrains profiler tools are installed and accessible.");
+        }
+    }
 }
-await Task.Delay(500);


@@ -0,0 +1,143 @@
# SharpCompress Performance Benchmarks
This project contains performance benchmarks for SharpCompress using [BenchmarkDotNet](https://benchmarkdotnet.org/).
## Overview
The benchmarks test all major archive formats supported by SharpCompress:
- **Zip**: Read (Archive & Reader API) and Write operations
- **Tar**: Read (Archive & Reader API) and Write operations, including Tar.GZip
- **Rar**: Read operations (Archive & Reader API)
- **7Zip**: Read operations for LZMA and LZMA2 compression
- **GZip**: Compression and decompression
## Running Benchmarks
### Run all benchmarks
```bash
dotnet run --project tests/SharpCompress.Performance/SharpCompress.Performance.csproj --configuration Release
```
### Run specific benchmark class
```bash
dotnet run --project tests/SharpCompress.Performance/SharpCompress.Performance.csproj --configuration Release -- --filter "*ZipBenchmarks*"
```
### Run with specific job configuration
```bash
# Quick run for testing (1 warmup, 1 iteration)
dotnet run --project tests/SharpCompress.Performance/SharpCompress.Performance.csproj --configuration Release -- --job Dry
# Short run (3 warmup, 3 iterations)
dotnet run --project tests/SharpCompress.Performance/SharpCompress.Performance.csproj --configuration Release -- --job Short
# Medium run (default)
dotnet run --project tests/SharpCompress.Performance/SharpCompress.Performance.csproj --configuration Release -- --job Medium
```
### Export results
```bash
# Export to JSON
dotnet run --project tests/SharpCompress.Performance/SharpCompress.Performance.csproj --configuration Release -- --exporters json
# Export to multiple formats
dotnet run --project tests/SharpCompress.Performance/SharpCompress.Performance.csproj --configuration Release -- --exporters json markdown html
```
### List available benchmarks
```bash
dotnet run --project tests/SharpCompress.Performance/SharpCompress.Performance.csproj --configuration Release -- --list flat
```
## Baseline Results
The baseline results are stored in `baseline-results.md` and represent the expected performance characteristics of the library. These results are used in CI to detect significant performance regressions.
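The comparison runs in CI through the `display-benchmark-results` and `compare-benchmark-results` build targets. A rough local equivalent, using the same commands as the workflow above (run from the repository root; the comparison assumes reports were written to `benchmark-results/`):

```bash
# Run the benchmarks and export reports to benchmark-results/ (same flags as CI)
dotnet run --project tests/SharpCompress.Performance/SharpCompress.Performance.csproj \
  --configuration Release -- --filter "*" --exporters json markdown --artifacts benchmark-results

# Print the reports and compare them against baseline-results.md
dotnet run --project build/build.csproj -- display-benchmark-results
dotnet run --project build/build.csproj -- compare-benchmark-results
```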
### Generate Baseline (Automated)
Use the build target to generate baseline results:
```bash
dotnet run --project build/build.csproj -- generate-baseline
```
This will:
1. Build the performance project
2. Run all benchmarks
3. Combine the markdown reports into `baseline-results.md`
4. Clean up temporary artifacts
### Generate Baseline (Manual)
To manually update the baseline:
1. Run the benchmarks: `dotnet run --project tests/SharpCompress.Performance/SharpCompress.Performance.csproj --configuration Release -- --exporters markdown --artifacts baseline-output`
2. Combine the results: `cat baseline-output/results/*-report-github.md > baseline-results.md`
3. Review the changes and commit if appropriate
## JetBrains Profiler Integration
The performance project supports JetBrains profiler for detailed CPU and memory profiling during local development.
### Prerequisites
Install JetBrains profiler tools from: https://www.jetbrains.com/profiler/
### Run with CPU Profiling
```bash
dotnet run --project tests/SharpCompress.Performance/SharpCompress.Performance.csproj --configuration Release -- --profile --type cpu --output ./my-cpu-snapshots
```
### Run with Memory Profiling
```bash
dotnet run --project tests/SharpCompress.Performance/SharpCompress.Performance.csproj --configuration Release -- --profile --type memory --output ./my-memory-snapshots
```
### Profiler Options
- `--profile`: Enable profiler mode
- `--type cpu|memory`: Choose profiling type (default: cpu)
- `--output <path>`: Specify snapshot output directory (default: ./profiler-snapshots)
The profiler will run a sample benchmark and save snapshots that can be opened in JetBrains profiler tools for detailed analysis.
## CI Integration
The performance benchmarks run automatically in GitHub Actions on:
- Push to `master` or `release` branches
- Pull requests to `master` or `release` branches
- Manual workflow dispatch
Results are displayed in the GitHub Actions summary and uploaded as artifacts.
## Benchmark Configuration
The benchmarks are configured with minimal iterations for CI efficiency:
- **Warmup Count**: 3 iterations
- **Iteration Count**: 10 iterations
- **Invocation Count**: 10
- **Unroll Factor**: 1
- **Toolchain**: InProcessEmitToolchain (for fast execution)
These settings provide a good balance between speed and accuracy for CI purposes. For more accurate results, use the `Short`, `Medium`, or `Long` job configurations.
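For reference, this corresponds to the job configuration in `Program.cs` (included in this change set); a condensed sketch:

```csharp
using BenchmarkDotNet.Configs;
using BenchmarkDotNet.Jobs;
using BenchmarkDotNet.Running;
using BenchmarkDotNet.Toolchains.InProcess.Emit;

// In-process job used by default (no --profile flag): low iteration counts keep CI runs short.
var config = DefaultConfig.Instance.AddJob(
    Job.Default.WithToolchain(InProcessEmitToolchain.Instance)
        .WithWarmupCount(3)      // warmup iterations
        .WithIterationCount(10)  // measurement iterations
        .WithInvocationCount(10) // invocations per iteration
        .WithUnrollFactor(1)
);
BenchmarkSwitcher.FromAssembly(typeof(Program).Assembly).Run(args, config);
```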
## Memory Diagnostics
All benchmarks include memory diagnostics using `[MemoryDiagnoser]`, which provides:
- Total allocated memory per operation
- Gen 0/1/2 collection counts
## Understanding Results
Key metrics in the benchmark results:
- **Mean**: Average execution time
- **Error**: Half of 99.9% confidence interval
- **StdDev**: Standard deviation
- **Allocated**: Total managed memory allocated per operation
## Contributing
When adding new benchmarks (a minimal example follows this list):
1. Create a new class in the `Benchmarks/` directory
2. Inherit from `ArchiveBenchmarkBase` for archive-related benchmarks
3. Add `[MemoryDiagnoser]` attribute to the class
4. Use `[Benchmark(Description = "...")]` for each benchmark method
5. Add `[GlobalSetup]` for one-time initialization
6. Update this README if needed
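A minimal sketch of such a class, modeled on `ZipBenchmarks` above. The `ExampleBenchmarks` name and the `Example.zip` archive are placeholders, not files that exist under `tests/TestArchives/Archives`; substitute a real test archive.

```csharp
using System.IO;
using System.Linq;
using BenchmarkDotNet.Attributes;
using SharpCompress.Archives.Zip;

namespace SharpCompress.Performance.Benchmarks;

[MemoryDiagnoser]
public class ExampleBenchmarks : ArchiveBenchmarkBase
{
    private byte[] _archiveBytes = null!;

    [GlobalSetup]
    public void Setup()
    {
        // "Example.zip" is a placeholder; point this at a real archive in TestArchives/Archives.
        _archiveBytes = File.ReadAllBytes(GetArchivePath("Example.zip"));
    }

    [Benchmark(Description = "Example: Extract all entries")]
    public void ExampleExtract()
    {
        using var stream = new MemoryStream(_archiveBytes);
        using var archive = ZipArchive.OpenArchive(stream);
        foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
        {
            using var entryStream = entry.OpenEntryStream();
            entryStream.CopyTo(Stream.Null);
        }
    }
}
```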


@@ -4,6 +4,7 @@
<TargetFramework>net10.0</TargetFramework>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="BenchmarkDotNet" />
<PackageReference Include="JetBrains.Profiler.SelfApi" />
<ProjectReference Include="..\..\src\SharpCompress\SharpCompress.csproj" />
</ItemGroup>


@@ -0,0 +1,23 @@
| Method | Mean | Error | StdDev | Allocated |
|------------------------- |-----------:|---------:|---------:|----------:|
| &#39;GZip: Compress 100KB&#39; | 3,268.7 μs | 28.50 μs | 16.96 μs | 519.2 KB |
| &#39;GZip: Decompress 100KB&#39; | 436.6 μs | 3.23 μs | 1.69 μs | 34.18 KB |
| Method | Mean | Error | StdDev | Allocated |
|----------------------------------------- |---------:|----------:|----------:|----------:|
| &#39;Rar: Extract all entries (Archive API)&#39; | 2.054 ms | 0.3927 ms | 0.2598 ms | 91.09 KB |
| &#39;Rar: Extract all entries (Reader API)&#39; | 2.235 ms | 0.0253 ms | 0.0132 ms | 149.48 KB |
| Method | Mean | Error | StdDev | Allocated |
|---------------------------------- |---------:|----------:|----------:|----------:|
| &#39;7Zip LZMA: Extract all entries&#39; | 9.124 ms | 2.1930 ms | 1.4505 ms | 272.8 KB |
| &#39;7Zip LZMA2: Extract all entries&#39; | 7.810 ms | 0.1323 ms | 0.0788 ms | 272.58 KB |
| Method | Mean | Error | StdDev | Allocated |
|----------------------------------------- |----------:|---------:|---------:|----------:|
| &#39;Tar: Extract all entries (Archive API)&#39; | 56.36 μs | 3.312 μs | 1.971 μs | 16.65 KB |
| &#39;Tar: Extract all entries (Reader API)&#39; | 175.34 μs | 2.616 μs | 1.557 μs | 213.36 KB |
| &#39;Tar.GZip: Extract all entries&#39; | NA | NA | NA | NA |
| &#39;Tar: Create archive with small files&#39; | 51.38 μs | 2.349 μs | 1.398 μs | 68.7 KB |
| Method | Mean | Error | StdDev | Gen0 | Allocated |
|----------------------------------------- |-----------:|---------:|---------:|---------:|-----------:|
| &#39;Zip: Extract all entries (Archive API)&#39; | 1,188.4 μs | 28.62 μs | 14.97 μs | - | 181.66 KB |
| &#39;Zip: Extract all entries (Reader API)&#39; | 1,137.0 μs | 5.58 μs | 2.92 μs | - | 123.19 KB |
| &#39;Zip: Create archive with small files&#39; | 258.2 μs | 8.98 μs | 4.70 μs | 100.0000 | 2806.93 KB |


@@ -2,6 +2,24 @@
"version": 2,
"dependencies": {
"net10.0": {
"BenchmarkDotNet": {
"type": "Direct",
"requested": "[0.15.8, )",
"resolved": "0.15.8",
"contentHash": "paCfrWxSeHqn3rUZc0spYXVFnHCF0nzRhG0nOLnyTjZYs8spsimBaaNmb3vwqvALKIplbYq/TF393vYiYSnh/Q==",
"dependencies": {
"BenchmarkDotNet.Annotations": "0.15.8",
"CommandLineParser": "2.9.1",
"Gee.External.Capstone": "2.3.0",
"Iced": "1.21.0",
"Microsoft.CodeAnalysis.CSharp": "4.14.0",
"Microsoft.Diagnostics.Runtime": "3.1.512801",
"Microsoft.Diagnostics.Tracing.TraceEvent": "3.1.21",
"Microsoft.DotNet.PlatformAbstractions": "3.1.6",
"Perfolizer": "[0.6.1]",
"System.Management": "9.0.5"
}
},
"JetBrains.Profiler.SelfApi": {
"type": "Direct",
"requested": "[2.5.16, )",
@@ -37,6 +55,26 @@
"resolved": "17.14.15",
"contentHash": "mXQPJsbuUD2ydq4/ffd8h8tSOFCXec+2xJOVNCvXjuMOq/+5EKHq3D2m2MC2+nUaXeFMSt66VS/J4HdKBixgcw=="
},
"BenchmarkDotNet.Annotations": {
"type": "Transitive",
"resolved": "0.15.8",
"contentHash": "hfucY0ycAsB0SsoaZcaAp9oq5wlWBJcylvEJb9pmvdYUx6PD6S4mDiYnZWjdjAlLhIpe/xtGCwzORfzAzPqvzA=="
},
"CommandLineParser": {
"type": "Transitive",
"resolved": "2.9.1",
"contentHash": "OE0sl1/sQ37bjVsPKKtwQlWDgqaxWgtme3xZz7JssWUzg5JpMIyHgCTY9MVMxOg48fJ1AgGT3tgdH5m/kQ5xhA=="
},
"Gee.External.Capstone": {
"type": "Transitive",
"resolved": "2.3.0",
"contentHash": "2ap/rYmjtzCOT8hxrnEW/QeiOt+paD8iRrIcdKX0cxVwWLFa1e+JDBNeECakmccXrSFeBQuu5AV8SNkipFMMMw=="
},
"Iced": {
"type": "Transitive",
"resolved": "1.21.0",
"contentHash": "dv5+81Q1TBQvVMSOOOmRcjJmvWcX3BZPZsIq31+RLc5cNft0IHAyNlkdb7ZarOWG913PyBoFDsDXoCIlKmLclg=="
},
"JetBrains.FormatRipper": {
"type": "Transitive",
"resolved": "2.4.0",
@@ -63,6 +101,101 @@
"resolved": "10.0.102",
"contentHash": "0i81LYX31U6UiXz4NOLbvc++u+/mVDmOt+PskrM/MygpDxkv9THKQyRUmavBpLK6iBV0abNWnn+CQgSRz//Pwg=="
},
"Microsoft.CodeAnalysis.Analyzers": {
"type": "Transitive",
"resolved": "3.11.0",
"contentHash": "v/EW3UE8/lbEYHoC2Qq7AR/DnmvpgdtAMndfQNmpuIMx/Mto8L5JnuCfdBYtgvalQOtfNCnxFejxuRrryvUTsg=="
},
"Microsoft.CodeAnalysis.Common": {
"type": "Transitive",
"resolved": "4.14.0",
"contentHash": "PC3tuwZYnC+idaPuoC/AZpEdwrtX7qFpmnrfQkgobGIWiYmGi5MCRtl5mx6QrfMGQpK78X2lfIEoZDLg/qnuHg==",
"dependencies": {
"Microsoft.CodeAnalysis.Analyzers": "3.11.0"
}
},
"Microsoft.CodeAnalysis.CSharp": {
"type": "Transitive",
"resolved": "4.14.0",
"contentHash": "568a6wcTivauIhbeWcCwfWwIn7UV7MeHEBvFB2uzGIpM2OhJ4eM/FZ8KS0yhPoNxnSpjGzz7x7CIjTxhslojQA==",
"dependencies": {
"Microsoft.CodeAnalysis.Analyzers": "3.11.0",
"Microsoft.CodeAnalysis.Common": "[4.14.0]"
}
},
"Microsoft.Diagnostics.NETCore.Client": {
"type": "Transitive",
"resolved": "0.2.510501",
"contentHash": "juoqJYMDs+lRrrZyOkXXMImJHneCF23cuvO4waFRd2Ds7j+ZuGIPbJm0Y/zz34BdeaGiiwGWraMUlln05W1PCQ==",
"dependencies": {
"Microsoft.Extensions.Logging": "6.0.0"
}
},
"Microsoft.Diagnostics.Runtime": {
"type": "Transitive",
"resolved": "3.1.512801",
"contentHash": "0lMUDr2oxNZa28D6NH5BuSQEe5T9tZziIkvkD44YkkCGQXPJqvFjLq5ZQq1hYLl3RjQJrY+hR0jFgap+EWPDTw==",
"dependencies": {
"Microsoft.Diagnostics.NETCore.Client": "0.2.410101"
}
},
"Microsoft.Diagnostics.Tracing.TraceEvent": {
"type": "Transitive",
"resolved": "3.1.21",
"contentHash": "/OrJFKaojSR6TkUKtwh8/qA9XWNtxLrXMqvEb89dBSKCWjaGVTbKMYodIUgF5deCEtmd6GXuRerciXGl5bhZ7Q==",
"dependencies": {
"Microsoft.Diagnostics.NETCore.Client": "0.2.510501",
"System.Reflection.TypeExtensions": "4.7.0"
}
},
"Microsoft.DotNet.PlatformAbstractions": {
"type": "Transitive",
"resolved": "3.1.6",
"contentHash": "jek4XYaQ/PGUwDKKhwR8K47Uh1189PFzMeLqO83mXrXQVIpARZCcfuDedH50YDTepBkfijCZN5U/vZi++erxtg=="
},
"Microsoft.Extensions.DependencyInjection": {
"type": "Transitive",
"resolved": "6.0.0",
"contentHash": "k6PWQMuoBDGGHOQTtyois2u4AwyVcIwL2LaSLlTZQm2CYcJ1pxbt6jfAnpWmzENA/wfrYRI/X9DTLoUkE4AsLw==",
"dependencies": {
"Microsoft.Extensions.DependencyInjection.Abstractions": "6.0.0"
}
},
"Microsoft.Extensions.DependencyInjection.Abstractions": {
"type": "Transitive",
"resolved": "6.0.0",
"contentHash": "xlzi2IYREJH3/m6+lUrQlujzX8wDitm4QGnUu6kUXTQAWPuZY8i+ticFJbzfqaetLA6KR/rO6Ew/HuYD+bxifg=="
},
"Microsoft.Extensions.Logging": {
"type": "Transitive",
"resolved": "6.0.0",
"contentHash": "eIbyj40QDg1NDz0HBW0S5f3wrLVnKWnDJ/JtZ+yJDFnDj90VoPuoPmFkeaXrtu+0cKm5GRAwoDf+dBWXK0TUdg==",
"dependencies": {
"Microsoft.Extensions.DependencyInjection": "6.0.0",
"Microsoft.Extensions.DependencyInjection.Abstractions": "6.0.0",
"Microsoft.Extensions.Logging.Abstractions": "6.0.0",
"Microsoft.Extensions.Options": "6.0.0"
}
},
"Microsoft.Extensions.Logging.Abstractions": {
"type": "Transitive",
"resolved": "6.0.0",
"contentHash": "/HggWBbTwy8TgebGSX5DBZ24ndhzi93sHUBDvP1IxbZD7FDokYzdAr6+vbWGjw2XAfR2EJ1sfKUotpjHnFWPxA=="
},
"Microsoft.Extensions.Options": {
"type": "Transitive",
"resolved": "6.0.0",
"contentHash": "dzXN0+V1AyjOe2xcJ86Qbo233KHuLEY0njf/P2Kw8SfJU+d45HNS2ctJdnEnrWbM9Ye2eFgaC5Mj9otRMU6IsQ==",
"dependencies": {
"Microsoft.Extensions.DependencyInjection.Abstractions": "6.0.0",
"Microsoft.Extensions.Primitives": "6.0.0"
}
},
"Microsoft.Extensions.Primitives": {
"type": "Transitive",
"resolved": "6.0.0",
"contentHash": "9+PnzmQFfEFNR9J2aDTfJGGupShHjOuGw4VUv+JB044biSHrnmCIMD+mJHmb2H7YryrfBEXDurxQ47gJZdCKNQ=="
},
"Microsoft.NETFramework.ReferenceAssemblies.net461": {
"type": "Transitive",
"resolved": "1.0.3",
@@ -73,6 +206,37 @@
"resolved": "10.0.102",
"contentHash": "Mk1IMb9q5tahC2NltxYXFkLBtuBvfBoCQ3pIxYQWfzbCE9o1OB9SsHe0hnNGo7lWgTA/ePbFAJLWu6nLL9K17A=="
},
"Perfolizer": {
"type": "Transitive",
"resolved": "0.6.1",
"contentHash": "CR1QmWg4XYBd1Pb7WseP+sDmV8nGPwvmowKynExTqr3OuckIGVMhvmN4LC5PGzfXqDlR295+hz/T7syA1CxEqA==",
"dependencies": {
"Pragmastat": "3.2.4"
}
},
"Pragmastat": {
"type": "Transitive",
"resolved": "3.2.4",
"contentHash": "I5qFifWw/gaTQT52MhzjZpkm/JPlfjSeO/DTZJjO7+hTKI+0aGRgOgZ3NN6D96dDuuqbIAZSeA5RimtHjqrA2A=="
},
"System.CodeDom": {
"type": "Transitive",
"resolved": "9.0.5",
"contentHash": "cuzLM2MWutf9ZBEMPYYfd0DXwYdvntp7VCT6a/wvbKCa2ZuvGmW74xi+YBa2mrfEieAXqM4TNKlMmSnfAfpUoQ=="
},
"System.Management": {
"type": "Transitive",
"resolved": "9.0.5",
"contentHash": "n6o9PZm9p25+zAzC3/48K0oHnaPKTInRrxqFq1fi/5TPbMLjuoCm/h//mS3cUmSy+9AO1Z+qsC/Ilt/ZFatv5Q==",
"dependencies": {
"System.CodeDom": "9.0.5"
}
},
"System.Reflection.TypeExtensions": {
"type": "Transitive",
"resolved": "4.7.0",
"contentHash": "VybpaOQQhqE6siHppMktjfGBw1GCwvCqiufqmP8F1nj7fTUNtW35LOEt3UZTEsECfo+ELAl/9o9nJx3U91i7vA=="
},
"sharpcompress": {
"type": "Project"
}