Compare commits

..

1 Commits

Author SHA1 Message Date
Adam Hathcock
cc6e410be8 some options 2026-02-06 15:16:45 +00:00
66 changed files with 909 additions and 1453 deletions

View File

@@ -1,50 +0,0 @@
# Runs the BenchmarkDotNet performance suite on pushes and PRs targeting
# master/release (or manually via workflow_dispatch) and uploads the results
# as a build artifact. NOTE: the original indentation was stripped by the
# diff rendering; structure reconstructed per GitHub Actions workflow syntax.
name: Performance Benchmarks

on:
  push:
    branches:
      - 'master'
      - 'release'
  pull_request:
    branches:
      - 'master'
      - 'release'
  workflow_dispatch:

# Least-privilege token: the job only reads the repository.
permissions:
  contents: read

jobs:
  benchmark:
    runs-on: ubuntu-latest
    steps:
      # Full history (fetch-depth: 0) so baseline comparison can inspect prior commits.
      - uses: actions/checkout@v6
        with:
          fetch-depth: 0
      - uses: actions/setup-dotnet@v5
        with:
          dotnet-version: 10.0.x
      - name: Build Performance Project
        run: dotnet build tests/SharpCompress.Performance/SharpCompress.Performance.csproj --configuration Release
      - name: Run Benchmarks
        run: dotnet run --project tests/SharpCompress.Performance/SharpCompress.Performance.csproj --configuration Release --no-build -- --filter "*" --exporters json markdown --artifacts benchmark-results
        # Benchmarks may fail on shared CI hardware; still publish whatever ran.
        continue-on-error: true
      - name: Display Benchmark Results
        if: always()
        run: dotnet run --project build/build.csproj -- display-benchmark-results
      - name: Compare with Baseline
        if: always()
        run: dotnet run --project build/build.csproj -- compare-benchmark-results
      - name: Upload Benchmark Results
        if: always()
        uses: actions/upload-artifact@v6
        with:
          name: benchmark-results
          path: benchmark-results/

4
.gitignore vendored
View File

@@ -17,10 +17,6 @@ tests/TestArchives/*/Scratch2
tools
.idea/
artifacts/
BenchmarkDotNet.Artifacts/
baseline-artifacts/
profiler-snapshots/
.DS_Store
*.snupkg
benchmark-results/

View File

@@ -1,6 +1,5 @@
<Project>
<ItemGroup>
<PackageVersion Include="BenchmarkDotNet" Version="0.15.8" />
<PackageVersion Include="Bullseye" Version="6.1.0" />
<PackageVersion Include="AwesomeAssertions" Version="9.3.0" />
<PackageVersion Include="Glob" Version="1.1.9" />

View File

@@ -19,9 +19,6 @@ const string Publish = "publish";
const string DetermineVersion = "determine-version";
const string UpdateVersion = "update-version";
const string PushToNuGet = "push-to-nuget";
const string DisplayBenchmarkResults = "display-benchmark-results";
const string CompareBenchmarkResults = "compare-benchmark-results";
const string GenerateBaseline = "generate-baseline";
Target(
Clean,
@@ -213,249 +210,6 @@ Target(
}
);
Target(
DisplayBenchmarkResults,
() =>
{
var githubStepSummary = Environment.GetEnvironmentVariable("GITHUB_STEP_SUMMARY");
var resultsDir = "benchmark-results/results";
if (!Directory.Exists(resultsDir))
{
Console.WriteLine("No benchmark results found.");
return;
}
var markdownFiles = Directory
.GetFiles(resultsDir, "*-report-github.md")
.OrderBy(f => f)
.ToList();
if (markdownFiles.Count == 0)
{
Console.WriteLine("No benchmark markdown reports found.");
return;
}
var output = new List<string> { "## Benchmark Results", "" };
foreach (var file in markdownFiles)
{
Console.WriteLine($"Processing {Path.GetFileName(file)}");
var content = File.ReadAllText(file);
output.Add(content);
output.Add("");
}
// Write to GitHub Step Summary if available
if (!string.IsNullOrEmpty(githubStepSummary))
{
File.AppendAllLines(githubStepSummary, output);
Console.WriteLine($"Benchmark results written to GitHub Step Summary");
}
else
{
// Write to console if not in GitHub Actions
foreach (var line in output)
{
Console.WriteLine(line);
}
}
}
);
Target(
CompareBenchmarkResults,
() =>
{
var githubStepSummary = Environment.GetEnvironmentVariable("GITHUB_STEP_SUMMARY");
var baselinePath = "tests/SharpCompress.Performance/baseline-results.md";
var resultsDir = "benchmark-results/results";
var output = new List<string> { "## Comparison with Baseline", "" };
if (!File.Exists(baselinePath))
{
Console.WriteLine("Baseline file not found");
output.Add("⚠️ Baseline file not found. Run `generate-baseline` to create it.");
WriteOutput(output, githubStepSummary);
return;
}
if (!Directory.Exists(resultsDir))
{
Console.WriteLine("No current benchmark results found.");
output.Add("⚠️ No current benchmark results found. Showing baseline only.");
output.Add("");
output.Add("### Baseline Results");
output.AddRange(File.ReadAllLines(baselinePath));
WriteOutput(output, githubStepSummary);
return;
}
var markdownFiles = Directory
.GetFiles(resultsDir, "*-report-github.md")
.OrderBy(f => f)
.ToList();
if (markdownFiles.Count == 0)
{
Console.WriteLine("No current benchmark markdown reports found.");
output.Add("⚠️ No current benchmark results found. Showing baseline only.");
output.Add("");
output.Add("### Baseline Results");
output.AddRange(File.ReadAllLines(baselinePath));
WriteOutput(output, githubStepSummary);
return;
}
Console.WriteLine("Parsing baseline results...");
var baselineMetrics = ParseBenchmarkResults(File.ReadAllText(baselinePath));
Console.WriteLine("Parsing current results...");
var currentText = string.Join("\n", markdownFiles.Select(f => File.ReadAllText(f)));
var currentMetrics = ParseBenchmarkResults(currentText);
Console.WriteLine("Comparing results...");
output.Add("### Performance Comparison");
output.Add("");
output.Add(
"| Benchmark | Baseline Mean | Current Mean | Change | Baseline Memory | Current Memory | Change |"
);
output.Add(
"|-----------|---------------|--------------|--------|-----------------|----------------|--------|"
);
var hasRegressions = false;
var hasImprovements = false;
foreach (var method in currentMetrics.Keys.Union(baselineMetrics.Keys).OrderBy(k => k))
{
var hasCurrent = currentMetrics.TryGetValue(method, out var current);
var hasBaseline = baselineMetrics.TryGetValue(method, out var baseline);
if (!hasCurrent)
{
output.Add(
$"| {method} | {baseline!.Mean} | ❌ Missing | N/A | {baseline.Memory} | N/A | N/A |"
);
continue;
}
if (!hasBaseline)
{
output.Add(
$"| {method} | ❌ New | {current!.Mean} | N/A | N/A | {current.Memory} | N/A |"
);
continue;
}
var timeChange = CalculateChange(baseline!.MeanValue, current!.MeanValue);
var memChange = CalculateChange(baseline.MemoryValue, current.MemoryValue);
var timeIcon =
timeChange > 25 ? "🔴"
: timeChange < -25 ? "🟢"
: "⚪";
var memIcon =
memChange > 25 ? "🔴"
: memChange < -25 ? "🟢"
: "⚪";
if (timeChange > 25 || memChange > 25)
hasRegressions = true;
if (timeChange < -25 || memChange < -25)
hasImprovements = true;
output.Add(
$"| {method} | {baseline.Mean} | {current.Mean} | {timeIcon} {timeChange:+0.0;-0.0;0}% | {baseline.Memory} | {current.Memory} | {memIcon} {memChange:+0.0;-0.0;0}% |"
);
}
output.Add("");
output.Add("**Legend:**");
output.Add("- 🔴 Regression (>25% slower/more memory)");
output.Add("- 🟢 Improvement (>25% faster/less memory)");
output.Add("- ⚪ No significant change");
if (hasRegressions)
{
output.Add("");
output.Add(
"⚠️ **Warning**: Performance regressions detected. Review the changes carefully."
);
}
else if (hasImprovements)
{
output.Add("");
output.Add("✅ Performance improvements detected!");
}
else
{
output.Add("");
output.Add("✅ Performance is stable compared to baseline.");
}
WriteOutput(output, githubStepSummary);
}
);
Target(
GenerateBaseline,
() =>
{
var perfProject = "tests/SharpCompress.Performance/SharpCompress.Performance.csproj";
var baselinePath = "tests/SharpCompress.Performance/baseline-results.md";
var artifactsDir = "baseline-artifacts";
Console.WriteLine("Building performance project...");
Run("dotnet", $"build {perfProject} --configuration Release");
Console.WriteLine("Running benchmarks to generate baseline...");
Run(
"dotnet",
$"run --project {perfProject} --configuration Release --no-build -- --filter \"*\" --exporters markdown --artifacts {artifactsDir}"
);
var resultsDir = Path.Combine(artifactsDir, "results");
if (!Directory.Exists(resultsDir))
{
Console.WriteLine("ERROR: No benchmark results generated.");
return;
}
var markdownFiles = Directory
.GetFiles(resultsDir, "*-report-github.md")
.OrderBy(f => f)
.ToList();
if (markdownFiles.Count == 0)
{
Console.WriteLine("ERROR: No markdown reports found.");
return;
}
Console.WriteLine($"Combining {markdownFiles.Count} benchmark reports...");
var baselineContent = new List<string>();
foreach (var file in markdownFiles)
{
var lines = File.ReadAllLines(file);
baselineContent.AddRange(lines.Select(l => l.Trim()).Where(l => l.StartsWith('|')));
}
File.WriteAllText(baselinePath, string.Join(Environment.NewLine, baselineContent));
Console.WriteLine($"Baseline written to {baselinePath}");
// Clean up artifacts directory
if (Directory.Exists(artifactsDir))
{
Directory.Delete(artifactsDir, true);
Console.WriteLine("Cleaned up artifacts directory.");
}
}
);
Target("default", [Publish], () => Console.WriteLine("Done!"));
await RunTargetsAndExitAsync(args);
@@ -548,142 +302,3 @@ static async Task<string> GetGitOutput(string command, string args)
throw new Exception($"Git command failed: git {command} {args}\n{ex.Message}", ex);
}
}
/// <summary>
/// Emits the accumulated report lines either to the GitHub Actions step
/// summary file (when GITHUB_STEP_SUMMARY is set) or to the console.
/// </summary>
/// <param name="output">Report lines, in display order.</param>
/// <param name="githubStepSummary">Path of the step-summary file, or null/empty when not running in GitHub Actions.</param>
static void WriteOutput(List<string> output, string? githubStepSummary)
{
    if (string.IsNullOrEmpty(githubStepSummary))
    {
        // Not in CI: echo every line to stdout instead.
        output.ForEach(Console.WriteLine);
        return;
    }

    // Appending (not overwriting) lets multiple targets contribute to one summary page.
    File.AppendAllLines(githubStepSummary, output);
    Console.WriteLine("Comparison written to GitHub Step Summary");
}
/// <summary>
/// Parses BenchmarkDotNet GitHub-flavoured markdown reports into a map of
/// benchmark method name -> <see cref="BenchmarkMetric"/>. A later row for
/// the same method overwrites an earlier one.
/// </summary>
/// <param name="markdown">Concatenated markdown report text.</param>
/// <returns>Metrics keyed by decoded method name; empty when no rows match.</returns>
static Dictionary<string, BenchmarkMetric> ParseBenchmarkResults(string markdown)
{
    var metrics = new Dictionary<string, BenchmarkMetric>();
    var lines = markdown.Split('\n');

    // Start at 1: the original logic ignores a benchmark row on the very first line.
    for (var index = 1; index < lines.Length; index++)
    {
        var row = lines[index].Trim();

        // Data rows are markdown table rows whose method cell contains an
        // HTML-encoded apostrophe (&#39;) — how these reports quote method names.
        if (!row.StartsWith("|") || !row.Contains("&#39;"))
        {
            continue;
        }

        var cells = row.Split('|', StringSplitOptions.TrimEntries);
        if (cells.Length < 5)
        {
            continue;
        }

        // cells[0] is empty (row begins with '|'); cells[1] is the method name.
        var method = cells[1].Replace("&#39;", "'");
        if (
            method.Equals("Method", StringComparison.OrdinalIgnoreCase)
            || string.IsNullOrWhiteSpace(method)
        )
        {
            continue;
        }

        var mean = cells[2];

        // Allocated memory is the right-most cell (excluding the trailing empty
        // cell) that carries a size unit; "B" also matches KB/MB/GB, so checking
        // it last keeps the original precedence.
        var memory = "N/A";
        for (var j = cells.Length - 2; j >= 2; j--)
        {
            var candidate = cells[j];
            if (
                candidate.Contains("KB")
                || candidate.Contains("MB")
                || candidate.Contains("GB")
                || candidate.Contains("B")
            )
            {
                memory = candidate;
                break;
            }
        }

        metrics[method] = new BenchmarkMetric
        {
            Method = method,
            Mean = mean,
            MeanValue = ParseTimeValue(mean),
            Memory = memory,
            MemoryValue = ParseMemoryValue(memory),
        };
    }

    return metrics;
}
/// <summary>
/// Parses a BenchmarkDotNet time cell (e.g. "1,234.5 ms") into microseconds
/// so values with different units can be compared.
/// </summary>
/// <param name="timeStr">Raw cell text; "N/A"/"NA"/blank yield 0.</param>
/// <returns>The value converted to microseconds, or 0 when unparseable.</returns>
static double ParseTimeValue(string timeStr)
{
    if (string.IsNullOrWhiteSpace(timeStr) || timeStr == "N/A" || timeStr == "NA")
        return 0;
    // Remove thousands separators and parse
    timeStr = timeStr.Replace(",", "").Trim();
    var match = Regex.Match(timeStr, @"([\d.]+)\s*(\w+)");
    if (!match.Success)
        return 0;
    // Fix: use InvariantCulture — reports always use '.' as the decimal
    // separator, but the previous culture-sensitive Parse mis-read values
    // (or threw) on comma-decimal locales.
    var value = double.Parse(
        match.Groups[1].Value,
        System.Globalization.CultureInfo.InvariantCulture
    );
    var unit = match.Groups[2].Value.ToLower();
    // Convert to microseconds for comparison
    return unit switch
    {
        "s" => value * 1_000_000,
        "ms" => value * 1_000,
        "μs" or "us" => value,
        "ns" => value / 1_000,
        _ => value,
    };
}
/// <summary>
/// Parses a BenchmarkDotNet allocated-memory cell (e.g. "1.2 MB") into
/// kilobytes (binary, 1 KB = 1024 B) so values with different units compare.
/// </summary>
/// <param name="memStr">Raw cell text; "N/A"/"NA"/blank yield 0.</param>
/// <returns>The value converted to KB, or 0 when unparseable.</returns>
static double ParseMemoryValue(string memStr)
{
    if (string.IsNullOrWhiteSpace(memStr) || memStr == "N/A" || memStr == "NA")
        return 0;
    memStr = memStr.Replace(",", "").Trim();
    var match = Regex.Match(memStr, @"([\d.]+)\s*(\w+)");
    if (!match.Success)
        return 0;
    // Fix: use InvariantCulture — reports always use '.' as the decimal
    // separator, but the previous culture-sensitive Parse mis-read values
    // (or threw) on comma-decimal locales.
    var value = double.Parse(
        match.Groups[1].Value,
        System.Globalization.CultureInfo.InvariantCulture
    );
    var unit = match.Groups[2].Value.ToUpper();
    // Convert to KB for comparison
    return unit switch
    {
        "GB" => value * 1_024 * 1_024,
        "MB" => value * 1_024,
        "KB" => value,
        "B" => value / 1_024,
        _ => value,
    };
}
/// <summary>
/// Percentage change from <paramref name="baseline"/> to
/// <paramref name="current"/>; positive means current is larger.
/// Returns 0 when the baseline is 0 to avoid division by zero.
/// </summary>
static double CalculateChange(double baseline, double current) =>
    baseline == 0 ? 0 : (current - baseline) / baseline * 100;
/// <summary>
/// Parsed metrics for one benchmark table row: the raw display strings plus
/// normalized numeric values (mean in microseconds, memory in KB) used when
/// comparing current results against the stored baseline.
/// </summary>
record BenchmarkMetric
{
    // Benchmark method name as shown in the report (with &#39; decoded to ').
    public string Method { get; init; } = "";
    // Mean time exactly as displayed, e.g. "1.5 ms".
    public string Mean { get; init; } = "";
    // Mean normalized to microseconds (0 when unparseable).
    public double MeanValue { get; init; }
    // Allocated memory exactly as displayed, e.g. "12 KB"; "N/A" when absent.
    public string Memory { get; init; } = "";
    // Memory normalized to KB (0 when unparseable).
    public double MemoryValue { get; init; }
}

View File

@@ -5,6 +5,7 @@ using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using SharpCompress.Writers;
namespace SharpCompress.Archives;
@@ -111,7 +112,7 @@ public abstract partial class AbstractWritableArchive<TEntry, TVolume>
public async ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
IWriterOptions options,
CancellationToken cancellationToken = default
)
{

View File

@@ -5,6 +5,7 @@ using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using SharpCompress.IO;
using SharpCompress.Writers;
@@ -174,7 +175,7 @@ public abstract partial class AbstractWritableArchive<TEntry, TVolume>
return entry;
}
public void SaveTo(Stream stream, WriterOptions options)
public void SaveTo(Stream stream, IWriterOptions options)
{
//reset streams of new entries
newEntries.Cast<IWritableArchiveEntry>().ForEach(x => x.Stream.Seek(0, SeekOrigin.Begin));
@@ -210,14 +211,14 @@ public abstract partial class AbstractWritableArchive<TEntry, TVolume>
protected abstract void SaveTo(
Stream stream,
WriterOptions options,
IWriterOptions options,
IEnumerable<TEntry> oldEntries,
IEnumerable<TEntry> newEntries
);
protected abstract ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
IWriterOptions options,
IAsyncEnumerable<TEntry> oldEntries,
IEnumerable<TEntry> newEntries,
CancellationToken cancellationToken = default

View File

@@ -5,6 +5,7 @@ using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.GZip;
using SharpCompress.Common.Options;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.GZip;
@@ -30,7 +31,7 @@ public partial class GZipArchive
protected override async ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
IWriterOptions options,
IAsyncEnumerable<GZipArchiveEntry> oldEntries,
IEnumerable<GZipArchiveEntry> newEntries,
CancellationToken cancellationToken = default
@@ -40,7 +41,10 @@ public partial class GZipArchive
{
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
}
using var writer = new GZipWriter(stream, new GZipWriterOptions(options));
using var writer = new GZipWriter(
stream,
options as GZipWriterOptions ?? new GZipWriterOptions(options)
);
await foreach (
var entry in oldEntries.WithCancellation(cancellationToken).ConfigureAwait(false)
)

View File

@@ -6,6 +6,7 @@ using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.GZip;
using SharpCompress.Common.Options;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.GZip;
@@ -58,7 +59,7 @@ public partial class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZi
protected override void SaveTo(
Stream stream,
WriterOptions options,
IWriterOptions options,
IEnumerable<GZipArchiveEntry> oldEntries,
IEnumerable<GZipArchiveEntry> newEntries
)
@@ -67,7 +68,10 @@ public partial class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZi
{
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
}
using var writer = new GZipWriter(stream, new GZipWriterOptions(options));
using var writer = new GZipWriter(
stream,
options as GZipWriterOptions ?? new GZipWriterOptions(options)
);
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
{
using var entryStream = entry.OpenEntryStream();

View File

@@ -2,6 +2,7 @@ using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Options;
using SharpCompress.Writers;
namespace SharpCompress.Archives;
@@ -30,7 +31,7 @@ public interface IWritableArchive : IArchive, IWritableArchiveCommon
/// <summary>
/// Saves the archive to the specified stream using the given writer options.
/// </summary>
void SaveTo(Stream stream, WriterOptions options);
void SaveTo(Stream stream, IWriterOptions options);
/// <summary>
/// Removes the specified entry from the archive.
@@ -45,7 +46,7 @@ public interface IWritableAsyncArchive : IAsyncArchive, IWritableArchiveCommon
/// </summary>
ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
IWriterOptions options,
CancellationToken cancellationToken = default
);

View File

@@ -1,6 +1,7 @@
using System;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using SharpCompress.Writers;
namespace SharpCompress.Archives;
@@ -58,13 +59,16 @@ public static class IWritableArchiveExtensions
);
}
public void SaveTo(string filePath, WriterOptions? options = null) =>
writableArchive.SaveTo(new FileInfo(filePath), options ?? new(CompressionType.Deflate));
public void SaveTo(string filePath, IWriterOptions? options = null) =>
writableArchive.SaveTo(
new FileInfo(filePath),
options ?? new WriterOptions(CompressionType.Deflate)
);
public void SaveTo(FileInfo fileInfo, WriterOptions? options = null)
public void SaveTo(FileInfo fileInfo, IWriterOptions? options = null)
{
using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
writableArchive.SaveTo(stream, options ?? new(CompressionType.Deflate));
writableArchive.SaveTo(stream, options ?? new WriterOptions(CompressionType.Deflate));
}
}
}

View File

@@ -3,6 +3,7 @@ using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using SharpCompress.Writers;
namespace SharpCompress.Archives;
@@ -62,24 +63,28 @@ public static class IWritableAsyncArchiveExtensions
public ValueTask SaveToAsync(
string filePath,
WriterOptions? options = null,
IWriterOptions? options = null,
CancellationToken cancellationToken = default
) =>
writableArchive.SaveToAsync(
new FileInfo(filePath),
options ?? new(CompressionType.Deflate),
options ?? new WriterOptions(CompressionType.Deflate),
cancellationToken
);
public async ValueTask SaveToAsync(
FileInfo fileInfo,
WriterOptions? options = null,
IWriterOptions? options = null,
CancellationToken cancellationToken = default
)
{
using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
await writableArchive
.SaveToAsync(stream, options ?? new(CompressionType.Deflate), cancellationToken)
.SaveToAsync(
stream,
options ?? new WriterOptions(CompressionType.Deflate),
cancellationToken
)
.ConfigureAwait(false);
}
}

View File

@@ -24,8 +24,7 @@ internal class FileInfoRarArchiveVolume : RarVolume
private static ReaderOptions FixOptions(ReaderOptions options)
{
//make sure we're closing streams with fileinfo
options.LeaveStreamOpen = false;
return options;
return options with { LeaveStreamOpen = false };
}
internal ReadOnlyCollection<RarFilePart> FileParts { get; }

View File

@@ -4,6 +4,7 @@ using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using SharpCompress.Common.Tar;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
@@ -18,13 +19,16 @@ public partial class TarArchive
{
protected override async ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
IWriterOptions options,
IAsyncEnumerable<TarArchiveEntry> oldEntries,
IEnumerable<TarArchiveEntry> newEntries,
CancellationToken cancellationToken = default
)
{
using var writer = new TarWriter(stream, new TarWriterOptions(options));
using var writer = new TarWriter(
stream,
options as TarWriterOptions ?? new TarWriterOptions(options)
);
await foreach (
var entry in oldEntries.WithCancellation(cancellationToken).ConfigureAwait(false)
)

View File

@@ -5,6 +5,7 @@ using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using SharpCompress.Common.Tar;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
@@ -115,12 +116,15 @@ public partial class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVo
protected override void SaveTo(
Stream stream,
WriterOptions options,
IWriterOptions options,
IEnumerable<TarArchiveEntry> oldEntries,
IEnumerable<TarArchiveEntry> newEntries
)
{
using var writer = new TarWriter(stream, new TarWriterOptions(options));
using var writer = new TarWriter(
stream,
options as TarWriterOptions ?? new TarWriterOptions(options)
);
foreach (var entry in oldEntries.Concat(newEntries))
{
if (entry.IsDirectory)

View File

@@ -5,6 +5,7 @@ using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using SharpCompress.Common.Zip;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
@@ -71,13 +72,16 @@ public partial class ZipArchive
protected override async ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
IWriterOptions options,
IAsyncEnumerable<ZipArchiveEntry> oldEntries,
IEnumerable<ZipArchiveEntry> newEntries,
CancellationToken cancellationToken = default
)
{
using var writer = new ZipWriter(stream, new ZipWriterOptions(options));
using var writer = new ZipWriter(
stream,
options as ZipWriterOptions ?? new ZipWriterOptions(options)
);
await foreach (
var entry in oldEntries.WithCancellation(cancellationToken).ConfigureAwait(false)
)

View File

@@ -5,6 +5,7 @@ using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using SharpCompress.Common.Zip;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors.Deflate;
@@ -113,12 +114,15 @@ public partial class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVo
protected override void SaveTo(
Stream stream,
WriterOptions options,
IWriterOptions options,
IEnumerable<ZipArchiveEntry> oldEntries,
IEnumerable<ZipArchiveEntry> newEntries
)
{
using var writer = new ZipWriter(stream, new ZipWriterOptions(options));
using var writer = new ZipWriter(
stream,
options as ZipWriterOptions ?? new ZipWriterOptions(options)
);
foreach (var entry in oldEntries.Concat(newEntries))
{
if (entry.IsDirectory)

View File

@@ -90,13 +90,15 @@ internal static partial class ExtractionMethods
{
if (entry.LinkTarget != null)
{
if (options?.WriteSymbolicLink is null)
if (options?.SymbolicLinkHandler is not null)
{
throw new ExtractionException(
"Entry is a symbolic link but ExtractionOptions.WriteSymbolicLink delegate is null"
);
options.SymbolicLinkHandler(destinationFileName, entry.LinkTarget);
}
options.WriteSymbolicLink(destinationFileName, entry.LinkTarget);
else
{
ExtractionOptions.DefaultSymbolicLinkHandler(destinationFileName, entry.LinkTarget);
}
return;
}
else
{

View File

@@ -101,13 +101,15 @@ internal static partial class ExtractionMethods
{
if (entry.LinkTarget != null)
{
if (options?.WriteSymbolicLink is null)
if (options?.SymbolicLinkHandler is not null)
{
throw new ExtractionException(
"Entry is a symbolic link but ExtractionOptions.WriteSymbolicLink delegate is null"
);
options.SymbolicLinkHandler(destinationFileName, entry.LinkTarget);
}
options.WriteSymbolicLink(destinationFileName, entry.LinkTarget);
else
{
ExtractionOptions.DefaultSymbolicLinkHandler(destinationFileName, entry.LinkTarget);
}
return;
}
else
{

View File

@@ -2,39 +2,107 @@ using System;
namespace SharpCompress.Common;
public class ExtractionOptions
/// <summary>
/// Options for configuring extraction behavior when extracting archive entries.
/// </summary>
/// <remarks>
/// This class is immutable. Use the <c>with</c> expression to create modified copies:
/// <code>
/// var options = new ExtractionOptions { Overwrite = false };
/// options = options with { PreserveFileTime = true };
/// </code>
/// </remarks>
public sealed record ExtractionOptions
{
/// <summary>
/// overwrite target if it exists
/// Overwrite target if it exists.
/// <para><b>Breaking change:</b> Default changed from false to true in version 0.40.0.</para>
/// </summary>
public bool Overwrite { get; set; }
public bool Overwrite { get; init; } = true;
/// <summary>
/// extract with internal directory structure
/// Extract with internal directory structure.
/// <para><b>Breaking change:</b> Default changed from false to true in version 0.40.0.</para>
/// </summary>
public bool ExtractFullPath { get; set; }
public bool ExtractFullPath { get; init; } = true;
/// <summary>
/// preserve file time
/// Preserve file time.
/// <para><b>Breaking change:</b> Default changed from false to true in version 0.40.0.</para>
/// </summary>
public bool PreserveFileTime { get; set; }
public bool PreserveFileTime { get; init; } = true;
/// <summary>
/// preserve windows file attributes
/// Preserve windows file attributes.
/// </summary>
public bool PreserveAttributes { get; set; }
public bool PreserveAttributes { get; init; }
/// <summary>
/// Delegate for writing symbolic links to disk.
/// sourcePath is where the symlink is created.
/// targetPath is what the symlink refers to.
/// The first parameter is the source path (where the symlink is created).
/// The second parameter is the target path (what the symlink refers to).
/// </summary>
public delegate void SymbolicLinkWriterDelegate(string sourcePath, string targetPath);
/// <remarks>
/// <b>Breaking change:</b> Changed from field to init-only property in version 0.40.0.
/// The default handler logs a warning message.
/// </remarks>
public Action<string, string>? SymbolicLinkHandler { get; init; }
public SymbolicLinkWriterDelegate WriteSymbolicLink = (sourcePath, targetPath) =>
/// <summary>
/// Creates a new ExtractionOptions instance with default values.
/// </summary>
public ExtractionOptions() { }
/// <summary>
/// Creates a new ExtractionOptions instance with the specified overwrite behavior.
/// </summary>
/// <param name="overwrite">Whether to overwrite existing files.</param>
public ExtractionOptions(bool overwrite)
{
Overwrite = overwrite;
}
/// <summary>
/// Creates a new ExtractionOptions instance with the specified extraction path and overwrite behavior.
/// </summary>
/// <param name="extractFullPath">Whether to preserve directory structure.</param>
/// <param name="overwrite">Whether to overwrite existing files.</param>
public ExtractionOptions(bool extractFullPath, bool overwrite)
{
ExtractFullPath = extractFullPath;
Overwrite = overwrite;
}
/// <summary>
/// Creates a new ExtractionOptions instance with the specified extraction path, overwrite behavior, and file time preservation.
/// </summary>
/// <param name="extractFullPath">Whether to preserve directory structure.</param>
/// <param name="overwrite">Whether to overwrite existing files.</param>
/// <param name="preserveFileTime">Whether to preserve file modification times.</param>
public ExtractionOptions(bool extractFullPath, bool overwrite, bool preserveFileTime)
{
ExtractFullPath = extractFullPath;
Overwrite = overwrite;
PreserveFileTime = preserveFileTime;
}
/// <summary>
/// Gets an ExtractionOptions instance configured for safe extraction (no overwrite).
/// </summary>
public static ExtractionOptions SafeExtract => new(overwrite: false);
/// <summary>
/// Gets an ExtractionOptions instance configured for flat extraction (no directory structure).
/// </summary>
public static ExtractionOptions FlatExtract => new(extractFullPath: false, overwrite: true);
/// <summary>
/// Default symbolic link handler that logs a warning message.
/// </summary>
public static void DefaultSymbolicLinkHandler(string sourcePath, string targetPath)
{
Console.WriteLine(
$"Could not write symlink {sourcePath} -> {targetPath}, for more information please see https://github.com/dotnet/runtime/issues/24271"
);
};
}
}

View File

@@ -1,5 +1,6 @@
using System.Collections.Generic;
using System.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common.GZip;
@@ -7,7 +8,7 @@ public partial class GZipEntry
{
internal static async IAsyncEnumerable<GZipEntry> GetEntriesAsync(
Stream stream,
OptionsBase options
ReaderOptions options
)
{
yield return new GZipEntry(await GZipFilePart.CreateAsync(stream, options.ArchiveEncoding));

View File

@@ -1,6 +1,7 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common.GZip;
@@ -38,7 +39,7 @@ public partial class GZipEntry : Entry
internal override IEnumerable<FilePart> Parts => _filePart.Empty();
internal static IEnumerable<GZipEntry> GetEntries(Stream stream, OptionsBase options)
internal static IEnumerable<GZipEntry> GetEntries(Stream stream, ReaderOptions options)
{
yield return new GZipEntry(GZipFilePart.Create(stream, options.ArchiveEncoding));
}

View File

@@ -9,7 +9,7 @@ public class GZipVolume : Volume
: base(stream, options, index) { }
public GZipVolume(FileInfo fileInfo, ReaderOptions options)
: base(fileInfo.OpenRead(), options) => options.LeaveStreamOpen = false;
: base(fileInfo.OpenRead(), options with { LeaveStreamOpen = false }) { }
public override bool IsFirstVolume => true;

View File

@@ -0,0 +1,18 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// This file is required for init-only properties to work on older target frameworks (.NET Framework 4.8, .NET Standard 2.0)
// The IsExternalInit type is used by the compiler for records and init-only properties
#if NETFRAMEWORK || NETSTANDARD2_0
using System.ComponentModel;
namespace System.Runtime.CompilerServices;
/// <summary>
/// Reserved to be used by the compiler for tracking metadata.
/// This class should not be used by developers in source code.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
internal static class IsExternalInit { }
#endif

View File

@@ -0,0 +1,6 @@
namespace SharpCompress.Common.Options;
public interface IEncodingOptions
{
IArchiveEncoding ArchiveEncoding { get; init; }
}

View File

@@ -0,0 +1,8 @@
using System;
namespace SharpCompress.Common.Options;
public interface IProgressOptions
{
IProgress<ProgressReport>? Progress { get; init; }
}

View File

@@ -0,0 +1,11 @@
namespace SharpCompress.Common.Options;
public interface IReaderOptions : IStreamOptions, IEncodingOptions, IProgressOptions
{
bool LookForHeader { get; init; }
string? Password { get; init; }
bool DisableCheckIncomplete { get; init; }
int BufferSize { get; init; }
string? ExtensionHint { get; init; }
int? RewindableBufferSize { get; init; }
}

View File

@@ -0,0 +1,6 @@
namespace SharpCompress.Common.Options;
public interface IStreamOptions
{
bool LeaveStreamOpen { get; init; }
}

View File

@@ -0,0 +1,9 @@
using SharpCompress.Common;
namespace SharpCompress.Common.Options;
public interface IWriterOptions : IStreamOptions, IEncodingOptions, IProgressOptions
{
CompressionType CompressionType { get; init; }
int CompressionLevel { get; init; }
}

View File

@@ -1,11 +0,0 @@
namespace SharpCompress.Common;
public class OptionsBase
{
/// <summary>
/// SharpCompress will keep the supplied streams open. Default is true.
/// </summary>
public bool LeaveStreamOpen { get; set; } = true;
public IArchiveEncoding ArchiveEncoding { get; set; } = new ArchiveEncoding();
}

View File

@@ -1,3 +1,4 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
@@ -7,6 +8,7 @@ using SharpCompress.Archives;
using SharpCompress.Archives.GZip;
using SharpCompress.Archives.Tar;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.GZip;
@@ -156,27 +158,33 @@ public class GZipFactory
#region IWriterFactory
/// <inheritdoc/>
public IWriter OpenWriter(Stream stream, WriterOptions writerOptions)
public IWriter OpenWriter(Stream stream, IWriterOptions writerOptions)
{
if (writerOptions.CompressionType != CompressionType.GZip)
{
throw new InvalidFormatException("GZip archives only support GZip compression type.");
}
return new GZipWriter(stream, new GZipWriterOptions(writerOptions));
GZipWriterOptions gzipOptions = writerOptions switch
{
GZipWriterOptions gwo => gwo,
WriterOptions wo => new GZipWriterOptions(wo),
_ => throw new ArgumentException(
$"Expected WriterOptions or GZipWriterOptions, got {writerOptions.GetType().Name}",
nameof(writerOptions)
),
};
return new GZipWriter(stream, gzipOptions);
}
/// <inheritdoc/>
public IAsyncWriter OpenAsyncWriter(
Stream stream,
WriterOptions writerOptions,
IWriterOptions writerOptions,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
if (writerOptions.CompressionType != CompressionType.GZip)
{
throw new InvalidFormatException("GZip archives only support GZip compression type.");
}
return (IAsyncWriter)OpenWriter(stream, writerOptions);
}

View File

@@ -7,6 +7,7 @@ using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Tar;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.Tar;
@@ -216,13 +217,24 @@ public class TarFactory
#region IWriterFactory
/// <inheritdoc/>
public IWriter OpenWriter(Stream stream, WriterOptions writerOptions) =>
new TarWriter(stream, new TarWriterOptions(writerOptions));
public IWriter OpenWriter(Stream stream, IWriterOptions writerOptions)
{
TarWriterOptions tarOptions = writerOptions switch
{
TarWriterOptions two => two,
WriterOptions wo => new TarWriterOptions(wo),
_ => throw new ArgumentException(
$"Expected WriterOptions or TarWriterOptions, got {writerOptions.GetType().Name}",
nameof(writerOptions)
),
};
return new TarWriter(stream, tarOptions);
}
/// <inheritdoc/>
public IAsyncWriter OpenAsyncWriter(
Stream stream,
WriterOptions writerOptions,
IWriterOptions writerOptions,
CancellationToken cancellationToken = default
)
{

View File

@@ -1,3 +1,4 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
@@ -5,6 +6,7 @@ using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.Zip;
@@ -185,13 +187,24 @@ public class ZipFactory
#region IWriterFactory
/// <inheritdoc/>
public IWriter OpenWriter(Stream stream, WriterOptions writerOptions) =>
new ZipWriter(stream, new ZipWriterOptions(writerOptions));
public IWriter OpenWriter(Stream stream, IWriterOptions writerOptions)
{
ZipWriterOptions zipOptions = writerOptions switch
{
ZipWriterOptions zwo => zwo,
WriterOptions wo => new ZipWriterOptions(wo),
_ => throw new ArgumentException(
$"Expected WriterOptions or ZipWriterOptions, got {writerOptions.GetType().Name}",
nameof(writerOptions)
),
};
return new ZipWriter(stream, zipOptions);
}
/// <inheritdoc/>
public IAsyncWriter OpenAsyncWriter(
Stream stream,
WriterOptions writerOptions,
IWriterOptions writerOptions,
CancellationToken cancellationToken = default
)
{

View File

@@ -1,9 +1,20 @@
using System;
using SharpCompress.Common;
using SharpCompress.Common.Options;
namespace SharpCompress.Readers;
public class ReaderOptions : OptionsBase
/// <summary>
/// Options for configuring reader behavior when opening archives.
/// </summary>
/// <remarks>
/// This class is immutable. Use the <c>with</c> expression to create modified copies:
/// <code>
/// var options = new ReaderOptions { Password = "secret" };
/// options = options with { LeaveStreamOpen = false };
/// </code>
/// </remarks>
public sealed record ReaderOptions : IReaderOptions
{
/// <summary>
/// The default buffer size for stream operations.
@@ -15,27 +26,46 @@ public class ReaderOptions : OptionsBase
)]
public const int DefaultBufferSize = 0x10000;
/// <summary>
/// SharpCompress will keep the supplied streams open. Default is true.
/// </summary>
public bool LeaveStreamOpen { get; init; } = true;
/// <summary>
/// Encoding to use for archive entry names.
/// </summary>
public IArchiveEncoding ArchiveEncoding { get; init; } = new ArchiveEncoding();
/// <summary>
/// Look for RarArchive (Check for self-extracting archives or cases where RarArchive isn't at the start of the file)
/// </summary>
public bool LookForHeader { get; set; }
public bool LookForHeader { get; init; }
public string? Password { get; set; }
/// <summary>
/// Password for encrypted archives.
/// </summary>
public string? Password { get; init; }
public bool DisableCheckIncomplete { get; set; }
/// <summary>
/// Disable checking for incomplete archives.
/// </summary>
public bool DisableCheckIncomplete { get; init; }
public int BufferSize { get; set; } = Constants.BufferSize;
/// <summary>
/// Buffer size for stream operations.
/// </summary>
public int BufferSize { get; init; } = Constants.BufferSize;
/// <summary>
/// Provide a hint for the extension of the archive being read, can speed up finding the correct decoder. Should be without the leading period in the form like: tar.gz or zip
/// </summary>
public string? ExtensionHint { get; set; }
public string? ExtensionHint { get; init; }
/// <summary>
/// An optional progress reporter for tracking extraction operations.
/// When set, progress updates will be reported as entries are extracted.
/// </summary>
public IProgress<ProgressReport>? Progress { get; set; }
public IProgress<ProgressReport>? Progress { get; init; }
/// <summary>
/// Size of the rewindable buffer for non-seekable streams.
@@ -78,5 +108,73 @@ public class ReaderOptions : OptionsBase
/// using var reader = ReaderFactory.OpenReader(networkStream, options);
/// </code>
/// </example>
public int? RewindableBufferSize { get; set; }
public int? RewindableBufferSize { get; init; }
/// <summary>
/// Creates a new ReaderOptions instance with default values.
/// </summary>
public ReaderOptions() { }
/// <summary>
/// Creates a new ReaderOptions instance with the specified password.
/// </summary>
/// <param name="password">The password for encrypted archives.</param>
public ReaderOptions(string? password) => Password = password;
/// <summary>
/// Creates a new ReaderOptions instance with the specified password and header search option.
/// </summary>
/// <param name="password">The password for encrypted archives.</param>
/// <param name="lookForHeader">Whether to search for the archive header.</param>
public ReaderOptions(string? password, bool lookForHeader)
{
Password = password;
LookForHeader = lookForHeader;
}
/// <summary>
/// Creates a new ReaderOptions instance with the specified encoding.
/// </summary>
/// <param name="encoding">The encoding for archive entry names.</param>
public ReaderOptions(IArchiveEncoding encoding) => ArchiveEncoding = encoding;
/// <summary>
/// Creates a new ReaderOptions instance with the specified password and encoding.
/// </summary>
/// <param name="password">The password for encrypted archives.</param>
/// <param name="encoding">The encoding for archive entry names.</param>
public ReaderOptions(string? password, IArchiveEncoding encoding)
{
Password = password;
ArchiveEncoding = encoding;
}
/// <summary>
/// Creates a new ReaderOptions instance with the specified stream open behavior.
/// </summary>
/// <param name="leaveStreamOpen">Whether to leave the stream open after reading.</param>
public ReaderOptions(bool leaveStreamOpen)
{
LeaveStreamOpen = leaveStreamOpen;
}
/// <summary>
/// Creates a new ReaderOptions instance with the specified stream open behavior and password.
/// </summary>
/// <param name="leaveStreamOpen">Whether to leave the stream open after reading.</param>
/// <param name="password">The password for encrypted archives.</param>
public ReaderOptions(bool leaveStreamOpen, string? password)
{
LeaveStreamOpen = leaveStreamOpen;
Password = password;
}
/// <summary>
/// Creates a new ReaderOptions instance with the specified buffer size.
/// </summary>
/// <param name="bufferSize">The buffer size for stream operations.</param>
public ReaderOptions(int bufferSize)
{
BufferSize = bufferSize;
}
}

View File

@@ -1,14 +1,15 @@
using System;
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using SharpCompress.IO;
namespace SharpCompress.Writers;
#pragma warning disable CS8618 // Non-nullable field must contain a non-null value when exiting constructor. Consider declaring as nullable.
public abstract partial class AbstractWriter(ArchiveType type, WriterOptions writerOptions)
public abstract partial class AbstractWriter(ArchiveType type, IWriterOptions writerOptions)
: IWriter,
IAsyncWriter
{
@@ -23,7 +24,7 @@ public abstract partial class AbstractWriter(ArchiveType type, WriterOptions wri
public ArchiveType WriterType { get; } = type;
protected WriterOptions WriterOptions { get; } = writerOptions;
protected IWriterOptions WriterOptions { get; } = writerOptions;
/// <summary>
/// Wraps the source stream with a progress-reporting stream if progress reporting is enabled.

View File

@@ -1,18 +1,100 @@
using System;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using D = SharpCompress.Compressors.Deflate;
namespace SharpCompress.Writers.GZip;
public class GZipWriterOptions : WriterOptions
/// <summary>
/// Options for configuring GZip writer behavior.
/// </summary>
/// <remarks>
/// This class is immutable. Use the <c>with</c> expression to create modified copies:
/// <code>
/// var options = new GZipWriterOptions { CompressionLevel = 9 };
/// options = options with { LeaveStreamOpen = false };
/// </code>
/// </remarks>
public sealed record GZipWriterOptions : IWriterOptions
{
public GZipWriterOptions()
: base(CompressionType.GZip, (int)(D.CompressionLevel.Default)) { }
/// <summary>
/// The compression type (always GZip for this writer).
/// </summary>
public CompressionType CompressionType { get; init; } = CompressionType.GZip;
internal GZipWriterOptions(WriterOptions options)
: base(options.CompressionType, (int)(D.CompressionLevel.Default))
/// <summary>
/// The compression level to be used (0-9 for Deflate).
/// </summary>
public int CompressionLevel { get; init; } = (int)D.CompressionLevel.Default;
/// <summary>
/// SharpCompress will keep the supplied streams open. Default is true.
/// </summary>
public bool LeaveStreamOpen { get; init; } = true;
/// <summary>
/// Encoding to use for archive entry names.
/// </summary>
public IArchiveEncoding ArchiveEncoding { get; init; } = new ArchiveEncoding();
/// <summary>
/// An optional progress reporter for tracking compression operations.
/// </summary>
public IProgress<ProgressReport>? Progress { get; init; }
/// <summary>
/// Creates a new GZipWriterOptions instance with default values.
/// </summary>
public GZipWriterOptions() { }
/// <summary>
/// Creates a new GZipWriterOptions instance with the specified compression level.
/// </summary>
/// <param name="compressionLevel">The compression level (0-9).</param>
public GZipWriterOptions(int compressionLevel)
{
CompressionLevel = compressionLevel;
}
/// <summary>
/// Creates a new GZipWriterOptions instance with the specified Deflate compression level.
/// </summary>
/// <param name="compressionLevel">The Deflate compression level.</param>
public GZipWriterOptions(D.CompressionLevel compressionLevel)
{
CompressionLevel = (int)compressionLevel;
}
/// <summary>
/// Creates a new GZipWriterOptions instance with the specified stream open behavior.
/// </summary>
/// <param name="leaveStreamOpen">Whether to leave the stream open after writing.</param>
public GZipWriterOptions(bool leaveStreamOpen)
{
LeaveStreamOpen = leaveStreamOpen;
}
/// <summary>
/// Creates a new GZipWriterOptions instance from an existing WriterOptions instance.
/// </summary>
/// <param name="options">The WriterOptions to copy values from.</param>
public GZipWriterOptions(WriterOptions options)
{
CompressionLevel = options.CompressionLevel;
LeaveStreamOpen = options.LeaveStreamOpen;
ArchiveEncoding = options.ArchiveEncoding;
Progress = options.Progress;
}
/// <summary>
/// Creates a new GZipWriterOptions instance from an existing IWriterOptions instance.
/// </summary>
/// <param name="options">The IWriterOptions to copy values from.</param>
public GZipWriterOptions(IWriterOptions options)
{
CompressionLevel = options.CompressionLevel;
LeaveStreamOpen = options.LeaveStreamOpen;
ArchiveEncoding = options.ArchiveEncoding;
Progress = options.Progress;
}
}

View File

@@ -1,16 +1,17 @@
using System.IO;
using System.Threading;
using SharpCompress.Common.Options;
using SharpCompress.Factories;
namespace SharpCompress.Writers;
public interface IWriterFactory : IFactory
{
IWriter OpenWriter(Stream stream, WriterOptions writerOptions);
IWriter OpenWriter(Stream stream, IWriterOptions writerOptions);
IAsyncWriter OpenAsyncWriter(
Stream stream,
WriterOptions writerOptions,
IWriterOptions writerOptions,
CancellationToken cancellationToken = default
);
}

View File

@@ -1,11 +1,12 @@
#if NET8_0_OR_GREATER
using System.IO;
using System.Threading;
using SharpCompress.Common.Options;
namespace SharpCompress.Writers;
public interface IWriterOpenable<TWriterOptions>
where TWriterOptions : WriterOptions
where TWriterOptions : IWriterOptions
{
public static abstract IWriter OpenWriter(string filePath, TWriterOptions writerOptions);

View File

@@ -1,33 +1,120 @@
using System;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using SharpCompress.Common.Tar.Headers;
namespace SharpCompress.Writers.Tar;
public class TarWriterOptions : WriterOptions
/// <summary>
/// Options for configuring Tar writer behavior.
/// </summary>
/// <remarks>
/// This class is immutable. Use the <c>with</c> expression to create modified copies:
/// <code>
/// var options = new TarWriterOptions(CompressionType.GZip, true);
/// options = options with { HeaderFormat = TarHeaderWriteFormat.V7 };
/// </code>
/// </remarks>
public sealed record TarWriterOptions : IWriterOptions
{
/// <summary>
/// The compression type to use for the archive.
/// </summary>
public CompressionType CompressionType { get; init; }
/// <summary>
/// The compression level to be used when the compression type supports variable levels.
/// </summary>
public int CompressionLevel { get; init; }
/// <summary>
/// SharpCompress will keep the supplied streams open. Default is true.
/// </summary>
public bool LeaveStreamOpen { get; init; } = true;
/// <summary>
/// Encoding to use for archive entry names.
/// </summary>
public IArchiveEncoding ArchiveEncoding { get; init; } = new ArchiveEncoding();
/// <summary>
/// An optional progress reporter for tracking compression operations.
/// </summary>
public IProgress<ProgressReport>? Progress { get; init; }
/// <summary>
/// Indicates if archive should be finalized (by 2 empty blocks) on close.
/// </summary>
public bool FinalizeArchiveOnClose { get; }
public bool FinalizeArchiveOnClose { get; init; } = true;
public TarHeaderWriteFormat HeaderFormat { get; }
/// <summary>
/// The format to use when writing tar headers.
/// </summary>
public TarHeaderWriteFormat HeaderFormat { get; init; } =
TarHeaderWriteFormat.GNU_TAR_LONG_LINK;
/// <summary>
/// Creates a new TarWriterOptions instance with the specified compression type and finalization option.
/// </summary>
/// <param name="compressionType">The compression type for the archive.</param>
/// <param name="finalizeArchiveOnClose">Whether to finalize the archive on close.</param>
public TarWriterOptions(CompressionType compressionType, bool finalizeArchiveOnClose)
{
CompressionType = compressionType;
FinalizeArchiveOnClose = finalizeArchiveOnClose;
CompressionLevel = compressionType switch
{
CompressionType.ZStandard => 3,
_ => 0,
};
}
/// <summary>
/// Creates a new TarWriterOptions instance with the specified compression type, finalization option, and header format.
/// </summary>
/// <param name="compressionType">The compression type for the archive.</param>
/// <param name="finalizeArchiveOnClose">Whether to finalize the archive on close.</param>
/// <param name="headerFormat">The tar header format.</param>
public TarWriterOptions(
CompressionType compressionType,
bool finalizeArchiveOnClose,
TarHeaderWriteFormat headerFormat = TarHeaderWriteFormat.GNU_TAR_LONG_LINK
TarHeaderWriteFormat headerFormat
)
: base(compressionType)
: this(compressionType, finalizeArchiveOnClose)
{
FinalizeArchiveOnClose = finalizeArchiveOnClose;
HeaderFormat = headerFormat;
}
internal TarWriterOptions(WriterOptions options)
: this(options.CompressionType, true)
/// <summary>
/// Creates a new TarWriterOptions instance from an existing WriterOptions instance.
/// </summary>
/// <param name="options">The WriterOptions to copy values from.</param>
public TarWriterOptions(WriterOptions options)
{
LeaveStreamOpen = options.LeaveStreamOpen;
CompressionType = options.CompressionType;
CompressionLevel = options.CompressionLevel;
LeaveStreamOpen = options.LeaveStreamOpen;
ArchiveEncoding = options.ArchiveEncoding;
Progress = options.Progress;
}
/// <summary>
/// Creates a new TarWriterOptions instance from an existing IWriterOptions instance.
/// </summary>
/// <param name="options">The IWriterOptions to copy values from.</param>
public TarWriterOptions(IWriterOptions options)
{
CompressionType = options.CompressionType;
CompressionLevel = options.CompressionLevel;
LeaveStreamOpen = options.LeaveStreamOpen;
ArchiveEncoding = options.ArchiveEncoding;
Progress = options.Progress;
}
/// <summary>
/// Implicit conversion from CompressionType to TarWriterOptions with finalize enabled.
/// </summary>
/// <param name="compressionType">The compression type.</param>
public static implicit operator TarWriterOptions(CompressionType compressionType) =>
new(compressionType, true);
}

View File

@@ -3,6 +3,7 @@ using System.IO;
using System.Linq;
using System.Threading;
using SharpCompress.Common;
using SharpCompress.Common.Options;
namespace SharpCompress.Writers;
@@ -11,7 +12,7 @@ public static class WriterFactory
public static IWriter OpenWriter(
string filePath,
ArchiveType archiveType,
WriterOptions writerOptions
IWriterOptions writerOptions
)
{
filePath.NotNullOrEmpty(nameof(filePath));
@@ -21,7 +22,7 @@ public static class WriterFactory
public static IWriter OpenWriter(
FileInfo fileInfo,
ArchiveType archiveType,
WriterOptions writerOptions
IWriterOptions writerOptions
)
{
fileInfo.NotNull(nameof(fileInfo));
@@ -31,7 +32,7 @@ public static class WriterFactory
public static IAsyncWriter OpenAsyncWriter(
string filePath,
ArchiveType archiveType,
WriterOptions writerOptions,
IWriterOptions writerOptions,
CancellationToken cancellationToken = default
)
{
@@ -47,7 +48,7 @@ public static class WriterFactory
public static IAsyncWriter OpenAsyncWriter(
FileInfo fileInfo,
ArchiveType archiveType,
WriterOptions writerOptions,
IWriterOptions writerOptions,
CancellationToken cancellationToken = default
)
{
@@ -63,7 +64,7 @@ public static class WriterFactory
public static IWriter OpenWriter(
Stream stream,
ArchiveType archiveType,
WriterOptions writerOptions
IWriterOptions writerOptions
)
{
var factory = Factories
@@ -89,7 +90,7 @@ public static class WriterFactory
public static IAsyncWriter OpenAsyncWriter(
Stream stream,
ArchiveType archiveType,
WriterOptions writerOptions,
IWriterOptions writerOptions,
CancellationToken cancellationToken = default
)
{

View File

@@ -1,11 +1,58 @@
using System;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using D = SharpCompress.Compressors.Deflate;
namespace SharpCompress.Writers;
public class WriterOptions : OptionsBase
/// <summary>
/// Options for configuring writer behavior when creating archives.
/// </summary>
/// <remarks>
/// This class is immutable. Use the <c>with</c> expression to create modified copies:
/// <code>
/// var options = new WriterOptions(CompressionType.Zip);
/// options = options with { LeaveStreamOpen = false };
/// </code>
/// </remarks>
public sealed record WriterOptions : IWriterOptions
{
/// <summary>
/// The compression type to use for the archive.
/// </summary>
public CompressionType CompressionType { get; init; }
/// <summary>
/// The compression level to be used when the compression type supports variable levels.
/// Valid ranges depend on the compression algorithm:
/// - Deflate/GZip: 0-9 (0=no compression, 6=default, 9=best compression)
/// - ZStandard: 1-22 (1=fastest, 3=default, 22=best compression)
/// Note: BZip2 and LZMA do not support compression levels in this implementation.
/// Defaults are set automatically based on compression type in the constructor.
/// </summary>
public int CompressionLevel { get; init; }
/// <summary>
/// SharpCompress will keep the supplied streams open. Default is true.
/// </summary>
public bool LeaveStreamOpen { get; init; } = true;
/// <summary>
/// Encoding to use for archive entry names.
/// </summary>
public IArchiveEncoding ArchiveEncoding { get; init; } = new ArchiveEncoding();
/// <summary>
/// An optional progress reporter for tracking compression operations.
/// When set, progress updates will be reported as entries are written.
/// </summary>
public IProgress<ProgressReport>? Progress { get; init; }
/// <summary>
/// Creates a new WriterOptions instance with the specified compression type.
/// Compression level is automatically set based on the compression type.
/// </summary>
/// <param name="compressionType">The compression type for the archive.</param>
public WriterOptions(CompressionType compressionType)
{
CompressionType = compressionType;
@@ -19,30 +66,48 @@ public class WriterOptions : OptionsBase
};
}
/// <summary>
/// Creates a new WriterOptions instance with the specified compression type and level.
/// </summary>
/// <param name="compressionType">The compression type for the archive.</param>
/// <param name="compressionLevel">The compression level (algorithm-specific).</param>
public WriterOptions(CompressionType compressionType, int compressionLevel)
{
CompressionType = compressionType;
CompressionLevel = compressionLevel;
}
public CompressionType CompressionType { get; set; }
/// <summary>
/// Creates a new WriterOptions instance with the specified compression type and stream open behavior.
/// </summary>
/// <param name="compressionType">The compression type for the archive.</param>
/// <param name="leaveStreamOpen">Whether to leave the stream open after writing.</param>
public WriterOptions(CompressionType compressionType, bool leaveStreamOpen)
: this(compressionType)
{
LeaveStreamOpen = leaveStreamOpen;
}
/// <summary>
/// The compression level to be used when the compression type supports variable levels.
/// Valid ranges depend on the compression algorithm:
/// - Deflate/GZip: 0-9 (0=no compression, 6=default, 9=best compression)
/// - ZStandard: 1-22 (1=fastest, 3=default, 22=best compression)
/// Note: BZip2 and LZMA do not support compression levels in this implementation.
/// Defaults are set automatically based on compression type in the constructor.
/// Creates a new WriterOptions instance with the specified compression type, level, and stream open behavior.
/// </summary>
public int CompressionLevel { get; set; }
/// <param name="compressionType">The compression type for the archive.</param>
/// <param name="compressionLevel">The compression level (algorithm-specific).</param>
/// <param name="leaveStreamOpen">Whether to leave the stream open after writing.</param>
public WriterOptions(
CompressionType compressionType,
int compressionLevel,
bool leaveStreamOpen
)
: this(compressionType, compressionLevel)
{
LeaveStreamOpen = leaveStreamOpen;
}
/// <summary>
/// An optional progress reporter for tracking compression operations.
/// When set, progress updates will be reported as entries are written.
/// Implicit conversion from CompressionType to WriterOptions.
/// </summary>
public IProgress<ProgressReport>? Progress { get; set; }
/// <param name="compressionType">The compression type.</param>
public static implicit operator WriterOptions(CompressionType compressionType) =>
new(compressionType);
}

View File

@@ -1,78 +1,52 @@
using System;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using SharpCompress.Compressors.Deflate;
using D = SharpCompress.Compressors.Deflate;
namespace SharpCompress.Writers.Zip;
public class ZipWriterOptions : WriterOptions
/// <summary>
/// Options for configuring Zip writer behavior.
/// </summary>
/// <remarks>
/// This class is immutable. Use the <c>with</c> expression to create modified copies:
/// <code>
/// var options = new ZipWriterOptions(CompressionType.Zip);
/// options = options with { UseZip64 = true };
/// </code>
/// </remarks>
public sealed record ZipWriterOptions : IWriterOptions
{
public ZipWriterOptions(
CompressionType compressionType,
CompressionLevel compressionLevel = D.CompressionLevel.Default
)
: base(compressionType, (int)compressionLevel) { }
internal ZipWriterOptions(WriterOptions options)
: base(options.CompressionType)
{
LeaveStreamOpen = options.LeaveStreamOpen;
ArchiveEncoding = options.ArchiveEncoding;
CompressionLevel = options.CompressionLevel;
if (options is ZipWriterOptions writerOptions)
{
UseZip64 = writerOptions.UseZip64;
ArchiveComment = writerOptions.ArchiveComment;
}
}
/// <summary>
/// The compression type to use for the archive.
/// </summary>
public CompressionType CompressionType { get; init; }
/// <summary>
/// Sets the compression level for Deflate compression (0-9).
/// This is a convenience method that sets the CompressionLevel property for Deflate compression.
/// The compression level to be used when the compression type supports variable levels.
/// </summary>
/// <param name="level">Deflate compression level (0=no compression, 6=default, 9=best compression)</param>
public void SetDeflateCompressionLevel(CompressionLevel level)
{
CompressionLevel = (int)level;
}
public int CompressionLevel { get; init; }
/// <summary>
/// Sets the compression level for ZStandard compression (1-22).
/// This is a convenience method that sets the CompressionLevel property for ZStandard compression.
/// SharpCompress will keep the supplied streams open. Default is true.
/// </summary>
/// <param name="level">ZStandard compression level (1=fastest, 3=default, 22=best compression)</param>
/// <exception cref="ArgumentOutOfRangeException">Thrown when level is not between 1 and 22</exception>
public void SetZStandardCompressionLevel(int level)
{
if (level < 1 || level > 22)
{
throw new ArgumentOutOfRangeException(
nameof(level),
"ZStandard compression level must be between 1 and 22"
);
}
CompressionLevel = level;
}
public bool LeaveStreamOpen { get; init; } = true;
/// <summary>
/// Legacy property for Deflate compression levels.
/// Valid range: 0-9 (0=no compression, 6=default, 9=best compression).
/// Encoding to use for archive entry names.
/// </summary>
/// <remarks>
/// This property is deprecated. Use <see cref="WriterOptions.CompressionLevel"/> or <see cref="SetDeflateCompressionLevel"/> instead.
/// </remarks>
[Obsolete(
"Use CompressionLevel property or SetDeflateCompressionLevel method instead. This property will be removed in a future version."
)]
public CompressionLevel DeflateCompressionLevel
{
get => (CompressionLevel)Math.Min(CompressionLevel, 9);
set => CompressionLevel = (int)value;
}
public IArchiveEncoding ArchiveEncoding { get; init; } = new ArchiveEncoding();
public string? ArchiveComment { get; set; }
/// <summary>
/// An optional progress reporter for tracking compression operations.
/// </summary>
public IProgress<ProgressReport>? Progress { get; init; }
/// <summary>
/// Optional comment for the archive.
/// </summary>
public string? ArchiveComment { get; init; }
/// <summary>
/// Sets a value indicating if zip64 support is enabled.
@@ -81,5 +55,72 @@ public class ZipWriterOptions : WriterOptions
/// Archives larger than 4GiB are supported as long as all streams
/// are less than 4GiB in length.
/// </summary>
public bool UseZip64 { get; set; }
public bool UseZip64 { get; init; }
/// <summary>
/// Creates a new ZipWriterOptions instance with the specified compression type.
/// </summary>
/// <param name="compressionType">The compression type for the archive.</param>
public ZipWriterOptions(CompressionType compressionType)
{
CompressionType = compressionType;
CompressionLevel = compressionType switch
{
CompressionType.ZStandard => 3,
CompressionType.Deflate => (int)D.CompressionLevel.Default,
CompressionType.Deflate64 => (int)D.CompressionLevel.Default,
CompressionType.GZip => (int)D.CompressionLevel.Default,
_ => 0,
};
}
/// <summary>
/// Creates a new ZipWriterOptions instance with the specified compression type and level.
/// </summary>
/// <param name="compressionType">The compression type for the archive.</param>
/// <param name="compressionLevel">The compression level (algorithm-specific).</param>
public ZipWriterOptions(CompressionType compressionType, int compressionLevel)
{
CompressionType = compressionType;
CompressionLevel = compressionLevel;
}
/// <summary>
/// Creates a new ZipWriterOptions instance with the specified compression type and Deflate compression level.
/// </summary>
/// <param name="compressionType">The compression type for the archive.</param>
/// <param name="compressionLevel">The Deflate compression level.</param>
public ZipWriterOptions(CompressionType compressionType, D.CompressionLevel compressionLevel)
: this(compressionType, (int)compressionLevel) { }
/// <summary>
/// Creates a new ZipWriterOptions instance from an existing WriterOptions instance.
/// </summary>
/// <param name="options">The WriterOptions to copy values from.</param>
public ZipWriterOptions(WriterOptions options)
: this(options.CompressionType, options.CompressionLevel)
{
LeaveStreamOpen = options.LeaveStreamOpen;
ArchiveEncoding = options.ArchiveEncoding;
Progress = options.Progress;
}
/// <summary>
/// Creates a new ZipWriterOptions instance from an existing IWriterOptions instance.
/// </summary>
/// <param name="options">The IWriterOptions to copy values from.</param>
public ZipWriterOptions(IWriterOptions options)
: this(options.CompressionType, options.CompressionLevel)
{
LeaveStreamOpen = options.LeaveStreamOpen;
ArchiveEncoding = options.ArchiveEncoding;
Progress = options.Progress;
}
/// <summary>
/// Implicit conversion from CompressionType to ZipWriterOptions.
/// </summary>
/// <param name="compressionType">The compression type.</param>
public static implicit operator ZipWriterOptions(CompressionType compressionType) =>
new(compressionType);
}

View File

@@ -216,9 +216,9 @@
"net10.0": {
"Microsoft.NET.ILLink.Tasks": {
"type": "Direct",
"requested": "[10.0.2, )",
"resolved": "10.0.2",
"contentHash": "sXdDtMf2qcnbygw9OdE535c2lxSxrZP8gO4UhDJ0xiJbl1wIqXS1OTcTDFTIJPOFd6Mhcm8gPEthqWGUxBsTqw=="
"requested": "[10.0.0, )",
"resolved": "10.0.0",
"contentHash": "kICGrGYEzCNI3wPzfEXcwNHgTvlvVn9yJDhSdRK+oZQy4jvYH529u7O0xf5ocQKzOMjfS07+3z9PKRIjrFMJDA=="
},
"Microsoft.NETFramework.ReferenceAssemblies": {
"type": "Direct",
@@ -264,9 +264,9 @@
"net8.0": {
"Microsoft.NET.ILLink.Tasks": {
"type": "Direct",
"requested": "[8.0.23, )",
"resolved": "8.0.23",
"contentHash": "GqHiB1HbbODWPbY/lc5xLQH8siEEhNA0ptpJCC6X6adtAYNEzu5ZlqV3YHA3Gh7fuEwgA8XqVwMtH2KNtuQM1Q=="
"requested": "[8.0.22, )",
"resolved": "8.0.22",
"contentHash": "MhcMithKEiyyNkD2ZfbDZPmcOdi0GheGfg8saEIIEfD/fol3iHmcV8TsZkD4ZYz5gdUuoX4YtlVySUU7Sxl9SQ=="
},
"Microsoft.NETFramework.ReferenceAssemblies": {
"type": "Direct",

View File

@@ -1,39 +0,0 @@
using System;
using System.IO;
namespace SharpCompress.Performance.Benchmarks;
/// <summary>
/// Base class for benchmarks that read archives from the shared TestArchives
/// folder. The folder is resolved once, during type initialization, and the
/// type fails fast when it cannot be located.
/// </summary>
public abstract class ArchiveBenchmarkBase
{
    protected static readonly string TEST_ARCHIVES_PATH;

    static ArchiveBenchmarkBase()
    {
        var baseDir = AppDomain.CurrentDomain.BaseDirectory;

        // Locate the performance project's folder inside the output path so we
        // can walk back up to the solution root.
        var markerIndex = baseDir.IndexOf(
            "SharpCompress.Performance",
            StringComparison.OrdinalIgnoreCase
        );
        if (markerIndex < 0)
        {
            throw new InvalidOperationException(
                "Could not find SharpCompress.Performance in the base directory path"
            );
        }

        var projectParent = baseDir.Substring(0, markerIndex);
        var solutionRoot =
            Path.GetDirectoryName(projectParent) ?? throw new InvalidOperationException();
        TEST_ARCHIVES_PATH = Path.Combine(solutionRoot, "TestArchives", "Archives");

        if (Directory.Exists(TEST_ARCHIVES_PATH))
        {
            return;
        }

        throw new InvalidOperationException(
            $"Test archives directory not found: {TEST_ARCHIVES_PATH}"
        );
    }

    /// <summary>
    /// Returns the full path of a test archive inside the resolved folder.
    /// </summary>
    /// <param name="fileName">The archive file name.</param>
    protected static string GetArchivePath(string fileName)
    {
        return Path.Combine(TEST_ARCHIVES_PATH, fileName);
    }
}

View File

@@ -1,46 +0,0 @@
using System;
using System.IO;
using BenchmarkDotNet.Attributes;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;
namespace SharpCompress.Performance.Benchmarks;
/// <summary>
/// Measures raw GZip stream compression and decompression throughput on a
/// fixed 100KB pseudo-random payload.
/// </summary>
[MemoryDiagnoser]
public class GZipBenchmarks
{
    private byte[] _sourceData = null!;
    private byte[] _compressedData = null!;

    [GlobalSetup]
    public void Setup()
    {
        // 100KB of deterministic pseudo-random payload (fixed seed for repeatability).
        _sourceData = new byte[100 * 1024];
        new Random(42).NextBytes(_sourceData);

        // Compress once up front so the decompression benchmark has an input.
        using var buffer = new MemoryStream();
        using (var gzip = new GZipStream(buffer, CompressionMode.Compress))
        {
            gzip.Write(_sourceData, 0, _sourceData.Length);
        }
        _compressedData = buffer.ToArray();
    }

    [Benchmark(Description = "GZip: Compress 100KB")]
    public void GZipCompress()
    {
        using var sink = new MemoryStream();
        using var gzip = new GZipStream(sink, CompressionMode.Compress);
        gzip.Write(_sourceData, 0, _sourceData.Length);
    }

    [Benchmark(Description = "GZip: Decompress 100KB")]
    public void GZipDecompress()
    {
        using var source = new MemoryStream(_compressedData);
        using var gzip = new GZipStream(source, CompressionMode.Decompress);
        gzip.CopyTo(Stream.Null);
    }
}

View File

@@ -1,46 +0,0 @@
using System;
using System.IO;
using System.Linq;
using BenchmarkDotNet.Attributes;
using SharpCompress.Archives.Rar;
using SharpCompress.Readers;
namespace SharpCompress.Performance.Benchmarks;
/// <summary>
/// Measures RAR extraction through both the random-access Archive API and the
/// forward-only Reader API, using an in-memory copy of the fixture archive.
/// </summary>
[MemoryDiagnoser]
public class RarBenchmarks : ArchiveBenchmarkBase
{
    private byte[] _rarBytes = null!;

    [GlobalSetup]
    public void Setup()
    {
        // Load the fixture once so benchmark iterations never touch the disk.
        _rarBytes = File.ReadAllBytes(GetArchivePath("Rar.rar"));
    }

    [Benchmark(Description = "Rar: Extract all entries (Archive API)")]
    public void RarExtractArchiveApi()
    {
        using var source = new MemoryStream(_rarBytes);
        using var archive = RarArchive.OpenArchive(source);
        foreach (var entry in archive.Entries)
        {
            if (entry.IsDirectory)
            {
                continue;
            }
            using var content = entry.OpenEntryStream();
            content.CopyTo(Stream.Null);
        }
    }

    [Benchmark(Description = "Rar: Extract all entries (Reader API)")]
    public void RarExtractReaderApi()
    {
        using var source = new MemoryStream(_rarBytes);
        using var reader = ReaderFactory.OpenReader(source);
        while (reader.MoveToNextEntry())
        {
            if (!reader.Entry.IsDirectory)
            {
                reader.WriteEntryTo(Stream.Null);
            }
        }
    }
}

View File

@@ -1,45 +0,0 @@
using System;
using System.IO;
using System.Linq;
using BenchmarkDotNet.Attributes;
using SharpCompress.Archives.SevenZip;
namespace SharpCompress.Performance.Benchmarks;
/// <summary>
/// Measures 7Zip extraction for both LZMA and LZMA2 compressed fixtures,
/// each loaded fully into memory during setup.
/// </summary>
[MemoryDiagnoser]
public class SevenZipBenchmarks : ArchiveBenchmarkBase
{
    private byte[] _lzmaBytes = null!;
    private byte[] _lzma2Bytes = null!;

    [GlobalSetup]
    public void Setup()
    {
        // Load both fixtures once so benchmark iterations never touch the disk.
        _lzmaBytes = File.ReadAllBytes(GetArchivePath("7Zip.LZMA.7z"));
        _lzma2Bytes = File.ReadAllBytes(GetArchivePath("7Zip.LZMA2.7z"));
    }

    [Benchmark(Description = "7Zip LZMA: Extract all entries")]
    public void SevenZipLzmaExtract() => ExtractAll(_lzmaBytes);

    [Benchmark(Description = "7Zip LZMA2: Extract all entries")]
    public void SevenZipLzma2Extract() => ExtractAll(_lzma2Bytes);

    // Shared extraction loop: open the archive over an in-memory copy and
    // drain every file entry to the null sink.
    private static void ExtractAll(byte[] archiveBytes)
    {
        using var source = new MemoryStream(archiveBytes);
        using var archive = SevenZipArchive.OpenArchive(source);
        foreach (var entry in archive.Entries)
        {
            if (entry.IsDirectory)
            {
                continue;
            }
            using var content = entry.OpenEntryStream();
            content.CopyTo(Stream.Null);
        }
    }
}

View File

@@ -1,81 +0,0 @@
using System;
using System.IO;
using System.Linq;
using BenchmarkDotNet.Attributes;
using SharpCompress.Archives.Tar;
using SharpCompress.Common;
using SharpCompress.Readers;
using SharpCompress.Writers;
namespace SharpCompress.Performance.Benchmarks;
/// <summary>
/// Measures Tar extraction (Archive API, Reader API, and gzip-compressed)
/// plus creation of a small Tar archive in memory.
/// </summary>
[MemoryDiagnoser]
public class TarBenchmarks : ArchiveBenchmarkBase
{
    private byte[] _tarBytes = null!;
    private byte[] _tarGzBytes = null!;

    [GlobalSetup]
    public void Setup()
    {
        // Load both fixtures once so benchmark iterations never touch the disk.
        _tarBytes = File.ReadAllBytes(GetArchivePath("Tar.tar"));
        _tarGzBytes = File.ReadAllBytes(GetArchivePath("Tar.tar.gz"));
    }

    [Benchmark(Description = "Tar: Extract all entries (Archive API)")]
    public void TarExtractArchiveApi()
    {
        using var source = new MemoryStream(_tarBytes);
        using var archive = TarArchive.OpenArchive(source);
        foreach (var entry in archive.Entries)
        {
            if (entry.IsDirectory)
            {
                continue;
            }
            using var content = entry.OpenEntryStream();
            content.CopyTo(Stream.Null);
        }
    }

    [Benchmark(Description = "Tar: Extract all entries (Reader API)")]
    public void TarExtractReaderApi()
    {
        using var source = new MemoryStream(_tarBytes);
        using var reader = ReaderFactory.OpenReader(source);
        while (reader.MoveToNextEntry())
        {
            if (!reader.Entry.IsDirectory)
            {
                reader.WriteEntryTo(Stream.Null);
            }
        }
    }

    [Benchmark(Description = "Tar.GZip: Extract all entries")]
    public void TarGzipExtract()
    {
        using var source = new MemoryStream(_tarGzBytes);
        using var archive = TarArchive.OpenArchive(source);
        foreach (var entry in archive.Entries)
        {
            if (entry.IsDirectory)
            {
                continue;
            }
            using var content = entry.OpenEntryStream();
            content.CopyTo(Stream.Null);
        }
    }

    [Benchmark(Description = "Tar: Create archive with small files")]
    public void TarCreateSmallFiles()
    {
        using var output = new MemoryStream();
        using var writer = WriterFactory.OpenWriter(
            output,
            ArchiveType.Tar,
            new WriterOptions(CompressionType.None) { LeaveStreamOpen = true }
        );

        // Write ten 1KB entries of zeroed data.
        for (var i = 0; i < 10; i++)
        {
            using var payload = new MemoryStream(new byte[1024]);
            writer.Write($"file{i}.txt", payload);
        }
    }
}

View File

@@ -1,69 +0,0 @@
using System;
using System.IO;
using System.Linq;
using BenchmarkDotNet.Attributes;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.Readers;
using SharpCompress.Writers;
namespace SharpCompress.Performance.Benchmarks;
/// <summary>
/// Measures Zip extraction (Archive API and Reader API) plus creation of a
/// small deflate-compressed Zip archive in memory.
/// </summary>
[MemoryDiagnoser]
public class ZipBenchmarks : ArchiveBenchmarkBase
{
    private string _archivePath = null!;
    private byte[] _archiveBytes = null!;

    [GlobalSetup]
    public void Setup()
    {
        // Load the fixture once so benchmark iterations never touch the disk.
        _archivePath = GetArchivePath("Zip.deflate.zip");
        _archiveBytes = File.ReadAllBytes(_archivePath);
    }

    [Benchmark(Description = "Zip: Extract all entries (Archive API)")]
    public void ZipExtractArchiveApi()
    {
        using var source = new MemoryStream(_archiveBytes);
        using var archive = ZipArchive.OpenArchive(source);
        foreach (var entry in archive.Entries)
        {
            if (entry.IsDirectory)
            {
                continue;
            }
            using var content = entry.OpenEntryStream();
            content.CopyTo(Stream.Null);
        }
    }

    [Benchmark(Description = "Zip: Extract all entries (Reader API)")]
    public void ZipExtractReaderApi()
    {
        using var source = new MemoryStream(_archiveBytes);
        using var reader = ReaderFactory.OpenReader(source);
        while (reader.MoveToNextEntry())
        {
            if (!reader.Entry.IsDirectory)
            {
                reader.WriteEntryTo(Stream.Null);
            }
        }
    }

    [Benchmark(Description = "Zip: Create archive with small files")]
    public void ZipCreateSmallFiles()
    {
        using var output = new MemoryStream();
        using var writer = WriterFactory.OpenWriter(
            output,
            ArchiveType.Zip,
            new WriterOptions(CompressionType.Deflate) { LeaveStreamOpen = true }
        );

        // Write ten 1KB entries of zeroed data.
        for (var i = 0; i < 10; i++)
        {
            using var payload = new MemoryStream(new byte[1024]);
            writer.Write($"file{i}.txt", payload);
        }
    }
}

View File

@@ -1,112 +1,54 @@
using System;
using BenchmarkDotNet.Configs;
using BenchmarkDotNet.Jobs;
using BenchmarkDotNet.Running;
using BenchmarkDotNet.Toolchains.InProcess.Emit;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Performance;
using SharpCompress.Readers;
using SharpCompress.Test;
namespace SharpCompress.Performance;
var index = AppDomain.CurrentDomain.BaseDirectory.IndexOf(
"SharpCompress.Performance",
StringComparison.OrdinalIgnoreCase
);
var path = AppDomain.CurrentDomain.BaseDirectory.Substring(0, index);
var SOLUTION_BASE_PATH = Path.GetDirectoryName(path) ?? throw new ArgumentNullException();
public class Program
var TEST_ARCHIVES_PATH = Path.Combine(SOLUTION_BASE_PATH, "TestArchives", "Archives");
//using var _ = JetbrainsProfiler.Memory($"/Users/adam/temp/");
using (var __ = JetbrainsProfiler.Cpu($"/Users/adam/temp/"))
{
public static void Main(string[] args)
var testArchives = new[]
{
// Check if profiling mode is requested
if (args.Length > 0 && args[0].Equals("--profile", StringComparison.OrdinalIgnoreCase))
"Rar.Audio_program.rar",
//"64bitstream.zip.7z",
//"TarWithSymlink.tar.gz"
};
var arcs = testArchives.Select(a => Path.Combine(TEST_ARCHIVES_PATH, a)).ToArray();
for (int i = 0; i < 50; i++)
{
using var found = ArchiveFactory.OpenArchive(arcs[0]);
foreach (var entry in found.Entries.Where(entry => !entry.IsDirectory))
{
RunWithProfiler(args);
return;
Console.WriteLine($"Extracting {entry.Key}");
using var entryStream = entry.OpenEntryStream();
entryStream.CopyTo(Stream.Null);
}
/*using var found = ReaderFactory.OpenReader(arcs[0]);
while (found.MoveToNextEntry())
{
var entry = found.Entry;
if (entry.IsDirectory)
continue;
// Default: Run BenchmarkDotNet
var config = DefaultConfig.Instance.AddJob(
Job.Default.WithToolchain(InProcessEmitToolchain.Instance)
.WithWarmupCount(3) // Minimal warmup iterations for CI
.WithIterationCount(10) // Minimal measurement iterations for CI
.WithInvocationCount(10)
.WithUnrollFactor(1)
);
BenchmarkSwitcher.FromAssembly(typeof(Program).Assembly).Run(args, config);
Console.WriteLine($"Extracting {entry.Key}");
found.WriteEntryTo(Stream.Null);
}*/
}
private static void RunWithProfiler(string[] args)
{
var profileType = "cpu"; // Default to CPU profiling
var outputPath = "./profiler-snapshots";
// Parse arguments
for (int i = 1; i < args.Length; i++)
{
if (args[i].Equals("--type", StringComparison.OrdinalIgnoreCase) && i + 1 < args.Length)
{
profileType = args[++i].ToLowerInvariant();
}
else if (
args[i].Equals("--output", StringComparison.OrdinalIgnoreCase)
&& i + 1 < args.Length
)
{
outputPath = args[++i];
}
}
Console.WriteLine($"Running with JetBrains Profiler ({profileType} mode)");
Console.WriteLine($"Output path: {outputPath}");
Console.WriteLine();
Console.WriteLine(
"Usage: dotnet run --project SharpCompress.Performance.csproj -c Release -- --profile [--type cpu|memory] [--output <path>]"
);
Console.WriteLine();
// Run a sample benchmark with profiling
RunSampleBenchmarkWithProfiler(profileType, outputPath);
}
private static void RunSampleBenchmarkWithProfiler(string profileType, string outputPath)
{
Console.WriteLine("Running sample benchmark with profiler...");
Console.WriteLine("Note: JetBrains profiler requires the profiler tools to be installed.");
Console.WriteLine("Install from: https://www.jetbrains.com/profiler/");
Console.WriteLine();
try
{
IDisposable? profiler = null;
if (profileType == "cpu")
{
profiler = Test.JetbrainsProfiler.Cpu(outputPath);
}
else if (profileType == "memory")
{
profiler = Test.JetbrainsProfiler.Memory(outputPath);
}
using (profiler)
{
// Run a simple benchmark iteration
var zipBenchmark = new Benchmarks.ZipBenchmarks();
zipBenchmark.Setup();
Console.WriteLine("Running benchmark iterations...");
for (int i = 0; i < 10; i++)
{
zipBenchmark.ZipExtractArchiveApi();
if (i % 3 == 0)
{
Console.Write(".");
}
}
Console.WriteLine();
Console.WriteLine("Benchmark iterations completed.");
}
Console.WriteLine($"Profiler snapshot saved to: {outputPath}");
}
catch (Exception ex)
{
Console.WriteLine($"Error running profiler: {ex.Message}");
Console.WriteLine("Make sure JetBrains profiler tools are installed and accessible.");
}
}
Console.WriteLine("Still running...");
}
await Task.Delay(500);

View File

@@ -1,143 +0,0 @@
# SharpCompress Performance Benchmarks
This project contains performance benchmarks for SharpCompress using [BenchmarkDotNet](https://benchmarkdotnet.org/).
## Overview
The benchmarks test all major archive formats supported by SharpCompress:
- **Zip**: Read (Archive & Reader API) and Write operations
- **Tar**: Read (Archive & Reader API) and Write operations, including Tar.GZip
- **Rar**: Read operations (Archive & Reader API)
- **7Zip**: Read operations for LZMA and LZMA2 compression
- **GZip**: Compression and decompression
## Running Benchmarks
### Run all benchmarks
```bash
dotnet run --project tests/SharpCompress.Performance/SharpCompress.Performance.csproj --configuration Release
```
### Run specific benchmark class
```bash
dotnet run --project tests/SharpCompress.Performance/SharpCompress.Performance.csproj --configuration Release -- --filter "*ZipBenchmarks*"
```
### Run with specific job configuration
```bash
# Quick run for testing (1 warmup, 1 iteration)
dotnet run --project tests/SharpCompress.Performance/SharpCompress.Performance.csproj --configuration Release -- --job Dry
# Short run (3 warmup, 3 iterations)
dotnet run --project tests/SharpCompress.Performance/SharpCompress.Performance.csproj --configuration Release -- --job Short
# Medium run (default)
dotnet run --project tests/SharpCompress.Performance/SharpCompress.Performance.csproj --configuration Release -- --job Medium
```
### Export results
```bash
# Export to JSON
dotnet run --project tests/SharpCompress.Performance/SharpCompress.Performance.csproj --configuration Release -- --exporters json
# Export to multiple formats
dotnet run --project tests/SharpCompress.Performance/SharpCompress.Performance.csproj --configuration Release -- --exporters json markdown html
```
### List available benchmarks
```bash
dotnet run --project tests/SharpCompress.Performance/SharpCompress.Performance.csproj --configuration Release -- --list flat
```
## Baseline Results
The baseline results are stored in `baseline-results.md` and represent the expected performance characteristics of the library. These results are used in CI to detect significant performance regressions.
### Generate Baseline (Automated)
Use the build target to generate baseline results:
```bash
dotnet run --project build/build.csproj -- generate-baseline
```
This will:
1. Build the performance project
2. Run all benchmarks
3. Combine the markdown reports into `baseline-results.md`
4. Clean up temporary artifacts
### Generate Baseline (Manual)
To manually update the baseline:
1. Run the benchmarks: `dotnet run --project tests/SharpCompress.Performance/SharpCompress.Performance.csproj --configuration Release -- --exporters markdown --artifacts baseline-output`
2. Combine the results: `cat baseline-output/results/*-report-github.md > baseline-results.md`
3. Review the changes and commit if appropriate
## JetBrains Profiler Integration
The performance project supports JetBrains profiler for detailed CPU and memory profiling during local development.
### Prerequisites
Install JetBrains profiler tools from: https://www.jetbrains.com/profiler/
### Run with CPU Profiling
```bash
dotnet run --project tests/SharpCompress.Performance/SharpCompress.Performance.csproj --configuration Release -- --profile --type cpu --output ./my-cpu-snapshots
```
### Run with Memory Profiling
```bash
dotnet run --project tests/SharpCompress.Performance/SharpCompress.Performance.csproj --configuration Release -- --profile --type memory --output ./my-memory-snapshots
```
### Profiler Options
- `--profile`: Enable profiler mode
- `--type cpu|memory`: Choose profiling type (default: cpu)
- `--output <path>`: Specify snapshot output directory (default: ./profiler-snapshots)
The profiler will run a sample benchmark and save snapshots that can be opened in JetBrains profiler tools for detailed analysis.
## CI Integration
The performance benchmarks run automatically in GitHub Actions on:
- Push to `master` or `release` branches
- Pull requests to `master` or `release` branches
- Manual workflow dispatch
Results are displayed in the GitHub Actions summary and uploaded as artifacts.
## Benchmark Configuration
The benchmarks are configured with minimal iterations for CI efficiency:
- **Warmup Count**: 3 iterations
- **Iteration Count**: 10 iterations
- **Invocation Count**: 10
- **Unroll Factor**: 1
- **Toolchain**: InProcessEmitToolchain (for fast execution)
These settings provide a good balance between speed and accuracy for CI purposes. For more accurate results, use the `Short`, `Medium`, or `Long` job configurations.
## Memory Diagnostics
All benchmarks include memory diagnostics using `[MemoryDiagnoser]`, which provides:
- Total allocated memory per operation
- Gen 0/1/2 collection counts
## Understanding Results
Key metrics in the benchmark results:
- **Mean**: Average execution time
- **Error**: Half of 99.9% confidence interval
- **StdDev**: Standard deviation
- **Allocated**: Total managed memory allocated per operation
## Contributing
When adding new benchmarks:
1. Create a new class in the `Benchmarks/` directory
2. Inherit from `ArchiveBenchmarkBase` for archive-related benchmarks
3. Add `[MemoryDiagnoser]` attribute to the class
4. Use `[Benchmark(Description = "...")]` for each benchmark method
5. Add `[GlobalSetup]` for one-time initialization
6. Update this README if needed

View File

@@ -4,7 +4,6 @@
<TargetFramework>net10.0</TargetFramework>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="BenchmarkDotNet" />
<PackageReference Include="JetBrains.Profiler.SelfApi" />
<ProjectReference Include="..\..\src\SharpCompress\SharpCompress.csproj" />
</ItemGroup>

View File

@@ -1,23 +0,0 @@
| Method | Mean | Error | StdDev | Allocated |
|------------------------- |-----------:|---------:|---------:|----------:|
| &#39;GZip: Compress 100KB&#39; | 3,268.7 μs | 28.50 μs | 16.96 μs | 519.2 KB |
| &#39;GZip: Decompress 100KB&#39; | 436.6 μs | 3.23 μs | 1.69 μs | 34.18 KB |
| Method | Mean | Error | StdDev | Allocated |
|----------------------------------------- |---------:|----------:|----------:|----------:|
| &#39;Rar: Extract all entries (Archive API)&#39; | 2.054 ms | 0.3927 ms | 0.2598 ms | 91.09 KB |
| &#39;Rar: Extract all entries (Reader API)&#39; | 2.235 ms | 0.0253 ms | 0.0132 ms | 149.48 KB |
| Method | Mean | Error | StdDev | Allocated |
|---------------------------------- |---------:|----------:|----------:|----------:|
| &#39;7Zip LZMA: Extract all entries&#39; | 9.124 ms | 2.1930 ms | 1.4505 ms | 272.8 KB |
| &#39;7Zip LZMA2: Extract all entries&#39; | 7.810 ms | 0.1323 ms | 0.0788 ms | 272.58 KB |
| Method | Mean | Error | StdDev | Allocated |
|----------------------------------------- |----------:|---------:|---------:|----------:|
| &#39;Tar: Extract all entries (Archive API)&#39; | 56.36 μs | 3.312 μs | 1.971 μs | 16.65 KB |
| &#39;Tar: Extract all entries (Reader API)&#39; | 175.34 μs | 2.616 μs | 1.557 μs | 213.36 KB |
| &#39;Tar.GZip: Extract all entries&#39; | NA | NA | NA | NA |
| &#39;Tar: Create archive with small files&#39; | 51.38 μs | 2.349 μs | 1.398 μs | 68.7 KB |
| Method | Mean | Error | StdDev | Gen0 | Allocated |
|----------------------------------------- |-----------:|---------:|---------:|---------:|-----------:|
| &#39;Zip: Extract all entries (Archive API)&#39; | 1,188.4 μs | 28.62 μs | 14.97 μs | - | 181.66 KB |
| &#39;Zip: Extract all entries (Reader API)&#39; | 1,137.0 μs | 5.58 μs | 2.92 μs | - | 123.19 KB |
| &#39;Zip: Create archive with small files&#39; | 258.2 μs | 8.98 μs | 4.70 μs | 100.0000 | 2806.93 KB |

View File

@@ -2,24 +2,6 @@
"version": 2,
"dependencies": {
"net10.0": {
"BenchmarkDotNet": {
"type": "Direct",
"requested": "[0.15.8, )",
"resolved": "0.15.8",
"contentHash": "paCfrWxSeHqn3rUZc0spYXVFnHCF0nzRhG0nOLnyTjZYs8spsimBaaNmb3vwqvALKIplbYq/TF393vYiYSnh/Q==",
"dependencies": {
"BenchmarkDotNet.Annotations": "0.15.8",
"CommandLineParser": "2.9.1",
"Gee.External.Capstone": "2.3.0",
"Iced": "1.21.0",
"Microsoft.CodeAnalysis.CSharp": "4.14.0",
"Microsoft.Diagnostics.Runtime": "3.1.512801",
"Microsoft.Diagnostics.Tracing.TraceEvent": "3.1.21",
"Microsoft.DotNet.PlatformAbstractions": "3.1.6",
"Perfolizer": "[0.6.1]",
"System.Management": "9.0.5"
}
},
"JetBrains.Profiler.SelfApi": {
"type": "Direct",
"requested": "[2.5.16, )",
@@ -55,26 +37,6 @@
"resolved": "17.14.15",
"contentHash": "mXQPJsbuUD2ydq4/ffd8h8tSOFCXec+2xJOVNCvXjuMOq/+5EKHq3D2m2MC2+nUaXeFMSt66VS/J4HdKBixgcw=="
},
"BenchmarkDotNet.Annotations": {
"type": "Transitive",
"resolved": "0.15.8",
"contentHash": "hfucY0ycAsB0SsoaZcaAp9oq5wlWBJcylvEJb9pmvdYUx6PD6S4mDiYnZWjdjAlLhIpe/xtGCwzORfzAzPqvzA=="
},
"CommandLineParser": {
"type": "Transitive",
"resolved": "2.9.1",
"contentHash": "OE0sl1/sQ37bjVsPKKtwQlWDgqaxWgtme3xZz7JssWUzg5JpMIyHgCTY9MVMxOg48fJ1AgGT3tgdH5m/kQ5xhA=="
},
"Gee.External.Capstone": {
"type": "Transitive",
"resolved": "2.3.0",
"contentHash": "2ap/rYmjtzCOT8hxrnEW/QeiOt+paD8iRrIcdKX0cxVwWLFa1e+JDBNeECakmccXrSFeBQuu5AV8SNkipFMMMw=="
},
"Iced": {
"type": "Transitive",
"resolved": "1.21.0",
"contentHash": "dv5+81Q1TBQvVMSOOOmRcjJmvWcX3BZPZsIq31+RLc5cNft0IHAyNlkdb7ZarOWG913PyBoFDsDXoCIlKmLclg=="
},
"JetBrains.FormatRipper": {
"type": "Transitive",
"resolved": "2.4.0",
@@ -101,101 +63,6 @@
"resolved": "10.0.102",
"contentHash": "0i81LYX31U6UiXz4NOLbvc++u+/mVDmOt+PskrM/MygpDxkv9THKQyRUmavBpLK6iBV0abNWnn+CQgSRz//Pwg=="
},
"Microsoft.CodeAnalysis.Analyzers": {
"type": "Transitive",
"resolved": "3.11.0",
"contentHash": "v/EW3UE8/lbEYHoC2Qq7AR/DnmvpgdtAMndfQNmpuIMx/Mto8L5JnuCfdBYtgvalQOtfNCnxFejxuRrryvUTsg=="
},
"Microsoft.CodeAnalysis.Common": {
"type": "Transitive",
"resolved": "4.14.0",
"contentHash": "PC3tuwZYnC+idaPuoC/AZpEdwrtX7qFpmnrfQkgobGIWiYmGi5MCRtl5mx6QrfMGQpK78X2lfIEoZDLg/qnuHg==",
"dependencies": {
"Microsoft.CodeAnalysis.Analyzers": "3.11.0"
}
},
"Microsoft.CodeAnalysis.CSharp": {
"type": "Transitive",
"resolved": "4.14.0",
"contentHash": "568a6wcTivauIhbeWcCwfWwIn7UV7MeHEBvFB2uzGIpM2OhJ4eM/FZ8KS0yhPoNxnSpjGzz7x7CIjTxhslojQA==",
"dependencies": {
"Microsoft.CodeAnalysis.Analyzers": "3.11.0",
"Microsoft.CodeAnalysis.Common": "[4.14.0]"
}
},
"Microsoft.Diagnostics.NETCore.Client": {
"type": "Transitive",
"resolved": "0.2.510501",
"contentHash": "juoqJYMDs+lRrrZyOkXXMImJHneCF23cuvO4waFRd2Ds7j+ZuGIPbJm0Y/zz34BdeaGiiwGWraMUlln05W1PCQ==",
"dependencies": {
"Microsoft.Extensions.Logging": "6.0.0"
}
},
"Microsoft.Diagnostics.Runtime": {
"type": "Transitive",
"resolved": "3.1.512801",
"contentHash": "0lMUDr2oxNZa28D6NH5BuSQEe5T9tZziIkvkD44YkkCGQXPJqvFjLq5ZQq1hYLl3RjQJrY+hR0jFgap+EWPDTw==",
"dependencies": {
"Microsoft.Diagnostics.NETCore.Client": "0.2.410101"
}
},
"Microsoft.Diagnostics.Tracing.TraceEvent": {
"type": "Transitive",
"resolved": "3.1.21",
"contentHash": "/OrJFKaojSR6TkUKtwh8/qA9XWNtxLrXMqvEb89dBSKCWjaGVTbKMYodIUgF5deCEtmd6GXuRerciXGl5bhZ7Q==",
"dependencies": {
"Microsoft.Diagnostics.NETCore.Client": "0.2.510501",
"System.Reflection.TypeExtensions": "4.7.0"
}
},
"Microsoft.DotNet.PlatformAbstractions": {
"type": "Transitive",
"resolved": "3.1.6",
"contentHash": "jek4XYaQ/PGUwDKKhwR8K47Uh1189PFzMeLqO83mXrXQVIpARZCcfuDedH50YDTepBkfijCZN5U/vZi++erxtg=="
},
"Microsoft.Extensions.DependencyInjection": {
"type": "Transitive",
"resolved": "6.0.0",
"contentHash": "k6PWQMuoBDGGHOQTtyois2u4AwyVcIwL2LaSLlTZQm2CYcJ1pxbt6jfAnpWmzENA/wfrYRI/X9DTLoUkE4AsLw==",
"dependencies": {
"Microsoft.Extensions.DependencyInjection.Abstractions": "6.0.0"
}
},
"Microsoft.Extensions.DependencyInjection.Abstractions": {
"type": "Transitive",
"resolved": "6.0.0",
"contentHash": "xlzi2IYREJH3/m6+lUrQlujzX8wDitm4QGnUu6kUXTQAWPuZY8i+ticFJbzfqaetLA6KR/rO6Ew/HuYD+bxifg=="
},
"Microsoft.Extensions.Logging": {
"type": "Transitive",
"resolved": "6.0.0",
"contentHash": "eIbyj40QDg1NDz0HBW0S5f3wrLVnKWnDJ/JtZ+yJDFnDj90VoPuoPmFkeaXrtu+0cKm5GRAwoDf+dBWXK0TUdg==",
"dependencies": {
"Microsoft.Extensions.DependencyInjection": "6.0.0",
"Microsoft.Extensions.DependencyInjection.Abstractions": "6.0.0",
"Microsoft.Extensions.Logging.Abstractions": "6.0.0",
"Microsoft.Extensions.Options": "6.0.0"
}
},
"Microsoft.Extensions.Logging.Abstractions": {
"type": "Transitive",
"resolved": "6.0.0",
"contentHash": "/HggWBbTwy8TgebGSX5DBZ24ndhzi93sHUBDvP1IxbZD7FDokYzdAr6+vbWGjw2XAfR2EJ1sfKUotpjHnFWPxA=="
},
"Microsoft.Extensions.Options": {
"type": "Transitive",
"resolved": "6.0.0",
"contentHash": "dzXN0+V1AyjOe2xcJ86Qbo233KHuLEY0njf/P2Kw8SfJU+d45HNS2ctJdnEnrWbM9Ye2eFgaC5Mj9otRMU6IsQ==",
"dependencies": {
"Microsoft.Extensions.DependencyInjection.Abstractions": "6.0.0",
"Microsoft.Extensions.Primitives": "6.0.0"
}
},
"Microsoft.Extensions.Primitives": {
"type": "Transitive",
"resolved": "6.0.0",
"contentHash": "9+PnzmQFfEFNR9J2aDTfJGGupShHjOuGw4VUv+JB044biSHrnmCIMD+mJHmb2H7YryrfBEXDurxQ47gJZdCKNQ=="
},
"Microsoft.NETFramework.ReferenceAssemblies.net461": {
"type": "Transitive",
"resolved": "1.0.3",
@@ -206,37 +73,6 @@
"resolved": "10.0.102",
"contentHash": "Mk1IMb9q5tahC2NltxYXFkLBtuBvfBoCQ3pIxYQWfzbCE9o1OB9SsHe0hnNGo7lWgTA/ePbFAJLWu6nLL9K17A=="
},
"Perfolizer": {
"type": "Transitive",
"resolved": "0.6.1",
"contentHash": "CR1QmWg4XYBd1Pb7WseP+sDmV8nGPwvmowKynExTqr3OuckIGVMhvmN4LC5PGzfXqDlR295+hz/T7syA1CxEqA==",
"dependencies": {
"Pragmastat": "3.2.4"
}
},
"Pragmastat": {
"type": "Transitive",
"resolved": "3.2.4",
"contentHash": "I5qFifWw/gaTQT52MhzjZpkm/JPlfjSeO/DTZJjO7+hTKI+0aGRgOgZ3NN6D96dDuuqbIAZSeA5RimtHjqrA2A=="
},
"System.CodeDom": {
"type": "Transitive",
"resolved": "9.0.5",
"contentHash": "cuzLM2MWutf9ZBEMPYYfd0DXwYdvntp7VCT6a/wvbKCa2ZuvGmW74xi+YBa2mrfEieAXqM4TNKlMmSnfAfpUoQ=="
},
"System.Management": {
"type": "Transitive",
"resolved": "9.0.5",
"contentHash": "n6o9PZm9p25+zAzC3/48K0oHnaPKTInRrxqFq1fi/5TPbMLjuoCm/h//mS3cUmSy+9AO1Z+qsC/Ilt/ZFatv5Q==",
"dependencies": {
"System.CodeDom": "9.0.5"
}
},
"System.Reflection.TypeExtensions": {
"type": "Transitive",
"resolved": "4.7.0",
"contentHash": "VybpaOQQhqE6siHppMktjfGBw1GCwvCqiufqmP8F1nj7fTUNtW35LOEt3UZTEsECfo+ELAl/9o9nJx3U91i7vA=="
},
"sharpcompress": {
"type": "Project"
}

View File

@@ -401,11 +401,9 @@ public class ArchiveTests : ReaderTests
int? compressionLevel = null
)
{
var writerOptions = new ZipWriterOptions(compressionType);
if (compressionLevel.HasValue)
{
writerOptions.CompressionLevel = compressionLevel.Value;
}
var writerOptions = compressionLevel.HasValue
? new WriterOptions(compressionType, compressionLevel.Value)
: new WriterOptions(compressionType);
return WriterFactory.OpenWriter(stream, ArchiveType.Zip, writerOptions);
}
@@ -415,12 +413,9 @@ public class ArchiveTests : ReaderTests
int? compressionLevel = null
)
{
var writerOptions = new ZipWriterOptions(compressionType);
if (compressionLevel.HasValue)
{
writerOptions.CompressionLevel = compressionLevel.Value;
writerOptions.LeaveStreamOpen = true;
}
var writerOptions = compressionLevel.HasValue
? new WriterOptions(compressionType, compressionLevel.Value, leaveStreamOpen: true)
: new WriterOptions(compressionType) { LeaveStreamOpen = true };
return WriterFactory.OpenAsyncWriter(
new AsyncOnlyStream(stream),
ArchiveType.Zip,

View File

@@ -27,7 +27,11 @@ public class ExtractionTests : TestBase
using (var stream = File.Create(testArchive))
{
using var writer = (ZipWriter)
WriterFactory.OpenWriter(stream, ArchiveType.Zip, CompressionType.Deflate);
WriterFactory.OpenWriter(
stream,
ArchiveType.Zip,
new WriterOptions(CompressionType.Deflate)
);
// Create a test file to add to the archive
var testFilePath = Path.Combine(SCRATCH2_FILES_PATH, "testfile.txt");
@@ -72,7 +76,11 @@ public class ExtractionTests : TestBase
using (var stream = File.Create(testArchive))
{
using var writer = (ZipWriter)
WriterFactory.OpenWriter(stream, ArchiveType.Zip, CompressionType.Deflate);
WriterFactory.OpenWriter(
stream,
ArchiveType.Zip,
new WriterOptions(CompressionType.Deflate)
);
var testFilePath = Path.Combine(SCRATCH2_FILES_PATH, "testfile2.txt");
File.WriteAllText(testFilePath, "Test content");

View File

@@ -27,7 +27,7 @@ public class GZipWriterAsyncTests : WriterTests
var writer = WriterFactory.OpenAsyncWriter(
new AsyncOnlyStream(stream),
ArchiveType.GZip,
CompressionType.GZip
new WriterOptions(CompressionType.GZip)
)
)
{
@@ -67,7 +67,7 @@ public class GZipWriterAsyncTests : WriterTests
using var writer = WriterFactory.OpenWriter(
new AsyncOnlyStream(stream),
ArchiveType.GZip,
CompressionType.BZip2
new WriterOptions(CompressionType.BZip2)
);
});

View File

@@ -1,4 +1,4 @@
using System.IO;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Writers;
using SharpCompress.Writers.GZip;
@@ -22,7 +22,11 @@ public class GZipWriterTests : WriterTests
)
)
using (
var writer = WriterFactory.OpenWriter(stream, ArchiveType.GZip, CompressionType.GZip)
var writer = WriterFactory.OpenWriter(
stream,
ArchiveType.GZip,
new WriterOptions(CompressionType.GZip)
)
)
{
writer.Write("Tar.tar", Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar"));
@@ -61,7 +65,7 @@ public class GZipWriterTests : WriterTests
using var writer = WriterFactory.OpenWriter(
stream,
ArchiveType.GZip,
CompressionType.BZip2
new WriterOptions(CompressionType.BZip2)
);
});

View File

@@ -41,11 +41,11 @@ public abstract class ReaderTests : TestBase
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
options ??= new ReaderOptions { BufferSize = 0x20000 };
options.LeaveStreamOpen = true;
readImpl(testArchive, options);
var optionsWithStreamOpen = options with { LeaveStreamOpen = true };
readImpl(testArchive, optionsWithStreamOpen);
options.LeaveStreamOpen = false;
readImpl(testArchive, options);
var optionsWithStreamClosed = options with { LeaveStreamOpen = false };
readImpl(testArchive, optionsWithStreamClosed);
VerifyFiles();
}
@@ -141,11 +141,21 @@ public abstract class ReaderTests : TestBase
options ??= new ReaderOptions() { BufferSize = 0x20000 };
options.LeaveStreamOpen = true;
await ReadImplAsync(testArchive, expectedCompression, options, cancellationToken);
var optionsWithStreamOpen = options with { LeaveStreamOpen = true };
await ReadImplAsync(
testArchive,
expectedCompression,
optionsWithStreamOpen,
cancellationToken
);
options.LeaveStreamOpen = false;
await ReadImplAsync(testArchive, expectedCompression, options, cancellationToken);
var optionsWithStreamClosed = options with { LeaveStreamOpen = false };
await ReadImplAsync(
testArchive,
expectedCompression,
optionsWithStreamClosed,
cancellationToken
);
VerifyFiles();
}

View File

@@ -148,8 +148,10 @@ public class TarArchiveAsyncTests : ArchiveTests
await using (var archive = TarArchive.CreateAsyncArchive())
{
await archive.AddAllFromDirectoryAsync(ORIGINAL_FILES_PATH);
var twopt = new TarWriterOptions(CompressionType.None, true);
twopt.ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(866) };
var twopt = new TarWriterOptions(CompressionType.None, true)
{
ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(866) },
};
await archive.SaveToAsync(scratchPath, twopt);
}
CompareArchivesByPath(unmodified, scratchPath);
@@ -196,8 +198,7 @@ public class TarArchiveAsyncTests : ArchiveTests
{
using var mstm = new MemoryStream();
var enc = new ArchiveEncoding { Default = Encoding.UTF8 };
var twopt = new TarWriterOptions(CompressionType.None, true);
twopt.ArchiveEncoding = enc;
var twopt = new TarWriterOptions(CompressionType.None, true) { ArchiveEncoding = enc };
var fname = new string((char)0x3042, length);
using (var tw = new TarWriter(mstm, twopt))
using (var input = new MemoryStream(new byte[32]))

View File

@@ -3,6 +3,7 @@ using System.IO;
using System.Linq;
using SharpCompress.Archives.Tar;
using SharpCompress.Common;
using SharpCompress.Writers;
using Xunit;
namespace SharpCompress.Test.Tar;
@@ -80,7 +81,7 @@ public class TarArchiveDirectoryTests : TestBase
using (var fileStream = File.Create(scratchPath))
{
archive.SaveTo(fileStream, CompressionType.None);
archive.SaveTo(fileStream, new WriterOptions(CompressionType.None));
}
}

View File

@@ -34,7 +34,13 @@ public class TarArchiveTests : ArchiveTests
// Step 1: create a tar file containing a file with the test name
using (Stream stream = File.OpenWrite(Path.Combine(SCRATCH2_FILES_PATH, archive)))
using (var writer = WriterFactory.OpenWriter(stream, ArchiveType.Tar, CompressionType.None))
using (
var writer = WriterFactory.OpenWriter(
stream,
ArchiveType.Tar,
new WriterOptions(CompressionType.None)
)
)
using (Stream inputStream = new MemoryStream())
{
var sw = new StreamWriter(inputStream);
@@ -94,7 +100,13 @@ public class TarArchiveTests : ArchiveTests
// Step 1: create a tar file containing a file with a long name
using (Stream stream = File.OpenWrite(Path.Combine(SCRATCH2_FILES_PATH, archive)))
using (var writer = WriterFactory.OpenWriter(stream, ArchiveType.Tar, CompressionType.None))
using (
var writer = WriterFactory.OpenWriter(
stream,
ArchiveType.Tar,
new WriterOptions(CompressionType.None)
)
)
using (Stream inputStream = new MemoryStream())
{
var sw = new StreamWriter(inputStream);
@@ -162,8 +174,10 @@ public class TarArchiveTests : ArchiveTests
using (var archive = TarArchive.CreateArchive())
{
archive.AddAllFromDirectory(ORIGINAL_FILES_PATH);
var twopt = new TarWriterOptions(CompressionType.None, true);
twopt.ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(866) };
var twopt = new TarWriterOptions(CompressionType.None, true)
{
ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(866) },
};
archive.SaveTo(scratchPath, twopt);
}
CompareArchivesByPath(unmodified, scratchPath);
@@ -180,7 +194,7 @@ public class TarArchiveTests : ArchiveTests
using (var archive = TarArchive.OpenArchive(unmodified))
{
archive.AddEntry("jpg\\test.jpg", jpg);
archive.SaveTo(scratchPath, CompressionType.None);
archive.SaveTo(scratchPath, new WriterOptions(CompressionType.None));
}
CompareArchivesByPath(modified, scratchPath);
}
@@ -198,7 +212,7 @@ public class TarArchiveTests : ArchiveTests
x.Key.NotNull().EndsWith("jpg", StringComparison.OrdinalIgnoreCase)
);
archive.RemoveEntry(entry);
archive.SaveTo(scratchPath, CompressionType.None);
archive.SaveTo(scratchPath, new WriterOptions(CompressionType.None));
}
CompareArchivesByPath(modified, scratchPath);
}
@@ -228,8 +242,7 @@ public class TarArchiveTests : ArchiveTests
{
using var mstm = new MemoryStream();
var enc = new ArchiveEncoding { Default = Encoding.UTF8 };
var twopt = new TarWriterOptions(CompressionType.None, true);
twopt.ArchiveEncoding = enc;
var twopt = new TarWriterOptions(CompressionType.None, true) { ArchiveEncoding = enc };
var fname = new string((char)0x3042, length);
using (var tw = new TarWriter(mstm, twopt))
using (var input = new MemoryStream(new byte[32]))

View File

@@ -31,7 +31,7 @@ public class Zip64VersionConsistencyTests : WriterTests
}
// Create archive with UseZip64=true
WriterOptions writerOptions = new ZipWriterOptions(CompressionType.Deflate)
var writerOptions = new ZipWriterOptions(CompressionType.Deflate)
{
LeaveStreamOpen = false,
UseZip64 = true,
@@ -135,7 +135,7 @@ public class Zip64VersionConsistencyTests : WriterTests
}
// Create archive without UseZip64
WriterOptions writerOptions = new ZipWriterOptions(CompressionType.Deflate)
var writerOptions = new ZipWriterOptions(CompressionType.Deflate)
{
LeaveStreamOpen = false,
UseZip64 = false,
@@ -186,7 +186,7 @@ public class Zip64VersionConsistencyTests : WriterTests
File.Delete(filename);
}
WriterOptions writerOptions = new ZipWriterOptions(CompressionType.LZMA)
var writerOptions = new ZipWriterOptions(CompressionType.LZMA)
{
LeaveStreamOpen = false,
UseZip64 = false,
@@ -239,7 +239,7 @@ public class Zip64VersionConsistencyTests : WriterTests
File.Delete(filename);
}
WriterOptions writerOptions = new ZipWriterOptions(CompressionType.PPMd)
var writerOptions = new ZipWriterOptions(CompressionType.PPMd)
{
LeaveStreamOpen = false,
UseZip64 = false,
@@ -292,7 +292,7 @@ public class Zip64VersionConsistencyTests : WriterTests
File.Delete(filename);
}
WriterOptions writerOptions = new ZipWriterOptions(CompressionType.Deflate)
var writerOptions = new ZipWriterOptions(CompressionType.Deflate)
{
LeaveStreamOpen = false,
UseZip64 = true,

View File

@@ -133,8 +133,10 @@ public class ZipArchiveAsyncTests : ArchiveTests
);
await archive.RemoveEntryAsync(entry);
WriterOptions writerOptions = new ZipWriterOptions(CompressionType.Deflate);
writerOptions.ArchiveEncoding.Default = Encoding.GetEncoding(866);
var writerOptions = new ZipWriterOptions(CompressionType.Deflate)
{
ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(866) },
};
await archive.SaveToAsync(scratchPath, writerOptions);
}
@@ -153,8 +155,10 @@ public class ZipArchiveAsyncTests : ArchiveTests
{
await archive.AddEntryAsync("jpg\\test.jpg", jpg);
WriterOptions writerOptions = new ZipWriterOptions(CompressionType.Deflate);
writerOptions.ArchiveEncoding.Default = Encoding.GetEncoding(866);
var writerOptions = new ZipWriterOptions(CompressionType.Deflate)
{
ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(866) },
};
await archive.SaveToAsync(scratchPath, writerOptions);
}
@@ -172,8 +176,10 @@ public class ZipArchiveAsyncTests : ArchiveTests
archive.DeflateCompressionLevel = CompressionLevel.BestSpeed;
archive.AddAllFromDirectory(ORIGINAL_FILES_PATH);
WriterOptions writerOptions = new ZipWriterOptions(CompressionType.Deflate);
writerOptions.ArchiveEncoding.Default = Encoding.UTF8;
var writerOptions = new ZipWriterOptions(CompressionType.Deflate)
{
ArchiveEncoding = new ArchiveEncoding { Default = Encoding.UTF8 },
};
await archive.SaveToAsync(scratchPath, writerOptions);
}

View File

@@ -3,6 +3,7 @@ using System.IO;
using System.Linq;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.Writers;
using Xunit;
namespace SharpCompress.Test.Zip;
@@ -80,7 +81,7 @@ public class ZipArchiveDirectoryTests : TestBase
using (var fileStream = File.Create(scratchPath))
{
archive.SaveTo(fileStream, CompressionType.Deflate);
archive.SaveTo(fileStream, new WriterOptions(CompressionType.Deflate));
}
}

View File

@@ -242,8 +242,10 @@ public class ZipArchiveTests : ArchiveTests
);
archive.RemoveEntry(entry);
WriterOptions writerOptions = new ZipWriterOptions(CompressionType.Deflate);
writerOptions.ArchiveEncoding.Default = Encoding.GetEncoding(866);
var writerOptions = new ZipWriterOptions(CompressionType.Deflate)
{
ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(866) },
};
archive.SaveTo(scratchPath, writerOptions);
}
@@ -262,8 +264,10 @@ public class ZipArchiveTests : ArchiveTests
{
archive.AddEntry("jpg\\test.jpg", jpg);
WriterOptions writerOptions = new ZipWriterOptions(CompressionType.Deflate);
writerOptions.ArchiveEncoding.Default = Encoding.GetEncoding(866);
var writerOptions = new ZipWriterOptions(CompressionType.Deflate)
{
ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(866) },
};
archive.SaveTo(scratchPath, writerOptions);
}
@@ -281,8 +285,8 @@ public class ZipArchiveTests : ArchiveTests
var str = "test.txt";
var source = new MemoryStream(Encoding.UTF8.GetBytes(str));
arc.AddEntry("test.txt", source, true, source.Length);
arc.SaveTo(scratchPath1, CompressionType.Deflate);
arc.SaveTo(scratchPath2, CompressionType.Deflate);
arc.SaveTo(scratchPath1, new WriterOptions(CompressionType.Deflate));
arc.SaveTo(scratchPath2, new WriterOptions(CompressionType.Deflate));
}
Assert.Equal(new FileInfo(scratchPath1).Length, new FileInfo(scratchPath2).Length);
@@ -330,8 +334,8 @@ public class ZipArchiveTests : ArchiveTests
{
arc.AddEntry("1.txt", stream, false, stream.Length);
arc.AddEntry("2.txt", stream, false, stream.Length);
arc.SaveTo(scratchPath1, CompressionType.Deflate);
arc.SaveTo(scratchPath2, CompressionType.Deflate);
arc.SaveTo(scratchPath1, new WriterOptions(CompressionType.Deflate));
arc.SaveTo(scratchPath2, new WriterOptions(CompressionType.Deflate));
}
}
@@ -374,8 +378,10 @@ public class ZipArchiveTests : ArchiveTests
{
archive.AddAllFromDirectory(SCRATCH_FILES_PATH);
WriterOptions writerOptions = new ZipWriterOptions(CompressionType.Deflate);
writerOptions.ArchiveEncoding.Default = Encoding.GetEncoding(866);
var writerOptions = new ZipWriterOptions(CompressionType.Deflate)
{
ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(866) },
};
archive.SaveTo(scratchPath, writerOptions);
}
@@ -393,7 +399,7 @@ public class ZipArchiveTests : ArchiveTests
var archiveStream = new MemoryStream();
archive.SaveTo(archiveStream, CompressionType.LZMA);
archive.SaveTo(archiveStream, new WriterOptions(CompressionType.LZMA));
archiveStream.Position = 0;
@@ -622,7 +628,7 @@ public class ZipArchiveTests : ArchiveTests
var zipWriter = WriterFactory.OpenWriter(
stream,
ArchiveType.Zip,
CompressionType.Deflate
new WriterOptions(CompressionType.Deflate)
)
)
{

View File

@@ -286,7 +286,7 @@ public class ZipReaderTests : ReaderTests
var zipWriter = WriterFactory.OpenWriter(
stream,
ArchiveType.Zip,
CompressionType.Deflate
new WriterOptions(CompressionType.Deflate)
)
)
{