Compare commits

...

64 Commits

Author SHA1 Message Date
Adam Hathcock
daacb93902 more pooling 2026-01-14 16:49:34 +00:00
Adam Hathcock
eb2f60fb53 rar byte[] better 2026-01-14 16:38:57 +00:00
Adam Hathcock
bd0439c424 change byte[] to memory using pool 2026-01-14 15:55:14 +00:00
Adam Hathcock
b935bcfaef reduce memory usage on headers 2026-01-14 15:33:08 +00:00
Adam Hathcock
56c22cee78 Merge branch 'copilot/add-performance-benchmarking' into adam/async-again 2026-01-14 14:53:17 +00:00
Adam Hathcock
5c4b83e501 Merge remote-tracking branch 'origin/master' into copilot/add-performance-benchmarking
# Conflicts:
#	tests/SharpCompress.Performance/packages.lock.json
2026-01-14 14:52:21 +00:00
Adam Hathcock
84b5b5a717 add more tests 2026-01-14 14:33:20 +00:00
Adam Hathcock
ebfa16f09f more test fixes 2026-01-14 14:12:53 +00:00
Adam Hathcock
c1d240b516 Fix more tests 2026-01-14 14:06:39 +00:00
Adam Hathcock
5c4719f4a9 missing extensions 2026-01-14 13:39:11 +00:00
Adam Hathcock
95d2278d8b fmt 2026-01-14 12:13:29 +00:00
Adam Hathcock
e63ee57ef0 same for writers 2026-01-14 09:29:44 +00:00
Adam Hathcock
775efa1b26 Reader open factories 2026-01-14 09:23:18 +00:00
Adam Hathcock
3677b4b193 add default interfaces to enforce consistency 2026-01-14 08:57:12 +00:00
Adam Hathcock
c32f4b4f2a fix test reference 2026-01-14 08:33:49 +00:00
Adam Hathcock
8d34f88ca6 fix up gitignore 2026-01-13 16:42:54 +00:00
Adam Hathcock
ca4cf25a1f clean up lazy readonly collections and add tests 2026-01-13 16:39:55 +00:00
Adam Hathcock
4fa976b478 remove unused ref 2026-01-13 15:29:02 +00:00
Adam Hathcock
767f3a4985 fix up extensions to more like polyfills 2026-01-13 15:26:45 +00:00
Adam Hathcock
ddc08e068e fix async error 2026-01-13 15:16:38 +00:00
Adam Hathcock
a1a86cdde8 fmt 2026-01-13 14:29:10 +00:00
Adam Hathcock
fc85f1fa2c more tar async fixes 2026-01-13 14:28:45 +00:00
Adam Hathcock
0b8081f320 gzip fixes 2026-01-13 14:24:36 +00:00
Adam Hathcock
0b5371d986 more async fixing 2026-01-13 14:06:14 +00:00
Adam Hathcock
cdca909d84 fmt 2026-01-13 13:58:31 +00:00
Adam Hathcock
ec7d2e357d fix lock? 2026-01-13 13:58:03 +00:00
Adam Hathcock
1c0183ef11 force async tests 2026-01-13 13:56:56 +00:00
Adam Hathcock
9cf2b3129c fixed up async writer 2026-01-13 13:54:15 +00:00
Adam Hathcock
9a4e864f5e fix usage of ArchiveFactory 2026-01-13 13:49:35 +00:00
Adam Hathcock
4df952db1b split out factories for archive 2026-01-12 16:32:26 +00:00
Adam Hathcock
1b4cedfa13 misc fixes 2026-01-12 16:21:20 +00:00
Adam Hathcock
6d6103afd6 update docs 2026-01-12 16:08:16 +00:00
Adam Hathcock
d727d76299 Merge remote-tracking branch 'origin/master' into adam/async-interface 2026-01-12 15:02:19 +00:00
Adam Hathcock
0502ff545e test fixes and fmt 2026-01-12 15:01:29 +00:00
Adam Hathcock
fce4a96718 make Writable interfaces for archive 2026-01-12 14:57:13 +00:00
Adam Hathcock
38203fb950 Fix async reader variable types - Remove double await on ReaderFactory.OpenAsync and use IAsyncReader
- Removed 'await' keyword before ReaderFactory.OpenAsync() calls since the method returns IAsyncReader directly (not Task)
- Changed ZipReader.Open() to ReaderFactory.OpenAsync() in Zip64AsyncTests.ReadForwardOnlyAsync()
- Changed TarReader.Open() to ReaderFactory.OpenAsync() in TarReaderAsyncTests.Tar_BZip2_Entry_Stream_Async()
- Fixed EntryStream disposal from 'await using' to 'using' since EntryStream doesn't implement IAsyncDisposable
- These changes fix compilation errors where async methods were being called on IReader (synchronous) instead of IAsyncReader (asynchronous)
2026-01-12 14:14:46 +00:00
copilot-swe-agent[bot]
80ac10a5fe Merge latest master branch and resolve conflicts
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-12 14:11:57 +00:00
Adam Hathcock
65dba509e0 Merge pull request #1121 from adamhathcock/adam/async
More async for ZipReader and ZipWriter
2026-01-12 14:07:37 +00:00
Adam Hathcock
0615d17b8b fix async interfacing for open 2026-01-12 13:45:21 +00:00
Adam Hathcock
c1f8580d89 Remove unnecessary ValueTask wrappers from async factory methods
Change return types from ValueTask<T> to direct interface types (IAsyncArchive, IAsyncReader, IWriter) for wrapper methods that don't perform async work. This eliminates unnecessary async state machine allocations while maintaining the same public API behavior.

Changes:
- Interface definitions: Updated IArchiveFactory, IMultiArchiveFactory, IReaderFactory, IWriterFactory
- Concrete factories: Updated archive factories (Zip, Tar, Rar, GZip, SevenZip) and reader-only factories (Ace, Arc, Arj)
- Static factory methods: Updated ReaderFactory, ArchiveFactory, WriterFactory to use new signatures
- Archive classes: Updated static OpenAsync methods in ZipArchive, TarArchive, RarArchive, SevenZipArchive, GZipArchive
- Supporting changes: Updated Factory.cs and async polyfills

Performance benefit: Reduced GC pressure by eliminating unnecessary state machine overhead for non-async wrapper methods.
2026-01-12 13:16:44 +00:00
Adam Hathcock
c5a6f900df Merge branch 'adam/async' into adam/async-interface 2026-01-12 13:02:51 +00:00
Adam Hathcock
3807c3ce2a Merge pull request #1127 from adamhathcock/copilot/sub-pr-1121-yet-again
Fix async test method naming in ZipArchiveAsyncTests
2026-01-12 12:10:51 +00:00
Adam Hathcock
d2f328af01 Merge pull request #1126 from adamhathcock/copilot/sub-pr-1121-another-one
Fix typo in TestBase.cs comment
2026-01-12 12:10:11 +00:00
Adam Hathcock
c3ffcf4fe8 Merge pull request #1125 from adamhathcock/copilot/sub-pr-1121-again
[WIP] Update ZipReader and ZipWriter based on review feedback
2026-01-12 12:09:42 +00:00
copilot-swe-agent[bot]
95c409d979 Change File.Create to File.OpenWrite in TarReaderAsyncTests for consistency
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-12 12:09:12 +00:00
Adam Hathcock
dadf9e71bb Merge pull request #1124 from adamhathcock/copilot/sub-pr-1121
[WIP] Address feedback on async implementation for ZipReader and ZipWriter
2026-01-12 12:09:10 +00:00
Adam Hathcock
05ebf22009 Start using the interface to draw distinction between async and sync 2026-01-12 12:08:25 +00:00
copilot-swe-agent[bot]
f4b1780d8a Rename async test methods to use _Async suffix instead of _Sync
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-12 12:08:23 +00:00
copilot-swe-agent[bot]
64a09eb0f8 Fix typo in TestBase.cs comment: 'akways' -> 'always'
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-12 12:06:47 +00:00
copilot-swe-agent[bot]
3a636531e8 Initial plan 2026-01-12 12:05:34 +00:00
copilot-swe-agent[bot]
292da90184 Initial plan 2026-01-12 12:05:22 +00:00
copilot-swe-agent[bot]
90c8ff8650 Initial plan 2026-01-12 12:05:11 +00:00
Adam Hathcock
8d0ac5062f Merge pull request #1123 from adamhathcock/adam/add-more-docs
Add more documentation
2026-01-11 12:20:55 +00:00
Adam Hathcock
b2d1505e5c remove section on allocations 2026-01-11 12:20:40 +00:00
Adam Hathcock
47037a4b9d docs: Add explicit guideline that agents should never commit to git
Agents should stage files and leave committing to the user. Only create
commits when the user explicitly requests them.
2026-01-08 15:49:10 +00:00
Adam Hathcock
507b1e35d8 docs: Remove broken references to non-existent files
- Remove CONTRIBUTING.md reference from ARCHITECTURE.md
- Remove ERRORS.md reference from API.md
- Remove TROUBLESHOOTING.md reference from ENCODING.md
- Remove TROUBLESHOOTING.md reference from PERFORMANCE.md

All markdown files now reference only existing documentation.
2026-01-08 15:47:57 +00:00
Adam Hathcock
2839e1d33f Update docs/ENCODING.md
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-01-08 15:39:47 +00:00
Adam Hathcock
8c876c70af Add more documentation 2026-01-08 15:34:48 +00:00
copilot-swe-agent[bot]
a92ce90252 Fix path validation and add iteration cleanup to prevent file reuse
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-05 17:38:17 +00:00
copilot-swe-agent[bot]
e519f61f0f Address code review feedback: fix exception handling and initialization order
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-05 17:36:08 +00:00
copilot-swe-agent[bot]
49f2271253 Format code with CSharpier
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-05 17:33:06 +00:00
copilot-swe-agent[bot]
5b1d11bc1d Add WriteBenchmarks, BaselineComparisonBenchmarks, and comprehensive documentation
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-05 17:31:32 +00:00
copilot-swe-agent[bot]
aa3a40d968 Add BenchmarkDotNet integration with Archive and Reader benchmarks
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-05 17:27:50 +00:00
copilot-swe-agent[bot]
6125654b2e Initial plan 2026-01-05 17:21:29 +00:00
140 changed files with 7012 additions and 2209 deletions


@@ -307,7 +307,6 @@ dotnet_diagnostic.CS8602.severity = error
dotnet_diagnostic.CS8604.severity = error
dotnet_diagnostic.CS8618.severity = error
dotnet_diagnostic.CS0618.severity = suggestion
dotnet_diagnostic.CS1998.severity = error
dotnet_diagnostic.CS4014.severity = error
dotnet_diagnostic.CS8600.severity = error
dotnet_diagnostic.CS8603.severity = error

.gitignore (vendored, 8 changed lines)

@@ -4,8 +4,8 @@ _ReSharper.SharpCompress/
bin/
*.suo
*.user
TestArchives/Scratch/
TestArchives/Scratch2/
tests/TestArchives/Scratch/
tests/TestArchives/Scratch2/
TestResults/
*.nupkg
packages/*/
@@ -20,3 +20,7 @@ artifacts/
.DS_Store
*.snupkg
# BenchmarkDotNet artifacts
BenchmarkDotNet.Artifacts/
**/BenchmarkDotNet.Artifacts/


@@ -14,6 +14,7 @@ SharpCompress is a pure C# compression library supporting multiple archive forma
- Follow the existing code style and patterns in the codebase.
## General Instructions
- **Agents should NEVER commit to git** - Agents should stage files and leave committing to the user. Only create commits when the user explicitly requests them.
- Make only high confidence suggestions when reviewing code changes.
- Write code with good maintainability practices, including comments on why certain design decisions were made.
- Handle edge cases and write clear exception handling.
@@ -110,7 +111,7 @@ SharpCompress supports multiple archive and compression formats:
- **Archive Formats**: Zip, Tar, 7Zip, Rar (read-only)
- **Compression**: DEFLATE, BZip2, LZMA/LZMA2, PPMd, ZStandard (decompress only), Deflate64 (decompress only)
- **Combined Formats**: Tar.GZip, Tar.BZip2, Tar.LZip, Tar.XZ, Tar.ZStandard
- See FORMATS.md for complete format support matrix
- See [docs/FORMATS.md](docs/FORMATS.md) for complete format support matrix
### Stream Handling Rules
- **Disposal**: As of version 0.21, SharpCompress closes wrapped streams by default
@@ -177,5 +178,4 @@ SharpCompress supports multiple archive and compression formats:
2. **Solid archives (Rar, 7Zip)** - Use `ExtractAllEntries()` for best performance, not individual entry extraction
3. **Stream disposal** - Always set `LeaveStreamOpen` explicitly when needed (default is to close)
4. **Tar + non-seekable stream** - Must provide file size or it will throw
5. **Multi-framework differences** - Some features differ between .NET Framework and modern .NET (e.g., Mono.Posix)
6. **Format detection** - Use `ReaderFactory.Open()` for auto-detection, test with actual archive files


@@ -12,5 +12,6 @@
<RunAnalyzersDuringBuild>False</RunAnalyzersDuringBuild>
<ManagePackageVersionsCentrally>true</ManagePackageVersionsCentrally>
<RestorePackagesWithLockFile>true</RestorePackagesWithLockFile>
<CentralPackageTransitivePinningEnabled>true</CentralPackageTransitivePinningEnabled>
</PropertyGroup>
</Project>


@@ -1,10 +1,12 @@
<Project>
<ItemGroup>
<PackageVersion Include="BenchmarkDotNet" Version="0.14.0" />
<PackageVersion Include="Bullseye" Version="6.1.0" />
<PackageVersion Include="AwesomeAssertions" Version="9.3.0" />
<PackageVersion Include="Glob" Version="1.1.9" />
<PackageVersion Include="JetBrains.Profiler.SelfApi" Version="2.5.15" />
<PackageVersion Include="Microsoft.Bcl.AsyncInterfaces" Version="10.0.0" />
<PackageVersion Include="Microsoft.NET.ILLink.Task" Version="10.0.0" />
<PackageVersion Include="Microsoft.NET.Test.Sdk" Version="18.0.1" />
<PackageVersion Include="Mono.Posix.NETStandard" Version="1.0.0" />
<PackageVersion Include="SimpleExec" Version="13.0.0" />


@@ -4,7 +4,7 @@ SharpCompress is a compression library in pure C# for .NET Framework 4.8, .NET 8
The major feature is support for non-seekable streams so large files can be processed on the fly (i.e. download stream).
**NEW:** All I/O operations now support async/await for improved performance and scalability. See the [USAGE.md](USAGE.md#async-examples) for examples.
**NEW:** All I/O operations now support async/await for improved performance and scalability. See the [USAGE.md](docs/USAGE.md#async-examples) for examples.
GitHub Actions Build -
[![SharpCompress](https://github.com/adamhathcock/sharpcompress/actions/workflows/dotnetcore.yml/badge.svg)](https://github.com/adamhathcock/sharpcompress/actions/workflows/dotnetcore.yml)
@@ -14,7 +14,7 @@ GitHub Actions Build -
Post Issues on Github!
Check the [Supported Formats](FORMATS.md) and [Basic Usage.](USAGE.md)
Check the [Supported Formats](docs/FORMATS.md) and [Basic Usage.](docs/USAGE.md)
## Recommended Formats


@@ -18,12 +18,11 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Config", "Config", "{CDB425
Directory.Build.props = Directory.Build.props
global.json = global.json
.editorconfig = .editorconfig
.gitignore = .gitignore
Directory.Packages.props = Directory.Packages.props
NuGet.config = NuGet.config
.github\workflows\nuget-release.yml = .github\workflows\nuget-release.yml
USAGE.md = USAGE.md
README.md = README.md
FORMATS.md = FORMATS.md
AGENTS.md = AGENTS.md
EndProjectSection
EndProject

docs/API.md (new file, 512 lines)

@@ -0,0 +1,512 @@
# API Quick Reference
Quick reference for commonly used SharpCompress APIs.
## Factory Methods
### Opening Archives
```csharp
// Auto-detect format
using (var reader = ReaderFactory.Open(stream))
{
// Works with Zip, Tar, GZip, Rar, 7Zip, etc.
}
// Specific format - Archive API
using (var archive = ZipArchive.Open("file.zip"))
using (var archive = TarArchive.Open("file.tar"))
using (var archive = RarArchive.Open("file.rar"))
using (var archive = SevenZipArchive.Open("file.7z"))
using (var archive = GZipArchive.Open("file.gz"))
// With options
var options = new ReaderOptions
{
Password = "password",
LeaveStreamOpen = true,
ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(932) }
};
using (var archive = ZipArchive.Open("encrypted.zip", options))
```
### Creating Archives
```csharp
// Writer Factory
using (var writer = WriterFactory.Open(stream, ArchiveType.Zip, CompressionType.Deflate))
{
// Write entries
}
// Specific writer
using (var archive = ZipArchive.Create())
using (var archive = TarArchive.Create())
using (var archive = GZipArchive.Create())
// With options
var options = new WriterOptions(CompressionType.Deflate)
{
CompressionLevel = 9,
LeaveStreamOpen = false
};
using (var archive = ZipArchive.Create())
{
archive.SaveTo("output.zip", options);
}
```
---
## Archive API Methods
### Reading/Extracting
```csharp
using (var archive = ZipArchive.Open("file.zip"))
{
// Get all entries
IEnumerable<IArchiveEntry> entries = archive.Entries;
// Find specific entry
var entry = archive.Entries.FirstOrDefault(e => e.Key == "file.txt");
// Extract all
archive.WriteToDirectory(@"C:\output", new ExtractionOptions
{
ExtractFullPath = true,
Overwrite = true
});
// Extract single entry
var entry = archive.Entries.First();
entry.WriteToFile(@"C:\output\file.txt");
entry.WriteToFile(@"C:\output\file.txt", new ExtractionOptions { Overwrite = true });
// Get entry stream
using (var stream = entry.OpenEntryStream())
{
stream.CopyTo(outputStream);
}
}
// Async extraction (requires IAsyncArchive)
using (var asyncArchive = await ZipArchive.OpenAsync("file.zip"))
{
await asyncArchive.WriteToDirectoryAsync(
@"C:\output",
new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
cancellationToken: cancellationToken
);
}
using (var stream = await entry.OpenEntryStreamAsync(cancellationToken))
{
// ...
}
```
### Entry Properties
```csharp
foreach (var entry in archive.Entries)
{
string name = entry.Key; // Entry name/path
long size = entry.Size; // Uncompressed size
long compressedSize = entry.CompressedSize;
bool isDir = entry.IsDirectory;
DateTime? modTime = entry.LastModifiedTime;
CompressionType compression = entry.CompressionType;
}
```
### Creating Archives
```csharp
using (var archive = ZipArchive.Create())
{
// Add file
archive.AddEntry("file.txt", @"C:\source\file.txt");
// Add multiple files
archive.AddAllFromDirectory(@"C:\source");
archive.AddAllFromDirectory(@"C:\source", "*.txt"); // Pattern
// Save to file
archive.SaveTo("output.zip", CompressionType.Deflate);
// Save to stream
archive.SaveTo(outputStream, new WriterOptions(CompressionType.Deflate)
{
CompressionLevel = 9,
LeaveStreamOpen = true
});
}
```
---
## Reader API Methods
### Forward-Only Reading
```csharp
using (var stream = File.OpenRead("file.zip"))
using (var reader = ReaderFactory.Open(stream))
{
while (reader.MoveToNextEntry())
{
IArchiveEntry entry = reader.Entry;
if (!entry.IsDirectory)
{
// Extract entry
reader.WriteEntryToDirectory(@"C:\output");
reader.WriteEntryToFile(@"C:\output\file.txt");
// Or get stream
using (var entryStream = reader.OpenEntryStream())
{
entryStream.CopyTo(outputStream);
}
}
}
}
// Async variants (use OpenAsync to get IAsyncReader)
using (var stream = File.OpenRead("file.zip"))
using (var reader = await ReaderFactory.OpenAsync(stream))
{
while (await reader.MoveToNextEntryAsync())
{
await reader.WriteEntryToFileAsync(
@"C:\output\" + reader.Entry.Key,
cancellationToken: cancellationToken
);
}
// Async extraction of all entries
await reader.WriteAllToDirectoryAsync(
@"C:\output",
new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
cancellationToken
);
}
```
---
## Writer API Methods
### Creating Archives (Streaming)
```csharp
using (var stream = File.Create("output.zip"))
using (var writer = WriterFactory.Open(stream, ArchiveType.Zip, CompressionType.Deflate))
{
// Write single file
using (var fileStream = File.OpenRead("source.txt"))
{
writer.Write("entry.txt", fileStream, DateTime.Now);
}
// Write directory
writer.WriteAll("C:\\source", "*", SearchOption.AllDirectories);
writer.WriteAll("C:\\source", "*.txt", SearchOption.TopDirectoryOnly);
// Async variants
using (var fileStream = File.OpenRead("source.txt"))
{
await writer.WriteAsync("entry.txt", fileStream, DateTime.Now, cancellationToken);
}
await writer.WriteAllAsync("C:\\source", "*", SearchOption.AllDirectories, cancellationToken);
}
```
---
## Common Options
### ReaderOptions
```csharp
var options = new ReaderOptions
{
Password = "password", // For encrypted archives
LeaveStreamOpen = true, // Don't close wrapped stream
ArchiveEncoding = new ArchiveEncoding // Custom character encoding
{
Default = Encoding.GetEncoding(932)
}
};
using (var archive = ZipArchive.Open("file.zip", options))
{
// ...
}
```
### WriterOptions
```csharp
var options = new WriterOptions(CompressionType.Deflate)
{
CompressionLevel = 9, // 0-9 for Deflate
LeaveStreamOpen = true, // Don't close stream
};
archive.SaveTo("output.zip", options);
```
### ExtractionOptions
```csharp
var options = new ExtractionOptions
{
ExtractFullPath = true, // Recreate directory structure
Overwrite = true, // Overwrite existing files
PreserveFileTime = true // Keep original timestamps
};
archive.WriteToDirectory(@"C:\output", options);
```
---
## Compression Types
### Available Compressions
```csharp
// For creating archives
CompressionType.None // No compression (store)
CompressionType.Deflate // DEFLATE (default for ZIP/GZip)
CompressionType.Deflate64 // Deflate64
CompressionType.BZip2 // BZip2
CompressionType.LZMA // LZMA (for 7Zip, LZip, XZ)
CompressionType.PPMd // PPMd (for ZIP)
CompressionType.Rar // RAR compression (read-only)
CompressionType.ZStandard // ZStandard
// Read-only formats (reader-only factories), listed here for completeness
ArchiveType.Arc
ArchiveType.Arj
ArchiveType.Ace
// For Tar archives with compression
// Use WriterFactory to create compressed tar archives
using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, CompressionType.GZip)) // Tar.GZip
using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, CompressionType.BZip2)) // Tar.BZip2
```
### Archive Types
```csharp
ArchiveType.Zip
ArchiveType.Tar
ArchiveType.GZip
ArchiveType.BZip2
ArchiveType.Rar
ArchiveType.SevenZip
ArchiveType.XZ
ArchiveType.ZStandard
```
---
## Patterns & Examples
### Extract with Error Handling
```csharp
try
{
using (var archive = ZipArchive.Open("archive.zip",
new ReaderOptions { Password = "password" }))
{
archive.WriteToDirectory(@"C:\output", new ExtractionOptions
{
ExtractFullPath = true,
Overwrite = true
});
}
}
catch (PasswordRequiredException)
{
Console.WriteLine("Password required");
}
catch (InvalidArchiveException)
{
Console.WriteLine("Archive is invalid");
}
catch (SharpCompressException ex)
{
Console.WriteLine($"Error: {ex.Message}");
}
```
### Extract with Progress
```csharp
var progress = new Progress<ProgressReport>(report =>
{
Console.WriteLine($"Extracting {report.EntryPath}: {report.PercentComplete}%");
});
var options = new ReaderOptions { Progress = progress };
using (var archive = ZipArchive.Open("archive.zip", options))
{
archive.WriteToDirectory(@"C:\output");
}
```
### Async Extract with Cancellation
```csharp
var cts = new CancellationTokenSource();
cts.CancelAfter(TimeSpan.FromMinutes(5));
try
{
using (var archive = await ZipArchive.OpenAsync("archive.zip"))
{
await archive.WriteToDirectoryAsync(
@"C:\output",
new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
cancellationToken: cts.Token
);
}
}
catch (OperationCanceledException)
{
Console.WriteLine("Extraction cancelled");
}
```
### Create with Custom Compression
```csharp
using (var archive = ZipArchive.Create())
{
archive.AddAllFromDirectory(@"D:\source");
// Fastest
archive.SaveTo("fast.zip", new WriterOptions(CompressionType.Deflate)
{
CompressionLevel = 1
});
// Balanced (default)
archive.SaveTo("normal.zip", CompressionType.Deflate);
// Best compression
archive.SaveTo("best.zip", new WriterOptions(CompressionType.Deflate)
{
CompressionLevel = 9
});
}
```
### Stream Processing (No File I/O)
```csharp
using (var outputStream = new MemoryStream())
using (var archive = ZipArchive.Create())
{
// Add content from memory
using (var contentStream = new MemoryStream(Encoding.UTF8.GetBytes("Hello")))
{
archive.AddEntry("file.txt", contentStream);
}
// Save to memory
archive.SaveTo(outputStream, CompressionType.Deflate);
// Get bytes
byte[] archiveBytes = outputStream.ToArray();
}
```
### Extract Specific Files
```csharp
using (var archive = ZipArchive.Open("archive.zip"))
{
var filesToExtract = new[] { "file1.txt", "file2.txt" };
foreach (var entry in archive.Entries.Where(e => filesToExtract.Contains(e.Key)))
{
entry.WriteToFile(@"C:\output\" + entry.Key);
}
}
```
### List Archive Contents
```csharp
using (var archive = ZipArchive.Open("archive.zip"))
{
foreach (var entry in archive.Entries)
{
if (entry.IsDirectory)
Console.WriteLine($"[DIR] {entry.Key}");
else
Console.WriteLine($"[FILE] {entry.Key} ({entry.Size} bytes)");
}
}
```
---
## Common Mistakes
### ✗ Wrong - Stream not disposed
```csharp
var stream = File.OpenRead("archive.zip");
var archive = ZipArchive.Open(stream);
archive.WriteToDirectory(@"C:\output");
// stream not disposed - leaked resource
```
### ✓ Correct - Using blocks
```csharp
using (var stream = File.OpenRead("archive.zip"))
using (var archive = ZipArchive.Open(stream))
{
archive.WriteToDirectory(@"C:\output");
}
// Both properly disposed
```
### ✗ Wrong - Mixing API styles
```csharp
// Loading entire archive then iterating
using (var archive = ZipArchive.Open("large.zip"))
{
var entries = archive.Entries.ToList(); // Loads all in memory
foreach (var e in entries)
{
e.WriteToFile(...); // Then extracts each
}
}
```
### ✓ Correct - Use Reader for large files
```csharp
// Streaming iteration
using (var stream = File.OpenRead("large.zip"))
using (var reader = ReaderFactory.Open(stream))
{
while (reader.MoveToNextEntry())
{
reader.WriteEntryToDirectory(@"C:\output");
}
}
```
---
## Related Documentation
- [USAGE.md](USAGE.md) - Complete code examples
- [FORMATS.md](FORMATS.md) - Supported formats
- [PERFORMANCE.md](PERFORMANCE.md) - API selection guide

docs/ARCHITECTURE.md (new file, 659 lines)

@@ -0,0 +1,659 @@
# SharpCompress Architecture Guide
This guide explains the internal architecture and design patterns of SharpCompress for contributors.
## Overview
SharpCompress is organized into three main layers:
```
┌─────────────────────────────────────────┐
│ User-Facing APIs (Top Layer) │
│ Archive, Reader, Writer Factories │
├─────────────────────────────────────────┤
│ Format-Specific Implementations │
│ ZipArchive, TarReader, GZipWriter, │
│ RarArchive, SevenZipArchive, etc. │
├─────────────────────────────────────────┤
│ Compression & Crypto (Bottom Layer) │
│ Deflate, LZMA, BZip2, AES, CRC32 │
└─────────────────────────────────────────┘
```
---
## Directory Structure
### `src/SharpCompress/`
#### `Archives/` - Archive Implementations
Contains `IArchive` implementations for seekable, random-access APIs.
**Key Files:**
- `AbstractArchive.cs` - Base class for all archives
- `IArchive.cs` - Archive interface definition
- `ArchiveFactory.cs` - Factory for opening archives
- Format-specific: `ZipArchive.cs`, `TarArchive.cs`, `RarArchive.cs`, `SevenZipArchive.cs`, `GZipArchive.cs`
**Use Archive API when:**
- Stream is seekable (file, memory)
- Need random access to entries
- Archive fits in memory
- Simplicity is important
#### `Readers/` - Reader Implementations
Contains `IReader` implementations for forward-only, non-seekable APIs.
**Key Files:**
- `AbstractReader.cs` - Base reader class
- `IReader.cs` - Reader interface
- `ReaderFactory.cs` - Auto-detection factory
- `ReaderOptions.cs` - Configuration for readers
- Format-specific: `ZipReader.cs`, `TarReader.cs`, `GZipReader.cs`, `RarReader.cs`, etc.
**Use Reader API when:**
- Stream is non-seekable (network, pipe, compressed)
- Processing large files
- Memory is limited
- Forward-only processing is acceptable (a helper combining both checklists follows)
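If you need to support both cases, a small helper can choose the API from the stream's capabilities. This is a sketch using `ArchiveFactory.Open` and `ReaderFactory.Open` as they appear elsewhere in this guide; namespaces may need adjusting for your version.
```csharp
using System.IO;
using SharpCompress.Archives;
using SharpCompress.Readers;

public static class ExtractionHelper
{
    // Seekable streams get the random-access Archive API; everything else
    // falls back to the forward-only Reader API.
    public static void Extract(Stream stream, string outputDirectory)
    {
        if (stream.CanSeek)
        {
            using var archive = ArchiveFactory.Open(stream);
            archive.WriteToDirectory(outputDirectory);
        }
        else
        {
            using var reader = ReaderFactory.Open(stream);
            while (reader.MoveToNextEntry())
            {
                if (!reader.Entry.IsDirectory)
                {
                    reader.WriteEntryToDirectory(outputDirectory);
                }
            }
        }
    }
}
```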
#### `Writers/` - Writer Implementations
Contains `IWriter` implementations for forward-only writing.
**Key Files:**
- `AbstractWriter.cs` - Base writer class
- `IWriter.cs` - Writer interface
- `WriterFactory.cs` - Factory for creating writers
- `WriterOptions.cs` - Configuration for writers
- Format-specific: `ZipWriter.cs`, `TarWriter.cs`, `GZipWriter.cs`
#### `Factories/` - Format Detection
Factory classes for auto-detecting archive format and creating appropriate readers/writers.
**Key Files:**
- `Factory.cs` - Base factory class
- `IFactory.cs` - Factory interface
- Format-specific: `ZipFactory.cs`, `TarFactory.cs`, `RarFactory.cs`, etc.
**How It Works** (a simplified sketch follows the list):
1. `ReaderFactory.Open(stream)` probes stream signatures
2. Identifies format by magic bytes
3. Creates appropriate reader instance
4. Returns generic `IReader` interface
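The probe-and-match step can be pictured with a simplified model. The types below are hypothetical stand-ins, not the library's real internals; they only mirror the `Matches` shape used in the `NewFormatFactory` example later in this guide.
```csharp
using System;
using System.IO;

// Hypothetical stand-in for illustration only - not SharpCompress's real type.
public interface ISignatureFactory
{
    bool Matches(ReadOnlySpan<byte> signature);
}

public static class FormatProbe
{
    // Reads a small header, rewinds, and returns the first factory whose
    // magic-byte signature matches - the same flow described in the list above.
    public static ISignatureFactory Detect(Stream stream, ISignatureFactory[] factories)
    {
        var probe = new byte[16];
        var read = stream.Read(probe, 0, probe.Length);
        stream.Position = 0; // rewind so the chosen reader sees the full stream

        foreach (var factory in factories)
        {
            if (factory.Matches(probe.AsSpan(0, read)))
            {
                return factory;
            }
        }
        throw new InvalidOperationException("No factory recognised the signature.");
    }
}
```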
#### `Common/` - Shared Types
Common types, options, and enumerations used across formats.
**Key Files:**
- `IEntry.cs` - Entry interface (file within archive)
- `Entry.cs` - Entry implementation
- `ArchiveType.cs` - Enum for archive formats
- `CompressionType.cs` - Enum for compression methods
- `ArchiveEncoding.cs` - Character encoding configuration
- `ExtractionOptions.cs` - Extraction configuration
- Format-specific headers: `Zip/Headers/`, `Tar/Headers/`, `Rar/Headers/`, etc.
#### `Compressors/` - Compression Algorithms
Low-level compression streams implementing specific algorithms.
**Algorithms:**
- `Deflate/` - DEFLATE compression (Zip default)
- `BZip2/` - BZip2 compression
- `LZMA/` - LZMA compression (7Zip, XZ, LZip)
- `PPMd/` - Prediction by Partial Matching (Zip, 7Zip)
- `ZStandard/` - ZStandard compression (decompression only)
- `Xz/` - XZ format (decompression only)
- `Rar/` - RAR-specific unpacking
- `Arj/`, `Arc/`, `Ace/` - Legacy format decompression
- `Filters/` - BCJ/BCJ2 filters for executable compression
**Each Compressor** (direct-usage sketch below):
- Implements a `Stream` subclass
- Provides both compression and decompression
- Some are read-only (decompression only)
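Because each compressor is just a `Stream`, it can also be used directly, without the archive layer. A minimal sketch, assuming `SharpCompress.Compressors.Deflate.GZipStream` exposes a `(Stream, CompressionMode)` constructor; check the sources under `Compressors/Deflate` if the signature differs in your version.
```csharp
using System.IO;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;

// Decompress a raw .gz payload: plain Stream semantics, no archive metadata.
using (var input = File.OpenRead("data.bin.gz"))
using (var gzip = new GZipStream(input, CompressionMode.Decompress))
using (var output = File.Create("data.bin"))
{
    gzip.CopyTo(output);
}
```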
#### `Crypto/` - Encryption & Hashing
Cryptographic functions and stream wrappers.
**Key Files:**
- `Crc32Stream.cs` - CRC32 calculation wrapper
- `BlockTransformer.cs` - Block cipher transformations
- AES, PKWare, WinZip encryption implementations
#### `IO/` - Stream Utilities
Stream wrappers and utilities.
**Key Classes:**
- `SharpCompressStream` - Base stream class
- `ProgressReportingStream` - Progress tracking wrapper
- `MarkingBinaryReader` - Binary reader with position marks
- `BufferedSubStream` - Buffered read-only substream
- `ReadOnlySubStream` - Read-only view of parent stream
- `NonDisposingStream` - Prevents wrapped stream disposal
---
## Design Patterns
### 1. Factory Pattern
**Purpose:** Auto-detect format and create appropriate reader/writer.
**Example:**
```csharp
// User calls factory
using (var reader = ReaderFactory.Open(stream)) // Returns IReader
{
while (reader.MoveToNextEntry())
{
// Process entry
}
}
// Behind the scenes:
// 1. Factory.Open() probes stream signatures
// 2. Detects format (Zip, Tar, Rar, etc.)
// 3. Creates appropriate reader (ZipReader, TarReader, etc.)
// 4. Returns as generic IReader interface
```
**Files:**
- `src/SharpCompress/Factories/ReaderFactory.cs`
- `src/SharpCompress/Factories/WriterFactory.cs`
- `src/SharpCompress/Factories/ArchiveFactory.cs`
### 2. Strategy Pattern
**Purpose:** Encapsulate compression algorithms as swappable strategies.
**Example:**
```csharp
// Different compression strategies
CompressionType.Deflate // DEFLATE
CompressionType.BZip2 // BZip2
CompressionType.LZMA // LZMA
CompressionType.PPMd // PPMd
// Writer uses strategy pattern
var archive = ZipArchive.Create();
archive.SaveTo("output.zip", CompressionType.Deflate); // Use Deflate
archive.SaveTo("output.bz2", CompressionType.BZip2); // Use BZip2
```
**Files:**
- `src/SharpCompress/Compressors/` - Strategy implementations
### 3. Decorator Pattern
**Purpose:** Wrap streams with additional functionality.
**Example:**
```csharp
// Progress reporting decorator
var progressStream = new ProgressReportingStream(baseStream, progressReporter);
progressStream.Read(buffer, 0, buffer.Length); // Reports progress
// Non-disposing decorator
var nonDisposingStream = new NonDisposingStream(baseStream);
using (var compressor = new DeflateStream(nonDisposingStream))
{
// baseStream won't be disposed when compressor is disposed
}
```
**Files:**
- `src/SharpCompress/IO/ProgressReportingStream.cs`
- `src/SharpCompress/IO/NonDisposingStream.cs`
### 4. Template Method Pattern
**Purpose:** Define algorithm skeleton in base class, let subclasses fill details.
**Example:**
```csharp
// AbstractArchive defines common archive operations
public abstract class AbstractArchive : IArchive
{
// Template methods
public virtual void WriteToDirectory(string destinationDirectory, ExtractionOptions options)
{
// Common extraction logic
foreach (var entry in Entries)
{
// Call subclass method
entry.WriteToFile(destinationPath, options);
}
}
// Subclasses override format-specific details
protected abstract Entry CreateEntry(EntryData data);
}
```
**Files:**
- `src/SharpCompress/Archives/AbstractArchive.cs`
- `src/SharpCompress/Readers/AbstractReader.cs`
### 5. Iterator Pattern
**Purpose:** Provide sequential access to entries.
**Example:**
```csharp
// Archive API - provides collection
IEnumerable<IEntry> entries = archive.Entries;
foreach (var entry in entries)
{
// Random access - entries already in memory
}
// Reader API - provides iterator
IReader reader = ReaderFactory.Open(stream);
while (reader.MoveToNextEntry())
{
// Forward-only iteration - one entry at a time
var entry = reader.Entry;
}
```
---
## Key Interfaces
### IArchive - Random Access API
```csharp
public interface IArchive : IDisposable
{
IEnumerable<IEntry> Entries { get; }
void WriteToDirectory(string destinationDirectory,
ExtractionOptions options = null);
IEntry FirstOrDefault(Func<IEntry, bool> predicate);
// ... format-specific methods
}
```
**Implementations:** `ZipArchive`, `TarArchive`, `RarArchive`, `SevenZipArchive`, `GZipArchive`
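Since all of these implement `IArchive`, calling code can stay format-agnostic. A short sketch, assuming `ArchiveFactory.Open` accepts a file path (it also takes a `Stream`):
```csharp
using System;
using SharpCompress.Archives;

// Format detection happens inside the factory; the caller only sees IArchive.
using (IArchive archive = ArchiveFactory.Open("input.7z"))
{
    foreach (var entry in archive.Entries)
    {
        Console.WriteLine($"{entry.Key} ({entry.Size} bytes)");
    }
}
```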
### IReader - Forward-Only API
```csharp
public interface IReader : IDisposable
{
IEntry Entry { get; }
bool MoveToNextEntry();
void WriteEntryToDirectory(string destinationDirectory,
ExtractionOptions options = null);
Stream OpenEntryStream();
// ... async variants
}
```
**Implementations:** `ZipReader`, `TarReader`, `RarReader`, `GZipReader`, etc.
### IWriter - Writing API
```csharp
public interface IWriter : IDisposable
{
void Write(string entryPath, Stream source,
DateTime? modificationTime = null);
void WriteAll(string sourceDirectory, string searchPattern,
SearchOption searchOption);
// ... async variants
}
```
**Implementations:** `ZipWriter`, `TarWriter`, `GZipWriter`
### IEntry - Archive Entry
```csharp
public interface IEntry
{
string Key { get; }
uint Size { get; }
uint CompressedSize { get; }
bool IsDirectory { get; }
DateTime? LastModifiedTime { get; }
CompressionType CompressionType { get; }
void WriteToFile(string fullPath, ExtractionOptions options = null);
void WriteToStream(Stream destinationStream);
Stream OpenEntryStream();
// ... async variants
}
```
---
## Adding Support for a New Format
### Step 1: Understand the Format
- Research format specification
- Understand compression/encryption used
- Study existing similar formats in codebase
### Step 2: Create Format Structure Classes
**Create:** `src/SharpCompress/Common/NewFormat/`
```csharp
// Headers and data structures
public class NewFormatHeader
{
public uint Magic { get; set; }
public ushort Version { get; set; }
// ... other fields
public static NewFormatHeader Read(BinaryReader reader)
{
// Deserialize from binary
}
}
public class NewFormatEntry
{
public string FileName { get; set; }
public uint CompressedSize { get; set; }
public uint UncompressedSize { get; set; }
// ... other fields
}
```
### Step 3: Create Archive Implementation
**Create:** `src/SharpCompress/Archives/NewFormat/NewFormatArchive.cs`
```csharp
public class NewFormatArchive : AbstractArchive
{
private NewFormatHeader _header;
private List<NewFormatEntry> _entries;
public static NewFormatArchive Open(Stream stream)
{
var archive = new NewFormatArchive();
archive._header = NewFormatHeader.Read(stream);
archive.LoadEntries(stream);
return archive;
}
public override IEnumerable<IEntry> Entries => _entries.Select(e => new Entry(e));
protected override Stream OpenEntryStream(Entry entry)
{
// Return decompressed stream for entry
}
// ... other abstract method implementations
}
```
### Step 4: Create Reader Implementation
**Create:** `src/SharpCompress/Readers/NewFormat/NewFormatReader.cs`
```csharp
public class NewFormatReader : AbstractReader
{
private NewFormatHeader _header;
private BinaryReader _reader;
public NewFormatReader(Stream stream)
{
_reader = new BinaryReader(stream);
_header = NewFormatHeader.Read(_reader);
}
public override bool MoveToNextEntry()
{
// Read next entry header
if (!_reader.BaseStream.CanRead) return false;
var entryData = NewFormatEntry.Read(_reader);
// ... set this.Entry
return entryData != null;
}
// ... other abstract method implementations
}
```
### Step 5: Create Factory
**Create:** `src/SharpCompress/Factories/NewFormatFactory.cs`
```csharp
public class NewFormatFactory : Factory, IArchiveFactory, IReaderFactory
{
// Archive format magic bytes (signature)
private static readonly byte[] NewFormatSignature = new byte[] { 0x4E, 0x46 }; // "NF"
public static NewFormatFactory Instance { get; } = new();
public IArchive CreateArchive(Stream stream)
=> NewFormatArchive.Open(stream);
public IReader CreateReader(Stream stream, ReaderOptions options)
=> new NewFormatReader(stream) { Options = options };
public bool Matches(Stream stream, ReadOnlySpan<byte> signature)
=> signature.StartsWith(NewFormatSignature);
}
```
### Step 6: Register Factory
**Update:** `src/SharpCompress/Factories/ArchiveFactory.cs`
```csharp
private static readonly IFactory[] Factories =
{
ZipFactory.Instance,
TarFactory.Instance,
RarFactory.Instance,
SevenZipFactory.Instance,
GZipFactory.Instance,
NewFormatFactory.Instance, // Add here
// ... other factories
};
```
### Step 7: Add Tests
**Create:** `tests/SharpCompress.Test/NewFormat/NewFormatTests.cs`
```csharp
public class NewFormatTests : TestBase
{
[Fact]
public void NewFormat_Extracts_Successfully()
{
var archivePath = Path.Combine(TEST_ARCHIVES_PATH, "archive.newformat");
using (var archive = NewFormatArchive.Open(archivePath))
{
archive.WriteToDirectory(SCRATCH_FILES_PATH);
// Assert extraction
}
}
[Fact]
public void NewFormat_Reader_Works()
{
var archivePath = Path.Combine(TEST_ARCHIVES_PATH, "archive.newformat");
using (var stream = File.OpenRead(archivePath))
using (var reader = new NewFormatReader(stream))
{
Assert.True(reader.MoveToNextEntry());
Assert.NotNull(reader.Entry);
}
}
}
```
### Step 8: Add Test Archives
Place test files in `tests/TestArchives/Archives/NewFormat/` directory.
### Step 9: Document
Update `docs/FORMATS.md` with format support information.
---
## Compression Algorithm Implementation
### Creating a New Compression Stream
**Example:** Creating `CustomStream` for a custom compression algorithm
```csharp
public class CustomStream : Stream
{
private readonly Stream _baseStream;
private readonly bool _leaveOpen;
public CustomStream(Stream baseStream, bool leaveOpen = false)
{
_baseStream = baseStream;
_leaveOpen = leaveOpen;
}
public override int Read(byte[] buffer, int offset, int count)
{
// Decompress data from _baseStream into buffer
// Return number of decompressed bytes
}
public override void Write(byte[] buffer, int offset, int count)
{
// Compress data from buffer into _baseStream
}
protected override void Dispose(bool disposing)
{
if (disposing && !_leaveOpen)
{
_baseStream?.Dispose();
}
base.Dispose(disposing);
}
}
```
---
## Stream Handling Best Practices
### Disposal Pattern
```csharp
// Correct: Nested using blocks
using (var fileStream = File.OpenRead("archive.zip"))
using (var archive = ZipArchive.Open(fileStream))
{
archive.WriteToDirectory(@"C:\output");
}
// Both archive and fileStream properly disposed
// Correct: Using with options
var options = new ReaderOptions { LeaveStreamOpen = true };
var stream = File.OpenRead("archive.zip");
using (var archive = ZipArchive.Open(stream, options))
{
archive.WriteToDirectory(@"C:\output");
}
stream.Dispose(); // Manually dispose if LeaveStreamOpen = true
```
### NonDisposingStream Wrapper
```csharp
// Prevent unwanted stream closure
var baseStream = File.OpenRead("data.bin");
var nonDisposing = new NonDisposingStream(baseStream);
using (var compressor = new DeflateStream(nonDisposing))
{
// Compressor won't close baseStream when disposed
}
// baseStream still usable
baseStream.Position = 0; // Works
baseStream.Dispose(); // Manual disposal
```
---
## Performance Considerations
### Memory Efficiency
1. **Avoid loading entire archive in memory** - Use Reader API for large files
2. **Process entries sequentially** - Especially for solid archives
3. **Use appropriate buffer sizes** - Larger buffers for network I/O
4. **Dispose streams promptly** - Free resources when done (all four points are combined in the sketch below)
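A sketch that combines the four points, using the Reader API with an explicit copy buffer (assumes `C:\output` exists and entry paths are flat; real code should create directories from `Entry.Key`):
```csharp
using System.IO;
using SharpCompress.Readers;

using (var stream = File.OpenRead("large.tar.gz"))
using (var reader = ReaderFactory.Open(stream))
{
    while (reader.MoveToNextEntry())
    {
        if (reader.Entry.IsDirectory)
        {
            continue;
        }
        // One entry at a time, sequentially, with a larger buffer for throughput.
        using (var entryStream = reader.OpenEntryStream())
        using (var output = File.Create(Path.Combine(@"C:\output", reader.Entry.Key)))
        {
            entryStream.CopyTo(output, 262144); // 256 KB
        }
        // Both streams are disposed before the next entry is read.
    }
}
```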
### Algorithm Selection
1. **Archive API** - Fast for small archives with random access
2. **Reader API** - Efficient for large files or streaming
3. **Solid archives** - Sequential extraction much faster
4. **Compression levels** - Trade-off between speed and size
---
## Testing Guidelines
### Test Coverage
1. **Happy path** - Normal extraction works
2. **Edge cases** - Empty archives, single file, many files
3. **Corrupted data** - Handle gracefully
4. **Error cases** - Missing passwords, unsupported compression
5. **Async operations** - Both sync and async code paths
### Test Archives
- Use `tests/TestArchives/` for test data
- Create format-specific subdirectories
- Include encrypted, corrupted, and edge case archives
- Don't recreate existing archives
### Test Patterns
```csharp
[Fact]
public void Archive_Extraction_Works()
{
// Arrange
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "test.zip");
// Act
using (var archive = ZipArchive.Open(testArchive))
{
archive.WriteToDirectory(SCRATCH_FILES_PATH);
}
// Assert
Assert.True(File.Exists(Path.Combine(SCRATCH_FILES_PATH, "file.txt")));
}
```
---
## Related Documentation
- [AGENTS.md](../AGENTS.md) - Development guidelines
- [FORMATS.md](FORMATS.md) - Supported formats

docs/ENCODING.md (new file, 610 lines)

@@ -0,0 +1,610 @@
# SharpCompress Character Encoding Guide
This guide explains how SharpCompress handles character encoding for archive entries (filenames, comments, etc.).
## Overview
Most archive formats store filenames and metadata as bytes. SharpCompress must convert these bytes to strings using the appropriate character encoding.
**Common Problem:** Archives created on systems with non-UTF8 encodings (especially Japanese, Chinese systems) appear with corrupted filenames when extracted on systems that assume UTF8.
---
## ArchiveEncoding Class
### Basic Usage
```csharp
using SharpCompress.Common;
using SharpCompress.Readers;
// Configure encoding before opening archive
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.GetEncoding(932) // cp932 for Japanese
}
};
using (var archive = ZipArchive.Open("japanese.zip", options))
{
foreach (var entry in archive.Entries)
{
Console.WriteLine(entry.Key); // Now shows correct characters
}
}
```
### ArchiveEncoding Properties
| Property | Purpose |
|----------|---------|
| `Default` | Default encoding for filenames (fallback) |
| `CustomDecoder` | Custom decoding function for special cases |
### Setting for Different APIs
**Archive API:**
```csharp
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(932) }
};
using (var archive = ZipArchive.Open("file.zip", options))
{
// Use archive with correct encoding
}
```
**Reader API:**
```csharp
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(932) }
};
using (var stream = File.OpenRead("file.zip"))
using (var reader = ReaderFactory.Open(stream, options))
{
while (reader.MoveToNextEntry())
{
// Filenames decoded correctly
}
}
```
---
## Common Encodings
### Asian Encodings
#### cp932 (Japanese)
```csharp
// Windows-31J, Shift-JIS variant used on Japanese Windows
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.GetEncoding(932)
}
};
using (var archive = ZipArchive.Open("japanese.zip", options))
{
// Correctly decodes Japanese filenames
}
```
**When to use:**
- Archives from Japanese Windows systems
- Files with Japanese characters in names
#### gb2312 (Simplified Chinese)
```csharp
// Simplified Chinese
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.GetEncoding("gb2312")
}
};
```
#### gbk (Extended Simplified Chinese)
```csharp
// Extended Simplified Chinese (more characters than gb2312)
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.GetEncoding("gbk")
}
};
```
#### big5 (Traditional Chinese)
```csharp
// Traditional Chinese (Taiwan, Hong Kong)
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.GetEncoding("big5")
}
};
```
#### euc-jp (Japanese, Unix)
```csharp
// Extended Unix Code for Japanese
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.GetEncoding("eucjp")
}
};
```
#### euc-kr (Korean)
```csharp
// Extended Unix Code for Korean
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.GetEncoding("euc-kr")
}
};
```
### Western European Encodings
#### iso-8859-1 (Latin-1)
```csharp
// Western European (includes accented characters)
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.GetEncoding("iso-8859-1")
}
};
```
**When to use:**
- Archives from French, German, Spanish systems
- Files with accented characters (é, ñ, ü, etc.)
#### cp1252 (Windows-1252)
```csharp
// Windows Western European
// Very similar to iso-8859-1 but with additional printable characters
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.GetEncoding("cp1252")
}
};
```
**When to use:**
- Archives from older Western European Windows systems
- Files with smart quotes and other Windows-specific characters
#### iso-8859-15 (Latin-9)
```csharp
// Western European with Euro symbol support
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.GetEncoding("iso-8859-15")
}
};
```
### Cyrillic Encodings
#### cp1251 (Windows Cyrillic)
```csharp
// Russian, Serbian, Bulgarian, etc.
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.GetEncoding("cp1251")
}
};
```
#### koi8-r (KOI8 Russian)
```csharp
// Russian (Unix standard)
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.GetEncoding("koi8-r")
}
};
```
### UTF Encodings (Modern)
#### UTF-8 (Default)
```csharp
// Modern standard - usually correct for new archives
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.UTF8
}
};
```
#### UTF-16
```csharp
// Unicode - rarely used in archives
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.Unicode
}
};
```
---
## Encoding Auto-Detection
SharpCompress attempts to auto-detect encoding, but this isn't always reliable:
```csharp
// Auto-detection (default)
using (var archive = ZipArchive.Open("file.zip")) // Uses UTF8 by default
{
// May show corrupted characters if archive uses different encoding
}
// Explicit encoding (more reliable)
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(932) }
};
using (var archive = ZipArchive.Open("file.zip", options))
{
// Correct characters displayed
}
```
### When Manual Override is Needed
| Situation | Solution |
|-----------|----------|
| Archive shows corrupted characters | Specify the encoding explicitly |
| Archives from specific region | Use that region's encoding |
| Mixed encodings in archive | Use CustomDecoder |
| Testing with international files | Try different encodings |
---
## Custom Decoder
For complex scenarios where a single encoding isn't sufficient:
### Basic Custom Decoder
```csharp
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
CustomDecoder = (data, offset, length) =>
{
// Custom decoding logic
var bytes = new byte[length];
Array.Copy(data, offset, bytes, 0, length);
// Try UTF8 first
try
{
return Encoding.UTF8.GetString(bytes);
}
catch
{
// Fallback to cp932 if UTF8 fails
return Encoding.GetEncoding(932).GetString(bytes);
}
}
}
};
using (var archive = ZipArchive.Open("mixed.zip", options))
{
foreach (var entry in archive.Entries)
{
Console.WriteLine(entry.Key); // Uses custom decoder
}
}
```
### Advanced: Detect Encoding by Content
```csharp
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
CustomDecoder = DetectAndDecode
}
};
private static string DetectAndDecode(byte[] data, int offset, int length)
{
var bytes = new byte[length];
Array.Copy(data, offset, bytes, 0, length);
// Try UTF8 (most modern archives)
try
{
var str = Encoding.UTF8.GetString(bytes);
// Verify it decoded correctly (no replacement characters)
if (!str.Contains('\uFFFD'))
return str;
}
catch { }
// Try cp932 (Japanese)
try
{
var str = Encoding.GetEncoding(932).GetString(bytes);
if (!str.Contains('\uFFFD'))
return str;
}
catch { }
// Fallback to iso-8859-1 (always succeeds)
return Encoding.GetEncoding("iso-8859-1").GetString(bytes);
}
```
---
## Code Examples
### Extract Archive with Japanese Filenames
```csharp
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.GetEncoding(932) // cp932
}
};
using (var archive = ZipArchive.Open("japanese_files.zip", options))
{
archive.WriteToDirectory(@"C:\output", new ExtractionOptions
{
ExtractFullPath = true,
Overwrite = true
});
}
// Files extracted with correct Japanese names
```
### Extract Archive with Western European Filenames
```csharp
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.GetEncoding("iso-8859-1")
}
};
using (var archive = ZipArchive.Open("french_files.zip", options))
{
archive.WriteToDirectory(@"C:\output");
}
// Accented characters (é, è, ê, etc.) display correctly
```
### Extract Archive with Chinese Filenames
```csharp
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.GetEncoding("gbk") // Simplified Chinese
}
};
using (var archive = ZipArchive.Open("chinese_files.zip", options))
{
archive.WriteToDirectory(@"C:\output");
}
```
### Extract Archive with Russian Filenames
```csharp
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.GetEncoding("cp1251") // Windows Cyrillic
}
};
using (var archive = ZipArchive.Open("russian_files.zip", options))
{
archive.WriteToDirectory(@"C:\output");
}
```
### Reader API with Encoding
```csharp
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.GetEncoding(932)
}
};
using (var stream = File.OpenRead("japanese.zip"))
using (var reader = ReaderFactory.Open(stream, options))
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
Console.WriteLine(reader.Entry.Key); // Correct characters
reader.WriteEntryToDirectory(@"C:\output");
}
}
}
```
---
## Creating Archives with Correct Encoding
When creating archives, SharpCompress uses UTF8 by default (recommended):
```csharp
// Create with UTF8 (default, recommended)
using (var archive = ZipArchive.Create())
{
archive.AddAllFromDirectory(@"D:\my_files");
archive.SaveTo("output.zip", CompressionType.Deflate);
// Archives created with UTF8 encoding
}
```
If you need to create archives for systems that expect specific encodings:
```csharp
// Note: SharpCompress Writer API uses UTF8 for encoding
// To create archives with other encodings, consider:
// 1. Let users on those systems create archives
// 2. Use system tools (7-Zip, WinRAR) with desired encoding
// 3. Post-process archives if absolutely necessary
// For now, recommend modern UTF8-based archives
```
---
## Troubleshooting Encoding Issues
### Filenames Show Question Marks (?)
```
✗ Wrong encoding detected
test文件.txt → test???.txt
```
**Solution:** Specify correct encoding explicitly
```csharp
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
Default = Encoding.GetEncoding("gbk") // Try different encodings
}
};
```
### Filenames Show Replacement Character (�)
```
✗ Invalid bytes for selected encoding
café.txt → caf�.txt
```
**Solution:**
1. Try a different encoding (see Common Encodings table)
2. Use CustomDecoder with fallback encoding
3. Archive might be corrupted
### Mixed Encodings in Single Archive
```csharp
// Use CustomDecoder to handle mixed encodings
var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding
{
CustomDecoder = (data, offset, length) =>
{
// Try multiple encodings in priority order
var bytes = new byte[length];
Array.Copy(data, offset, bytes, 0, length);
foreach (var encoding in new[]
{
Encoding.UTF8,
Encoding.GetEncoding(932),
Encoding.GetEncoding("iso-8859-1")
})
{
try
{
var str = encoding.GetString(bytes);
if (!str.Contains('\uFFFD'))
return str;
}
catch { }
}
// Final fallback
return Encoding.GetEncoding("iso-8859-1").GetString(bytes);
}
}
};
```
---
## Encoding Reference Table
| Encoding | Code | Use Case |
|----------|------|----------|
| UTF-8 | (default) | Modern archives, recommended |
| cp932 | 932 | Japanese Windows |
| gb2312 | "gb2312" | Simplified Chinese |
| gbk | "gbk" | Extended Simplified Chinese |
| big5 | "big5" | Traditional Chinese |
| iso-8859-1 | "iso-8859-1" | Western European |
| cp1252 | "cp1252" | Windows Western European |
| cp1251 | "cp1251" | Russian/Cyrillic |
| euc-jp | "euc-jp" | Japanese Unix |
| euc-kr | "euc-kr" | Korean |
---
## Best Practices
1. **Use UTF-8 for new archives** - Most modern systems support it
2. **Ask the archive creator** - When receiving archives with corrupted names
3. **Provide encoding options** - If your app handles user archives
4. **Document your assumption** - Tell users what encoding you're using
5. **Test with international files** - Before releasing production code
---
## Related Documentation
- [USAGE.md](USAGE.md#extract-zip-which-has-non-utf8-encoded-filenamycp932) - Usage examples

docs/PERFORMANCE.md (new file, 474 lines)

@@ -0,0 +1,474 @@
# SharpCompress Performance Guide
This guide helps you optimize SharpCompress for performance in various scenarios.
## API Selection Guide
### Archive API vs Reader API
Choose the right API based on your use case:
| Aspect | Archive API | Reader API |
|--------|------------|-----------|
| **Stream Type** | Seekable only | Non-seekable OK |
| **Memory Usage** | All entries in memory | One entry at a time |
| **Random Access** | ✓ Yes | ✗ No |
| **Best For** | Small-to-medium archives | Large or streaming data |
| **Performance** | Fast for random access | Better for large files |
### Archive API (Fast for Random Access)
```csharp
// Use when:
// - Archive fits in memory
// - You need random access to entries
// - Stream is seekable (file, MemoryStream)
using (var archive = ZipArchive.Open("archive.zip"))
{
// Random access - all entries available
var specific = archive.Entries.FirstOrDefault(e => e.Key == "file.txt");
if (specific != null)
{
specific.WriteToFile(@"C:\output\file.txt");
}
}
```
**Performance Characteristics:**
- ✓ Instant entry lookup
- ✓ Parallel extraction possible
- ✗ Entire archive in memory
- ✗ Can't process while downloading
### Reader API (Best for Large Files)
```csharp
// Use when:
// - Processing large archives (>100 MB)
// - Streaming from network/pipe
// - Memory is constrained
// - Forward-only processing is acceptable
using (var stream = File.OpenRead("large.zip"))
using (var reader = ReaderFactory.Open(stream))
{
while (reader.MoveToNextEntry())
{
// Process one entry at a time
reader.WriteEntryToDirectory(@"C:\output");
}
}
```
**Performance Characteristics:**
- ✓ Minimal memory footprint
- ✓ Works with non-seekable streams
- ✓ Can process while downloading
- ✗ Forward-only (no random access)
- ✗ Entry lookup requires iteration
---
## Buffer Sizing
### Understanding Buffers
SharpCompress uses internal buffers for reading compressed data. Buffer size affects:
- **Speed:** Larger buffers = fewer I/O operations = faster
- **Memory:** Larger buffers = higher memory usage
### Recommended Buffer Sizes
| Scenario | Size | Notes |
|----------|------|-------|
| Embedded/IoT devices | 4-8 KB | Minimal memory usage |
| Memory-constrained | 16-32 KB | Conservative default |
| Standard use (default) | 64 KB | Recommended default |
| Large file streaming | 256 KB | Better throughput |
| High-speed SSD | 512 KB - 1 MB | Maximum throughput |
### How Buffer Size Affects Performance
```csharp
// SharpCompress manages buffers internally
// You can't directly set buffer size, but you can:
// 1. Use Stream.CopyTo with explicit buffer size
using (var entryStream = reader.OpenEntryStream())
using (var fileStream = File.Create(@"C:\output\file.txt"))
{
// 64 KB buffer (default)
entryStream.CopyTo(fileStream);
// Or specify larger buffer for faster copy
entryStream.CopyTo(fileStream, bufferSize: 262144); // 256 KB
}
// 2. Use custom buffer for writing
using (var entryStream = reader.OpenEntryStream())
using (var fileStream = File.Create(@"C:\output\file.txt"))
{
byte[] buffer = new byte[262144]; // 256 KB
int bytesRead;
while ((bytesRead = entryStream.Read(buffer, 0, buffer.Length)) > 0)
{
fileStream.Write(buffer, 0, bytesRead);
}
}
```
---
## Streaming Large Files
### Non-Seekable Stream Patterns
For processing archives from downloads or pipes:
```csharp
// Download stream (non-seekable)
using (var httpStream = await httpClient.GetStreamAsync(url))
using (var reader = ReaderFactory.Open(httpStream))
{
// Process entries as they arrive
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
reader.WriteEntryToDirectory(@"C:\output");
}
}
}
```
**Performance Tips:**
- Don't try to buffer the entire stream
- Process entries immediately
- Use async APIs for better responsiveness
### Download-Then-Extract vs Streaming
Choose based on your constraints:
| Approach | When to Use |
|----------|------------|
| **Download then extract** | Moderate size, need random access |
| **Stream during download** | Large files, bandwidth limited, memory constrained |
```csharp
// Download then extract (requires disk space)
var archivePath = await DownloadFile(url, @"C:\temp\archive.zip");
using (var archive = ZipArchive.Open(archivePath))
{
archive.WriteToDirectory(@"C:\output");
}
// Stream during download (on-the-fly extraction)
using (var httpStream = await httpClient.GetStreamAsync(url))
using (var reader = ReaderFactory.Open(httpStream))
{
while (reader.MoveToNextEntry())
{
reader.WriteEntryToDirectory(@"C:\output");
}
}
```
---
## Solid Archive Optimization
### Why Solid Archives Are Slow
Solid archives (Rar, 7Zip) group files together in a single compressed stream:
```
Solid Archive Layout:
[Header] [Compressed Stream] [Footer]
├─ File1 compressed data
├─ File2 compressed data
├─ File3 compressed data
└─ File4 compressed data
```
Extracting File3 requires decompressing File1 and File2 first.
### Sequential vs Random Extraction
**Random Extraction (Slow):**
```csharp
using (var archive = RarArchive.Open("solid.rar"))
{
foreach (var entry in archive.Entries)
{
entry.WriteToFile(@"C:\output\" + entry.Key); // ✗ Slow!
// Each entry triggers full decompression from start
}
}
```
**Sequential Extraction (Fast):**
```csharp
using (var archive = RarArchive.Open("solid.rar"))
{
// Method 1: Use WriteToDirectory (recommended)
archive.WriteToDirectory(@"C:\output", new ExtractionOptions
{
ExtractFullPath = true,
Overwrite = true
});
    // Method 2: Use ExtractAllEntries (returns a forward-only reader over the archive)
    using (var sequentialReader = archive.ExtractAllEntries())
    {
        while (sequentialReader.MoveToNextEntry())
        {
            sequentialReader.WriteEntryToDirectory(@"C:\output");
        }
    }
// Method 3: Use Reader API (also sequential)
using (var reader = RarReader.Open(File.OpenRead("solid.rar")))
{
while (reader.MoveToNextEntry())
{
reader.WriteEntryToDirectory(@"C:\output");
}
}
}
```
**Performance Impact:**
- Random extraction: O(n²) total work - for a solid archive with 1,000 entries, extracting each entry individually decompresses on the order of 500,000 entry payloads in total
- Sequential extraction: O(n) - a single decompression pass, typically 10-100x faster
### Best Practices for Solid Archives
1. **Always extract sequentially** when possible
2. **Use Reader API** for large solid archives
3. **Process entries in order** from the archive
4. **Consider using 7Zip command-line** for scripted extractions
---
## Compression Level Trade-offs
### Deflate/GZip Levels
```csharp
// Level 1 = Fastest, largest size
// Level 6 = Default (balanced)
// Level 9 = Slowest, best compression
// Write with different compression levels
using (var archive = ZipArchive.Create())
{
archive.AddAllFromDirectory(@"D:\data");
// Fast compression (level 1)
archive.SaveTo("fast.zip", new WriterOptions(CompressionType.Deflate)
{
CompressionLevel = 1
});
// Default compression (level 6)
archive.SaveTo("default.zip", CompressionType.Deflate);
// Best compression (level 9)
archive.SaveTo("best.zip", new WriterOptions(CompressionType.Deflate)
{
CompressionLevel = 9
});
}
```
**Speed vs Size:**
| Level | Relative Speed | Compressed Size (vs. original) | Use Case |
|-------|----------------|--------------------------------|----------|
| 1 | ~10x | ~90% | Network, streaming |
| 6 | 1x (baseline) | ~75% | Default (good balance) |
| 9 | ~0.1x | ~65% | Archival, static storage |
### BZip2 Block Size
```csharp
// BZip2 block size affects memory and compression
// 100K to 900K (default 900K)
// Smaller block size = lower memory, faster
// Larger block size = better compression, slower
using (var archive = TarArchive.Create())
{
archive.AddAllFromDirectory(@"D:\data");
// These are preset in WriterOptions via CompressionLevel
archive.SaveTo("archive.tar.bz2", CompressionType.BZip2);
}
```
### LZMA Settings
LZMA compression is very powerful but memory-intensive:
```csharp
// LZMA (7Zip, .tar.lzma):
// - Dictionary size: 16 KB to 1 GB (default 32 MB)
// - Faster preset: smaller dictionary
// - Better compression: larger dictionary
// Preset via CompressionType
using (var archive = TarArchive.Create())
{
archive.AddAllFromDirectory(@"D:\data");
archive.SaveTo("archive.tar.xz", CompressionType.LZMA); // Default settings
}
```
---
## Async Performance
### When Async Helps
Async is beneficial when:
- **Long I/O operations** (network, slow disks)
- **UI responsiveness** needed (Windows Forms, WPF, Blazor)
- **Server applications** (ASP.NET, multiple concurrent operations)
```csharp
// Async extraction (non-blocking)
using (var archive = ZipArchive.Open("archive.zip"))
{
await archive.WriteToDirectoryAsync(
@"C:\output",
new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
        cancellationToken // a CancellationToken from your caller (see Cancellation Pattern below)
);
}
// Thread can handle other work while I/O happens
```
### When Async Doesn't Help
Async doesn't improve performance for:
- **CPU-bound work** (compression/decompression itself; async doesn't make the CPU work faster)
- **Local SSD I/O** (the I/O completes quickly, leaving little to overlap)
- **Single-threaded batch scenarios** (no other work competes for the thread)
```csharp
// Sync extraction (simpler, same performance on fast I/O)
using (var archive = ZipArchive.Open("archive.zip"))
{
archive.WriteToDirectory(
@"C:\output",
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
// Simple and fast - no async needed
```
### Cancellation Pattern
```csharp
var cts = new CancellationTokenSource();
// Cancel after 5 minutes
cts.CancelAfter(TimeSpan.FromMinutes(5));
try
{
using (var archive = ZipArchive.Open("archive.zip"))
{
await archive.WriteToDirectoryAsync(
@"C:\output",
new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
cts.Token
);
}
}
catch (OperationCanceledException)
{
Console.WriteLine("Extraction cancelled");
// Clean up partial extraction if needed
}
```
---
## Practical Performance Tips
### 1. Choose the Right API
| Scenario | API | Why |
|----------|-----|-----|
| Small archives | Archive | Faster random access |
| Large archives | Reader | Lower memory |
| Streaming | Reader | Works on non-seekable streams |
| Download streams | Reader | Async extraction while downloading |
### 2. Batch Operations
```csharp
// ✗ Slow - reopens the same archive once per entry
// (entriesToExtract is a hypothetical list of entry keys, for illustration only)
foreach (var key in entriesToExtract)
{
    using (var archive = ZipArchive.Open("archive.zip"))
    {
        archive.Entries.First(e => e.Key == key).WriteToFile(Path.Combine(@"C:\output", key));
    }
}
// ✓ Better - process multiple entries at once
using (var archive = ZipArchive.Open("archive.zip"))
{
archive.WriteToDirectory(@"C:\output");
}
```
### 3. Profile Your Code
```csharp
var sw = Stopwatch.StartNew();
using (var archive = ZipArchive.Open("large.zip"))
{
archive.WriteToDirectory(@"C:\output");
}
sw.Stop();
Console.WriteLine($"Extraction took {sw.ElapsedMilliseconds}ms");
// Measure memory before/after
var beforeMem = GC.GetTotalMemory(true);
// ... do work ...
var afterMem = GC.GetTotalMemory(true);
Console.WriteLine($"Memory used: {(afterMem - beforeMem) / 1024 / 1024}MB");
```
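To see how much an operation allocates (rather than what remains live after a collection), `GC.GetTotalAllocatedBytes` is available on .NET Core 3.0+ / .NET 5+:
```csharp
// Total managed allocations during the operation (not the same as live memory)
var beforeAlloc = GC.GetTotalAllocatedBytes(precise: true);
using (var archive = ZipArchive.Open("large.zip"))
{
    archive.WriteToDirectory(@"C:\output");
}
var afterAlloc = GC.GetTotalAllocatedBytes(precise: true);
Console.WriteLine($"Allocated: {(afterAlloc - beforeAlloc) / 1024 / 1024}MB");
```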
---
## Troubleshooting Performance
### Extraction is Slow
1. **Check if solid archive** → Use sequential extraction (see the check after this list)
2. **Check API** → Reader API might be faster for large files
3. **Check compression level** → Higher levels are slower to decompress
4. **Check I/O** → Network drives are much slower than SSD
5. **Check buffer size** → May need larger buffers for network
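A quick way to confirm the first point before picking an extraction strategy - `IsSolid` is exposed on the archive (the paths and key below are placeholders):
```csharp
using (var archive = ArchiveFactory.Open("archive.rar"))
{
    if (archive.IsSolid)
    {
        // Solid: one sequential pass over the whole archive
        archive.WriteToDirectory(@"C:\output");
    }
    else
    {
        // Non-solid: per-entry random access is fine
        var entry = archive.Entries.FirstOrDefault(e => e.Key == "file.txt");
        entry?.WriteToFile(@"C:\output\file.txt");
    }
}
```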
### High Memory Usage
1. **Use Reader API** instead of Archive API
2. **Process entries immediately** rather than buffering
3. **Reduce compression level** if writing
4. **Check for memory leaks** in your code
### CPU Usage at 100%
1. **Normal for compression** - especially with high compression levels
2. **Consider lower level** for faster processing
3. **Reduce parallelism** if processing multiple archives
4. **Check if awaiting properly** in async code
---
## Related Documentation
- [USAGE.md](USAGE.md) - Usage examples with performance considerations
- [FORMATS.md](FORMATS.md) - Format-specific performance notes

View File

@@ -1,6 +1,6 @@
# SharpCompress Usage
## Async/Await Support
## Async/Await Support (Beta)
SharpCompress now provides full async/await support for all I/O operations. All `Read`, `Write`, and extraction operations have async equivalents ending in `Async` that accept an optional `CancellationToken`. This enables better performance and scalability for I/O-bound operations.
@@ -13,7 +13,7 @@ SharpCompress now provides full async/await support for all I/O operations. All
See [Async Examples](#async-examples) section below for usage patterns.
## Stream Rules (changed with 0.21)
## Stream Rules
When dealing with Streams, the rule should be that you don't close a stream you didn't create. This, in effect, should mean you should always put a Stream in a using block to dispose it.

View File

@@ -68,7 +68,7 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IAsyncArchive
/// <summary>
/// The total size of the files as uncompressed in the archive.
/// </summary>
public virtual long TotalUncompressSize =>
public virtual long TotalUncompressedSize =>
Entries.Aggregate(0L, (total, cf) => total + cf.Size);
protected abstract IEnumerable<TVolume> LoadVolumes(SourceStream sourceStream);
@@ -187,10 +187,26 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IAsyncArchive
}
public virtual IAsyncEnumerable<TEntry> EntriesAsync => _lazyEntriesAsync;
IAsyncEnumerable<IArchiveEntry> IAsyncArchive.EntriesAsync =>
EntriesAsync.Cast<TEntry, IArchiveEntry>();
public IAsyncEnumerable<IVolume> VolumesAsync => _lazyVolumesAsync.Cast<TVolume, IVolume>();
private async IAsyncEnumerable<IArchiveEntry> EntriesAsyncCast()
{
await foreach (var entry in EntriesAsync)
{
yield return entry;
}
}
IAsyncEnumerable<IArchiveEntry> IAsyncArchive.EntriesAsync => EntriesAsyncCast();
private async IAsyncEnumerable<IVolume> VolumesAsyncCast()
{
await foreach (var volume in VolumesAsync)
{
yield return volume;
}
}
public IAsyncEnumerable<IVolume> VolumesAsync => VolumesAsyncCast();
public async ValueTask<IAsyncReader> ExtractAllEntriesAsync()
{
@@ -209,14 +225,16 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IAsyncArchive
public async ValueTask<bool> IsCompleteAsync()
{
await EnsureEntriesLoadedAsync();
return await EntriesAsync.All(x => x.IsComplete);
return await EntriesAsync.AllAsync(x => x.IsComplete);
}
public async ValueTask<long> TotalSizeAsync() =>
await EntriesAsync.Aggregate(0L, (total, cf) => total + cf.CompressedSize);
await EntriesAsync.AggregateAsync(0L, (total, cf) => total + cf.CompressedSize);
public async ValueTask<long> TotalUncompressSizeAsync() =>
await EntriesAsync.Aggregate(0L, (total, cf) => total + cf.Size);
public async ValueTask<long> TotalUncompressedSizeAsync() =>
await EntriesAsync.AggregateAsync(0L, (total, cf) => total + cf.Size);
public ValueTask<bool> IsEncryptedAsync() => new(IsEncrypted);
#endregion
}

View File

@@ -12,7 +12,8 @@ namespace SharpCompress.Archives;
public abstract class AbstractWritableArchive<TEntry, TVolume>
: AbstractArchive<TEntry, TVolume>,
IWritableArchive
IWritableArchive,
IWritableAsyncArchive
where TEntry : IArchiveEntry
where TVolume : IVolume
{
@@ -83,12 +84,12 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
}
}
void IWritableArchive.RemoveEntry(IArchiveEntry entry) => RemoveEntry((TEntry)entry);
void IWritableArchiveCommon.RemoveEntry(IArchiveEntry entry) => RemoveEntry((TEntry)entry);
public TEntry AddEntry(string key, Stream source, long size = 0, DateTime? modified = null) =>
AddEntry(key, source, false, size, modified);
IArchiveEntry IWritableArchive.AddEntry(
IArchiveEntry IWritableArchiveCommon.AddEntry(
string key,
Stream source,
bool closeStream,
@@ -96,7 +97,7 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
DateTime? modified
) => AddEntry(key, source, closeStream, size, modified);
IArchiveEntry IWritableArchive.AddDirectoryEntry(string key, DateTime? modified) =>
IArchiveEntry IWritableArchiveCommon.AddDirectoryEntry(string key, DateTime? modified) =>
AddDirectoryEntry(key, modified);
public TEntry AddEntry(

View File

@@ -13,12 +13,6 @@ namespace SharpCompress.Archives;
public static class ArchiveFactory
{
/// <summary>
/// Opens an Archive for random access
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <returns></returns>
public static IArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
readerOptions ??= new ReaderOptions();
@@ -26,13 +20,6 @@ public static class ArchiveFactory
return FindFactory<IArchiveFactory>(stream).Open(stream, readerOptions);
}
/// <summary>
/// Opens an Archive for random access asynchronously
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
/// <returns></returns>
public static async ValueTask<IAsyncArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
@@ -41,11 +28,8 @@ public static class ArchiveFactory
{
readerOptions ??= new ReaderOptions();
stream = SharpCompressStream.Create(stream, bufferSize: readerOptions.BufferSize);
var factory = await FindFactoryAsync<IArchiveFactory>(stream, cancellationToken)
.ConfigureAwait(false);
return await factory
.OpenAsync(stream, readerOptions, cancellationToken)
.ConfigureAwait(false);
var factory = await FindFactoryAsync<IArchiveFactory>(stream, cancellationToken);
return factory.OpenAsync(stream, readerOptions);
}
public static IWritableArchive Create(ArchiveType type)
@@ -62,23 +46,12 @@ public static class ArchiveFactory
throw new NotSupportedException("Cannot create Archives of type: " + type);
}
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="options"></param>
public static IArchive Open(string filePath, ReaderOptions? options = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), options);
}
/// <summary>
/// Opens an Archive from a filepath asynchronously.
/// </summary>
/// <param name="filePath"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
string filePath,
ReaderOptions? options = null,
@@ -89,11 +62,6 @@ public static class ArchiveFactory
return OpenAsync(new FileInfo(filePath), options, cancellationToken);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
public static IArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
{
options ??= new ReaderOptions { LeaveStreamOpen = false };
@@ -101,12 +69,6 @@ public static class ArchiveFactory
return FindFactory<IArchiveFactory>(fileInfo).Open(fileInfo, options);
}
/// <summary>
/// Opens an Archive from a FileInfo object asynchronously.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
public static async ValueTask<IAsyncArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? options = null,
@@ -115,16 +77,10 @@ public static class ArchiveFactory
{
options ??= new ReaderOptions { LeaveStreamOpen = false };
var factory = await FindFactoryAsync<IArchiveFactory>(fileInfo, cancellationToken)
.ConfigureAwait(false);
return await factory.OpenAsync(fileInfo, options, cancellationToken).ConfigureAwait(false);
var factory = await FindFactoryAsync<IArchiveFactory>(fileInfo, cancellationToken);
return factory.OpenAsync(fileInfo, options, cancellationToken);
}
/// <summary>
/// Constructor with IEnumerable FileInfo objects, multi and split support.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="options"></param>
public static IArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? options = null)
{
fileInfos.NotNull(nameof(fileInfos));
@@ -146,12 +102,6 @@ public static class ArchiveFactory
return FindFactory<IMultiArchiveFactory>(fileInfo).Open(filesArray, options);
}
/// <summary>
/// Opens a multi-part archive from files asynchronously.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
public static async ValueTask<IAsyncArchive> OpenAsync(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? options = null,
@@ -168,23 +118,16 @@ public static class ArchiveFactory
var fileInfo = filesArray[0];
if (filesArray.Length == 1)
{
return await OpenAsync(fileInfo, options, cancellationToken).ConfigureAwait(false);
return await OpenAsync(fileInfo, options, cancellationToken);
}
fileInfo.NotNull(nameof(fileInfo));
options ??= new ReaderOptions { LeaveStreamOpen = false };
var factory = FindFactory<IMultiArchiveFactory>(fileInfo);
return await factory
.OpenAsync(filesArray, options, cancellationToken)
.ConfigureAwait(false);
var factory = await FindFactoryAsync<IMultiArchiveFactory>(fileInfo, cancellationToken);
return factory.OpenAsync(filesArray, options, cancellationToken);
}
/// <summary>
/// Constructor with IEnumerable FileInfo objects, multi and split support.
/// </summary>
/// <param name="streams"></param>
/// <param name="options"></param>
public static IArchive Open(IEnumerable<Stream> streams, ReaderOptions? options = null)
{
streams.NotNull(nameof(streams));
@@ -206,12 +149,6 @@ public static class ArchiveFactory
return FindFactory<IMultiArchiveFactory>(firstStream).Open(streamsArray, options);
}
/// <summary>
/// Opens a multi-part archive from streams asynchronously.
/// </summary>
/// <param name="streams"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
public static async ValueTask<IAsyncArchive> OpenAsync(
IEnumerable<Stream> streams,
ReaderOptions? options = null,
@@ -229,21 +166,16 @@ public static class ArchiveFactory
var firstStream = streamsArray[0];
if (streamsArray.Length == 1)
{
return await OpenAsync(firstStream, options, cancellationToken).ConfigureAwait(false);
return await OpenAsync(firstStream, options, cancellationToken);
}
firstStream.NotNull(nameof(firstStream));
options ??= new ReaderOptions();
var factory = FindFactory<IMultiArchiveFactory>(firstStream);
return await factory
.OpenAsync(streamsArray, options, cancellationToken)
.ConfigureAwait(false);
return factory.OpenAsync(streamsArray, options);
}
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteToDirectory(
string sourceArchive,
string destinationDirectory,
@@ -382,22 +314,12 @@ public static class ArchiveFactory
return false;
}
/// <summary>
/// From a passed in archive (zip, rar, 7z, 001), return all parts.
/// </summary>
/// <param name="part1"></param>
/// <returns></returns>
public static IEnumerable<string> GetFileParts(string part1)
{
part1.NotNullOrEmpty(nameof(part1));
return GetFileParts(new FileInfo(part1)).Select(a => a.FullName);
}
/// <summary>
/// From a passed in archive (zip, rar, 7z, 001), return all parts.
/// </summary>
/// <param name="part1"></param>
/// <returns></returns>
public static IEnumerable<FileInfo> GetFileParts(FileInfo part1)
{
part1.NotNull(nameof(part1));
@@ -411,7 +333,7 @@ public static class ArchiveFactory
if (part != null)
{
yield return part;
while ((part = factory.GetFilePart(i++, part1)) != null) //tests split too
while ((part = factory.GetFilePart(i++, part1)) != null)
{
yield return part;
}

View File

@@ -34,18 +34,19 @@ internal class AutoArchiveFactory : IArchiveFactory
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
ArchiveFactory.Open(stream, readerOptions);
public async ValueTask<IAsyncArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => await ArchiveFactory.OpenAsync(stream, readerOptions, cancellationToken);
public IAsyncArchive OpenAsync(Stream stream, ReaderOptions? readerOptions = null) =>
(IAsyncArchive)Open(stream, readerOptions);
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
ArchiveFactory.Open(fileInfo, readerOptions);
public async ValueTask<IAsyncArchive> OpenAsync(
public IAsyncArchive OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => await ArchiveFactory.OpenAsync(fileInfo, readerOptions, cancellationToken);
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)Open(fileInfo, readerOptions);
}
}

View File

@@ -0,0 +1,192 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.GZip;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.GZip;
using SharpCompress.Writers;
using SharpCompress.Writers.GZip;
namespace SharpCompress.Archives.GZip;
public partial class GZipArchive
#if NET8_0_OR_GREATER
: IArchiveOpenable<IWritableArchive, IWritableAsyncArchive>,
IMultiArchiveOpenable<IWritableArchive, IWritableAsyncArchive>
#endif
{
public static IWritableAsyncArchive OpenAsync(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty(nameof(path));
return (IWritableAsyncArchive)Open(
new FileInfo(path),
readerOptions ?? new ReaderOptions()
);
}
public static IWritableArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
public static IWritableArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.NotNull(nameof(fileInfo));
return new GZipArchive(
new SourceStream(
fileInfo,
i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
readerOptions ?? new ReaderOptions()
)
);
}
public static IWritableArchive Open(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
)
{
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
return new GZipArchive(
new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
public static IWritableArchive Open(
IEnumerable<Stream> streams,
ReaderOptions? readerOptions = null
)
{
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
return new GZipArchive(
new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
public static IWritableArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.NotNull(nameof(stream));
if (stream is not { CanSeek: true })
{
throw new ArgumentException("Stream must be seekable", nameof(stream));
}
return new GZipArchive(
new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
);
}
public static IWritableAsyncArchive OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)Open(stream, readerOptions);
}
public static IWritableAsyncArchive OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)Open(fileInfo, readerOptions);
}
public static IWritableAsyncArchive OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)Open(streams, readerOptions);
}
public static IWritableAsyncArchive OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)Open(fileInfos, readerOptions);
}
public static GZipArchive Create() => new();
public static bool IsGZipFile(string filePath) => IsGZipFile(new FileInfo(filePath));
public static bool IsGZipFile(FileInfo fileInfo)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return IsGZipFile(stream);
}
public static bool IsGZipFile(Stream stream)
{
Span<byte> header = stackalloc byte[10];
if (!stream.ReadFully(header))
{
return false;
}
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
{
return false;
}
return true;
}
public static async ValueTask<bool> IsGZipFileAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
byte[] header = new byte[10];
if (!await stream.ReadFullyAsync(header, cancellationToken).ConfigureAwait(false))
{
return false;
}
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
{
return false;
}
return true;
}
}

View File

@@ -14,186 +14,20 @@ using SharpCompress.Writers.GZip;
namespace SharpCompress.Archives.GZip;
public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
public partial class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
{
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
public static GZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static GZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.NotNull(nameof(fileInfo));
return new GZipArchive(
new SourceStream(
fileInfo,
i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
readerOptions ?? new ReaderOptions()
)
);
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
public static GZipArchive Open(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
)
{
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
return new GZipArchive(
new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
public static GZipArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
{
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
return new GZipArchive(
new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static GZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.NotNull(nameof(stream));
if (stream is not { CanSeek: true })
{
throw new ArgumentException("Stream must be seekable", nameof(stream));
}
return new GZipArchive(
new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
);
}
/// <summary>
/// Opens a GZipArchive asynchronously from a stream.
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(stream, readerOptions));
}
/// <summary>
/// Opens a GZipArchive asynchronously from a FileInfo.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(fileInfo, readerOptions));
}
/// <summary>
/// Opens a GZipArchive asynchronously from multiple streams.
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(streams, readerOptions));
}
/// <summary>
/// Opens a GZipArchive asynchronously from multiple FileInfo objects.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(fileInfos, readerOptions));
}
public static GZipArchive Create() => new();
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="sourceStream"></param>
private GZipArchive(SourceStream sourceStream)
: base(ArchiveType.GZip, sourceStream) { }
internal GZipArchive()
: base(ArchiveType.GZip) { }
protected override IEnumerable<GZipVolume> LoadVolumes(SourceStream sourceStream)
{
sourceStream.LoadAllParts();
return sourceStream.Streams.Select(a => new GZipVolume(a, ReaderOptions, 0));
}
public static bool IsGZipFile(string filePath) => IsGZipFile(new FileInfo(filePath));
public static bool IsGZipFile(FileInfo fileInfo)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return IsGZipFile(stream);
}
public void SaveTo(string filePath) => SaveTo(new FileInfo(filePath));
public void SaveTo(FileInfo fileInfo)
@@ -215,50 +49,6 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
.ConfigureAwait(false);
}
public static bool IsGZipFile(Stream stream)
{
// read the header on the first read
Span<byte> header = stackalloc byte[10];
// workitem 8501: handle edge case (decompress empty stream)
if (!stream.ReadFully(header))
{
return false;
}
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
{
return false;
}
return true;
}
public static async ValueTask<bool> IsGZipFileAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
// read the header on the first read
byte[] header = new byte[10];
// workitem 8501: handle edge case (decompress empty stream)
if (!await stream.ReadFullyAsync(header, cancellationToken).ConfigureAwait(false))
{
return false;
}
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
{
return false;
}
return true;
}
internal GZipArchive()
: base(ArchiveType.GZip) { }
protected override GZipArchiveEntry CreateEntryInternal(
string filePath,
Stream source,
@@ -329,7 +119,18 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
var stream = volumes.Single().Stream;
yield return new GZipArchiveEntry(
this,
new GZipFilePart(stream, ReaderOptions.ArchiveEncoding)
GZipFilePart.Create(stream, ReaderOptions.ArchiveEncoding)
);
}
protected override async IAsyncEnumerable<GZipArchiveEntry> LoadEntriesAsync(
IAsyncEnumerable<GZipVolume> volumes
)
{
var stream = (await volumes.SingleAsync()).Stream;
yield return new GZipArchiveEntry(
this,
await GZipFilePart.CreateAsync(stream, ReaderOptions.ArchiveEncoding)
);
}
@@ -344,6 +145,6 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
{
var stream = Volumes.Single().Stream;
stream.Position = 0;
return new(GZipReader.Open(stream));
return new((IAsyncReader)GZipReader.Open(stream));
}
}

View File

@@ -23,12 +23,10 @@ public class GZipArchiveEntry : GZipEntry, IArchiveEntry
return Parts.Single().GetCompressedStream().NotNull();
}
public async ValueTask<Stream> OpenEntryStreamAsync(
CancellationToken cancellationToken = default
)
public ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
{
// GZip synchronous implementation is fast enough, just wrap it
return OpenEntryStream();
return new(OpenEntryStream());
}
#region IArchiveEntry Members

View File

@@ -38,5 +38,10 @@ public interface IArchive : IDisposable
/// <summary>
/// The total size of the files as uncompressed in the archive.
/// </summary>
long TotalUncompressSize { get; }
long TotalUncompressedSize { get; }
/// <summary>
/// Returns whether the archive is encrypted.
/// </summary>
bool IsEncrypted { get; }
}

View File

@@ -8,7 +8,6 @@ namespace SharpCompress.Archives;
public static class IArchiveExtensions
{
/// <param name="archive">The archive to extract.</param>
extension(IArchive archive)
{
/// <summary>
@@ -23,7 +22,6 @@ public static class IArchiveExtensions
IProgress<ProgressReport>? progress = null
)
{
// For solid archives (Rar, 7Zip), use the optimized reader-based approach
if (archive.IsSolid || archive.Type == ArchiveType.SevenZip)
{
using var reader = archive.ExtractAllEntries();
@@ -31,7 +29,6 @@ public static class IArchiveExtensions
}
else
{
// For non-solid archives, extract entries directly
archive.WriteToDirectoryInternal(destinationDirectory, options, progress);
}
}
@@ -42,14 +39,10 @@ public static class IArchiveExtensions
IProgress<ProgressReport>? progress
)
{
// Prepare for progress reporting
var totalBytes = archive.TotalUncompressSize;
var totalBytes = archive.TotalUncompressedSize;
var bytesRead = 0L;
// Tracking for created directories.
var seenDirectories = new HashSet<string>();
// Extract
foreach (var entry in archive.Entries)
{
if (entry.IsDirectory)
@@ -68,10 +61,8 @@ public static class IArchiveExtensions
continue;
}
// Use the entry's WriteToDirectory method which respects ExtractionOptions
entry.WriteToDirectory(destinationDirectory, options);
// Update progress
bytesRead += entry.Size;
progress?.Report(
new ProgressReport(entry.Key ?? string.Empty, bytesRead, totalBytes)

View File

@@ -1,6 +1,5 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Factories;
using SharpCompress.Readers;
@@ -33,12 +32,7 @@ public interface IArchiveFactory : IFactory
/// </summary>
/// <param name="stream">An open, readable and seekable stream.</param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
ValueTask<IAsyncArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
IAsyncArchive OpenAsync(Stream stream, ReaderOptions? readerOptions = null);
/// <summary>
/// Constructor with a FileInfo object to an existing file.
@@ -53,7 +47,7 @@ public interface IArchiveFactory : IFactory
/// <param name="fileInfo">the file to open.</param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
ValueTask<IAsyncArchive> OpenAsync(
IAsyncArchive OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default

View File

@@ -0,0 +1,36 @@
#if NET8_0_OR_GREATER
using System.IO;
using System.Threading;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
public interface IArchiveOpenable<TSync, TASync>
where TSync : IArchive
where TASync : IAsyncArchive
{
public static abstract TSync Open(string filePath, ReaderOptions? readerOptions = null);
public static abstract TSync Open(FileInfo fileInfo, ReaderOptions? readerOptions = null);
public static abstract TSync Open(Stream stream, ReaderOptions? readerOptions = null);
public static abstract TASync OpenAsync(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
public static abstract TASync OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
public static abstract TASync OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
}
#endif

View File

@@ -39,5 +39,10 @@ public interface IAsyncArchive : IAsyncDisposable
/// <summary>
/// The total size of the files as uncompressed in the archive.
/// </summary>
ValueTask<long> TotalUncompressSizeAsync();
ValueTask<long> TotalUncompressedSizeAsync();
/// <summary>
/// Returns whether the archive is encrypted.
/// </summary>
ValueTask<bool> IsEncryptedAsync();
}

View File

@@ -10,84 +10,83 @@ namespace SharpCompress.Archives;
public static class IAsyncArchiveExtensions
{
/// <summary>
/// Extract to specific directory asynchronously with progress reporting and cancellation support
/// </summary>
/// <param name="archive">The archive to extract.</param>
/// <param name="destinationDirectory">The folder to extract into.</param>
/// <param name="options">Extraction options.</param>
/// <param name="progress">Optional progress reporter for tracking extraction progress.</param>
/// <param name="cancellationToken">Optional cancellation token.</param>
public static async Task WriteToDirectoryAsync(
this IAsyncArchive archive,
string destinationDirectory,
ExtractionOptions? options = null,
IProgress<ProgressReport>? progress = null,
CancellationToken cancellationToken = default
)
extension(IAsyncArchive archive)
{
// For solid archives (Rar, 7Zip), use the optimized reader-based approach
if (await archive.IsSolidAsync() || archive.Type == ArchiveType.SevenZip)
/// <summary>
/// Extract to specific directory asynchronously with progress reporting and cancellation support
/// </summary>
/// <param name="archive">The archive to extract.</param>
/// <param name="destinationDirectory">The folder to extract into.</param>
/// <param name="options">Extraction options.</param>
/// <param name="progress">Optional progress reporter for tracking extraction progress.</param>
/// <param name="cancellationToken">Optional cancellation token.</param>
public async Task WriteToDirectoryAsync(
string destinationDirectory,
ExtractionOptions? options = null,
IProgress<ProgressReport>? progress = null,
CancellationToken cancellationToken = default
)
{
await using var reader = await archive.ExtractAllEntriesAsync();
await reader.WriteAllToDirectoryAsync(destinationDirectory, options, cancellationToken);
}
else
{
// For non-solid archives, extract entries directly
await archive.WriteToDirectoryAsyncInternal(
destinationDirectory,
options,
progress,
cancellationToken
);
}
}
private static async Task WriteToDirectoryAsyncInternal(
this IAsyncArchive archive,
string destinationDirectory,
ExtractionOptions? options,
IProgress<ProgressReport>? progress,
CancellationToken cancellationToken
)
{
// Prepare for progress reporting
var totalBytes = await archive.TotalUncompressSizeAsync();
var bytesRead = 0L;
// Tracking for created directories.
var seenDirectories = new HashSet<string>();
// Extract
await foreach (var entry in archive.EntriesAsync.WithCancellation(cancellationToken))
{
cancellationToken.ThrowIfCancellationRequested();
if (entry.IsDirectory)
if (await archive.IsSolidAsync() || archive.Type == ArchiveType.SevenZip)
{
var dirPath = Path.Combine(
await using var reader = await archive.ExtractAllEntriesAsync();
await reader.WriteAllToDirectoryAsync(
destinationDirectory,
entry.Key.NotNull("Entry Key is null")
options,
cancellationToken
);
if (
Path.GetDirectoryName(dirPath + "/") is { } parentDirectory
&& seenDirectories.Add(dirPath)
)
{
Directory.CreateDirectory(parentDirectory);
}
continue;
}
else
{
await archive.WriteToDirectoryAsyncInternal(
destinationDirectory,
options,
progress,
cancellationToken
);
}
}
// Use the entry's WriteToDirectoryAsync method which respects ExtractionOptions
await entry
.WriteToDirectoryAsync(destinationDirectory, options, cancellationToken)
.ConfigureAwait(false);
private async Task WriteToDirectoryAsyncInternal(
string destinationDirectory,
ExtractionOptions? options,
IProgress<ProgressReport>? progress,
CancellationToken cancellationToken
)
{
var totalBytes = await archive.TotalUncompressedSizeAsync();
var bytesRead = 0L;
var seenDirectories = new HashSet<string>();
// Update progress
bytesRead += entry.Size;
progress?.Report(new ProgressReport(entry.Key ?? string.Empty, bytesRead, totalBytes));
await foreach (var entry in archive.EntriesAsync.WithCancellation(cancellationToken))
{
cancellationToken.ThrowIfCancellationRequested();
if (entry.IsDirectory)
{
var dirPath = Path.Combine(
destinationDirectory,
entry.Key.NotNull("Entry Key is null")
);
if (
Path.GetDirectoryName(dirPath + "/") is { } parentDirectory
&& seenDirectories.Add(dirPath)
)
{
Directory.CreateDirectory(parentDirectory);
}
continue;
}
await entry
.WriteToDirectoryAsync(destinationDirectory, options, cancellationToken)
.ConfigureAwait(false);
bytesRead += entry.Size;
progress?.Report(
new ProgressReport(entry.Key ?? string.Empty, bytesRead, totalBytes)
);
}
}
}
}

View File

@@ -1,7 +1,6 @@
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Factories;
using SharpCompress.Readers;
@@ -34,12 +33,7 @@ public interface IMultiArchiveFactory : IFactory
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
IAsyncArchive OpenAsync(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null);
/// <summary>
/// Constructor with IEnumerable Stream objects, multi and split support.
@@ -54,7 +48,7 @@ public interface IMultiArchiveFactory : IFactory
/// <param name="fileInfos"></param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
ValueTask<IAsyncArchive> OpenAsync(
IAsyncArchive OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default

View File

@@ -0,0 +1,35 @@
#if NET8_0_OR_GREATER
using System.Collections.Generic;
using System.IO;
using System.Threading;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
public interface IMultiArchiveOpenable<TSync, TASync>
where TSync : IArchive
where TASync : IAsyncArchive
{
public static abstract TSync Open(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
);
public static abstract TSync Open(
IEnumerable<Stream> streams,
ReaderOptions? readerOptions = null
);
public static abstract TASync OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
public static abstract TASync OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
}
#endif

View File

@@ -6,8 +6,17 @@ using SharpCompress.Writers;
namespace SharpCompress.Archives;
public interface IWritableArchive : IArchive
public interface IWritableArchiveCommon
{
/// <summary>
/// Use this to pause entry rebuilding when adding large collections of entries. Dispose when complete. A using statement is recommended.
/// </summary>
/// <returns>IDisposeable to resume entry rebuilding</returns>
IDisposable PauseEntryRebuilding();
/// <summary>
/// Removes the specified entry from the archive.
/// </summary>
void RemoveEntry(IArchiveEntry entry);
IArchiveEntry AddEntry(
@@ -19,18 +28,24 @@ public interface IWritableArchive : IArchive
);
IArchiveEntry AddDirectoryEntry(string key, DateTime? modified = null);
}
public interface IWritableArchive : IArchive, IWritableArchiveCommon
{
/// <summary>
/// Saves the archive to the specified stream using the given writer options.
/// </summary>
void SaveTo(Stream stream, WriterOptions options);
}
public interface IWritableAsyncArchive : IAsyncArchive, IWritableArchiveCommon
{
/// <summary>
/// Asynchronously saves the archive to the specified stream using the given writer options.
/// </summary>
ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
CancellationToken cancellationToken = default
);
/// <summary>
/// Use this to pause entry rebuilding when adding large collections of entries. Dispose when complete. A using statement is recommended.
/// </summary>
/// <returns>IDisposeable to resume entry rebuilding</returns>
IDisposable PauseEntryRebuilding();
}

View File

@@ -0,0 +1,52 @@
using System;
using System.IO;
namespace SharpCompress.Archives;
public static class IWritableArchiveCommonExtensions
{
extension(IWritableArchiveCommon writableArchive)
{
public void AddAllFromDirectory(
string filePath,
string searchPattern = "*.*",
SearchOption searchOption = SearchOption.AllDirectories
)
{
using (writableArchive.PauseEntryRebuilding())
{
foreach (
var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption)
)
{
var fileInfo = new FileInfo(path);
writableArchive.AddEntry(
path.Substring(filePath.Length),
fileInfo.OpenRead(),
true,
fileInfo.Length,
fileInfo.LastWriteTime
);
}
}
}
public IArchiveEntry AddEntry(string key, string file) =>
writableArchive.AddEntry(key, new FileInfo(file));
public IArchiveEntry AddEntry(string key, FileInfo fileInfo)
{
if (!fileInfo.Exists)
{
throw new ArgumentException("FileInfo does not exist.");
}
return writableArchive.AddEntry(
key,
fileInfo.OpenRead(),
true,
fileInfo.Length,
fileInfo.LastWriteTime
);
}
}
}

View File

@@ -1,106 +1,20 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Writers;
namespace SharpCompress.Archives;
public static class IWritableArchiveExtensions
{
public static void AddEntry(
this IWritableArchive writableArchive,
string entryPath,
string filePath
)
extension(IWritableArchive writableArchive)
{
var fileInfo = new FileInfo(filePath);
if (!fileInfo.Exists)
public void SaveTo(string filePath, WriterOptions? options = null) =>
writableArchive.SaveTo(new FileInfo(filePath), options ?? new(CompressionType.Deflate));
public void SaveTo(FileInfo fileInfo, WriterOptions? options = null)
{
throw new FileNotFoundException("Could not AddEntry: " + filePath);
using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
writableArchive.SaveTo(stream, options ?? new(CompressionType.Deflate));
}
writableArchive.AddEntry(
entryPath,
new FileInfo(filePath).OpenRead(),
true,
fileInfo.Length,
fileInfo.LastWriteTime
);
}
public static void SaveTo(
this IWritableArchive writableArchive,
string filePath,
WriterOptions options
) => writableArchive.SaveTo(new FileInfo(filePath), options);
public static void SaveTo(
this IWritableArchive writableArchive,
FileInfo fileInfo,
WriterOptions options
)
{
using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
writableArchive.SaveTo(stream, options);
}
public static ValueTask SaveToAsync(
this IWritableArchive writableArchive,
string filePath,
WriterOptions options,
CancellationToken cancellationToken = default
) => writableArchive.SaveToAsync(new FileInfo(filePath), options, cancellationToken);
public static async ValueTask SaveToAsync(
this IWritableArchive writableArchive,
FileInfo fileInfo,
WriterOptions options,
CancellationToken cancellationToken = default
)
{
using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
await writableArchive.SaveToAsync(stream, options, cancellationToken).ConfigureAwait(false);
}
public static void AddAllFromDirectory(
this IWritableArchive writableArchive,
string filePath,
string searchPattern = "*.*",
SearchOption searchOption = SearchOption.AllDirectories
)
{
using (writableArchive.PauseEntryRebuilding())
{
foreach (var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption))
{
var fileInfo = new FileInfo(path);
writableArchive.AddEntry(
path.Substring(filePath.Length),
fileInfo.OpenRead(),
true,
fileInfo.Length,
fileInfo.LastWriteTime
);
}
}
}
public static IArchiveEntry AddEntry(
this IWritableArchive writableArchive,
string key,
FileInfo fileInfo
)
{
if (!fileInfo.Exists)
{
throw new ArgumentException("FileInfo does not exist.");
}
return writableArchive.AddEntry(
key,
fileInfo.OpenRead(),
true,
fileInfo.Length,
fileInfo.LastWriteTime
);
}
}

View File

@@ -0,0 +1,36 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Writers;
namespace SharpCompress.Archives;
public static class IWritableAsyncArchiveExtensions
{
extension(IWritableAsyncArchive writableArchive)
{
public ValueTask SaveToAsync(
string filePath,
WriterOptions? options = null,
CancellationToken cancellationToken = default
) =>
writableArchive.SaveToAsync(
new FileInfo(filePath),
options ?? new(CompressionType.Deflate),
cancellationToken
);
public async ValueTask SaveToAsync(
FileInfo fileInfo,
WriterOptions? options = null,
CancellationToken cancellationToken = default
)
{
using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
await writableArchive
.SaveToAsync(stream, options ?? new(CompressionType.Deflate), cancellationToken)
.ConfigureAwait(false);
}
}
}

View File

@@ -1,18 +1,36 @@
using System.Linq;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Common.Rar;
namespace SharpCompress.Archives.Rar;
public static class RarArchiveExtensions
{
/// <summary>
/// RarArchive is the first volume of a multi-part archive. If MultipartVolume is true and IsFirstVolume is false then the first volume file must be missing.
/// </summary>
public static bool IsFirstVolume(this RarArchive archive) =>
archive.Volumes.First().IsFirstVolume;
extension(IRarArchive archive)
{
/// <summary>
/// RarArchive is the first volume of a multi-part archive. If MultipartVolume is true and IsFirstVolume is false then the first volume file must be missing.
/// </summary>
public bool IsFirstVolume() => archive.Volumes.Cast<RarVolume>().First().IsFirstVolume;
/// <summary>
/// RarArchive is part of a multi-part archive.
/// </summary>
public static bool IsMultipartVolume(this RarArchive archive) =>
archive.Volumes.First().IsMultiVolume;
/// <summary>
/// RarArchive is part of a multi-part archive.
/// </summary>
public bool IsMultipartVolume() => archive.Volumes.Cast<RarVolume>().First().IsMultiVolume;
}
extension(IRarAsyncArchive archive)
{
/// <summary>
/// RarArchive is the first volume of a multi-part archive. If MultipartVolume is true and IsFirstVolume is false then the first volume file must be missing.
/// </summary>
public async ValueTask<bool> IsFirstVolumeAsync() =>
(await archive.VolumesAsync.CastAsync<RarVolume>().FirstAsync()).IsFirstVolume;
/// <summary>
/// RarArchive is part of a multi-part archive.
/// </summary>
public async ValueTask<bool> IsMultipartVolumeAsync() =>
(await archive.VolumesAsync.CastAsync<RarVolume>().FirstAsync()).IsMultiVolume;
}
}

View File

@@ -0,0 +1,163 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Compressors.Rar;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.Rar;
namespace SharpCompress.Archives.Rar;
public partial class RarArchive
#if NET8_0_OR_GREATER
: IArchiveOpenable<IRarArchive, IRarAsyncArchive>,
IMultiArchiveOpenable<IRarArchive, IRarAsyncArchive>
#endif
{
public static IRarAsyncArchive OpenAsync(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty(nameof(path));
return (IRarAsyncArchive)Open(new FileInfo(path), readerOptions);
}
public static IRarArchive Open(string filePath, ReaderOptions? options = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
var fileInfo = new FileInfo(filePath);
return new RarArchive(
new SourceStream(
fileInfo,
i => RarArchiveVolumeFactory.GetFilePart(i, fileInfo),
options ?? new ReaderOptions()
)
);
}
public static IRarArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
{
fileInfo.NotNull(nameof(fileInfo));
return new RarArchive(
new SourceStream(
fileInfo,
i => RarArchiveVolumeFactory.GetFilePart(i, fileInfo),
options ?? new ReaderOptions()
)
);
}
public static IRarArchive Open(Stream stream, ReaderOptions? options = null)
{
stream.NotNull(nameof(stream));
if (stream is not { CanSeek: true })
{
throw new ArgumentException("Stream must be seekable", nameof(stream));
}
return new RarArchive(new SourceStream(stream, _ => null, options ?? new ReaderOptions()));
}
public static IRarArchive Open(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
)
{
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
return new RarArchive(
new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
public static IRarArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
{
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
return new RarArchive(
new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
public static IRarAsyncArchive OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IRarAsyncArchive)Open(stream, readerOptions);
}
public static IRarAsyncArchive OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IRarAsyncArchive)Open(fileInfo, readerOptions);
}
public static IRarAsyncArchive OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IRarAsyncArchive)Open(streams, readerOptions);
}
public static IRarAsyncArchive OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IRarAsyncArchive)Open(fileInfos, readerOptions);
}
public static bool IsRarFile(string filePath) => IsRarFile(new FileInfo(filePath));
public static bool IsRarFile(FileInfo fileInfo)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return IsRarFile(stream);
}
public static bool IsRarFile(Stream stream, ReaderOptions? options = null)
{
try
{
MarkHeader.Read(stream, true, false);
return true;
}
catch
{
return false;
}
}
}

View File

@@ -14,17 +14,23 @@ using SharpCompress.Readers.Rar;
namespace SharpCompress.Archives.Rar;
public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
public interface IRarArchiveCommon
{
int MinVersion { get; }
int MaxVersion { get; }
}
public interface IRarArchive : IArchive, IRarArchiveCommon { }
public interface IRarAsyncArchive : IAsyncArchive, IRarArchiveCommon { }
public partial class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>, IRarArchive
{
private bool _disposed;
internal Lazy<IRarUnpack> UnpackV2017 { get; } =
new(() => new Compressors.Rar.UnpackV2017.Unpack());
internal Lazy<IRarUnpack> UnpackV1 { get; } = new(() => new Compressors.Rar.UnpackV1.Unpack());
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="sourceStream"></param>
private RarArchive(SourceStream sourceStream)
: base(ArchiveType.Rar, sourceStream) { }
@@ -42,15 +48,29 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
}
}
public override async ValueTask DisposeAsync()
{
if (!_disposed)
{
if (UnpackV1.IsValueCreated && UnpackV1.Value is IDisposable unpackV1)
{
unpackV1.Dispose();
}
_disposed = true;
await base.DisposeAsync();
}
}
protected override IEnumerable<RarArchiveEntry> LoadEntries(IEnumerable<RarVolume> volumes) =>
RarArchiveEntryFactory.GetEntries(this, volumes, ReaderOptions);
protected override IEnumerable<RarVolume> LoadVolumes(SourceStream sourceStream)
{
sourceStream.LoadAllParts(); //request all streams
sourceStream.LoadAllParts();
var streams = sourceStream.Streams.ToArray();
var i = 0;
if (streams.Length > 1 && IsRarFile(streams[1], ReaderOptions)) //test part 2 - true = multipart not split
if (streams.Length > 1 && IsRarFile(streams[1], ReaderOptions))
{
sourceStream.IsVolumes = true;
streams[1].Position = 0;
@@ -63,7 +83,6 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
));
}
//split mode or single file
return new StreamRarArchiveVolume(sourceStream, ReaderOptions, i++).AsEnumerable();
}
@@ -82,12 +101,12 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
volume.Stream.Position = 0;
return volume.Stream;
});
return RarReader.Open(streams, ReaderOptions);
return (RarReader)RarReader.Open(streams, ReaderOptions);
}
var stream = Volumes.First().Stream;
stream.Position = 0;
return RarReader.Open(stream, ReaderOptions);
return (RarReader)RarReader.Open(stream, ReaderOptions);
}
public override bool IsSolid => Volumes.First().IsSolidArchive;
@@ -96,187 +115,4 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
public virtual int MinVersion => Volumes.First().MinVersion;
public virtual int MaxVersion => Volumes.First().MaxVersion;
#region Creation
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="options"></param>
public static RarArchive Open(string filePath, ReaderOptions? options = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
var fileInfo = new FileInfo(filePath);
return new RarArchive(
new SourceStream(
fileInfo,
i => RarArchiveVolumeFactory.GetFilePart(i, fileInfo),
options ?? new ReaderOptions()
)
);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
public static RarArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
{
fileInfo.NotNull(nameof(fileInfo));
return new RarArchive(
new SourceStream(
fileInfo,
i => RarArchiveVolumeFactory.GetFilePart(i, fileInfo),
options ?? new ReaderOptions()
)
);
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="options"></param>
public static RarArchive Open(Stream stream, ReaderOptions? options = null)
{
stream.NotNull(nameof(stream));
if (stream is not { CanSeek: true })
{
throw new ArgumentException("Stream must be seekable", nameof(stream));
}
return new RarArchive(new SourceStream(stream, _ => null, options ?? new ReaderOptions()));
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
public static RarArchive Open(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
)
{
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
return new RarArchive(
new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
public static RarArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
{
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
return new RarArchive(
new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
/// <summary>
/// Opens a RarArchive asynchronously from a stream.
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(stream, readerOptions));
}
/// <summary>
/// Opens a RarArchive asynchronously from a FileInfo.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(fileInfo, readerOptions));
}
/// <summary>
/// Opens a RarArchive asynchronously from multiple streams.
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(streams, readerOptions));
}
/// <summary>
/// Opens a RarArchive asynchronously from multiple FileInfo objects.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(fileInfos, readerOptions));
}
public static bool IsRarFile(string filePath) => IsRarFile(new FileInfo(filePath));
public static bool IsRarFile(FileInfo fileInfo)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return IsRarFile(stream);
}
public static bool IsRarFile(Stream stream, ReaderOptions? options = null)
{
try
{
MarkHeader.Read(stream, true, false);
return true;
}
catch
{
return false;
}
}
#endregion
}
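
The multi-part overloads above seed a SourceStream with the first part and a delegate that resolves later parts by index; LoadVolumes then requests all parts and probes the second stream to tell a true multi-volume set from a split file. A sketch of opening a split set (part names are placeholders; IDisposable on the archive is assumed from the base class):

using System;
using System.IO;
using System.Linq;
using SharpCompress.Archives.Rar;

// Usage sketch only; the part names are placeholders for an on-disk multi-volume set.
var parts = new[] { "movie.part1.rar", "movie.part2.rar", "movie.part3.rar" }
    .Select(name => new FileInfo(name));
using var archive = RarArchive.Open(parts);
Console.WriteLine($"Solid archive: {archive.IsSolid}");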


@@ -0,0 +1,166 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.SevenZip;
using SharpCompress.Compressors.LZMA.Utilites;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives.SevenZip;
public partial class SevenZipArchive
#if NET8_0_OR_GREATER
: IArchiveOpenable<IArchive, IAsyncArchive>,
IMultiArchiveOpenable<IArchive, IAsyncArchive>
#endif
{
public static IAsyncArchive OpenAsync(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty("path");
return (IAsyncArchive)Open(new FileInfo(path), readerOptions ?? new ReaderOptions());
}
public static IArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.NotNullOrEmpty("filePath");
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
public static IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.NotNull("fileInfo");
return new SevenZipArchive(
new SourceStream(
fileInfo,
i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
readerOptions ?? new ReaderOptions()
)
);
}
public static IArchive Open(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
)
{
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
return new SevenZipArchive(
new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
public static IArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
{
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
return new SevenZipArchive(
new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
public static IArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.NotNull("stream");
if (stream is not { CanSeek: true })
{
throw new ArgumentException("Stream must be seekable", nameof(stream));
}
return new SevenZipArchive(
new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
);
}
public static IAsyncArchive OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)Open(stream, readerOptions);
}
public static IAsyncArchive OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)Open(fileInfo, readerOptions);
}
public static IAsyncArchive OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)Open(streams, readerOptions);
}
public static IAsyncArchive OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)Open(fileInfos, readerOptions);
}
public static bool IsSevenZipFile(string filePath) => IsSevenZipFile(new FileInfo(filePath));
public static bool IsSevenZipFile(FileInfo fileInfo)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return IsSevenZipFile(stream);
}
public static bool IsSevenZipFile(Stream stream)
{
try
{
return SignatureMatch(stream);
}
catch
{
return false;
}
}
private static ReadOnlySpan<byte> Signature =>
new byte[] { (byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C };
private static bool SignatureMatch(Stream stream)
{
var reader = new BinaryReader(stream);
ReadOnlySpan<byte> signatureBytes = reader.ReadBytes(6);
return signatureBytes.SequenceEqual(Signature);
}
}
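
SignatureMatch compares the first six bytes of the stream against the 7z magic ('7', 'z', 0xBC, 0xAF, 0x27, 0x1C), and IsSevenZipFile swallows any read error. A hedged detection-then-open sketch (placeholder path; the probe advances the stream, so rewind before Open):

using System;
using System.IO;
using SharpCompress.Archives.SevenZip;

// Usage sketch only; "example.7z" is a placeholder path.
using var stream = File.OpenRead("example.7z");
if (SevenZipArchive.IsSevenZipFile(stream))
{
    stream.Position = 0; // rewind past the six signature bytes read by the probe
    var archive = SevenZipArchive.Open(stream); // requires a seekable stream
    Console.WriteLine("Opened a 7z archive");
}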


@@ -12,191 +12,22 @@ using SharpCompress.Readers;
namespace SharpCompress.Archives.SevenZip;
public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVolume>
public partial class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVolume>
{
private ArchiveDatabase? _database;
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.NotNullOrEmpty("filePath");
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.NotNull("fileInfo");
return new SevenZipArchive(
new SourceStream(
fileInfo,
i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
readerOptions ?? new ReaderOptions()
)
);
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
)
{
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
return new SevenZipArchive(
new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(
IEnumerable<Stream> streams,
ReaderOptions? readerOptions = null
)
{
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
return new SevenZipArchive(
new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.NotNull("stream");
if (stream is not { CanSeek: true })
{
throw new ArgumentException("Stream must be seekable", nameof(stream));
}
return new SevenZipArchive(
new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
);
}
/// <summary>
/// Opens a SevenZipArchive asynchronously from a stream.
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(stream, readerOptions));
}
/// <summary>
/// Opens a SevenZipArchive asynchronously from a FileInfo.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(fileInfo, readerOptions));
}
/// <summary>
/// Opens a SevenZipArchive asynchronously from multiple streams.
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(streams, readerOptions));
}
/// <summary>
/// Opens a SevenZipArchive asynchronously from multiple FileInfo objects.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(fileInfos, readerOptions));
}
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="sourceStream"></param>
private SevenZipArchive(SourceStream sourceStream)
: base(ArchiveType.SevenZip, sourceStream) { }
protected override IEnumerable<SevenZipVolume> LoadVolumes(SourceStream sourceStream)
{
sourceStream.NotNull("SourceStream is null").LoadAllParts(); //request all streams
return new SevenZipVolume(sourceStream, ReaderOptions, 0).AsEnumerable(); //simple single volume or split, multivolume not supported
}
public static bool IsSevenZipFile(string filePath) => IsSevenZipFile(new FileInfo(filePath));
public static bool IsSevenZipFile(FileInfo fileInfo)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return IsSevenZipFile(stream);
}
internal SevenZipArchive()
: base(ArchiveType.SevenZip) { }
protected override IEnumerable<SevenZipVolume> LoadVolumes(SourceStream sourceStream)
{
sourceStream.NotNull("SourceStream is null").LoadAllParts();
return new SevenZipVolume(sourceStream, ReaderOptions, 0).AsEnumerable();
}
protected override IEnumerable<SevenZipArchiveEntry> LoadEntries(
IEnumerable<SevenZipVolume> volumes
)
@@ -222,7 +53,7 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
foreach (var entry in group)
{
entry.IsSolid = isSolid;
isSolid = true; //mark others in this group as solid - same as rar behaviour.
isSolid = true;
}
}
@@ -240,28 +71,6 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
}
}
public static bool IsSevenZipFile(Stream stream)
{
try
{
return SignatureMatch(stream);
}
catch
{
return false;
}
}
private static ReadOnlySpan<byte> Signature =>
new byte[] { (byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C };
private static bool SignatureMatch(Stream stream)
{
var reader = new BinaryReader(stream);
ReadOnlySpan<byte> signatureBytes = reader.ReadBytes(6);
return signatureBytes.SequenceEqual(Signature);
}
protected override IReader CreateReaderForSolidExtraction() =>
new SevenZipReader(ReaderOptions, this);
@@ -298,9 +107,6 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
_currentEntry = dir;
yield return dir;
}
// For non-directory entries, yield them without creating shared streams
// Each call to GetEntryStream() will create a fresh decompression stream
// to avoid state corruption issues with async operations
foreach (var entry in entries.Where(x => !x.IsDirectory))
{
_currentEntry = entry;
@@ -310,13 +116,6 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
protected override EntryStream GetEntryStream()
{
// Create a fresh decompression stream for each file (no state sharing).
// However, the LZMA decoder has bugs in its async implementation that cause
// state corruption even on fresh streams. The SyncOnlyStream wrapper
// works around these bugs by forcing async operations to use sync equivalents.
//
// TODO: Fix the LZMA decoder async bugs (in LzmaStream, Decoder, OutWindow)
// so this wrapper is no longer necessary.
var entry = _currentEntry.NotNull("currentEntry is not null");
if (entry.IsDirectory)
{
@@ -326,15 +125,6 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
}
}
/// <summary>
/// WORKAROUND: Forces async operations to use synchronous equivalents.
/// This is necessary because the LZMA decoder has bugs in its async implementation
/// that cause state corruption (IndexOutOfRangeException, DataErrorException).
///
/// The proper fix would be to repair the LZMA decoder's async methods
/// (LzmaStream.ReadAsync, Decoder.CodeAsync, OutWindow async operations),
/// but that requires deep changes to the decoder state machine.
/// </summary>
private sealed class SyncOnlyStream : Stream
{
private readonly Stream _baseStream;
@@ -364,7 +154,6 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
public override void Write(byte[] buffer, int offset, int count) =>
_baseStream.Write(buffer, offset, count);
// Force async operations to use sync equivalents to avoid LZMA decoder bugs
public override Task<int> ReadAsync(
byte[] buffer,
int offset,


@@ -12,9 +12,8 @@ public class SevenZipArchiveEntry : SevenZipEntry, IArchiveEntry
public Stream OpenEntryStream() => FilePart.GetCompressedStream();
public async ValueTask<Stream> OpenEntryStreamAsync(
CancellationToken cancellationToken = default
) => OpenEntryStream();
public ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default) =>
new(OpenEntryStream());
public IArchive Archive { get; }
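
Dropping the async modifier here (and on TarArchiveEntry further down) removes a needless state machine: when the work completes synchronously, wrapping the result in a ValueTask is enough. The general shape, as an illustrative pattern rather than library code:

using System.IO;
using System.Threading;
using System.Threading.Tasks;

// Illustrative pattern only: wrap a synchronously produced result in a ValueTask
// instead of declaring the method async with no awaits.
public sealed class ExampleEntry
{
    public Stream OpenEntryStream() => new MemoryStream();

    public ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default) =>
        new(OpenEntryStream());
}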


@@ -0,0 +1,166 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Tar;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Writers;
using SharpCompress.Writers.Tar;
namespace SharpCompress.Archives.Tar;
public partial class TarArchive
#if NET8_0_OR_GREATER
: IArchiveOpenable<IWritableArchive, IWritableAsyncArchive>,
IMultiArchiveOpenable<IWritableArchive, IWritableAsyncArchive>
#endif
{
public static IWritableArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
public static IWritableArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.NotNull(nameof(fileInfo));
return new TarArchive(
new SourceStream(
fileInfo,
i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
readerOptions ?? new ReaderOptions()
)
);
}
public static IWritableArchive Open(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
)
{
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
return new TarArchive(
new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
public static IWritableArchive Open(
IEnumerable<Stream> streams,
ReaderOptions? readerOptions = null
)
{
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
return new TarArchive(
new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
public static IWritableArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.NotNull(nameof(stream));
if (stream is not { CanSeek: true })
{
throw new ArgumentException("Stream must be seekable", nameof(stream));
}
return new TarArchive(
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
);
}
public static IWritableAsyncArchive OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)Open(stream, readerOptions);
}
public static IWritableAsyncArchive OpenAsync(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)Open(new FileInfo(path), readerOptions);
}
public static IWritableAsyncArchive OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)Open(fileInfo, readerOptions);
}
public static IWritableAsyncArchive OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)Open(streams, readerOptions);
}
public static IWritableAsyncArchive OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)Open(fileInfos, readerOptions);
}
public static bool IsTarFile(string filePath) => IsTarFile(new FileInfo(filePath));
public static bool IsTarFile(FileInfo fileInfo)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return IsTarFile(stream);
}
public static bool IsTarFile(Stream stream)
{
try
{
var tarHeader = new TarHeader(new ArchiveEncoding());
var readSucceeded = tarHeader.Read(new BinaryReader(stream));
var isEmptyArchive =
tarHeader.Name?.Length == 0
&& tarHeader.Size == 0
&& Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
return readSucceeded || isEmptyArchive;
}
catch { }
return false;
}
public static TarArchive Create() => new();
}
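
IsTarFile accepts either a successfully parsed header or the empty-archive case (zero-length name, zero size, defined entry type). A small probe-or-create sketch (the path is a placeholder; IWritableArchive members beyond these factories are not shown in this diff):

using System;
using SharpCompress.Archives.Tar;

// Usage sketch only; "backup.tar" is a placeholder path.
if (TarArchive.IsTarFile("backup.tar"))
{
    var existing = TarArchive.Open("backup.tar"); // IWritableArchive over the existing file
    Console.WriteLine("Opened existing tar archive");
}
else
{
    var fresh = TarArchive.Create(); // new, empty writable archive
    Console.WriteLine("Created a new tar archive");
}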


@@ -15,196 +15,14 @@ using SharpCompress.Writers.Tar;
namespace SharpCompress.Archives.Tar;
public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
public partial class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
{
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.NotNull(nameof(fileInfo));
return new TarArchive(
new SourceStream(
fileInfo,
i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
readerOptions ?? new ReaderOptions()
)
);
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
)
{
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
return new TarArchive(
new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
{
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
return new TarArchive(
new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.NotNull(nameof(stream));
if (stream is not { CanSeek: true })
{
throw new ArgumentException("Stream must be seekable", nameof(stream));
}
return new TarArchive(
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
);
}
/// <summary>
/// Opens a TarArchive asynchronously from a stream.
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(stream, readerOptions));
}
/// <summary>
/// Opens a TarArchive asynchronously from a FileInfo.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(fileInfo, readerOptions));
}
/// <summary>
/// Opens a TarArchive asynchronously from multiple streams.
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(streams, readerOptions));
}
/// <summary>
/// Opens a TarArchive asynchronously from multiple FileInfo objects.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(fileInfos, readerOptions));
}
public static bool IsTarFile(string filePath) => IsTarFile(new FileInfo(filePath));
public static bool IsTarFile(FileInfo fileInfo)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return IsTarFile(stream);
}
public static bool IsTarFile(Stream stream)
{
try
{
var tarHeader = new TarHeader(new ArchiveEncoding());
var readSucceeded = tarHeader.Read(new BinaryReader(stream));
var isEmptyArchive =
tarHeader.Name?.Length == 0
&& tarHeader.Size == 0
&& Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
return readSucceeded || isEmptyArchive;
}
catch { }
return false;
}
protected override IEnumerable<TarVolume> LoadVolumes(SourceStream sourceStream)
{
sourceStream.NotNull("SourceStream is null").LoadAllParts(); //request all streams
return new TarVolume(sourceStream, ReaderOptions, 1).AsEnumerable(); //simple single volume or split, multivolume not supported
sourceStream.NotNull("SourceStream is null").LoadAllParts();
return new TarVolume(sourceStream, ReaderOptions, 1).AsEnumerable();
}
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="sourceStream"></param>
private TarArchive(SourceStream sourceStream)
: base(ArchiveType.Tar, sourceStream) { }
@@ -269,8 +87,6 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
}
}
public static TarArchive Create() => new();
protected override TarArchiveEntry CreateEntryInternal(
string filePath,
Stream source,
@@ -371,6 +187,6 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
{
var stream = Volumes.Single().Stream;
stream.Position = 0;
return new(TarReader.Open(stream));
return new((IAsyncReader)TarReader.Open(stream));
}
}


@@ -14,9 +14,8 @@ public class TarArchiveEntry : TarEntry, IArchiveEntry
public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream().NotNull();
public async ValueTask<Stream> OpenEntryStreamAsync(
CancellationToken cancellationToken = default
) => OpenEntryStream();
public ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default) =>
new(OpenEntryStream());
#region IArchiveEntry Members


@@ -0,0 +1,319 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Zip;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives.Zip;
public partial class ZipArchive
#if NET8_0_OR_GREATER
: IArchiveOpenable<IWritableArchive, IWritableAsyncArchive>,
IMultiArchiveOpenable<IWritableArchive, IWritableAsyncArchive>
#endif
{
public static IWritableArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
public static IWritableArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.NotNull(nameof(fileInfo));
return new ZipArchive(
new SourceStream(
fileInfo,
i => ZipArchiveVolumeFactory.GetFilePart(i, fileInfo),
readerOptions ?? new ReaderOptions()
)
);
}
public static IWritableArchive Open(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
)
{
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
return new ZipArchive(
new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
public static IWritableArchive Open(
IEnumerable<Stream> streams,
ReaderOptions? readerOptions = null
)
{
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
return new ZipArchive(
new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
public static IWritableArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.NotNull(nameof(stream));
if (stream is not { CanSeek: true })
{
throw new ArgumentException("Stream must be seekable", nameof(stream));
}
return new ZipArchive(
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
);
}
public static IWritableAsyncArchive OpenAsync(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)Open(path, readerOptions);
}
public static IWritableAsyncArchive OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)Open(stream, readerOptions);
}
public static IWritableAsyncArchive OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)Open(fileInfo, readerOptions);
}
public static IWritableAsyncArchive OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)Open(streams, readerOptions);
}
public static IWritableAsyncArchive OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)Open(fileInfos, readerOptions);
}
public static bool IsZipFile(
string filePath,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => IsZipFile(new FileInfo(filePath), password, bufferSize);
public static bool IsZipFile(
FileInfo fileInfo,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return IsZipFile(stream, password, bufferSize);
}
public static bool IsZipFile(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
}
var header = headerFactory
.ReadStreamHeader(stream)
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
if (header is null)
{
return false;
}
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
return true;
}
catch
{
return false;
}
}
public static bool IsZipMulti(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
}
var header = headerFactory
.ReadStreamHeader(stream)
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
if (header is null)
{
if (stream.CanSeek)
{
var z = new SeekableZipHeaderFactory(password, new ArchiveEncoding());
var x = z.ReadSeekableHeader(stream, useSync: true).FirstOrDefault();
return x?.ZipHeaderType == ZipHeaderType.DirectoryEntry;
}
else
{
return false;
}
}
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
return true;
}
catch
{
return false;
}
}
public static async ValueTask<bool> IsZipFileAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
}
var header = await headerFactory
.ReadStreamHeaderAsync(stream)
.Where(x => x.ZipHeaderType != ZipHeaderType.Split)
.FirstOrDefaultAsync(cancellationToken);
if (header is null)
{
return false;
}
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
return true;
}
catch
{
return false;
}
}
public static ZipArchive Create() => new();
public static async ValueTask<bool> IsZipMultiAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
}
var header = headerFactory
.ReadStreamHeader(stream)
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
if (header is null)
{
if (stream.CanSeek)
{
var z = new SeekableZipHeaderFactory(password, new ArchiveEncoding());
ZipHeader? x = null;
await foreach (
var h in z.ReadSeekableHeaderAsync(stream)
.WithCancellation(cancellationToken)
)
{
x = h;
break;
}
return x?.ZipHeaderType == ZipHeaderType.DirectoryEntry;
}
else
{
return false;
}
}
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
return true;
}
catch
{
return false;
}
}
}
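
Both async probes check the token up front and forward it to the streaming header read. A minimal sketch of IsZipFileAsync guarded by a timeout token (path and timeout are illustrative):

using System;
using System.IO;
using System.Threading;
using SharpCompress.Archives.Zip;

// Usage sketch only; "data.zip" is a placeholder path.
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(5));
using var stream = File.OpenRead("data.zip");
var isZip = await ZipArchive.IsZipFileAsync(stream, password: null, cancellationToken: cts.Token);
Console.WriteLine(isZip ? "Looks like a zip archive" : "Not a zip archive");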


@@ -16,21 +16,12 @@ using SharpCompress.Writers.Zip;
namespace SharpCompress.Archives.Zip;
public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
public partial class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
{
private readonly SeekableZipHeaderFactory? headerFactory;
/// <summary>
/// Gets or sets the compression level applied to files added to the archive,
/// if the compression method is set to deflate
/// </summary>
public CompressionLevel DeflateCompressionLevel { get; set; }
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="sourceStream"></param>
/// <param name="options"></param>
internal ZipArchive(SourceStream sourceStream)
: base(ArchiveType.Zip, sourceStream) =>
headerFactory = new SeekableZipHeaderFactory(
@@ -38,371 +29,36 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
sourceStream.ReaderOptions.ArchiveEncoding
);
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
public static ZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static ZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.NotNull(nameof(fileInfo));
return new ZipArchive(
new SourceStream(
fileInfo,
i => ZipArchiveVolumeFactory.GetFilePart(i, fileInfo),
readerOptions ?? new ReaderOptions()
)
);
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
public static ZipArchive Open(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
)
{
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
return new ZipArchive(
new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
public static ZipArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
{
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
return new ZipArchive(
new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static ZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.NotNull(nameof(stream));
if (stream is not { CanSeek: true })
{
throw new ArgumentException("Stream must be seekable", nameof(stream));
}
return new ZipArchive(
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
);
}
/// <summary>
/// Opens a ZipArchive asynchronously from a stream.
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(stream, readerOptions));
}
/// <summary>
/// Opens a ZipArchive asynchronously from a FileInfo.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(fileInfo, readerOptions));
}
/// <summary>
/// Opens a ZipArchive asynchronously from multiple streams.
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(streams, readerOptions));
}
/// <summary>
/// Opens a ZipArchive asynchronously from multiple FileInfo objects.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(fileInfos, readerOptions));
}
public static bool IsZipFile(
string filePath,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => IsZipFile(new FileInfo(filePath), password, bufferSize);
public static bool IsZipFile(
FileInfo fileInfo,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return IsZipFile(stream, password, bufferSize);
}
public static bool IsZipFile(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
}
var header = headerFactory
.ReadStreamHeader(stream)
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
if (header is null)
{
return false;
}
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
return true;
}
catch
{
return false;
}
}
public static bool IsZipMulti(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
}
var header = headerFactory
.ReadStreamHeader(stream)
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
if (header is null)
{
if (stream.CanSeek) //could be multipart. Test for central directory - might not be z64 safe
{
var z = new SeekableZipHeaderFactory(password, new ArchiveEncoding());
var x = z.ReadSeekableHeader(stream, useSync: true).FirstOrDefault();
return x?.ZipHeaderType == ZipHeaderType.DirectoryEntry;
}
else
{
return false;
}
}
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
return true;
}
catch
{
return false;
}
}
public static async ValueTask<bool> IsZipFileAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
}
var header = await headerFactory
.ReadStreamHeaderAsync(stream)
.Where(x => x.ZipHeaderType != ZipHeaderType.Split)
.FirstOrDefaultAsync();
if (header is null)
{
return false;
}
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
return true;
}
catch
{
return false;
}
}
public static async ValueTask<bool> IsZipMultiAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
}
var header = headerFactory
.ReadStreamHeader(stream)
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
if (header is null)
{
if (stream.CanSeek) //could be multipart. Test for central directory - might not be z64 safe
{
var z = new SeekableZipHeaderFactory(password, new ArchiveEncoding());
ZipHeader? x = null;
await foreach (
var h in z.ReadSeekableHeaderAsync(stream)
.WithCancellation(cancellationToken)
)
{
x = h;
break;
}
return x?.ZipHeaderType == ZipHeaderType.DirectoryEntry;
}
else
{
return false;
}
}
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
return true;
}
catch
{
return false;
}
}
internal ZipArchive()
: base(ArchiveType.Zip) { }
protected override IEnumerable<ZipVolume> LoadVolumes(SourceStream stream)
{
stream.LoadAllParts(); //request all streams
stream.LoadAllParts();
stream.Position = 0;
var streams = stream.Streams.ToList();
var idx = 0;
if (streams.Count() > 1) //test part 2 - true = multipart not split
if (streams.Count() > 1)
{
streams[1].Position += 4; //skip the POST_DATA_DESCRIPTOR to prevent an exception
streams[1].Position += 4;
var isZip = IsZipFile(streams[1], ReaderOptions.Password, ReaderOptions.BufferSize);
streams[1].Position -= 4;
if (isZip)
{
stream.IsVolumes = true;
var tmp = streams[0]; //arcs as zip, z01 ... swap the zip to the end
var tmp = streams[0];
streams.RemoveAt(0);
streams.Add(tmp);
//streams[0].Position = 4; //skip the POST_DATA_DESCRIPTOR to prevent an exception
return streams.Select(a => new ZipVolume(a, ReaderOptions, idx++));
}
}
//split mode or single file
return new ZipVolume(stream, ReaderOptions, idx++).AsEnumerable();
}
internal ZipArchive()
: base(ArchiveType.Zip) { }
protected override IEnumerable<ZipArchiveEntry> LoadEntries(IEnumerable<ZipVolume> volumes)
{
var vols = volumes.ToArray();
@@ -584,8 +240,6 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
DateTime? modified
) => new ZipWritableArchiveEntry(this, directoryPath, modified);
public static ZipArchive Create() => new();
protected override IReader CreateReaderForSolidExtraction()
{
var stream = Volumes.Single().Stream;
@@ -597,6 +251,6 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
{
var stream = Volumes.Single().Stream;
stream.Position = 0;
return new(ZipReader.Open(stream));
return new((IAsyncReader)ZipReader.Open(stream));
}
}


@@ -51,11 +51,14 @@ namespace SharpCompress.Common
return BinaryPrimitives.ReadUInt64LittleEndian(_buffer);
}
public async ValueTask<byte[]> ReadBytesAsync(int count, CancellationToken ct = default)
public async ValueTask ReadBytesAsync(byte[] bytes, int offset, int count, CancellationToken ct = default)
{
var result = new byte[count];
await _stream.ReadExactAsync(result, 0, count, ct).ConfigureAwait(false);
return result;
await _stream.ReadExactAsync(bytes, offset, count, ct).ConfigureAwait(false);
}
public async ValueTask SkipAsync(int count, CancellationToken ct = default)
{
await _stream.SkipAsync(count, ct).ConfigureAwait(false);
}
public void Dispose()
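
ReadBytesAsync now fills a caller-supplied buffer instead of allocating and returning a fresh byte[] per call, which lets the zip header classes below size (or pool) their own buffers. A self-contained sketch of that caller-owns-the-buffer discipline using ArrayPool (the helper name and stream contents are illustrative, not library code):

using System;
using System.Buffers;
using System.IO;
using System.Threading.Tasks;

// Illustrative helper: the caller rents and returns the buffer; the read loop only fills it.
static async Task<uint> ReadUInt32LittleEndianAsync(Stream stream)
{
    var buffer = ArrayPool<byte>.Shared.Rent(4);
    try
    {
        var read = 0;
        while (read < 4)
        {
            var n = await stream.ReadAsync(buffer, read, 4 - read);
            if (n == 0)
            {
                throw new EndOfStreamException();
            }
            read += n;
        }
        return (uint)(buffer[0] | buffer[1] << 8 | buffer[2] << 16 | buffer[3] << 24);
    }
    finally
    {
        ArrayPool<byte>.Shared.Return(buffer);
    }
}

// The zip local file header signature 0x50 0x4B 0x03 0x04 reads back as 0x04034B50.
var signature = await ReadUInt32LittleEndianAsync(new MemoryStream(new byte[] { 0x50, 0x4B, 0x03, 0x04 }));
Console.WriteLine(signature.ToString("X8"));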


@@ -40,6 +40,6 @@ public class GZipEntry : Entry
internal static IEnumerable<GZipEntry> GetEntries(Stream stream, OptionsBase options)
{
yield return new GZipEntry(new GZipFilePart(stream, options.ArchiveEncoding));
yield return new GZipEntry(GZipFilePart.Create(stream, options.ArchiveEncoding));
}
}


@@ -2,6 +2,8 @@ using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;
@@ -13,28 +15,58 @@ internal sealed class GZipFilePart : FilePart
private string? _name;
private readonly Stream _stream;
internal GZipFilePart(Stream stream, IArchiveEncoding archiveEncoding)
: base(archiveEncoding)
internal static GZipFilePart Create(Stream stream, IArchiveEncoding archiveEncoding)
{
_stream = stream;
ReadAndValidateGzipHeader();
var part = new GZipFilePart(stream, archiveEncoding);
part.ReadAndValidateGzipHeader();
if (stream.CanSeek)
{
var position = stream.Position;
stream.Position = stream.Length - 8;
ReadTrailer();
part.ReadTrailer();
stream.Position = position;
EntryStartPosition = position;
part.EntryStartPosition = position;
}
else
{
// For non-seekable streams, we can't read the trailer or track position.
// Set to 0 since the stream will be read sequentially from its current position.
EntryStartPosition = 0;
part.EntryStartPosition = 0;
}
return part;
}
internal long EntryStartPosition { get; }
internal static async ValueTask<GZipFilePart> CreateAsync(
Stream stream,
IArchiveEncoding archiveEncoding,
CancellationToken cancellationToken = default
)
{
var part = new GZipFilePart(stream, archiveEncoding);
await part.ReadAndValidateGzipHeaderAsync(cancellationToken);
if (stream.CanSeek)
{
var position = stream.Position;
stream.Position = stream.Length - 8;
await part.ReadTrailerAsync(cancellationToken);
stream.Position = position;
part.EntryStartPosition = position;
}
else
{
// For non-seekable streams, we can't read the trailer or track position.
// Set to 0 since the stream will be read sequentially from its current position.
part.EntryStartPosition = 0;
}
return part;
}
private GZipFilePart(Stream stream, IArchiveEncoding archiveEncoding)
: base(archiveEncoding) => _stream = stream;
internal long EntryStartPosition { get; private set; }
internal DateTime? DateModified { get; private set; }
internal uint? Crc { get; private set; }
@@ -51,12 +83,22 @@ internal sealed class GZipFilePart : FilePart
{
// Read and potentially verify the GZIP trailer: CRC32 and size mod 2^32
Span<byte> trailer = stackalloc byte[8];
var n = _stream.Read(trailer);
_stream.ReadFully(trailer);
Crc = BinaryPrimitives.ReadUInt32LittleEndian(trailer);
UncompressedSize = BinaryPrimitives.ReadUInt32LittleEndian(trailer.Slice(4));
}
private async ValueTask ReadTrailerAsync(CancellationToken cancellationToken = default)
{
// Read and potentially verify the GZIP trailer: CRC32 and size mod 2^32
var trailer = new byte[8];
_ = await _stream.ReadFullyAsync(trailer, 0, 8, cancellationToken);
Crc = BinaryPrimitives.ReadUInt32LittleEndian(trailer);
UncompressedSize = BinaryPrimitives.ReadUInt32LittleEndian(trailer.AsSpan().Slice(4));
}
private void ReadAndValidateGzipHeader()
{
// read the header on the first read
@@ -109,6 +151,61 @@ internal sealed class GZipFilePart : FilePart
}
}
private async ValueTask ReadAndValidateGzipHeaderAsync(
CancellationToken cancellationToken = default
)
{
// read the header on the first read
var header = new byte[10];
var n = await _stream.ReadAsync(header, 0, 10, cancellationToken);
// workitem 8501: handle edge case (decompress empty stream)
if (n == 0)
{
return;
}
if (n != 10)
{
throw new ZlibException("Not a valid GZIP stream.");
}
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
{
throw new ZlibException("Bad GZIP header.");
}
var timet = BinaryPrimitives.ReadInt32LittleEndian(header.AsSpan().Slice(4));
DateModified = TarHeader.EPOCH.AddSeconds(timet);
if ((header[3] & 0x04) == 0x04)
{
// read and discard extra field
var lengthField = new byte[2];
_ = await _stream.ReadAsync(lengthField, 0, 2, cancellationToken);
var extraLength = (short)(lengthField[0] + (lengthField[1] * 256));
var extra = new byte[extraLength];
if (!await _stream.ReadFullyAsync(extra, cancellationToken))
{
throw new ZlibException("Unexpected end-of-file reading GZIP header.");
}
}
if ((header[3] & 0x08) == 0x08)
{
_name = await ReadZeroTerminatedStringAsync(_stream, cancellationToken);
}
if ((header[3] & 0x10) == 0x010)
{
await ReadZeroTerminatedStringAsync(_stream, cancellationToken);
}
if ((header[3] & 0x02) == 0x02)
{
var buf = new byte[1];
_ = await _stream.ReadAsync(buf, 0, 1, cancellationToken); // CRC16, ignore
}
}
private string ReadZeroTerminatedString(Stream stream)
{
Span<byte> buf1 = stackalloc byte[1];
@@ -134,4 +231,33 @@ internal sealed class GZipFilePart : FilePart
var buffer = list.ToArray();
return ArchiveEncoding.Decode(buffer);
}
private async ValueTask<string> ReadZeroTerminatedStringAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
var buf1 = new byte[1];
var list = new List<byte>();
var done = false;
do
{
// workitem 7740
var n = await stream.ReadAsync(buf1, 0, 1, cancellationToken);
if (n != 1)
{
throw new ZlibException("Unexpected EOF reading GZIP header.");
}
if (buf1[0] == 0)
{
done = true;
}
else
{
list.Add(buf1[0]);
}
} while (!done);
var buffer = list.ToArray();
return ArchiveEncoding.Decode(buffer);
}
}
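
The flag checks in ReadAndValidateGzipHeaderAsync are the standard RFC 1952 FLG bits, read from header[3]. A compact self-contained reference for the masks tested above (the constant names are illustrative, not from the library):

using System;

// RFC 1952 FLG bits, matching the masks tested in the header parser above.
const byte FHCRC = 0x02;    // a 2-byte CRC16 of the header follows
const byte FEXTRA = 0x04;   // a 2-byte little-endian length plus "extra" field follows
const byte FNAME = 0x08;    // a zero-terminated original file name follows
const byte FCOMMENT = 0x10; // a zero-terminated comment follows

byte flg = 0x08; // e.g. a gzip member that only stores the original file name
Console.WriteLine(
    $"crc16: {(flg & FHCRC) != 0}, extra: {(flg & FEXTRA) != 0}, "
        + $"name: {(flg & FNAME) != 0}, comment: {(flg & FCOMMENT) != 0}"
);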


@@ -29,7 +29,8 @@ internal class DirectoryEndHeader : ZipHeader
DirectorySize = await reader.ReadUInt32Async();
DirectoryStartOffsetRelativeToDisk = await reader.ReadUInt32Async();
CommentLength = await reader.ReadUInt16Async();
Comment = await reader.ReadBytesAsync(CommentLength);
Comment = new byte[CommentLength];
await reader.ReadBytesAsync(Comment, 0, CommentLength);
}
public ushort VolumeNumber { get; private set; }


@@ -53,10 +53,12 @@ internal class DirectoryEntryHeader : ZipFileEntry
InternalFileAttributes = await reader.ReadUInt16Async();
ExternalFileAttributes = await reader.ReadUInt32Async();
RelativeOffsetOfEntryHeader = await reader.ReadUInt32Async();
var name = await reader.ReadBytesAsync(nameLength);
var extra = await reader.ReadBytesAsync(extraLength);
var comment = await reader.ReadBytesAsync(commentLength);
var name = new byte[nameLength];
var extra = new byte[extraLength];
var comment = new byte[commentLength];
await reader.ReadBytesAsync(name, 0, nameLength);
await reader.ReadBytesAsync(extra, 0, extraLength);
await reader.ReadBytesAsync(comment, 0, commentLength);
ProcessReadData(name, extra, comment);
}


@@ -37,8 +37,10 @@ internal class LocalEntryHeader(IArchiveEncoding archiveEncoding)
UncompressedSize = await reader.ReadUInt32Async();
var nameLength = await reader.ReadUInt16Async();
var extraLength = await reader.ReadUInt16Async();
var name = await reader.ReadBytesAsync(nameLength);
var extra = await reader.ReadBytesAsync(extraLength);
var name = new byte[nameLength];
var extra = new byte[extraLength];
await reader.ReadBytesAsync(name, 0, nameLength);
await reader.ReadBytesAsync(extra, 0, extraLength);
ProcessReadData(name, extra);
}


@@ -38,12 +38,12 @@ internal class Zip64DirectoryEndHeader : ZipHeader
TotalNumberOfEntries = (long)await reader.ReadUInt64Async();
DirectorySize = (long)await reader.ReadUInt64Async();
DirectoryStartOffsetRelativeToDisk = (long)await reader.ReadUInt64Async();
DataSector = await reader.ReadBytesAsync(
(int)(
SizeOfDirectoryEndRecord
- SIZE_OF_FIXED_HEADER_DATA_EXCEPT_SIGNATURE_AND_SIZE_FIELDS
)
var size = (int)(
SizeOfDirectoryEndRecord
- SIZE_OF_FIXED_HEADER_DATA_EXCEPT_SIGNATURE_AND_SIZE_FIELDS
);
DataSector = new byte[size];
await reader.ReadBytesAsync(DataSector, 0, size);
}
private const int SIZE_OF_FIXED_HEADER_DATA_EXCEPT_SIGNATURE_AND_SIZE_FIELDS = 44;


@@ -1,4 +1,5 @@
using System;
using System.Buffers;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
@@ -16,6 +17,8 @@ internal sealed class SeekableZipHeaderFactory : ZipHeaderFactory
private const int MAX_SEARCH_LENGTH_FOR_EOCD = 65557;
private bool _zip64;
private static readonly byte[] needle = { 0x06, 0x05, 0x4b, 0x50 };
internal SeekableZipHeaderFactory(string? password, IArchiveEncoding archiveEncoding)
: base(StreamingMode.Seekable, password, archiveEncoding) { }
@@ -153,74 +156,8 @@ internal sealed class SeekableZipHeaderFactory : ZipHeaderFactory
}
}
internal async IAsyncEnumerable<ZipHeader> ReadSeekableHeaderAsync(Stream stream, bool useSync)
{
var reader = new AsyncBinaryReader(stream);
await SeekBackToHeaderAsync(stream, reader);
var eocd_location = stream.Position;
var entry = new DirectoryEndHeader();
await entry.Read(reader);
if (entry.IsZip64)
{
_zip64 = true;
// ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR should be before the EOCD
stream.Seek(eocd_location - ZIP64_EOCD_LENGTH - 4, SeekOrigin.Begin);
var zip64_locator = await reader.ReadUInt32Async();
if (zip64_locator != ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR)
{
throw new ArchiveException("Failed to locate the Zip64 Directory Locator");
}
var zip64Locator = new Zip64DirectoryEndLocatorHeader();
await zip64Locator.Read(reader);
stream.Seek(zip64Locator.RelativeOffsetOfTheEndOfDirectoryRecord, SeekOrigin.Begin);
var zip64Signature = await reader.ReadUInt32Async();
if (zip64Signature != ZIP64_END_OF_CENTRAL_DIRECTORY)
{
throw new ArchiveException("Failed to locate the Zip64 Header");
}
var zip64Entry = new Zip64DirectoryEndHeader();
await zip64Entry.Read(reader);
stream.Seek(zip64Entry.DirectoryStartOffsetRelativeToDisk, SeekOrigin.Begin);
}
else
{
stream.Seek(entry.DirectoryStartOffsetRelativeToDisk, SeekOrigin.Begin);
}
var position = stream.Position;
while (true)
{
stream.Position = position;
var signature = await reader.ReadUInt32Async();
var nextHeader = await ReadHeader(signature, reader, _zip64);
position = stream.Position;
if (nextHeader is null)
{
yield break;
}
if (nextHeader is DirectoryEntryHeader entryHeader)
{
//entry could be zero bytes so we need to know that.
entryHeader.HasData = entryHeader.CompressedSize != 0;
yield return entryHeader;
}
else if (nextHeader is DirectoryEndHeader endHeader)
{
yield return endHeader;
}
}
}
private static bool IsMatch(byte[] haystack, int position, byte[] needle)
private static bool IsMatch(Span<byte> haystack, int position, byte[] needle)
{
for (var i = 0; i < needle.Length; i++)
{
@@ -247,29 +184,35 @@ internal sealed class SeekableZipHeaderFactory : ZipHeaderFactory
stream.Length < MAX_SEARCH_LENGTH_FOR_EOCD
? (int)stream.Length
: MAX_SEARCH_LENGTH_FOR_EOCD;
// We search for marker in reverse to find the first occurrence
byte[] needle = { 0x06, 0x05, 0x4b, 0x50 };
stream.Seek(-len, SeekOrigin.End);
var seek = ArrayPool<byte>.Shared.Rent(len);
var seek = await reader.ReadBytesAsync(len);
// Search in reverse
Array.Reverse(seek);
// don't exclude the minimum eocd region, otherwise you fail to locate the header in empty zip files
var max_search_area = len; // - MINIMUM_EOCD_LENGTH;
for (var pos_from_end = 0; pos_from_end < max_search_area; ++pos_from_end)
try
{
if (IsMatch(seek, pos_from_end, needle))
{
stream.Seek(-pos_from_end, SeekOrigin.End);
return;
}
}
await reader.ReadBytesAsync(seek, 0, len, default);
var memory = new Memory<byte>(seek, 0, len);
var span = memory.Span;
span.Reverse();
throw new ArchiveException("Failed to locate the Zip Header");
// don't exclude the minimum eocd region, otherwise you fail to locate the header in empty zip files
var max_search_area = len; // - MINIMUM_EOCD_LENGTH;
for (var pos_from_end = 0; pos_from_end < max_search_area; ++pos_from_end)
{
if (IsMatch(span, pos_from_end, needle))
{
stream.Seek(-pos_from_end, SeekOrigin.End);
return;
}
}
throw new ArchiveException("Failed to locate the Zip Header");
}
finally
{
ArrayPool<byte>.Shared.Return(seek);
}
}
private static void SeekBackToHeader(Stream stream, BinaryReader reader)
@@ -286,9 +229,6 @@ internal sealed class SeekableZipHeaderFactory : ZipHeaderFactory
stream.Length < MAX_SEARCH_LENGTH_FOR_EOCD
? (int)stream.Length
: MAX_SEARCH_LENGTH_FOR_EOCD;
// We search for marker in reverse to find the first occurrence
byte[] needle = { 0x06, 0x05, 0x4b, 0x50 };
stream.Seek(-len, SeekOrigin.End);
var seek = reader.ReadBytes(len);
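
The async EOCD search now rents its scratch buffer from ArrayPool, scans a reversed span for the pre-reversed signature bytes, and returns the buffer in a finally block. A self-contained sketch of the same rent, reverse, scan, and return sequence; the class and method names are illustrative, and the return value simply mirrors the stream.Seek(-posFromEnd, SeekOrigin.End) call in the real code:

using System;
using System.Buffers;
using System.IO;

internal static class EocdLocator
{
    // The EOCD signature bytes, pre-reversed (0x50 0x4b 0x05 0x06 on disk) because
    // the buffer itself is reversed before scanning.
    private static readonly byte[] ReversedSignature = { 0x06, 0x05, 0x4b, 0x50 };

    // Returns how far back from the end of the stream the signature starts.
    public static int FindSignatureFromEnd(Stream stream, int maxSearchLength)
    {
        var len = (int)Math.Min(stream.Length, maxSearchLength);
        stream.Seek(-len, SeekOrigin.End);

        var buffer = ArrayPool<byte>.Shared.Rent(len);
        try
        {
            var filled = 0;
            while (filled < len)
            {
                var read = stream.Read(buffer, filled, len - filled);
                if (read == 0)
                {
                    throw new EndOfStreamException();
                }
                filled += read;
            }

            var span = buffer.AsSpan(0, len);
            span.Reverse();

            for (var posFromEnd = 0; posFromEnd + ReversedSignature.Length <= len; posFromEnd++)
            {
                if (span.Slice(posFromEnd, ReversedSignature.Length).SequenceEqual(ReversedSignature))
                {
                    return posFromEnd;
                }
            }

            throw new InvalidDataException("End of central directory signature not found.");
        }
        finally
        {
            ArrayPool<byte>.Shared.Return(buffer);
        }
    }
}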

View File

@@ -79,7 +79,7 @@ internal class ZipHeaderFactory
}
else
{
await reader.ReadBytesAsync(zip64 ? 20 : 12);
await reader.SkipAsync(zip64 ? 20 : 12);
}
return null;
}

View File

@@ -64,6 +64,7 @@
// -----------------------------------------------------------------------
using System;
using System.Buffers;
using SharpCompress.Algorithms;
namespace SharpCompress.Compressors.Deflate;
@@ -116,14 +117,14 @@ internal sealed class InflateBlocks
internal int readAt; // window read pointer
internal int table; // table lengths (14 bits)
internal int[] tb = new int[1]; // bit length decoding tree
internal byte[] window; // sliding window
internal IMemoryOwner<byte> window; // sliding window
internal int writeAt; // window write pointer
internal InflateBlocks(ZlibCodec codec, object checkfn, int w)
{
_codec = codec;
hufts = new int[MANY * 3];
window = new byte[w];
window = MemoryPool<byte>.Shared.Rent(w);
end = w;
this.checkfn = checkfn;
mode = InflateBlockMode.TYPE;
@@ -340,7 +341,7 @@ internal sealed class InflateBlocks
{
t = m;
}
Array.Copy(_codec.InputBuffer, p, window, q, t);
_codec.InputBuffer.AsSpan(p, t).CopyTo(window.Memory.Span.Slice(q));
p += t;
n -= t;
q += t;
@@ -715,13 +716,14 @@ internal sealed class InflateBlocks
internal void Free()
{
Reset();
window?.Dispose();
window = null;
hufts = null;
}
internal void SetDictionary(byte[] d, int start, int n)
{
Array.Copy(d, start, window, 0, n);
d.AsSpan(start, n).CopyTo(window.Memory.Span.Slice(0, n));
readAt = writeAt = n;
}
@@ -774,11 +776,11 @@ internal sealed class InflateBlocks
// update check information
if (checkfn != null)
{
_codec._adler32 = check = Adler32.Calculate(check, window.AsSpan(readAt, nBytes));
_codec._adler32 = check = Adler32.Calculate(check, window.Memory.Span.Slice(readAt, nBytes));
}
// copy as far as end of window
Array.Copy(window, readAt, _codec.OutputBuffer, _codec.NextOut, nBytes);
window.Memory.Span.Slice(readAt, nBytes).CopyTo(_codec.OutputBuffer.AsSpan(_codec.NextOut));
_codec.NextOut += nBytes;
readAt += nBytes;
@@ -1213,7 +1215,7 @@ internal sealed class InflateCodes
}
}
blocks.window[q++] = blocks.window[f++];
blocks.window.Memory.Span[q++] = blocks.window.Memory.Span[f++];
m--;
if (f == blocks.end)
@@ -1259,7 +1261,7 @@ internal sealed class InflateCodes
}
r = ZlibConstants.Z_OK;
blocks.window[q++] = (byte)lit;
blocks.window.Memory.Span[q++] = (byte)lit;
m--;
mode = START;
@@ -1396,7 +1398,7 @@ internal sealed class InflateCodes
b >>= (tp[tp_index_t_3 + 1]);
k -= (tp[tp_index_t_3 + 1]);
s.window[q++] = (byte)tp[tp_index_t_3 + 2];
s.window.Memory.Span[q++] = (byte)tp[tp_index_t_3 + 2];
m--;
continue;
}
@@ -1461,13 +1463,13 @@ internal sealed class InflateCodes
r = q - d;
if (q - r > 0 && 2 > (q - r))
{
s.window[q++] = s.window[r++]; // minimum count is three,
s.window[q++] = s.window[r++]; // so unroll loop a little
s.window.Memory.Span[q++] = s.window.Memory.Span[r++]; // minimum count is three,
s.window.Memory.Span[q++] = s.window.Memory.Span[r++]; // so unroll loop a little
c -= 2;
}
else
{
Array.Copy(s.window, r, s.window, q, 2);
s.window.Memory.Span.Slice(r, 2).CopyTo(s.window.Memory.Span.Slice(q));
q += 2;
r += 2;
c -= 2;
@@ -1490,12 +1492,12 @@ internal sealed class InflateCodes
{
do
{
s.window[q++] = s.window[r++];
s.window.Memory.Span[q++] = s.window.Memory.Span[r++];
} while (--e != 0);
}
else
{
Array.Copy(s.window, r, s.window, q, e);
s.window.Memory.Span.Slice(r, e).CopyTo(s.window.Memory.Span.Slice(q));
q += e;
r += e;
e = 0;
@@ -1509,12 +1511,12 @@ internal sealed class InflateCodes
{
do
{
s.window[q++] = s.window[r++];
s.window.Memory.Span[q++] = s.window.Memory.Span[r++];
} while (--c != 0);
}
else
{
Array.Copy(s.window, r, s.window, q, c);
s.window.Memory.Span.Slice(r, c).CopyTo(s.window.Memory.Span.Slice(q));
q += c;
r += c;
c = 0;
@@ -1560,7 +1562,7 @@ internal sealed class InflateCodes
{
b >>= (tp[tp_index_t_3 + 1]);
k -= (tp[tp_index_t_3 + 1]);
s.window[q++] = (byte)tp[tp_index_t_3 + 2];
s.window.Memory.Span[q++] = (byte)tp[tp_index_t_3 + 2];
m--;
break;
}
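
The inflate window moves from a plain byte[] to a pooled IMemoryOwner&lt;byte&gt;, with all element access and copies going through Memory&lt;byte&gt;.Span. A minimal sketch of that pattern, assuming nothing about the InflateBlocks internals beyond what the diff shows; note that Rent may hand back a buffer larger than requested, so the logical window size is tracked separately:

using System;
using System.Buffers;

internal sealed class PooledWindow : IDisposable
{
    private IMemoryOwner<byte>? _owner;
    private readonly int _size;

    public PooledWindow(int size)
    {
        _size = size;
        // MemoryPool.Rent may return more than requested; only the first _size
        // bytes are treated as the window.
        _owner = MemoryPool<byte>.Shared.Rent(size);
    }

    public Span<byte> Span =>
        _owner is null
            ? throw new ObjectDisposedException(nameof(PooledWindow))
            : _owner.Memory.Span.Slice(0, _size);

    // Copy incoming data into the window at the given offset, analogous to the
    // InputBuffer.AsSpan(p, t).CopyTo(window.Memory.Span.Slice(q)) call in the diff.
    public void CopyIn(ReadOnlySpan<byte> source, int offset) => source.CopyTo(Span.Slice(offset));

    public void Dispose()
    {
        _owner?.Dispose();
        _owner = null;
    }
}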

View File

@@ -87,10 +87,10 @@ internal class RarStream : Stream, IStreamStack
#endif
ArrayPool<byte>.Shared.Return(this.tmpBuffer);
this.tmpBuffer = null;
readStream.Dispose();
}
isDisposed = true;
base.Dispose(disposing);
readStream.Dispose();
}
}

View File

@@ -126,17 +126,13 @@ internal sealed partial class Unpack : BitInput, IRarUnpack
private FileHeader fileHeader;
private void Init(byte[] window)
private void Init()
{
if (this.window is null && window is null)
if (this.window is null)
{
this.window = ArrayPool<byte>.Shared.Rent(PackDef.MAXWINSIZE);
}
else if (window is not null)
{
this.window = window;
externalWindow = true;
}
inAddr = 0;
UnpInitData(false);
}
@@ -149,7 +145,7 @@ internal sealed partial class Unpack : BitInput, IRarUnpack
this.writeStream = writeStream;
if (!fileHeader.IsSolid)
{
Init(null);
Init();
}
suspended = false;
DoUnpack();
@@ -168,7 +164,7 @@ internal sealed partial class Unpack : BitInput, IRarUnpack
this.writeStream = writeStream;
if (!fileHeader.IsSolid)
{
Init(null);
Init();
}
suspended = false;
await DoUnpackAsync(cancellationToken).ConfigureAwait(false);

View File

@@ -32,11 +32,15 @@ namespace SharpCompress.Factories
public IReader OpenReader(Stream stream, ReaderOptions? options) =>
AceReader.Open(stream, options);
public ValueTask<IAsyncReader> OpenReaderAsync(
public IAsyncReader OpenReaderAsync(
Stream stream,
ReaderOptions? options,
CancellationToken cancellationToken = default
) => new(AceReader.Open(stream, options));
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncReader)AceReader.Open(stream, options);
}
public override ValueTask<bool> IsArchiveAsync(
Stream stream,

View File

@@ -44,11 +44,15 @@ namespace SharpCompress.Factories
public IReader OpenReader(Stream stream, ReaderOptions? options) =>
ArcReader.Open(stream, options);
public ValueTask<IAsyncReader> OpenReaderAsync(
public IAsyncReader OpenReaderAsync(
Stream stream,
ReaderOptions? options,
CancellationToken cancellationToken = default
) => new(ArcReader.Open(stream, options));
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncReader)ArcReader.Open(stream, options);
}
public override ValueTask<bool> IsArchiveAsync(
Stream stream,

View File

@@ -35,11 +35,15 @@ namespace SharpCompress.Factories
public IReader OpenReader(Stream stream, ReaderOptions? options) =>
ArjReader.Open(stream, options);
public ValueTask<IAsyncReader> OpenReaderAsync(
public IAsyncReader OpenReaderAsync(
Stream stream,
ReaderOptions? options,
CancellationToken cancellationToken = default
) => new(ArjReader.Open(stream, options));
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncReader)ArjReader.Open(stream, options);
}
public override ValueTask<bool> IsArchiveAsync(
Stream stream,

View File

@@ -133,10 +133,7 @@ public abstract class Factory : IFactory
)
{
((IStreamStack)stream).StackSeek(pos);
return (
true,
await readerFactory.OpenReaderAsync(stream, options, cancellationToken)
);
return (true, readerFactory.OpenReaderAsync(stream, options, cancellationToken));
}
}

View File

@@ -65,11 +65,8 @@ public class GZipFactory
GZipArchive.Open(stream, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => GZipArchive.OpenAsync(stream, readerOptions, cancellationToken);
public IAsyncArchive OpenAsync(Stream stream, ReaderOptions? readerOptions = null) =>
(IAsyncArchive)Open(stream, readerOptions);
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
@@ -78,15 +75,15 @@ public class GZipFactory
) => new(IsArchive(stream, password, bufferSize));
/// <inheritdoc/>
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
GZipArchive.Open(fileInfo, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
public IAsyncArchive OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => GZipArchive.OpenAsync(fileInfo, readerOptions, cancellationToken);
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)Open(fileInfo, readerOptions);
}
#endregion
@@ -97,22 +94,25 @@ public class GZipFactory
GZipArchive.Open(streams, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
public IAsyncArchive OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => GZipArchive.OpenAsync(streams, readerOptions, cancellationToken);
ReaderOptions? readerOptions = null
) => (IAsyncArchive)Open(streams, readerOptions);
/// <inheritdoc/>
public IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null) =>
GZipArchive.Open(fileInfos, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
public IAsyncArchive OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => GZipArchive.OpenAsync(fileInfos, readerOptions, cancellationToken);
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)Open(fileInfos, readerOptions);
}
#endregion
@@ -153,16 +153,20 @@ public class GZipFactory
GZipReader.Open(stream, options);
/// <inheritdoc/>
public ValueTask<IAsyncReader> OpenReaderAsync(
public IAsyncReader OpenReaderAsync(
Stream stream,
ReaderOptions? options,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(GZipReader.Open(stream, options));
return (IAsyncReader)GZipReader.Open(stream, options);
}
/// <inheritdoc/>
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
GZipArchive.Open(fileInfo, readerOptions);
#endregion
#region IWriterFactory
@@ -178,14 +182,18 @@ public class GZipFactory
}
/// <inheritdoc/>
public ValueTask<IWriter> OpenAsync(
public IAsyncWriter OpenAsync(
Stream stream,
WriterOptions writerOptions,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(stream, writerOptions));
if (writerOptions.CompressionType != CompressionType.GZip)
{
throw new InvalidFormatException("GZip archives only support GZip compression type.");
}
return (IAsyncWriter)Open(stream, writerOptions);
}
#endregion

View File

@@ -50,22 +50,23 @@ public class RarFactory : Factory, IArchiveFactory, IMultiArchiveFactory, IReade
RarArchive.Open(stream, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => RarArchive.OpenAsync(stream, readerOptions, cancellationToken);
public IAsyncArchive OpenAsync(Stream stream, ReaderOptions? readerOptions = null) =>
(IAsyncArchive)Open(stream, readerOptions);
/// <inheritdoc/>
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
RarArchive.Open(fileInfo, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
public IAsyncArchive OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => RarArchive.OpenAsync(fileInfo, readerOptions, cancellationToken);
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)Open(fileInfo, readerOptions);
}
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
@@ -82,22 +83,25 @@ public class RarFactory : Factory, IArchiveFactory, IMultiArchiveFactory, IReade
RarArchive.Open(streams, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
public IAsyncArchive OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => RarArchive.OpenAsync(streams, readerOptions, cancellationToken);
ReaderOptions? readerOptions = null
) => (IAsyncArchive)Open(streams, readerOptions);
/// <inheritdoc/>
public IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null) =>
RarArchive.Open(fileInfos, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
public IAsyncArchive OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => RarArchive.OpenAsync(fileInfos, readerOptions, cancellationToken);
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)Open(fileInfos, readerOptions);
}
#endregion
@@ -108,14 +112,14 @@ public class RarFactory : Factory, IArchiveFactory, IMultiArchiveFactory, IReade
RarReader.Open(stream, options);
/// <inheritdoc/>
public ValueTask<IAsyncReader> OpenReaderAsync(
public IAsyncReader OpenReaderAsync(
Stream stream,
ReaderOptions? options,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(RarReader.Open(stream, options));
return (IAsyncReader)RarReader.Open(stream, options);
}
#endregion

View File

@@ -45,22 +45,23 @@ public class SevenZipFactory : Factory, IArchiveFactory, IMultiArchiveFactory
SevenZipArchive.Open(stream, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => SevenZipArchive.OpenAsync(stream, readerOptions, cancellationToken);
public IAsyncArchive OpenAsync(Stream stream, ReaderOptions? readerOptions = null) =>
(IAsyncArchive)Open(stream, readerOptions);
/// <inheritdoc/>
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
SevenZipArchive.Open(fileInfo, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
public IAsyncArchive OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => SevenZipArchive.OpenAsync(fileInfo, readerOptions, cancellationToken);
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)Open(fileInfo, readerOptions);
}
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
@@ -77,22 +78,25 @@ public class SevenZipFactory : Factory, IArchiveFactory, IMultiArchiveFactory
SevenZipArchive.Open(streams, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
public IAsyncArchive OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => SevenZipArchive.OpenAsync(streams, readerOptions, cancellationToken);
ReaderOptions? readerOptions = null
) => (IAsyncArchive)Open(streams, readerOptions);
/// <inheritdoc/>
public IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null) =>
SevenZipArchive.Open(fileInfos, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
public IAsyncArchive OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => SevenZipArchive.OpenAsync(fileInfos, readerOptions, cancellationToken);
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)Open(fileInfos, readerOptions);
}
#endregion

View File

@@ -76,22 +76,23 @@ public class TarFactory
TarArchive.Open(stream, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => TarArchive.OpenAsync(stream, readerOptions, cancellationToken);
public IAsyncArchive OpenAsync(Stream stream, ReaderOptions? readerOptions = null) =>
(IAsyncArchive)Open(stream, readerOptions);
/// <inheritdoc/>
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
TarArchive.Open(fileInfo, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
public IAsyncArchive OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => TarArchive.OpenAsync(fileInfo, readerOptions, cancellationToken);
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)Open(fileInfo, readerOptions);
}
#endregion
@@ -102,22 +103,25 @@ public class TarFactory
TarArchive.Open(streams, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
public IAsyncArchive OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => TarArchive.OpenAsync(streams, readerOptions, cancellationToken);
ReaderOptions? readerOptions = null
) => (IAsyncArchive)Open(streams, readerOptions);
/// <inheritdoc/>
public IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null) =>
TarArchive.Open(fileInfos, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
public IAsyncArchive OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => TarArchive.OpenAsync(fileInfos, readerOptions, cancellationToken);
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)Open(fileInfos, readerOptions);
}
#endregion
@@ -271,14 +275,14 @@ public class TarFactory
TarReader.Open(stream, options);
/// <inheritdoc/>
public ValueTask<IAsyncReader> OpenReaderAsync(
public IAsyncReader OpenReaderAsync(
Stream stream,
ReaderOptions? options,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(TarReader.Open(stream, options));
return (IAsyncReader)TarReader.Open(stream, options);
}
#endregion
@@ -290,14 +294,14 @@ public class TarFactory
new TarWriter(stream, new TarWriterOptions(writerOptions));
/// <inheritdoc/>
public ValueTask<IWriter> OpenAsync(
public IAsyncWriter OpenAsync(
Stream stream,
WriterOptions writerOptions,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(stream, writerOptions));
return (IAsyncWriter)Open(stream, writerOptions);
}
#endregion

View File

@@ -143,22 +143,23 @@ public class ZipFactory
ZipArchive.Open(stream, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => ZipArchive.OpenAsync(stream, readerOptions, cancellationToken);
public IAsyncArchive OpenAsync(Stream stream, ReaderOptions? readerOptions = null) =>
(IAsyncArchive)Open(stream, readerOptions);
/// <inheritdoc/>
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
ZipArchive.Open(fileInfo, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
public IAsyncArchive OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => ZipArchive.OpenAsync(fileInfo, readerOptions, cancellationToken);
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)Open(fileInfo, readerOptions);
}
#endregion
@@ -169,22 +170,25 @@ public class ZipFactory
ZipArchive.Open(streams, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
public IAsyncArchive OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => ZipArchive.OpenAsync(streams, readerOptions, cancellationToken);
ReaderOptions? readerOptions = null
) => (IAsyncArchive)Open(streams, readerOptions);
/// <inheritdoc/>
public IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null) =>
ZipArchive.Open(fileInfos, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
public IAsyncArchive OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => ZipArchive.OpenAsync(fileInfos, readerOptions, cancellationToken);
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)Open(fileInfos, readerOptions);
}
#endregion
@@ -195,14 +199,14 @@ public class ZipFactory
ZipReader.Open(stream, options);
/// <inheritdoc/>
public ValueTask<IAsyncReader> OpenReaderAsync(
public IAsyncReader OpenReaderAsync(
Stream stream,
ReaderOptions? options,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(ZipReader.Open(stream, options));
return (IAsyncReader)ZipReader.Open(stream, options);
}
#endregion
@@ -214,14 +218,14 @@ public class ZipFactory
new ZipWriter(stream, new ZipWriterOptions(writerOptions));
/// <inheritdoc/>
public ValueTask<IWriter> OpenAsync(
public IAsyncWriter OpenAsync(
Stream stream,
WriterOptions writerOptions,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(stream, writerOptions));
return (IAsyncWriter)Open(stream, writerOptions);
}
#endregion

View File

@@ -1,4 +1,5 @@
using System;
using System.Buffers;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
@@ -28,13 +29,16 @@ internal class BufferedSubStream : SharpCompressStream, IStreamStack
#if DEBUG_STREAMS
this.DebugDispose(typeof(BufferedSubStream));
#endif
if (disposing) { }
if (disposing)
{
ArrayPool<byte>.Shared.Return(_cache);
}
base.Dispose(disposing);
}
private int _cacheOffset;
private int _cacheLength;
private readonly byte[] _cache = new byte[32 << 10];
private readonly byte[] _cache = ArrayPool<byte>.Shared.Rent(32 << 10);
private long origin;
private long BytesLeftToRead { get; set; }
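
The 32 KiB cache is now rented from ArrayPool in the field initializer and returned in Dispose. A hedged sketch of that rent-and-return lifecycle; because Rent can return an array larger than requested, the sketch tracks the number of valid bytes explicitly instead of relying on the array's Length:

using System;
using System.Buffers;

internal sealed class PooledCache : IDisposable
{
    // Rent may return a larger array than asked for, so the number of valid
    // bytes is tracked separately instead of using _cache.Length.
    private byte[]? _cache = ArrayPool<byte>.Shared.Rent(32 << 10);
    private int _cacheLength;

    public ReadOnlySpan<byte> Valid =>
        _cache is null ? default : _cache.AsSpan(0, _cacheLength);

    public void Fill(ReadOnlySpan<byte> data)
    {
        if (_cache is null)
        {
            throw new ObjectDisposedException(nameof(PooledCache));
        }
        data.CopyTo(_cache); // assumes data fits in the requested 32 KiB
        _cacheLength = data.Length;
    }

    public void Dispose()
    {
        if (_cache is not null)
        {
            ArrayPool<byte>.Shared.Return(_cache);
            _cache = null;
        }
    }
}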

View File

@@ -1,8 +1,6 @@
using System;
using System.Buffers;
using System.Diagnostics;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;

View File

@@ -10,23 +10,23 @@ namespace SharpCompress;
internal sealed class LazyAsyncReadOnlyCollection<T>(IAsyncEnumerable<T> source)
: IAsyncEnumerable<T>
{
private readonly List<T> backing = new();
private readonly IAsyncEnumerator<T> source = source.GetAsyncEnumerator();
private bool fullyLoaded;
private readonly List<T> _backing = new();
private readonly IAsyncEnumerator<T> _source = source.GetAsyncEnumerator();
private bool _fullyLoaded;
private class LazyLoader(
LazyAsyncReadOnlyCollection<T> lazyReadOnlyCollection,
CancellationToken cancellationToken
) : IAsyncEnumerator<T>
{
private bool disposed;
private int index = -1;
private bool _disposed;
private int _index = -1;
public ValueTask DisposeAsync()
{
if (!disposed)
if (!_disposed)
{
disposed = true;
_disposed = true;
}
return default;
}
@@ -34,27 +34,27 @@ internal sealed class LazyAsyncReadOnlyCollection<T>(IAsyncEnumerable<T> source)
public async ValueTask<bool> MoveNextAsync()
{
cancellationToken.ThrowIfCancellationRequested();
if (index + 1 < lazyReadOnlyCollection.backing.Count)
if (_index + 1 < lazyReadOnlyCollection._backing.Count)
{
index++;
_index++;
return true;
}
if (
!lazyReadOnlyCollection.fullyLoaded
&& await lazyReadOnlyCollection.source.MoveNextAsync()
!lazyReadOnlyCollection._fullyLoaded
&& await lazyReadOnlyCollection._source.MoveNextAsync()
)
{
lazyReadOnlyCollection.backing.Add(lazyReadOnlyCollection.source.Current);
index++;
lazyReadOnlyCollection._backing.Add(lazyReadOnlyCollection._source.Current);
_index++;
return true;
}
lazyReadOnlyCollection.fullyLoaded = true;
lazyReadOnlyCollection._fullyLoaded = true;
return false;
}
#region IEnumerator<T> Members
public T Current => lazyReadOnlyCollection.backing[index];
public T Current => lazyReadOnlyCollection._backing[_index];
#endregion
@@ -62,9 +62,9 @@ internal sealed class LazyAsyncReadOnlyCollection<T>(IAsyncEnumerable<T> source)
public void Dispose()
{
if (!disposed)
if (!_disposed)
{
disposed = true;
_disposed = true;
}
}
@@ -73,18 +73,18 @@ internal sealed class LazyAsyncReadOnlyCollection<T>(IAsyncEnumerable<T> source)
internal async ValueTask EnsureFullyLoaded()
{
if (!fullyLoaded)
if (!_fullyLoaded)
{
var loader = new LazyLoader(this, CancellationToken.None);
while (await loader.MoveNextAsync())
{
// Intentionally empty
}
fullyLoaded = true;
_fullyLoaded = true;
}
}
internal IEnumerable<T> GetLoaded() => backing;
internal IEnumerable<T> GetLoaded() => _backing;
#region ICollection<T> Members
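
The renames above touch a lazy caching wrapper: items are pulled from a single async source on demand, cached in a list, and served from the cache on repeat enumeration. A minimal standalone sketch of that idea; the real class also exposes EnsureFullyLoaded and GetLoaded and uses a nested enumerator, which are omitted here:

using System.Collections.Generic;

internal sealed class CachedAsyncSequence<T>
{
    private readonly List<T> _backing = new();
    private readonly IAsyncEnumerator<T> _source;
    private bool _fullyLoaded;

    public CachedAsyncSequence(IAsyncEnumerable<T> source) =>
        _source = source.GetAsyncEnumerator();

    public async IAsyncEnumerable<T> EnumerateAsync()
    {
        var index = 0;
        while (true)
        {
            if (index < _backing.Count)
            {
                // Serve already-materialized items from the cache.
                yield return _backing[index++];
                continue;
            }
            if (_fullyLoaded || !await _source.MoveNextAsync())
            {
                _fullyLoaded = true;
                yield break;
            }
            _backing.Add(_source.Current);
        }
    }
}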

View File

@@ -8,24 +8,24 @@ namespace SharpCompress;
internal sealed class LazyReadOnlyCollection<T> : ICollection<T>
{
private readonly List<T> backing = new();
private readonly IEnumerator<T> source;
private bool fullyLoaded;
private readonly List<T> _backing = new();
private readonly IEnumerator<T> _source;
private bool _fullyLoaded;
public LazyReadOnlyCollection(IEnumerable<T> source) => this.source = source.GetEnumerator();
public LazyReadOnlyCollection(IEnumerable<T> source) => _source = source.GetEnumerator();
private class LazyLoader : IEnumerator<T>
{
private readonly LazyReadOnlyCollection<T> lazyReadOnlyCollection;
private bool disposed;
private int index = -1;
private readonly LazyReadOnlyCollection<T> _lazyReadOnlyCollection;
private bool _disposed;
private int _index = -1;
internal LazyLoader(LazyReadOnlyCollection<T> lazyReadOnlyCollection) =>
this.lazyReadOnlyCollection = lazyReadOnlyCollection;
_lazyReadOnlyCollection = lazyReadOnlyCollection;
#region IEnumerator<T> Members
public T Current => lazyReadOnlyCollection.backing[index];
public T Current => _lazyReadOnlyCollection._backing[_index];
#endregion
@@ -33,9 +33,9 @@ internal sealed class LazyReadOnlyCollection<T> : ICollection<T>
public void Dispose()
{
if (!disposed)
if (!_disposed)
{
disposed = true;
_disposed = true;
}
}
@@ -47,18 +47,18 @@ internal sealed class LazyReadOnlyCollection<T> : ICollection<T>
public bool MoveNext()
{
if (index + 1 < lazyReadOnlyCollection.backing.Count)
if (_index + 1 < _lazyReadOnlyCollection._backing.Count)
{
index++;
_index++;
return true;
}
if (!lazyReadOnlyCollection.fullyLoaded && lazyReadOnlyCollection.source.MoveNext())
if (!_lazyReadOnlyCollection._fullyLoaded && _lazyReadOnlyCollection._source.MoveNext())
{
lazyReadOnlyCollection.backing.Add(lazyReadOnlyCollection.source.Current);
index++;
_lazyReadOnlyCollection._backing.Add(_lazyReadOnlyCollection._source.Current);
_index++;
return true;
}
lazyReadOnlyCollection.fullyLoaded = true;
_lazyReadOnlyCollection._fullyLoaded = true;
return false;
}
@@ -69,14 +69,14 @@ internal sealed class LazyReadOnlyCollection<T> : ICollection<T>
internal void EnsureFullyLoaded()
{
if (!fullyLoaded)
if (!_fullyLoaded)
{
this.ForEach(x => { });
fullyLoaded = true;
_fullyLoaded = true;
}
}
internal IEnumerable<T> GetLoaded() => backing;
internal IEnumerable<T> GetLoaded() => _backing;
#region ICollection<T> Members
@@ -87,13 +87,13 @@ internal sealed class LazyReadOnlyCollection<T> : ICollection<T>
public bool Contains(T item)
{
EnsureFullyLoaded();
return backing.Contains(item);
return _backing.Contains(item);
}
public void CopyTo(T[] array, int arrayIndex)
{
EnsureFullyLoaded();
backing.CopyTo(array, arrayIndex);
_backing.CopyTo(array, arrayIndex);
}
public int Count
@@ -101,7 +101,7 @@ internal sealed class LazyReadOnlyCollection<T> : ICollection<T>
get
{
EnsureFullyLoaded();
return backing.Count;
return _backing.Count;
}
}

View File

@@ -1,6 +1,8 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress;
@@ -29,9 +31,46 @@ public static class EnumerableExtensions
public static class AsyncEnumerableExtensions
{
#if !NET10_0_OR_GREATER
extension<T>(IAsyncEnumerable<T> source)
where T : notnull
{
public async IAsyncEnumerable<TResult> Select<TResult>(Func<T, TResult> selector)
{
await foreach (var element in source)
{
yield return selector(element);
}
}
public async ValueTask<int> CountAsync(CancellationToken cancellationToken = default)
{
await using var e = source.GetAsyncEnumerator(cancellationToken);
var count = 0;
while (await e.MoveNextAsync())
{
checked
{
count++;
}
}
return count;
}
public async IAsyncEnumerable<T> Take(int count)
{
await foreach (var element in source)
{
yield return element;
if (--count == 0)
{
break;
}
}
}
public async ValueTask<List<T>> ToListAsync()
{
var list = new List<T>();
@@ -42,16 +81,7 @@ public static class AsyncEnumerableExtensions
return list;
}
public async IAsyncEnumerable<TResult> Cast<TResult>()
where TResult : class
{
await foreach (var item in source)
{
yield return (item as TResult).NotNull();
}
}
public async ValueTask<bool> All(Func<T, bool> predicate)
public async ValueTask<bool> AllAsync(Func<T, bool> predicate)
{
await foreach (var item in source)
{
@@ -75,27 +105,77 @@ public static class AsyncEnumerableExtensions
}
}
public async ValueTask<T?> FirstOrDefaultAsync()
public async ValueTask<T> SingleAsync(Func<T, bool>? predicate = null)
{
IAsyncEnumerator<T> enumerator;
if (predicate is null)
{
enumerator = source.GetAsyncEnumerator();
}
else
{
enumerator = source.Where(predicate).GetAsyncEnumerator();
}
if (!await enumerator.MoveNextAsync())
{
throw new InvalidOperationException("The source sequence is empty.");
}
var value = enumerator.Current;
if (await enumerator.MoveNextAsync())
{
throw new InvalidOperationException(
"The source sequence contains more than one element."
);
}
return value;
}
public async ValueTask<T> FirstAsync()
{
await foreach (var item in source)
{
return item; // Returns the very first item found
return item;
}
return default; // Returns null/default if the stream is empty
throw new InvalidOperationException("The source sequence is empty.");
}
public async ValueTask<TAccumulate> Aggregate<TAccumulate>(
TAccumulate seed,
Func<TAccumulate, T, TAccumulate> func
public async ValueTask<T?> FirstOrDefaultAsync(
CancellationToken cancellationToken = default
)
{
TAccumulate result = seed;
await foreach (var element in source)
await foreach (var item in source.WithCancellation(cancellationToken))
{
result = func(result, element);
return item;
}
return result;
return default;
}
}
#endif
public static async IAsyncEnumerable<TResult> CastAsync<TResult>(
this IAsyncEnumerable<object?> source
)
where TResult : class
{
await foreach (var item in source)
{
yield return (item as TResult).NotNull();
}
}
public static async ValueTask<TAccumulate> AggregateAsync<TAccumulate, T>(
this IAsyncEnumerable<T> source,
TAccumulate seed,
Func<TAccumulate, T, TAccumulate> func
)
{
var result = seed;
await foreach (var element in source)
{
result = func(result, element);
}
return result;
}
}
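
The block above polyfills a handful of async operators for target frameworks without built-in equivalents. A short usage sketch, assuming the polyfills are visible via a using SharpCompress; directive; Numbers() is a stand-in source, not part of the library:

using System.Collections.Generic;
using System.Threading.Tasks;
using SharpCompress; // assumed home of the polyfills above

internal static class AsyncOperatorDemo
{
    // Stand-in async source used only for this sketch.
    private static async IAsyncEnumerable<int> Numbers()
    {
        for (var i = 1; i <= 5; i++)
        {
            await Task.Yield();
            yield return i;
        }
    }

    public static async Task RunAsync()
    {
        var count = await Numbers().CountAsync();                          // 5
        var first = await Numbers().FirstOrDefaultAsync();                 // 1
        var sum = await Numbers().AggregateAsync(0, (acc, x) => acc + x);  // 15
        var firstTwo = await Numbers().Take(2).ToListAsync();              // [1, 2]
    }
}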

View File

@@ -27,7 +27,11 @@ public static class StreamExtensions
public Task SkipAsync(CancellationToken cancellationToken = default)
{
cancellationToken.ThrowIfCancellationRequested();
#if NET8_0_OR_GREATER
return stream.CopyToAsync(Stream.Null, cancellationToken);
#else
return stream.CopyToAsync(Stream.Null);
#endif
}
internal int Read(Span<byte> buffer)
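
SkipAsync drains the remainder of a stream into Stream.Null, passing the cancellation token through only on frameworks whose CopyToAsync overload accepts one. A small usage sketch, assuming StreamExtensions lives in the SharpCompress namespace:

using System.IO;
using System.Threading.Tasks;
using SharpCompress; // assumed namespace of StreamExtensions

internal static class SkipDemo
{
    // Consume the rest of an entry's payload without keeping it, e.g. when
    // skipping over an entry in a forward-only stream.
    public static Task DrainAsync(Stream entryStream) => entryStream.SkipAsync();
}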

View File

@@ -0,0 +1,53 @@
#if NET8_0_OR_GREATER
using System.IO;
using System.Threading;
using SharpCompress.Common;
namespace SharpCompress.Readers.Ace;
public partial class AceReader : IReaderOpenable
{
public static IAsyncReader OpenAsync(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty(nameof(path));
return (IAsyncReader)Open(new FileInfo(path), readerOptions);
}
public static IAsyncReader OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncReader)Open(stream, readerOptions);
}
public static IAsyncReader OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncReader)Open(fileInfo, readerOptions);
}
public static IReader Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions);
}
public static IReader Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.NotNull(nameof(fileInfo));
return Open(fileInfo.OpenRead(), readerOptions);
}
}
#endif

View File

@@ -23,7 +23,7 @@ namespace SharpCompress.Readers.Ace
/// - Recovery record support
/// - Additional header flags
/// </remarks>
public abstract class AceReader : AbstractReader<AceEntry, AceVolume>
public abstract partial class AceReader : AbstractReader<AceEntry, AceVolume>
{
private readonly IArchiveEncoding _archiveEncoding;
@@ -50,7 +50,7 @@ namespace SharpCompress.Readers.Ace
/// <param name="stream">The stream containing the ACE archive.</param>
/// <param name="options">Reader options.</param>
/// <returns>An AceReader instance.</returns>
public static AceReader Open(Stream stream, ReaderOptions? options = null)
public static IReader Open(Stream stream, ReaderOptions? options = null)
{
stream.NotNull(nameof(stream));
return new SingleVolumeAceReader(stream, options ?? new ReaderOptions());
@@ -62,7 +62,7 @@ namespace SharpCompress.Readers.Ace
/// <param name="streams"></param>
/// <param name="options"></param>
/// <returns></returns>
public static AceReader Open(IEnumerable<Stream> streams, ReaderOptions? options = null)
public static IReader Open(IEnumerable<Stream> streams, ReaderOptions? options = null)
{
streams.NotNull(nameof(streams));
return new MultiVolumeAceReader(streams, options ?? new ReaderOptions());

View File

@@ -0,0 +1,53 @@
#if NET8_0_OR_GREATER
using System.IO;
using System.Threading;
using SharpCompress.Common;
namespace SharpCompress.Readers.Arc;
public partial class ArcReader : IReaderOpenable
{
public static IAsyncReader OpenAsync(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty(nameof(path));
return (IAsyncReader)Open(new FileInfo(path), readerOptions);
}
public static IAsyncReader OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncReader)Open(stream, readerOptions);
}
public static IAsyncReader OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncReader)Open(fileInfo, readerOptions);
}
public static IReader Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions);
}
public static IReader Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.NotNull(nameof(fileInfo));
return Open(fileInfo.OpenRead(), readerOptions);
}
}
#endif

View File

@@ -9,7 +9,7 @@ using SharpCompress.Common.Arc;
namespace SharpCompress.Readers.Arc
{
public class ArcReader : AbstractReader<ArcEntry, ArcVolume>
public partial class ArcReader : AbstractReader<ArcEntry, ArcVolume>
{
private ArcReader(Stream stream, ReaderOptions options)
: base(options, ArchiveType.Arc) => Volume = new ArcVolume(stream, options, 0);
@@ -22,7 +22,7 @@ namespace SharpCompress.Readers.Arc
/// <param name="stream"></param>
/// <param name="options"></param>
/// <returns></returns>
public static ArcReader Open(Stream stream, ReaderOptions? options = null)
public static IReader Open(Stream stream, ReaderOptions? options = null)
{
stream.NotNull(nameof(stream));
return new ArcReader(stream, options ?? new ReaderOptions());

View File

@@ -0,0 +1,53 @@
#if NET8_0_OR_GREATER
using System.IO;
using System.Threading;
using SharpCompress.Common;
namespace SharpCompress.Readers.Arj;
public partial class ArjReader : IReaderOpenable
{
public static IAsyncReader OpenAsync(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty(nameof(path));
return (IAsyncReader)Open(new FileInfo(path), readerOptions);
}
public static IAsyncReader OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncReader)Open(stream, readerOptions);
}
public static IAsyncReader OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncReader)Open(fileInfo, readerOptions);
}
public static IReader Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions);
}
public static IReader Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.NotNull(nameof(fileInfo));
return Open(fileInfo.OpenRead(), readerOptions);
}
}
#endif

View File

@@ -8,7 +8,7 @@ using SharpCompress.Readers.Rar;
namespace SharpCompress.Readers.Arj
{
public abstract class ArjReader : AbstractReader<ArjEntry, ArjVolume>
public abstract partial class ArjReader : AbstractReader<ArjEntry, ArjVolume>
{
internal ArjReader(ReaderOptions options)
: base(options, ArchiveType.Arj) { }
@@ -27,7 +27,7 @@ namespace SharpCompress.Readers.Arj
/// <param name="stream"></param>
/// <param name="options"></param>
/// <returns></returns>
public static ArjReader Open(Stream stream, ReaderOptions? options = null)
public static IReader Open(Stream stream, ReaderOptions? options = null)
{
stream.NotNull(nameof(stream));
return new SingleVolumeArjReader(stream, options ?? new ReaderOptions());
@@ -39,7 +39,7 @@ namespace SharpCompress.Readers.Arj
/// <param name="streams"></param>
/// <param name="options"></param>
/// <returns></returns>
public static ArjReader Open(IEnumerable<Stream> streams, ReaderOptions? options = null)
public static IReader Open(IEnumerable<Stream> streams, ReaderOptions? options = null)
{
streams.NotNull(nameof(streams));
return new MultiVolumeArjReader(streams, options ?? new ReaderOptions());

View File

@@ -0,0 +1,53 @@
#if NET8_0_OR_GREATER
using System.IO;
using System.Threading;
using SharpCompress.Common;
namespace SharpCompress.Readers.GZip;
public partial class GZipReader : IReaderOpenable
{
public static IAsyncReader OpenAsync(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty(nameof(path));
return (IAsyncReader)Open(new FileInfo(path), readerOptions);
}
public static IAsyncReader OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncReader)Open(stream, readerOptions);
}
public static IAsyncReader OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncReader)Open(fileInfo, readerOptions);
}
public static IReader Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions);
}
public static IReader Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.NotNull(nameof(fileInfo));
return Open(fileInfo.OpenRead(), readerOptions);
}
}
#endif

View File

@@ -1,11 +1,11 @@
using System.Collections.Generic;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Common.GZip;
namespace SharpCompress.Readers.GZip;
public class GZipReader : AbstractReader<GZipEntry, GZipVolume>
public partial class GZipReader : AbstractReader<GZipEntry, GZipVolume>
{
private GZipReader(Stream stream, ReaderOptions options)
: base(options, ArchiveType.GZip) => Volume = new GZipVolume(stream, options, 0);
@@ -20,7 +20,7 @@ public class GZipReader : AbstractReader<GZipEntry, GZipVolume>
/// <param name="stream"></param>
/// <param name="options"></param>
/// <returns></returns>
public static GZipReader Open(Stream stream, ReaderOptions? options = null)
public static IReader Open(Stream stream, ReaderOptions? options = null)
{
stream.NotNull(nameof(stream));
return new GZipReader(stream, options ?? new ReaderOptions());

View File

@@ -65,5 +65,33 @@ public static class IAsyncReaderExtensions
.ConfigureAwait(false);
}
}
public async ValueTask WriteEntryToAsync(
string destinationFileName,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default
) =>
await ExtractionMethods
.WriteEntryToFileAsync(
reader.Entry,
destinationFileName,
options,
async (x, fm, ct) =>
{
using var fs = File.Open(destinationFileName, fm);
await reader.WriteEntryToAsync(fs, ct).ConfigureAwait(false);
},
cancellationToken
)
.ConfigureAwait(false);
public async ValueTask WriteEntryToAsync(
FileInfo destinationFileInfo,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default
) =>
await reader
.WriteEntryToAsync(destinationFileInfo.FullName, options, cancellationToken)
.ConfigureAwait(false);
}
}
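
The new overloads let a reader extract its current entry straight to a path or FileInfo. A hedged usage sketch, assuming the extensions and IAsyncReader live in the SharpCompress.Readers namespace and that the reader is already positioned on an entry; advancing between entries is out of scope here:

using System.Threading.Tasks;
using SharpCompress.Readers; // assumed namespace of IAsyncReader and the extensions

internal static class ExtractDemo
{
    // Write the entry the reader is currently positioned on out to disk.
    public static async ValueTask SaveCurrentEntryAsync(IAsyncReader reader, string destination) =>
        await reader.WriteEntryToAsync(destination);
}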

View File

@@ -1,6 +1,5 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Readers;
@@ -13,7 +12,7 @@ public interface IReaderFactory : Factories.IFactory
/// <param name="options"></param>
/// <returns></returns>
IReader OpenReader(Stream stream, ReaderOptions? options);
ValueTask<IAsyncReader> OpenReaderAsync(
IAsyncReader OpenReaderAsync(
Stream stream,
ReaderOptions? options,
CancellationToken cancellationToken

View File

@@ -0,0 +1,33 @@
#if NET8_0_OR_GREATER
using System.IO;
using System.Threading;
namespace SharpCompress.Readers;
public interface IReaderOpenable
{
public static abstract IReader Open(string filePath, ReaderOptions? readerOptions = null);
public static abstract IReader Open(FileInfo fileInfo, ReaderOptions? readerOptions = null);
public static abstract IReader Open(Stream stream, ReaderOptions? readerOptions = null);
public static abstract IAsyncReader OpenAsync(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
public static abstract IAsyncReader OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
public static abstract IAsyncReader OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
}
#endif
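
IReaderOpenable uses static abstract interface members (hence the NET8_0_OR_GREATER guard), which lets generic helpers resolve the right Open overload at compile time. A minimal sketch of that consumption pattern; the helper and its name are illustrative:

#if NET8_0_OR_GREATER
using SharpCompress.Readers;

internal static class ReaderOpener
{
    // Resolved at compile time against the concrete reader type via the
    // static abstract members declared on IReaderOpenable.
    public static IReader OpenFrom<TReader>(string filePath, ReaderOptions? options = null)
        where TReader : IReaderOpenable => TReader.Open(filePath, options);
}

// e.g. var reader = ReaderOpener.OpenFrom<SharpCompress.Readers.GZip.GZipReader>("data.gz");
#endif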

View File

@@ -0,0 +1,41 @@
#if NET8_0_OR_GREATER
using System.IO;
using System.Threading;
using SharpCompress.Common;
namespace SharpCompress.Readers.Rar;
public partial class RarReader : IReaderOpenable
{
public static IAsyncReader OpenAsync(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty(nameof(path));
return (IAsyncReader)Open(new FileInfo(path), readerOptions);
}
public static IAsyncReader OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncReader)Open(stream, readerOptions);
}
public static IAsyncReader OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncReader)Open(fileInfo, readerOptions);
}
}
#endif

View File

@@ -11,7 +11,7 @@ namespace SharpCompress.Readers.Rar;
/// <summary>
/// This class facilitates reading a Rar archive in a non-seekable, forward-only manner
/// </summary>
public abstract class RarReader : AbstractReader<RarReaderEntry, RarVolume>
public abstract partial class RarReader : AbstractReader<RarReaderEntry, RarVolume>
{
private bool _disposed;
private RarVolume? volume;
@@ -40,24 +40,24 @@ public abstract class RarReader : AbstractReader<RarReaderEntry, RarVolume>
public override RarVolume? Volume => volume;
public static RarReader Open(string filePath, ReaderOptions? options = null)
public static IReader Open(string filePath, ReaderOptions? options = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), options);
}
public static RarReader Open(FileInfo fileInfo, ReaderOptions? options = null)
public static IReader Open(FileInfo fileInfo, ReaderOptions? options = null)
{
options ??= new ReaderOptions { LeaveStreamOpen = false };
return Open(fileInfo.OpenRead(), options);
}
public static RarReader Open(IEnumerable<string> filePaths, ReaderOptions? options = null)
public static IReader Open(IEnumerable<string> filePaths, ReaderOptions? options = null)
{
return Open(filePaths.Select(x => new FileInfo(x)), options);
}
public static RarReader Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? options = null)
public static IReader Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? options = null)
{
options ??= new ReaderOptions { LeaveStreamOpen = false };
return Open(fileInfos.Select(x => x.OpenRead()), options);
@@ -69,7 +69,7 @@ public abstract class RarReader : AbstractReader<RarReaderEntry, RarVolume>
/// <param name="stream"></param>
/// <param name="options"></param>
/// <returns></returns>
public static RarReader Open(Stream stream, ReaderOptions? options = null)
public static IReader Open(Stream stream, ReaderOptions? options = null)
{
stream.NotNull(nameof(stream));
return new SingleVolumeRarReader(stream, options ?? new ReaderOptions());
@@ -81,7 +81,7 @@ public abstract class RarReader : AbstractReader<RarReaderEntry, RarVolume>
/// <param name="streams"></param>
/// <param name="options"></param>
/// <returns></returns>
public static RarReader Open(IEnumerable<Stream> streams, ReaderOptions? options = null)
public static IReader Open(IEnumerable<Stream> streams, ReaderOptions? options = null)
{
streams.NotNull(nameof(streams));
return new MultiVolumeRarReader(streams, options ?? new ReaderOptions());

View File

@@ -24,7 +24,7 @@ public static class ReaderFactory
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
/// <returns></returns>
public static ValueTask<IAsyncReader> OpenAsync(
public static IAsyncReader OpenAsync(
string filePath,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
@@ -47,7 +47,7 @@ public static class ReaderFactory
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
/// <returns></returns>
public static ValueTask<IAsyncReader> OpenAsync(
public static IAsyncReader OpenAsync(
FileInfo fileInfo,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
@@ -110,7 +110,7 @@ public static class ReaderFactory
);
}
public static async ValueTask<IAsyncReader> OpenAsync(
public static IAsyncReader OpenAsync(
Stream stream,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
@@ -136,19 +136,10 @@ public static class ReaderFactory
if (testedFactory is IReaderFactory readerFactory)
{
((IStreamStack)bStream).StackSeek(pos);
if (
await testedFactory.IsArchiveAsync(
bStream,
options.Password,
options.BufferSize,
cancellationToken
)
)
if (testedFactory.IsArchive(bStream))
{
((IStreamStack)bStream).StackSeek(pos);
return await readerFactory
.OpenReaderAsync(bStream, options, cancellationToken)
.ConfigureAwait(false);
return readerFactory.OpenReaderAsync(bStream, options, cancellationToken);
}
}
((IStreamStack)bStream).StackSeek(pos);
@@ -161,20 +152,10 @@ public static class ReaderFactory
continue; // Already tested above
}
((IStreamStack)bStream).StackSeek(pos);
if (
factory is IReaderFactory readerFactory
&& await factory.IsArchiveAsync(
bStream,
options.Password,
options.BufferSize,
cancellationToken
)
)
if (factory is IReaderFactory readerFactory && factory.IsArchive(bStream))
{
((IStreamStack)bStream).StackSeek(pos);
return await readerFactory
.OpenReaderAsync(bStream, options, cancellationToken)
.ConfigureAwait(false);
return readerFactory.OpenReaderAsync(bStream, options, cancellationToken);
}
}

View File

@@ -0,0 +1,53 @@
#if NET8_0_OR_GREATER
using System.IO;
using System.Threading;
using SharpCompress.Common;
namespace SharpCompress.Readers.Tar;
public partial class TarReader : IReaderOpenable
{
public static IAsyncReader OpenAsync(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty(nameof(path));
return (IAsyncReader)Open(new FileInfo(path), readerOptions);
}
public static IAsyncReader OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncReader)Open(stream, readerOptions);
}
public static IAsyncReader OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncReader)Open(fileInfo, readerOptions);
}
public static IReader Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions);
}
public static IReader Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.NotNull(nameof(fileInfo));
return Open(fileInfo.OpenRead(), readerOptions);
}
}
#endif

View File

@@ -16,7 +16,7 @@ using SharpCompress.IO;
namespace SharpCompress.Readers.Tar;
public class TarReader : AbstractReader<TarEntry, TarVolume>
public partial class TarReader : AbstractReader<TarEntry, TarVolume>
{
private readonly CompressionType compressionType;
@@ -53,7 +53,7 @@ public class TarReader : AbstractReader<TarEntry, TarVolume>
/// <param name="stream"></param>
/// <param name="options"></param>
/// <returns></returns>
public static TarReader Open(Stream stream, ReaderOptions? options = null)
public static IReader Open(Stream stream, ReaderOptions? options = null)
{
stream.NotNull(nameof(stream));
options = options ?? new ReaderOptions();

View File

@@ -0,0 +1,53 @@
#if NET8_0_OR_GREATER
using System.IO;
using System.Threading;
using SharpCompress.Common;
namespace SharpCompress.Readers.Zip;
public partial class ZipReader : IReaderOpenable
{
public static IAsyncReader OpenAsync(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty(nameof(path));
return (IAsyncReader)Open(new FileInfo(path), readerOptions);
}
public static IAsyncReader OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncReader)Open(stream, readerOptions);
}
public static IAsyncReader OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncReader)Open(fileInfo, readerOptions);
}
public static IReader Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions);
}
public static IReader Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.NotNull(nameof(fileInfo));
return Open(fileInfo.OpenRead(), readerOptions);
}
}
#endif

View File

@@ -9,7 +9,7 @@ using SharpCompress.Common.Zip.Headers;
namespace SharpCompress.Readers.Zip;
public class ZipReader : AbstractReader<ZipEntry, ZipVolume>
public partial class ZipReader : AbstractReader<ZipEntry, ZipVolume>
{
private readonly StreamingZipHeaderFactory _headerFactory;
@@ -45,17 +45,13 @@ public class ZipReader : AbstractReader<ZipEntry, ZipVolume>
/// <param name="stream"></param>
/// <param name="options"></param>
/// <returns></returns>
public static ZipReader Open(Stream stream, ReaderOptions? options = null)
public static IReader Open(Stream stream, ReaderOptions? options = null)
{
stream.NotNull(nameof(stream));
return new ZipReader(stream, options ?? new ReaderOptions());
}
public static ZipReader Open(
Stream stream,
ReaderOptions? options,
IEnumerable<ZipEntry> entries
)
public static IReader Open(Stream stream, ReaderOptions? options, IEnumerable<ZipEntry> entries)
{
stream.NotNull(nameof(stream));
return new ZipReader(stream, options ?? new ReaderOptions(), entries);

View File

@@ -8,7 +8,9 @@ using SharpCompress.IO;
namespace SharpCompress.Writers;
#pragma warning disable CS8618 // Non-nullable field must contain a non-null value when exiting constructor. Consider declaring as nullable.
public abstract class AbstractWriter(ArchiveType type, WriterOptions writerOptions) : IWriter
public abstract class AbstractWriter(ArchiveType type, WriterOptions writerOptions)
: IWriter,
IAsyncWriter
{
private bool _isDisposed;

View File

@@ -0,0 +1,58 @@
#if NET8_0_OR_GREATER
using System.IO;
using System.Threading;
using SharpCompress.Common;
namespace SharpCompress.Writers.GZip;
public partial class GZipWriter : IWriterOpenable<GZipWriterOptions>
{
public static IWriter Open(string filePath, GZipWriterOptions writerOptions)
{
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), writerOptions);
}
public static IWriter Open(FileInfo fileInfo, GZipWriterOptions writerOptions)
{
fileInfo.NotNull(nameof(fileInfo));
return new GZipWriter(fileInfo.OpenWrite(), writerOptions);
}
public static IWriter Open(Stream stream, GZipWriterOptions writerOptions)
{
stream.NotNull(nameof(stream));
return new GZipWriter(stream, writerOptions);
}
public static IAsyncWriter OpenAsync(
string path,
GZipWriterOptions writerOptions,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncWriter)Open(path, writerOptions);
}
public static IAsyncWriter OpenAsync(
Stream stream,
GZipWriterOptions writerOptions,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncWriter)Open(stream, writerOptions);
}
public static IAsyncWriter OpenAsync(
FileInfo fileInfo,
GZipWriterOptions writerOptions,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncWriter)Open(fileInfo, writerOptions);
}
}
#endif
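
A minimal usage sketch of the GZipWriter members above. The helper name is hypothetical, and a parameterless GZipWriterOptions constructor is assumed (it is not shown in this diff).

using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Writers.GZip;

internal static class GZipWriterOpenExample
{
    public static async Task CompressFileAsync(string inputFile, string gzPath, CancellationToken ct = default)
    {
        // Parameterless GZipWriterOptions constructor is an assumption here.
        var options = new GZipWriterOptions();
        // OpenAsync returns IAsyncWriter directly, so only the write itself is awaited.
        using var writer = GZipWriter.OpenAsync(gzPath, options, ct);
        using var source = File.OpenRead(inputFile);
        await writer.WriteAsync(Path.GetFileName(inputFile), source, null, ct);
    }
}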

View File

@@ -7,7 +7,7 @@ using SharpCompress.IO;
namespace SharpCompress.Writers.GZip;
public sealed class GZipWriter : AbstractWriter
public sealed partial class GZipWriter : AbstractWriter
{
private bool _wroteToStream;

View File

@@ -10,13 +10,18 @@ public interface IWriter : IDisposable
{
ArchiveType WriterType { get; }
void Write(string filename, Stream source, DateTime? modificationTime);
void WriteDirectory(string directoryName, DateTime? modificationTime);
}
public interface IAsyncWriter : IDisposable
{
ArchiveType WriterType { get; }
ValueTask WriteAsync(
string filename,
Stream source,
DateTime? modificationTime,
CancellationToken cancellationToken = default
);
void WriteDirectory(string directoryName, DateTime? modificationTime);
ValueTask WriteDirectoryAsync(
string directoryName,
DateTime? modificationTime,

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.IO;
using System.Linq;
using System.Threading;
@@ -8,126 +8,123 @@ namespace SharpCompress.Writers;
public static class IWriterExtensions
{
public static void Write(this IWriter writer, string entryPath, Stream source) =>
writer.Write(entryPath, source, null);
public static void Write(this IWriter writer, string entryPath, FileInfo source)
extension(IWriter writer)
{
if (!source.Exists)
public void Write(string entryPath, Stream source) => writer.Write(entryPath, source, null);
public void Write(string entryPath, FileInfo source)
{
throw new ArgumentException("Source does not exist: " + source.FullName);
}
using var stream = source.OpenRead();
writer.Write(entryPath, stream, source.LastWriteTime);
}
if (!source.Exists)
{
throw new ArgumentException("Source does not exist: " + source.FullName);
}
public static void Write(this IWriter writer, string entryPath, string source) =>
writer.Write(entryPath, new FileInfo(source));
public static void WriteAll(
this IWriter writer,
string directory,
string searchPattern = "*",
SearchOption option = SearchOption.TopDirectoryOnly
) => writer.WriteAll(directory, searchPattern, null, option);
public static void WriteAll(
this IWriter writer,
string directory,
string searchPattern = "*",
Func<string, bool>? fileSearchFunc = null,
SearchOption option = SearchOption.TopDirectoryOnly
)
{
if (!Directory.Exists(directory))
{
throw new ArgumentException("Directory does not exist: " + directory);
using var stream = source.OpenRead();
writer.Write(entryPath, stream, source.LastWriteTime);
}
fileSearchFunc ??= n => true;
foreach (
var file in Directory
.EnumerateFiles(directory, searchPattern, option)
.Where(fileSearchFunc)
public void Write(string entryPath, string source) =>
writer.Write(entryPath, new FileInfo(source));
public void WriteAll(
string directory,
string searchPattern = "*",
SearchOption option = SearchOption.TopDirectoryOnly
) => writer.WriteAll(directory, searchPattern, null, option);
public void WriteAll(
string directory,
string searchPattern = "*",
Func<string, bool>? fileSearchFunc = null,
SearchOption option = SearchOption.TopDirectoryOnly
)
{
writer.Write(file.Substring(directory.Length), file);
if (!Directory.Exists(directory))
{
throw new ArgumentException("Directory does not exist: " + directory);
}
fileSearchFunc ??= n => true;
foreach (
var file in Directory
.EnumerateFiles(directory, searchPattern, option)
.Where(fileSearchFunc)
)
{
writer.Write(file.Substring(directory.Length), file);
}
}
public void WriteDirectory(string directoryName) =>
writer.WriteDirectory(directoryName, null);
}
public static void WriteDirectory(this IWriter writer, string directoryName) =>
writer.WriteDirectory(directoryName, null);
// Async extensions
public static ValueTask WriteAsync(
this IWriter writer,
string entryPath,
Stream source,
CancellationToken cancellationToken = default
) => writer.WriteAsync(entryPath, source, null, cancellationToken);
public static async ValueTask WriteAsync(
this IWriter writer,
string entryPath,
FileInfo source,
CancellationToken cancellationToken = default
)
extension(IAsyncWriter writer)
{
if (!source.Exists)
{
throw new ArgumentException("Source does not exist: " + source.FullName);
}
using var stream = source.OpenRead();
await writer
.WriteAsync(entryPath, stream, source.LastWriteTime, cancellationToken)
.ConfigureAwait(false);
}
public ValueTask WriteAsync(
string entryPath,
Stream source,
CancellationToken cancellationToken = default
) => writer.WriteAsync(entryPath, source, null, cancellationToken);
public static ValueTask WriteAsync(
this IWriter writer,
string entryPath,
string source,
CancellationToken cancellationToken = default
) => writer.WriteAsync(entryPath, new FileInfo(source), cancellationToken);
public static ValueTask WriteAllAsync(
this IWriter writer,
string directory,
string searchPattern = "*",
SearchOption option = SearchOption.TopDirectoryOnly,
CancellationToken cancellationToken = default
) => writer.WriteAllAsync(directory, searchPattern, null, option, cancellationToken);
public static async ValueTask WriteAllAsync(
this IWriter writer,
string directory,
string searchPattern = "*",
Func<string, bool>? fileSearchFunc = null,
SearchOption option = SearchOption.TopDirectoryOnly,
CancellationToken cancellationToken = default
)
{
if (!Directory.Exists(directory))
{
throw new ArgumentException("Directory does not exist: " + directory);
}
fileSearchFunc ??= n => true;
foreach (
var file in Directory
.EnumerateFiles(directory, searchPattern, option)
.Where(fileSearchFunc)
public async ValueTask WriteAsync(
string entryPath,
FileInfo source,
CancellationToken cancellationToken = default
)
{
if (!source.Exists)
{
throw new ArgumentException("Source does not exist: " + source.FullName);
}
using var stream = source.OpenRead();
await writer
.WriteAsync(file.Substring(directory.Length), file, cancellationToken)
.WriteAsync(entryPath, stream, source.LastWriteTime, cancellationToken)
.ConfigureAwait(false);
}
}
public static ValueTask WriteDirectoryAsync(
this IWriter writer,
string directoryName,
CancellationToken cancellationToken = default
) => writer.WriteDirectoryAsync(directoryName, null, cancellationToken);
public ValueTask WriteAsync(
string entryPath,
string source,
CancellationToken cancellationToken = default
) => writer.WriteAsync(entryPath, new FileInfo(source), cancellationToken);
public ValueTask WriteAllAsync(
string directory,
string searchPattern = "*",
SearchOption option = SearchOption.TopDirectoryOnly,
CancellationToken cancellationToken = default
) => writer.WriteAllAsync(directory, searchPattern, null, option, cancellationToken);
public async ValueTask WriteAllAsync(
string directory,
string searchPattern = "*",
Func<string, bool>? fileSearchFunc = null,
SearchOption option = SearchOption.TopDirectoryOnly,
CancellationToken cancellationToken = default
)
{
if (!Directory.Exists(directory))
{
throw new ArgumentException("Directory does not exist: " + directory);
}
fileSearchFunc ??= n => true;
foreach (
var file in Directory
.EnumerateFiles(directory, searchPattern, option)
.Where(fileSearchFunc)
)
{
await writer
.WriteAsync(file.Substring(directory.Length), file, cancellationToken)
.ConfigureAwait(false);
}
}
public ValueTask WriteDirectoryAsync(
string directoryName,
CancellationToken cancellationToken = default
) => writer.WriteDirectoryAsync(directoryName, null, cancellationToken);
}
}
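
Call sites for the new extension blocks look the same as the old "this IWriter" extension methods did; a short sketch with hypothetical helper names:

using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Writers;

internal static class WriterExtensionUsage
{
    // Resolves to the extension(IWriter writer) block above.
    public static void AddTree(IWriter writer, string directory) =>
        writer.WriteAll(directory, "*", null, SearchOption.AllDirectories);

    // Resolves to the extension(IAsyncWriter writer) block above.
    public static ValueTask AddTreeAsync(IAsyncWriter writer, string directory, CancellationToken ct = default) =>
        writer.WriteAllAsync(directory, "*", null, SearchOption.AllDirectories, ct);
}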

View File

@@ -1,6 +1,5 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Factories;
namespace SharpCompress.Writers;
@@ -9,7 +8,7 @@ public interface IWriterFactory : IFactory
{
IWriter Open(Stream stream, WriterOptions writerOptions);
ValueTask<IWriter> OpenAsync(
IAsyncWriter OpenAsync(
Stream stream,
WriterOptions writerOptions,
CancellationToken cancellationToken = default

View File

@@ -0,0 +1,41 @@
#if NET8_0_OR_GREATER
using System.IO;
using System.Threading;
namespace SharpCompress.Writers;
public interface IWriterOpenable<TWriterOptions>
where TWriterOptions : WriterOptions
{
public static abstract IWriter Open(string filePath, TWriterOptions writerOptions);
public static abstract IWriter Open(FileInfo fileInfo, TWriterOptions writerOptions);
public static abstract IWriter Open(Stream stream, TWriterOptions writerOptions);
/// <summary>
/// Opens a Writer asynchronously.
/// </summary>
/// <param name="stream">The stream to write to.</param>
/// <param name="writerOptions">Writer options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>An <see cref="IAsyncWriter"/> for the stream.</returns>
public static abstract IAsyncWriter OpenAsync(
Stream stream,
TWriterOptions writerOptions,
CancellationToken cancellationToken = default
);
public static abstract IAsyncWriter OpenAsync(
string filePath,
TWriterOptions writerOptions,
CancellationToken cancellationToken = default
);
public static abstract IAsyncWriter OpenAsync(
FileInfo fileInfo,
TWriterOptions writerOptions,
CancellationToken cancellationToken = default
);
}
#endif
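
Because these members are static abstracts (net8.0+ only), a generic helper can dispatch to a concrete writer's Open implementation through the type parameter; a sketch under that assumption, with a hypothetical helper name:

#if NET8_0_OR_GREATER
using System.IO;
using System.Threading;
using SharpCompress.Writers;

internal static class WriterOpenableHelper
{
    // TWriter.OpenAsync resolves to the concrete writer's static implementation,
    // e.g. OpenArchive<ZipWriter, ZipWriterOptions>(stream, options).
    public static IAsyncWriter OpenArchive<TWriter, TOptions>(
        Stream stream,
        TOptions options,
        CancellationToken ct = default
    )
        where TWriter : IWriterOpenable<TOptions>
        where TOptions : WriterOptions => TWriter.OpenAsync(stream, options, ct);
}
#endif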

View File

@@ -0,0 +1,58 @@
#if NET8_0_OR_GREATER
using System.IO;
using System.Threading;
using SharpCompress.Common;
namespace SharpCompress.Writers.Tar;
public partial class TarWriter : IWriterOpenable<TarWriterOptions>
{
public static IWriter Open(string filePath, TarWriterOptions writerOptions)
{
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), writerOptions);
}
public static IWriter Open(FileInfo fileInfo, TarWriterOptions writerOptions)
{
fileInfo.NotNull(nameof(fileInfo));
return new TarWriter(fileInfo.OpenWrite(), writerOptions);
}
public static IWriter Open(Stream stream, TarWriterOptions writerOptions)
{
stream.NotNull(nameof(stream));
return new TarWriter(stream, writerOptions);
}
public static IAsyncWriter OpenAsync(
string path,
TarWriterOptions writerOptions,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncWriter)Open(path, writerOptions);
}
public static IAsyncWriter OpenAsync(
Stream stream,
TarWriterOptions writerOptions,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncWriter)Open(stream, writerOptions);
}
public static IAsyncWriter OpenAsync(
FileInfo fileInfo,
TarWriterOptions writerOptions,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncWriter)Open(fileInfo, writerOptions);
}
}
#endif

View File

@@ -12,7 +12,7 @@ using SharpCompress.IO;
namespace SharpCompress.Writers.Tar;
public class TarWriter : AbstractWriter
public partial class TarWriter : AbstractWriter
{
private readonly bool finalizeArchiveOnClose;
private TarHeaderWriteFormat headerFormat;

View File

@@ -2,13 +2,59 @@ using System;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Writers;
public static class WriterFactory
{
public static IWriter Open(
string filePath,
ArchiveType archiveType,
WriterOptions writerOptions
)
{
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), archiveType, writerOptions);
}
public static IWriter Open(
FileInfo fileInfo,
ArchiveType archiveType,
WriterOptions writerOptions
)
{
fileInfo.NotNull(nameof(fileInfo));
return Open(fileInfo.OpenWrite(), archiveType, writerOptions);
}
public static IAsyncWriter OpenAsync(
string filePath,
ArchiveType archiveType,
WriterOptions writerOptions,
CancellationToken cancellationToken = default
)
{
filePath.NotNullOrEmpty(nameof(filePath));
return OpenAsync(new FileInfo(filePath), archiveType, writerOptions, cancellationToken);
}
public static IAsyncWriter OpenAsync(
FileInfo fileInfo,
ArchiveType archiveType,
WriterOptions writerOptions,
CancellationToken cancellationToken = default
)
{
fileInfo.NotNull(nameof(fileInfo));
return OpenAsync(
fileInfo.Open(FileMode.Create, FileAccess.Write),
archiveType,
writerOptions,
cancellationToken
);
}
public static IWriter Open(Stream stream, ArchiveType archiveType, WriterOptions writerOptions)
{
var factory = Factories
@@ -31,7 +77,7 @@ public static class WriterFactory
/// <param name="writerOptions">Writer options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>An <see cref="IAsyncWriter"/> for the stream.</returns>
public static async ValueTask<IWriter> OpenAsync(
public static IAsyncWriter OpenAsync(
Stream stream,
ArchiveType archiveType,
WriterOptions writerOptions,
@@ -44,9 +90,7 @@ public static class WriterFactory
if (factory != null)
{
return await factory
.OpenAsync(stream, writerOptions, cancellationToken)
.ConfigureAwait(false);
return factory.OpenAsync(stream, writerOptions, cancellationToken);
}
throw new NotSupportedException("Archive Type does not have a Writer: " + archiveType);
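
A hedged sketch of the factory path above; the WriterOptions(CompressionType) constructor is assumed, and the helper name is hypothetical.

using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Writers;

internal static class WriterFactoryUsage
{
    public static async Task CreateZipAsync(string zipPath, string fileToAdd, CancellationToken ct = default)
    {
        // WriterOptions(CompressionType) is assumed to be the available constructor.
        var options = new WriterOptions(CompressionType.Deflate);
        // OpenAsync returns IAsyncWriter directly; only the write itself is awaited.
        using var writer = WriterFactory.OpenAsync(zipPath, ArchiveType.Zip, options, ct);
        using var source = File.OpenRead(fileToAdd);
        await writer.WriteAsync(Path.GetFileName(fileToAdd), source, null, ct);
    }
}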

View File

@@ -0,0 +1,58 @@
#if NET8_0_OR_GREATER
using System.IO;
using System.Threading;
using SharpCompress.Common;
namespace SharpCompress.Writers.Zip;
public partial class ZipWriter : IWriterOpenable<ZipWriterOptions>
{
public static IWriter Open(string filePath, ZipWriterOptions writerOptions)
{
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), writerOptions);
}
public static IWriter Open(FileInfo fileInfo, ZipWriterOptions writerOptions)
{
fileInfo.NotNull(nameof(fileInfo));
return new ZipWriter(fileInfo.OpenWrite(), writerOptions);
}
public static IWriter Open(Stream stream, ZipWriterOptions writerOptions)
{
stream.NotNull(nameof(stream));
return new ZipWriter(stream, writerOptions);
}
public static IAsyncWriter OpenAsync(
string path,
ZipWriterOptions writerOptions,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncWriter)Open(path, writerOptions);
}
public static IAsyncWriter OpenAsync(
Stream stream,
ZipWriterOptions writerOptions,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncWriter)Open(stream, writerOptions);
}
public static IAsyncWriter OpenAsync(
FileInfo fileInfo,
ZipWriterOptions writerOptions,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncWriter)Open(fileInfo, writerOptions);
}
}
#endif

Some files were not shown because too many files have changed in this diff.