Compare commits


55 Commits

Author SHA1 Message Date
Adam Hathcock
18eb140017 Merge pull request #1197 from adamhathcock/adam/add-alternate-compressions
Add ability to have alternate compressions
2026-02-13 13:46:35 +00:00
Adam Hathcock
d138999445 adjust tests 2026-02-13 13:38:37 +00:00
Adam Hathcock
7cef629a06 add compiler flags 2026-02-13 12:46:48 +00:00
Adam Hathcock
76352df852 revamped and added SC stream tests 2026-02-13 12:44:12 +00:00
Adam Hathcock
69a434b0e7 sc stream length is better 2026-02-13 12:34:39 +00:00
Adam Hathcock
39c9d68c4f Better usage of reader options 2026-02-13 11:00:53 +00:00
Adam Hathcock
da5cc69a06 fix up gzip encoding 2026-02-13 09:51:46 +00:00
Adam Hathcock
6c2e27870d reducing duplication in providers 2026-02-12 17:02:03 +00:00
Adam Hathcock
d667288a87 added async stream creation 2026-02-12 16:49:56 +00:00
Adam Hathcock
165239c971 Merge pull request #1213 from adamhathcock/copilot/fix-compression-type-entry
Fix CompressionType for WinZip AES encrypted ZIP entries
2026-02-12 16:33:28 +00:00
copilot-swe-agent[bot]
d68b9d6a86 Final validation - all tests pass, no security issues
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-02-12 15:51:01 +00:00
copilot-swe-agent[bot]
359b1093bc Add named constants for WinZip AES extra data magic numbers
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-02-12 15:47:30 +00:00
copilot-swe-agent[bot]
ebb8f16e44 Fix CompressionType for WinZip AES encrypted entries
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-02-12 15:45:52 +00:00
Adam Hathcock
6931a78bed add async stream creation concept 2026-02-12 15:41:16 +00:00
copilot-swe-agent[bot]
c7dac12cd9 Initial analysis - WinZip AES encrypted entries show CompressionType: Unknown
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-02-12 15:40:59 +00:00
copilot-swe-agent[bot]
a505808549 Initial plan 2026-02-12 15:37:05 +00:00
Adam Hathcock
a92f82ef28 complete the usage of providers for detection and async 2026-02-12 15:09:21 +00:00
Adam Hathcock
ae0b4f5c4c forgot some files 2026-02-12 14:31:13 +00:00
Adam Hathcock
0e72f5ad9d move providers 2026-02-12 14:30:55 +00:00
Adam Hathcock
0c21681717 change namespace 2026-02-12 14:26:03 +00:00
Adam Hathcock
f2a9171dd8 merge fix 2026-02-12 14:21:31 +00:00
Adam Hathcock
0e2c33dd78 Merge branch 'master' into adam/add-alternate-compressions
# Conflicts:
#	tests/SharpCompress.Performance/Benchmarks/TarBenchmarks.cs
#	tests/SharpCompress.Performance/Benchmarks/ZipBenchmarks.cs
#	tests/SharpCompress.Performance/baseline-results.md
2026-02-12 14:20:01 +00:00
Adam Hathcock
c81e78b5bb fix async benchmarks 2026-02-12 11:07:11 +00:00
Adam Hathcock
fb707aa676 push to nuget only on tags 2026-02-12 10:56:26 +00:00
Adam Hathcock
147dbc878a Merge pull request #1210 from adamhathcock/adam/issue-1206
OpenAsyncReader, OpenAsyncArchive and others must be async for Tar detection
2026-02-12 10:43:10 +00:00
Adam Hathcock
06bd6d9bed Merge pull request #1202 from adamhathcock/adam/async-benchmarks
update benchmarks to include async paths
2026-02-12 10:37:21 +00:00
Adam Hathcock
7f6272807d update docs 2026-02-12 10:32:20 +00:00
Adam Hathcock
89d948b4e1 use configure await false 2026-02-12 10:29:15 +00:00
Adam Hathcock
51c42b89b4 OpenAsyncArchive has to be async 2026-02-12 10:26:18 +00:00
Adam Hathcock
5a319ffe2c create/open always has to be async for detection 2026-02-12 10:18:43 +00:00
Adam Hathcock
bae660381c TarArchive should use a compression method like TarReader 2026-02-12 09:48:06 +00:00
Adam Hathcock
b2f1d007c6 Clean up some code paths 2026-02-12 08:50:18 +00:00
Adam Hathcock
33e9c78626 Merge pull request #1203 from adamhathcock/adam/issue-1201 2026-02-11 17:41:08 +00:00
Adam Hathcock
6f50545c31 more cleaning 2026-02-11 16:48:37 +00:00
Adam Hathcock
ab1dd45e9c more moved and validated 2026-02-11 16:47:20 +00:00
Adam Hathcock
cd5da3da5d moved and validated more async code 2026-02-11 16:35:41 +00:00
Adam Hathcock
218af5a8b3 validate and make sure rar5 methods are the same 2026-02-11 16:27:53 +00:00
Adam Hathcock
e786c00767 divide async and sync logic 2026-02-11 16:20:51 +00:00
Adam Hathcock
103ae60631 codex found problems 2026-02-11 16:10:55 +00:00
Adam Hathcock
98d0f1913e make sure things compile adam 2026-02-11 14:19:21 +00:00
Adam Hathcock
8aa93f4e34 fix fmt 2026-02-11 14:02:27 +00:00
Adam Hathcock
3689b893db Update tests/SharpCompress.Performance/Benchmarks/SevenZipBenchmarks.cs
Co-authored-by: kiloconnect[bot] <240665456+kiloconnect[bot]@users.noreply.github.com>
2026-02-11 14:00:37 +00:00
Adam Hathcock
fbec7dc083 generate github actions baseline 2026-02-11 13:57:06 +00:00
Adam Hathcock
7cf7623438 update benchmarks to include async paths 2026-02-11 13:36:26 +00:00
Adam Hathcock
c4fb32a56d baseline 2026-02-10 16:29:50 +00:00
Adam Hathcock
54a00e2614 some renamespacing 2026-02-10 16:25:21 +00:00
Adam Hathcock
19ed4d16db add fixes and benchmarks for system providers 2026-02-10 16:15:31 +00:00
Adam Hathcock
d220532b16 add comment 2026-02-10 16:00:19 +00:00
Adam Hathcock
6b035cb76e updates 2026-02-10 15:51:50 +00:00
Adam Hathcock
a3e3d9d0aa some clean up 2026-02-10 15:43:32 +00:00
Adam Hathcock
f0da1b3a93 Consolidate 2026-02-10 15:32:23 +00:00
Adam Hathcock
04c3b84fc0 merge fixes 2026-02-10 15:22:55 +00:00
Adam Hathcock
a9f2d3cf7f Merge remote-tracking branch 'origin/master' into adam/add-alternate-compressions
# Conflicts:
#	src/SharpCompress/Archives/GZip/GZipArchive.Async.cs
#	src/SharpCompress/Archives/GZip/GZipArchive.cs
#	src/SharpCompress/Archives/Zip/ZipArchive.Async.cs
#	src/SharpCompress/Archives/Zip/ZipArchive.cs
#	src/SharpCompress/Common/GZip/GZipEntry.Async.cs
#	src/SharpCompress/Common/GZip/GZipEntry.cs
#	src/SharpCompress/Common/Options/IReaderOptions.cs
#	src/SharpCompress/Readers/ReaderOptions.cs
#	src/SharpCompress/Readers/Zip/ZipReader.Async.cs
#	src/SharpCompress/Readers/Zip/ZipReader.cs
#	src/SharpCompress/Writers/GZip/GZipWriterOptions.cs
2026-02-10 15:21:13 +00:00
Adam Hathcock
9c7d27d1e0 more providers 2026-02-09 07:34:00 +00:00
Adam Hathcock
c48388ead2 adding alternate compressions 2026-02-06 21:07:32 +00:00
158 changed files with 8876 additions and 4995 deletions

View File

@@ -53,9 +53,9 @@ jobs:
name: ${{ matrix.os }}-nuget-package
path: artifacts/*.nupkg
# Push to NuGet.org using C# build target (Windows only, not on PRs)
# Push to NuGet.org only for version tag pushes (Windows only)
- name: Push to NuGet
if: success() && matrix.os == 'windows-latest' && github.event_name != 'pull_request'
if: success() && matrix.os == 'windows-latest' && startsWith(github.ref, 'refs/tags/')
run: dotnet run --project build/build.csproj -- push-to-nuget
env:
NUGET_API_KEY: ${{ secrets.NUGET_API_KEY }}

View File

@@ -103,8 +103,11 @@ tests/
### Factory Pattern
Factory implementations can implement one or more interfaces (`IArchiveFactory`, `IReaderFactory`, `IWriterFactory`) depending on format capabilities:
- `ArchiveFactory.OpenArchive()` - Opens archive API objects from seekable streams/files
- `ArchiveFactory.OpenAsyncArchive()` - Opens async archive API objects for asynchronous use cases
- `ReaderFactory.OpenReader()` - Auto-detects and opens forward-only readers
- `ReaderFactory.OpenAsyncReader()` - Auto-detects and opens forward-only async readers
- `WriterFactory.OpenWriter()` - Creates a writer for a specified `ArchiveType`
- `WriterFactory.OpenAsyncWriter()` - Creates an async writer for async write scenarios
- Factories located in: `src/SharpCompress/Factories/` (usage sketch below)
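A minimal, hedged sketch of these entry points (`archiveStream`, `readerStream`, and `output` are placeholder streams; optional parameters are omitted):

```csharp
// Sketch only: each API consumes its own stream; pick one API per stream.
using var archive = ArchiveFactory.OpenArchive(archiveStream); // requires a seekable stream
using var reader = ReaderFactory.OpenReader(readerStream);     // forward-only, format auto-detected
using var writer = WriterFactory.OpenWriter(output, ArchiveType.Zip, new WriterOptions(CompressionType.Deflate));
```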
## Nullable Reference Types
@@ -132,6 +135,9 @@ SharpCompress supports multiple archive and compression formats:
### Async/Await Patterns
- All I/O operations support async/await with `CancellationToken`
- Async methods follow the naming convention: `MethodNameAsync`
- For async archive scenarios, prefer `ArchiveFactory.OpenAsyncArchive(...)` over sync `OpenArchive(...)`.
- For async forward-only read scenarios, prefer `ReaderFactory.OpenAsyncReader(...)` over sync `OpenReader(...)`.
- For async write scenarios, prefer `WriterFactory.OpenAsyncWriter(...)` over sync `OpenWriter(...)`.
- Key async methods (see the sketch after this list):
- `WriteEntryToAsync` - Extract entry asynchronously
- `WriteAllToDirectoryAsync` - Extract all entries asynchronously
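A minimal sketch of the async-first pattern above, combining the open guidance with `MoveToNextEntryAsync` from docs/USAGE.md (`archiveStream`/`readerStream` are placeholders):

```csharp
// Open through the async entry points and dispose with await using.
await using var archive = await ArchiveFactory.OpenAsyncArchive(archiveStream);
await using var reader = await ReaderFactory.OpenAsyncReader(readerStream);
while (await reader.MoveToNextEntryAsync())
{
    // handle the current entry here (e.g. via WriteEntryToAsync)
}
```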
@@ -199,7 +205,8 @@ SharpCompress supports multiple archive and compression formats:
## Common Pitfalls
1. **Don't mix Archive and Reader APIs** - Archive needs seekable stream, Reader doesn't
2. **Solid archives (Rar, 7Zip)** - Use `ExtractAllEntries()` for best performance, not individual entry extraction
3. **Stream disposal** - Always set `LeaveStreamOpen` explicitly when needed (default is to close)
4. **Tar + non-seekable stream** - Must provide file size or it will throw
5. **Format detection** - Use `ReaderFactory.OpenReader()` for auto-detection, test with actual archive files
2. **Don't mix sync and async open paths** - For async workflows use `OpenAsyncArchive`/`OpenAsyncReader`/`OpenAsyncWriter`, not `OpenArchive`/`OpenReader`/`OpenWriter` (see the sketch after this list)
3. **Solid archives (Rar, 7Zip)** - Use `ExtractAllEntries()` for best performance, not individual entry extraction
4. **Stream disposal** - Always set `LeaveStreamOpen` explicitly when needed (default is to close)
5. **Tar + non-seekable stream** - Must provide file size or it will throw
6. **Format detection** - Use `ReaderFactory.OpenReader()` / `ReaderFactory.OpenAsyncReader()` for auto-detection, test with actual archive files
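A hedged illustration of pitfall 2; the two snippets are alternatives, not a sequence, and `stream` is a placeholder:

```csharp
// Sync workflow: sync open + using.
using var reader = ReaderFactory.OpenReader(stream);

// Async workflow: async open + await using. Don't cross the two.
await using var asyncReader = await ReaderFactory.OpenAsyncReader(stream);
```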

View File

@@ -16,6 +16,10 @@ Post Issues on Github!
Check the [Supported Formats](docs/FORMATS.md) and [Basic Usage.](docs/USAGE.md)
## Custom Compression Providers
If you need to swap out SharpCompress's built-in codecs, the `Providers` property (and `WithProviders(...)` extensions) on `ReaderOptions` and `WriterOptions` lets you supply a `CompressionProviderRegistry`. The selected registry is used by Reader/Writer APIs, Archive APIs, and async extraction paths, so the same provider choice is applied consistently across open/read/write flows. The default registry is already wired up, so customization is only necessary when you want to plug in alternatives such as `SystemGZipCompressionProvider` or a third-party `CompressionProvider`. See [docs/USAGE.md#custom-compression-providers](docs/USAGE.md#custom-compression-providers) for guided examples.
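A minimal sketch mirroring the docs examples (`input` is a placeholder stream):

```csharp
// Swap in the System.IO.Compression-backed GZip provider for reads.
var registry = CompressionProviderRegistry.Default.With(new SystemGZipCompressionProvider());
var options = ReaderOptions.ForOwnedFile().WithProviders(registry);
using var reader = ReaderFactory.OpenReader(input, options);
```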
## Recommended Formats
In general, I recommend GZip (Deflate)/BZip2 (BZip)/LZip (LZMA), as the simplicity of these formats lends itself to better long-term archival as well as streamability. Tar is often used in conjunction with them to store multiple files in a single archive (e.g. `.tar.gz`).

View File

@@ -95,7 +95,7 @@ using (var archive = ZipArchive.OpenArchive("file.zip"))
}
// Async extraction (requires IAsyncArchive)
using (var asyncArchive = await ZipArchive.OpenAsyncArchive("file.zip"))
await using (var asyncArchive = await ZipArchive.OpenAsyncArchive("file.zip"))
{
await asyncArchive.WriteToDirectoryAsync(
@"C:\output",
@@ -177,7 +177,7 @@ using (var reader = ReaderFactory.OpenReader(stream))
// Async variants (use OpenAsyncReader to get IAsyncReader)
using (var stream = File.OpenRead("file.zip"))
using (var reader = await ReaderFactory.OpenAsyncReader(stream))
await using (var reader = await ReaderFactory.OpenAsyncReader(stream))
{
while (await reader.MoveToNextEntryAsync())
{
@@ -318,6 +318,24 @@ WriterOptions: write-time behavior (compression type/level, encoding, stream own
ZipWriterEntryOptions: per-entry ZIP overrides (compression, level, timestamps, comments, zip64)
```
### Compression Providers
`ReaderOptions` and `WriterOptions` expose a `Providers` registry that controls which `ICompressionProvider` implementations are used for each `CompressionType`. The registry defaults to `CompressionProviderRegistry.Default`, so you only need to set it if you want to swap in a custom provider (for example the `SystemGZipCompressionProvider`). The selected registry is honored by Reader/Writer APIs, Archive APIs, and async entry-stream extraction paths.
```csharp
var registry = CompressionProviderRegistry.Default.With(new SystemGZipCompressionProvider());
var readerOptions = ReaderOptions.ForOwnedFile().WithProviders(registry);
var writerOptions = new WriterOptions(CompressionType.GZip)
{
CompressionLevel = 6,
}.WithProviders(registry);
using var reader = ReaderFactory.OpenReader(input, readerOptions);
using var writer = WriterFactory.OpenWriter(output, ArchiveType.GZip, writerOptions);
```
When a format needs additional initialization/finalization data (LZMA, PPMd, etc.), the registry exposes `GetCompressingProvider`, which returns the `ICompressionProviderHooks` contract; the rest of the API continues to flow through `Providers`, including the pre-, properties-, and post-compression hook data.
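A hedged sketch of that flow; `GetCompressingProvider` and `ICompressionProviderHooks` are named above, but the argument shape below is an assumption:

```csharp
var registry = CompressionProviderRegistry.Default;
// Assumption: the hooks lookup is keyed by CompressionType, like the rest of the registry.
var hooks = registry.GetCompressingProvider(CompressionType.LZMA);
// The returned ICompressionProviderHooks supplies the pre/properties/post data the
// format needs around the compressed payload; specific member names are not shown here.
```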
---
## Compression Types
@@ -409,7 +427,7 @@ cts.CancelAfter(TimeSpan.FromMinutes(5));
try
{
using (var archive = await ZipArchive.OpenAsyncArchive("archive.zip"))
await using (var archive = await ZipArchive.OpenAsyncArchive("archive.zip"))
{
await archive.WriteToDirectoryAsync(
@"C:\output",

View File

@@ -206,6 +206,29 @@ foreach(var entry in archive.Entries)
}
```
## Custom Compression Providers
By default `ReaderOptions` and `WriterOptions` already include `CompressionProviderRegistry.Default` via their `Providers` property, so you can read and write without touching the registry and still get SharpCompress's built-in implementations.
The configured registry is used consistently across Reader APIs, Writer APIs, Archive APIs, and async entry-stream extraction, including compressed TAR wrappers and ZIP async decompression.
To replace a specific algorithm (for example to use `System.IO.Compression` for GZip or Deflate), create a modified registry and pass it through the same options:
```C#
var systemGZip = new SystemGZipCompressionProvider();
var customRegistry = CompressionProviderRegistry.Default.With(systemGZip);
var readerOptions = ReaderOptions.ForOwnedFile()
.WithProviders(customRegistry);
using var reader = ReaderFactory.OpenReader(stream, readerOptions);
var writerOptions = new WriterOptions(CompressionType.GZip)
.WithProviders(customRegistry);
using var writer = WriterFactory.OpenWriter(outputStream, ArchiveType.GZip, writerOptions);
```
The registry also exposes `GetCompressingProvider` (now returning `ICompressionProviderHooks`) when a compression format needs pre- or post-stream data (e.g., LZMA/PPMd). Implementations that need extra headers can supply those bytes through the `ICompressionProviderHooks` members while the rest of the API still works through the `Providers` property.
## Async Examples
### Async Reader Examples

View File

@@ -22,7 +22,9 @@ public static partial class ArchiveFactory
readerOptions ??= ReaderOptions.ForExternalStream;
var factory = await FindFactoryAsync<IArchiveFactory>(stream, cancellationToken)
.ConfigureAwait(false);
return factory.OpenAsyncArchive(stream, readerOptions);
return await factory
.OpenAsyncArchive(stream, readerOptions, cancellationToken)
.ConfigureAwait(false);
}
public static ValueTask<IAsyncArchive> OpenAsyncArchive(
@@ -45,7 +47,9 @@ public static partial class ArchiveFactory
var factory = await FindFactoryAsync<IArchiveFactory>(fileInfo, cancellationToken)
.ConfigureAwait(false);
return factory.OpenAsyncArchive(fileInfo, options);
return await factory
.OpenAsyncArchive(fileInfo, options, cancellationToken)
.ConfigureAwait(false);
}
public static async ValueTask<IAsyncArchive> OpenAsyncArchive(

View File

@@ -2,12 +2,9 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using SharpCompress.Factories;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives;

View File

@@ -77,7 +77,7 @@ public partial class GZipArchive
{
var stream = Volumes.Single().Stream;
stream.Position = 0;
return new((IAsyncReader)GZipReader.OpenReader(stream));
return new((IAsyncReader)GZipReader.OpenReader(stream, ReaderOptions));
}
protected override async IAsyncEnumerable<GZipArchiveEntry> LoadEntriesAsync(
@@ -88,7 +88,7 @@ public partial class GZipArchive
yield return new GZipArchiveEntry(
this,
await GZipFilePart
.CreateAsync(stream, ReaderOptions.ArchiveEncoding)
.CreateAsync(stream, ReaderOptions.ArchiveEncoding, ReaderOptions.Providers)
.ConfigureAwait(false),
ReaderOptions
);

View File

@@ -20,14 +20,15 @@ public partial class GZipArchive
>
#endif
{
public static IWritableAsyncArchive<GZipWriterOptions> OpenAsyncArchive(
public static ValueTask<IWritableAsyncArchive<GZipWriterOptions>> OpenAsyncArchive(
string path,
ReaderOptions? readerOptions = null
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty(nameof(path));
return (IWritableAsyncArchive<GZipWriterOptions>)
OpenArchive(new FileInfo(path), readerOptions ?? new ReaderOptions());
return OpenAsyncArchive(new FileInfo(path), readerOptions, cancellationToken);
}
public static IWritableArchive<GZipWriterOptions> OpenArchive(
@@ -103,30 +104,50 @@ public partial class GZipArchive
);
}
public static IWritableAsyncArchive<GZipWriterOptions> OpenAsyncArchive(
public static ValueTask<IWritableAsyncArchive<GZipWriterOptions>> OpenAsyncArchive(
Stream stream,
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<GZipWriterOptions>)OpenArchive(stream, readerOptions);
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IWritableAsyncArchive<GZipWriterOptions>)OpenArchive(stream, readerOptions));
}
public static IWritableAsyncArchive<GZipWriterOptions> OpenAsyncArchive(
public static ValueTask<IWritableAsyncArchive<GZipWriterOptions>> OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<GZipWriterOptions>)OpenArchive(fileInfo, readerOptions);
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IWritableAsyncArchive<GZipWriterOptions>)OpenArchive(fileInfo, readerOptions));
}
public static IWritableAsyncArchive<GZipWriterOptions> OpenAsyncArchive(
public static ValueTask<IWritableAsyncArchive<GZipWriterOptions>> OpenAsyncArchive(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<GZipWriterOptions>)OpenArchive(streams, readerOptions);
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IWritableAsyncArchive<GZipWriterOptions>)OpenArchive(streams, readerOptions));
}
public static IWritableAsyncArchive<GZipWriterOptions> OpenAsyncArchive(
public static ValueTask<IWritableAsyncArchive<GZipWriterOptions>> OpenAsyncArchive(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<GZipWriterOptions>)OpenArchive(fileInfos, readerOptions);
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IWritableAsyncArchive<GZipWriterOptions>)OpenArchive(fileInfos, readerOptions));
}
public static IWritableArchive<GZipWriterOptions> CreateArchive() => new GZipArchive();
public static IWritableAsyncArchive<GZipWriterOptions> CreateAsyncArchive() =>
new GZipArchive();
public static ValueTask<IWritableAsyncArchive<GZipWriterOptions>> CreateAsyncArchive() =>
new(new GZipArchive());
public static bool IsGZipFile(string filePath) => IsGZipFile(new FileInfo(filePath));

View File

@@ -87,7 +87,7 @@ public partial class GZipArchive
var stream = volumes.Single().Stream;
yield return new GZipArchiveEntry(
this,
GZipFilePart.Create(stream, ReaderOptions.ArchiveEncoding),
GZipFilePart.Create(stream, ReaderOptions.ArchiveEncoding, ReaderOptions.Providers),
ReaderOptions
);
}
@@ -96,6 +96,6 @@ public partial class GZipArchive
{
var stream = Volumes.Single().Stream;
stream.Position = 0;
return GZipReader.OpenReader(stream);
return GZipReader.OpenReader(stream, ReaderOptions);
}
}

View File

@@ -24,10 +24,20 @@ public class GZipArchiveEntry : GZipEntry, IArchiveEntry
return Parts.Single().GetCompressedStream().NotNull();
}
public ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
public async ValueTask<Stream> OpenEntryStreamAsync(
CancellationToken cancellationToken = default
)
{
// GZip synchronous implementation is fast enough, just wrap it
return new(OpenEntryStream());
// Reset the stream position if seekable
var part = (GZipFilePart)Parts.Single();
var rawStream = part.GetRawStream();
if (rawStream.CanSeek && rawStream.Position != part.EntryStartPosition)
{
rawStream.Position = part.EntryStartPosition;
}
return (
await Parts.Single().GetCompressedStreamAsync(cancellationToken).ConfigureAwait(false)
).NotNull();
}
#region IArchiveEntry Members

View File

@@ -1,5 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Factories;
using SharpCompress.Readers;
@@ -32,7 +33,13 @@ public interface IArchiveFactory : IFactory
/// </summary>
/// <param name="stream">An open, readable and seekable stream.</param>
/// <param name="readerOptions">reading options.</param>
IAsyncArchive OpenAsyncArchive(Stream stream, ReaderOptions? readerOptions = null);
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>A <see cref="ValueTask{TResult}"/> containing the opened async archive.</returns>
ValueTask<IAsyncArchive> OpenAsyncArchive(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
/// <summary>
/// Constructor with a FileInfo object to an existing file.
@@ -47,5 +54,10 @@ public interface IArchiveFactory : IFactory
/// <param name="fileInfo">the file to open.</param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
IAsyncArchive OpenAsyncArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null);
/// <returns>A <see cref="ValueTask{TResult}"/> containing the opened async archive.</returns>
ValueTask<IAsyncArchive> OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
}

View File

@@ -1,6 +1,7 @@
#if NET8_0_OR_GREATER
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
@@ -18,19 +19,22 @@ public interface IArchiveOpenable<TSync, TASync>
public static abstract TSync OpenArchive(Stream stream, ReaderOptions? readerOptions = null);
public static abstract TASync OpenAsyncArchive(
public static abstract ValueTask<TASync> OpenAsyncArchive(
string path,
ReaderOptions? readerOptions = null
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
public static abstract TASync OpenAsyncArchive(
public static abstract ValueTask<TASync> OpenAsyncArchive(
Stream stream,
ReaderOptions? readerOptions = null
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
public static abstract TASync OpenAsyncArchive(
public static abstract ValueTask<TASync> OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
}

View File

@@ -2,6 +2,7 @@
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
@@ -20,14 +21,16 @@ public interface IMultiArchiveOpenable<TSync, TASync>
ReaderOptions? readerOptions = null
);
public static abstract TASync OpenAsyncArchive(
public static abstract ValueTask<TASync> OpenAsyncArchive(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
public static abstract TASync OpenAsyncArchive(
public static abstract ValueTask<TASync> OpenAsyncArchive(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
}
#endif

View File

@@ -1,3 +1,4 @@
using System.Threading.Tasks;
using SharpCompress.Common.Options;
#if NET8_0_OR_GREATER
@@ -8,6 +9,6 @@ public interface IWritableArchiveOpenable<TOptions>
where TOptions : IWriterOptions
{
public static abstract IWritableArchive<TOptions> CreateArchive();
public static abstract IWritableAsyncArchive<TOptions> CreateAsyncArchive();
public static abstract ValueTask<IWritableAsyncArchive<TOptions>> CreateAsyncArchive();
}
#endif

View File

@@ -20,13 +20,15 @@ public partial class RarArchive
IMultiArchiveOpenable<IRarArchive, IRarAsyncArchive>
#endif
{
public static IRarAsyncArchive OpenAsyncArchive(
public static ValueTask<IRarAsyncArchive> OpenAsyncArchive(
string path,
ReaderOptions? readerOptions = null
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty(nameof(path));
return (IRarAsyncArchive)OpenArchive(new FileInfo(path), readerOptions);
return new((IRarAsyncArchive)OpenArchive(new FileInfo(path), readerOptions));
}
public static IRarArchive OpenArchive(string filePath, ReaderOptions? options = null)
@@ -98,36 +100,44 @@ public partial class RarArchive
);
}
public static IRarAsyncArchive OpenAsyncArchive(
public static ValueTask<IRarAsyncArchive> OpenAsyncArchive(
Stream stream,
ReaderOptions? readerOptions = null
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
return (IRarAsyncArchive)OpenArchive(stream, readerOptions);
cancellationToken.ThrowIfCancellationRequested();
return new((IRarAsyncArchive)OpenArchive(stream, readerOptions));
}
public static IRarAsyncArchive OpenAsyncArchive(
public static ValueTask<IRarAsyncArchive> OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
return (IRarAsyncArchive)OpenArchive(fileInfo, readerOptions);
cancellationToken.ThrowIfCancellationRequested();
return new((IRarAsyncArchive)OpenArchive(fileInfo, readerOptions));
}
public static IRarAsyncArchive OpenAsyncArchive(
public static ValueTask<IRarAsyncArchive> OpenAsyncArchive(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
return (IRarAsyncArchive)OpenArchive(streams, readerOptions);
cancellationToken.ThrowIfCancellationRequested();
return new((IRarAsyncArchive)OpenArchive(streams, readerOptions));
}
public static IRarAsyncArchive OpenAsyncArchive(
public static ValueTask<IRarAsyncArchive> OpenAsyncArchive(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
return (IRarAsyncArchive)OpenArchive(fileInfos, readerOptions);
cancellationToken.ThrowIfCancellationRequested();
return new((IRarAsyncArchive)OpenArchive(fileInfos, readerOptions));
}
public static bool IsRarFile(string filePath) => IsRarFile(new FileInfo(filePath));

View File

@@ -16,10 +16,17 @@ public partial class SevenZipArchive
IMultiArchiveOpenable<IArchive, IAsyncArchive>
#endif
{
public static IAsyncArchive OpenAsyncArchive(string path, ReaderOptions? readerOptions = null)
public static ValueTask<IAsyncArchive> OpenAsyncArchive(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty("path");
return (IAsyncArchive)OpenArchive(new FileInfo(path), readerOptions ?? new ReaderOptions());
return new(
(IAsyncArchive)OpenArchive(new FileInfo(path), readerOptions ?? new ReaderOptions())
);
}
public static IArchive OpenArchive(string filePath, ReaderOptions? readerOptions = null)
@@ -86,33 +93,44 @@ public partial class SevenZipArchive
);
}
public static IAsyncArchive OpenAsyncArchive(Stream stream, ReaderOptions? readerOptions = null)
public static ValueTask<IAsyncArchive> OpenAsyncArchive(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
return (IAsyncArchive)OpenArchive(stream, readerOptions);
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncArchive)OpenArchive(stream, readerOptions));
}
public static IAsyncArchive OpenAsyncArchive(
public static ValueTask<IAsyncArchive> OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
return (IAsyncArchive)OpenArchive(fileInfo, readerOptions);
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncArchive)OpenArchive(fileInfo, readerOptions));
}
public static IAsyncArchive OpenAsyncArchive(
public static ValueTask<IAsyncArchive> OpenAsyncArchive(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
return (IAsyncArchive)OpenArchive(streams, readerOptions);
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncArchive)OpenArchive(streams, readerOptions));
}
public static IAsyncArchive OpenAsyncArchive(
public static ValueTask<IAsyncArchive> OpenAsyncArchive(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
return (IAsyncArchive)OpenArchive(fileInfos, readerOptions);
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncArchive)OpenArchive(fileInfos, readerOptions));
}
public static bool IsSevenZipFile(string filePath) => IsSevenZipFile(new FileInfo(filePath));

View File

@@ -89,26 +89,32 @@ public partial class TarArchive
{
var stream = Volumes.Single().Stream;
stream.Position = 0;
return new((IAsyncReader)TarReader.OpenReader(stream));
return new((IAsyncReader)new TarReader(stream, ReaderOptions, _compressionType));
}
protected override async IAsyncEnumerable<TarArchiveEntry> LoadEntriesAsync(
IAsyncEnumerable<TarVolume> volumes
)
{
var stream = (await volumes.SingleAsync().ConfigureAwait(false)).Stream;
var sourceStream = (await volumes.SingleAsync().ConfigureAwait(false)).Stream;
var stream = await GetStreamAsync(sourceStream).ConfigureAwait(false);
if (stream.CanSeek)
{
stream.Position = 0;
}
var streamingMode =
_compressionType == CompressionType.None
? StreamingMode.Seekable
: StreamingMode.Streaming;
// Always use async header reading in LoadEntriesAsync for consistency
{
// Use async header reading for async-only streams
TarHeader? previousHeader = null;
await foreach (
var header in TarHeaderFactory.ReadHeaderAsync(
StreamingMode.Seekable,
streamingMode,
stream,
ReaderOptions.ArchiveEncoding
)
@@ -126,7 +132,10 @@ public partial class TarArchive
{
var entry = new TarArchiveEntry(
this,
new TarFilePart(previousHeader, stream),
new TarFilePart(
previousHeader,
_compressionType == CompressionType.None ? stream : null
),
CompressionType.None,
ReaderOptions
);
@@ -151,7 +160,10 @@ public partial class TarArchive
}
yield return new TarArchiveEntry(
this,
new TarFilePart(header, stream),
new TarFilePart(
header,
_compressionType == CompressionType.None ? stream : null
),
CompressionType.None,
ReaderOptions
);

View File

@@ -7,6 +7,7 @@ using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Factories;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Writers.Tar;
@@ -37,12 +38,9 @@ public partial class TarArchive
)
{
fileInfo.NotNull(nameof(fileInfo));
return new TarArchive(
new SourceStream(
fileInfo,
i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
readerOptions ?? new ReaderOptions() { LeaveStreamOpen = false }
)
return OpenArchive(
[fileInfo],
readerOptions ?? new ReaderOptions() { LeaveStreamOpen = false }
);
}
@@ -53,13 +51,17 @@ public partial class TarArchive
{
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
return new TarArchive(
new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions() { LeaveStreamOpen = false }
)
var sourceStream = new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions() { LeaveStreamOpen = false }
);
var compressionType = TarFactory.GetCompressionType(
sourceStream,
sourceStream.ReaderOptions.Providers
);
sourceStream.Seek(0, SeekOrigin.Begin);
return new TarArchive(sourceStream, compressionType);
}
public static IWritableArchive<TarWriterOptions> OpenArchive(
@@ -69,13 +71,17 @@ public partial class TarArchive
{
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
return new TarArchive(
new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
)
var sourceStream = new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
);
var compressionType = TarFactory.GetCompressionType(
sourceStream,
sourceStream.ReaderOptions.Providers
);
sourceStream.Seek(0, SeekOrigin.Begin);
return new TarArchive(sourceStream, compressionType);
}
public static IWritableArchive<TarWriterOptions> OpenArchive(
@@ -90,35 +96,113 @@ public partial class TarArchive
throw new ArgumentException("Stream must be seekable", nameof(stream));
}
return new TarArchive(
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
);
return OpenArchive([stream], readerOptions);
}
public static IWritableAsyncArchive<TarWriterOptions> OpenAsyncArchive(
public static async ValueTask<IWritableAsyncArchive<TarWriterOptions>> OpenAsyncArchive(
Stream stream,
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<TarWriterOptions>)OpenArchive(stream, readerOptions);
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
stream.NotNull(nameof(stream));
var sourceStream = new SourceStream(
stream,
i => null,
readerOptions ?? new ReaderOptions()
);
var compressionType = await TarFactory
.GetCompressionTypeAsync(
sourceStream,
sourceStream.ReaderOptions.Providers,
cancellationToken
)
.ConfigureAwait(false);
sourceStream.Seek(0, SeekOrigin.Begin);
return new TarArchive(sourceStream, compressionType);
}
public static IWritableAsyncArchive<TarWriterOptions> OpenAsyncArchive(
public static ValueTask<IWritableAsyncArchive<TarWriterOptions>> OpenAsyncArchive(
string path,
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<TarWriterOptions>)OpenArchive(new FileInfo(path), readerOptions);
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty(nameof(path));
return OpenAsyncArchive(new FileInfo(path), readerOptions, cancellationToken);
}
public static IWritableAsyncArchive<TarWriterOptions> OpenAsyncArchive(
public static async ValueTask<IWritableAsyncArchive<TarWriterOptions>> OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<TarWriterOptions>)OpenArchive(fileInfo, readerOptions);
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
fileInfo.NotNull(nameof(fileInfo));
readerOptions ??= new ReaderOptions() { LeaveStreamOpen = false };
var sourceStream = new SourceStream(fileInfo, i => null, readerOptions);
var compressionType = await TarFactory
.GetCompressionTypeAsync(
sourceStream,
sourceStream.ReaderOptions.Providers,
cancellationToken
)
.ConfigureAwait(false);
sourceStream.Seek(0, SeekOrigin.Begin);
return new TarArchive(sourceStream, compressionType);
}
public static IWritableAsyncArchive<TarWriterOptions> OpenAsyncArchive(
public static async ValueTask<IWritableAsyncArchive<TarWriterOptions>> OpenAsyncArchive(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<TarWriterOptions>)OpenArchive(streams, readerOptions);
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
var sourceStream = new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
);
var compressionType = await TarFactory
.GetCompressionTypeAsync(
sourceStream,
sourceStream.ReaderOptions.Providers,
cancellationToken
)
.ConfigureAwait(false);
sourceStream.Seek(0, SeekOrigin.Begin);
return new TarArchive(sourceStream, compressionType);
}
public static IWritableAsyncArchive<TarWriterOptions> OpenAsyncArchive(
public static async ValueTask<IWritableAsyncArchive<TarWriterOptions>> OpenAsyncArchive(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<TarWriterOptions>)OpenArchive(fileInfos, readerOptions);
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
var sourceStream = new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions() { LeaveStreamOpen = false }
);
var compressionType = await TarFactory
.GetCompressionTypeAsync(
sourceStream,
sourceStream.ReaderOptions.Providers,
cancellationToken
)
.ConfigureAwait(false);
sourceStream.Seek(0, SeekOrigin.Begin);
return new TarArchive(sourceStream, compressionType);
}
public static bool IsTarFile(string filePath) => IsTarFile(new FileInfo(filePath));
@@ -183,5 +267,6 @@ public partial class TarArchive
public static IWritableArchive<TarWriterOptions> CreateArchive() => new TarArchive();
public static IWritableAsyncArchive<TarWriterOptions> CreateAsyncArchive() => new TarArchive();
public static ValueTask<IWritableAsyncArchive<TarWriterOptions>> CreateAsyncArchive() =>
new(new TarArchive());
}

View File

@@ -5,35 +5,113 @@ using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using SharpCompress.Common.Tar;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
using SharpCompress.Providers;
using SharpCompress.Readers;
using SharpCompress.Readers.Tar;
using SharpCompress.Writers;
using SharpCompress.Writers.Tar;
using Constants = SharpCompress.Common.Constants;
namespace SharpCompress.Archives.Tar;
public partial class TarArchive
: AbstractWritableArchive<TarArchiveEntry, TarVolume, TarWriterOptions>
{
private readonly CompressionType _compressionType;
protected override IEnumerable<TarVolume> LoadVolumes(SourceStream sourceStream)
{
sourceStream.NotNull("SourceStream is null").LoadAllParts();
return new TarVolume(sourceStream, ReaderOptions, 1).AsEnumerable();
}
private TarArchive(SourceStream sourceStream)
: base(ArchiveType.Tar, sourceStream) { }
internal TarArchive(SourceStream sourceStream, CompressionType compressionType)
: base(ArchiveType.Tar, sourceStream)
{
_compressionType = compressionType;
}
private TarArchive()
: base(ArchiveType.Tar) { }
private Stream GetStream(Stream stream) =>
_compressionType switch
{
CompressionType.BZip2 => ReaderOptions.Providers.CreateDecompressStream(
CompressionType.BZip2,
stream
),
CompressionType.GZip => ReaderOptions.Providers.CreateDecompressStream(
CompressionType.GZip,
stream,
CompressionContext.FromStream(stream).WithReaderOptions(ReaderOptions)
),
CompressionType.ZStandard => ReaderOptions.Providers.CreateDecompressStream(
CompressionType.ZStandard,
stream
),
CompressionType.LZip => ReaderOptions.Providers.CreateDecompressStream(
CompressionType.LZip,
stream
),
CompressionType.Xz => ReaderOptions.Providers.CreateDecompressStream(
CompressionType.Xz,
stream
),
CompressionType.Lzw => ReaderOptions.Providers.CreateDecompressStream(
CompressionType.Lzw,
stream
),
CompressionType.None => stream,
_ => throw new NotSupportedException("Invalid compression type: " + _compressionType),
};
private ValueTask<Stream> GetStreamAsync(
Stream stream,
CancellationToken cancellationToken = default
) =>
_compressionType switch
{
CompressionType.BZip2 => ReaderOptions.Providers.CreateDecompressStreamAsync(
CompressionType.BZip2,
stream,
cancellationToken
),
CompressionType.GZip => ReaderOptions.Providers.CreateDecompressStreamAsync(
CompressionType.GZip,
stream,
CompressionContext.FromStream(stream).WithReaderOptions(ReaderOptions),
cancellationToken
),
CompressionType.ZStandard => ReaderOptions.Providers.CreateDecompressStreamAsync(
CompressionType.ZStandard,
stream,
cancellationToken
),
CompressionType.LZip => ReaderOptions.Providers.CreateDecompressStreamAsync(
CompressionType.LZip,
stream,
cancellationToken
),
CompressionType.Xz => ReaderOptions.Providers.CreateDecompressStreamAsync(
CompressionType.Xz,
stream,
cancellationToken
),
CompressionType.Lzw => ReaderOptions.Providers.CreateDecompressStreamAsync(
CompressionType.Lzw,
stream,
cancellationToken
),
CompressionType.None => new ValueTask<Stream>(stream),
_ => throw new NotSupportedException("Invalid compression type: " + _compressionType),
};
protected override IEnumerable<TarArchiveEntry> LoadEntries(IEnumerable<TarVolume> volumes)
{
var stream = volumes.Single().Stream;
var stream = GetStream(volumes.Single().Stream);
if (stream.CanSeek)
{
stream.Position = 0;
@@ -41,7 +119,9 @@ public partial class TarArchive
TarHeader? previousHeader = null;
foreach (
var header in TarHeaderFactory.ReadHeader(
StreamingMode.Seekable,
_compressionType == CompressionType.None
? StreamingMode.Seekable
: StreamingMode.Streaming,
stream,
ReaderOptions.ArchiveEncoding
)
@@ -59,7 +139,10 @@ public partial class TarArchive
{
var entry = new TarArchiveEntry(
this,
new TarFilePart(previousHeader, stream),
new TarFilePart(
previousHeader,
_compressionType == CompressionType.None ? stream : null
),
CompressionType.None,
ReaderOptions
);
@@ -82,7 +165,10 @@ public partial class TarArchive
}
yield return new TarArchiveEntry(
this,
new TarFilePart(header, stream),
new TarFilePart(
header,
_compressionType == CompressionType.None ? stream : null
),
CompressionType.None,
ReaderOptions
);
@@ -154,6 +240,6 @@ public partial class TarArchive
{
var stream = Volumes.Single().Stream;
stream.Position = 0;
return TarReader.OpenReader(stream);
return new TarReader(stream, ReaderOptions, _compressionType);
}
}

View File

@@ -55,7 +55,12 @@ public partial class ZipArchive
yield return new ZipArchiveEntry(
this,
new SeekableZipFilePart(headerFactory.NotNull(), deh, s),
new SeekableZipFilePart(
headerFactory.NotNull(),
deh,
s,
ReaderOptions.Providers
),
ReaderOptions
);
}

View File

@@ -95,30 +95,55 @@ public partial class ZipArchive
);
}
public static IWritableAsyncArchive<ZipWriterOptions> OpenAsyncArchive(
public static ValueTask<IWritableAsyncArchive<ZipWriterOptions>> OpenAsyncArchive(
string path,
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<ZipWriterOptions>)OpenArchive(path, readerOptions);
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IWritableAsyncArchive<ZipWriterOptions>)OpenArchive(path, readerOptions));
}
public static IWritableAsyncArchive<ZipWriterOptions> OpenAsyncArchive(
public static ValueTask<IWritableAsyncArchive<ZipWriterOptions>> OpenAsyncArchive(
Stream stream,
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<ZipWriterOptions>)OpenArchive(stream, readerOptions);
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IWritableAsyncArchive<ZipWriterOptions>)OpenArchive(stream, readerOptions));
}
public static IWritableAsyncArchive<ZipWriterOptions> OpenAsyncArchive(
public static ValueTask<IWritableAsyncArchive<ZipWriterOptions>> OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<ZipWriterOptions>)OpenArchive(fileInfo, readerOptions);
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IWritableAsyncArchive<ZipWriterOptions>)OpenArchive(fileInfo, readerOptions));
}
public static IWritableAsyncArchive<ZipWriterOptions> OpenAsyncArchive(
public static ValueTask<IWritableAsyncArchive<ZipWriterOptions>> OpenAsyncArchive(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<ZipWriterOptions>)OpenArchive(streams, readerOptions);
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IWritableAsyncArchive<ZipWriterOptions>)OpenArchive(streams, readerOptions));
}
public static IWritableAsyncArchive<ZipWriterOptions> OpenAsyncArchive(
public static ValueTask<IWritableAsyncArchive<ZipWriterOptions>> OpenAsyncArchive(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<ZipWriterOptions>)OpenArchive(fileInfos, readerOptions);
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IWritableAsyncArchive<ZipWriterOptions>)OpenArchive(fileInfos, readerOptions));
}
public static bool IsZipFile(string filePath, string? password = null) =>
IsZipFile(new FileInfo(filePath), password);
@@ -223,7 +248,8 @@ public partial class ZipArchive
public static IWritableArchive<ZipWriterOptions> CreateArchive() => new ZipArchive();
public static IWritableAsyncArchive<ZipWriterOptions> CreateAsyncArchive() => new ZipArchive();
public static ValueTask<IWritableAsyncArchive<ZipWriterOptions>> CreateAsyncArchive() =>
new(new ZipArchive());
public static async ValueTask<bool> IsZipMultiAsync(
Stream stream,

View File

@@ -96,7 +96,12 @@ public partial class ZipArchive
yield return new ZipArchiveEntry(
this,
new SeekableZipFilePart(headerFactory.NotNull(), deh, s),
new SeekableZipFilePart(
headerFactory.NotNull(),
deh,
s,
ReaderOptions.Providers
),
ReaderOptions
);
}
@@ -171,6 +176,6 @@ public partial class ZipArchive
{
var stream = Volumes.Single().Stream;
stream.Position = 0;
return new((IAsyncReader)ZipReader.OpenReader(stream));
return new((IAsyncReader)ZipReader.OpenReader(stream, ReaderOptions, Entries));
}
}

View File

@@ -12,7 +12,9 @@ public partial class GZipEntry
)
{
yield return new GZipEntry(
await GZipFilePart.CreateAsync(stream, options.ArchiveEncoding).ConfigureAwait(false),
await GZipFilePart
.CreateAsync(stream, options.ArchiveEncoding, options.Providers)
.ConfigureAwait(false),
options
);
}

View File

@@ -46,7 +46,10 @@ public partial class GZipEntry : Entry
internal static IEnumerable<GZipEntry> GetEntries(Stream stream, ReaderOptions options)
{
yield return new GZipEntry(GZipFilePart.Create(stream, options.ArchiveEncoding), options);
yield return new GZipEntry(
GZipFilePart.Create(stream, options.ArchiveEncoding, options.Providers),
options
);
}
// Async methods moved to GZipEntry.Async.cs

View File

@@ -5,7 +5,9 @@ using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Providers;
namespace SharpCompress.Common.GZip;
@@ -14,10 +16,11 @@ internal sealed partial class GZipFilePart
internal static async ValueTask<GZipFilePart> CreateAsync(
Stream stream,
IArchiveEncoding archiveEncoding,
CompressionProviderRegistry compressionProviders,
CancellationToken cancellationToken = default
)
{
var part = new GZipFilePart(stream, archiveEncoding);
var part = new GZipFilePart(stream, archiveEncoding, compressionProviders);
await part.ReadAndValidateGzipHeaderAsync(cancellationToken).ConfigureAwait(false);
if (stream.CanSeek)
@@ -131,4 +134,14 @@ internal sealed partial class GZipFilePart
var buffer = list.ToArray();
return ArchiveEncoding.Decode(buffer);
}
internal override async ValueTask<Stream?> GetCompressedStreamAsync(
CancellationToken cancellationToken = default
)
{
// GZip uses Deflate compression
return await _compressionProviders
.CreateDecompressStreamAsync(CompressionType.Deflate, _stream, cancellationToken)
.ConfigureAwait(false);
}
}

View File

@@ -5,6 +5,7 @@ using System.IO;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Providers;
namespace SharpCompress.Common.GZip;
@@ -12,10 +13,15 @@ internal sealed partial class GZipFilePart : FilePart
{
private string? _name;
private readonly Stream _stream;
private readonly CompressionProviderRegistry _compressionProviders;
internal static GZipFilePart Create(Stream stream, IArchiveEncoding archiveEncoding)
internal static GZipFilePart Create(
Stream stream,
IArchiveEncoding archiveEncoding,
CompressionProviderRegistry compressionProviders
)
{
var part = new GZipFilePart(stream, archiveEncoding);
var part = new GZipFilePart(stream, archiveEncoding, compressionProviders);
part.ReadAndValidateGzipHeader();
if (stream.CanSeek)
@@ -35,8 +41,16 @@ internal sealed partial class GZipFilePart : FilePart
return part;
}
private GZipFilePart(Stream stream, IArchiveEncoding archiveEncoding)
: base(archiveEncoding) => _stream = stream;
private GZipFilePart(
Stream stream,
IArchiveEncoding archiveEncoding,
CompressionProviderRegistry compressionProviders
)
: base(archiveEncoding)
{
_stream = stream;
_compressionProviders = compressionProviders;
}
internal long EntryStartPosition { get; private set; }
@@ -46,13 +60,11 @@ internal sealed partial class GZipFilePart : FilePart
internal override string? FilePartName => _name;
internal override Stream GetCompressedStream() =>
new DeflateStream(
_stream,
CompressionMode.Decompress,
CompressionLevel.Default,
leaveOpen: true
);
internal override Stream GetCompressedStream()
{
// GZip uses Deflate compression; at this point we need a deflate stream
return _compressionProviders.CreateDecompressStream(CompressionType.Deflate, _stream);
}
internal override Stream GetRawStream() => _stream;

View File

@@ -15,7 +15,9 @@ public partial class LzwEntry
)
{
yield return new LzwEntry(
await LzwFilePart.CreateAsync(stream, options.ArchiveEncoding, cancellationToken),
await LzwFilePart
.CreateAsync(stream, options.ArchiveEncoding, options.Providers, cancellationToken)
.ConfigureAwait(false),
options
);
}

View File

@@ -46,7 +46,10 @@ public partial class LzwEntry : Entry
internal static IEnumerable<LzwEntry> GetEntries(Stream stream, ReaderOptions options)
{
yield return new LzwEntry(LzwFilePart.Create(stream, options.ArchiveEncoding), options);
yield return new LzwEntry(
LzwFilePart.Create(stream, options.ArchiveEncoding, options.Providers),
options
);
}
// Async methods moved to LzwEntry.Async.cs

View File

@@ -1,6 +1,8 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Providers;
namespace SharpCompress.Common.Lzw;
@@ -9,15 +11,25 @@ internal sealed partial class LzwFilePart
internal static async ValueTask<LzwFilePart> CreateAsync(
Stream stream,
IArchiveEncoding archiveEncoding,
CompressionProviderRegistry compressionProviders,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
var part = new LzwFilePart(stream, archiveEncoding);
var part = new LzwFilePart(stream, archiveEncoding, compressionProviders);
// For non-seekable streams, we can't track position, so use 0 since the stream will be
// read sequentially from its current position.
part.EntryStartPosition = stream.CanSeek ? stream.Position : 0;
return part;
}
internal override async ValueTask<Stream?> GetCompressedStreamAsync(
CancellationToken cancellationToken = default
)
{
return await _compressionProviders
.CreateDecompressStreamAsync(CompressionType.Lzw, _stream, cancellationToken)
.ConfigureAwait(false);
}
}

View File

@@ -1,5 +1,6 @@
using System.IO;
using SharpCompress.Compressors.Lzw;
using SharpCompress.Common;
using SharpCompress.Providers;
namespace SharpCompress.Common.Lzw;
@@ -7,10 +8,15 @@ internal sealed partial class LzwFilePart : FilePart
{
private readonly Stream _stream;
private readonly string? _name;
private readonly CompressionProviderRegistry _compressionProviders;
internal static LzwFilePart Create(Stream stream, IArchiveEncoding archiveEncoding)
internal static LzwFilePart Create(
Stream stream,
IArchiveEncoding archiveEncoding,
CompressionProviderRegistry compressionProviders
)
{
var part = new LzwFilePart(stream, archiveEncoding);
var part = new LzwFilePart(stream, archiveEncoding, compressionProviders);
// For non-seekable streams, we can't track position, so use 0 since the stream will be
// read sequentially from its current position.
@@ -18,11 +24,16 @@ internal sealed partial class LzwFilePart : FilePart
return part;
}
private LzwFilePart(Stream stream, IArchiveEncoding archiveEncoding)
private LzwFilePart(
Stream stream,
IArchiveEncoding archiveEncoding,
CompressionProviderRegistry compressionProviders
)
: base(archiveEncoding)
{
_stream = stream;
_name = DeriveFileName(stream);
_compressionProviders = compressionProviders;
}
internal long EntryStartPosition { get; private set; }
@@ -30,7 +41,7 @@ internal sealed partial class LzwFilePart : FilePart
internal override string? FilePartName => _name;
internal override Stream GetCompressedStream() =>
new LzwStream(_stream) { IsStreamOwner = false };
_compressionProviders.CreateDecompressStream(CompressionType.Lzw, _stream);
internal override Stream GetRawStream() => _stream;

View File

@@ -1,3 +1,6 @@
using SharpCompress.Compressors;
using SharpCompress.Providers;
namespace SharpCompress.Common.Options;
public interface IReaderOptions
@@ -6,10 +9,40 @@ public interface IReaderOptions
IProgressOptions,
IExtractionOptions
{
/// <summary>
/// Look for RarArchive (Check for self-extracting archives or cases where RarArchive isn't at the start of the file)
/// </summary>
bool LookForHeader { get; init; }
/// <summary>
/// Password for encrypted archives.
/// </summary>
string? Password { get; init; }
/// <summary>
/// Disable checking for incomplete archives.
/// </summary>
bool DisableCheckIncomplete { get; init; }
/// <summary>
/// Buffer size for stream operations.
/// </summary>
int BufferSize { get; init; }
/// <summary>
/// Provide a hint for the extension of the archive being read, can speed up finding the correct decoder.
/// </summary>
string? ExtensionHint { get; init; }
/// <summary>
/// Size of the rewindable buffer for non-seekable streams.
/// </summary>
int? RewindableBufferSize { get; init; }
/// <summary>
/// Registry of compression providers.
/// Defaults to <see cref="CompressionProviderRegistry.Default" /> but can be replaced with custom providers.
/// Use this to provide alternative decompression implementations.
/// </summary>
CompressionProviderRegistry Providers { get; init; }
}

View File

@@ -1,9 +1,28 @@
using SharpCompress.Common;
using SharpCompress.Compressors;
using SharpCompress.Providers;
namespace SharpCompress.Common.Options;
/// <summary>
/// Options for configuring writer behavior when creating archives.
/// </summary>
public interface IWriterOptions : IStreamOptions, IEncodingOptions, IProgressOptions
{
/// <summary>
/// The compression type to use for the archive.
/// </summary>
CompressionType CompressionType { get; init; }
/// <summary>
/// The compression level to be used when the compression type supports variable levels.
/// </summary>
int CompressionLevel { get; init; }
/// <summary>
/// Registry of compression providers.
/// Defaults to <see cref="CompressionProviderRegistry.Default" /> but can be replaced with custom providers, such as
/// System.IO.Compression for Deflate/GZip on modern .NET.
/// </summary>
CompressionProviderRegistry Providers { get; init; }
}

View File

@@ -1,5 +1,7 @@
using System.IO;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors;
using SharpCompress.Providers;
namespace SharpCompress.Common.Zip;
@@ -11,9 +13,10 @@ internal partial class SeekableZipFilePart : ZipFilePart
internal SeekableZipFilePart(
SeekableZipHeaderFactory headerFactory,
DirectoryEntryHeader header,
Stream stream
Stream stream,
CompressionProviderRegistry compressionProviders
)
: base(header, stream) => _headerFactory = headerFactory;
: base(header, stream, compressionProviders) => _headerFactory = headerFactory;
internal override Stream GetCompressedStream()
{

View File

@@ -1,7 +1,8 @@
 using System.IO;
 using SharpCompress.Common.Zip.Headers;
-using SharpCompress.Compressors.Deflate;
+using SharpCompress.Compressors;
 using SharpCompress.IO;
+using SharpCompress.Providers;
namespace SharpCompress.Common.Zip;
@@ -9,8 +10,12 @@ internal sealed partial class StreamingZipFilePart : ZipFilePart
{
private Stream? _decompressionStream;
-    internal StreamingZipFilePart(ZipFileEntry header, Stream stream)
-        : base(header, stream) { }
+    internal StreamingZipFilePart(
+        ZipFileEntry header,
+        Stream stream,
+        CompressionProviderRegistry compressionProviders
+    )
+        : base(header, stream, compressionProviders) { }
protected override Stream CreateBaseStream() => Header.PackedStream.NotNull();
@@ -47,11 +52,6 @@ internal sealed partial class StreamingZipFilePart : ZipFilePart
// If we had TotalIn / TotalOut we could have used them
Header.CompressedSize = _decompressionStream.Position;
-            if (_decompressionStream is DeflateStream deflateStream)
-            {
-                stream.Position = 0;
-            }
Skipped = true;
}
var reader = new BinaryReader(stream, System.Text.Encoding.Default, leaveOpen: true);

View File

@@ -10,6 +10,10 @@ public class ZipEntry : Entry
{
private readonly ZipFilePart? _filePart;
// WinZip AES extra data constants
private const int MinimumWinZipAesExtraDataLength = 7;
private const int WinZipAesCompressionMethodOffset = 5;
internal ZipEntry(ZipFilePart? filePart, IReaderOptions readerOptions)
: base(readerOptions)
{
@@ -33,24 +37,54 @@ public class ZipEntry : Entry
CreatedTime = times?.UnicodeTimes.Item3;
}
-    public override CompressionType CompressionType =>
-        _filePart?.Header.CompressionMethod switch
+    public override CompressionType CompressionType
+    {
+        get
         {
-            ZipCompressionMethod.BZip2 => CompressionType.BZip2,
-            ZipCompressionMethod.Deflate => CompressionType.Deflate,
-            ZipCompressionMethod.Deflate64 => CompressionType.Deflate64,
-            ZipCompressionMethod.LZMA => CompressionType.LZMA,
-            ZipCompressionMethod.PPMd => CompressionType.PPMd,
-            ZipCompressionMethod.None => CompressionType.None,
-            ZipCompressionMethod.Shrink => CompressionType.Shrink,
-            ZipCompressionMethod.Reduce1 => CompressionType.Reduce1,
-            ZipCompressionMethod.Reduce2 => CompressionType.Reduce2,
-            ZipCompressionMethod.Reduce3 => CompressionType.Reduce3,
-            ZipCompressionMethod.Reduce4 => CompressionType.Reduce4,
-            ZipCompressionMethod.Explode => CompressionType.Explode,
-            ZipCompressionMethod.ZStandard => CompressionType.ZStandard,
-            _ => CompressionType.Unknown,
-        };
+            var compressionMethod = GetActualCompressionMethod();
+            return compressionMethod switch
+            {
+                ZipCompressionMethod.BZip2 => CompressionType.BZip2,
+                ZipCompressionMethod.Deflate => CompressionType.Deflate,
+                ZipCompressionMethod.Deflate64 => CompressionType.Deflate64,
+                ZipCompressionMethod.LZMA => CompressionType.LZMA,
+                ZipCompressionMethod.PPMd => CompressionType.PPMd,
+                ZipCompressionMethod.None => CompressionType.None,
+                ZipCompressionMethod.Shrink => CompressionType.Shrink,
+                ZipCompressionMethod.Reduce1 => CompressionType.Reduce1,
+                ZipCompressionMethod.Reduce2 => CompressionType.Reduce2,
+                ZipCompressionMethod.Reduce3 => CompressionType.Reduce3,
+                ZipCompressionMethod.Reduce4 => CompressionType.Reduce4,
+                ZipCompressionMethod.Explode => CompressionType.Explode,
+                ZipCompressionMethod.ZStandard => CompressionType.ZStandard,
+                _ => CompressionType.Unknown,
+            };
+        }
+    }
private ZipCompressionMethod GetActualCompressionMethod()
{
if (_filePart?.Header.CompressionMethod != ZipCompressionMethod.WinzipAes)
{
return _filePart?.Header.CompressionMethod ?? ZipCompressionMethod.None;
}
// For WinZip AES, the actual compression method is stored in the extra data
var aesExtraData = _filePart.Header.Extra.FirstOrDefault(x =>
x.Type == ExtraDataType.WinZipAes
);
if (aesExtraData is null || aesExtraData.DataBytes.Length < MinimumWinZipAesExtraDataLength)
{
return ZipCompressionMethod.WinzipAes;
}
// The compression method is at offset 5 in the extra data
return (ZipCompressionMethod)
System.Buffers.Binary.BinaryPrimitives.ReadUInt16LittleEndian(
aesExtraData.DataBytes.AsSpan(WinZipAesCompressionMethodOffset)
);
}
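    // For reference, the 7-byte WinZip AES (AE-x) extra field read above has this
    // layout (multi-byte fields little-endian; offsets match the constants above):
    //   0..1  vendor version (0x0001 = AE-1, 0x0002 = AE-2)
    //   2..3  vendor ID, always 0x4541 ("AE")
    //   4     AES key strength (0x01 = 128-bit, 0x02 = 192-bit, 0x03 = 256-bit)
    //   5..6  the actual compression method recovered by this property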
public override long Crc => _filePart?.Header.Crc ?? 0;

View File

@@ -6,17 +6,8 @@ using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
 using SharpCompress.Compressors;
-using SharpCompress.Compressors.BZip2;
-using SharpCompress.Compressors.Deflate;
-using SharpCompress.Compressors.Deflate64;
-using SharpCompress.Compressors.Explode;
-using SharpCompress.Compressors.LZMA;
-using SharpCompress.Compressors.PPMd;
-using SharpCompress.Compressors.Reduce;
-using SharpCompress.Compressors.Shrink;
-using SharpCompress.Compressors.Xz;
-using SharpCompress.Compressors.ZStandard;
 using SharpCompress.IO;
+using SharpCompress.Providers;
namespace SharpCompress.Common.Zip;
@@ -123,6 +114,7 @@ internal abstract partial class ZipFilePart
CancellationToken cancellationToken = default
)
{
// Handle special cases first
switch (method)
{
case ZipCompressionMethod.None:
@@ -134,98 +126,24 @@ internal abstract partial class ZipFilePart
return stream;
}
-            case ZipCompressionMethod.Shrink:
+            case ZipCompressionMethod.WinzipAes:
             {
-                return await ShrinkStream
-                    .CreateAsync(
-                        stream,
-                        CompressionMode.Decompress,
-                        Header.CompressedSize,
-                        Header.UncompressedSize,
-                        cancellationToken
-                    )
-                    .ConfigureAwait(false);
-            }
-            case ZipCompressionMethod.Reduce1:
-            {
-                return await ReduceStream
-                    .CreateAsync(
-                        stream,
-                        Header.CompressedSize,
-                        Header.UncompressedSize,
-                        1,
-                        cancellationToken
-                    )
-                    .ConfigureAwait(false);
-            }
-            case ZipCompressionMethod.Reduce2:
-            {
-                return await ReduceStream
-                    .CreateAsync(
-                        stream,
-                        Header.CompressedSize,
-                        Header.UncompressedSize,
-                        2,
-                        cancellationToken
-                    )
-                    .ConfigureAwait(false);
-            }
-            case ZipCompressionMethod.Reduce3:
-            {
-                return await ReduceStream
-                    .CreateAsync(
-                        stream,
-                        Header.CompressedSize,
-                        Header.UncompressedSize,
-                        3,
-                        cancellationToken
-                    )
-                    .ConfigureAwait(false);
-            }
-            case ZipCompressionMethod.Reduce4:
-            {
-                return await ReduceStream
-                    .CreateAsync(
-                        stream,
-                        Header.CompressedSize,
-                        Header.UncompressedSize,
-                        4,
-                        cancellationToken
-                    )
-                    .ConfigureAwait(false);
-            }
-            case ZipCompressionMethod.Explode:
-            {
-                return await ExplodeStream
-                    .CreateAsync(
-                        stream,
-                        Header.CompressedSize,
-                        Header.UncompressedSize,
-                        Header.Flags,
-                        cancellationToken
-                    )
+                return await CreateWinzipAesDecompressionStreamAsync(stream, cancellationToken)
                     .ConfigureAwait(false);
             }
+        }
-            case ZipCompressionMethod.Deflate:
-            {
-                return new DeflateStream(stream, CompressionMode.Decompress);
-            }
-            case ZipCompressionMethod.Deflate64:
-            {
-                return new Deflate64Stream(stream, CompressionMode.Decompress);
-            }
-            case ZipCompressionMethod.BZip2:
-            {
-                return await BZip2Stream
-                    .CreateAsync(
-                        stream,
-                        CompressionMode.Decompress,
-                        false,
-                        cancellationToken: cancellationToken
-                    )
-                    .ConfigureAwait(false);
-            }
+        var compressionType = ToCompressionType(method);
+        var providers = GetProviders();
+        var context = new CompressionContext
+        {
+            InputSize = Header.CompressedSize,
+            OutputSize = Header.UncompressedSize,
+            CanSeek = stream.CanSeek,
+        };
+        switch (method)
+        {
case ZipCompressionMethod.LZMA:
{
if (FlagUtility.HasFlag(Header.Flags, HeaderFlags.Encrypted))
@@ -234,81 +152,108 @@ internal abstract partial class ZipFilePart
}
var buffer = new byte[4];
await stream.ReadFullyAsync(buffer, 0, 4, cancellationToken).ConfigureAwait(false);
var version = BinaryPrimitives.ReadUInt16LittleEndian(buffer.AsSpan(0, 2));
var propsSize = BinaryPrimitives.ReadUInt16LittleEndian(buffer.AsSpan(2, 2));
var props = new byte[propsSize];
await stream
.ReadFullyAsync(props, 0, propsSize, cancellationToken)
.ConfigureAwait(false);
-                return await LzmaStream
-                    .CreateAsync(
-                        props,
-                        stream,
+                context = context with
+                {
+                    Properties = props,
+                    InputSize =
                         Header.CompressedSize > 0 ? Header.CompressedSize - 4 - props.Length : -1,
-                        FlagUtility.HasFlag(Header.Flags, HeaderFlags.Bit1)
-                            ? -1
-                            : Header.UncompressedSize
+                    OutputSize = FlagUtility.HasFlag(Header.Flags, HeaderFlags.Bit1)
+                        ? -1
+                        : Header.UncompressedSize,
+                };
+                return await providers
+                    .CreateDecompressStreamAsync(
+                        compressionType,
+                        stream,
+                        context,
                         cancellationToken
                     )
                     .ConfigureAwait(false);
}
-            case ZipCompressionMethod.Xz:
-            {
-                return new XZStream(stream);
-            }
-            case ZipCompressionMethod.ZStandard:
-            {
-                return new DecompressionStream(stream);
-            }
             case ZipCompressionMethod.PPMd:
             {
                 var props = new byte[2];
                 await stream.ReadFullyAsync(props, 0, 2, cancellationToken).ConfigureAwait(false);
-                return await PpmdStream
-                    .CreateAsync(new PpmdProperties(props), stream, false, cancellationToken)
+                context = context with { Properties = props };
+                return await providers
+                    .CreateDecompressStreamAsync(
+                        compressionType,
+                        stream,
+                        context,
+                        cancellationToken
+                    )
.ConfigureAwait(false);
}
-            case ZipCompressionMethod.WinzipAes:
+            case ZipCompressionMethod.Explode:
             {
-                var data = Header.Extra.SingleOrDefault(x => x.Type == ExtraDataType.WinZipAes);
-                if (data is null)
-                {
-                    throw new InvalidFormatException("No Winzip AES extra data found.");
-                }
-                if (data.Length != 7)
-                {
-                    throw new InvalidFormatException("Winzip data length is not 7.");
-                }
-                var compressedMethod = BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes);
-                if (compressedMethod != 0x01 && compressedMethod != 0x02)
-                {
-                    throw new InvalidFormatException(
-                        "Unexpected vendor version number for WinZip AES metadata"
-                    );
-                }
-                var vendorId = BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes.AsSpan(2));
-                if (vendorId != 0x4541)
-                {
-                    throw new InvalidFormatException(
-                        "Unexpected vendor ID for WinZip AES metadata"
-                    );
-                }
-                return await CreateDecompressionStreamAsync(
+                context = context with { FormatOptions = Header.Flags };
+                return await providers
+                    .CreateDecompressStreamAsync(
+                        compressionType,
                         stream,
-                    (ZipCompressionMethod)
-                        BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes.AsSpan(5)),
+                        context,
                         cancellationToken
                     )
                     .ConfigureAwait(false);
             }
             default:
             {
-                throw new NotSupportedException("CompressionMethod: " + Header.CompressionMethod);
+                return await providers
+                    .CreateDecompressStreamAsync(
+                        compressionType,
+                        stream,
+                        context,
+                        cancellationToken
+                    )
+                    .ConfigureAwait(false);
}
}
}
private async ValueTask<Stream> CreateWinzipAesDecompressionStreamAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
var data = Header.Extra.SingleOrDefault(x => x.Type == ExtraDataType.WinZipAes);
if (data is null)
{
throw new InvalidFormatException("No Winzip AES extra data found.");
}
if (data.Length != 7)
{
throw new InvalidFormatException("Winzip data length is not 7.");
}
var compressedMethod = BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes);
if (compressedMethod != 0x01 && compressedMethod != 0x02)
{
throw new InvalidFormatException(
"Unexpected vendor version number for WinZip AES metadata"
);
}
var vendorId = BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes.AsSpan(2));
if (vendorId != 0x4541)
{
throw new InvalidFormatException("Unexpected vendor ID for WinZip AES metadata");
}
return await CreateDecompressionStreamAsync(
stream,
(ZipCompressionMethod)
BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes.AsSpan(5)),
cancellationToken
)
.ConfigureAwait(false);
}
}
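
A sketch of the call shape that every non-special method now funnels through (only members visible in this diff are used; the stream, sizes, and token are placeholders):

    var context = new CompressionContext
    {
        InputSize = compressedSize,    // placeholder
        OutputSize = uncompressedSize, // placeholder
        CanSeek = input.CanSeek,
    };
    var decompressed = await CompressionProviderRegistry
        .Default.CreateDecompressStreamAsync(CompressionType.BZip2, input, context, token)
        .ConfigureAwait(false);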

View File

@@ -15,17 +15,25 @@ using SharpCompress.Compressors.Shrink;
using SharpCompress.Compressors.Xz;
using SharpCompress.Compressors.ZStandard;
using SharpCompress.IO;
using SharpCompress.Providers;
namespace SharpCompress.Common.Zip;
internal abstract partial class ZipFilePart : FilePart
{
-    internal ZipFilePart(ZipFileEntry header, Stream stream)
+    private readonly CompressionProviderRegistry _compressionProviders;
+
+    internal ZipFilePart(
+        ZipFileEntry header,
+        Stream stream,
+        CompressionProviderRegistry compressionProviders
+    )
         : base(header.ArchiveEncoding)
     {
         Header = header;
         header.Part = this;
         BaseStream = stream;
+        _compressionProviders = compressionProviders;
     }
internal Stream BaseStream { get; }
@@ -64,8 +72,37 @@ internal abstract partial class ZipFilePart : FilePart
protected bool LeaveStreamOpen =>
FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor) || Header.IsZip64;
/// <summary>
/// Gets the compression provider registry supplied at construction.
/// </summary>
protected CompressionProviderRegistry GetProviders() => _compressionProviders;
/// <summary>
/// Converts ZipCompressionMethod to CompressionType.
/// </summary>
protected static CompressionType ToCompressionType(ZipCompressionMethod method) =>
method switch
{
ZipCompressionMethod.None => CompressionType.None,
ZipCompressionMethod.Deflate => CompressionType.Deflate,
ZipCompressionMethod.Deflate64 => CompressionType.Deflate64,
ZipCompressionMethod.BZip2 => CompressionType.BZip2,
ZipCompressionMethod.LZMA => CompressionType.LZMA,
ZipCompressionMethod.PPMd => CompressionType.PPMd,
ZipCompressionMethod.ZStandard => CompressionType.ZStandard,
ZipCompressionMethod.Xz => CompressionType.Xz,
ZipCompressionMethod.Shrink => CompressionType.Shrink,
ZipCompressionMethod.Reduce1 => CompressionType.Reduce1,
ZipCompressionMethod.Reduce2 => CompressionType.Reduce2,
ZipCompressionMethod.Reduce3 => CompressionType.Reduce3,
ZipCompressionMethod.Reduce4 => CompressionType.Reduce4,
ZipCompressionMethod.Explode => CompressionType.Explode,
_ => throw new NotSupportedException($"Unsupported compression method: {method}"),
};
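    // Illustrative: ToCompressionType(ZipCompressionMethod.Deflate) yields
    // CompressionType.Deflate, while methods outside this table (such as
    // WinzipAes, which is special-cased before this mapping is reached)
    // throw NotSupportedException rather than mapping to Unknown.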
protected Stream CreateDecompressionStream(Stream stream, ZipCompressionMethod method)
{
// Handle special cases first
switch (method)
{
case ZipCompressionMethod.None:
@@ -74,76 +111,29 @@ internal abstract partial class ZipFilePart : FilePart
{
return new DataDescriptorStream(stream);
}
return stream;
}
-            case ZipCompressionMethod.Shrink:
+            case ZipCompressionMethod.WinzipAes:
             {
-                return new ShrinkStream(
-                    stream,
-                    CompressionMode.Decompress,
-                    Header.CompressedSize,
-                    Header.UncompressedSize
-                );
-            }
-            case ZipCompressionMethod.Reduce1:
-            {
-                return ReduceStream.Create(
-                    stream,
-                    Header.CompressedSize,
-                    Header.UncompressedSize,
-                    1
-                );
-            }
-            case ZipCompressionMethod.Reduce2:
-            {
-                return ReduceStream.Create(
-                    stream,
-                    Header.CompressedSize,
-                    Header.UncompressedSize,
-                    2
-                );
-            }
-            case ZipCompressionMethod.Reduce3:
-            {
-                return ReduceStream.Create(
-                    stream,
-                    Header.CompressedSize,
-                    Header.UncompressedSize,
-                    3
-                );
-            }
-            case ZipCompressionMethod.Reduce4:
-            {
-                return ReduceStream.Create(
-                    stream,
-                    Header.CompressedSize,
-                    Header.UncompressedSize,
-                    4
-                );
-            }
-            case ZipCompressionMethod.Explode:
-            {
-                return ExplodeStream.Create(
-                    stream,
-                    Header.CompressedSize,
-                    Header.UncompressedSize,
-                    Header.Flags
-                );
+                return CreateWinzipAesDecompressionStream(stream);
             }
+        }
-            case ZipCompressionMethod.Deflate:
-            {
-                return new DeflateStream(stream, CompressionMode.Decompress);
-            }
-            case ZipCompressionMethod.Deflate64:
-            {
-                return new Deflate64Stream(stream, CompressionMode.Decompress);
-            }
-            case ZipCompressionMethod.BZip2:
-            {
-                return BZip2Stream.Create(stream, CompressionMode.Decompress, false);
-            }
+        // Get the compression type and providers
+        var compressionType = ToCompressionType(method);
+        var providers = GetProviders();
+        // Build context with header information
+        var context = new CompressionContext
+        {
+            InputSize = Header.CompressedSize,
+            OutputSize = Header.UncompressedSize,
+            CanSeek = stream.CanSeek,
+        };
+        // Handle methods that need special context
+        switch (method)
+        {
case ZipCompressionMethod.LZMA:
{
if (FlagUtility.HasFlag(Header.Flags, HeaderFlags.Encrypted))
@@ -158,71 +148,71 @@ internal abstract partial class ZipFilePart : FilePart
)
)
{
-                        reader.ReadUInt16(); //LZMA version
-                        var props = new byte[reader.ReadUInt16()];
+                        reader.ReadUInt16(); // LZMA version
+                        var propsLength = reader.ReadUInt16();
+                        var props = new byte[propsLength];
                         reader.Read(props, 0, props.Length);
-                        return LzmaStream.Create(
-                            props,
-                            stream,
-                            Header.CompressedSize > 0 ? Header.CompressedSize - 4 - props.Length : -1,
-                            FlagUtility.HasFlag(Header.Flags, HeaderFlags.Bit1)
+                        context = context with
+                        {
+                            Properties = props,
+                            InputSize =
+                                Header.CompressedSize > 0
+                                    ? Header.CompressedSize - 4 - props.Length
+                                    : -1,
+                            OutputSize = FlagUtility.HasFlag(Header.Flags, HeaderFlags.Bit1)
                                 ? -1
-                                : Header.UncompressedSize
-                        );
+                                : Header.UncompressedSize,
+                        };
+                        return providers.CreateDecompressStream(compressionType, stream, context);
}
}
-            case ZipCompressionMethod.Xz:
-            {
-                return new XZStream(stream);
-            }
-            case ZipCompressionMethod.ZStandard:
-            {
-                return new DecompressionStream(stream);
-            }
             case ZipCompressionMethod.PPMd:
             {
                 Span<byte> props = stackalloc byte[2];
                 stream.ReadFully(props);
-                return PpmdStream.Create(new PpmdProperties(props), stream, false);
+                context = context with { Properties = props.ToArray() };
+                return providers.CreateDecompressStream(compressionType, stream, context);
             }
-            case ZipCompressionMethod.WinzipAes:
+            case ZipCompressionMethod.Explode:
             {
-                var data = Header.Extra.SingleOrDefault(x => x.Type == ExtraDataType.WinZipAes);
-                if (data is null)
-                {
-                    throw new InvalidFormatException("No Winzip AES extra data found.");
-                }
-                if (data.Length != 7)
-                {
-                    throw new InvalidFormatException("Winzip data length is not 7.");
-                }
-                var compressedMethod = BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes);
-                if (compressedMethod != 0x01 && compressedMethod != 0x02)
-                {
-                    throw new InvalidFormatException(
-                        "Unexpected vendor version number for WinZip AES metadata"
-                    );
-                }
-                var vendorId = BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes.AsSpan(2));
-                if (vendorId != 0x4541)
-                {
-                    throw new InvalidFormatException(
-                        "Unexpected vendor ID for WinZip AES metadata"
-                    );
-                }
-                return CreateDecompressionStream(
-                    stream,
-                    (ZipCompressionMethod)
-                        BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes.AsSpan(5))
-                );
-            }
-            default:
-            {
-                throw new NotSupportedException("CompressionMethod: " + Header.CompressionMethod);
+                context = context with { FormatOptions = Header.Flags };
+                return providers.CreateDecompressStream(compressionType, stream, context);
             }
         }
+        // For simple methods, use the basic decompress
+        return providers.CreateDecompressStream(compressionType, stream, context);
}
private Stream CreateWinzipAesDecompressionStream(Stream stream)
{
var data = Header.Extra.SingleOrDefault(x => x.Type == ExtraDataType.WinZipAes);
if (data is null)
{
throw new InvalidFormatException("No Winzip AES extra data found.");
}
if (data.Length != 7)
{
throw new InvalidFormatException("Winzip data length is not 7.");
}
var compressedMethod = BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes);
if (compressedMethod != 0x01 && compressedMethod != 0x02)
{
throw new InvalidFormatException(
"Unexpected vendor version number for WinZip AES metadata"
);
}
var vendorId = BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes.AsSpan(2));
if (vendorId != 0x4541)
{
throw new InvalidFormatException("Unexpected vendor ID for WinZip AES metadata");
}
return CreateDecompressionStream(
stream,
(ZipCompressionMethod)BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes.AsSpan(5))
);
}
protected Stream GetCryptoStream(Stream plainStream)

View File

@@ -3,10 +3,11 @@ using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Providers;
namespace SharpCompress.Compressors.BZip2;
-public sealed partial class BZip2Stream : Stream
+public sealed partial class BZip2Stream : Stream, IFinishable
{
private Stream stream = default!;
private bool isDisposed;

View File

@@ -32,6 +32,8 @@ using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Options;
namespace SharpCompress.Compressors.Deflate;
@@ -53,8 +55,23 @@ public partial class GZipStream : Stream
public GZipStream(Stream stream, CompressionMode mode)
: this(stream, mode, CompressionLevel.Default, Encoding.UTF8) { }
public GZipStream(Stream stream, CompressionMode mode, CompressionLevel level)
: this(stream, mode, level, Encoding.UTF8) { }
public GZipStream(Stream stream, CompressionMode mode, IReaderOptions readerOptions)
: this(stream, mode, CompressionLevel.Default, readerOptions) { }
public GZipStream(
Stream stream,
CompressionMode mode,
CompressionLevel level,
IReaderOptions readerOptions
)
: this(
stream,
mode,
level,
(
readerOptions ?? throw new ArgumentNullException(nameof(readerOptions))
).ArchiveEncoding.GetEncoding()
) { }
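    // Usage sketch for the overload above (assumes a concrete ReaderOptions
    // implementing IReaderOptions; the text encoding comes from its
    // ArchiveEncoding, as the delegation shows):
    //   using var gz = new GZipStream(input, CompressionMode.Decompress, options);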
public GZipStream(
Stream stream,

View File

@@ -6,6 +6,7 @@ using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Crypto;
using SharpCompress.IO;
using SharpCompress.Providers;
namespace SharpCompress.Compressors.LZMA;
@@ -17,7 +18,7 @@ namespace SharpCompress.Compressors.LZMA;
/// <summary>
/// Stream supporting the LZIP format, as documented at http://www.nongnu.org/lzip/manual/lzip_manual.html
/// </summary>
-public sealed partial class LZipStream : Stream
+public sealed partial class LZipStream : Stream, IFinishable
{
private readonly Stream _stream;
private readonly CountingStream? _countingWritableSubStream;

View File

@@ -0,0 +1,603 @@
using System;
using System.Buffers;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Compressors.Rar.UnpackV1.Decode;
using SharpCompress.Compressors.Rar.UnpackV1.PPM;
using SharpCompress.Compressors.Rar.VM;
namespace SharpCompress.Compressors.Rar.UnpackV1;
internal sealed partial class Unpack
{
public async Task DoUnpackAsync(
FileHeader fileHeader,
Stream readStream,
Stream writeStream,
CancellationToken cancellationToken = default
)
{
destUnpSize = fileHeader.UncompressedSize;
this.fileHeader = fileHeader;
this.readStream = readStream;
this.writeStream = writeStream;
if (!fileHeader.IsSolid)
{
Init();
}
suspended = false;
await DoUnpackAsync(cancellationToken).ConfigureAwait(false);
}
public async Task DoUnpackAsync(CancellationToken cancellationToken = default)
{
if (fileHeader.CompressionMethod == 0)
{
await UnstoreFileAsync(cancellationToken).ConfigureAwait(false);
return;
}
switch (fileHeader.CompressionAlgorithm)
{
case 15:
await unpack15Async(fileHeader.IsSolid, cancellationToken).ConfigureAwait(false);
break;
case 20:
case 26:
await unpack20Async(fileHeader.IsSolid, cancellationToken).ConfigureAwait(false);
break;
case 29:
case 36:
await Unpack29Async(fileHeader.IsSolid, cancellationToken).ConfigureAwait(false);
break;
case 50:
await Unpack5Async(fileHeader.IsSolid, cancellationToken).ConfigureAwait(false);
break;
default:
throw new InvalidFormatException(
"unknown rar compression version " + fileHeader.CompressionAlgorithm
);
}
}
private async Task UnstoreFileAsync(CancellationToken cancellationToken = default)
{
var buffer = new byte[(int)Math.Min(0x10000, destUnpSize)];
do
{
var code = await readStream
.ReadAsync(buffer, 0, buffer.Length, cancellationToken)
.ConfigureAwait(false);
if (code == 0 || code == -1)
{
break;
}
code = code < destUnpSize ? code : (int)destUnpSize;
await writeStream.WriteAsync(buffer, 0, code, cancellationToken).ConfigureAwait(false);
destUnpSize -= code;
} while (!suspended && destUnpSize > 0);
}
private async Task Unpack29Async(bool solid, CancellationToken cancellationToken = default)
{
int[] DDecode = new int[PackDef.DC];
byte[] DBits = new byte[PackDef.DC];
int Bits;
if (DDecode[1] == 0)
{
int Dist = 0,
BitLength = 0,
Slot = 0;
for (var I = 0; I < DBitLengthCounts.Length; I++, BitLength++)
{
var count = DBitLengthCounts[I];
for (var J = 0; J < count; J++, Slot++, Dist += (1 << BitLength))
{
DDecode[Slot] = Dist;
DBits[Slot] = (byte)BitLength;
}
}
}
FileExtracted = true;
if (!suspended)
{
UnpInitData(solid);
if (!await unpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return;
}
if ((!solid || !tablesRead) && !ReadTables())
{
return;
}
}
if (ppmError)
{
return;
}
while (true)
{
unpPtr &= PackDef.MAXWINMASK;
if (inAddr > readBorder)
{
if (!await unpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
break;
}
}
if (((wrPtr - unpPtr) & PackDef.MAXWINMASK) < 260 && wrPtr != unpPtr)
{
UnpWriteBuf();
if (destUnpSize < 0)
{
return;
}
if (suspended)
{
FileExtracted = false;
return;
}
}
if (unpBlockType == BlockTypes.BLOCK_PPM)
{
var Ch = ppm.DecodeChar();
if (Ch == -1)
{
ppmError = true;
break;
}
if (Ch == PpmEscChar)
{
var NextCh = ppm.DecodeChar();
if (NextCh == 0)
{
if (!ReadTables())
{
break;
}
continue;
}
if (NextCh == 2 || NextCh == -1)
{
break;
}
if (NextCh == 3)
{
if (!ReadVMCodePPM())
{
break;
}
continue;
}
if (NextCh == 4)
{
int Distance = 0,
Length = 0;
var failed = false;
for (var I = 0; I < 4 && !failed; I++)
{
var ch = ppm.DecodeChar();
if (ch == -1)
{
failed = true;
}
else
{
if (I == 3)
{
Length = ch & 0xff;
}
else
{
Distance = (Distance << 8) + (ch & 0xff);
}
}
}
if (failed)
{
break;
}
CopyString(Length + 32, Distance + 2);
continue;
}
if (NextCh == 5)
{
var Length = ppm.DecodeChar();
if (Length == -1)
{
break;
}
CopyString(Length + 4, 1);
continue;
}
}
window[unpPtr++] = (byte)Ch;
continue;
}
var Number = this.decodeNumber(LD);
if (Number < 256)
{
window[unpPtr++] = (byte)Number;
continue;
}
if (Number >= 271)
{
var Length = LDecode[Number -= 271] + 3;
if ((Bits = LBits[Number]) > 0)
{
Length += Utility.URShift(GetBits(), (16 - Bits));
AddBits(Bits);
}
var DistNumber = this.decodeNumber(DD);
var Distance = DDecode[DistNumber] + 1;
if ((Bits = DBits[DistNumber]) > 0)
{
if (DistNumber > 9)
{
if (Bits > 4)
{
Distance += ((Utility.URShift(GetBits(), (20 - Bits))) << 4);
AddBits(Bits - 4);
}
if (lowDistRepCount > 0)
{
lowDistRepCount--;
Distance += prevLowDist;
}
else
{
var LowDist = this.decodeNumber(LDD);
if (LowDist == 16)
{
lowDistRepCount = PackDef.LOW_DIST_REP_COUNT - 1;
Distance += prevLowDist;
}
else
{
Distance += LowDist;
prevLowDist = LowDist;
}
}
}
else
{
Distance += Utility.URShift(GetBits(), (16 - Bits));
AddBits(Bits);
}
}
if (Distance >= 0x2000)
{
Length++;
if (Distance >= 0x40000L)
{
Length++;
}
}
InsertOldDist(Distance);
InsertLastMatch(Length, Distance);
CopyString(Length, Distance);
continue;
}
if (Number == 256)
{
if (!ReadEndOfBlock())
{
break;
}
continue;
}
if (Number == 257)
{
if (!ReadVMCode())
{
break;
}
continue;
}
if (Number == 258)
{
if (lastLength != 0)
{
CopyString(lastLength, lastDist);
}
continue;
}
if (Number < 263)
{
var DistNum = Number - 259;
var Distance = oldDist[DistNum];
for (var I = DistNum; I > 0; I--)
{
oldDist[I] = oldDist[I - 1];
}
oldDist[0] = Distance;
var LengthNumber = this.decodeNumber(RD);
var Length = LDecode[LengthNumber] + 2;
if ((Bits = LBits[LengthNumber]) > 0)
{
Length += Utility.URShift(GetBits(), (16 - Bits));
AddBits(Bits);
}
InsertLastMatch(Length, Distance);
CopyString(Length, Distance);
continue;
}
if (Number < 272)
{
var Distance = SDDecode[Number -= 263] + 1;
if ((Bits = SDBits[Number]) > 0)
{
Distance += Utility.URShift(GetBits(), (16 - Bits));
AddBits(Bits);
}
InsertOldDist(Distance);
InsertLastMatch(2, Distance);
CopyString(2, Distance);
}
}
UnpWriteBuf();
}
private async Task UnpWriteBufAsync(CancellationToken cancellationToken = default)
{
var WrittenBorder = wrPtr;
var WriteSize = (unpPtr - WrittenBorder) & PackDef.MAXWINMASK;
for (var I = 0; I < prgStack.Count; I++)
{
var flt = prgStack[I];
if (flt is null)
{
continue;
}
if (flt.NextWindow)
{
flt.NextWindow = false;
continue;
}
var BlockStart = flt.BlockStart;
var BlockLength = flt.BlockLength;
if (((BlockStart - WrittenBorder) & PackDef.MAXWINMASK) < WriteSize)
{
if (WrittenBorder != BlockStart)
{
await UnpWriteAreaAsync(WrittenBorder, BlockStart, cancellationToken)
.ConfigureAwait(false);
WrittenBorder = BlockStart;
WriteSize = (unpPtr - WrittenBorder) & PackDef.MAXWINMASK;
}
if (BlockLength <= WriteSize)
{
var BlockEnd = (BlockStart + BlockLength) & PackDef.MAXWINMASK;
if (BlockStart < BlockEnd || BlockEnd == 0)
{
rarVM.setMemory(0, window, BlockStart, BlockLength);
}
else
{
var FirstPartLength = PackDef.MAXWINSIZE - BlockStart;
rarVM.setMemory(0, window, BlockStart, FirstPartLength);
rarVM.setMemory(FirstPartLength, window, 0, BlockEnd);
}
var ParentPrg = filters[flt.ParentFilter].Program;
var Prg = flt.Program;
if (ParentPrg.GlobalData.Count > RarVM.VM_FIXEDGLOBALSIZE)
{
Prg.GlobalData.Clear();
for (
var i = 0;
i < ParentPrg.GlobalData.Count - RarVM.VM_FIXEDGLOBALSIZE;
i++
)
{
Prg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i] = ParentPrg.GlobalData[
RarVM.VM_FIXEDGLOBALSIZE + i
];
}
}
ExecuteCode(Prg);
if (Prg.GlobalData.Count > RarVM.VM_FIXEDGLOBALSIZE)
{
if (ParentPrg.GlobalData.Count < Prg.GlobalData.Count)
{
ParentPrg.GlobalData.SetSize(Prg.GlobalData.Count);
}
for (var i = 0; i < Prg.GlobalData.Count - RarVM.VM_FIXEDGLOBALSIZE; i++)
{
ParentPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i] = Prg.GlobalData[
RarVM.VM_FIXEDGLOBALSIZE + i
];
}
}
else
{
ParentPrg.GlobalData.Clear();
}
var FilteredDataOffset = Prg.FilteredDataOffset;
var FilteredDataSize = Prg.FilteredDataSize;
var FilteredData = ArrayPool<byte>.Shared.Rent(FilteredDataSize);
try
{
Array.Copy(
rarVM.Mem,
FilteredDataOffset,
FilteredData,
0,
FilteredDataSize
);
prgStack[I] = null;
while (I + 1 < prgStack.Count)
{
var NextFilter = prgStack[I + 1];
if (
NextFilter is null
|| NextFilter.BlockStart != BlockStart
|| NextFilter.BlockLength != FilteredDataSize
|| NextFilter.NextWindow
)
{
break;
}
rarVM.setMemory(0, FilteredData, 0, FilteredDataSize);
var pPrg = filters[NextFilter.ParentFilter].Program;
var NextPrg = NextFilter.Program;
if (pPrg.GlobalData.Count > RarVM.VM_FIXEDGLOBALSIZE)
{
NextPrg.GlobalData.SetSize(pPrg.GlobalData.Count);
for (
var i = 0;
i < pPrg.GlobalData.Count - RarVM.VM_FIXEDGLOBALSIZE;
i++
)
{
NextPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i] =
pPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i];
}
}
ExecuteCode(NextPrg);
if (NextPrg.GlobalData.Count > RarVM.VM_FIXEDGLOBALSIZE)
{
if (pPrg.GlobalData.Count < NextPrg.GlobalData.Count)
{
pPrg.GlobalData.SetSize(NextPrg.GlobalData.Count);
}
for (
var i = 0;
i < NextPrg.GlobalData.Count - RarVM.VM_FIXEDGLOBALSIZE;
i++
)
{
pPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i] =
NextPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i];
}
}
else
{
pPrg.GlobalData.Clear();
}
FilteredDataOffset = NextPrg.FilteredDataOffset;
FilteredDataSize = NextPrg.FilteredDataSize;
if (FilteredData.Length < FilteredDataSize)
{
ArrayPool<byte>.Shared.Return(FilteredData);
FilteredData = ArrayPool<byte>.Shared.Rent(FilteredDataSize);
}
for (var i = 0; i < FilteredDataSize; i++)
{
FilteredData[i] = NextPrg.GlobalData[FilteredDataOffset + i];
}
I++;
prgStack[I] = null;
}
await writeStream
.WriteAsync(FilteredData, 0, FilteredDataSize, cancellationToken)
.ConfigureAwait(false);
writtenFileSize += FilteredDataSize;
destUnpSize -= FilteredDataSize;
WrittenBorder = BlockEnd;
WriteSize = (unpPtr - WrittenBorder) & PackDef.MAXWINMASK;
}
finally
{
ArrayPool<byte>.Shared.Return(FilteredData);
}
}
else
{
for (var J = I; J < prgStack.Count; J++)
{
var filt = prgStack[J];
if (filt != null && filt.NextWindow)
{
filt.NextWindow = false;
}
}
wrPtr = WrittenBorder;
return;
}
}
}
await UnpWriteAreaAsync(WrittenBorder, unpPtr, cancellationToken).ConfigureAwait(false);
wrPtr = unpPtr;
}
private async Task UnpWriteAreaAsync(
int startPtr,
int endPtr,
CancellationToken cancellationToken = default
)
{
if (endPtr < startPtr)
{
await UnpWriteDataAsync(
window,
startPtr,
-startPtr & PackDef.MAXWINMASK,
cancellationToken
)
.ConfigureAwait(false);
await UnpWriteDataAsync(window, 0, endPtr, cancellationToken).ConfigureAwait(false);
}
else
{
await UnpWriteDataAsync(window, startPtr, endPtr - startPtr, cancellationToken)
.ConfigureAwait(false);
}
}
private async Task UnpWriteDataAsync(
byte[] data,
int offset,
int size,
CancellationToken cancellationToken = default
)
{
if (destUnpSize < 0)
{
return;
}
var writeSize = size;
if (writeSize > destUnpSize)
{
writeSize = (int)destUnpSize;
}
await writeStream
.WriteAsync(data, offset, writeSize, cancellationToken)
.ConfigureAwait(false);
writtenFileSize += size;
destUnpSize -= size;
}
}

View File

@@ -150,25 +150,6 @@ internal sealed partial class Unpack : BitInput, IRarUnpack
DoUnpack();
}
public async System.Threading.Tasks.Task DoUnpackAsync(
FileHeader fileHeader,
Stream readStream,
Stream writeStream,
System.Threading.CancellationToken cancellationToken = default
)
{
destUnpSize = fileHeader.UncompressedSize;
this.fileHeader = fileHeader;
this.readStream = readStream;
this.writeStream = writeStream;
if (!fileHeader.IsSolid)
{
Init();
}
suspended = false;
await DoUnpackAsync(cancellationToken).ConfigureAwait(false);
}
public void DoUnpack()
{
if (fileHeader.CompressionMethod == 0)
@@ -203,42 +184,6 @@ internal sealed partial class Unpack : BitInput, IRarUnpack
}
}
public async System.Threading.Tasks.Task DoUnpackAsync(
System.Threading.CancellationToken cancellationToken = default
)
{
if (fileHeader.CompressionMethod == 0)
{
await UnstoreFileAsync(cancellationToken).ConfigureAwait(false);
return;
}
switch (fileHeader.CompressionAlgorithm)
{
case 15: // rar 1.5 compression
await unpack15Async(fileHeader.IsSolid, cancellationToken).ConfigureAwait(false);
break;
case 20: // rar 2.x compression
case 26: // files larger than 2GB
await unpack20Async(fileHeader.IsSolid, cancellationToken).ConfigureAwait(false);
break;
case 29: // rar 3.x compression
case 36: // alternative hash
await Unpack29Async(fileHeader.IsSolid, cancellationToken).ConfigureAwait(false);
break;
case 50: // rar 5.x compression
await Unpack5Async(fileHeader.IsSolid, cancellationToken).ConfigureAwait(false);
break;
default:
throw new InvalidFormatException(
"unknown rar compression version " + fileHeader.CompressionAlgorithm
);
}
}
private void UnstoreFile()
{
Span<byte> buffer = stackalloc byte[(int)Math.Min(0x10000, destUnpSize)];
@@ -255,26 +200,6 @@ internal sealed partial class Unpack : BitInput, IRarUnpack
} while (!suspended && destUnpSize > 0);
}
private async System.Threading.Tasks.Task UnstoreFileAsync(
System.Threading.CancellationToken cancellationToken = default
)
{
var buffer = new byte[(int)Math.Min(0x10000, destUnpSize)];
do
{
var code = await readStream
.ReadAsync(buffer, 0, buffer.Length, cancellationToken)
.ConfigureAwait(false);
if (code == 0 || code == -1)
{
break;
}
code = code < destUnpSize ? code : (int)destUnpSize;
await writeStream.WriteAsync(buffer, 0, code, cancellationToken).ConfigureAwait(false);
destUnpSize -= code;
} while (!suspended && destUnpSize > 0);
}
private void Unpack29(bool solid)
{
Span<int> DDecode = stackalloc int[PackDef.DC];
@@ -553,281 +478,6 @@ internal sealed partial class Unpack : BitInput, IRarUnpack
UnpWriteBuf();
}
private async System.Threading.Tasks.Task Unpack29Async(
bool solid,
System.Threading.CancellationToken cancellationToken = default
)
{
int[] DDecode = new int[PackDef.DC];
byte[] DBits = new byte[PackDef.DC];
int Bits;
if (DDecode[1] == 0)
{
int Dist = 0,
BitLength = 0,
Slot = 0;
for (var I = 0; I < DBitLengthCounts.Length; I++, BitLength++)
{
var count = DBitLengthCounts[I];
for (var J = 0; J < count; J++, Slot++, Dist += (1 << BitLength))
{
DDecode[Slot] = Dist;
DBits[Slot] = (byte)BitLength;
}
}
}
FileExtracted = true;
if (!suspended)
{
UnpInitData(solid);
if (!await unpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return;
}
if ((!solid || !tablesRead) && !ReadTables())
{
return;
}
}
if (ppmError)
{
return;
}
while (true)
{
unpPtr &= PackDef.MAXWINMASK;
if (inAddr > readBorder)
{
if (!await unpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
break;
}
}
if (((wrPtr - unpPtr) & PackDef.MAXWINMASK) < 260 && wrPtr != unpPtr)
{
await UnpWriteBufAsync(cancellationToken).ConfigureAwait(false);
if (destUnpSize < 0)
{
return;
}
if (suspended)
{
FileExtracted = false;
return;
}
}
if (unpBlockType == BlockTypes.BLOCK_PPM)
{
var ch = ppm.DecodeChar();
if (ch == -1)
{
ppmError = true;
break;
}
if (ch == PpmEscChar)
{
var nextCh = ppm.DecodeChar();
if (nextCh == 0)
{
if (!ReadTables())
{
break;
}
continue;
}
if (nextCh == 2 || nextCh == -1)
{
break;
}
if (nextCh == 3)
{
if (!ReadVMCode())
{
break;
}
continue;
}
if (nextCh == 4)
{
uint Distance = 0,
Length = 0;
var failed = false;
for (var I = 0; I < 4 && !failed; I++)
{
var ch2 = ppm.DecodeChar();
if (ch2 == -1)
{
failed = true;
}
else if (I == 3)
{
Length = (uint)ch2;
}
else
{
Distance = (Distance << 8) + (uint)ch2;
}
}
if (failed)
{
break;
}
CopyString(Length + 32, Distance + 2);
continue;
}
if (nextCh == 5)
{
var length = ppm.DecodeChar();
if (length == -1)
{
break;
}
CopyString((uint)(length + 4), 1);
continue;
}
}
window[unpPtr++] = (byte)ch;
continue;
}
var Number = this.decodeNumber(LD);
if (Number < 256)
{
window[unpPtr++] = (byte)Number;
continue;
}
if (Number >= 271)
{
var Length = LDecode[Number -= 271] + 3;
if ((Bits = LBits[Number]) > 0)
{
Length += GetBits() >> (16 - Bits);
AddBits(Bits);
}
var DistNumber = this.decodeNumber(DD);
var Distance = DDecode[DistNumber] + 1;
if ((Bits = DBits[DistNumber]) > 0)
{
if (DistNumber > 9)
{
if (Bits > 4)
{
Distance += (GetBits() >> (20 - Bits)) << 4;
AddBits(Bits - 4);
}
if (lowDistRepCount > 0)
{
lowDistRepCount--;
Distance += prevLowDist;
}
else
{
var LowDist = this.decodeNumber(LDD);
if (LowDist == 16)
{
lowDistRepCount = PackDef.LOW_DIST_REP_COUNT - 1;
Distance += prevLowDist;
}
else
{
Distance += LowDist;
prevLowDist = (int)LowDist;
}
}
}
else
{
Distance += GetBits() >> (16 - Bits);
AddBits(Bits);
}
}
if (Distance >= 0x2000)
{
Length++;
if (Distance >= 0x40000)
{
Length++;
}
}
InsertOldDist(Distance);
lastLength = Length;
CopyString(Length, Distance);
continue;
}
if (Number == 256)
{
if (!ReadEndOfBlock())
{
break;
}
continue;
}
if (Number == 257)
{
if (!ReadVMCode())
{
break;
}
continue;
}
if (Number == 258)
{
if (lastLength != 0)
{
CopyString(lastLength, oldDist[0]);
}
continue;
}
if (Number < 263)
{
var DistNum = Number - 259;
var Distance = (uint)oldDist[DistNum];
for (var I = DistNum; I > 0; I--)
{
oldDist[I] = oldDist[I - 1];
}
oldDist[0] = (int)Distance;
var LengthNumber = this.decodeNumber(RD);
var Length = LDecode[LengthNumber] + 2;
if ((Bits = LBits[LengthNumber]) > 0)
{
Length += GetBits() >> (16 - Bits);
AddBits(Bits);
}
lastLength = Length;
CopyString((uint)Length, Distance);
continue;
}
if (Number < 272)
{
var Distance = SDDecode[Number -= 263] + 1;
if ((Bits = SDBits[Number]) > 0)
{
Distance += GetBits() >> (16 - Bits);
AddBits(Bits);
}
InsertOldDist((uint)Distance);
lastLength = 2;
CopyString(2, (uint)Distance);
}
}
await UnpWriteBufAsync(cancellationToken).ConfigureAwait(false);
}
private void UnpWriteBuf()
{
var WrittenBorder = wrPtr;
@@ -1684,256 +1334,6 @@ internal sealed partial class Unpack : BitInput, IRarUnpack
}
}
private async System.Threading.Tasks.Task UnpWriteBufAsync(
System.Threading.CancellationToken cancellationToken = default
)
{
var WrittenBorder = wrPtr;
var WriteSize = (unpPtr - WrittenBorder) & PackDef.MAXWINMASK;
for (var I = 0; I < prgStack.Count; I++)
{
var flt = prgStack[I];
if (flt is null)
{
continue;
}
if (flt.NextWindow)
{
flt.NextWindow = false;
continue;
}
var BlockStart = flt.BlockStart;
var BlockLength = flt.BlockLength;
if (((BlockStart - WrittenBorder) & PackDef.MAXWINMASK) < WriteSize)
{
if (WrittenBorder != BlockStart)
{
await UnpWriteAreaAsync(WrittenBorder, BlockStart, cancellationToken)
.ConfigureAwait(false);
WrittenBorder = BlockStart;
WriteSize = (unpPtr - WrittenBorder) & PackDef.MAXWINMASK;
}
if (BlockLength <= WriteSize)
{
var BlockEnd = (BlockStart + BlockLength) & PackDef.MAXWINMASK;
if (BlockStart < BlockEnd || BlockEnd == 0)
{
rarVM.setMemory(0, window, BlockStart, BlockLength);
}
else
{
var FirstPartLength = PackDef.MAXWINSIZE - BlockStart;
rarVM.setMemory(0, window, BlockStart, FirstPartLength);
rarVM.setMemory(FirstPartLength, window, 0, BlockEnd);
}
var ParentPrg = filters[flt.ParentFilter].Program;
var Prg = flt.Program;
if (ParentPrg.GlobalData.Count > RarVM.VM_FIXEDGLOBALSIZE)
{
Prg.GlobalData.Clear();
for (
var i = 0;
i < ParentPrg.GlobalData.Count - RarVM.VM_FIXEDGLOBALSIZE;
i++
)
{
Prg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i] = ParentPrg.GlobalData[
RarVM.VM_FIXEDGLOBALSIZE + i
];
}
}
ExecuteCode(Prg);
if (Prg.GlobalData.Count > RarVM.VM_FIXEDGLOBALSIZE)
{
if (ParentPrg.GlobalData.Count < Prg.GlobalData.Count)
{
ParentPrg.GlobalData.SetSize(Prg.GlobalData.Count);
}
for (var i = 0; i < Prg.GlobalData.Count - RarVM.VM_FIXEDGLOBALSIZE; i++)
{
ParentPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i] = Prg.GlobalData[
RarVM.VM_FIXEDGLOBALSIZE + i
];
}
}
else
{
ParentPrg.GlobalData.Clear();
}
var FilteredDataOffset = Prg.FilteredDataOffset;
var FilteredDataSize = Prg.FilteredDataSize;
var FilteredData = ArrayPool<byte>.Shared.Rent(FilteredDataSize);
try
{
Array.Copy(
rarVM.Mem,
FilteredDataOffset,
FilteredData,
0,
FilteredDataSize
);
prgStack[I] = null;
while (I + 1 < prgStack.Count)
{
var NextFilter = prgStack[I + 1];
if (
NextFilter is null
|| NextFilter.BlockStart != BlockStart
|| NextFilter.BlockLength != FilteredDataSize
|| NextFilter.NextWindow
)
{
break;
}
rarVM.setMemory(0, FilteredData, 0, FilteredDataSize);
var pPrg = filters[NextFilter.ParentFilter].Program;
var NextPrg = NextFilter.Program;
if (pPrg.GlobalData.Count > RarVM.VM_FIXEDGLOBALSIZE)
{
NextPrg.GlobalData.SetSize(pPrg.GlobalData.Count);
for (
var i = 0;
i < pPrg.GlobalData.Count - RarVM.VM_FIXEDGLOBALSIZE;
i++
)
{
NextPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i] =
pPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i];
}
}
ExecuteCode(NextPrg);
if (NextPrg.GlobalData.Count > RarVM.VM_FIXEDGLOBALSIZE)
{
if (pPrg.GlobalData.Count < NextPrg.GlobalData.Count)
{
pPrg.GlobalData.SetSize(NextPrg.GlobalData.Count);
}
for (
var i = 0;
i < NextPrg.GlobalData.Count - RarVM.VM_FIXEDGLOBALSIZE;
i++
)
{
pPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i] =
NextPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i];
}
}
else
{
pPrg.GlobalData.Clear();
}
FilteredDataOffset = NextPrg.FilteredDataOffset;
FilteredDataSize = NextPrg.FilteredDataSize;
if (FilteredData.Length < FilteredDataSize)
{
ArrayPool<byte>.Shared.Return(FilteredData);
FilteredData = ArrayPool<byte>.Shared.Rent(FilteredDataSize);
}
for (var i = 0; i < FilteredDataSize; i++)
{
FilteredData[i] = NextPrg.GlobalData[FilteredDataOffset + i];
}
I++;
prgStack[I] = null;
}
await writeStream
.WriteAsync(FilteredData, 0, FilteredDataSize, cancellationToken)
.ConfigureAwait(false);
writtenFileSize += FilteredDataSize;
destUnpSize -= FilteredDataSize;
WrittenBorder = BlockEnd;
WriteSize = (unpPtr - WrittenBorder) & PackDef.MAXWINMASK;
}
finally
{
ArrayPool<byte>.Shared.Return(FilteredData);
}
}
else
{
for (var J = I; J < prgStack.Count; J++)
{
var filt = prgStack[J];
if (filt != null && filt.NextWindow)
{
filt.NextWindow = false;
}
}
wrPtr = WrittenBorder;
return;
}
}
}
await UnpWriteAreaAsync(WrittenBorder, unpPtr, cancellationToken).ConfigureAwait(false);
wrPtr = unpPtr;
}
private async System.Threading.Tasks.Task UnpWriteAreaAsync(
int startPtr,
int endPtr,
System.Threading.CancellationToken cancellationToken = default
)
{
if (endPtr < startPtr)
{
await UnpWriteDataAsync(
window,
startPtr,
-startPtr & PackDef.MAXWINMASK,
cancellationToken
)
.ConfigureAwait(false);
await UnpWriteDataAsync(window, 0, endPtr, cancellationToken).ConfigureAwait(false);
}
else
{
await UnpWriteDataAsync(window, startPtr, endPtr - startPtr, cancellationToken)
.ConfigureAwait(false);
}
}
private async System.Threading.Tasks.Task UnpWriteDataAsync(
byte[] data,
int offset,
int size,
System.Threading.CancellationToken cancellationToken = default
)
{
if (destUnpSize < 0)
{
return;
}
var writeSize = size;
if (writeSize > destUnpSize)
{
writeSize = (int)destUnpSize;
}
await writeStream
.WriteAsync(data, offset, writeSize, cancellationToken)
.ConfigureAwait(false);
writtenFileSize += size;
destUnpSize -= size;
}
private void CleanUp()
{
if (ppm != null)

View File

@@ -0,0 +1,162 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.Rar.UnpackV1.Decode;
namespace SharpCompress.Compressors.Rar.UnpackV1;
internal partial class Unpack
{
private async Task unpack15Async(bool solid, CancellationToken cancellationToken = default)
{
if (suspended)
{
unpPtr = wrPtr;
}
else
{
UnpInitData(solid);
oldUnpInitData(solid);
await unpReadBufAsync(cancellationToken).ConfigureAwait(false);
if (!solid)
{
initHuff();
unpPtr = 0;
}
else
{
unpPtr = wrPtr;
}
--destUnpSize;
}
if (destUnpSize >= 0)
{
getFlagsBuf();
FlagsCnt = 8;
}
while (destUnpSize >= 0)
{
unpPtr &= PackDef.MAXWINMASK;
if (
inAddr > readTop - 30
&& !await unpReadBufAsync(cancellationToken).ConfigureAwait(false)
)
{
break;
}
if (((wrPtr - unpPtr) & PackDef.MAXWINMASK) < 270 && wrPtr != unpPtr)
{
oldUnpWriteBuf();
if (suspended)
{
return;
}
}
if (StMode != 0)
{
huffDecode();
continue;
}
if (--FlagsCnt < 0)
{
getFlagsBuf();
FlagsCnt = 7;
}
if ((FlagBuf & 0x80) != 0)
{
FlagBuf <<= 1;
if (Nlzb > Nhfb)
{
longLZ();
}
else
{
huffDecode();
}
}
else
{
FlagBuf <<= 1;
if (--FlagsCnt < 0)
{
getFlagsBuf();
FlagsCnt = 7;
}
if ((FlagBuf & 0x80) != 0)
{
FlagBuf <<= 1;
if (Nlzb > Nhfb)
{
huffDecode();
}
else
{
longLZ();
}
}
else
{
FlagBuf <<= 1;
shortLZ();
}
}
}
oldUnpWriteBuf();
}
private async Task<bool> unpReadBufAsync(CancellationToken cancellationToken = default)
{
var dataSize = readTop - inAddr;
if (dataSize < 0)
{
return false;
}
if (inAddr > MAX_SIZE / 2)
{
if (dataSize > 0)
{
Array.Copy(InBuf, inAddr, InBuf, 0, dataSize);
}
inAddr = 0;
readTop = dataSize;
}
else
{
dataSize = readTop;
}
var readCode = await readStream
.ReadAsync(InBuf, dataSize, (MAX_SIZE - dataSize) & ~0xf, cancellationToken)
.ConfigureAwait(false);
if (readCode > 0)
{
readTop += readCode;
}
readBorder = readTop - 30;
return readCode != -1;
}
private async Task oldUnpWriteBufAsync(CancellationToken cancellationToken = default)
{
if (unpPtr < wrPtr)
{
await writeStream
.WriteAsync(window, wrPtr, -wrPtr & PackDef.MAXWINMASK, cancellationToken)
.ConfigureAwait(false);
await writeStream
.WriteAsync(window, 0, unpPtr, cancellationToken)
.ConfigureAwait(false);
}
else
{
await writeStream
.WriteAsync(window, wrPtr, unpPtr - wrPtr, cancellationToken)
.ConfigureAwait(false);
}
wrPtr = unpPtr;
}
}

View File

@@ -316,110 +316,6 @@ internal partial class Unpack
oldUnpWriteBuf();
}
private async System.Threading.Tasks.Task unpack15Async(
bool solid,
System.Threading.CancellationToken cancellationToken = default
)
{
if (suspended)
{
unpPtr = wrPtr;
}
else
{
UnpInitData(solid);
oldUnpInitData(solid);
await unpReadBufAsync(cancellationToken).ConfigureAwait(false);
if (!solid)
{
initHuff();
unpPtr = 0;
}
else
{
unpPtr = wrPtr;
}
--destUnpSize;
}
if (destUnpSize >= 0)
{
getFlagsBuf();
FlagsCnt = 8;
}
while (destUnpSize >= 0)
{
unpPtr &= PackDef.MAXWINMASK;
if (
inAddr > readTop - 30
&& !await unpReadBufAsync(cancellationToken).ConfigureAwait(false)
)
{
break;
}
if (((wrPtr - unpPtr) & PackDef.MAXWINMASK) < 270 && wrPtr != unpPtr)
{
await oldUnpWriteBufAsync(cancellationToken).ConfigureAwait(false);
if (suspended)
{
return;
}
}
if (StMode != 0)
{
huffDecode();
continue;
}
if (--FlagsCnt < 0)
{
getFlagsBuf();
FlagsCnt = 7;
}
if ((FlagBuf & 0x80) != 0)
{
FlagBuf <<= 1;
if (Nlzb > Nhfb)
{
longLZ();
}
else
{
huffDecode();
}
}
else
{
FlagBuf <<= 1;
if (--FlagsCnt < 0)
{
getFlagsBuf();
FlagsCnt = 7;
}
if ((FlagBuf & 0x80) != 0)
{
FlagBuf <<= 1;
if (Nlzb > Nhfb)
{
huffDecode();
}
else
{
longLZ();
}
}
else
{
FlagBuf <<= 1;
shortLZ();
}
}
}
await oldUnpWriteBufAsync(cancellationToken).ConfigureAwait(false);
}
private bool unpReadBuf()
{
var dataSize = readTop - inAddr;
@@ -455,40 +351,6 @@ internal partial class Unpack
return (readCode != -1);
}
private async System.Threading.Tasks.Task<bool> unpReadBufAsync(
System.Threading.CancellationToken cancellationToken = default
)
{
var dataSize = readTop - inAddr;
if (dataSize < 0)
{
return (false);
}
if (inAddr > MAX_SIZE / 2)
{
if (dataSize > 0)
{
Array.Copy(InBuf, inAddr, InBuf, 0, dataSize);
}
inAddr = 0;
readTop = dataSize;
}
else
{
dataSize = readTop;
}
var readCode = await readStream
.ReadAsync(InBuf, dataSize, (MAX_SIZE - dataSize) & ~0xf, cancellationToken)
.ConfigureAwait(false);
if (readCode > 0)
{
readTop += readCode;
}
readBorder = readTop - 30;
return (readCode != -1);
}
private int getShortLen1(int pos) => pos == 1 ? Buf60 + 3 : ShortLen1[pos];
private int getShortLen2(int pos) => pos == 3 ? Buf60 + 3 : ShortLen2[pos];
@@ -952,26 +814,4 @@ internal partial class Unpack
}
wrPtr = unpPtr;
}
private async System.Threading.Tasks.Task oldUnpWriteBufAsync(
System.Threading.CancellationToken cancellationToken = default
)
{
if (unpPtr < wrPtr)
{
await writeStream
.WriteAsync(window, wrPtr, -wrPtr & PackDef.MAXWINMASK, cancellationToken)
.ConfigureAwait(false);
await writeStream
.WriteAsync(window, 0, unpPtr, cancellationToken)
.ConfigureAwait(false);
}
else
{
await writeStream
.WriteAsync(window, wrPtr, unpPtr - wrPtr, cancellationToken)
.ConfigureAwait(false);
}
wrPtr = unpPtr;
}
}

View File

@@ -0,0 +1,275 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.Rar.UnpackV1.Decode;
namespace SharpCompress.Compressors.Rar.UnpackV1;
internal partial class Unpack
{
private async Task unpack20Async(bool solid, CancellationToken cancellationToken = default)
{
int Bits;
if (suspended)
{
unpPtr = wrPtr;
}
else
{
UnpInitData(solid);
if (!await unpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return;
}
if (!solid)
{
if (!await ReadTables20Async(cancellationToken).ConfigureAwait(false))
{
return;
}
}
--destUnpSize;
}
while (destUnpSize >= 0)
{
unpPtr &= PackDef.MAXWINMASK;
if (inAddr > readTop - 30)
{
if (!await unpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
break;
}
}
if (((wrPtr - unpPtr) & PackDef.MAXWINMASK) < 270 && wrPtr != unpPtr)
{
oldUnpWriteBuf();
if (suspended)
{
return;
}
}
if (UnpAudioBlock != 0)
{
var AudioNumber = this.decodeNumber(MD[UnpCurChannel]);
if (AudioNumber == 256)
{
if (!await ReadTables20Async(cancellationToken).ConfigureAwait(false))
{
break;
}
continue;
}
window[unpPtr++] = DecodeAudio(AudioNumber);
if (++UnpCurChannel == UnpChannels)
{
UnpCurChannel = 0;
}
--destUnpSize;
continue;
}
var Number = this.decodeNumber(LD);
if (Number < 256)
{
window[unpPtr++] = (byte)Number;
--destUnpSize;
continue;
}
if (Number > 269)
{
var Length = LDecode[Number -= 270] + 3;
if ((Bits = LBits[Number]) > 0)
{
Length += Utility.URShift(GetBits(), (16 - Bits));
AddBits(Bits);
}
var DistNumber = this.decodeNumber(DD);
var Distance = DDecode[DistNumber] + 1;
if ((Bits = DBits[DistNumber]) > 0)
{
Distance += Utility.URShift(GetBits(), (16 - Bits));
AddBits(Bits);
}
if (Distance >= 0x2000)
{
Length++;
if (Distance >= 0x40000L)
{
Length++;
}
}
CopyString20(Length, Distance);
continue;
}
if (Number == 269)
{
if (!await ReadTables20Async(cancellationToken).ConfigureAwait(false))
{
break;
}
continue;
}
if (Number == 256)
{
CopyString20(lastLength, lastDist);
continue;
}
if (Number < 261)
{
var Distance = oldDist[(oldDistPtr - (Number - 256)) & 3];
var LengthNumber = this.decodeNumber(RD);
var Length = LDecode[LengthNumber] + 2;
if ((Bits = LBits[LengthNumber]) > 0)
{
Length += Utility.URShift(GetBits(), (16 - Bits));
AddBits(Bits);
}
if (Distance >= 0x101)
{
Length++;
if (Distance >= 0x2000)
{
Length++;
if (Distance >= 0x40000)
{
Length++;
}
}
}
CopyString20(Length, Distance);
continue;
}
if (Number < 270)
{
var Distance = SDDecode[Number -= 261] + 1;
if ((Bits = SDBits[Number]) > 0)
{
Distance += Utility.URShift(GetBits(), (16 - Bits));
AddBits(Bits);
}
CopyString20(2, Distance);
}
}
ReadLastTables();
oldUnpWriteBuf();
}
private async Task<bool> ReadTables20Async(CancellationToken cancellationToken = default)
{
byte[] BitLength = new byte[PackDef.BC20];
byte[] Table = new byte[PackDef.MC20 * 4];
int TableSize,
N,
I;
if (inAddr > readTop - 25)
{
if (!await unpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return false;
}
}
var BitField = GetBits();
UnpAudioBlock = (BitField & 0x8000);
if (0 == (BitField & 0x4000))
{
new Span<byte>(UnpOldTable20).Clear();
}
AddBits(2);
if (UnpAudioBlock != 0)
{
UnpChannels = ((Utility.URShift(BitField, 12)) & 3) + 1;
if (UnpCurChannel >= UnpChannels)
{
UnpCurChannel = 0;
}
AddBits(2);
TableSize = PackDef.MC20 * UnpChannels;
}
else
{
TableSize = PackDef.NC20 + PackDef.DC20 + PackDef.RC20;
}
for (I = 0; I < PackDef.BC20; I++)
{
BitLength[I] = (byte)(Utility.URShift(GetBits(), 12));
AddBits(4);
}
UnpackUtility.makeDecodeTables(BitLength, 0, BD, PackDef.BC20);
I = 0;
while (I < TableSize)
{
if (inAddr > readTop - 5)
{
if (!await unpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return false;
}
}
var Number = this.decodeNumber(BD);
if (Number < 16)
{
Table[I] = (byte)((Number + UnpOldTable20[I]) & 0xf);
I++;
}
else if (Number == 16)
{
N = (Utility.URShift(GetBits(), 14)) + 3;
AddBits(2);
while (N-- > 0 && I < TableSize)
{
Table[I] = Table[I - 1];
I++;
}
}
else
{
if (Number == 17)
{
N = (Utility.URShift(GetBits(), 13)) + 3;
AddBits(3);
}
else
{
N = (Utility.URShift(GetBits(), 9)) + 11;
AddBits(7);
}
while (N-- > 0 && I < TableSize)
{
Table[I++] = 0;
}
}
}
if (inAddr > readTop)
{
return true;
}
if (UnpAudioBlock != 0)
{
for (I = 0; I < UnpChannels; I++)
{
UnpackUtility.makeDecodeTables(Table, I * PackDef.MC20, MD[I], PackDef.MC20);
}
}
else
{
UnpackUtility.makeDecodeTables(Table, 0, LD, PackDef.NC20);
UnpackUtility.makeDecodeTables(Table, PackDef.NC20, DD, PackDef.DC20);
UnpackUtility.makeDecodeTables(Table, PackDef.NC20 + PackDef.DC20, RD, PackDef.RC20);
}
for (var i = 0; i < UnpOldTable20.Length; i++)
{
UnpOldTable20[i] = Table[i];
}
return true;
}
}

View File

@@ -368,163 +368,6 @@ internal partial class Unpack
oldUnpWriteBuf();
}
private async System.Threading.Tasks.Task unpack20Async(
bool solid,
System.Threading.CancellationToken cancellationToken = default
)
{
int Bits;
if (suspended)
{
unpPtr = wrPtr;
}
else
{
UnpInitData(solid);
if (!await unpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return;
}
if (!solid)
{
if (!await ReadTables20Async(cancellationToken).ConfigureAwait(false))
{
return;
}
}
--destUnpSize;
}
while (destUnpSize >= 0)
{
unpPtr &= PackDef.MAXWINMASK;
if (inAddr > readTop - 30)
{
if (!await unpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
break;
}
}
if (((wrPtr - unpPtr) & PackDef.MAXWINMASK) < 270 && wrPtr != unpPtr)
{
await oldUnpWriteBufAsync(cancellationToken).ConfigureAwait(false);
if (suspended)
{
return;
}
}
if (UnpAudioBlock != 0)
{
var AudioNumber = this.decodeNumber(MD[UnpCurChannel]);
if (AudioNumber == 256)
{
if (!await ReadTables20Async(cancellationToken).ConfigureAwait(false))
{
break;
}
continue;
}
window[unpPtr++] = DecodeAudio(AudioNumber);
if (++UnpCurChannel == UnpChannels)
{
UnpCurChannel = 0;
}
--destUnpSize;
continue;
}
var Number = this.decodeNumber(LD);
if (Number < 256)
{
window[unpPtr++] = (byte)Number;
--destUnpSize;
continue;
}
if (Number > 269)
{
var Length = LDecode[Number -= 270] + 3;
if ((Bits = LBits[Number]) > 0)
{
Length += Utility.URShift(GetBits(), (16 - Bits));
AddBits(Bits);
}
var DistNumber = this.decodeNumber(DD);
var Distance = DDecode[DistNumber] + 1;
if ((Bits = DBits[DistNumber]) > 0)
{
Distance += Utility.URShift(GetBits(), (16 - Bits));
AddBits(Bits);
}
if (Distance >= 0x2000)
{
Length++;
if (Distance >= 0x40000L)
{
Length++;
}
}
CopyString20(Length, Distance);
continue;
}
if (Number == 269)
{
if (!await ReadTables20Async(cancellationToken).ConfigureAwait(false))
{
break;
}
continue;
}
if (Number == 256)
{
CopyString20(lastLength, lastDist);
continue;
}
if (Number < 261)
{
var Distance = oldDist[(oldDistPtr - (Number - 256)) & 3];
var LengthNumber = this.decodeNumber(RD);
var Length = LDecode[LengthNumber] + 2;
if ((Bits = LBits[LengthNumber]) > 0)
{
Length += Utility.URShift(GetBits(), (16 - Bits));
AddBits(Bits);
}
if (Distance >= 0x101)
{
Length++;
if (Distance >= 0x2000)
{
Length++;
if (Distance >= 0x40000)
{
Length++;
}
}
}
CopyString20(Length, Distance);
continue;
}
if (Number < 270)
{
var Distance = SDDecode[Number -= 261] + 1;
if ((Bits = SDBits[Number]) > 0)
{
Distance += Utility.URShift(GetBits(), (16 - Bits));
AddBits(Bits);
}
CopyString20(2, Distance);
}
}
ReadLastTables();
await oldUnpWriteBufAsync(cancellationToken).ConfigureAwait(false);
}
private void CopyString20(int Length, int Distance)
{
lastDist = oldDist[oldDistPtr++ & 3] = Distance;
@@ -691,120 +534,6 @@ internal partial class Unpack
return (true);
}
private async System.Threading.Tasks.Task<bool> ReadTables20Async(
System.Threading.CancellationToken cancellationToken = default
)
{
byte[] BitLength = new byte[PackDef.BC20];
byte[] Table = new byte[PackDef.MC20 * 4];
int TableSize,
N,
I;
if (inAddr > readTop - 25)
{
if (!await unpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return (false);
}
}
var BitField = GetBits();
UnpAudioBlock = (BitField & 0x8000);
if (0 == (BitField & 0x4000))
{
new Span<byte>(UnpOldTable20).Clear();
}
AddBits(2);
if (UnpAudioBlock != 0)
{
UnpChannels = ((Utility.URShift(BitField, 12)) & 3) + 1;
if (UnpCurChannel >= UnpChannels)
{
UnpCurChannel = 0;
}
AddBits(2);
TableSize = PackDef.MC20 * UnpChannels;
}
else
{
TableSize = PackDef.NC20 + PackDef.DC20 + PackDef.RC20;
}
for (I = 0; I < PackDef.BC20; I++)
{
BitLength[I] = (byte)(Utility.URShift(GetBits(), 12));
AddBits(4);
}
UnpackUtility.makeDecodeTables(BitLength, 0, BD, PackDef.BC20);
I = 0;
while (I < TableSize)
{
if (inAddr > readTop - 5)
{
if (!await unpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return (false);
}
}
var Number = this.decodeNumber(BD);
if (Number < 16)
{
Table[I] = (byte)((Number + UnpOldTable20[I]) & 0xf);
I++;
}
else if (Number == 16)
{
N = (Utility.URShift(GetBits(), 14)) + 3;
AddBits(2);
while (N-- > 0 && I < TableSize)
{
Table[I] = Table[I - 1];
I++;
}
}
else
{
if (Number == 17)
{
N = (Utility.URShift(GetBits(), 13)) + 3;
AddBits(3);
}
else
{
N = (Utility.URShift(GetBits(), 9)) + 11;
AddBits(7);
}
while (N-- > 0 && I < TableSize)
{
Table[I++] = 0;
}
}
}
if (inAddr > readTop)
{
return (true);
}
if (UnpAudioBlock != 0)
{
for (I = 0; I < UnpChannels; I++)
{
UnpackUtility.makeDecodeTables(Table, I * PackDef.MC20, MD[I], PackDef.MC20);
}
}
else
{
UnpackUtility.makeDecodeTables(Table, 0, LD, PackDef.NC20);
UnpackUtility.makeDecodeTables(Table, PackDef.NC20, DD, PackDef.DC20);
UnpackUtility.makeDecodeTables(Table, PackDef.NC20 + PackDef.DC20, RD, PackDef.RC20);
}
for (var i = 0; i < UnpOldTable20.Length; i++)
{
UnpOldTable20[i] = Table[i];
}
return (true);
}
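ReadTables20Async above expands run-length-coded Huffman bit lengths: symbols below 16 are 4-bit deltas against the previous table, 16 repeats the previous entry, and 17 and 18 emit zero runs. A condensed sketch of that expansion; nextSymbol and takeBits are hypothetical stand-ins for decodeNumber and GetBits/AddBits:

using System;

internal static class BitLengthRleSketch
{
    public static void Expand(byte[] table, byte[] oldTable, Func<uint> nextSymbol, Func<int, uint> takeBits)
    {
        for (var i = 0; i < table.Length; )
        {
            var code = nextSymbol();
            if (code < 16)
            {
                table[i] = (byte)((code + oldTable[i]) & 0xF); // delta against the previous table
                i++;
            }
            else if (code == 16) // repeat the previous entry 3..6 times
            {
                var n = takeBits(2) + 3;
                // i > 0 guard added here; the newer decoder below rejects a repeat at position 0
                while (n-- > 0 && i > 0 && i < table.Length)
                {
                    table[i] = table[i - 1];
                    i++;
                }
            }
            else // codes 17 and 18: runs of 3..10 or 11..138 zero lengths
            {
                var n = code == 17 ? takeBits(3) + 3 : takeBits(7) + 11;
                while (n-- > 0 && i < table.Length)
                {
                    table[i++] = 0;
                }
            }
        }
    }
}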
private void unpInitData20(bool Solid)
{
if (!Solid)

@@ -0,0 +1,321 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.Rar.UnpackV1.Decode;
namespace SharpCompress.Compressors.Rar.UnpackV1;
internal partial class Unpack
{
private async Task<bool> UnpReadBufAsync(CancellationToken cancellationToken = default)
{
var DataSize = ReadTop - Inp.InAddr; // Data left to process.
if (DataSize < 0)
{
return false;
}
BlockHeader.BlockSize -= Inp.InAddr - BlockHeader.BlockStart;
if (Inp.InAddr > MAX_SIZE / 2)
{
if (DataSize > 0)
{
Array.Copy(InBuf, inAddr, InBuf, 0, DataSize);
}
Inp.InAddr = 0;
ReadTop = DataSize;
}
else
{
DataSize = ReadTop;
}
var ReadCode = 0;
if (MAX_SIZE != DataSize)
{
ReadCode = await readStream
.ReadAsync(InBuf, DataSize, MAX_SIZE - DataSize, cancellationToken)
.ConfigureAwait(false);
}
if (ReadCode > 0) // Can also be -1.
{
ReadTop += ReadCode;
}
ReadBorder = ReadTop - 30;
BlockHeader.BlockStart = Inp.InAddr;
if (BlockHeader.BlockSize != -1) // '-1' means not defined yet.
{
ReadBorder = Math.Min(ReadBorder, BlockHeader.BlockStart + BlockHeader.BlockSize - 1);
}
return ReadCode != -1;
}
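UnpReadBufAsync keeps unread input contiguous: once the read cursor passes the buffer midpoint, the unread tail slides to the front and the refill appends after it. The same bookkeeping, isolated as a sketch (field names approximate the ones above):

using System;

internal static class InputCompactionSketch
{
    // Returns the offset at which the next read should append.
    public static int Compact(byte[] inBuf, ref int inAddr, ref int readTop, int maxSize)
    {
        var dataSize = readTop - inAddr; // unread bytes left in the buffer
        if (inAddr > maxSize / 2)
        {
            if (dataSize > 0)
            {
                Array.Copy(inBuf, inAddr, inBuf, 0, dataSize); // slide the tail to the front
            }
            inAddr = 0;
            readTop = dataSize;
            return dataSize;
        }
        return readTop; // still room: append where the last read stopped
    }
}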
public async Task Unpack5Async(bool Solid, CancellationToken cancellationToken = default)
{
FileExtracted = true;
if (!Suspended)
{
UnpInitData(Solid);
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return;
}
// Check TablesRead5 to be sure that we read tables at least once
// regardless of the current block header TablePresent flag.
// So we can safely use these tables below.
if (
!await ReadBlockHeaderAsync(cancellationToken).ConfigureAwait(false)
|| !ReadTables()
|| !TablesRead5
)
{
return;
}
}
while (true)
{
UnpPtr &= MaxWinMask;
if (Inp.InAddr >= ReadBorder)
{
var FileDone = false;
// We use 'while' because, for an empty block containing only the Huffman table,
// we'll be on the block border once again just after reading the table.
while (
Inp.InAddr > BlockHeader.BlockStart + BlockHeader.BlockSize - 1
|| Inp.InAddr == BlockHeader.BlockStart + BlockHeader.BlockSize - 1
&& Inp.InBit >= BlockHeader.BlockBitSize
)
{
if (BlockHeader.LastBlockInFile)
{
FileDone = true;
break;
}
if (
!await ReadBlockHeaderAsync(cancellationToken).ConfigureAwait(false)
|| !ReadTables()
)
{
return;
}
}
if (FileDone || !await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
break;
}
}
if (
((WriteBorder - UnpPtr) & MaxWinMask) < PackDef.MAX_LZ_MATCH + 3
&& WriteBorder != UnpPtr
)
{
UnpWriteBuf();
if (WrittenFileSize > DestUnpSize)
{
return;
}
if (Suspended)
{
FileExtracted = false;
return;
}
}
var MainSlot = this.DecodeNumber(LD);
if (MainSlot < 256)
{
Window[UnpPtr++] = (byte)MainSlot;
continue;
}
if (MainSlot >= 262)
{
var Length = SlotToLength(MainSlot - 262);
int DBits;
uint Distance = 1,
DistSlot = this.DecodeNumber(DD);
if (DistSlot < 4)
{
DBits = 0;
Distance += DistSlot;
}
else
{
DBits = (int)((DistSlot / 2) - 1);
Distance += (2 | (DistSlot & 1)) << DBits;
}
if (DBits > 0)
{
if (DBits >= 4)
{
if (DBits > 4)
{
Distance += ((Inp.getbits() >> (36 - DBits)) << 4);
Inp.AddBits(DBits - 4);
}
var LowDist = this.DecodeNumber(LDD);
Distance += LowDist;
}
else
{
Distance += Inp.getbits() >> (32 - DBits);
Inp.AddBits(DBits);
}
}
if (Distance > 0x100)
{
Length++;
if (Distance > 0x2000)
{
Length++;
if (Distance > 0x40000)
{
Length++;
}
}
}
InsertOldDist(Distance);
LastLength = Length;
CopyString(Length, Distance);
continue;
}
if (MainSlot == 256)
{
var Filter = new UnpackFilter();
if (
!await ReadFilterAsync(Filter, cancellationToken).ConfigureAwait(false)
|| !AddFilter(Filter)
)
{
break;
}
continue;
}
if (MainSlot == 257)
{
if (LastLength != 0)
{
CopyString(LastLength, OldDistN(0));
}
continue;
}
if (MainSlot < 262)
{
var DistNum = (int)(MainSlot - 258);
var Distance = OldDistN(DistNum);
for (var I = DistNum; I > 0; I--)
{
SetOldDistN(I, OldDistN(I - 1));
}
SetOldDistN(0, Distance);
var LengthSlot = this.DecodeNumber(RD);
var Length = SlotToLength(LengthSlot);
LastLength = Length;
CopyString(Length, Distance);
continue;
}
}
UnpWriteBuf();
}
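Unpack5Async's main loop dispatches on MainSlot ranges. A hypothetical classifier that spells out the ranges used above (names are illustrative):

internal enum MainSlotKind
{
    Literal,        // 0..255: byte written straight into the window
    Filter,         // 256: a filter record follows (ReadFilterAsync + AddFilter)
    RepeatLast,     // 257: repeat LastLength at the most recent distance
    RecentDistance, // 258..261: reuse one of the four cached distances
    NewMatch,       // 262..: slot encodes a fresh length/distance pair
}

internal static class MainSlotSketch
{
    public static MainSlotKind Classify(uint mainSlot) =>
        mainSlot switch
        {
            < 256 => MainSlotKind.Literal,
            256 => MainSlotKind.Filter,
            257 => MainSlotKind.RepeatLast,
            < 262 => MainSlotKind.RecentDistance,
            _ => MainSlotKind.NewMatch,
        };
}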
private async Task<bool> ReadBlockHeaderAsync(CancellationToken cancellationToken = default)
{
Header.HeaderSize = 0;
if (!Inp.ExternalBuffer && Inp.InAddr > ReadTop - 7)
{
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return false;
}
}
Inp.faddbits((uint)((8 - Inp.InBit) & 7));
var BlockFlags = (byte)(Inp.fgetbits() >> 8);
Inp.faddbits(8);
var ByteCount = (uint)(((BlockFlags >> 3) & 3) + 1);
if (ByteCount == 4)
{
return false;
}
Header.HeaderSize = (int)(2 + ByteCount);
Header.BlockBitSize = (BlockFlags & 7) + 1;
var SavedCheckSum = (byte)(Inp.fgetbits() >> 8);
Inp.faddbits(8);
var BlockSize = 0;
for (var I = 0; I < ByteCount; I++)
{
BlockSize += (int)(Inp.fgetbits() >> 8) << (I * 8);
Inp.AddBits(8);
}
Header.BlockSize = BlockSize;
var CheckSum = (byte)(0x5a ^ BlockFlags ^ BlockSize ^ (BlockSize >> 8) ^ (BlockSize >> 16));
if (CheckSum != SavedCheckSum)
{
return false;
}
Header.BlockStart = Inp.InAddr;
ReadBorder = Math.Min(ReadBorder, Header.BlockStart + Header.BlockSize - 1);
Header.LastBlockInFile = (BlockFlags & 0x40) != 0;
Header.TablePresent = (BlockFlags & 0x80) != 0;
return true;
}
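The block header above protects its flags and size bytes with a one-byte checksum folded with 0x5A. Verifying it in isolation (a direct mirror of the CheckSum expression above):

internal static class BlockHeaderChecksumSketch
{
    public static bool Verify(byte blockFlags, int blockSize, byte savedCheckSum)
    {
        var sum = (byte)(0x5A ^ blockFlags ^ blockSize ^ (blockSize >> 8) ^ (blockSize >> 16));
        return sum == savedCheckSum;
    }
}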
private async Task<bool> ReadFilterAsync(
UnpackFilter Filter,
CancellationToken cancellationToken = default
)
{
if (!Inp.ExternalBuffer && Inp.InAddr > ReadTop - 16)
{
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return false;
}
}
Filter.uBlockStart = ReadFilterData();
Filter.uBlockLength = ReadFilterData();
if (Filter.BlockLength > MAX_FILTER_BLOCK_SIZE)
{
Filter.BlockLength = 0;
}
Filter.Type = (byte)(Inp.fgetbits() >> 13);
Inp.faddbits(3);
if (Filter.Type == (byte)FilterType.FILTER_DELTA)
{
Filter.Channels = (byte)((Inp.fgetbits() >> 11) + 1);
Inp.faddbits(5);
}
return true;
}
}

@@ -479,354 +479,6 @@ internal partial class Unpack
return ReadCode != -1;
}
private async System.Threading.Tasks.Task<bool> UnpReadBufAsync(
System.Threading.CancellationToken cancellationToken = default
)
{
var DataSize = ReadTop - Inp.InAddr; // Data left to process.
if (DataSize < 0)
{
return false;
}
BlockHeader.BlockSize -= Inp.InAddr - BlockHeader.BlockStart;
if (Inp.InAddr > MAX_SIZE / 2)
{
if (DataSize > 0)
{
Array.Copy(InBuf, inAddr, InBuf, 0, DataSize);
}
Inp.InAddr = 0;
ReadTop = DataSize;
}
else
{
DataSize = ReadTop;
}
var ReadCode = 0;
if (MAX_SIZE != DataSize)
{
ReadCode = await readStream
.ReadAsync(InBuf, DataSize, MAX_SIZE - DataSize, cancellationToken)
.ConfigureAwait(false);
}
if (ReadCode > 0) // Can also be -1.
{
ReadTop += ReadCode;
}
ReadBorder = ReadTop - 30;
BlockHeader.BlockStart = Inp.InAddr;
if (BlockHeader.BlockSize != -1) // '-1' means not defined yet.
{
ReadBorder = Math.Min(ReadBorder, BlockHeader.BlockStart + BlockHeader.BlockSize - 1);
}
return ReadCode != -1;
}
public async System.Threading.Tasks.Task Unpack5Async(
bool Solid,
System.Threading.CancellationToken cancellationToken = default
)
{
FileExtracted = true;
if (!Suspended)
{
UnpInitData(Solid);
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return;
}
// Check TablesRead5 to be sure that we read tables at least once
// regardless of the current block header TablePresent flag.
// So we can safely use these tables below.
if (
!await ReadBlockHeaderAsync(cancellationToken).ConfigureAwait(false)
|| !ReadTables()
|| !TablesRead5
)
{
return;
}
}
while (true)
{
UnpPtr &= MaxWinMask;
if (Inp.InAddr >= ReadBorder)
{
var FileDone = false;
// We use 'while' because, for an empty block containing only the Huffman table,
// we'll be on the block border once again just after reading the table.
while (
Inp.InAddr > BlockHeader.BlockStart + BlockHeader.BlockSize - 1
|| Inp.InAddr == BlockHeader.BlockStart + BlockHeader.BlockSize - 1
&& Inp.InBit >= BlockHeader.BlockBitSize
)
{
if (BlockHeader.LastBlockInFile)
{
FileDone = true;
break;
}
if (
!await ReadBlockHeaderAsync(cancellationToken).ConfigureAwait(false)
|| !ReadTables()
)
{
return;
}
}
if (FileDone || !await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
break;
}
}
if (
((WriteBorder - UnpPtr) & MaxWinMask) < PackDef.MAX_LZ_MATCH + 3
&& WriteBorder != UnpPtr
)
{
UnpWriteBuf();
if (WrittenFileSize > DestUnpSize)
{
return;
}
if (Suspended)
{
FileExtracted = false;
return;
}
}
//uint MainSlot=DecodeNumber(Inp,LD);
var MainSlot = this.DecodeNumber(LD);
if (MainSlot < 256)
{
// if (Fragmented)
// FragWindow[UnpPtr++]=(byte)MainSlot;
// else
Window[UnpPtr++] = (byte)MainSlot;
continue;
}
if (MainSlot >= 262)
{
var Length = SlotToLength(MainSlot - 262);
//uint DBits,Distance=1,DistSlot=DecodeNumber(Inp,&BlockTables.DD);
int DBits;
uint Distance = 1,
DistSlot = this.DecodeNumber(DD);
if (DistSlot < 4)
{
DBits = 0;
Distance += DistSlot;
}
else
{
//DBits=DistSlot/2 - 1;
DBits = (int)((DistSlot / 2) - 1);
Distance += (2 | (DistSlot & 1)) << DBits;
}
if (DBits > 0)
{
if (DBits >= 4)
{
if (DBits > 4)
{
Distance += ((Inp.getbits() >> (36 - DBits)) << 4);
Inp.AddBits(DBits - 4);
}
//uint LowDist=DecodeNumber(Inp,&BlockTables.LDD);
var LowDist = this.DecodeNumber(LDD);
Distance += LowDist;
}
else
{
Distance += Inp.getbits() >> (32 - DBits);
Inp.AddBits(DBits);
}
}
if (Distance > 0x100)
{
Length++;
if (Distance > 0x2000)
{
Length++;
if (Distance > 0x40000)
{
Length++;
}
}
}
InsertOldDist(Distance);
LastLength = Length;
// if (Fragmented)
// FragWindow.CopyString(Length,Distance,UnpPtr,MaxWinMask);
// else
CopyString(Length, Distance);
continue;
}
if (MainSlot == 256)
{
var Filter = new UnpackFilter();
if (
!await ReadFilterAsync(Filter, cancellationToken).ConfigureAwait(false)
|| !AddFilter(Filter)
)
{
break;
}
continue;
}
if (MainSlot == 257)
{
if (LastLength != 0)
// if (Fragmented)
// FragWindow.CopyString(LastLength,OldDist[0],UnpPtr,MaxWinMask);
// else
//CopyString(LastLength,OldDist[0]);
{
CopyString(LastLength, OldDistN(0));
}
continue;
}
if (MainSlot < 262)
{
//uint DistNum=MainSlot-258;
var DistNum = (int)(MainSlot - 258);
//uint Distance=OldDist[DistNum];
var Distance = OldDistN(DistNum);
//for (uint I=DistNum;I>0;I--)
for (var I = DistNum; I > 0; I--)
//OldDistN[I]=OldDistN(I-1);
{
SetOldDistN(I, OldDistN(I - 1));
}
//OldDistN[0]=Distance;
SetOldDistN(0, Distance);
var LengthSlot = this.DecodeNumber(RD);
var Length = SlotToLength(LengthSlot);
LastLength = Length;
// if (Fragmented)
// FragWindow.CopyString(Length,Distance,UnpPtr,MaxWinMask);
// else
CopyString(Length, Distance);
continue;
}
}
UnpWriteBuf();
}
private async System.Threading.Tasks.Task<bool> ReadBlockHeaderAsync(
System.Threading.CancellationToken cancellationToken = default
)
{
Header.HeaderSize = 0;
if (!Inp.ExternalBuffer && Inp.InAddr > ReadTop - 7)
{
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return false;
}
}
//Inp.faddbits((8-Inp.InBit)&7);
Inp.faddbits((uint)((8 - Inp.InBit) & 7));
var BlockFlags = (byte)(Inp.fgetbits() >> 8);
Inp.faddbits(8);
//uint ByteCount=((BlockFlags>>3)&3)+1; // Block size byte count.
var ByteCount = (uint)(((BlockFlags >> 3) & 3) + 1); // Block size byte count.
if (ByteCount == 4)
{
return false;
}
//Header.HeaderSize=2+ByteCount;
Header.HeaderSize = (int)(2 + ByteCount);
Header.BlockBitSize = (BlockFlags & 7) + 1;
var SavedCheckSum = (byte)(Inp.fgetbits() >> 8);
Inp.faddbits(8);
var BlockSize = 0;
//for (uint I=0;I<ByteCount;I++)
for (var I = 0; I < ByteCount; I++)
{
//BlockSize+=(Inp.fgetbits()>>8)<<(I*8);
BlockSize += (int)(Inp.fgetbits() >> 8) << (I * 8);
Inp.AddBits(8);
}
Header.BlockSize = BlockSize;
var CheckSum = (byte)(0x5a ^ BlockFlags ^ BlockSize ^ (BlockSize >> 8) ^ (BlockSize >> 16));
if (CheckSum != SavedCheckSum)
{
return false;
}
Header.BlockStart = Inp.InAddr;
ReadBorder = Math.Min(ReadBorder, Header.BlockStart + Header.BlockSize - 1);
Header.LastBlockInFile = (BlockFlags & 0x40) != 0;
Header.TablePresent = (BlockFlags & 0x80) != 0;
return true;
}
private async System.Threading.Tasks.Task<bool> ReadFilterAsync(
UnpackFilter Filter,
System.Threading.CancellationToken cancellationToken = default
)
{
if (!Inp.ExternalBuffer && Inp.InAddr > ReadTop - 16)
{
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return false;
}
}
Filter.uBlockStart = ReadFilterData();
Filter.uBlockLength = ReadFilterData();
if (Filter.BlockLength > MAX_FILTER_BLOCK_SIZE)
{
Filter.BlockLength = 0;
}
//Filter.Type=Inp.fgetbits()>>13;
Filter.Type = (byte)(Inp.fgetbits() >> 13);
Inp.faddbits(3);
if (Filter.Type == (byte)FilterType.FILTER_DELTA)
{
//Filter.Channels=(Inp.fgetbits()>>11)+1;
Filter.Channels = (byte)((Inp.fgetbits() >> 11) + 1);
Inp.faddbits(5);
}
return true;
}
//?
// void UnpWriteBuf()
// {

@@ -1,5 +1,7 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Rar.Headers;
using size_t = System.UInt32;
@@ -23,11 +25,11 @@ internal partial class Unpack : IRarUnpack
// NOTE: the caller checks for -1 on error; we throw instead.
readStream.Read(buf, offset, count);
private async System.Threading.Tasks.Task<int> UnpIO_UnpReadAsync(
private async Task<int> UnpIO_UnpReadAsync(
byte[] buf,
int offset,
int count,
System.Threading.CancellationToken cancellationToken = default
CancellationToken cancellationToken = default
) =>
// NOTE: the caller checks for -1 on error; we throw instead.
await readStream.ReadAsync(buf, offset, count, cancellationToken).ConfigureAwait(false);
@@ -35,11 +37,11 @@ internal partial class Unpack : IRarUnpack
private void UnpIO_UnpWrite(byte[] buf, size_t offset, uint count) =>
writeStream.Write(buf, checked((int)offset), checked((int)count));
private async System.Threading.Tasks.Task UnpIO_UnpWriteAsync(
private async Task UnpIO_UnpWriteAsync(
byte[] buf,
size_t offset,
uint count,
System.Threading.CancellationToken cancellationToken = default
CancellationToken cancellationToken = default
) =>
await writeStream
.WriteAsync(buf, checked((int)offset), checked((int)count), cancellationToken)
@@ -66,11 +68,11 @@ internal partial class Unpack : IRarUnpack
DoUnpack();
}
public async System.Threading.Tasks.Task DoUnpackAsync(
public async Task DoUnpackAsync(
FileHeader fileHeader,
Stream readStream,
Stream writeStream,
System.Threading.CancellationToken cancellationToken = default
CancellationToken cancellationToken = default
)
{
DestUnpSize = fileHeader.UncompressedSize;
@@ -97,9 +99,7 @@ internal partial class Unpack : IRarUnpack
}
}
public async System.Threading.Tasks.Task DoUnpackAsync(
System.Threading.CancellationToken cancellationToken = default
)
public async Task DoUnpackAsync(CancellationToken cancellationToken = default)
{
if (fileHeader.IsStored)
{
@@ -133,9 +133,7 @@ internal partial class Unpack : IRarUnpack
} while (!Suspended);
}
private async System.Threading.Tasks.Task UnstoreFileAsync(
System.Threading.CancellationToken cancellationToken = default
)
private async Task UnstoreFileAsync(CancellationToken cancellationToken = default)
{
var buffer = new byte[(int)Math.Min(0x10000, DestUnpSize)];
do

@@ -0,0 +1,100 @@
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Compressors.Rar.UnpackV2017;
internal partial class Unpack
{
private async Task Unpack15Async(bool Solid, CancellationToken cancellationToken = default)
{
UnpInitData(Solid);
UnpInitData15(Solid);
await UnpReadBufAsync(cancellationToken).ConfigureAwait(false);
if (!Solid)
{
InitHuff();
UnpPtr = 0;
}
else
{
UnpPtr = WrPtr;
}
--DestUnpSize;
if (DestUnpSize >= 0)
{
GetFlagsBuf();
FlagsCnt = 8;
}
while (DestUnpSize >= 0)
{
UnpPtr &= MaxWinMask;
if (
Inp.InAddr > ReadTop - 30
&& !await UnpReadBufAsync(cancellationToken).ConfigureAwait(false)
)
{
break;
}
if (((WrPtr - UnpPtr) & MaxWinMask) < 270 && WrPtr != UnpPtr)
{
UnpWriteBuf20();
}
if (StMode != 0)
{
HuffDecode();
continue;
}
if (--FlagsCnt < 0)
{
GetFlagsBuf();
FlagsCnt = 7;
}
if ((FlagBuf & 0x80) != 0)
{
FlagBuf <<= 1;
if (Nlzb > Nhfb)
{
LongLZ();
}
else
{
HuffDecode();
}
}
else
{
FlagBuf <<= 1;
if (--FlagsCnt < 0)
{
GetFlagsBuf();
FlagsCnt = 7;
}
if ((FlagBuf & 0x80) != 0)
{
FlagBuf <<= 1;
if (Nlzb > Nhfb)
{
HuffDecode();
}
else
{
LongLZ();
}
}
else
{
FlagBuf <<= 1;
ShortLZ();
}
}
}
UnpWriteBuf20();
}
}
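Unpack15Async consumes up to two flag bits per step to pick among three decoders, with the Nlzb/Nhfb adaptive counters deciding which decoder the set bit favors. The dispatch lifted out as a sketch; refillFlags stands in for GetFlagsBuf, and the returned string names the decoder the real loop would call:

using System;

internal static class FlagDispatchSketch
{
    public static string NextDecoder(ref byte flagBuf, ref int flagsCnt, uint nlzb, uint nhfb, Func<byte> refillFlags)
    {
        if (--flagsCnt < 0)
        {
            flagBuf = refillFlags();
            flagsCnt = 7;
        }
        var firstBit = (flagBuf & 0x80) != 0;
        flagBuf <<= 1;
        if (firstBit)
        {
            return nlzb > nhfb ? "LongLZ" : "HuffDecode";
        }
        if (--flagsCnt < 0)
        {
            flagBuf = refillFlags();
            flagsCnt = 7;
        }
        var secondBit = (flagBuf & 0x80) != 0;
        flagBuf <<= 1;
        return secondBit ? (nlzb > nhfb ? "HuffDecode" : "LongLZ") : "ShortLZ";
    }
}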

@@ -200,102 +200,6 @@ internal partial class Unpack
UnpWriteBuf20();
}
private async System.Threading.Tasks.Task Unpack15Async(
bool Solid,
System.Threading.CancellationToken cancellationToken = default
)
{
UnpInitData(Solid);
UnpInitData15(Solid);
await UnpReadBufAsync(cancellationToken).ConfigureAwait(false);
if (!Solid)
{
InitHuff();
UnpPtr = 0;
}
else
{
UnpPtr = WrPtr;
}
--DestUnpSize;
if (DestUnpSize >= 0)
{
GetFlagsBuf();
FlagsCnt = 8;
}
while (DestUnpSize >= 0)
{
UnpPtr &= MaxWinMask;
if (
Inp.InAddr > ReadTop - 30
&& !await UnpReadBufAsync(cancellationToken).ConfigureAwait(false)
)
{
break;
}
if (((WrPtr - UnpPtr) & MaxWinMask) < 270 && WrPtr != UnpPtr)
{
await UnpWriteBuf20Async(cancellationToken).ConfigureAwait(false);
}
if (StMode != 0)
{
HuffDecode();
continue;
}
if (--FlagsCnt < 0)
{
GetFlagsBuf();
FlagsCnt = 7;
}
if ((FlagBuf & 0x80) != 0)
{
FlagBuf <<= 1;
if (Nlzb > Nhfb)
{
LongLZ();
}
else
{
HuffDecode();
}
}
else
{
FlagBuf <<= 1;
if (--FlagsCnt < 0)
{
GetFlagsBuf();
FlagsCnt = 7;
}
if ((FlagBuf & 0x80) != 0)
{
FlagBuf <<= 1;
if (Nlzb > Nhfb)
{
HuffDecode();
}
else
{
LongLZ();
}
}
else
{
FlagBuf <<= 1;
ShortLZ();
}
}
}
await UnpWriteBuf20Async(cancellationToken).ConfigureAwait(false);
}
//#define GetShortLen1(pos) ((pos)==1 ? Buf60+3:ShortLen1[pos])
private uint GetShortLen1(uint pos) => ((pos) == 1 ? (uint)(Buf60 + 3) : ShortLen1[pos]);

@@ -0,0 +1,319 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using static SharpCompress.Compressors.Rar.UnpackV2017.PackDef;
using static SharpCompress.Compressors.Rar.UnpackV2017.Unpack.Unpack20Local;
namespace SharpCompress.Compressors.Rar.UnpackV2017;
internal partial class Unpack
{
private async Task Unpack20Async(bool Solid, CancellationToken cancellationToken = default)
{
uint Bits;
if (Suspended)
{
UnpPtr = WrPtr;
}
else
{
UnpInitData(Solid);
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return;
}
if (
(!Solid || !TablesRead2)
&& !await ReadTables20Async(cancellationToken).ConfigureAwait(false)
)
{
return;
}
--DestUnpSize;
}
while (DestUnpSize >= 0)
{
UnpPtr &= MaxWinMask;
if (Inp.InAddr > ReadTop - 30)
{
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
break;
}
}
if (((WrPtr - UnpPtr) & MaxWinMask) < 270 && WrPtr != UnpPtr)
{
UnpWriteBuf20();
if (Suspended)
{
return;
}
}
if (UnpAudioBlock)
{
var AudioNumber = DecodeNumber(Inp, MD[UnpCurChannel]);
if (AudioNumber == 256)
{
if (!await ReadTables20Async(cancellationToken).ConfigureAwait(false))
{
break;
}
continue;
}
Window[UnpPtr++] = DecodeAudio((int)AudioNumber);
if (++UnpCurChannel == UnpChannels)
{
UnpCurChannel = 0;
}
--DestUnpSize;
continue;
}
var Number = DecodeNumber(Inp, BlockTables.LD);
if (Number < 256)
{
Window[UnpPtr++] = (byte)Number;
--DestUnpSize;
continue;
}
if (Number > 269)
{
var Length = (uint)(LDecode[Number -= 270] + 3);
if ((Bits = LBits[Number]) > 0)
{
Length += Inp.getbits() >> (int)(16 - Bits);
Inp.addbits(Bits);
}
var DistNumber = DecodeNumber(Inp, BlockTables.DD);
var Distance = DDecode[DistNumber] + 1;
if ((Bits = DBits[DistNumber]) > 0)
{
Distance += Inp.getbits() >> (int)(16 - Bits);
Inp.addbits(Bits);
}
if (Distance >= 0x2000)
{
Length++;
if (Distance >= 0x40000L)
{
Length++;
}
}
CopyString20(Length, Distance);
continue;
}
if (Number == 269)
{
if (!await ReadTables20Async(cancellationToken).ConfigureAwait(false))
{
break;
}
continue;
}
if (Number == 256)
{
CopyString20(LastLength, LastDist);
continue;
}
if (Number < 261)
{
var Distance = OldDist[(OldDistPtr - (Number - 256)) & 3];
var LengthNumber = DecodeNumber(Inp, BlockTables.RD);
var Length = (uint)(LDecode[LengthNumber] + 2);
if ((Bits = LBits[LengthNumber]) > 0)
{
Length += Inp.getbits() >> (int)(16 - Bits);
Inp.addbits(Bits);
}
if (Distance >= 0x101)
{
Length++;
if (Distance >= 0x2000)
{
Length++;
if (Distance >= 0x40000)
{
Length++;
}
}
}
CopyString20(Length, Distance);
continue;
}
if (Number < 270)
{
var Distance = (uint)(SDDecode[Number -= 261] + 1);
if ((Bits = SDBits[Number]) > 0)
{
Distance += Inp.getbits() >> (int)(16 - Bits);
Inp.addbits(Bits);
}
CopyString20(2, Distance);
continue;
}
}
ReadLastTables();
UnpWriteBuf20();
}
private async Task UnpWriteBuf20Async(CancellationToken cancellationToken = default)
{
if (UnpPtr != WrPtr)
{
UnpSomeRead = true;
}
if (UnpPtr < WrPtr)
{
await UnpIO_UnpWriteAsync(
Window,
WrPtr,
(uint)(-(int)WrPtr & MaxWinMask),
cancellationToken
)
.ConfigureAwait(false);
await UnpIO_UnpWriteAsync(Window, 0, UnpPtr, cancellationToken).ConfigureAwait(false);
UnpAllBuf = true;
}
else
{
await UnpIO_UnpWriteAsync(Window, WrPtr, UnpPtr - WrPtr, cancellationToken)
.ConfigureAwait(false);
}
WrPtr = UnpPtr;
}
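UnpWriteBuf20Async above handles window wrap-around: when the unpack cursor has lapped the write cursor, the tail segment is flushed first, then the head up to the unpack cursor. The same split, isolated as a sketch where write stands in for UnpIO_UnpWriteAsync:

using System;
using System.Threading.Tasks;

internal static class WindowFlushSketch
{
    public static async Task FlushAsync(byte[] window, uint wrPtr, uint unpPtr, Func<byte[], uint, uint, Task> write)
    {
        if (unpPtr < wrPtr)
        {
            await write(window, wrPtr, (uint)window.Length - wrPtr).ConfigureAwait(false); // tail: wrPtr..end
            await write(window, 0, unpPtr).ConfigureAwait(false); // head: 0..unpPtr
        }
        else
        {
            await write(window, wrPtr, unpPtr - wrPtr).ConfigureAwait(false); // contiguous range
        }
    }
}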
private async Task<bool> ReadTables20Async(CancellationToken cancellationToken = default)
{
byte[] BitLength = new byte[checked((int)BC20)];
byte[] Table = new byte[checked((int)MC20 * 4)];
if (Inp.InAddr > ReadTop - 25)
{
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return false;
}
}
var BitField = Inp.getbits();
UnpAudioBlock = (BitField & 0x8000) != 0;
if ((BitField & 0x4000) != 0)
{
Array.Clear(UnpOldTable20, 0, UnpOldTable20.Length);
}
Inp.addbits(2);
uint TableSize;
if (UnpAudioBlock)
{
UnpChannels = ((BitField >> 12) & 3) + 1;
if (UnpCurChannel >= UnpChannels)
{
UnpCurChannel = 0;
}
Inp.addbits(2);
TableSize = MC20 * UnpChannels;
}
else
{
TableSize = NC20 + DC20 + RC20;
}
for (int I = 0; I < checked((int)BC20); I++)
{
BitLength[I] = (byte)(Inp.getbits() >> 12);
Inp.addbits(4);
}
MakeDecodeTables(BitLength, 0, BlockTables.BD, BC20);
for (int I = 0; I < checked((int)TableSize); )
{
if (Inp.InAddr > ReadTop - 5)
{
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return false;
}
}
var Number = DecodeNumber(Inp, BlockTables.BD);
if (Number < 16)
{
Table[I] = (byte)((Number + UnpOldTable20[I]) & 0xf);
I++;
}
else if (Number == 16)
{
var N = (Inp.getbits() >> 14) + 3;
Inp.addbits(2);
if (I == 0)
{
return false; // We cannot have "repeat previous" code at the first position.
}
else
{
while (N-- > 0 && I < TableSize)
{
Table[I] = Table[I - 1];
I++;
}
}
}
else
{
uint N;
if (Number == 17)
{
N = (Inp.getbits() >> 13) + 3;
Inp.addbits(3);
}
else
{
N = (Inp.getbits() >> 9) + 11;
Inp.addbits(7);
}
while (N-- > 0 && I < TableSize)
{
Table[I++] = 0;
}
}
}
TablesRead2 = true;
if (Inp.InAddr > ReadTop)
{
return true;
}
if (UnpAudioBlock)
{
for (int I = 0; I < UnpChannels; I++)
{
MakeDecodeTables(Table, (int)(I * MC20), MD[I], MC20);
}
}
else
{
MakeDecodeTables(Table, 0, BlockTables.LD, NC20);
MakeDecodeTables(Table, (int)NC20, BlockTables.DD, DC20);
MakeDecodeTables(Table, (int)(NC20 + DC20), BlockTables.RD, RC20);
}
Array.Copy(Table, 0, this.UnpOldTable20, 0, UnpOldTable20.Length);
return true;
}
}
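ReadTables20Async carves per-purpose decode tables out of one shared byte array: three consecutive regions (literals, distances, repeats) for normal blocks, or MC20 codes per channel for audio blocks. The offsets made explicit, matching the MakeDecodeTables calls above (method and parameter names are illustrative):

using System.Collections.Generic;

internal static class TableLayoutSketch
{
    public static IEnumerable<(int Offset, int Count)> Regions(bool audioBlock, int channels, int nc20, int dc20, int rc20, int mc20)
    {
        if (audioBlock)
        {
            for (var ch = 0; ch < channels; ch++)
            {
                yield return (ch * mc20, mc20); // one MD table per channel
            }
            yield break;
        }
        yield return (0, nc20);           // LD: literal/length codes
        yield return (nc20, dc20);        // DD: distance codes
        yield return (nc20 + dc20, rc20); // RD: repeat codes
    }
}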

@@ -342,170 +342,6 @@ internal partial class Unpack
UnpWriteBuf20();
}
private async System.Threading.Tasks.Task Unpack20Async(
bool Solid,
System.Threading.CancellationToken cancellationToken = default
)
{
uint Bits;
if (Suspended)
{
UnpPtr = WrPtr;
}
else
{
UnpInitData(Solid);
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return;
}
if (
(!Solid || !TablesRead2)
&& !await ReadTables20Async(cancellationToken).ConfigureAwait(false)
)
{
return;
}
--DestUnpSize;
}
while (DestUnpSize >= 0)
{
UnpPtr &= MaxWinMask;
if (Inp.InAddr > ReadTop - 30)
{
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
break;
}
}
if (((WrPtr - UnpPtr) & MaxWinMask) < 270 && WrPtr != UnpPtr)
{
await UnpWriteBuf20Async(cancellationToken).ConfigureAwait(false);
if (Suspended)
{
return;
}
}
if (UnpAudioBlock)
{
var AudioNumber = DecodeNumber(Inp, MD[UnpCurChannel]);
if (AudioNumber == 256)
{
if (!await ReadTables20Async(cancellationToken).ConfigureAwait(false))
{
break;
}
continue;
}
Window[UnpPtr++] = DecodeAudio((int)AudioNumber);
if (++UnpCurChannel == UnpChannels)
{
UnpCurChannel = 0;
}
--DestUnpSize;
continue;
}
var Number = DecodeNumber(Inp, BlockTables.LD);
if (Number < 256)
{
Window[UnpPtr++] = (byte)Number;
--DestUnpSize;
continue;
}
if (Number > 269)
{
var Length = (uint)(LDecode[Number -= 270] + 3);
if ((Bits = LBits[Number]) > 0)
{
Length += Inp.getbits() >> (int)(16 - Bits);
Inp.addbits(Bits);
}
var DistNumber = DecodeNumber(Inp, BlockTables.DD);
var Distance = DDecode[DistNumber] + 1;
if ((Bits = DBits[DistNumber]) > 0)
{
Distance += Inp.getbits() >> (int)(16 - Bits);
Inp.addbits(Bits);
}
if (Distance >= 0x2000)
{
Length++;
if (Distance >= 0x40000L)
{
Length++;
}
}
CopyString20(Length, Distance);
continue;
}
if (Number == 269)
{
if (!await ReadTables20Async(cancellationToken).ConfigureAwait(false))
{
break;
}
continue;
}
if (Number == 256)
{
CopyString20(LastLength, LastDist);
continue;
}
if (Number < 261)
{
var Distance = OldDist[(OldDistPtr - (Number - 256)) & 3];
var LengthNumber = DecodeNumber(Inp, BlockTables.RD);
var Length = (uint)(LDecode[LengthNumber] + 2);
if ((Bits = LBits[LengthNumber]) > 0)
{
Length += Inp.getbits() >> (int)(16 - Bits);
Inp.addbits(Bits);
}
if (Distance >= 0x101)
{
Length++;
if (Distance >= 0x2000)
{
Length++;
if (Distance >= 0x40000)
{
Length++;
}
}
}
CopyString20(Length, Distance);
continue;
}
if (Number < 270)
{
var Distance = (uint)(SDDecode[Number -= 261] + 1);
if ((Bits = SDBits[Number]) > 0)
{
Distance += Inp.getbits() >> (int)(16 - Bits);
Inp.addbits(Bits);
}
CopyString20(2, Distance);
continue;
}
}
ReadLastTables();
await UnpWriteBuf20Async(cancellationToken).ConfigureAwait(false);
}
private void UnpWriteBuf20()
{
if (UnpPtr != WrPtr)
@@ -527,36 +363,6 @@ internal partial class Unpack
WrPtr = UnpPtr;
}
private async System.Threading.Tasks.Task UnpWriteBuf20Async(
System.Threading.CancellationToken cancellationToken = default
)
{
if (UnpPtr != WrPtr)
{
UnpSomeRead = true;
}
if (UnpPtr < WrPtr)
{
await UnpIO_UnpWriteAsync(
Window,
WrPtr,
(uint)(-(int)WrPtr & MaxWinMask),
cancellationToken
)
.ConfigureAwait(false);
await UnpIO_UnpWriteAsync(Window, 0, UnpPtr, cancellationToken).ConfigureAwait(false);
UnpAllBuf = true;
}
else
{
await UnpIO_UnpWriteAsync(Window, WrPtr, UnpPtr - WrPtr, cancellationToken)
.ConfigureAwait(false);
}
WrPtr = UnpPtr;
}
private bool ReadTables20()
{
Span<byte> BitLength = stackalloc byte[checked((int)BC20)];
@@ -677,130 +483,6 @@ internal partial class Unpack
return true;
}
private async System.Threading.Tasks.Task<bool> ReadTables20Async(
System.Threading.CancellationToken cancellationToken = default
)
{
byte[] BitLength = new byte[checked((int)BC20)];
byte[] Table = new byte[checked((int)MC20 * 4)];
if (Inp.InAddr > ReadTop - 25)
{
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return false;
}
}
var BitField = Inp.getbits();
UnpAudioBlock = (BitField & 0x8000) != 0;
if ((BitField & 0x4000) != 0)
{
Array.Clear(UnpOldTable20, 0, UnpOldTable20.Length);
}
Inp.addbits(2);
uint TableSize;
if (UnpAudioBlock)
{
UnpChannels = ((BitField >> 12) & 3) + 1;
if (UnpCurChannel >= UnpChannels)
{
UnpCurChannel = 0;
}
Inp.addbits(2);
TableSize = MC20 * UnpChannels;
}
else
{
TableSize = NC20 + DC20 + RC20;
}
for (int I = 0; I < checked((int)BC20); I++)
{
BitLength[I] = (byte)(Inp.getbits() >> 12);
Inp.addbits(4);
}
MakeDecodeTables(BitLength, 0, BlockTables.BD, BC20);
for (int I = 0; I < checked((int)TableSize); )
{
if (Inp.InAddr > ReadTop - 5)
{
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return false;
}
}
var Number = DecodeNumber(Inp, BlockTables.BD);
if (Number < 16)
{
Table[I] = (byte)((Number + UnpOldTable20[I]) & 0xF);
I++;
}
else if (Number < 18)
{
uint N;
if (Number == 16)
{
N = (Inp.getbits() >> 14) + 3;
Inp.addbits(2);
}
else
{
N = (Inp.getbits() >> 13) + 11;
Inp.addbits(3);
}
if (I == 0)
{
return false;
}
while (N-- > 0 && I < checked((int)TableSize))
{
Table[I] = Table[I - 1];
I++;
}
}
else
{
uint N;
if (Number == 18)
{
N = (Inp.getbits() >> 13) + 3;
Inp.addbits(3);
}
else
{
N = (Inp.getbits() >> 9) + 11;
Inp.addbits(7);
}
while (N-- > 0 && I < checked((int)TableSize))
{
Table[I++] = 0;
}
}
}
if (UnpAudioBlock)
{
for (int I = 0; I < UnpChannels; I++)
{
MakeDecodeTables(Table, (int)(I * MC20), MD[I], MC20);
}
}
else
{
MakeDecodeTables(Table, 0, BlockTables.LD, NC20);
MakeDecodeTables(Table, (int)NC20, BlockTables.DD, DC20);
MakeDecodeTables(Table, (int)(NC20 + DC20), BlockTables.RD, RC20);
}
Array.Copy(Table, 0, this.UnpOldTable20, 0, UnpOldTable20.Length);
return true;
}
private void ReadLastTables()
{
if (ReadTop >= Inp.InAddr + 5)

@@ -0,0 +1,709 @@
#nullable disable
using System;
using System.Threading;
using System.Threading.Tasks;
using static SharpCompress.Compressors.Rar.UnpackV2017.PackDef;
using static SharpCompress.Compressors.Rar.UnpackV2017.UnpackGlobal;
using size_t = System.UInt32;
namespace SharpCompress.Compressors.Rar.UnpackV2017;
internal partial class Unpack
{
private async Task Unpack5Async(bool Solid, CancellationToken cancellationToken = default)
{
FileExtracted = true;
if (!Suspended)
{
UnpInitData(Solid);
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return;
}
// Check TablesRead5 to be sure that we read tables at least once
// regardless of the current block header TablePresent flag.
// So we can safely use these tables below.
if (
!await ReadBlockHeaderAsync(Inp, cancellationToken).ConfigureAwait(false)
|| !await ReadTablesAsync(Inp, cancellationToken).ConfigureAwait(false)
|| !TablesRead5
)
{
return;
}
}
while (true)
{
UnpPtr &= MaxWinMask;
if (Inp.InAddr >= ReadBorder)
{
var FileDone = false;
// We use 'while' because, for an empty block containing only the Huffman table,
// we'll be on the block border once again just after reading the table.
while (
Inp.InAddr > BlockHeader.BlockStart + BlockHeader.BlockSize - 1
|| Inp.InAddr == BlockHeader.BlockStart + BlockHeader.BlockSize - 1
&& Inp.InBit >= BlockHeader.BlockBitSize
)
{
if (BlockHeader.LastBlockInFile)
{
FileDone = true;
break;
}
if (
!await ReadBlockHeaderAsync(Inp, cancellationToken).ConfigureAwait(false)
|| !await ReadTablesAsync(Inp, cancellationToken).ConfigureAwait(false)
)
{
return;
}
}
if (FileDone || !await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
break;
}
}
if (((WriteBorder - UnpPtr) & MaxWinMask) < MAX_LZ_MATCH + 3 && WriteBorder != UnpPtr)
{
await UnpWriteBufAsync(cancellationToken);
if (WrittenFileSize > DestUnpSize)
{
return;
}
if (Suspended)
{
FileExtracted = false;
return;
}
}
var MainSlot = DecodeNumber(Inp, BlockTables.LD);
if (MainSlot < 256)
{
if (Fragmented)
{
FragWindow[UnpPtr++] = (byte)MainSlot;
}
else
{
Window[UnpPtr++] = (byte)MainSlot;
}
continue;
}
if (MainSlot >= 262)
{
var Length = SlotToLength(Inp, MainSlot - 262);
uint DBits,
Distance = 1,
DistSlot = DecodeNumber(Inp, BlockTables.DD);
if (DistSlot < 4)
{
DBits = 0;
Distance += DistSlot;
}
else
{
DBits = (DistSlot / 2) - 1;
Distance += (2 | (DistSlot & 1)) << (int)DBits;
}
if (DBits > 0)
{
if (DBits >= 4)
{
if (DBits > 4)
{
Distance += ((Inp.getbits32() >> (int)(36 - DBits)) << 4);
Inp.addbits(DBits - 4);
}
var LowDist = DecodeNumber(Inp, BlockTables.LDD);
Distance += LowDist;
}
else
{
Distance += Inp.getbits32() >> (int)(32 - DBits);
Inp.addbits(DBits);
}
}
if (Distance > 0x100)
{
Length++;
if (Distance > 0x2000)
{
Length++;
if (Distance > 0x40000)
{
Length++;
}
}
}
InsertOldDist(Distance);
LastLength = Length;
if (Fragmented)
{
FragWindow.CopyString(Length, Distance, ref UnpPtr, MaxWinMask);
}
else
{
CopyString(Length, Distance);
}
continue;
}
if (MainSlot == 256)
{
var Filter = new UnpackFilter();
if (
!await ReadFilterAsync(Inp, Filter, cancellationToken).ConfigureAwait(false)
|| !AddFilter(Filter)
)
{
break;
}
continue;
}
if (MainSlot == 257)
{
if (LastLength != 0)
{
if (Fragmented)
{
FragWindow.CopyString(LastLength, OldDist[0], ref UnpPtr, MaxWinMask);
}
else
{
CopyString(LastLength, OldDist[0]);
}
}
continue;
}
if (MainSlot < 262)
{
var DistNum = MainSlot - 258;
var Distance = OldDist[DistNum];
for (var I = DistNum; I > 0; I--)
{
OldDist[I] = OldDist[I - 1];
}
OldDist[0] = Distance;
var LengthSlot = DecodeNumber(Inp, BlockTables.RD);
var Length = SlotToLength(Inp, LengthSlot);
LastLength = Length;
if (Fragmented)
{
FragWindow.CopyString(Length, Distance, ref UnpPtr, MaxWinMask);
}
else
{
CopyString(Length, Distance);
}
continue;
}
}
await UnpWriteBufAsync(cancellationToken);
}
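Distance slots above split into a direct range (slots 0..3) and an exponential range where the slot encodes a base distance plus DBits extra bits, the low four of which come from the LDD table when DBits >= 4. The base/extra-bit math in isolation, mirroring the DistSlot handling above:

internal static class DistSlotSketch
{
    public static (uint BaseDistance, int ExtraBits) Decode(uint distSlot)
    {
        if (distSlot < 4)
        {
            return (1 + distSlot, 0); // slots 0..3 are the distance itself
        }
        var dBits = (int)(distSlot / 2) - 1;
        var baseDistance = 1 + ((2u | (distSlot & 1)) << dBits);
        return (baseDistance, dBits);
    }
}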
private async Task<bool> ReadFilterAsync(
BitInput Inp,
UnpackFilter Filter,
CancellationToken cancellationToken = default
)
{
if (!Inp.ExternalBuffer && Inp.InAddr > ReadTop - 16)
{
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return false;
}
}
Filter.BlockStart = ReadFilterData(Inp);
Filter.BlockLength = ReadFilterData(Inp);
if (Filter.BlockLength > MAX_FILTER_BLOCK_SIZE)
{
Filter.BlockLength = 0;
}
Filter.Type = (byte)(Inp.fgetbits() >> 13);
Inp.faddbits(3);
if (Filter.Type == FILTER_DELTA)
{
Filter.Channels = (byte)((Inp.fgetbits() >> 11) + 1);
Inp.faddbits(5);
}
return true;
}
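ReadFilterData itself appears only truncated further below; its first visible line reads a 2-bit byte-count prefix. Assuming the payload bytes are then accumulated little-endian, matching the BlockSize loop in ReadBlockHeaderAsync, a sketch (the accumulation order is an assumption, not shown in this diff):

using System;

internal static class FilterDataSketch
{
    public static uint ReadFilterData(Func<int, uint> takeBits)
    {
        var byteCount = takeBits(2) + 1; // 1..4 payload bytes follow
        uint data = 0;
        for (var i = 0; i < byteCount; i++)
        {
            data += takeBits(8) << (i * 8); // little-endian accumulation (assumed)
        }
        return data;
    }
}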
private async Task<bool> UnpReadBufAsync(CancellationToken cancellationToken = default)
{
var DataSize = ReadTop - Inp.InAddr; // Data left to process.
if (DataSize < 0)
{
return false;
}
BlockHeader.BlockSize -= Inp.InAddr - BlockHeader.BlockStart;
if (Inp.InAddr > MAX_SIZE / 2)
{
if (DataSize > 0)
{
Buffer.BlockCopy(Inp.InBuf, Inp.InAddr, Inp.InBuf, 0, DataSize);
}
Inp.InAddr = 0;
ReadTop = DataSize;
}
else
{
DataSize = ReadTop;
}
var ReadCode = 0;
if (MAX_SIZE != DataSize)
{
ReadCode = await UnpIO_UnpReadAsync(
Inp.InBuf,
DataSize,
MAX_SIZE - DataSize,
cancellationToken
)
.ConfigureAwait(false);
}
if (ReadCode > 0) // Can also be -1.
{
ReadTop += ReadCode;
}
ReadBorder = ReadTop - 30;
BlockHeader.BlockStart = Inp.InAddr;
if (BlockHeader.BlockSize != -1) // '-1' means not defined yet.
{
ReadBorder = Math.Min(ReadBorder, BlockHeader.BlockStart + BlockHeader.BlockSize - 1);
}
return ReadCode != -1;
}
private async Task UnpWriteBufAsync(CancellationToken cancellationToken = default)
{
var WrittenBorder = WrPtr;
var FullWriteSize = (UnpPtr - WrittenBorder) & MaxWinMask;
var WriteSizeLeft = FullWriteSize;
var NotAllFiltersProcessed = false;
for (var I = 0; I < Filters.Count; I++)
{
var flt = Filters[I];
if (flt.Type == FILTER_NONE)
{
continue;
}
if (flt.NextWindow)
{
if (((flt.BlockStart - WrPtr) & MaxWinMask) <= FullWriteSize)
{
flt.NextWindow = false;
}
continue;
}
var BlockStart = flt.BlockStart;
var BlockLength = flt.BlockLength;
if (((BlockStart - WrittenBorder) & MaxWinMask) < WriteSizeLeft)
{
if (WrittenBorder != BlockStart)
{
await UnpWriteAreaAsync(WrittenBorder, BlockStart, cancellationToken)
.ConfigureAwait(false);
WrittenBorder = BlockStart;
WriteSizeLeft = (UnpPtr - WrittenBorder) & MaxWinMask;
}
if (BlockLength <= WriteSizeLeft)
{
if (BlockLength > 0)
{
var BlockEnd = (BlockStart + BlockLength) & MaxWinMask;
FilterSrcMemory = EnsureCapacity(
FilterSrcMemory,
checked((int)BlockLength)
);
var Mem = FilterSrcMemory;
if (BlockStart < BlockEnd || BlockEnd == 0)
{
if (Fragmented)
{
FragWindow.CopyData(Mem, 0, BlockStart, BlockLength);
}
else
{
Buffer.BlockCopy(Window, (int)BlockStart, Mem, 0, (int)BlockLength);
}
}
else
{
var FirstPartLength = MaxWinSize - BlockStart;
if (Fragmented)
{
FragWindow.CopyData(Mem, 0, BlockStart, FirstPartLength);
FragWindow.CopyData(Mem, FirstPartLength, 0, BlockEnd);
}
else
{
Buffer.BlockCopy(
Window,
(int)BlockStart,
Mem,
0,
(int)FirstPartLength
);
Buffer.BlockCopy(
Window,
0,
Mem,
(int)FirstPartLength,
(int)BlockEnd
);
}
}
var OutMem = ApplyFilter(Mem, BlockLength, flt);
Filters[I].Type = FILTER_NONE;
if (OutMem != null)
{
await UnpIO_UnpWriteAsync(OutMem, 0, BlockLength, cancellationToken)
.ConfigureAwait(false);
WrittenFileSize += BlockLength;
}
WrittenBorder = BlockEnd;
WriteSizeLeft = (UnpPtr - WrittenBorder) & MaxWinMask;
}
}
else
{
NotAllFiltersProcessed = true;
for (var J = I; J < Filters.Count; J++)
{
var fltj = Filters[J];
if (
fltj.Type != FILTER_NONE
&& fltj.NextWindow == false
&& ((fltj.BlockStart - WrPtr) & MaxWinMask) < FullWriteSize
)
{
fltj.NextWindow = true;
}
}
break;
}
}
}
var EmptyCount = 0;
for (var I = 0; I < Filters.Count; I++)
{
if (EmptyCount > 0)
{
Filters[I - EmptyCount] = Filters[I];
}
if (Filters[I].Type == FILTER_NONE)
{
EmptyCount++;
}
}
if (EmptyCount > 0)
{
Filters.RemoveRange(Filters.Count - EmptyCount, EmptyCount);
}
if (!NotAllFiltersProcessed)
{
await UnpWriteAreaAsync(WrittenBorder, UnpPtr, cancellationToken).ConfigureAwait(false);
WrPtr = UnpPtr;
}
WriteBorder = (UnpPtr + Math.Min(MaxWinSize, UNPACK_MAX_WRITE)) & MaxWinMask;
if (
WriteBorder == UnpPtr
|| WrPtr != UnpPtr
&& ((WrPtr - UnpPtr) & MaxWinMask) < ((WriteBorder - UnpPtr) & MaxWinMask)
)
{
WriteBorder = WrPtr;
}
}
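The tail of UnpWriteBufAsync compacts the filter list in place: survivors shift left over processed (FILTER_NONE) slots, and a single RemoveRange trims the tail. The same pattern generalized as a sketch, where the processed predicate plays the role of the Type == FILTER_NONE check:

using System;
using System.Collections.Generic;

internal static class FilterCompactionSketch
{
    public static void CompactProcessed<T>(List<T> items, Predicate<T> processed)
    {
        var emptyCount = 0;
        for (var i = 0; i < items.Count; i++)
        {
            if (emptyCount > 0)
            {
                items[i - emptyCount] = items[i]; // shift the survivor into the first free slot
            }
            if (processed(items[i]))
            {
                emptyCount++;
            }
        }
        if (emptyCount > 0)
        {
            items.RemoveRange(items.Count - emptyCount, emptyCount); // trim the tail in one call
        }
    }
}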
private async Task UnpWriteAreaAsync(
size_t StartPtr,
size_t EndPtr,
CancellationToken cancellationToken = default
)
{
if (EndPtr != StartPtr)
{
UnpSomeRead = true;
}
if (EndPtr < StartPtr)
{
UnpAllBuf = true;
}
if (Fragmented)
{
var SizeToWrite = (EndPtr - StartPtr) & MaxWinMask;
while (SizeToWrite > 0)
{
var BlockSize = FragWindow.GetBlockSize(StartPtr, SizeToWrite);
FragWindow.GetBuffer(StartPtr, out var __buffer, out var __offset);
await UnpWriteDataAsync(__buffer, __offset, BlockSize, cancellationToken)
.ConfigureAwait(false);
SizeToWrite -= BlockSize;
StartPtr = (StartPtr + BlockSize) & MaxWinMask;
}
}
else if (EndPtr < StartPtr)
{
await UnpWriteDataAsync(Window, StartPtr, MaxWinSize - StartPtr, cancellationToken)
.ConfigureAwait(false);
await UnpWriteDataAsync(Window, 0, EndPtr, cancellationToken).ConfigureAwait(false);
}
else
{
await UnpWriteDataAsync(Window, StartPtr, EndPtr - StartPtr, cancellationToken)
.ConfigureAwait(false);
}
}
private async Task UnpWriteDataAsync(
byte[] Data,
size_t offset,
size_t Size,
CancellationToken cancellationToken = default
)
{
if (WrittenFileSize >= DestUnpSize)
{
return;
}
var WriteSize = Size;
var LeftToWrite = DestUnpSize - WrittenFileSize;
if (WriteSize > LeftToWrite)
{
WriteSize = (size_t)LeftToWrite;
}
await UnpIO_UnpWriteAsync(Data, offset, WriteSize, cancellationToken).ConfigureAwait(false);
WrittenFileSize += Size;
}
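UnpWriteDataAsync clamps each write to the bytes still owed, yet advances WrittenFileSize by the full requested size, so the WrittenFileSize > DestUnpSize check in the main loop above can fire and end extraction. The same accounting as a sketch, with the counter returned instead of held in a field:

using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;

internal static class ClampedWriteSketch
{
    public static async Task<long> WriteAsync(Stream dest, byte[] data, int offset, long size, long written, long destSize, CancellationToken ct = default)
    {
        if (written >= destSize)
        {
            return written; // everything owed has already been produced
        }
        var writeSize = Math.Min(size, destSize - written);
        await dest.WriteAsync(data, offset, (int)writeSize, ct).ConfigureAwait(false);
        return written + size; // full size, matching the field update above
    }
}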
private async Task<bool> ReadBlockHeaderAsync(
BitInput Inp,
CancellationToken cancellationToken = default
)
{
BlockHeader.HeaderSize = 0;
if (!Inp.ExternalBuffer && Inp.InAddr > ReadTop - 7)
{
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return false;
}
}
Inp.faddbits((uint)((8 - Inp.InBit) & 7));
var BlockFlags = (byte)(Inp.fgetbits() >> 8);
Inp.faddbits(8);
var ByteCount = (uint)(((BlockFlags >> 3) & 3) + 1); // Block size byte count.
if (ByteCount == 4)
{
return false;
}
BlockHeader.HeaderSize = (int)(2 + ByteCount);
BlockHeader.BlockBitSize = (BlockFlags & 7) + 1;
var SavedCheckSum = (byte)(Inp.fgetbits() >> 8);
Inp.faddbits(8);
var BlockSize = 0;
for (uint I = 0; I < ByteCount; I++)
{
BlockSize += (int)((Inp.fgetbits() >> 8) << (int)(I * 8));
Inp.addbits(8);
}
BlockHeader.BlockSize = BlockSize;
var CheckSum = (byte)(0x5a ^ BlockFlags ^ BlockSize ^ (BlockSize >> 8) ^ (BlockSize >> 16));
if (CheckSum != SavedCheckSum)
{
return false;
}
BlockHeader.BlockStart = Inp.InAddr;
ReadBorder = Math.Min(ReadBorder, BlockHeader.BlockStart + BlockHeader.BlockSize - 1);
BlockHeader.LastBlockInFile = (BlockFlags & 0x40) != 0;
BlockHeader.TablePresent = (BlockFlags & 0x80) != 0;
return true;
}
private async Task<bool> ReadTablesAsync(
BitInput Inp,
CancellationToken cancellationToken = default
)
{
if (!BlockHeader.TablePresent)
{
return true;
}
if (!Inp.ExternalBuffer && Inp.InAddr > ReadTop - 25)
{
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return false;
}
}
var BitLength = new byte[checked((int)BC)];
for (int I = 0; I < BC; I++)
{
uint Length = (byte)(Inp.fgetbits() >> 12);
Inp.faddbits(4);
if (Length == 15)
{
uint ZeroCount = (byte)(Inp.fgetbits() >> 12);
Inp.faddbits(4);
if (ZeroCount == 0)
{
BitLength[I] = 15;
}
else
{
ZeroCount += 2;
while (ZeroCount-- > 0 && I < BitLength.Length)
{
BitLength[I++] = 0;
}
I--;
}
}
else
{
BitLength[I] = (byte)Length;
}
}
MakeDecodeTables(BitLength, 0, BlockTables.BD, BC);
var Table = new byte[checked((int)HUFF_TABLE_SIZE)];
const int TableSize = checked((int)HUFF_TABLE_SIZE);
for (int I = 0; I < TableSize; )
{
if (!Inp.ExternalBuffer && Inp.InAddr > ReadTop - 5)
{
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return false;
}
}
var Number = DecodeNumber(Inp, BlockTables.BD);
if (Number < 16)
{
Table[I] = (byte)Number;
I++;
}
else if (Number < 18)
{
uint N;
if (Number == 16)
{
N = (Inp.fgetbits() >> 13) + 3;
Inp.faddbits(3);
}
else
{
N = (Inp.fgetbits() >> 9) + 11;
Inp.faddbits(7);
}
if (I == 0)
{
// We cannot have "repeat previous" code at the first position.
// Multiple such codes would shift Inp position without changing I,
// which can lead to reading beyond the Inp boundary in multithreading
// mode, where Inp.ExternalBuffer disables the bounds check and we simply
// reserve enough buffer space to make such a check normally unnecessary.
return false;
}
else
{
while (N-- > 0 && I < TableSize)
{
Table[I] = Table[I - 1];
I++;
}
}
}
else
{
uint N;
if (Number == 18)
{
N = (Inp.fgetbits() >> 13) + 3;
Inp.faddbits(3);
}
else
{
N = (Inp.fgetbits() >> 9) + 11;
Inp.faddbits(7);
}
while (N-- > 0 && I < TableSize)
{
Table[I++] = 0;
}
}
}
TablesRead5 = true;
if (!Inp.ExternalBuffer && Inp.InAddr > ReadTop)
{
return false;
}
MakeDecodeTables(Table, 0, BlockTables.LD, NC);
MakeDecodeTables(Table, (int)NC, BlockTables.DD, DC);
MakeDecodeTables(Table, (int)(NC + DC), BlockTables.LDD, LDC);
MakeDecodeTables(Table, (int)(NC + DC + LDC), BlockTables.RD, RC);
return true;
}
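The 4-bit pre-table loop in ReadTablesAsync escapes the value 15: a following zero nibble means a literal 15, anything else means (count + 2) zero lengths. Isolated as a sketch, with takeBits standing in for fgetbits/faddbits:

using System;

internal static class PreTableSketch
{
    public static void Read(byte[] bitLength, Func<int, uint> takeBits)
    {
        for (var i = 0; i < bitLength.Length; i++)
        {
            var length = takeBits(4);
            if (length != 15)
            {
                bitLength[i] = (byte)length;
                continue;
            }
            var zeroCount = takeBits(4);
            if (zeroCount == 0)
            {
                bitLength[i] = 15; // escaped literal 15
                continue;
            }
            zeroCount += 2;
            while (zeroCount-- > 0 && i < bitLength.Length)
            {
                bitLength[i++] = 0; // run of zero lengths
            }
            i--; // compensate the for-loop increment
        }
    }
}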
}

@@ -24,11 +24,7 @@ internal partial class Unpack
// Check TablesRead5 to be sure that we read tables at least once
// regardless of the current block header TablePresent flag.
// So we can safely use these tables below.
if (
!ReadBlockHeader(Inp, ref BlockHeader)
|| !ReadTables(Inp, ref BlockHeader, ref BlockTables)
|| !TablesRead5
)
if (!ReadBlockHeader(Inp) || !ReadTables(Inp) || !TablesRead5)
{
return;
}
@@ -55,10 +51,7 @@ internal partial class Unpack
FileDone = true;
break;
}
if (
!ReadBlockHeader(Inp, ref BlockHeader)
|| !ReadTables(Inp, ref BlockHeader, ref BlockTables)
)
if (!ReadBlockHeader(Inp) || !ReadTables(Inp))
{
return;
}
@@ -216,180 +209,6 @@ internal partial class Unpack
UnpWriteBuf();
}
private async System.Threading.Tasks.Task Unpack5Async(
bool Solid,
System.Threading.CancellationToken cancellationToken = default
)
{
FileExtracted = true;
if (!Suspended)
{
UnpInitData(Solid);
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return;
}
if (
!ReadBlockHeader(Inp, ref BlockHeader)
|| !ReadTables(Inp, ref BlockHeader, ref BlockTables)
|| !TablesRead5
)
{
return;
}
}
while (true)
{
UnpPtr &= MaxWinMask;
if (Inp.InAddr >= ReadBorder)
{
var FileDone = false;
while (
Inp.InAddr > BlockHeader.BlockStart + BlockHeader.BlockSize - 1
|| Inp.InAddr == BlockHeader.BlockStart + BlockHeader.BlockSize - 1
&& Inp.InBit >= BlockHeader.BlockBitSize
)
{
if (BlockHeader.LastBlockInFile)
{
FileDone = true;
break;
}
if (
!ReadBlockHeader(Inp, ref BlockHeader)
|| !ReadTables(Inp, ref BlockHeader, ref BlockTables)
)
{
return;
}
}
if (FileDone || !await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
break;
}
}
if (((WriteBorder - UnpPtr) & MaxWinMask) < MAX_LZ_MATCH + 3 && WriteBorder != UnpPtr)
{
await UnpWriteBufAsync(cancellationToken).ConfigureAwait(false);
if (WrittenFileSize > DestUnpSize)
{
return;
}
}
uint MainSlot = DecodeNumber(Inp, BlockTables.LD);
if (MainSlot < 256)
{
if (Fragmented)
{
FragWindow[UnpPtr++] = (byte)MainSlot;
}
else
{
Window[UnpPtr++] = (byte)MainSlot;
}
continue;
}
if (MainSlot >= 262)
{
uint Length = SlotToLength(Inp, MainSlot - 262);
uint DBits,
Distance = 1,
DistSlot = DecodeNumber(Inp, BlockTables.DD);
if (DistSlot < 4)
{
DBits = 0;
Distance += DistSlot;
}
else
{
DBits = (DistSlot / 2) - 1;
Distance += (2 | (DistSlot & 1)) << (int)DBits;
}
if (DBits > 0)
{
if (DBits >= 4)
{
if (DBits > 4)
{
Distance += ((Inp.getbits() >> (int)(20 - DBits)) << 4);
Inp.addbits(DBits - 4);
}
uint LowDist = DecodeNumber(Inp, BlockTables.LDD);
Distance += LowDist;
}
else
{
Distance += Inp.getbits() >> (int)(16 - DBits);
Inp.addbits(DBits);
}
}
if (Distance > 0x100)
{
Length++;
if (Distance > 0x2000)
{
Length++;
if (Distance > 0x40000)
{
Length++;
}
}
}
InsertOldDist(Distance);
LastLength = Length;
CopyString(Length, Distance);
continue;
}
if (MainSlot == 256)
{
var Filter = new UnpackFilter();
if (!ReadFilter(Inp, Filter) || !AddFilter(Filter))
{
break;
}
continue;
}
if (MainSlot == 257)
{
if (LastLength != 0)
{
CopyString((uint)LastLength, (uint)OldDist[0]);
}
continue;
}
if (MainSlot < 262)
{
uint DistNum = MainSlot - 258;
uint Distance = (uint)OldDist[(int)DistNum];
for (var I = (int)DistNum; I > 0; I--)
{
OldDist[I] = OldDist[I - 1];
}
OldDist[0] = Distance;
uint LengthSlot = DecodeNumber(Inp, BlockTables.RD);
uint Length = SlotToLength(Inp, LengthSlot);
LastLength = Length;
CopyString(Length, Distance);
continue;
}
}
await UnpWriteBufAsync(cancellationToken).ConfigureAwait(false);
}
private uint ReadFilterData(BitInput Inp)
{
var ByteCount = (Inp.fgetbits() >> 14) + 1;
@@ -507,58 +326,6 @@ internal partial class Unpack
return ReadCode != -1;
}
private async System.Threading.Tasks.Task<bool> UnpReadBufAsync(
System.Threading.CancellationToken cancellationToken = default
)
{
var DataSize = ReadTop - Inp.InAddr; // Data left to process.
if (DataSize < 0)
{
return false;
}
BlockHeader.BlockSize -= Inp.InAddr - BlockHeader.BlockStart;
if (Inp.InAddr > MAX_SIZE / 2)
{
if (DataSize > 0)
{
Buffer.BlockCopy(Inp.InBuf, Inp.InAddr, Inp.InBuf, 0, DataSize);
}
Inp.InAddr = 0;
ReadTop = DataSize;
}
else
{
DataSize = ReadTop;
}
var ReadCode = 0;
if (MAX_SIZE != DataSize)
{
ReadCode = await UnpIO_UnpReadAsync(
Inp.InBuf,
DataSize,
MAX_SIZE - DataSize,
cancellationToken
)
.ConfigureAwait(false);
}
if (ReadCode > 0) // Can also be -1.
{
ReadTop += ReadCode;
}
ReadBorder = ReadTop - 30;
BlockHeader.BlockStart = Inp.InAddr;
if (BlockHeader.BlockSize != -1) // '-1' means not defined yet.
{
ReadBorder = Math.Min(ReadBorder, BlockHeader.BlockStart + BlockHeader.BlockSize - 1);
}
return ReadCode != -1;
}
private void UnpWriteBuf()
{
var WrittenBorder = WrPtr;
@@ -753,163 +520,6 @@ internal partial class Unpack
}
}
private async System.Threading.Tasks.Task UnpWriteBufAsync(
System.Threading.CancellationToken cancellationToken = default
)
{
var WrittenBorder = WrPtr;
var FullWriteSize = (UnpPtr - WrittenBorder) & MaxWinMask;
var WriteSizeLeft = FullWriteSize;
var NotAllFiltersProcessed = false;
for (var I = 0; I < Filters.Count; I++)
{
var flt = Filters[I];
if (flt.Type == FILTER_NONE)
{
continue;
}
if (flt.NextWindow)
{
if (((flt.BlockStart - WrPtr) & MaxWinMask) <= FullWriteSize)
{
flt.NextWindow = false;
}
continue;
}
var BlockStart = flt.BlockStart;
var BlockLength = flt.BlockLength;
if (((BlockStart - WrittenBorder) & MaxWinMask) < WriteSizeLeft)
{
if (WrittenBorder != BlockStart)
{
await UnpWriteAreaAsync(WrittenBorder, BlockStart, cancellationToken)
.ConfigureAwait(false);
WrittenBorder = BlockStart;
WriteSizeLeft = (UnpPtr - WrittenBorder) & MaxWinMask;
}
if (BlockLength <= WriteSizeLeft)
{
if (BlockLength > 0)
{
var BlockEnd = (BlockStart + BlockLength) & MaxWinMask;
FilterSrcMemory = EnsureCapacity(
FilterSrcMemory,
checked((int)BlockLength)
);
var Mem = FilterSrcMemory;
if (BlockStart < BlockEnd || BlockEnd == 0)
{
if (Fragmented)
{
FragWindow.CopyData(Mem, 0, BlockStart, BlockLength);
}
else
{
Buffer.BlockCopy(Window, (int)BlockStart, Mem, 0, (int)BlockLength);
}
}
else
{
var FirstPartLength = MaxWinSize - BlockStart;
if (Fragmented)
{
FragWindow.CopyData(Mem, 0, BlockStart, FirstPartLength);
FragWindow.CopyData(Mem, FirstPartLength, 0, BlockEnd);
}
else
{
Buffer.BlockCopy(
Window,
(int)BlockStart,
Mem,
0,
(int)FirstPartLength
);
Buffer.BlockCopy(
Window,
0,
Mem,
(int)FirstPartLength,
(int)BlockEnd
);
}
}
var OutMem = ApplyFilter(Mem, BlockLength, flt);
Filters[I].Type = FILTER_NONE;
if (OutMem != null)
{
await UnpIO_UnpWriteAsync(OutMem, 0, BlockLength, cancellationToken)
.ConfigureAwait(false);
WrittenFileSize += BlockLength;
}
WrittenBorder = BlockEnd;
WriteSizeLeft = (UnpPtr - WrittenBorder) & MaxWinMask;
}
}
else
{
NotAllFiltersProcessed = true;
for (var J = I; J < Filters.Count; J++)
{
var fltj = Filters[J];
if (
fltj.Type != FILTER_NONE
&& fltj.NextWindow == false
&& ((fltj.BlockStart - WrPtr) & MaxWinMask) < FullWriteSize
)
{
fltj.NextWindow = true;
}
}
break;
}
}
}
var EmptyCount = 0;
for (var I = 0; I < Filters.Count; I++)
{
if (EmptyCount > 0)
{
Filters[I - EmptyCount] = Filters[I];
}
if (Filters[I].Type == FILTER_NONE)
{
EmptyCount++;
}
}
if (EmptyCount > 0)
{
Filters.RemoveRange(Filters.Count - EmptyCount, EmptyCount);
}
if (!NotAllFiltersProcessed)
{
await UnpWriteAreaAsync(WrittenBorder, UnpPtr, cancellationToken).ConfigureAwait(false);
WrPtr = UnpPtr;
}
WriteBorder = (UnpPtr + Math.Min(MaxWinSize, UNPACK_MAX_WRITE)) & MaxWinMask;
if (
WriteBorder == UnpPtr
|| WrPtr != UnpPtr
&& ((WrPtr - UnpPtr) & MaxWinMask) < ((WriteBorder - UnpPtr) & MaxWinMask)
)
{
WriteBorder = WrPtr;
}
}
private byte[] ApplyFilter(byte[] __d, uint DataSize, UnpackFilter Flt)
{
var Data = 0;
@@ -1041,48 +651,6 @@ internal partial class Unpack
}
}
private async System.Threading.Tasks.Task UnpWriteAreaAsync(
size_t StartPtr,
size_t EndPtr,
System.Threading.CancellationToken cancellationToken = default
)
{
if (EndPtr != StartPtr)
{
UnpSomeRead = true;
}
if (EndPtr < StartPtr)
{
UnpAllBuf = true;
}
if (Fragmented)
{
var SizeToWrite = (EndPtr - StartPtr) & MaxWinMask;
while (SizeToWrite > 0)
{
var BlockSize = FragWindow.GetBlockSize(StartPtr, SizeToWrite);
FragWindow.GetBuffer(StartPtr, out var __buffer, out var __offset);
await UnpWriteDataAsync(__buffer, __offset, BlockSize, cancellationToken)
.ConfigureAwait(false);
SizeToWrite -= BlockSize;
StartPtr = (StartPtr + BlockSize) & MaxWinMask;
}
}
else if (EndPtr < StartPtr)
{
await UnpWriteDataAsync(Window, StartPtr, MaxWinSize - StartPtr, cancellationToken)
.ConfigureAwait(false);
await UnpWriteDataAsync(Window, 0, EndPtr, cancellationToken).ConfigureAwait(false);
}
else
{
await UnpWriteDataAsync(Window, StartPtr, EndPtr - StartPtr, cancellationToken)
.ConfigureAwait(false);
}
}
private void UnpWriteData(byte[] Data, size_t offset, size_t Size)
{
if (WrittenFileSize >= DestUnpSize)
@@ -1101,29 +669,6 @@ internal partial class Unpack
WrittenFileSize += Size;
}
private async System.Threading.Tasks.Task UnpWriteDataAsync(
byte[] Data,
size_t offset,
size_t Size,
System.Threading.CancellationToken cancellationToken = default
)
{
if (WrittenFileSize >= DestUnpSize)
{
return;
}
var WriteSize = Size;
var LeftToWrite = DestUnpSize - WrittenFileSize;
if (WriteSize > LeftToWrite)
{
WriteSize = (size_t)LeftToWrite;
}
await UnpIO_UnpWriteAsync(Data, offset, WriteSize, cancellationToken).ConfigureAwait(false);
WrittenFileSize += Size;
}
private void UnpInitData50(bool Solid)
{
if (!Solid)
@@ -1132,9 +677,9 @@ internal partial class Unpack
}
}
private bool ReadBlockHeader(BitInput Inp, ref UnpackBlockHeader Header)
private bool ReadBlockHeader(BitInput Inp)
{
Header.HeaderSize = 0;
BlockHeader.HeaderSize = 0;
if (!Inp.ExternalBuffer && Inp.InAddr > ReadTop - 7)
{
@@ -1155,9 +700,9 @@ internal partial class Unpack
return false;
}
Header.HeaderSize = (int)(2 + ByteCount);
BlockHeader.HeaderSize = (int)(2 + ByteCount);
Header.BlockBitSize = (BlockFlags & 7) + 1;
BlockHeader.BlockBitSize = (BlockFlags & 7) + 1;
var SavedCheckSum = (byte)(Inp.fgetbits() >> 8);
Inp.faddbits(8);
@@ -1169,28 +714,24 @@ internal partial class Unpack
Inp.addbits(8);
}
Header.BlockSize = BlockSize;
BlockHeader.BlockSize = BlockSize;
var CheckSum = (byte)(0x5a ^ BlockFlags ^ BlockSize ^ (BlockSize >> 8) ^ (BlockSize >> 16));
if (CheckSum != SavedCheckSum)
{
return false;
}
Header.BlockStart = Inp.InAddr;
ReadBorder = Math.Min(ReadBorder, Header.BlockStart + Header.BlockSize - 1);
BlockHeader.BlockStart = Inp.InAddr;
ReadBorder = Math.Min(ReadBorder, BlockHeader.BlockStart + BlockHeader.BlockSize - 1);
Header.LastBlockInFile = (BlockFlags & 0x40) != 0;
Header.TablePresent = (BlockFlags & 0x80) != 0;
BlockHeader.LastBlockInFile = (BlockFlags & 0x40) != 0;
BlockHeader.TablePresent = (BlockFlags & 0x80) != 0;
return true;
}
private bool ReadTables(
BitInput Inp,
ref UnpackBlockHeader Header,
ref UnpackBlockTables Tables
)
private bool ReadTables(BitInput Inp)
{
if (!Header.TablePresent)
if (!BlockHeader.TablePresent)
{
return true;
}
@@ -1233,7 +774,7 @@ internal partial class Unpack
}
}
MakeDecodeTables(BitLength, 0, Tables.BD, BC);
MakeDecodeTables(BitLength, 0, BlockTables.BD, BC);
Span<byte> Table = stackalloc byte[checked((int)HUFF_TABLE_SIZE)];
const int TableSize = checked((int)HUFF_TABLE_SIZE);
@@ -1247,7 +788,7 @@ internal partial class Unpack
}
}
var Number = DecodeNumber(Inp, Tables.BD);
var Number = DecodeNumber(Inp, BlockTables.BD);
if (Number < 16)
{
Table[I] = (byte)Number;
@@ -1309,10 +850,10 @@ internal partial class Unpack
return false;
}
MakeDecodeTables(Table, 0, Tables.LD, NC);
MakeDecodeTables(Table, (int)NC, Tables.DD, DC);
MakeDecodeTables(Table, (int)(NC + DC), Tables.LDD, LDC);
MakeDecodeTables(Table, (int)(NC + DC + LDC), Tables.RD, RC);
MakeDecodeTables(Table, 0, BlockTables.LD, NC);
MakeDecodeTables(Table, (int)NC, BlockTables.DD, DC);
MakeDecodeTables(Table, (int)(NC + DC), BlockTables.LDD, LDC);
MakeDecodeTables(Table, (int)(NC + DC + LDC), BlockTables.RD, RC);
return true;
}

@@ -2,6 +2,8 @@
using System;
using System.Buffers;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using static SharpCompress.Compressors.Rar.UnpackV2017.PackDef;
using static SharpCompress.Compressors.Rar.UnpackV2017.UnpackGlobal;
@@ -196,10 +198,10 @@ internal sealed partial class Unpack : BitInput
}
}
private async System.Threading.Tasks.Task DoUnpackAsync(
private async Task DoUnpackAsync(
uint Method,
bool Solid,
System.Threading.CancellationToken cancellationToken = default
CancellationToken cancellationToken = default
)
{
// Methods <50 will crash in Fragmented mode when accessing NULL Window.

@@ -96,4 +96,28 @@ public abstract class Factory : IFactory
stream.Rewind();
return false;
}
internal virtual async ValueTask<IAsyncReader?> TryOpenReaderAsync(
SharpCompressStream stream,
ReaderOptions options,
CancellationToken cancellationToken = default
)
{
if (this is IReaderFactory readerFactory)
{
stream.Rewind();
if (
await IsArchiveAsync(stream, options.Password, cancellationToken)
.ConfigureAwait(false)
)
{
stream.Rewind(true);
return await readerFactory
.OpenAsyncReader(stream, options, cancellationToken)
.ConfigureAwait(false);
}
}
stream.Rewind();
return null;
}
}
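TryOpenReaderAsync above probes one factory: rewind, sniff with IsArchiveAsync, rewind again, then open. A hypothetical caller that walks a set of factories until one accepts the stream; the factory collection and the using directives are guesses, since the real registry lookup is not shown in this diff:

using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;

internal static class ReaderProbeSketch
{
    public static async ValueTask<IAsyncReader?> OpenFirstMatchAsync(IEnumerable<Factory> factories, SharpCompressStream stream, ReaderOptions options, CancellationToken cancellationToken = default)
    {
        foreach (var factory in factories)
        {
            var reader = await factory
                .TryOpenReaderAsync(stream, options, cancellationToken)
                .ConfigureAwait(false);
            if (reader != null)
            {
                return reader; // the factory rewound and re-read the stream itself
            }
        }
        return null; // no registered format matched
    }
}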

@@ -1,7 +1,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
@@ -10,6 +9,7 @@ using SharpCompress.Archives.Tar;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using SharpCompress.IO;
using SharpCompress.Providers;
using SharpCompress.Readers;
using SharpCompress.Readers.GZip;
using SharpCompress.Readers.Tar;
@@ -63,12 +63,26 @@ public class GZipFactory
GZipArchive.OpenArchive(stream, readerOptions);
/// <inheritdoc/>
public IAsyncArchive OpenAsyncArchive(Stream stream, ReaderOptions? readerOptions = null) =>
(IAsyncArchive)OpenArchive(stream, readerOptions);
public ValueTask<IAsyncArchive> OpenAsyncArchive(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncArchive)OpenArchive(stream, readerOptions));
}
/// <inheritdoc/>
- public IAsyncArchive OpenAsyncArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
- (IAsyncArchive)OpenArchive(fileInfo, readerOptions);
+ public ValueTask<IAsyncArchive> OpenAsyncArchive(
+ FileInfo fileInfo,
+ ReaderOptions? readerOptions = null,
+ CancellationToken cancellationToken = default
+ )
+ {
+ cancellationToken.ThrowIfCancellationRequested();
+ return new((IAsyncArchive)OpenArchive(fileInfo, readerOptions));
+ }
#endregion
@@ -114,7 +128,11 @@ public class GZipFactory
if (GZipArchive.IsGZipFile(sharpCompressStream))
{
sharpCompressStream.Rewind();
- var testStream = new GZipStream(sharpCompressStream, CompressionMode.Decompress);
+ using var testStream = options.Providers.CreateDecompressStream(
+ CompressionType.GZip,
+ SharpCompressStream.CreateNonDisposing(sharpCompressStream),
+ CompressionContext.FromStream(sharpCompressStream).WithReaderOptions(options)
+ );
if (TarArchive.IsTarFile(testStream))
{
sharpCompressStream.StopRecording();

View File

@@ -58,7 +58,12 @@ public class LzwFactory : Factory, IReaderFactory
if (LzwStream.IsLzwStream(sharpCompressStream))
{
sharpCompressStream.Rewind();
- using (var testStream = new LzwStream(sharpCompressStream) { IsStreamOwner = false })
+ using (
+ var testStream = options.Providers.CreateDecompressStream(
+ CompressionType.Lzw,
+ SharpCompressStream.CreateNonDisposing(sharpCompressStream)
+ )
+ )
{
if (TarArchive.IsTarFile(testStream))
{
@@ -87,7 +92,7 @@ public class LzwFactory : Factory, IReaderFactory
)
{
cancellationToken.ThrowIfCancellationRequested();
- return new(LzwReader.OpenAsyncReader(stream, options));
+ return LzwReader.OpenAsyncReader(stream, options);
}
#endregion

View File

@@ -54,16 +54,30 @@ public class RarFactory : Factory, IArchiveFactory, IMultiArchiveFactory, IReade
RarArchive.OpenArchive(stream, readerOptions);
/// <inheritdoc/>
- public IAsyncArchive OpenAsyncArchive(Stream stream, ReaderOptions? readerOptions = null) =>
- (IAsyncArchive)OpenArchive(stream, readerOptions);
+ public ValueTask<IAsyncArchive> OpenAsyncArchive(
+ Stream stream,
+ ReaderOptions? readerOptions = null,
+ CancellationToken cancellationToken = default
+ )
+ {
+ cancellationToken.ThrowIfCancellationRequested();
+ return new((IAsyncArchive)OpenArchive(stream, readerOptions));
+ }
/// <inheritdoc/>
public IArchive OpenArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
RarArchive.OpenArchive(fileInfo, readerOptions);
/// <inheritdoc/>
- public IAsyncArchive OpenAsyncArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
- (IAsyncArchive)OpenArchive(fileInfo, readerOptions);
+ public ValueTask<IAsyncArchive> OpenAsyncArchive(
+ FileInfo fileInfo,
+ ReaderOptions? readerOptions = null,
+ CancellationToken cancellationToken = default
+ )
+ {
+ cancellationToken.ThrowIfCancellationRequested();
+ return new((IAsyncArchive)OpenArchive(fileInfo, readerOptions));
+ }
#endregion

View File

@@ -49,16 +49,30 @@ public class SevenZipFactory : Factory, IArchiveFactory, IMultiArchiveFactory
SevenZipArchive.OpenArchive(stream, readerOptions);
/// <inheritdoc/>
- public IAsyncArchive OpenAsyncArchive(Stream stream, ReaderOptions? readerOptions = null) =>
- SevenZipArchive.OpenAsyncArchive(stream, readerOptions);
+ public ValueTask<IAsyncArchive> OpenAsyncArchive(
+ Stream stream,
+ ReaderOptions? readerOptions = null,
+ CancellationToken cancellationToken = default
+ )
+ {
+ cancellationToken.ThrowIfCancellationRequested();
+ return new((IAsyncArchive)OpenArchive(stream, readerOptions));
+ }
/// <inheritdoc/>
public IArchive OpenArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
SevenZipArchive.OpenArchive(fileInfo, readerOptions);
/// <inheritdoc/>
- public IAsyncArchive OpenAsyncArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
- SevenZipArchive.OpenAsyncArchive(fileInfo, readerOptions);
+ public ValueTask<IAsyncArchive> OpenAsyncArchive(
+ FileInfo fileInfo,
+ ReaderOptions? readerOptions = null,
+ CancellationToken cancellationToken = default
+ )
+ {
+ cancellationToken.ThrowIfCancellationRequested();
+ return new((IAsyncArchive)OpenArchive(fileInfo, readerOptions));
+ }
#endregion
@@ -74,7 +88,7 @@ public class SevenZipFactory : Factory, IArchiveFactory, IMultiArchiveFactory
public IAsyncArchive OpenAsyncArchive(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null
- ) => SevenZipArchive.OpenAsyncArchive(streams, readerOptions);
+ ) => (IAsyncArchive)OpenArchive(streams, readerOptions);
/// <inheritdoc/>
public IArchive OpenArchive(
@@ -86,7 +100,7 @@ public class SevenZipFactory : Factory, IArchiveFactory, IMultiArchiveFactory
public IAsyncArchive OpenAsyncArchive(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
- ) => SevenZipArchive.OpenAsyncArchive(fileInfos, readerOptions);
+ ) => (IAsyncArchive)OpenArchive(fileInfos, readerOptions);
#endregion

View File

@@ -1,6 +1,5 @@
using System;
using System.Collections.Generic;
- using System.Diagnostics;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
@@ -9,6 +8,7 @@ using SharpCompress.Archives.Tar;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using SharpCompress.IO;
+ using SharpCompress.Providers;
using SharpCompress.Readers;
using SharpCompress.Readers.Tar;
using SharpCompress.Writers;
@@ -50,6 +50,7 @@ public class TarFactory
/// <inheritdoc/>
public override bool IsArchive(Stream stream, string? password = null)
{
+ var providers = CompressionProviderRegistry.Default;
var sharpCompressStream = new SharpCompressStream(stream);
sharpCompressStream.StartRecording();
foreach (var wrapper in TarWrapper.Wrappers)
@@ -58,7 +59,11 @@ public class TarFactory
if (wrapper.IsMatch(sharpCompressStream))
{
sharpCompressStream.Rewind();
- var decompressedStream = wrapper.CreateStream(sharpCompressStream);
+ var decompressedStream = CreateProbeDecompressionStream(
+ sharpCompressStream,
+ wrapper.CompressionType,
+ providers
+ );
if (TarArchive.IsTarFile(decompressedStream))
{
sharpCompressStream.Rewind();
@@ -77,6 +82,7 @@ public class TarFactory
CancellationToken cancellationToken = default
)
{
+ var providers = CompressionProviderRegistry.Default;
var sharpCompressStream = new SharpCompressStream(stream);
sharpCompressStream.StartRecording();
foreach (var wrapper in TarWrapper.Wrappers)
@@ -89,8 +95,12 @@ public class TarFactory
)
{
sharpCompressStream.Rewind();
- var decompressedStream = await wrapper
- .CreateStreamAsync(sharpCompressStream, cancellationToken)
+ var decompressedStream = await CreateProbeDecompressionStreamAsync(
+ sharpCompressStream,
+ wrapper.CompressionType,
+ providers,
+ cancellationToken: cancellationToken
+ )
.ConfigureAwait(false);
if (
await TarArchive
@@ -109,6 +119,125 @@ public class TarFactory
#endregion
private static Stream CreateProbeDecompressionStream(
Stream stream,
CompressionType compressionType,
CompressionProviderRegistry providers,
IReaderOptions? readerOptions = null
)
{
var nonDisposingStream = SharpCompressStream.CreateNonDisposing(stream);
if (compressionType == CompressionType.None)
{
return nonDisposingStream;
}
if (compressionType == CompressionType.GZip && readerOptions is not null)
{
return providers.CreateDecompressStream(
compressionType,
nonDisposingStream,
CompressionContext.FromStream(nonDisposingStream).WithReaderOptions(readerOptions)
);
}
return providers.CreateDecompressStream(compressionType, nonDisposingStream);
}
private static async ValueTask<Stream> CreateProbeDecompressionStreamAsync(
Stream stream,
CompressionType compressionType,
CompressionProviderRegistry providers,
IReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
var nonDisposingStream = SharpCompressStream.CreateNonDisposing(stream);
if (compressionType == CompressionType.None)
{
return nonDisposingStream;
}
if (compressionType == CompressionType.GZip && readerOptions is not null)
{
return await providers
.CreateDecompressStreamAsync(
compressionType,
nonDisposingStream,
CompressionContext
.FromStream(nonDisposingStream)
.WithReaderOptions(readerOptions),
cancellationToken
)
.ConfigureAwait(false);
}
return await providers
.CreateDecompressStreamAsync(compressionType, nonDisposingStream, cancellationToken)
.ConfigureAwait(false);
}
public static CompressionType GetCompressionType(
Stream stream,
CompressionProviderRegistry? providers = null
)
{
providers ??= CompressionProviderRegistry.Default;
stream.Seek(0, SeekOrigin.Begin);
foreach (var wrapper in TarWrapper.Wrappers)
{
stream.Seek(0, SeekOrigin.Begin);
if (wrapper.IsMatch(stream))
{
stream.Seek(0, SeekOrigin.Begin);
var decompressedStream = CreateProbeDecompressionStream(
stream,
wrapper.CompressionType,
providers
);
if (TarArchive.IsTarFile(decompressedStream))
{
return wrapper.CompressionType;
}
}
}
throw new InvalidFormatException("Not a tar file.");
}
public static async ValueTask<CompressionType> GetCompressionTypeAsync(
Stream stream,
CompressionProviderRegistry? providers = null,
CancellationToken cancellationToken = default
)
{
providers ??= CompressionProviderRegistry.Default;
stream.Seek(0, SeekOrigin.Begin);
foreach (var wrapper in TarWrapper.Wrappers)
{
stream.Seek(0, SeekOrigin.Begin);
if (await wrapper.IsMatchAsync(stream, cancellationToken).ConfigureAwait(false))
{
stream.Seek(0, SeekOrigin.Begin);
var decompressedStream = await CreateProbeDecompressionStreamAsync(
stream,
wrapper.CompressionType,
providers,
cancellationToken: cancellationToken
)
.ConfigureAwait(false);
if (
await TarArchive
.IsTarFileAsync(decompressedStream, cancellationToken)
.ConfigureAwait(false)
)
{
return wrapper.CompressionType;
}
}
}
throw new InvalidFormatException("Not a tar file.");
}
#region IArchiveFactory
/// <inheritdoc/>
@@ -116,16 +245,28 @@ public class TarFactory
TarArchive.OpenArchive(stream, readerOptions);
/// <inheritdoc/>
- public IAsyncArchive OpenAsyncArchive(Stream stream, ReaderOptions? readerOptions = null) =>
- (IAsyncArchive)OpenArchive(stream, readerOptions);
+ public async ValueTask<IAsyncArchive> OpenAsyncArchive(
+ Stream stream,
+ ReaderOptions? readerOptions = null,
+ CancellationToken cancellationToken = default
+ ) =>
+ await TarArchive
+ .OpenAsyncArchive(stream, readerOptions, cancellationToken)
+ .ConfigureAwait(false);
/// <inheritdoc/>
public IArchive OpenArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
TarArchive.OpenArchive(fileInfo, readerOptions);
/// <inheritdoc/>
- public IAsyncArchive OpenAsyncArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
- (IAsyncArchive)OpenArchive(fileInfo, readerOptions);
+ public async ValueTask<IAsyncArchive> OpenAsyncArchive(
+ FileInfo fileInfo,
+ ReaderOptions? readerOptions = null,
+ CancellationToken cancellationToken = default
+ ) =>
+ await TarArchive
+ .OpenAsyncArchive(fileInfo, readerOptions, cancellationToken)
+ .ConfigureAwait(false);
#endregion
@@ -171,7 +312,12 @@ public class TarFactory
if (wrapper.IsMatch(sharpCompressStream))
{
sharpCompressStream.Rewind();
- var decompressedStream = wrapper.CreateStream(sharpCompressStream);
+ var decompressedStream = CreateProbeDecompressionStream(
+ sharpCompressStream,
+ wrapper.CompressionType,
+ options.Providers,
+ options
+ );
if (TarArchive.IsTarFile(decompressedStream))
{
sharpCompressStream.StopRecording();
@@ -203,8 +349,13 @@ public class TarFactory
)
{
sharpCompressStream.Rewind();
- var decompressedStream = await wrapper
- .CreateStreamAsync(sharpCompressStream, cancellationToken)
+ var decompressedStream = await CreateProbeDecompressionStreamAsync(
+ sharpCompressStream,
+ wrapper.CompressionType,
+ options.Providers,
+ options,
+ cancellationToken
+ )
.ConfigureAwait(false);
if (
await TarArchive
@@ -218,7 +369,9 @@ public class TarFactory
}
}
}
- return (IAsyncReader)TarReader.OpenReader(stream, options);
+ sharpCompressStream.Rewind();
+ return (IAsyncReader)TarReader.OpenReader(sharpCompressStream, options);
}
#endregion
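
The static probe helpers added above can also be called on their own to discover how a tar stream is wrapped. A minimal sketch, assuming TarFactory is exposed from the SharpCompress.Factories namespace and given a seekable stream (the helper seeks back to the start itself); the file name is hypothetical:

using System;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Factories; // assumed namespace for TarFactory

using Stream tarStream = File.OpenRead("archive.tar.gz"); // hypothetical path
// Uses CompressionProviderRegistry.Default when no registry is passed.
var wrapper = TarFactory.GetCompressionType(tarStream);
Console.WriteLine(wrapper); // a gzipped tar is expected to print GZip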

View File

@@ -127,16 +127,30 @@ public class ZipFactory
ZipArchive.OpenArchive(stream, readerOptions);
/// <inheritdoc/>
- public IAsyncArchive OpenAsyncArchive(Stream stream, ReaderOptions? readerOptions = null) =>
- (IAsyncArchive)OpenArchive(stream, readerOptions);
+ public ValueTask<IAsyncArchive> OpenAsyncArchive(
+ Stream stream,
+ ReaderOptions? readerOptions = null,
+ CancellationToken cancellationToken = default
+ )
+ {
+ cancellationToken.ThrowIfCancellationRequested();
+ return new((IAsyncArchive)OpenArchive(stream, readerOptions));
+ }
/// <inheritdoc/>
public IArchive OpenArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
ZipArchive.OpenArchive(fileInfo, readerOptions);
/// <inheritdoc/>
- public IAsyncArchive OpenAsyncArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
- (IAsyncArchive)OpenArchive(fileInfo, readerOptions);
+ public ValueTask<IAsyncArchive> OpenAsyncArchive(
+ FileInfo fileInfo,
+ ReaderOptions? readerOptions = null,
+ CancellationToken cancellationToken = default
+ )
+ {
+ cancellationToken.ThrowIfCancellationRequested();
+ return new((IAsyncArchive)OpenArchive(fileInfo, readerOptions));
+ }
#endregion

View File

@@ -206,8 +206,22 @@ internal partial class SharpCompressStream : Stream, IStreamStack
throw new NotSupportedException();
}
- public override long Length =>
- _isPassthrough ? stream.Length : throw new NotSupportedException();
+ public override long Length
+ {
+ get
+ {
+ if (_isPassthrough)
+ {
+ return stream.Length;
+ }
+ if (_ringBuffer is not null)
+ {
+ return _ringBuffer.Length;
+ }
+ throw new NotSupportedException();
+ }
+ }
public override long Position
{

View File

@@ -0,0 +1,66 @@
using System.IO;
using SharpCompress.Common.Options;
namespace SharpCompress.Providers;
/// <summary>
/// Provides context information for compression operations.
/// Carries format-specific parameters that some compression types require.
/// </summary>
public sealed record CompressionContext
{
/// <summary>
/// The size of the input data, or -1 if unknown.
/// </summary>
public long InputSize { get; init; } = -1;
/// <summary>
/// The expected output size, or -1 if unknown.
/// </summary>
public long OutputSize { get; init; } = -1;
/// <summary>
/// The raw property bytes for the compression format (e.g., LZMA properties).
/// </summary>
public byte[]? Properties { get; init; }
/// <summary>
/// Whether the underlying stream supports seeking.
/// </summary>
public bool CanSeek { get; init; }
/// <summary>
/// Additional format-specific options.
/// </summary>
/// <remarks>
/// This value is consumed by provider implementations that need caller-supplied metadata
/// that is not tied to ReaderOptions. For archive header encoding, use <see cref="ReaderOptions"/> instead.
/// Examples of valid FormatOptions values include compression properties (e.g., LZMA properties),
/// format flags, or algorithm-specific configuration.
/// </remarks>
public object? FormatOptions { get; init; }
/// <summary>
/// Creates a CompressionContext from a stream.
/// </summary>
/// <param name="stream">The stream to extract context from.</param>
/// <returns>A CompressionContext populated from the stream.</returns>
public static CompressionContext FromStream(Stream stream) =>
new() { CanSeek = stream.CanSeek, InputSize = stream.CanSeek ? stream.Length : -1 };
/// <summary>
/// Reader options for accessing archive metadata such as header encoding.
/// </summary>
public IReaderOptions? ReaderOptions { get; init; }
/// <summary>
/// Returns a new <see cref="CompressionContext"/> with the specified reader options.
/// </summary>
/// <param name="readerOptions">The reader options to set.</param>
/// <returns>A new <see cref="CompressionContext"/> instance.</returns>
public CompressionContext WithReaderOptions(IReaderOptions? readerOptions) =>
this with
{
ReaderOptions = readerOptions,
};
}
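
As a rough usage sketch (the sizes, property bytes, and file name below are illustrative, not taken from a real archive), a context for a format that needs sizes and property bytes can be built from FromStream plus a with-expression:

using System.IO;
using SharpCompress.Providers;
using SharpCompress.Readers;

using Stream source = File.OpenRead("entry.bin"); // hypothetical input
var context = CompressionContext
    .FromStream(source)                     // captures CanSeek and InputSize
    .WithReaderOptions(new ReaderOptions()) with
    {
        OutputSize = 4096,                  // uncompressed size from a header
        Properties = new byte[] { 0x5D, 0x00, 0x00, 0x01, 0x00 },
    };

Because CompressionContext is a record, each step returns a new instance; nothing mutates a context another caller might hold.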

View File

@@ -0,0 +1,18 @@
using System.Text;
using SharpCompress.Common;
using SharpCompress.Common.Options;
namespace SharpCompress.Providers;
public static class CompressionContextExtensions
{
/// <summary>
/// Resolves the archive header encoding from <see cref="CompressionContext.ReaderOptions"/>.
/// </summary>
/// <remarks>
/// Returns <see cref="ReaderOptions.ArchiveEncoding"/> when ReaderOptions is set,
/// otherwise falls back to UTF-8.
/// </remarks>
public static Encoding ResolveArchiveEncoding(this CompressionContext context) =>
context.ReaderOptions?.ArchiveEncoding.GetEncoding() ?? Encoding.UTF8;
}

View File

@@ -0,0 +1,129 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Providers;
/// <summary>
/// Base class for compression providers that provides default async implementations
/// delegating to synchronous methods. Providers can inherit from this class for
/// simpler implementations or implement ICompressionProvider directly for full control.
/// </summary>
/// <remarks>
/// <para>
/// This base class implements the async methods by calling the synchronous versions.
/// Providers that need true async implementations should override these methods.
/// </para>
/// </remarks>
public abstract class CompressionProviderBase : ICompressionProvider
{
/// <inheritdoc />
public abstract CompressionType CompressionType { get; }
/// <inheritdoc />
public abstract bool SupportsCompression { get; }
/// <inheritdoc />
public abstract bool SupportsDecompression { get; }
/// <inheritdoc />
public abstract Stream CreateCompressStream(Stream destination, int compressionLevel);
/// <inheritdoc />
public virtual Stream CreateCompressStream(
Stream destination,
int compressionLevel,
CompressionContext context
) => CreateCompressStream(destination, compressionLevel);
/// <inheritdoc />
public abstract Stream CreateDecompressStream(Stream source);
/// <inheritdoc />
public virtual Stream CreateDecompressStream(Stream source, CompressionContext context) =>
CreateDecompressStream(source);
/// <summary>
/// Asynchronously creates a compression stream.
/// Default implementation delegates to the synchronous CreateCompressStream.
/// </summary>
public virtual ValueTask<Stream> CreateCompressStreamAsync(
Stream destination,
int compressionLevel,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new ValueTask<Stream>(CreateCompressStream(destination, compressionLevel));
}
/// <summary>
/// Asynchronously creates a compression stream with context.
/// Default implementation delegates to the synchronous CreateCompressStream with context.
/// </summary>
public virtual ValueTask<Stream> CreateCompressStreamAsync(
Stream destination,
int compressionLevel,
CompressionContext context,
CancellationToken cancellationToken = default
)
{
return CreateCompressStreamAsync(destination, compressionLevel, cancellationToken);
}
/// <summary>
/// Asynchronously creates a decompression stream.
/// Default implementation delegates to the synchronous CreateDecompressStream.
/// </summary>
public virtual ValueTask<Stream> CreateDecompressStreamAsync(
Stream source,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new ValueTask<Stream>(CreateDecompressStream(source));
}
/// <summary>
/// Asynchronously creates a decompression stream with context.
/// Default implementation delegates to the synchronous CreateDecompressStream with context.
/// </summary>
public virtual ValueTask<Stream> CreateDecompressStreamAsync(
Stream source,
CompressionContext context,
CancellationToken cancellationToken = default
)
{
return CreateDecompressStreamAsync(source, cancellationToken);
}
protected static void ValidateRequiredSizes(CompressionContext context, string algorithmName)
{
if (context.InputSize < 0 || context.OutputSize < 0)
{
throw new ArgumentException(
$"{algorithmName} decompression requires InputSize and OutputSize in CompressionContext.",
nameof(context)
);
}
}
protected static T RequireFormatOption<T>(
CompressionContext context,
string algorithmName,
string optionName
)
{
if (context.FormatOptions is not T options)
{
throw new ArgumentException(
$"{algorithmName} decompression requires {optionName} in CompressionContext.FormatOptions.",
nameof(context)
);
}
return options;
}
}
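
Since the base class supplies the sync-delegating async overloads, a third-party provider only has to implement the abstract members. A minimal sketch (not part of this change) that backs Deflate with the BCL's System.IO.Compression; the name BclDeflateProvider and the level mapping are illustrative:

using System.IO;
using SharpCompress.Common;
using SharpCompress.Providers;
using BCL = System.IO.Compression;

public sealed class BclDeflateProvider : CompressionProviderBase
{
    public override CompressionType CompressionType => CompressionType.Deflate;
    public override bool SupportsCompression => true;
    public override bool SupportsDecompression => true;

    public override Stream CreateCompressStream(Stream destination, int compressionLevel) =>
        // Coarsely map the 0-9 level onto the BCL's named levels.
        new BCL.DeflateStream(
            destination,
            compressionLevel >= 6 ? BCL.CompressionLevel.Optimal : BCL.CompressionLevel.Fastest
        );

    public override Stream CreateDecompressStream(Stream source) =>
        new BCL.DeflateStream(source, BCL.CompressionMode.Decompress);
}

// Registered via: CompressionProviderRegistry.Default.With(new BclDeflateProvider())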

View File

@@ -0,0 +1,321 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Providers.Default;
namespace SharpCompress.Providers;
/// <summary>
/// A registry of compression providers, keyed by CompressionType.
/// Used to resolve which implementation to use for a given compression type.
/// </summary>
/// <remarks>
/// <para>
/// This class is immutable. Use the <c>With</c> method to create modified copies
/// that add or replace providers:
/// </para>
/// <code>
/// var customRegistry = CompressionProviderRegistry.Default
/// .With(new MyCustomGZipProvider());
/// var options = new WriterOptions(CompressionType.GZip)
/// {
/// Providers = customRegistry
/// };
/// </code>
/// </remarks>
public sealed class CompressionProviderRegistry
{
/// <summary>
/// The default registry using SharpCompress internal implementations.
/// </summary>
public static CompressionProviderRegistry Default { get; } = CreateDefault();
/// <summary>
/// An empty registry with no providers registered, intended for tests.
/// </summary>
public static CompressionProviderRegistry Empty { get; } = CreateEmpty();
private readonly Dictionary<CompressionType, ICompressionProvider> _providers;
private CompressionProviderRegistry(
Dictionary<CompressionType, ICompressionProvider> providers
) => _providers = providers;
/// <summary>
/// Gets the provider for a given compression type, or null if none is registered.
/// </summary>
/// <param name="type">The compression type to look up.</param>
/// <returns>The provider for the type, or null if not found.</returns>
public ICompressionProvider? GetProvider(CompressionType type)
{
_providers.TryGetValue(type, out var provider);
return provider;
}
/// <summary>
/// Creates a compression stream for the specified type.
/// </summary>
/// <param name="type">The compression type.</param>
/// <param name="destination">The destination stream.</param>
/// <param name="level">The compression level.</param>
/// <returns>A compression stream.</returns>
/// <exception cref="InvalidOperationException">If no provider is registered for the type.</exception>
/// <exception cref="NotSupportedException">If the provider does not support compression.</exception>
public Stream CreateCompressStream(CompressionType type, Stream destination, int level)
{
var provider = GetProvider(type);
if (provider is null)
{
throw new InvalidOperationException(
$"No compression provider registered for type: {type}"
);
}
return provider.CreateCompressStream(destination, level);
}
/// <summary>
/// Creates a decompression stream for the specified type.
/// </summary>
/// <param name="type">The compression type.</param>
/// <param name="source">The source stream.</param>
/// <returns>A decompression stream.</returns>
/// <exception cref="InvalidOperationException">If no provider is registered for the type.</exception>
/// <exception cref="NotSupportedException">If the provider does not support decompression.</exception>
public Stream CreateDecompressStream(CompressionType type, Stream source)
{
var provider = GetProvider(type);
if (provider is null)
{
throw new InvalidOperationException(
$"No compression provider registered for type: {type}"
);
}
return provider.CreateDecompressStream(source);
}
/// <summary>
/// Creates a compression stream for the specified type with context.
/// </summary>
/// <param name="type">The compression type.</param>
/// <param name="destination">The destination stream.</param>
/// <param name="level">The compression level.</param>
/// <param name="context">Context information for the compression.</param>
/// <returns>A compression stream.</returns>
/// <exception cref="InvalidOperationException">If no provider is registered for the type.</exception>
/// <exception cref="NotSupportedException">If the provider does not support compression.</exception>
public Stream CreateCompressStream(
CompressionType type,
Stream destination,
int level,
CompressionContext context
)
{
var provider = GetProvider(type);
if (provider is null)
{
throw new InvalidOperationException(
$"No compression provider registered for type: {type}"
);
}
return provider.CreateCompressStream(destination, level, context);
}
/// <summary>
/// Creates a decompression stream for the specified type with context.
/// </summary>
/// <param name="type">The compression type.</param>
/// <param name="source">The source stream.</param>
/// <param name="context">Context information for the decompression.</param>
/// <returns>A decompression stream.</returns>
/// <exception cref="InvalidOperationException">If no provider is registered for the type.</exception>
/// <exception cref="NotSupportedException">If the provider does not support decompression.</exception>
public Stream CreateDecompressStream(
CompressionType type,
Stream source,
CompressionContext context
)
{
var provider = GetProvider(type);
if (provider is null)
{
throw new InvalidOperationException(
$"No compression provider registered for type: {type}"
);
}
return provider.CreateDecompressStream(source, context);
}
/// <summary>
/// Asynchronously creates a compression stream for the specified type.
/// </summary>
/// <param name="type">The compression type.</param>
/// <param name="destination">The destination stream.</param>
/// <param name="level">The compression level.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>A task containing the compression stream.</returns>
/// <exception cref="InvalidOperationException">If no provider is registered for the type.</exception>
/// <exception cref="NotSupportedException">If the provider does not support compression.</exception>
public ValueTask<Stream> CreateCompressStreamAsync(
CompressionType type,
Stream destination,
int level,
CancellationToken cancellationToken = default
)
{
var provider = GetProvider(type);
if (provider is null)
{
throw new InvalidOperationException(
$"No compression provider registered for type: {type}"
);
}
return provider.CreateCompressStreamAsync(destination, level, cancellationToken);
}
/// <summary>
/// Asynchronously creates a decompression stream for the specified type.
/// </summary>
/// <param name="type">The compression type.</param>
/// <param name="source">The source stream.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>A task containing the decompression stream.</returns>
/// <exception cref="InvalidOperationException">If no provider is registered for the type.</exception>
/// <exception cref="NotSupportedException">If the provider does not support decompression.</exception>
public ValueTask<Stream> CreateDecompressStreamAsync(
CompressionType type,
Stream source,
CancellationToken cancellationToken = default
)
{
var provider = GetProvider(type);
if (provider is null)
{
throw new InvalidOperationException(
$"No compression provider registered for type: {type}"
);
}
return provider.CreateDecompressStreamAsync(source, cancellationToken);
}
/// <summary>
/// Asynchronously creates a compression stream for the specified type with context.
/// </summary>
/// <param name="type">The compression type.</param>
/// <param name="destination">The destination stream.</param>
/// <param name="level">The compression level.</param>
/// <param name="context">Context information for the compression.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>A task containing the compression stream.</returns>
/// <exception cref="InvalidOperationException">If no provider is registered for the type.</exception>
/// <exception cref="NotSupportedException">If the provider does not support compression.</exception>
public ValueTask<Stream> CreateCompressStreamAsync(
CompressionType type,
Stream destination,
int level,
CompressionContext context,
CancellationToken cancellationToken = default
)
{
var provider = GetProvider(type);
if (provider is null)
{
throw new InvalidOperationException(
$"No compression provider registered for type: {type}"
);
}
return provider.CreateCompressStreamAsync(destination, level, context, cancellationToken);
}
/// <summary>
/// Asynchronously creates a decompression stream for the specified type with context.
/// </summary>
/// <param name="type">The compression type.</param>
/// <param name="source">The source stream.</param>
/// <param name="context">Context information for the decompression.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>A task containing the decompression stream.</returns>
/// <exception cref="InvalidOperationException">If no provider is registered for the type.</exception>
/// <exception cref="NotSupportedException">If the provider does not support decompression.</exception>
public ValueTask<Stream> CreateDecompressStreamAsync(
CompressionType type,
Stream source,
CompressionContext context,
CancellationToken cancellationToken = default
)
{
var provider = GetProvider(type);
if (provider is null)
{
throw new InvalidOperationException(
$"No compression provider registered for type: {type}"
);
}
return provider.CreateDecompressStreamAsync(source, context, cancellationToken);
}
/// <summary>
/// Gets the provider as an ICompressionProviderHooks if it supports complex initialization.
/// </summary>
/// <param name="type">The compression type.</param>
/// <returns>The provider as <see cref="ICompressionProviderHooks"/>, or null if the provider doesn't support complex initialization.</returns>
public ICompressionProviderHooks? GetCompressingProvider(CompressionType type)
{
var provider = GetProvider(type);
return provider as ICompressionProviderHooks;
}
/// <summary>
/// Creates a new registry with the specified provider added or replaced.
/// </summary>
/// <param name="provider">The provider to add or replace.</param>
/// <returns>A new registry instance with the provider included.</returns>
/// <exception cref="ArgumentNullException">If provider is null.</exception>
public CompressionProviderRegistry With(ICompressionProvider provider)
{
if (provider is null)
{
throw new ArgumentNullException(nameof(provider));
}
var newProviders = new Dictionary<CompressionType, ICompressionProvider>(_providers)
{
[provider.CompressionType] = provider,
};
return new CompressionProviderRegistry(newProviders);
}
private static CompressionProviderRegistry CreateDefault()
{
var providers = new Dictionary<CompressionType, ICompressionProvider>
{
[CompressionType.Deflate] = new DeflateCompressionProvider(),
[CompressionType.GZip] = new GZipCompressionProvider(),
[CompressionType.BZip2] = new BZip2CompressionProvider(),
[CompressionType.ZStandard] = new ZStandardCompressionProvider(),
[CompressionType.LZip] = new LZipCompressionProvider(),
[CompressionType.Xz] = new XzCompressionProvider(),
[CompressionType.Lzw] = new LzwCompressionProvider(),
[CompressionType.Deflate64] = new Deflate64CompressionProvider(),
[CompressionType.Shrink] = new ShrinkCompressionProvider(),
[CompressionType.Reduce1] = new Reduce1CompressionProvider(),
[CompressionType.Reduce2] = new Reduce2CompressionProvider(),
[CompressionType.Reduce3] = new Reduce3CompressionProvider(),
[CompressionType.Reduce4] = new Reduce4CompressionProvider(),
[CompressionType.Explode] = new ExplodeCompressionProvider(),
[CompressionType.LZMA] = new LzmaCompressingProvider(),
[CompressionType.PPMd] = new PpmdCompressingProvider(),
};
return new CompressionProviderRegistry(providers);
}
private static CompressionProviderRegistry CreateEmpty()
{
var providers = new Dictionary<CompressionType, ICompressionProvider>();
return new CompressionProviderRegistry(providers);
}
}
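
A short sketch of the resolution path (the file name is hypothetical): callers ask the registry for a stream by CompressionType and get whichever provider is registered, default or custom:

using System.IO;
using SharpCompress.Common;
using SharpCompress.Providers;

using Stream compressed = File.OpenRead("data.bz2"); // hypothetical input
using var decompressed = CompressionProviderRegistry.Default.CreateDecompressStream(
    CompressionType.BZip2,
    compressed
);
decompressed.CopyTo(Stream.Null); // drain; a real caller would consume the bytes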

View File

@@ -0,0 +1,29 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Providers;
public abstract class ContextRequiredDecompressionProviderBase : DecompressionOnlyProviderBase
{
protected abstract string DecompressionContextRequirementDescription { get; }
protected virtual string DecompressionContextRequirementSuffix => string.Empty;
public sealed override Stream CreateDecompressStream(Stream source) =>
throw new InvalidOperationException(
$"{DecompressionContextRequirementDescription}. "
+ $"Use CreateDecompressStream(Stream, CompressionContext) overload{DecompressionContextRequirementSuffix}."
);
public sealed override ValueTask<Stream> CreateDecompressStreamAsync(
Stream source,
CancellationToken cancellationToken = default
) =>
throw new InvalidOperationException(
$"{DecompressionContextRequirementDescription}. "
+ "Use CreateDecompressStreamAsync(Stream, CompressionContext, CancellationToken) "
+ $"overload{DecompressionContextRequirementSuffix}."
);
}

View File

@@ -0,0 +1,22 @@
using System;
using System.IO;
using SharpCompress.Common;
namespace SharpCompress.Providers;
public abstract class DecompressionOnlyProviderBase : CompressionProviderBase
{
public override bool SupportsCompression => false;
public override bool SupportsDecompression => true;
protected abstract string CompressionNotSupportedMessage { get; }
public sealed override Stream CreateCompressStream(Stream destination, int compressionLevel) =>
throw new NotSupportedException(CompressionNotSupportedMessage);
public sealed override Stream CreateCompressStream(
Stream destination,
int compressionLevel,
CompressionContext context
) => throw new NotSupportedException(CompressionNotSupportedMessage);
}

View File

@@ -0,0 +1,39 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Compressors;
using SharpCompress.Compressors.BZip2;
namespace SharpCompress.Providers.Default;
/// <summary>
/// Provides BZip2 compression using SharpCompress's internal implementation.
/// </summary>
public sealed class BZip2CompressionProvider : CompressionProviderBase
{
public override CompressionType CompressionType => CompressionType.BZip2;
public override bool SupportsCompression => true;
public override bool SupportsDecompression => true;
public override Stream CreateCompressStream(Stream destination, int compressionLevel)
{
// BZip2 ignores the compressionLevel parameter in this implementation
return BZip2Stream.Create(destination, CompressionMode.Compress, false);
}
public override Stream CreateDecompressStream(Stream source)
{
return BZip2Stream.Create(source, CompressionMode.Decompress, false);
}
public override async ValueTask<Stream> CreateDecompressStreamAsync(
Stream source,
CancellationToken cancellationToken = default
)
{
return await BZip2Stream
.CreateAsync(source, CompressionMode.Decompress, false, false, cancellationToken)
.ConfigureAwait(false);
}
}

View File

@@ -0,0 +1,22 @@
using System.IO;
using SharpCompress.Common;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate64;
namespace SharpCompress.Providers.Default;
/// <summary>
/// Provides Deflate64 decompression using SharpCompress's internal implementation.
/// Note: Deflate64 compression is not supported; this provider is decompression-only.
/// </summary>
public sealed class Deflate64CompressionProvider : DecompressionOnlyProviderBase
{
public override CompressionType CompressionType => CompressionType.Deflate64;
protected override string CompressionNotSupportedMessage =>
"Deflate64 compression is not supported by SharpCompress's internal implementation.";
public override Stream CreateDecompressStream(Stream source)
{
return new Deflate64Stream(source, CompressionMode.Decompress);
}
}

View File

@@ -0,0 +1,27 @@
using System.IO;
using SharpCompress.Common;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;
namespace SharpCompress.Providers.Default;
/// <summary>
/// Provides Deflate compression using SharpCompress's internal implementation.
/// </summary>
public sealed class DeflateCompressionProvider : CompressionProviderBase
{
public override CompressionType CompressionType => CompressionType.Deflate;
public override bool SupportsCompression => true;
public override bool SupportsDecompression => true;
public override Stream CreateCompressStream(Stream destination, int compressionLevel)
{
var level = (CompressionLevel)compressionLevel;
return new DeflateStream(destination, CompressionMode.Compress, level);
}
public override Stream CreateDecompressStream(Stream source)
{
return new DeflateStream(source, CompressionMode.Decompress);
}
}

View File

@@ -0,0 +1,49 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors.Explode;
namespace SharpCompress.Providers.Default;
/// <summary>
/// Provides Explode decompression using SharpCompress's internal implementation.
/// Note: Explode compression is not supported; this provider is decompression-only.
/// </summary>
/// <remarks>
/// Explode requires compressed size, uncompressed size, and flags which must be provided via CompressionContext.
/// </remarks>
public sealed class ExplodeCompressionProvider : ContextRequiredDecompressionProviderBase
{
public override CompressionType CompressionType => CompressionType.Explode;
protected override string CompressionNotSupportedMessage =>
"Explode compression is not supported by SharpCompress's internal implementation.";
protected override string DecompressionContextRequirementDescription =>
"Explode decompression requires compressed size, uncompressed size, and flags";
protected override string DecompressionContextRequirementSuffix => " with FormatOptions";
public override Stream CreateDecompressStream(Stream source, CompressionContext context)
{
ValidateRequiredSizes(context, "Explode");
var flags = RequireFormatOption<HeaderFlags>(context, "Explode", "HeaderFlags");
return ExplodeStream.Create(source, context.InputSize, context.OutputSize, flags);
}
public override async ValueTask<Stream> CreateDecompressStreamAsync(
Stream source,
CompressionContext context,
CancellationToken cancellationToken = default
)
{
ValidateRequiredSizes(context, "Explode");
var flags = RequireFormatOption<HeaderFlags>(context, "Explode", "HeaderFlags");
return await ExplodeStream
.CreateAsync(source, context.InputSize, context.OutputSize, flags, cancellationToken)
.ConfigureAwait(false);
}
}

View File

@@ -0,0 +1,50 @@
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;
namespace SharpCompress.Providers.Default;
/// <summary>
/// Provides GZip compression using SharpCompress's internal implementation.
/// </summary>
public sealed class GZipCompressionProvider : CompressionProviderBase
{
public override CompressionType CompressionType => CompressionType.GZip;
public override bool SupportsCompression => true;
public override bool SupportsDecompression => true;
public override Stream CreateCompressStream(Stream destination, int compressionLevel)
{
var level = (CompressionLevel)compressionLevel;
return new GZipStream(destination, CompressionMode.Compress, level, Encoding.UTF8);
}
public override Stream CreateDecompressStream(Stream source)
{
return new GZipStream(source, CompressionMode.Decompress);
}
public override Stream CreateDecompressStream(Stream source, CompressionContext context)
{
return new GZipStream(
source,
CompressionMode.Decompress,
CompressionLevel.Default,
context.ResolveArchiveEncoding()
);
}
public override ValueTask<Stream> CreateDecompressStreamAsync(
Stream source,
CompressionContext context,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new ValueTask<Stream>(CreateDecompressStream(source, context));
}
}
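
A sketch of routing a caller-supplied header encoding to this provider through the context (the file name and encoding choice are illustrative; ResolveArchiveEncoding falls back to UTF-8 when no ReaderOptions is present):

using System.IO;
using System.Text;
using SharpCompress.Common;
using SharpCompress.Providers;
using SharpCompress.Readers;

using Stream gz = File.OpenRead("legacy-name.gz"); // hypothetical input
var options = new ReaderOptions
{
    ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(28591) }, // ISO-8859-1
};
using var plain = CompressionProviderRegistry.Default.CreateDecompressStream(
    CompressionType.GZip,
    gz,
    CompressionContext.FromStream(gz).WithReaderOptions(options)
);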

View File

@@ -0,0 +1,26 @@
using System.IO;
using SharpCompress.Common;
using SharpCompress.Compressors;
using SharpCompress.Compressors.LZMA;
namespace SharpCompress.Providers.Default;
/// <summary>
/// Provides LZip compression using SharpCompress's internal implementation.
/// </summary>
public sealed class LZipCompressionProvider : CompressionProviderBase
{
public override CompressionType CompressionType => CompressionType.LZip;
public override bool SupportsCompression => true;
public override bool SupportsDecompression => true;
public override Stream CreateCompressStream(Stream destination, int compressionLevel)
{
return new LZipStream(destination, CompressionMode.Compress);
}
public override Stream CreateDecompressStream(Stream source)
{
return new LZipStream(source, CompressionMode.Decompress);
}
}

View File

@@ -0,0 +1,116 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Compressors.LZMA;
namespace SharpCompress.Providers.Default;
/// <summary>
/// Provides LZMA compression and decompression using SharpCompress's internal implementation.
/// This is a complex provider that requires initialization data for compression.
/// </summary>
public sealed class LzmaCompressingProvider : CompressionProviderBase, ICompressionProviderHooks
{
public override CompressionType CompressionType => CompressionType.LZMA;
public override bool SupportsCompression => true;
public override bool SupportsDecompression => true;
public override Stream CreateCompressStream(Stream destination, int compressionLevel)
{
throw new InvalidOperationException(
"LZMA compression requires context with CanSeek information. "
+ "Use CreateCompressStream(Stream, int, CompressionContext) overload."
);
}
public override Stream CreateCompressStream(
Stream destination,
int compressionLevel,
CompressionContext context
)
{
// LZMA stream creation returns the encoder stream
// Note: Pre-compression data and properties are handled via ICompressionProviderHooks methods
var props = new LzmaEncoderProperties(!context.CanSeek);
return LzmaStream.Create(props, false, destination);
}
public override Stream CreateDecompressStream(Stream source)
{
throw new InvalidOperationException(
"LZMA decompression requires properties. "
+ "Use CreateDecompressStream(Stream, CompressionContext) overload with Properties."
);
}
public override Stream CreateDecompressStream(Stream source, CompressionContext context)
{
if (context.Properties is null || context.Properties.Length < 5)
{
throw new ArgumentException(
"LZMA decompression requires Properties (at least 5 bytes) in CompressionContext.",
nameof(context)
);
}
return LzmaStream.Create(context.Properties, source, context.InputSize, context.OutputSize);
}
public override ValueTask<Stream> CreateDecompressStreamAsync(
Stream source,
CancellationToken cancellationToken = default
) =>
throw new InvalidOperationException(
"LZMA decompression requires properties. "
+ "Use CreateDecompressStreamAsync(Stream, CompressionContext, CancellationToken) overload with Properties."
);
public override async ValueTask<Stream> CreateDecompressStreamAsync(
Stream source,
CompressionContext context,
CancellationToken cancellationToken = default
)
{
if (context.Properties is null || context.Properties.Length < 5)
{
throw new ArgumentException(
"LZMA decompression requires Properties (at least 5 bytes) in CompressionContext.",
nameof(context)
);
}
return await LzmaStream
.CreateAsync(
context.Properties,
source,
context.InputSize,
context.OutputSize,
leaveOpen: false
)
.ConfigureAwait(false);
}
public byte[]? GetPreCompressionData(CompressionContext context)
{
// Zip LZMA entries begin with the LZMA SDK version (9.20) and the little-endian properties size (5)
return new byte[] { 9, 20, 5, 0 };
}
public byte[]? GetCompressionProperties(Stream stream, CompressionContext context)
{
// The LZMA stream exposes its properties after creation
if (stream is LzmaStream lzmaStream)
{
return lzmaStream.Properties;
}
return null;
}
public byte[]? GetPostCompressionData(Stream stream, CompressionContext context)
{
// No post-compression data needed for LZMA in Zip
return null;
}
}
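
Because the parameterless overloads throw, LZMA decompression always goes through the context overload; the property bytes and sizes come from the surrounding archive headers. A hedged sketch with illustrative values and a hypothetical file name:

using System.IO;
using SharpCompress.Common;
using SharpCompress.Providers;

byte[] props = { 0x5D, 0x00, 0x00, 0x01, 0x00 }; // 5 property bytes from the entry header
using Stream raw = File.OpenRead("entry.lzma"); // hypothetical input
var ctx = new CompressionContext
{
    Properties = props,
    InputSize = raw.Length, // compressed size
    OutputSize = 4096,      // uncompressed size from the header
    CanSeek = raw.CanSeek,
};
using var lzma = CompressionProviderRegistry.Default.CreateDecompressStream(
    CompressionType.LZMA,
    raw,
    ctx
);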

View File

@@ -0,0 +1,21 @@
using System.IO;
using SharpCompress.Common;
using SharpCompress.Compressors.Lzw;
namespace SharpCompress.Providers.Default;
/// <summary>
/// Provides LZW decompression using SharpCompress's internal implementation.
/// Note: Compression is not supported by this provider.
/// </summary>
public sealed class LzwCompressionProvider : DecompressionOnlyProviderBase
{
public override CompressionType CompressionType => CompressionType.Lzw;
protected override string CompressionNotSupportedMessage =>
"LZW compression is not supported by SharpCompress's internal implementation.";
public override Stream CreateDecompressStream(Stream source)
{
return new LzwStream(source);
}
}

View File

@@ -0,0 +1,116 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Compressors.PPMd;
namespace SharpCompress.Providers.Default;
/// <summary>
/// Provides PPMd compression and decompression using SharpCompress's internal implementation.
/// This is a complex provider that requires initialization data for compression.
/// </summary>
public sealed class PpmdCompressingProvider : CompressionProviderBase, ICompressionProviderHooks
{
public override CompressionType CompressionType => CompressionType.PPMd;
public override bool SupportsCompression => true;
public override bool SupportsDecompression => true;
public override Stream CreateCompressStream(Stream destination, int compressionLevel)
{
// PPMd doesn't use compressionLevel; it uses PpmdProperties instead
var props = new PpmdProperties();
return PpmdStream.Create(props, destination, true);
}
public override Stream CreateCompressStream(
Stream destination,
int compressionLevel,
CompressionContext context
)
{
// FormatOptions may carry caller-supplied PpmdProperties; otherwise fall back to the defaults
if (context.FormatOptions is PpmdProperties customProps)
{
return PpmdStream.Create(customProps, destination, true);
}
return CreateCompressStream(destination, compressionLevel);
}
public override Stream CreateDecompressStream(Stream source)
{
throw new InvalidOperationException(
"PPMd decompression requires properties. "
+ "Use CreateDecompressStream(Stream, CompressionContext) overload with Properties."
);
}
public override Stream CreateDecompressStream(Stream source, CompressionContext context)
{
if (context.Properties is null || context.Properties.Length < 2)
{
throw new ArgumentException(
"PPMd decompression requires Properties (at least 2 bytes) in CompressionContext.",
nameof(context)
);
}
var props = new PpmdProperties(context.Properties);
return PpmdStream.Create(props, source, false);
}
public override ValueTask<Stream> CreateDecompressStreamAsync(
Stream source,
CancellationToken cancellationToken = default
) =>
throw new InvalidOperationException(
"PPMd decompression requires properties. "
+ "Use CreateDecompressStreamAsync(Stream, CompressionContext, CancellationToken) overload with Properties."
);
public override async ValueTask<Stream> CreateDecompressStreamAsync(
Stream source,
CompressionContext context,
CancellationToken cancellationToken = default
)
{
if (context.Properties is null || context.Properties.Length < 2)
{
throw new ArgumentException(
"PPMd decompression requires Properties (at least 2 bytes) in CompressionContext.",
nameof(context)
);
}
var props = new PpmdProperties(context.Properties);
return await PpmdStream
.CreateAsync(props, source, false, cancellationToken)
.ConfigureAwait(false);
}
public byte[]? GetPreCompressionData(CompressionContext context)
{
// Ppmd writes its properties before the compressed data
if (context.FormatOptions is PpmdProperties customProps)
{
return customProps.Properties;
}
var defaultProps = new PpmdProperties();
return defaultProps.Properties;
}
public byte[]? GetCompressionProperties(Stream stream, CompressionContext context)
{
// Properties are already written in GetPreCompressionData
return null;
}
public byte[]? GetPostCompressionData(Stream stream, CompressionContext context)
{
// No post-compression data needed for Ppmd
return null;
}
}

View File

@@ -0,0 +1,13 @@
using SharpCompress.Common;
namespace SharpCompress.Providers.Default;
/// <summary>
/// Provides Reduce1 decompression using SharpCompress's internal implementation.
/// Note: Reduce compression is not supported; this provider is decompression-only.
/// </summary>
public sealed class Reduce1CompressionProvider : ReduceCompressionProviderBase
{
public override CompressionType CompressionType => CompressionType.Reduce1;
protected override int Factor => 1;
}

View File

@@ -0,0 +1,13 @@
using SharpCompress.Common;
namespace SharpCompress.Providers.Default;
/// <summary>
/// Provides Reduce2 decompression using SharpCompress's internal implementation.
/// Note: Reduce compression is not supported; this provider is decompression-only.
/// </summary>
public sealed class Reduce2CompressionProvider : ReduceCompressionProviderBase
{
public override CompressionType CompressionType => CompressionType.Reduce2;
protected override int Factor => 2;
}

View File

@@ -0,0 +1,13 @@
using SharpCompress.Common;
namespace SharpCompress.Providers.Default;
/// <summary>
/// Provides Reduce3 decompression using SharpCompress's internal implementation.
/// Note: Reduce compression is not supported; this provider is decompression-only.
/// </summary>
public sealed class Reduce3CompressionProvider : ReduceCompressionProviderBase
{
public override CompressionType CompressionType => CompressionType.Reduce3;
protected override int Factor => 3;
}

View File

@@ -0,0 +1,13 @@
using SharpCompress.Common;
namespace SharpCompress.Providers.Default;
/// <summary>
/// Provides Reduce4 decompression using SharpCompress's internal implementation.
/// Note: Reduce compression is not supported; this provider is decompression-only.
/// </summary>
public sealed class Reduce4CompressionProvider : ReduceCompressionProviderBase
{
public override CompressionType CompressionType => CompressionType.Reduce4;
protected override int Factor => 4;
}

View File

@@ -0,0 +1,36 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Compressors.Reduce;
namespace SharpCompress.Providers.Default;
public abstract class ReduceCompressionProviderBase : ContextRequiredDecompressionProviderBase
{
protected abstract int Factor { get; }
protected override string DecompressionContextRequirementDescription =>
"Reduce decompression requires compressed and uncompressed sizes";
protected override string CompressionNotSupportedMessage =>
"Reduce compression is not supported by SharpCompress's internal implementation.";
public sealed override Stream CreateDecompressStream(Stream source, CompressionContext context)
{
ValidateRequiredSizes(context, "Reduce");
return ReduceStream.Create(source, context.InputSize, context.OutputSize, Factor);
}
public sealed override async ValueTask<Stream> CreateDecompressStreamAsync(
Stream source,
CompressionContext context,
CancellationToken cancellationToken = default
)
{
ValidateRequiredSizes(context, "Reduce");
return await ReduceStream
.CreateAsync(source, context.InputSize, context.OutputSize, Factor, cancellationToken)
.ConfigureAwait(false);
}
}

View File

@@ -0,0 +1,56 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Shrink;
namespace SharpCompress.Providers.Default;
/// <summary>
/// Provides Shrink decompression using SharpCompress's internal implementation.
/// Note: Shrink compression is not supported; this provider is decompression-only.
/// </summary>
/// <remarks>
/// Shrink requires compressed and uncompressed sizes which must be provided via CompressionContext.
/// </remarks>
public sealed class ShrinkCompressionProvider : ContextRequiredDecompressionProviderBase
{
public override CompressionType CompressionType => CompressionType.Shrink;
protected override string CompressionNotSupportedMessage =>
"Shrink compression is not supported by SharpCompress's internal implementation.";
protected override string DecompressionContextRequirementDescription =>
"Shrink decompression requires compressed and uncompressed sizes";
public override Stream CreateDecompressStream(Stream source, CompressionContext context)
{
ValidateRequiredSizes(context, "Shrink");
return new ShrinkStream(
source,
CompressionMode.Decompress,
context.InputSize,
context.OutputSize
);
}
public override async ValueTask<Stream> CreateDecompressStreamAsync(
Stream source,
CompressionContext context,
CancellationToken cancellationToken = default
)
{
ValidateRequiredSizes(context, "Shrink");
return await ShrinkStream
.CreateAsync(
source,
CompressionMode.Decompress,
context.InputSize,
context.OutputSize,
cancellationToken
)
.ConfigureAwait(false);
}
}

View File

@@ -0,0 +1,21 @@
using System.IO;
using SharpCompress.Common;
using SharpCompress.Compressors.Xz;
namespace SharpCompress.Providers.Default;
/// <summary>
/// Provides XZ decompression using SharpCompress's internal implementation.
/// Note: Compression is not supported by this provider.
/// </summary>
public sealed class XzCompressionProvider : DecompressionOnlyProviderBase
{
public override CompressionType CompressionType => CompressionType.Xz;
protected override string CompressionNotSupportedMessage =>
"XZ compression is not supported by SharpCompress's internal implementation.";
public override Stream CreateDecompressStream(Stream source)
{
return new XZStream(source);
}
}

View File

@@ -0,0 +1,25 @@
using System.IO;
using SharpCompress.Common;
using ZStd = SharpCompress.Compressors.ZStandard;
namespace SharpCompress.Providers.Default;
/// <summary>
/// Provides ZStandard compression using SharpCompress's internal implementation.
/// </summary>
public sealed class ZStandardCompressionProvider : CompressionProviderBase
{
public override CompressionType CompressionType => CompressionType.ZStandard;
public override bool SupportsCompression => true;
public override bool SupportsDecompression => true;
public override Stream CreateCompressStream(Stream destination, int compressionLevel)
{
return new ZStd.CompressionStream(destination, compressionLevel);
}
public override Stream CreateDecompressStream(Stream source)
{
return new ZStd.DecompressionStream(source);
}
}

View File

@@ -0,0 +1,151 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Providers;
/// <summary>
/// Provides compression and decompression stream creation for a specific compression type.
/// Implement this interface to supply alternative compression implementations.
/// </summary>
/// <remarks>
/// <para>
/// This interface abstracts the creation of compression and decompression streams,
/// allowing SharpCompress to use different implementations of the same compression type.
/// For example, you can provide an implementation that uses System.IO.Compression
/// for Deflate/GZip instead of the internal DotNetZip-derived implementation.
/// </para>
/// <para>
/// Implementations should be thread-safe for concurrent use; each call to
/// CreateCompressStream/CreateDecompressStream returns a new stream instance
/// that is not shared.
/// </para>
/// <para>
/// For simpler implementations, derive from <see cref="CompressionProviderBase"/> which provides
/// default async implementations that delegate to the synchronous methods.
/// </para>
/// </remarks>
public interface ICompressionProvider
{
/// <summary>
/// The compression type this provider handles.
/// </summary>
CompressionType CompressionType { get; }
/// <summary>
/// Whether this provider supports compression (writing).
/// </summary>
bool SupportsCompression { get; }
/// <summary>
/// Whether this provider supports decompression (reading).
/// </summary>
bool SupportsDecompression { get; }
/// <summary>
/// Creates a compression stream that compresses data written to it.
/// </summary>
/// <param name="destination">The destination stream to write compressed data to.</param>
/// <param name="compressionLevel">The compression level (0-9, algorithm-specific).</param>
/// <returns>A stream that compresses data written to it.</returns>
/// <exception cref="NotSupportedException">Thrown if SupportsCompression is false.</exception>
Stream CreateCompressStream(Stream destination, int compressionLevel);
/// <summary>
/// Creates a compression stream with context information.
/// </summary>
/// <param name="destination">The destination stream.</param>
/// <param name="compressionLevel">The compression level.</param>
/// <param name="context">Context information about the compression.</param>
/// <returns>A compression stream.</returns>
/// <exception cref="NotSupportedException">Thrown if SupportsCompression is false.</exception>
Stream CreateCompressStream(
Stream destination,
int compressionLevel,
CompressionContext context
);
/// <summary>
/// Creates a decompression stream that decompresses data read from it.
/// </summary>
/// <param name="source">The source stream to read compressed data from.</param>
/// <returns>A stream that decompresses data read from it.</returns>
/// <exception cref="NotSupportedException">Thrown if SupportsDecompression is false.</exception>
Stream CreateDecompressStream(Stream source);
/// <summary>
/// Creates a decompression stream with context information.
/// </summary>
/// <param name="source">The source stream.</param>
/// <param name="context">
/// Context information about the decompression. Providers may use
/// <see cref="CompressionContext.ReaderOptions"/> for archive header encoding
/// (via <see cref="ReaderOptions.ArchiveEncoding"/>) and
/// <see cref="CompressionContext.FormatOptions"/> for format-specific metadata
/// such as compression properties or algorithm-specific configuration.
/// </param>
/// <returns>A decompression stream.</returns>
/// <exception cref="NotSupportedException">Thrown if SupportsDecompression is false.</exception>
Stream CreateDecompressStream(Stream source, CompressionContext context);
/// <summary>
/// Asynchronously creates a compression stream that compresses data written to it.
/// </summary>
/// <param name="destination">The destination stream to write compressed data to.</param>
/// <param name="compressionLevel">The compression level (0-9, algorithm-specific).</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>A task containing the compression stream.</returns>
/// <exception cref="NotSupportedException">Thrown if SupportsCompression is false.</exception>
ValueTask<Stream> CreateCompressStreamAsync(
Stream destination,
int compressionLevel,
CancellationToken cancellationToken = default
);
/// <summary>
/// Asynchronously creates a compression stream with context information.
/// </summary>
/// <param name="destination">The destination stream.</param>
/// <param name="compressionLevel">The compression level.</param>
/// <param name="context">Context information about the compression.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>A task containing the compression stream.</returns>
/// <exception cref="NotSupportedException">Thrown if SupportsCompression is false.</exception>
ValueTask<Stream> CreateCompressStreamAsync(
Stream destination,
int compressionLevel,
CompressionContext context,
CancellationToken cancellationToken = default
);
/// <summary>
/// Asynchronously creates a decompression stream that decompresses data read from it.
/// </summary>
/// <param name="source">The source stream to read compressed data from.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>A task containing the decompression stream.</returns>
/// <exception cref="NotSupportedException">Thrown if SupportsDecompression is false.</exception>
ValueTask<Stream> CreateDecompressStreamAsync(
Stream source,
CancellationToken cancellationToken = default
);
/// <summary>
/// Asynchronously creates a decompression stream with context information.
/// </summary>
/// <param name="source">The source stream.</param>
/// <param name="context">
/// Context information about the decompression. Providers may use
/// <see cref="CompressionContext.ReaderOptions"/> for archive header encoding
/// (via <see cref="ReaderOptions.ArchiveEncoding"/>) and
/// <see cref="CompressionContext.FormatOptions"/> for format-specific metadata
/// such as compression properties or algorithm-specific configuration.
/// </param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>A task containing the decompression stream.</returns>
/// <exception cref="NotSupportedException">Thrown if SupportsDecompression is false.</exception>
ValueTask<Stream> CreateDecompressStreamAsync(
Stream source,
CompressionContext context,
CancellationToken cancellationToken = default
);
}

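For implementers who bypass CompressionProviderBase, the async members can simply delegate to the synchronous ones, since stream construction itself does not block. A sketch under that assumption, with illustrative Deflate-backed bodies (the level mapping is deliberately omitted; compare SystemDeflateCompressionProvider later in this diff):

using System.IO;
using System.IO.Compression;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Providers;

// Illustrative provider implementing ICompressionProvider directly.
// CompressionProviderBase supplies this delegation pattern for you.
public sealed class DelegatingDeflateProvider : ICompressionProvider
{
    public CompressionType CompressionType => CompressionType.Deflate;
    public bool SupportsCompression => true;
    public bool SupportsDecompression => true;

    public Stream CreateCompressStream(Stream destination, int compressionLevel) =>
        new DeflateStream(destination, CompressionLevel.Optimal, leaveOpen: false);

    public Stream CreateCompressStream(Stream destination, int compressionLevel, CompressionContext context) =>
        CreateCompressStream(destination, compressionLevel);

    public Stream CreateDecompressStream(Stream source) =>
        new DeflateStream(source, CompressionMode.Decompress, leaveOpen: false);

    public Stream CreateDecompressStream(Stream source, CompressionContext context) =>
        CreateDecompressStream(source);

    // Async members: creation is non-blocking, so honor the token and wrap
    // the synchronous result in a completed ValueTask.
    public ValueTask<Stream> CreateCompressStreamAsync(Stream destination, int compressionLevel, CancellationToken cancellationToken = default)
    {
        cancellationToken.ThrowIfCancellationRequested();
        return new(CreateCompressStream(destination, compressionLevel));
    }

    public ValueTask<Stream> CreateCompressStreamAsync(Stream destination, int compressionLevel, CompressionContext context, CancellationToken cancellationToken = default)
    {
        cancellationToken.ThrowIfCancellationRequested();
        return new(CreateCompressStream(destination, compressionLevel, context));
    }

    public ValueTask<Stream> CreateDecompressStreamAsync(Stream source, CancellationToken cancellationToken = default)
    {
        cancellationToken.ThrowIfCancellationRequested();
        return new(CreateDecompressStream(source));
    }

    public ValueTask<Stream> CreateDecompressStreamAsync(Stream source, CompressionContext context, CancellationToken cancellationToken = default)
    {
        cancellationToken.ThrowIfCancellationRequested();
        return new(CreateDecompressStream(source, context));
    }
}
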
View File

@@ -0,0 +1,42 @@
using System.IO;
namespace SharpCompress.Providers;
/// <summary>
/// Extended compression provider interface for formats that require initialization/finalization data.
/// </summary>
/// <remarks>
/// Some compression formats (like LZMA and PPMd in Zip) require special handling:
/// - Data written before compression starts (magic bytes, properties headers)
/// - Data written after compression completes (properties, footers)
/// This interface extends ICompressionProvider to support these initialization
/// patterns while keeping the base interface simple for formats that don't need them.
/// </remarks>
public interface ICompressionProviderHooks : ICompressionProvider
{
/// <summary>
/// Gets initialization data to write before compression starts.
/// Returns null if no pre-compression data is needed.
/// </summary>
/// <param name="context">Context information.</param>
/// <returns>Bytes to write before compression, or null.</returns>
byte[]? GetPreCompressionData(CompressionContext context);
/// <summary>
/// Gets properties/data to write after creating the compression stream but before writing data.
/// Returns null if no properties are needed.
/// </summary>
/// <param name="stream">The compression stream that was created.</param>
/// <param name="context">Context information.</param>
/// <returns>Bytes to write after stream creation, or null.</returns>
byte[]? GetCompressionProperties(Stream stream, CompressionContext context);
/// <summary>
/// Gets data to write after compression is complete.
/// Returns null if no post-compression data is needed.
/// </summary>
/// <param name="stream">The compression stream.</param>
/// <param name="context">Context information.</param>
/// <returns>Bytes to write after compression, or null.</returns>
byte[]? GetPostCompressionData(Stream stream, CompressionContext context);
}

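A sketch of the write-side ordering these hooks imply. WriteWithHooks and its parameters are hypothetical plumbing, not SharpCompress API; stream disposal and leave-open handling are elided:

using System.IO;
using SharpCompress.Providers;

// Hypothetical write path showing the order in which the hooks apply.
static void WriteWithHooks(
    ICompressionProvider provider,
    Stream destination,
    Stream payload,
    int level,
    CompressionContext context)
{
    var hooks = provider as ICompressionProviderHooks;

    // 1. Magic bytes / property headers written before compression starts.
    if (hooks?.GetPreCompressionData(context) is { } pre)
    {
        destination.Write(pre, 0, pre.Length);
    }

    var stream = provider.CreateCompressStream(destination, level, context);

    // 2. Properties only known once the stream exists (e.g. LZMA properties).
    if (hooks?.GetCompressionProperties(stream, context) is { } props)
    {
        destination.Write(props, 0, props.Length);
    }

    payload.CopyTo(stream);

    // Formats like BZip2/LZip must emit their trailer before disposal;
    // see IFinishable in the next file of this diff.
    if (stream is IFinishable finishable)
    {
        finishable.Finish();
    }

    // 3. Footer data written after compression completes.
    if (hooks?.GetPostCompressionData(stream, context) is { } post)
    {
        destination.Write(post, 0, post.Length);
    }
}
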
View File

@@ -0,0 +1,19 @@
namespace SharpCompress.Providers;
/// <summary>
/// Interface for compression streams that require explicit finalization
/// before disposal to ensure all compressed data is flushed properly.
/// </summary>
/// <remarks>
/// Some compression formats (notably BZip2 and LZip) require explicit
/// finalization to write trailer/footer data. Implementing this interface
/// allows generic code to handle finalization without knowing the specific stream type.
/// </remarks>
public interface IFinishable
{
/// <summary>
/// Finalizes the compression, flushing any remaining buffered data
/// and writing format-specific trailer/footer bytes.
/// </summary>
void Finish();
}

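The finalization idiom in isolation, for generic code that only holds a Stream (compressedStream is assumed to come from a provider's CreateCompressStream):

// After the last payload byte has been written:
if (compressedStream is IFinishable finishable)
{
    // Emits the format trailer that a bare Dispose() would not guarantee.
    finishable.Finish();
}
compressedStream.Dispose();
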
View File

@@ -0,0 +1,51 @@
using System.IO;
using System.IO.Compression;
using SharpCompress.Common;
namespace SharpCompress.Providers.System;
/// <summary>
/// Provides Deflate compression using System.IO.Compression.DeflateStream.
/// </summary>
/// <remarks>
/// On modern .NET (5+), System.IO.Compression is backed by an optimized native
/// zlib implementation and is significantly faster than SharpCompress's pure C# implementation.
/// </remarks>
public sealed class SystemDeflateCompressionProvider : CompressionProviderBase
{
public override CompressionType CompressionType => CompressionType.Deflate;
public override bool SupportsCompression => true;
public override bool SupportsDecompression => true;
public override Stream CreateCompressStream(Stream destination, int compressionLevel)
{
var bclLevel = MapCompressionLevel(compressionLevel);
return new DeflateStream(destination, bclLevel, leaveOpen: false);
}
public override Stream CreateDecompressStream(Stream source)
{
return new DeflateStream(
source,
global::System.IO.Compression.CompressionMode.Decompress,
leaveOpen: false
);
}
/// <summary>
/// Maps SharpCompress compression level (0-9) to BCL CompressionLevel.
/// </summary>
private static global::System.IO.Compression.CompressionLevel MapCompressionLevel(int level)
{
// Map 0-9 to appropriate BCL levels
return level switch
{
0 => global::System.IO.Compression.CompressionLevel.NoCompression,
<= 2 => global::System.IO.Compression.CompressionLevel.Fastest,
#if NET7_0_OR_GREATER
>= 8 => global::System.IO.Compression.CompressionLevel.SmallestSize,
#endif
_ => global::System.IO.Compression.CompressionLevel.Optimal,
};
}
}

View File

@@ -0,0 +1,51 @@
using System.IO;
using System.IO.Compression;
using SharpCompress.Common;
namespace SharpCompress.Providers.System;
/// <summary>
/// Provides GZip compression using System.IO.Compression.GZipStream.
/// </summary>
/// <remarks>
/// On modern .NET (5+), System.IO.Compression is backed by an optimized native
/// zlib implementation and is significantly faster than SharpCompress's pure C# implementation.
/// </remarks>
public sealed class SystemGZipCompressionProvider : CompressionProviderBase
{
public override CompressionType CompressionType => CompressionType.GZip;
public override bool SupportsCompression => true;
public override bool SupportsDecompression => true;
public override Stream CreateCompressStream(Stream destination, int compressionLevel)
{
var bclLevel = MapCompressionLevel(compressionLevel);
return new GZipStream(destination, bclLevel, leaveOpen: false);
}
public override Stream CreateDecompressStream(Stream source)
{
return new GZipStream(
source,
global::System.IO.Compression.CompressionMode.Decompress,
leaveOpen: false
);
}
/// <summary>
/// Maps SharpCompress compression level (0-9) to BCL CompressionLevel.
/// </summary>
private static global::System.IO.Compression.CompressionLevel MapCompressionLevel(int level)
{
// Map 0-9 to appropriate BCL levels
return level switch
{
0 => global::System.IO.Compression.CompressionLevel.NoCompression,
<= 2 => global::System.IO.Compression.CompressionLevel.Fastest,
#if NET7_0_OR_GREATER
>= 8 => global::System.IO.Compression.CompressionLevel.SmallestSize,
#endif
_ => global::System.IO.Compression.CompressionLevel.Optimal,
};
}
}

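Because the provider emits standard gzip, the output can be decoded by any gzip reader. A round-trip sketch (the payload is illustrative; level 6 falls into the Optimal branch of the mapping above):

using System.IO;
using System.IO.Compression;
using System.Text;
using SharpCompress.Providers.System;

var provider = new SystemGZipCompressionProvider();
var payload = Encoding.UTF8.GetBytes("hello, gzip");

var buffer = new MemoryStream();
using (var gz = provider.CreateCompressStream(buffer, compressionLevel: 6))
{
    gz.Write(payload, 0, payload.Length);
} // disposing flushes the gzip trailer; ToArray still works afterwards

// Decode with the BCL's own GZipStream to show the output is plain gzip.
using var decoded = new GZipStream(
    new MemoryStream(buffer.ToArray()),
    CompressionMode.Decompress
);
using var result = new MemoryStream();
decoded.CopyTo(result);
// Encoding.UTF8.GetString(result.ToArray()) == "hello, gzip"
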
View File

@@ -43,7 +43,10 @@ public abstract partial class AbstractReader<TEntry, TVolume>
}
if (_entriesForCurrentReadStreamAsync is null)
{
return await LoadStreamForReadingAsync(RequestInitialStream()).ConfigureAwait(false);
return await LoadStreamForReadingAsync(
await RequestInitialStreamAsync(cancellationToken).ConfigureAwait(false)
)
.ConfigureAwait(false);
}
if (!_wroteCurrentEntry)
{

View File

@@ -2,6 +2,8 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
@@ -140,6 +142,10 @@ public abstract partial class AbstractReader<TEntry, TVolume> : IReader, IAsyncR
protected virtual Stream RequestInitialStream() =>
Volume.NotNull("Volume isn't loaded.").Stream;
protected virtual ValueTask<Stream> RequestInitialStreamAsync(
CancellationToken cancellationToken = default
) => new(RequestInitialStream());
internal virtual bool NextEntryForCurrentStream() =>
_entriesForCurrentReadStream.NotNull().MoveNext();

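The virtual RequestInitialStreamAsync above wraps the synchronous method in a completed ValueTask; a derived reader whose first volume needs real async I/O can override it. A fragment inside such a hypothetical reader (_volumeSource and OpenFirstVolumeAsync are assumed names, not SharpCompress API):

// Inside a hypothetical derived reader:
protected override async ValueTask<Stream> RequestInitialStreamAsync(
    CancellationToken cancellationToken = default
)
{
    // _volumeSource is an assumed field; any awaitable stream factory works.
    var stream = await _volumeSource
        .OpenFirstVolumeAsync(cancellationToken)
        .ConfigureAwait(false);
    return stream;
}
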
View File

@@ -1,5 +1,7 @@
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Readers.Ace;
@@ -33,15 +35,25 @@ public partial class AceReader
return new MultiVolumeAceReader(streams, options ?? new ReaderOptions());
}
public static IAsyncReader OpenAsyncReader(string path, ReaderOptions? readerOptions = null)
public static ValueTask<IAsyncReader> OpenAsyncReader(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty(nameof(path));
return (IAsyncReader)OpenReader(new FileInfo(path), readerOptions);
return new((IAsyncReader)OpenReader(new FileInfo(path), readerOptions));
}
public static IAsyncReader OpenAsyncReader(Stream stream, ReaderOptions? readerOptions = null)
public static ValueTask<IAsyncReader> OpenAsyncReader(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
return (IAsyncReader)OpenReader(stream, readerOptions);
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncReader)OpenReader(stream, readerOptions));
}
public static IAsyncReader OpenAsyncReader(
@@ -53,12 +65,14 @@ public partial class AceReader
return new MultiVolumeAceReader(streams, options ?? new ReaderOptions());
}
public static IAsyncReader OpenAsyncReader(
public static ValueTask<IAsyncReader> OpenAsyncReader(
FileInfo fileInfo,
ReaderOptions? readerOptions = null
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
return (IAsyncReader)OpenReader(fileInfo, readerOptions);
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncReader)OpenReader(fileInfo, readerOptions));
}
public static IReader OpenReader(string filePath, ReaderOptions? readerOptions = null)

View File

@@ -1,28 +1,42 @@
#if NET8_0_OR_GREATER
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Readers.Arc;
public partial class ArcReader : IReaderOpenable
{
public static IAsyncReader OpenAsyncReader(string path, ReaderOptions? readerOptions = null)
{
path.NotNullOrEmpty(nameof(path));
return (IAsyncReader)OpenReader(new FileInfo(path), readerOptions);
}
public static IAsyncReader OpenAsyncReader(Stream stream, ReaderOptions? readerOptions = null)
{
return (IAsyncReader)OpenReader(stream, readerOptions);
}
public static IAsyncReader OpenAsyncReader(
FileInfo fileInfo,
ReaderOptions? readerOptions = null
public static ValueTask<IAsyncReader> OpenAsyncReader(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
return (IAsyncReader)OpenReader(fileInfo, readerOptions);
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty(nameof(path));
return new((IAsyncReader)OpenReader(new FileInfo(path), readerOptions));
}
public static ValueTask<IAsyncReader> OpenAsyncReader(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncReader)OpenReader(stream, readerOptions));
}
public static ValueTask<IAsyncReader> OpenAsyncReader(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncReader)OpenReader(fileInfo, readerOptions));
}
public static IReader OpenReader(string filePath, ReaderOptions? readerOptions = null)

View File

@@ -1,28 +1,42 @@
#if NET8_0_OR_GREATER
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Readers.Arj;
public partial class ArjReader : IReaderOpenable
{
public static IAsyncReader OpenAsyncReader(string path, ReaderOptions? readerOptions = null)
{
path.NotNullOrEmpty(nameof(path));
return (IAsyncReader)OpenReader(new FileInfo(path), readerOptions);
}
public static IAsyncReader OpenAsyncReader(Stream stream, ReaderOptions? readerOptions = null)
{
return (IAsyncReader)OpenReader(stream, readerOptions);
}
public static IAsyncReader OpenAsyncReader(
FileInfo fileInfo,
ReaderOptions? readerOptions = null
public static ValueTask<IAsyncReader> OpenAsyncReader(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
return (IAsyncReader)OpenReader(fileInfo, readerOptions);
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty(nameof(path));
return new((IAsyncReader)OpenReader(new FileInfo(path), readerOptions));
}
public static ValueTask<IAsyncReader> OpenAsyncReader(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncReader)OpenReader(stream, readerOptions));
}
public static ValueTask<IAsyncReader> OpenAsyncReader(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncReader)OpenReader(fileInfo, readerOptions));
}
public static IReader OpenReader(string filePath, ReaderOptions? readerOptions = null)

View File

@@ -1,4 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Readers.GZip;
@@ -7,23 +9,35 @@ public partial class GZipReader
: IReaderOpenable
#endif
{
public static IAsyncReader OpenAsyncReader(string path, ReaderOptions? readerOptions = null)
{
path.NotNullOrEmpty(nameof(path));
return (IAsyncReader)OpenReader(new FileInfo(path), readerOptions);
}
public static IAsyncReader OpenAsyncReader(Stream stream, ReaderOptions? readerOptions = null)
{
return (IAsyncReader)OpenReader(stream, readerOptions);
}
public static IAsyncReader OpenAsyncReader(
FileInfo fileInfo,
ReaderOptions? readerOptions = null
public static ValueTask<IAsyncReader> OpenAsyncReader(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
return (IAsyncReader)OpenReader(fileInfo, readerOptions);
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty(nameof(path));
return new((IAsyncReader)OpenReader(new FileInfo(path), readerOptions));
}
public static ValueTask<IAsyncReader> OpenAsyncReader(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncReader)OpenReader(stream, readerOptions));
}
public static ValueTask<IAsyncReader> OpenAsyncReader(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncReader)OpenReader(fileInfo, readerOptions));
}
public static IReader OpenReader(string filePath, ReaderOptions? readerOptions = null)

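Call sites now await the open call, and the token is checked before any work happens. A minimal sketch ("data.gz" and the token source are illustrative; entry iteration on IAsyncReader is outside this diff):

using System.Threading;
using SharpCompress.Readers.GZip;

using var cts = new CancellationTokenSource();

// Previously this returned IAsyncReader directly; the ValueTask-returning
// overload throws up front if cts.Token is already canceled.
var reader = await GZipReader.OpenAsyncReader("data.gz", cancellationToken: cts.Token);
// ... read entries, then dispose the reader per its lifetime rules.
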
View File

@@ -9,18 +9,18 @@ public interface IReaderFactory : Factories.IFactory
/// <summary>
/// Opens a Reader for Non-seeking usage.
/// </summary>
/// <param name="stream"></param>
/// <param name="options"></param>
/// <returns></returns>
/// <param name="stream">An open, readable stream.</param>
/// <param name="options">Reader options.</param>
/// <returns>The opened reader.</returns>
IReader OpenReader(Stream stream, ReaderOptions? options);
/// <summary>
/// Opens a Reader for Non-seeking usage asynchronously.
/// </summary>
/// <param name="stream"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
/// <returns></returns>
/// <param name="stream">An open, readable stream.</param>
/// <param name="options">Reader options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>A <see cref="ValueTask{TResult}"/> containing the opened async reader.</returns>
ValueTask<IAsyncReader> OpenAsyncReader(
Stream stream,
ReaderOptions? options,

Some files were not shown because too many files have changed in this diff.