Compare commits

..

5 Commits

Author SHA1 Message Date
copilot-swe-agent[bot]
89420d43cf Fix compressed TAR formats broken in ArchiveFactory
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-02-11 17:58:25 +00:00
copilot-swe-agent[bot]
0696bf5efc Complete fix for compressed TAR formats issue
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-02-11 17:55:10 +00:00
copilot-swe-agent[bot]
3ad39f96da Remove redundant Rewind() call after StartRecording()
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-02-11 17:52:28 +00:00
copilot-swe-agent[bot]
649729d520 Fix compressed TAR formats in ArchiveFactory
- Detect compressed TAR formats (gz, bz2, xz, lz, zst, Z) in TarFactory.OpenArchive
- Decompress to MemoryStream for Archive API seekability requirement
- Handle async-only streams by skipping format detection
- Add tests for all compressed TAR formats with ArchiveFactory

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-02-11 17:50:16 +00:00
copilot-swe-agent[bot]
31ed7b822e Initial plan 2026-02-11 17:40:01 +00:00
74 changed files with 2861 additions and 3771 deletions

View File

@@ -53,9 +53,9 @@ jobs:
name: ${{ matrix.os }}-nuget-package
path: artifacts/*.nupkg
# Push to NuGet.org only for version tag pushes (Windows only)
# Push to NuGet.org using C# build target (Windows only, not on PRs)
- name: Push to NuGet
if: success() && matrix.os == 'windows-latest' && startsWith(github.ref, 'refs/tags/')
if: success() && matrix.os == 'windows-latest' && github.event_name != 'pull_request'
run: dotnet run --project build/build.csproj -- push-to-nuget
env:
NUGET_API_KEY: ${{ secrets.NUGET_API_KEY }}

View File

@@ -103,11 +103,8 @@ tests/
### Factory Pattern
Factory implementations can implement one or more interfaces (`IArchiveFactory`, `IReaderFactory`, `IWriterFactory`) depending on format capabilities:
- `ArchiveFactory.OpenArchive()` - Opens archive API objects from seekable streams/files
- `ArchiveFactory.OpenAsyncArchive()` - Opens async archive API objects for async archive use cases
- `ReaderFactory.OpenReader()` - Auto-detects and opens forward-only readers
- `ReaderFactory.OpenAsyncReader()` - Auto-detects and opens forward-only async readers
- `WriterFactory.OpenWriter()` - Creates a writer for a specified `ArchiveType`
- `WriterFactory.OpenAsyncWriter()` - Creates an async writer for async write scenarios
- Factories located in: `src/SharpCompress/Factories/`
## Nullable Reference Types
@@ -135,9 +132,6 @@ SharpCompress supports multiple archive and compression formats:
### Async/Await Patterns
- All I/O operations support async/await with `CancellationToken`
- Async methods follow the naming convention: `MethodNameAsync`
- For async archive scenarios, prefer `ArchiveFactory.OpenAsyncArchive(...)` over sync `OpenArchive(...)`.
- For async forward-only read scenarios, prefer `ReaderFactory.OpenAsyncReader(...)` over sync `OpenReader(...)`.
- For async write scenarios, prefer `WriterFactory.OpenAsyncWriter(...)` over sync `OpenWriter(...)`.
- Key async methods:
- `WriteEntryToAsync` - Extract entry asynchronously
- `WriteAllToDirectoryAsync` - Extract all entries asynchronously
@@ -205,8 +199,7 @@ SharpCompress supports multiple archive and compression formats:
## Common Pitfalls
1. **Don't mix Archive and Reader APIs** - Archive needs seekable stream, Reader doesn't
2. **Don't mix sync and async open paths** - For async workflows use `OpenAsyncArchive`/`OpenAsyncReader`/`OpenAsyncWriter`, not `OpenArchive`/`OpenReader`/`OpenWriter`
3. **Solid archives (Rar, 7Zip)** - Use `ExtractAllEntries()` for best performance, not individual entry extraction
4. **Stream disposal** - Always set `LeaveStreamOpen` explicitly when needed (default is to close)
5. **Tar + non-seekable stream** - Must provide file size or it will throw
6. **Format detection** - Use `ReaderFactory.OpenReader()` / `ReaderFactory.OpenAsyncReader()` for auto-detection, test with actual archive files
2. **Solid archives (Rar, 7Zip)** - Use `ExtractAllEntries()` for best performance, not individual entry extraction
3. **Stream disposal** - Always set `LeaveStreamOpen` explicitly when needed (default is to close)
4. **Tar + non-seekable stream** - Must provide file size or it will throw
5. **Format detection** - Use `ReaderFactory.OpenReader()` for auto-detection, test with actual archive files

View File

@@ -95,7 +95,7 @@ using (var archive = ZipArchive.OpenArchive("file.zip"))
}
// Async extraction (requires IAsyncArchive)
await using (var asyncArchive = await ZipArchive.OpenAsyncArchive("file.zip"))
using (var asyncArchive = await ZipArchive.OpenAsyncArchive("file.zip"))
{
await asyncArchive.WriteToDirectoryAsync(
@"C:\output",
@@ -177,7 +177,7 @@ using (var reader = ReaderFactory.OpenReader(stream))
// Async variants (use OpenAsyncReader to get IAsyncReader)
using (var stream = File.OpenRead("file.zip"))
await using (var reader = await ReaderFactory.OpenAsyncReader(stream))
using (var reader = await ReaderFactory.OpenAsyncReader(stream))
{
while (await reader.MoveToNextEntryAsync())
{
@@ -409,7 +409,7 @@ cts.CancelAfter(TimeSpan.FromMinutes(5));
try
{
await using (var archive = await ZipArchive.OpenAsyncArchive("archive.zip"))
using (var archive = await ZipArchive.OpenAsyncArchive("archive.zip"))
{
await archive.WriteToDirectoryAsync(
@"C:\output",

View File

@@ -22,9 +22,7 @@ public static partial class ArchiveFactory
readerOptions ??= ReaderOptions.ForExternalStream;
var factory = await FindFactoryAsync<IArchiveFactory>(stream, cancellationToken)
.ConfigureAwait(false);
return await factory
.OpenAsyncArchive(stream, readerOptions, cancellationToken)
.ConfigureAwait(false);
return factory.OpenAsyncArchive(stream, readerOptions);
}
public static ValueTask<IAsyncArchive> OpenAsyncArchive(
@@ -47,9 +45,7 @@ public static partial class ArchiveFactory
var factory = await FindFactoryAsync<IArchiveFactory>(fileInfo, cancellationToken)
.ConfigureAwait(false);
return await factory
.OpenAsyncArchive(fileInfo, options, cancellationToken)
.ConfigureAwait(false);
return factory.OpenAsyncArchive(fileInfo, options);
}
public static async ValueTask<IAsyncArchive> OpenAsyncArchive(

View File

@@ -2,9 +2,12 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using SharpCompress.Factories;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives;

View File

@@ -20,15 +20,14 @@ public partial class GZipArchive
>
#endif
{
public static ValueTask<IWritableAsyncArchive<GZipWriterOptions>> OpenAsyncArchive(
public static IWritableAsyncArchive<GZipWriterOptions> OpenAsyncArchive(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
)
{
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty(nameof(path));
return OpenAsyncArchive(new FileInfo(path), readerOptions, cancellationToken);
return (IWritableAsyncArchive<GZipWriterOptions>)
OpenArchive(new FileInfo(path), readerOptions ?? new ReaderOptions());
}
public static IWritableArchive<GZipWriterOptions> OpenArchive(
@@ -104,50 +103,30 @@ public partial class GZipArchive
);
}
public static ValueTask<IWritableAsyncArchive<GZipWriterOptions>> OpenAsyncArchive(
public static IWritableAsyncArchive<GZipWriterOptions> OpenAsyncArchive(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IWritableAsyncArchive<GZipWriterOptions>)OpenArchive(stream, readerOptions));
}
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<GZipWriterOptions>)OpenArchive(stream, readerOptions);
public static ValueTask<IWritableAsyncArchive<GZipWriterOptions>> OpenAsyncArchive(
public static IWritableAsyncArchive<GZipWriterOptions> OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IWritableAsyncArchive<GZipWriterOptions>)OpenArchive(fileInfo, readerOptions));
}
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<GZipWriterOptions>)OpenArchive(fileInfo, readerOptions);
public static ValueTask<IWritableAsyncArchive<GZipWriterOptions>> OpenAsyncArchive(
public static IWritableAsyncArchive<GZipWriterOptions> OpenAsyncArchive(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IWritableAsyncArchive<GZipWriterOptions>)OpenArchive(streams, readerOptions));
}
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<GZipWriterOptions>)OpenArchive(streams, readerOptions);
public static ValueTask<IWritableAsyncArchive<GZipWriterOptions>> OpenAsyncArchive(
public static IWritableAsyncArchive<GZipWriterOptions> OpenAsyncArchive(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IWritableAsyncArchive<GZipWriterOptions>)OpenArchive(fileInfos, readerOptions));
}
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<GZipWriterOptions>)OpenArchive(fileInfos, readerOptions);
public static IWritableArchive<GZipWriterOptions> CreateArchive() => new GZipArchive();
public static ValueTask<IWritableAsyncArchive<GZipWriterOptions>> CreateAsyncArchive() =>
new(new GZipArchive());
public static IWritableAsyncArchive<GZipWriterOptions> CreateAsyncArchive() =>
new GZipArchive();
public static bool IsGZipFile(string filePath) => IsGZipFile(new FileInfo(filePath));

View File

@@ -1,6 +1,5 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Factories;
using SharpCompress.Readers;
@@ -33,13 +32,7 @@ public interface IArchiveFactory : IFactory
/// </summary>
/// <param name="stream">An open, readable and seekable stream.</param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>A <see cref="ValueTask{TResult}"/> containing the opened async archive.</returns>
ValueTask<IAsyncArchive> OpenAsyncArchive(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
IAsyncArchive OpenAsyncArchive(Stream stream, ReaderOptions? readerOptions = null);
/// <summary>
/// Constructor with a FileInfo object to an existing file.
@@ -54,10 +47,5 @@ public interface IArchiveFactory : IFactory
/// <param name="fileInfo">the file to open.</param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>A <see cref="ValueTask{TResult}"/> containing the opened async archive.</returns>
ValueTask<IAsyncArchive> OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
IAsyncArchive OpenAsyncArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null);
}

View File

@@ -1,7 +1,6 @@
#if NET8_0_OR_GREATER
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
@@ -19,22 +18,19 @@ public interface IArchiveOpenable<TSync, TASync>
public static abstract TSync OpenArchive(Stream stream, ReaderOptions? readerOptions = null);
public static abstract ValueTask<TASync> OpenAsyncArchive(
public static abstract TASync OpenAsyncArchive(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
);
public static abstract ValueTask<TASync> OpenAsyncArchive(
public static abstract TASync OpenAsyncArchive(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
);
public static abstract ValueTask<TASync> OpenAsyncArchive(
public static abstract TASync OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
);
}

View File

@@ -2,7 +2,6 @@
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
@@ -21,16 +20,14 @@ public interface IMultiArchiveOpenable<TSync, TASync>
ReaderOptions? readerOptions = null
);
public static abstract ValueTask<TASync> OpenAsyncArchive(
public static abstract TASync OpenAsyncArchive(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
);
public static abstract ValueTask<TASync> OpenAsyncArchive(
public static abstract TASync OpenAsyncArchive(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
);
}
#endif

View File

@@ -1,4 +1,3 @@
using System.Threading.Tasks;
using SharpCompress.Common.Options;
#if NET8_0_OR_GREATER
@@ -9,6 +8,6 @@ public interface IWritableArchiveOpenable<TOptions>
where TOptions : IWriterOptions
{
public static abstract IWritableArchive<TOptions> CreateArchive();
public static abstract ValueTask<IWritableAsyncArchive<TOptions>> CreateAsyncArchive();
public static abstract IWritableAsyncArchive<TOptions> CreateAsyncArchive();
}
#endif

View File

@@ -20,15 +20,13 @@ public partial class RarArchive
IMultiArchiveOpenable<IRarArchive, IRarAsyncArchive>
#endif
{
public static ValueTask<IRarAsyncArchive> OpenAsyncArchive(
public static IRarAsyncArchive OpenAsyncArchive(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
)
{
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty(nameof(path));
return new((IRarAsyncArchive)OpenArchive(new FileInfo(path), readerOptions));
return (IRarAsyncArchive)OpenArchive(new FileInfo(path), readerOptions);
}
public static IRarArchive OpenArchive(string filePath, ReaderOptions? options = null)
@@ -100,44 +98,36 @@ public partial class RarArchive
);
}
public static ValueTask<IRarAsyncArchive> OpenAsyncArchive(
public static IRarAsyncArchive OpenAsyncArchive(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IRarAsyncArchive)OpenArchive(stream, readerOptions));
return (IRarAsyncArchive)OpenArchive(stream, readerOptions);
}
public static ValueTask<IRarAsyncArchive> OpenAsyncArchive(
public static IRarAsyncArchive OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IRarAsyncArchive)OpenArchive(fileInfo, readerOptions));
return (IRarAsyncArchive)OpenArchive(fileInfo, readerOptions);
}
public static ValueTask<IRarAsyncArchive> OpenAsyncArchive(
public static IRarAsyncArchive OpenAsyncArchive(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IRarAsyncArchive)OpenArchive(streams, readerOptions));
return (IRarAsyncArchive)OpenArchive(streams, readerOptions);
}
public static ValueTask<IRarAsyncArchive> OpenAsyncArchive(
public static IRarAsyncArchive OpenAsyncArchive(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IRarAsyncArchive)OpenArchive(fileInfos, readerOptions));
return (IRarAsyncArchive)OpenArchive(fileInfos, readerOptions);
}
public static bool IsRarFile(string filePath) => IsRarFile(new FileInfo(filePath));

View File

@@ -16,17 +16,10 @@ public partial class SevenZipArchive
IMultiArchiveOpenable<IArchive, IAsyncArchive>
#endif
{
public static ValueTask<IAsyncArchive> OpenAsyncArchive(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
public static IAsyncArchive OpenAsyncArchive(string path, ReaderOptions? readerOptions = null)
{
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty("path");
return new(
(IAsyncArchive)OpenArchive(new FileInfo(path), readerOptions ?? new ReaderOptions())
);
return (IAsyncArchive)OpenArchive(new FileInfo(path), readerOptions ?? new ReaderOptions());
}
public static IArchive OpenArchive(string filePath, ReaderOptions? readerOptions = null)
@@ -93,44 +86,33 @@ public partial class SevenZipArchive
);
}
public static ValueTask<IAsyncArchive> OpenAsyncArchive(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
public static IAsyncArchive OpenAsyncArchive(Stream stream, ReaderOptions? readerOptions = null)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncArchive)OpenArchive(stream, readerOptions));
return (IAsyncArchive)OpenArchive(stream, readerOptions);
}
public static ValueTask<IAsyncArchive> OpenAsyncArchive(
public static IAsyncArchive OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncArchive)OpenArchive(fileInfo, readerOptions));
return (IAsyncArchive)OpenArchive(fileInfo, readerOptions);
}
public static ValueTask<IAsyncArchive> OpenAsyncArchive(
public static IAsyncArchive OpenAsyncArchive(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncArchive)OpenArchive(streams, readerOptions));
return (IAsyncArchive)OpenArchive(streams, readerOptions);
}
public static ValueTask<IAsyncArchive> OpenAsyncArchive(
public static IAsyncArchive OpenAsyncArchive(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncArchive)OpenArchive(fileInfos, readerOptions));
return (IAsyncArchive)OpenArchive(fileInfos, readerOptions);
}
public static bool IsSevenZipFile(string filePath) => IsSevenZipFile(new FileInfo(filePath));

View File

@@ -7,7 +7,6 @@ using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Factories;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Writers.Tar;
@@ -38,9 +37,12 @@ public partial class TarArchive
)
{
fileInfo.NotNull(nameof(fileInfo));
return OpenArchive(
[fileInfo],
readerOptions ?? new ReaderOptions() { LeaveStreamOpen = false }
return new TarArchive(
new SourceStream(
fileInfo,
i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
readerOptions ?? new ReaderOptions() { LeaveStreamOpen = false }
)
);
}
@@ -51,14 +53,13 @@ public partial class TarArchive
{
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
var sourceStream = new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions() { LeaveStreamOpen = false }
return new TarArchive(
new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions() { LeaveStreamOpen = false }
)
);
var compressionType = TarFactory.GetCompressionType(sourceStream);
sourceStream.Seek(0, SeekOrigin.Begin);
return new TarArchive(sourceStream, compressionType);
}
public static IWritableArchive<TarWriterOptions> OpenArchive(
@@ -68,14 +69,13 @@ public partial class TarArchive
{
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
var sourceStream = new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
return new TarArchive(
new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
)
);
var compressionType = TarFactory.GetCompressionType(sourceStream);
sourceStream.Seek(0, SeekOrigin.Begin);
return new TarArchive(sourceStream, compressionType);
}
public static IWritableArchive<TarWriterOptions> OpenArchive(
@@ -90,97 +90,35 @@ public partial class TarArchive
throw new ArgumentException("Stream must be seekable", nameof(stream));
}
return OpenArchive([stream], readerOptions);
return new TarArchive(
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
);
}
public static async ValueTask<IWritableAsyncArchive<TarWriterOptions>> OpenAsyncArchive(
public static IWritableAsyncArchive<TarWriterOptions> OpenAsyncArchive(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
stream.NotNull(nameof(stream));
var sourceStream = new SourceStream(
stream,
i => null,
readerOptions ?? new ReaderOptions()
);
var compressionType = await TarFactory
.GetCompressionTypeAsync(sourceStream, cancellationToken)
.ConfigureAwait(false);
sourceStream.Seek(0, SeekOrigin.Begin);
return new TarArchive(sourceStream, compressionType);
}
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<TarWriterOptions>)OpenArchive(stream, readerOptions);
public static ValueTask<IWritableAsyncArchive<TarWriterOptions>> OpenAsyncArchive(
public static IWritableAsyncArchive<TarWriterOptions> OpenAsyncArchive(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty(nameof(path));
return OpenAsyncArchive(new FileInfo(path), readerOptions, cancellationToken);
}
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<TarWriterOptions>)OpenArchive(new FileInfo(path), readerOptions);
public static async ValueTask<IWritableAsyncArchive<TarWriterOptions>> OpenAsyncArchive(
public static IWritableAsyncArchive<TarWriterOptions> OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
fileInfo.NotNull(nameof(fileInfo));
readerOptions ??= new ReaderOptions() { LeaveStreamOpen = false };
var sourceStream = new SourceStream(fileInfo, i => null, readerOptions);
var compressionType = await TarFactory
.GetCompressionTypeAsync(sourceStream, cancellationToken)
.ConfigureAwait(false);
sourceStream.Seek(0, SeekOrigin.Begin);
return new TarArchive(sourceStream, compressionType);
}
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<TarWriterOptions>)OpenArchive(fileInfo, readerOptions);
public static async ValueTask<IWritableAsyncArchive<TarWriterOptions>> OpenAsyncArchive(
public static IWritableAsyncArchive<TarWriterOptions> OpenAsyncArchive(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
var sourceStream = new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
);
var compressionType = await TarFactory
.GetCompressionTypeAsync(sourceStream, cancellationToken)
.ConfigureAwait(false);
sourceStream.Seek(0, SeekOrigin.Begin);
return new TarArchive(sourceStream, compressionType);
}
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<TarWriterOptions>)OpenArchive(streams, readerOptions);
public static async ValueTask<IWritableAsyncArchive<TarWriterOptions>> OpenAsyncArchive(
public static IWritableAsyncArchive<TarWriterOptions> OpenAsyncArchive(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
var sourceStream = new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions() { LeaveStreamOpen = false }
);
var compressionType = await TarFactory
.GetCompressionTypeAsync(sourceStream, cancellationToken)
.ConfigureAwait(false);
sourceStream.Seek(0, SeekOrigin.Begin);
return new TarArchive(sourceStream, compressionType);
}
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<TarWriterOptions>)OpenArchive(fileInfos, readerOptions);
public static bool IsTarFile(string filePath) => IsTarFile(new FileInfo(filePath));
@@ -245,6 +183,5 @@ public partial class TarArchive
public static IWritableArchive<TarWriterOptions> CreateArchive() => new TarArchive();
public static ValueTask<IWritableAsyncArchive<TarWriterOptions>> CreateAsyncArchive() =>
new(new TarArchive());
public static IWritableAsyncArchive<TarWriterOptions> CreateAsyncArchive() => new TarArchive();
}

View File

@@ -2,60 +2,38 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Options;
using SharpCompress.Common.Tar;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Compressors.BZip2;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.Lzw;
using SharpCompress.Compressors.Xz;
using SharpCompress.Compressors.ZStandard;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.Tar;
using SharpCompress.Writers;
using SharpCompress.Writers.Tar;
using CompressionMode = SharpCompress.Compressors.CompressionMode;
using Constants = SharpCompress.Common.Constants;
namespace SharpCompress.Archives.Tar;
public partial class TarArchive
: AbstractWritableArchive<TarArchiveEntry, TarVolume, TarWriterOptions>
{
private readonly CompressionType _compressionType;
protected override IEnumerable<TarVolume> LoadVolumes(SourceStream sourceStream)
{
sourceStream.NotNull("SourceStream is null").LoadAllParts();
return new TarVolume(sourceStream, ReaderOptions, 1).AsEnumerable();
}
internal TarArchive(SourceStream sourceStream, CompressionType compressionType)
: base(ArchiveType.Tar, sourceStream)
{
_compressionType = compressionType;
}
private TarArchive(SourceStream sourceStream)
: base(ArchiveType.Tar, sourceStream) { }
private TarArchive()
: base(ArchiveType.Tar) { }
private Stream GetStream(Stream stream) =>
_compressionType switch
{
CompressionType.BZip2 => BZip2Stream.Create(stream, CompressionMode.Decompress, false),
CompressionType.GZip => new GZipStream(stream, CompressionMode.Decompress),
CompressionType.ZStandard => new ZStandardStream(stream),
CompressionType.LZip => new LZipStream(stream, CompressionMode.Decompress),
CompressionType.Xz => new XZStream(stream),
CompressionType.Lzw => new LzwStream(stream),
CompressionType.None => stream,
_ => throw new NotSupportedException("Invalid compression type: " + _compressionType),
};
protected override IEnumerable<TarArchiveEntry> LoadEntries(IEnumerable<TarVolume> volumes)
{
var stream = GetStream(volumes.Single().Stream);
var stream = volumes.Single().Stream;
if (stream.CanSeek)
{
stream.Position = 0;
@@ -63,9 +41,7 @@ public partial class TarArchive
TarHeader? previousHeader = null;
foreach (
var header in TarHeaderFactory.ReadHeader(
_compressionType == CompressionType.None
? StreamingMode.Seekable
: StreamingMode.Streaming,
StreamingMode.Seekable,
stream,
ReaderOptions.ArchiveEncoding
)
@@ -178,6 +154,6 @@ public partial class TarArchive
{
var stream = Volumes.Single().Stream;
stream.Position = 0;
return TarReader.OpenReader(GetStream(stream));
return TarReader.OpenReader(stream);
}
}

View File

@@ -95,55 +95,30 @@ public partial class ZipArchive
);
}
public static ValueTask<IWritableAsyncArchive<ZipWriterOptions>> OpenAsyncArchive(
public static IWritableAsyncArchive<ZipWriterOptions> OpenAsyncArchive(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IWritableAsyncArchive<ZipWriterOptions>)OpenArchive(path, readerOptions));
}
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<ZipWriterOptions>)OpenArchive(path, readerOptions);
public static ValueTask<IWritableAsyncArchive<ZipWriterOptions>> OpenAsyncArchive(
public static IWritableAsyncArchive<ZipWriterOptions> OpenAsyncArchive(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IWritableAsyncArchive<ZipWriterOptions>)OpenArchive(stream, readerOptions));
}
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<ZipWriterOptions>)OpenArchive(stream, readerOptions);
public static ValueTask<IWritableAsyncArchive<ZipWriterOptions>> OpenAsyncArchive(
public static IWritableAsyncArchive<ZipWriterOptions> OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IWritableAsyncArchive<ZipWriterOptions>)OpenArchive(fileInfo, readerOptions));
}
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<ZipWriterOptions>)OpenArchive(fileInfo, readerOptions);
public static ValueTask<IWritableAsyncArchive<ZipWriterOptions>> OpenAsyncArchive(
public static IWritableAsyncArchive<ZipWriterOptions> OpenAsyncArchive(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IWritableAsyncArchive<ZipWriterOptions>)OpenArchive(streams, readerOptions));
}
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<ZipWriterOptions>)OpenArchive(streams, readerOptions);
public static ValueTask<IWritableAsyncArchive<ZipWriterOptions>> OpenAsyncArchive(
public static IWritableAsyncArchive<ZipWriterOptions> OpenAsyncArchive(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IWritableAsyncArchive<ZipWriterOptions>)OpenArchive(fileInfos, readerOptions));
}
ReaderOptions? readerOptions = null
) => (IWritableAsyncArchive<ZipWriterOptions>)OpenArchive(fileInfos, readerOptions);
public static bool IsZipFile(string filePath, string? password = null) =>
IsZipFile(new FileInfo(filePath), password);
@@ -248,8 +223,7 @@ public partial class ZipArchive
public static IWritableArchive<ZipWriterOptions> CreateArchive() => new ZipArchive();
public static ValueTask<IWritableAsyncArchive<ZipWriterOptions>> CreateAsyncArchive() =>
new(new ZipArchive());
public static IWritableAsyncArchive<ZipWriterOptions> CreateAsyncArchive() => new ZipArchive();
public static async ValueTask<bool> IsZipMultiAsync(
Stream stream,

View File

@@ -10,10 +10,6 @@ public class ZipEntry : Entry
{
private readonly ZipFilePart? _filePart;
// WinZip AES extra data constants
private const int MinimumWinZipAesExtraDataLength = 7;
private const int WinZipAesCompressionMethodOffset = 5;
internal ZipEntry(ZipFilePart? filePart, IReaderOptions readerOptions)
: base(readerOptions)
{
@@ -37,54 +33,24 @@ public class ZipEntry : Entry
CreatedTime = times?.UnicodeTimes.Item3;
}
public override CompressionType CompressionType
{
get
public override CompressionType CompressionType =>
_filePart?.Header.CompressionMethod switch
{
var compressionMethod = GetActualCompressionMethod();
return compressionMethod switch
{
ZipCompressionMethod.BZip2 => CompressionType.BZip2,
ZipCompressionMethod.Deflate => CompressionType.Deflate,
ZipCompressionMethod.Deflate64 => CompressionType.Deflate64,
ZipCompressionMethod.LZMA => CompressionType.LZMA,
ZipCompressionMethod.PPMd => CompressionType.PPMd,
ZipCompressionMethod.None => CompressionType.None,
ZipCompressionMethod.Shrink => CompressionType.Shrink,
ZipCompressionMethod.Reduce1 => CompressionType.Reduce1,
ZipCompressionMethod.Reduce2 => CompressionType.Reduce2,
ZipCompressionMethod.Reduce3 => CompressionType.Reduce3,
ZipCompressionMethod.Reduce4 => CompressionType.Reduce4,
ZipCompressionMethod.Explode => CompressionType.Explode,
ZipCompressionMethod.ZStandard => CompressionType.ZStandard,
_ => CompressionType.Unknown,
};
}
}
private ZipCompressionMethod GetActualCompressionMethod()
{
if (_filePart?.Header.CompressionMethod != ZipCompressionMethod.WinzipAes)
{
return _filePart?.Header.CompressionMethod ?? ZipCompressionMethod.None;
}
// For WinZip AES, the actual compression method is stored in the extra data
var aesExtraData = _filePart.Header.Extra.FirstOrDefault(x =>
x.Type == ExtraDataType.WinZipAes
);
if (aesExtraData is null || aesExtraData.DataBytes.Length < MinimumWinZipAesExtraDataLength)
{
return ZipCompressionMethod.WinzipAes;
}
// The compression method is at offset 5 in the extra data
return (ZipCompressionMethod)
System.Buffers.Binary.BinaryPrimitives.ReadUInt16LittleEndian(
aesExtraData.DataBytes.AsSpan(WinZipAesCompressionMethodOffset)
);
}
ZipCompressionMethod.BZip2 => CompressionType.BZip2,
ZipCompressionMethod.Deflate => CompressionType.Deflate,
ZipCompressionMethod.Deflate64 => CompressionType.Deflate64,
ZipCompressionMethod.LZMA => CompressionType.LZMA,
ZipCompressionMethod.PPMd => CompressionType.PPMd,
ZipCompressionMethod.None => CompressionType.None,
ZipCompressionMethod.Shrink => CompressionType.Shrink,
ZipCompressionMethod.Reduce1 => CompressionType.Reduce1,
ZipCompressionMethod.Reduce2 => CompressionType.Reduce2,
ZipCompressionMethod.Reduce3 => CompressionType.Reduce3,
ZipCompressionMethod.Reduce4 => CompressionType.Reduce4,
ZipCompressionMethod.Explode => CompressionType.Explode,
ZipCompressionMethod.ZStandard => CompressionType.ZStandard,
_ => CompressionType.Unknown,
};
public override long Crc => _filePart?.Header.Crc ?? 0;

View File

@@ -1,603 +0,0 @@
using System;
using System.Buffers;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Compressors.Rar.UnpackV1.Decode;
using SharpCompress.Compressors.Rar.UnpackV1.PPM;
using SharpCompress.Compressors.Rar.VM;
namespace SharpCompress.Compressors.Rar.UnpackV1;
internal sealed partial class Unpack
{
public async Task DoUnpackAsync(
FileHeader fileHeader,
Stream readStream,
Stream writeStream,
CancellationToken cancellationToken = default
)
{
destUnpSize = fileHeader.UncompressedSize;
this.fileHeader = fileHeader;
this.readStream = readStream;
this.writeStream = writeStream;
if (!fileHeader.IsSolid)
{
Init();
}
suspended = false;
await DoUnpackAsync(cancellationToken).ConfigureAwait(false);
}
public async Task DoUnpackAsync(CancellationToken cancellationToken = default)
{
if (fileHeader.CompressionMethod == 0)
{
await UnstoreFileAsync(cancellationToken).ConfigureAwait(false);
return;
}
switch (fileHeader.CompressionAlgorithm)
{
case 15:
await unpack15Async(fileHeader.IsSolid, cancellationToken).ConfigureAwait(false);
break;
case 20:
case 26:
await unpack20Async(fileHeader.IsSolid, cancellationToken).ConfigureAwait(false);
break;
case 29:
case 36:
await Unpack29Async(fileHeader.IsSolid, cancellationToken).ConfigureAwait(false);
break;
case 50:
await Unpack5Async(fileHeader.IsSolid, cancellationToken).ConfigureAwait(false);
break;
default:
throw new InvalidFormatException(
"unknown rar compression version " + fileHeader.CompressionAlgorithm
);
}
}
private async Task UnstoreFileAsync(CancellationToken cancellationToken = default)
{
var buffer = new byte[(int)Math.Min(0x10000, destUnpSize)];
do
{
var code = await readStream
.ReadAsync(buffer, 0, buffer.Length, cancellationToken)
.ConfigureAwait(false);
if (code == 0 || code == -1)
{
break;
}
code = code < destUnpSize ? code : (int)destUnpSize;
await writeStream.WriteAsync(buffer, 0, code, cancellationToken).ConfigureAwait(false);
destUnpSize -= code;
} while (!suspended && destUnpSize > 0);
}
private async Task Unpack29Async(bool solid, CancellationToken cancellationToken = default)
{
int[] DDecode = new int[PackDef.DC];
byte[] DBits = new byte[PackDef.DC];
int Bits;
if (DDecode[1] == 0)
{
int Dist = 0,
BitLength = 0,
Slot = 0;
for (var I = 0; I < DBitLengthCounts.Length; I++, BitLength++)
{
var count = DBitLengthCounts[I];
for (var J = 0; J < count; J++, Slot++, Dist += (1 << BitLength))
{
DDecode[Slot] = Dist;
DBits[Slot] = (byte)BitLength;
}
}
}
FileExtracted = true;
if (!suspended)
{
UnpInitData(solid);
if (!await unpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return;
}
if ((!solid || !tablesRead) && !ReadTables())
{
return;
}
}
if (ppmError)
{
return;
}
while (true)
{
unpPtr &= PackDef.MAXWINMASK;
if (inAddr > readBorder)
{
if (!await unpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
break;
}
}
if (((wrPtr - unpPtr) & PackDef.MAXWINMASK) < 260 && wrPtr != unpPtr)
{
UnpWriteBuf();
if (destUnpSize < 0)
{
return;
}
if (suspended)
{
FileExtracted = false;
return;
}
}
if (unpBlockType == BlockTypes.BLOCK_PPM)
{
var Ch = ppm.DecodeChar();
if (Ch == -1)
{
ppmError = true;
break;
}
if (Ch == PpmEscChar)
{
var NextCh = ppm.DecodeChar();
if (NextCh == 0)
{
if (!ReadTables())
{
break;
}
continue;
}
if (NextCh == 2 || NextCh == -1)
{
break;
}
if (NextCh == 3)
{
if (!ReadVMCodePPM())
{
break;
}
continue;
}
if (NextCh == 4)
{
int Distance = 0,
Length = 0;
var failed = false;
for (var I = 0; I < 4 && !failed; I++)
{
var ch = ppm.DecodeChar();
if (ch == -1)
{
failed = true;
}
else
{
if (I == 3)
{
Length = ch & 0xff;
}
else
{
Distance = (Distance << 8) + (ch & 0xff);
}
}
}
if (failed)
{
break;
}
CopyString(Length + 32, Distance + 2);
continue;
}
if (NextCh == 5)
{
var Length = ppm.DecodeChar();
if (Length == -1)
{
break;
}
CopyString(Length + 4, 1);
continue;
}
}
window[unpPtr++] = (byte)Ch;
continue;
}
var Number = this.decodeNumber(LD);
if (Number < 256)
{
window[unpPtr++] = (byte)Number;
continue;
}
if (Number >= 271)
{
var Length = LDecode[Number -= 271] + 3;
if ((Bits = LBits[Number]) > 0)
{
Length += Utility.URShift(GetBits(), (16 - Bits));
AddBits(Bits);
}
var DistNumber = this.decodeNumber(DD);
var Distance = DDecode[DistNumber] + 1;
if ((Bits = DBits[DistNumber]) > 0)
{
if (DistNumber > 9)
{
if (Bits > 4)
{
Distance += ((Utility.URShift(GetBits(), (20 - Bits))) << 4);
AddBits(Bits - 4);
}
if (lowDistRepCount > 0)
{
lowDistRepCount--;
Distance += prevLowDist;
}
else
{
var LowDist = this.decodeNumber(LDD);
if (LowDist == 16)
{
lowDistRepCount = PackDef.LOW_DIST_REP_COUNT - 1;
Distance += prevLowDist;
}
else
{
Distance += LowDist;
prevLowDist = LowDist;
}
}
}
else
{
Distance += Utility.URShift(GetBits(), (16 - Bits));
AddBits(Bits);
}
}
if (Distance >= 0x2000)
{
Length++;
if (Distance >= 0x40000L)
{
Length++;
}
}
InsertOldDist(Distance);
InsertLastMatch(Length, Distance);
CopyString(Length, Distance);
continue;
}
if (Number == 256)
{
if (!ReadEndOfBlock())
{
break;
}
continue;
}
if (Number == 257)
{
if (!ReadVMCode())
{
break;
}
continue;
}
if (Number == 258)
{
if (lastLength != 0)
{
CopyString(lastLength, lastDist);
}
continue;
}
if (Number < 263)
{
var DistNum = Number - 259;
var Distance = oldDist[DistNum];
for (var I = DistNum; I > 0; I--)
{
oldDist[I] = oldDist[I - 1];
}
oldDist[0] = Distance;
var LengthNumber = this.decodeNumber(RD);
var Length = LDecode[LengthNumber] + 2;
if ((Bits = LBits[LengthNumber]) > 0)
{
Length += Utility.URShift(GetBits(), (16 - Bits));
AddBits(Bits);
}
InsertLastMatch(Length, Distance);
CopyString(Length, Distance);
continue;
}
if (Number < 272)
{
var Distance = SDDecode[Number -= 263] + 1;
if ((Bits = SDBits[Number]) > 0)
{
Distance += Utility.URShift(GetBits(), (16 - Bits));
AddBits(Bits);
}
InsertOldDist(Distance);
InsertLastMatch(2, Distance);
CopyString(2, Distance);
}
}
UnpWriteBuf();
}
private async Task UnpWriteBufAsync(CancellationToken cancellationToken = default)
{
var WrittenBorder = wrPtr;
var WriteSize = (unpPtr - WrittenBorder) & PackDef.MAXWINMASK;
for (var I = 0; I < prgStack.Count; I++)
{
var flt = prgStack[I];
if (flt is null)
{
continue;
}
if (flt.NextWindow)
{
flt.NextWindow = false;
continue;
}
var BlockStart = flt.BlockStart;
var BlockLength = flt.BlockLength;
if (((BlockStart - WrittenBorder) & PackDef.MAXWINMASK) < WriteSize)
{
if (WrittenBorder != BlockStart)
{
await UnpWriteAreaAsync(WrittenBorder, BlockStart, cancellationToken)
.ConfigureAwait(false);
WrittenBorder = BlockStart;
WriteSize = (unpPtr - WrittenBorder) & PackDef.MAXWINMASK;
}
if (BlockLength <= WriteSize)
{
var BlockEnd = (BlockStart + BlockLength) & PackDef.MAXWINMASK;
if (BlockStart < BlockEnd || BlockEnd == 0)
{
rarVM.setMemory(0, window, BlockStart, BlockLength);
}
else
{
var FirstPartLength = PackDef.MAXWINSIZE - BlockStart;
rarVM.setMemory(0, window, BlockStart, FirstPartLength);
rarVM.setMemory(FirstPartLength, window, 0, BlockEnd);
}
var ParentPrg = filters[flt.ParentFilter].Program;
var Prg = flt.Program;
if (ParentPrg.GlobalData.Count > RarVM.VM_FIXEDGLOBALSIZE)
{
Prg.GlobalData.Clear();
for (
var i = 0;
i < ParentPrg.GlobalData.Count - RarVM.VM_FIXEDGLOBALSIZE;
i++
)
{
Prg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i] = ParentPrg.GlobalData[
RarVM.VM_FIXEDGLOBALSIZE + i
];
}
}
ExecuteCode(Prg);
if (Prg.GlobalData.Count > RarVM.VM_FIXEDGLOBALSIZE)
{
if (ParentPrg.GlobalData.Count < Prg.GlobalData.Count)
{
ParentPrg.GlobalData.SetSize(Prg.GlobalData.Count);
}
for (var i = 0; i < Prg.GlobalData.Count - RarVM.VM_FIXEDGLOBALSIZE; i++)
{
ParentPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i] = Prg.GlobalData[
RarVM.VM_FIXEDGLOBALSIZE + i
];
}
}
else
{
ParentPrg.GlobalData.Clear();
}
var FilteredDataOffset = Prg.FilteredDataOffset;
var FilteredDataSize = Prg.FilteredDataSize;
var FilteredData = ArrayPool<byte>.Shared.Rent(FilteredDataSize);
try
{
Array.Copy(
rarVM.Mem,
FilteredDataOffset,
FilteredData,
0,
FilteredDataSize
);
prgStack[I] = null;
while (I + 1 < prgStack.Count)
{
var NextFilter = prgStack[I + 1];
if (
NextFilter is null
|| NextFilter.BlockStart != BlockStart
|| NextFilter.BlockLength != FilteredDataSize
|| NextFilter.NextWindow
)
{
break;
}
rarVM.setMemory(0, FilteredData, 0, FilteredDataSize);
var pPrg = filters[NextFilter.ParentFilter].Program;
var NextPrg = NextFilter.Program;
if (pPrg.GlobalData.Count > RarVM.VM_FIXEDGLOBALSIZE)
{
NextPrg.GlobalData.SetSize(pPrg.GlobalData.Count);
for (
var i = 0;
i < pPrg.GlobalData.Count - RarVM.VM_FIXEDGLOBALSIZE;
i++
)
{
NextPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i] =
pPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i];
}
}
ExecuteCode(NextPrg);
if (NextPrg.GlobalData.Count > RarVM.VM_FIXEDGLOBALSIZE)
{
if (pPrg.GlobalData.Count < NextPrg.GlobalData.Count)
{
pPrg.GlobalData.SetSize(NextPrg.GlobalData.Count);
}
for (
var i = 0;
i < NextPrg.GlobalData.Count - RarVM.VM_FIXEDGLOBALSIZE;
i++
)
{
pPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i] =
NextPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i];
}
}
else
{
pPrg.GlobalData.Clear();
}
FilteredDataOffset = NextPrg.FilteredDataOffset;
FilteredDataSize = NextPrg.FilteredDataSize;
if (FilteredData.Length < FilteredDataSize)
{
ArrayPool<byte>.Shared.Return(FilteredData);
FilteredData = ArrayPool<byte>.Shared.Rent(FilteredDataSize);
}
for (var i = 0; i < FilteredDataSize; i++)
{
FilteredData[i] = NextPrg.GlobalData[FilteredDataOffset + i];
}
I++;
prgStack[I] = null;
}
await writeStream
.WriteAsync(FilteredData, 0, FilteredDataSize, cancellationToken)
.ConfigureAwait(false);
writtenFileSize += FilteredDataSize;
destUnpSize -= FilteredDataSize;
WrittenBorder = BlockEnd;
WriteSize = (unpPtr - WrittenBorder) & PackDef.MAXWINMASK;
}
finally
{
ArrayPool<byte>.Shared.Return(FilteredData);
}
}
else
{
for (var J = I; J < prgStack.Count; J++)
{
var filt = prgStack[J];
if (filt != null && filt.NextWindow)
{
filt.NextWindow = false;
}
}
wrPtr = WrittenBorder;
return;
}
}
}
await UnpWriteAreaAsync(WrittenBorder, unpPtr, cancellationToken).ConfigureAwait(false);
wrPtr = unpPtr;
}
private async Task UnpWriteAreaAsync(
int startPtr,
int endPtr,
CancellationToken cancellationToken = default
)
{
if (endPtr < startPtr)
{
await UnpWriteDataAsync(
window,
startPtr,
-startPtr & PackDef.MAXWINMASK,
cancellationToken
)
.ConfigureAwait(false);
await UnpWriteDataAsync(window, 0, endPtr, cancellationToken).ConfigureAwait(false);
}
else
{
await UnpWriteDataAsync(window, startPtr, endPtr - startPtr, cancellationToken)
.ConfigureAwait(false);
}
}
private async Task UnpWriteDataAsync(
byte[] data,
int offset,
int size,
CancellationToken cancellationToken = default
)
{
if (destUnpSize < 0)
{
return;
}
var writeSize = size;
if (writeSize > destUnpSize)
{
writeSize = (int)destUnpSize;
}
await writeStream
.WriteAsync(data, offset, writeSize, cancellationToken)
.ConfigureAwait(false);
writtenFileSize += size;
destUnpSize -= size;
}
}

View File

@@ -150,6 +150,25 @@ internal sealed partial class Unpack : BitInput, IRarUnpack
DoUnpack();
}
/// <summary>
/// Entry point for asynchronously unpacking a single entry: wires up the
/// file header and the source/destination streams, then delegates to the
/// algorithm-dispatch overload.
/// </summary>
/// <param name="fileHeader">Header of the entry to decompress; its UncompressedSize seeds the remaining-output counter.</param>
/// <param name="readStream">Stream supplying the compressed data.</param>
/// <param name="writeStream">Stream receiving the decompressed data.</param>
/// <param name="cancellationToken">Token observed by the async read/write calls.</param>
public async System.Threading.Tasks.Task DoUnpackAsync(
FileHeader fileHeader,
Stream readStream,
Stream writeStream,
System.Threading.CancellationToken cancellationToken = default
)
{
destUnpSize = fileHeader.UncompressedSize;
this.fileHeader = fileHeader;
this.readStream = readStream;
this.writeStream = writeStream;
// Solid entries continue the decoder state from the previous entry, so
// only reset the decoder when this entry is not solid.
if (!fileHeader.IsSolid)
{
Init();
}
suspended = false;
await DoUnpackAsync(cancellationToken).ConfigureAwait(false);
}
public void DoUnpack()
{
if (fileHeader.CompressionMethod == 0)
@@ -184,6 +203,42 @@ internal sealed partial class Unpack : BitInput, IRarUnpack
}
}
/// <summary>
/// Asynchronously decompresses the current entry, dispatching on the RAR
/// compression algorithm version recorded in the file header.
/// </summary>
/// <param name="cancellationToken">Token observed by the async read/write calls.</param>
/// <exception cref="InvalidFormatException">The header carries an unrecognized compression algorithm version.</exception>
public async System.Threading.Tasks.Task DoUnpackAsync(
    System.Threading.CancellationToken cancellationToken = default
)
{
    // Stored (method 0) entries are copied straight through without decoding.
    if (fileHeader.CompressionMethod == 0)
    {
        await UnstoreFileAsync(cancellationToken).ConfigureAwait(false);
        return;
    }

    // Select the decoder matching the archiver version that wrote this entry.
    var decode = fileHeader.CompressionAlgorithm switch
    {
        // rar 1.5 compression
        15 => unpack15Async(fileHeader.IsSolid, cancellationToken),
        // rar 2.x compression / files larger than 2GB
        20 or 26 => unpack20Async(fileHeader.IsSolid, cancellationToken),
        // rar 3.x compression / alternative hash
        29 or 36 => Unpack29Async(fileHeader.IsSolid, cancellationToken),
        // rar 5.x compression
        50 => Unpack5Async(fileHeader.IsSolid, cancellationToken),
        _ => throw new InvalidFormatException(
            "unknown rar compression version " + fileHeader.CompressionAlgorithm
        ),
    };
    await decode.ConfigureAwait(false);
}
private void UnstoreFile()
{
Span<byte> buffer = stackalloc byte[(int)Math.Min(0x10000, destUnpSize)];
@@ -200,6 +255,26 @@ internal sealed partial class Unpack : BitInput, IRarUnpack
} while (!suspended && destUnpSize > 0);
}
/// <summary>
/// Asynchronously copies a stored (uncompressed) entry from the read stream
/// to the write stream, never emitting more than the declared size.
/// </summary>
/// <param name="cancellationToken">Token observed by the async read/write calls.</param>
private async System.Threading.Tasks.Task UnstoreFileAsync(
    System.Threading.CancellationToken cancellationToken = default
)
{
    // Raw byte copy; cap the scratch buffer at 64 KiB.
    var scratch = new byte[(int)Math.Min(0x10000, destUnpSize)];
    while (true)
    {
        var read = await readStream
            .ReadAsync(scratch, 0, scratch.Length, cancellationToken)
            .ConfigureAwait(false);
        if (read == 0 || read == -1)
        {
            break;
        }
        // Never write more bytes than the entry's remaining declared size.
        var toWrite = read < destUnpSize ? read : (int)destUnpSize;
        await writeStream
            .WriteAsync(scratch, 0, toWrite, cancellationToken)
            .ConfigureAwait(false);
        destUnpSize -= toWrite;
        if (suspended || destUnpSize <= 0)
        {
            break;
        }
    }
}
private void Unpack29(bool solid)
{
Span<int> DDecode = stackalloc int[PackDef.DC];
@@ -478,6 +553,281 @@ internal sealed partial class Unpack : BitInput, IRarUnpack
UnpWriteBuf();
}
/// <summary>
/// Asynchronously decodes a RAR 3.x (algorithm 29/36) compressed entry.
/// Main decode loop: handles PPM blocks, literal bytes, and the various
/// length/distance match codes, writing through the sliding window.
/// </summary>
/// <param name="solid">True when the entry continues the decoder state of the previous (solid) entry.</param>
/// <param name="cancellationToken">Token observed by the async read/write calls.</param>
private async System.Threading.Tasks.Task Unpack29Async(
bool solid,
System.Threading.CancellationToken cancellationToken = default
)
{
// Build the distance-slot decode tables (base distance + extra-bit count
// per slot) from the per-bit-length slot counts.
int[] DDecode = new int[PackDef.DC];
byte[] DBits = new byte[PackDef.DC];
int Bits;
if (DDecode[1] == 0)
{
int Dist = 0,
BitLength = 0,
Slot = 0;
for (var I = 0; I < DBitLengthCounts.Length; I++, BitLength++)
{
var count = DBitLengthCounts[I];
for (var J = 0; J < count; J++, Slot++, Dist += (1 << BitLength))
{
DDecode[Slot] = Dist;
DBits[Slot] = (byte)BitLength;
}
}
}
FileExtracted = true;
// When resuming a suspended decode, the buffers and tables are already set up.
if (!suspended)
{
UnpInitData(solid);
if (!await unpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return;
}
if ((!solid || !tablesRead) && !ReadTables())
{
return;
}
}
if (ppmError)
{
return;
}
while (true)
{
unpPtr &= PackDef.MAXWINMASK;
// Refill the input buffer when the decoder nears its end.
if (inAddr > readBorder)
{
if (!await unpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
break;
}
}
// Flush the window before the write pointer is overtaken.
if (((wrPtr - unpPtr) & PackDef.MAXWINMASK) < 260 && wrPtr != unpPtr)
{
await UnpWriteBufAsync(cancellationToken).ConfigureAwait(false);
if (destUnpSize < 0)
{
return;
}
if (suspended)
{
FileExtracted = false;
return;
}
}
// PPM-coded block: symbols come from the PPM model, with an escape
// character introducing control codes (table reset, VM code, matches).
if (unpBlockType == BlockTypes.BLOCK_PPM)
{
var ch = ppm.DecodeChar();
if (ch == -1)
{
ppmError = true;
break;
}
if (ch == PpmEscChar)
{
var nextCh = ppm.DecodeChar();
if (nextCh == 0)
{
if (!ReadTables())
{
break;
}
continue;
}
if (nextCh == 2 || nextCh == -1)
{
break;
}
if (nextCh == 3)
{
if (!ReadVMCode())
{
break;
}
continue;
}
if (nextCh == 4)
{
// Escape 4: explicit match — 3 distance bytes then 1 length byte.
uint Distance = 0,
Length = 0;
var failed = false;
for (var I = 0; I < 4 && !failed; I++)
{
var ch2 = ppm.DecodeChar();
if (ch2 == -1)
{
failed = true;
}
else if (I == 3)
{
Length = (uint)ch2;
}
else
{
Distance = (Distance << 8) + (uint)ch2;
}
}
if (failed)
{
break;
}
CopyString(Length + 32, Distance + 2);
continue;
}
if (nextCh == 5)
{
// Escape 5: short match at distance 1 (run of the previous byte).
var length = ppm.DecodeChar();
if (length == -1)
{
break;
}
CopyString((uint)(length + 4), 1);
continue;
}
}
window[unpPtr++] = (byte)ch;
continue;
}
// Huffman-coded block: decode the next symbol from the literal/length table.
var Number = this.decodeNumber(LD);
if (Number < 256)
{
// Literal byte.
window[unpPtr++] = (byte)Number;
continue;
}
if (Number >= 271)
{
// Regular match: length slot plus extra bits, then distance slot.
var Length = LDecode[Number -= 271] + 3;
if ((Bits = LBits[Number]) > 0)
{
Length += GetBits() >> (16 - Bits);
AddBits(Bits);
}
var DistNumber = this.decodeNumber(DD);
var Distance = DDecode[DistNumber] + 1;
if ((Bits = DBits[DistNumber]) > 0)
{
if (DistNumber > 9)
{
if (Bits > 4)
{
Distance += (GetBits() >> (20 - Bits)) << 4;
AddBits(Bits - 4);
}
// Low 4 distance bits come from a separate table, with a
// repeat count to reuse the previous low-distance value.
if (lowDistRepCount > 0)
{
lowDistRepCount--;
Distance += prevLowDist;
}
else
{
var LowDist = this.decodeNumber(LDD);
if (LowDist == 16)
{
lowDistRepCount = PackDef.LOW_DIST_REP_COUNT - 1;
Distance += prevLowDist;
}
else
{
Distance += LowDist;
prevLowDist = (int)LowDist;
}
}
}
else
{
Distance += GetBits() >> (16 - Bits);
AddBits(Bits);
}
}
// Long-distance matches implicitly gain length.
if (Distance >= 0x2000)
{
Length++;
if (Distance >= 0x40000)
{
Length++;
}
}
InsertOldDist(Distance);
lastLength = Length;
CopyString(Length, Distance);
continue;
}
if (Number == 256)
{
// End-of-block marker.
if (!ReadEndOfBlock())
{
break;
}
continue;
}
if (Number == 257)
{
// Embedded RAR VM filter code follows.
if (!ReadVMCode())
{
break;
}
continue;
}
if (Number == 258)
{
// Repeat the last match verbatim.
if (lastLength != 0)
{
CopyString(lastLength, oldDist[0]);
}
continue;
}
if (Number < 263)
{
// Match reusing one of the four most recent distances (move-to-front).
var DistNum = Number - 259;
var Distance = (uint)oldDist[DistNum];
for (var I = DistNum; I > 0; I--)
{
oldDist[I] = oldDist[I - 1];
}
oldDist[0] = (int)Distance;
var LengthNumber = this.decodeNumber(RD);
var Length = LDecode[LengthNumber] + 2;
if ((Bits = LBits[LengthNumber]) > 0)
{
Length += GetBits() >> (16 - Bits);
AddBits(Bits);
}
lastLength = Length;
CopyString((uint)Length, Distance);
continue;
}
if (Number < 272)
{
// Short match (length 2) with a small-distance table.
var Distance = SDDecode[Number -= 263] + 1;
if ((Bits = SDBits[Number]) > 0)
{
Distance += GetBits() >> (16 - Bits);
AddBits(Bits);
}
InsertOldDist((uint)Distance);
lastLength = 2;
CopyString(2, (uint)Distance);
}
}
// Flush whatever remains in the window.
await UnpWriteBufAsync(cancellationToken).ConfigureAwait(false);
}
private void UnpWriteBuf()
{
var WrittenBorder = wrPtr;
@@ -1334,6 +1684,256 @@ internal sealed partial class Unpack : BitInput, IRarUnpack
}
}
/// <summary>
/// Asynchronously flushes the decoded window region [wrPtr, unpPtr) to the
/// output stream, running any pending RAR VM filters over the sub-ranges
/// they cover and writing their filtered output instead of the raw bytes.
/// </summary>
/// <param name="cancellationToken">Token observed by the async write calls.</param>
private async System.Threading.Tasks.Task UnpWriteBufAsync(
System.Threading.CancellationToken cancellationToken = default
)
{
var WrittenBorder = wrPtr;
var WriteSize = (unpPtr - WrittenBorder) & PackDef.MAXWINMASK;
for (var I = 0; I < prgStack.Count; I++)
{
var flt = prgStack[I];
if (flt is null)
{
continue;
}
if (flt.NextWindow)
{
// Filter belongs to a later pass over the window; arm it and skip.
flt.NextWindow = false;
continue;
}
var BlockStart = flt.BlockStart;
var BlockLength = flt.BlockLength;
// Only process filters whose block starts inside the unwritten region.
if (((BlockStart - WrittenBorder) & PackDef.MAXWINMASK) < WriteSize)
{
if (WrittenBorder != BlockStart)
{
// Emit the unfiltered bytes leading up to the filter block.
await UnpWriteAreaAsync(WrittenBorder, BlockStart, cancellationToken)
.ConfigureAwait(false);
WrittenBorder = BlockStart;
WriteSize = (unpPtr - WrittenBorder) & PackDef.MAXWINMASK;
}
if (BlockLength <= WriteSize)
{
// Copy the filter's input block into VM memory, handling window wrap.
var BlockEnd = (BlockStart + BlockLength) & PackDef.MAXWINMASK;
if (BlockStart < BlockEnd || BlockEnd == 0)
{
rarVM.setMemory(0, window, BlockStart, BlockLength);
}
else
{
var FirstPartLength = PackDef.MAXWINSIZE - BlockStart;
rarVM.setMemory(0, window, BlockStart, FirstPartLength);
rarVM.setMemory(FirstPartLength, window, 0, BlockEnd);
}
// Propagate the parent program's global data into this instance,
// execute it, and copy any grown global data back to the parent.
var ParentPrg = filters[flt.ParentFilter].Program;
var Prg = flt.Program;
if (ParentPrg.GlobalData.Count > RarVM.VM_FIXEDGLOBALSIZE)
{
Prg.GlobalData.Clear();
for (
var i = 0;
i < ParentPrg.GlobalData.Count - RarVM.VM_FIXEDGLOBALSIZE;
i++
)
{
Prg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i] = ParentPrg.GlobalData[
RarVM.VM_FIXEDGLOBALSIZE + i
];
}
}
ExecuteCode(Prg);
if (Prg.GlobalData.Count > RarVM.VM_FIXEDGLOBALSIZE)
{
if (ParentPrg.GlobalData.Count < Prg.GlobalData.Count)
{
ParentPrg.GlobalData.SetSize(Prg.GlobalData.Count);
}
for (var i = 0; i < Prg.GlobalData.Count - RarVM.VM_FIXEDGLOBALSIZE; i++)
{
ParentPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i] = Prg.GlobalData[
RarVM.VM_FIXEDGLOBALSIZE + i
];
}
}
else
{
ParentPrg.GlobalData.Clear();
}
var FilteredDataOffset = Prg.FilteredDataOffset;
var FilteredDataSize = Prg.FilteredDataSize;
// Pooled buffer for the filter output; returned in the finally below.
var FilteredData = ArrayPool<byte>.Shared.Rent(FilteredDataSize);
try
{
Array.Copy(
rarVM.Mem,
FilteredDataOffset,
FilteredData,
0,
FilteredDataSize
);
prgStack[I] = null;
// Chain any immediately-following filters that consume exactly
// this filter's output as their input block.
while (I + 1 < prgStack.Count)
{
var NextFilter = prgStack[I + 1];
if (
NextFilter is null
|| NextFilter.BlockStart != BlockStart
|| NextFilter.BlockLength != FilteredDataSize
|| NextFilter.NextWindow
)
{
break;
}
rarVM.setMemory(0, FilteredData, 0, FilteredDataSize);
var pPrg = filters[NextFilter.ParentFilter].Program;
var NextPrg = NextFilter.Program;
if (pPrg.GlobalData.Count > RarVM.VM_FIXEDGLOBALSIZE)
{
NextPrg.GlobalData.SetSize(pPrg.GlobalData.Count);
for (
var i = 0;
i < pPrg.GlobalData.Count - RarVM.VM_FIXEDGLOBALSIZE;
i++
)
{
NextPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i] =
pPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i];
}
}
ExecuteCode(NextPrg);
if (NextPrg.GlobalData.Count > RarVM.VM_FIXEDGLOBALSIZE)
{
if (pPrg.GlobalData.Count < NextPrg.GlobalData.Count)
{
pPrg.GlobalData.SetSize(NextPrg.GlobalData.Count);
}
for (
var i = 0;
i < NextPrg.GlobalData.Count - RarVM.VM_FIXEDGLOBALSIZE;
i++
)
{
pPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i] =
NextPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i];
}
}
else
{
pPrg.GlobalData.Clear();
}
FilteredDataOffset = NextPrg.FilteredDataOffset;
FilteredDataSize = NextPrg.FilteredDataSize;
// Grow the pooled buffer if the chained filter produced more data.
if (FilteredData.Length < FilteredDataSize)
{
ArrayPool<byte>.Shared.Return(FilteredData);
FilteredData = ArrayPool<byte>.Shared.Rent(FilteredDataSize);
}
for (var i = 0; i < FilteredDataSize; i++)
{
FilteredData[i] = NextPrg.GlobalData[FilteredDataOffset + i];
}
I++;
prgStack[I] = null;
}
await writeStream
.WriteAsync(FilteredData, 0, FilteredDataSize, cancellationToken)
.ConfigureAwait(false);
writtenFileSize += FilteredDataSize;
destUnpSize -= FilteredDataSize;
WrittenBorder = BlockEnd;
WriteSize = (unpPtr - WrittenBorder) & PackDef.MAXWINMASK;
}
finally
{
ArrayPool<byte>.Shared.Return(FilteredData);
}
}
else
{
// The filter's block is not fully decoded yet: stop here, keep the
// remaining filters for the next flush, and mark wrapped ones.
for (var J = I; J < prgStack.Count; J++)
{
var filt = prgStack[J];
if (filt != null && filt.NextWindow)
{
filt.NextWindow = false;
}
}
wrPtr = WrittenBorder;
return;
}
}
}
// No (more) filters in range: write the remaining region unfiltered.
await UnpWriteAreaAsync(WrittenBorder, unpPtr, cancellationToken).ConfigureAwait(false);
wrPtr = unpPtr;
}
/// <summary>
/// Asynchronously writes the window region [startPtr, endPtr) to the output,
/// handling wrap-around of the circular sliding window.
/// </summary>
/// <param name="startPtr">Inclusive start offset within the window.</param>
/// <param name="endPtr">Exclusive end offset within the window.</param>
/// <param name="cancellationToken">Token observed by the async write calls.</param>
private async System.Threading.Tasks.Task UnpWriteAreaAsync(
    int startPtr,
    int endPtr,
    System.Threading.CancellationToken cancellationToken = default
)
{
    if (startPtr <= endPtr)
    {
        // Contiguous region: a single write suffices.
        await UnpWriteDataAsync(window, startPtr, endPtr - startPtr, cancellationToken)
            .ConfigureAwait(false);
        return;
    }

    // Region wraps past the end of the window: emit the tail, then the head.
    await UnpWriteDataAsync(
            window,
            startPtr,
            -startPtr & PackDef.MAXWINMASK,
            cancellationToken
        )
        .ConfigureAwait(false);
    await UnpWriteDataAsync(window, 0, endPtr, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Asynchronously writes up to <paramref name="size"/> bytes of decoded data,
/// clamped to the entry's remaining declared uncompressed size.
/// </summary>
/// <param name="data">Source buffer (the sliding window).</param>
/// <param name="offset">Offset of the first byte to write.</param>
/// <param name="size">Number of decoded bytes the caller produced.</param>
/// <param name="cancellationToken">Token observed by the async write call.</param>
private async System.Threading.Tasks.Task UnpWriteDataAsync(
    byte[] data,
    int offset,
    int size,
    System.Threading.CancellationToken cancellationToken = default
)
{
    // Output quota already exhausted: silently drop further data.
    if (destUnpSize < 0)
    {
        return;
    }
    // Clamp to the bytes still owed; the cast is safe because the result
    // never exceeds the int-sized 'size'.
    var writeSize = (int)Math.Min(size, destUnpSize);
    await writeStream
        .WriteAsync(data, offset, writeSize, cancellationToken)
        .ConfigureAwait(false);
    // NOTE(review): counters advance by the full requested size, not the
    // clamped write size — this deliberately drives destUnpSize negative to
    // signal completion; presumably mirrors the synchronous path — confirm.
    writtenFileSize += size;
    destUnpSize -= size;
}
private void CleanUp()
{
if (ppm != null)

View File

@@ -1,162 +0,0 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.Rar.UnpackV1.Decode;
namespace SharpCompress.Compressors.Rar.UnpackV1;
internal partial class Unpack
{
private async Task unpack15Async(bool solid, CancellationToken cancellationToken = default)
{
if (suspended)
{
unpPtr = wrPtr;
}
else
{
UnpInitData(solid);
oldUnpInitData(solid);
await unpReadBufAsync(cancellationToken).ConfigureAwait(false);
if (!solid)
{
initHuff();
unpPtr = 0;
}
else
{
unpPtr = wrPtr;
}
--destUnpSize;
}
if (destUnpSize >= 0)
{
getFlagsBuf();
FlagsCnt = 8;
}
while (destUnpSize >= 0)
{
unpPtr &= PackDef.MAXWINMASK;
if (
inAddr > readTop - 30
&& !await unpReadBufAsync(cancellationToken).ConfigureAwait(false)
)
{
break;
}
if (((wrPtr - unpPtr) & PackDef.MAXWINMASK) < 270 && wrPtr != unpPtr)
{
oldUnpWriteBuf();
if (suspended)
{
return;
}
}
if (StMode != 0)
{
huffDecode();
continue;
}
if (--FlagsCnt < 0)
{
getFlagsBuf();
FlagsCnt = 7;
}
if ((FlagBuf & 0x80) != 0)
{
FlagBuf <<= 1;
if (Nlzb > Nhfb)
{
longLZ();
}
else
{
huffDecode();
}
}
else
{
FlagBuf <<= 1;
if (--FlagsCnt < 0)
{
getFlagsBuf();
FlagsCnt = 7;
}
if ((FlagBuf & 0x80) != 0)
{
FlagBuf <<= 1;
if (Nlzb > Nhfb)
{
huffDecode();
}
else
{
longLZ();
}
}
else
{
FlagBuf <<= 1;
shortLZ();
}
}
}
oldUnpWriteBuf();
}
private async Task<bool> unpReadBufAsync(CancellationToken cancellationToken = default)
{
var dataSize = readTop - inAddr;
if (dataSize < 0)
{
return false;
}
if (inAddr > MAX_SIZE / 2)
{
if (dataSize > 0)
{
Array.Copy(InBuf, inAddr, InBuf, 0, dataSize);
}
inAddr = 0;
readTop = dataSize;
}
else
{
dataSize = readTop;
}
var readCode = await readStream
.ReadAsync(InBuf, dataSize, (MAX_SIZE - dataSize) & ~0xf, cancellationToken)
.ConfigureAwait(false);
if (readCode > 0)
{
readTop += readCode;
}
readBorder = readTop - 30;
return readCode != -1;
}
private async Task oldUnpWriteBufAsync(CancellationToken cancellationToken = default)
{
if (unpPtr < wrPtr)
{
await writeStream
.WriteAsync(window, wrPtr, -wrPtr & PackDef.MAXWINMASK, cancellationToken)
.ConfigureAwait(false);
await writeStream
.WriteAsync(window, 0, unpPtr, cancellationToken)
.ConfigureAwait(false);
}
else
{
await writeStream
.WriteAsync(window, wrPtr, unpPtr - wrPtr, cancellationToken)
.ConfigureAwait(false);
}
wrPtr = unpPtr;
}
}

View File

@@ -316,6 +316,110 @@ internal partial class Unpack
oldUnpWriteBuf();
}
/// <summary>
/// Asynchronously decodes a RAR 1.5 (algorithm 15) compressed entry using
/// the legacy flag-bit driven mix of Huffman and short/long LZ coding.
/// </summary>
/// <param name="solid">True when the entry continues the decoder state of the previous (solid) entry.</param>
/// <param name="cancellationToken">Token observed by the async read/write calls.</param>
private async System.Threading.Tasks.Task unpack15Async(
bool solid,
System.Threading.CancellationToken cancellationToken = default
)
{
if (suspended)
{
// Resuming: continue decoding from the current write position.
unpPtr = wrPtr;
}
else
{
UnpInitData(solid);
oldUnpInitData(solid);
await unpReadBufAsync(cancellationToken).ConfigureAwait(false);
if (!solid)
{
initHuff();
unpPtr = 0;
}
else
{
unpPtr = wrPtr;
}
--destUnpSize;
}
if (destUnpSize >= 0)
{
getFlagsBuf();
FlagsCnt = 8;
}
while (destUnpSize >= 0)
{
unpPtr &= PackDef.MAXWINMASK;
// Refill the input buffer when the decoder nears its end.
if (
inAddr > readTop - 30
&& !await unpReadBufAsync(cancellationToken).ConfigureAwait(false)
)
{
break;
}
// Flush the window before the write pointer is overtaken.
if (((wrPtr - unpPtr) & PackDef.MAXWINMASK) < 270 && wrPtr != unpPtr)
{
await oldUnpWriteBufAsync(cancellationToken).ConfigureAwait(false);
if (suspended)
{
return;
}
}
if (StMode != 0)
{
huffDecode();
continue;
}
// Each flag bit selects between Huffman, long-LZ and short-LZ coding;
// the Nlzb/Nhfb counters bias which decoder the set bit picks.
if (--FlagsCnt < 0)
{
getFlagsBuf();
FlagsCnt = 7;
}
if ((FlagBuf & 0x80) != 0)
{
FlagBuf <<= 1;
if (Nlzb > Nhfb)
{
longLZ();
}
else
{
huffDecode();
}
}
else
{
FlagBuf <<= 1;
if (--FlagsCnt < 0)
{
getFlagsBuf();
FlagsCnt = 7;
}
if ((FlagBuf & 0x80) != 0)
{
FlagBuf <<= 1;
if (Nlzb > Nhfb)
{
huffDecode();
}
else
{
longLZ();
}
}
else
{
FlagBuf <<= 1;
shortLZ();
}
}
}
// Flush whatever remains in the window.
await oldUnpWriteBufAsync(cancellationToken).ConfigureAwait(false);
}
private bool unpReadBuf()
{
var dataSize = readTop - inAddr;
@@ -351,6 +455,40 @@ internal partial class Unpack
return (readCode != -1);
}
/// <summary>
/// Asynchronously tops up the input buffer, compacting unread bytes to the
/// front once more than half of the buffer has been consumed.
/// </summary>
/// <param name="cancellationToken">Token observed by the async read call.</param>
/// <returns>False when the buffer state is invalid or the stream signalled -1; true otherwise.</returns>
private async System.Threading.Tasks.Task<bool> unpReadBufAsync(
    System.Threading.CancellationToken cancellationToken = default
)
{
    // Bytes already buffered but not yet consumed by the decoder.
    var remaining = readTop - inAddr;
    if (remaining < 0)
    {
        return false;
    }

    int fillFrom;
    if (inAddr > MAX_SIZE / 2)
    {
        // Compact: slide the unread tail to the front of InBuf.
        if (remaining > 0)
        {
            Array.Copy(InBuf, inAddr, InBuf, 0, remaining);
        }
        inAddr = 0;
        readTop = remaining;
        fillFrom = remaining;
    }
    else
    {
        fillFrom = readTop;
    }

    // Fill the rest of the buffer in 16-byte-aligned chunks.
    var bytesRead = await readStream
        .ReadAsync(InBuf, fillFrom, (MAX_SIZE - fillFrom) & ~0xf, cancellationToken)
        .ConfigureAwait(false);
    if (bytesRead > 0)
    {
        readTop += bytesRead;
    }
    readBorder = readTop - 30;
    return bytesRead != -1;
}
// Length for short-LZ slot 'pos' from table 1; slot 1 is dynamic and
// depends on the current Buf60 state.
private int getShortLen1(int pos)
{
    if (pos == 1)
    {
        return Buf60 + 3;
    }
    return ShortLen1[pos];
}
// Length for short-LZ slot 'pos' from table 2; slot 3 is dynamic and
// depends on the current Buf60 state.
private int getShortLen2(int pos)
{
    if (pos == 3)
    {
        return Buf60 + 3;
    }
    return ShortLen2[pos];
}
@@ -814,4 +952,26 @@ internal partial class Unpack
}
wrPtr = unpPtr;
}
/// <summary>
/// Asynchronously flushes the window region [wrPtr, unpPtr) to the output
/// stream, handling wrap-around of the circular window.
/// </summary>
/// <param name="cancellationToken">Token observed by the async write calls.</param>
private async System.Threading.Tasks.Task oldUnpWriteBufAsync(
    System.Threading.CancellationToken cancellationToken = default
)
{
    if (wrPtr <= unpPtr)
    {
        // Contiguous region: a single write suffices (possibly zero bytes).
        await writeStream
            .WriteAsync(window, wrPtr, unpPtr - wrPtr, cancellationToken)
            .ConfigureAwait(false);
    }
    else
    {
        // Region wraps: emit the tail of the window, then the head.
        await writeStream
            .WriteAsync(window, wrPtr, -wrPtr & PackDef.MAXWINMASK, cancellationToken)
            .ConfigureAwait(false);
        await writeStream
            .WriteAsync(window, 0, unpPtr, cancellationToken)
            .ConfigureAwait(false);
    }
    wrPtr = unpPtr;
}
}

View File

@@ -1,275 +0,0 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.Rar.UnpackV1.Decode;
namespace SharpCompress.Compressors.Rar.UnpackV1;
internal partial class Unpack
{
/// <summary>
/// Asynchronously decodes a RAR 2.x (algorithm 20/26) compressed entry,
/// including multichannel audio blocks.
/// </summary>
/// <param name="solid">True when the entry continues the decoder state of the previous (solid) entry.</param>
/// <param name="cancellationToken">Token observed by the async read/write calls.</param>
private async Task unpack20Async(bool solid, CancellationToken cancellationToken = default)
{
    int Bits;
    if (suspended)
    {
        // Resuming: continue decoding from the current write position.
        unpPtr = wrPtr;
    }
    else
    {
        UnpInitData(solid);
        if (!await unpReadBufAsync(cancellationToken).ConfigureAwait(false))
        {
            return;
        }
        if (!solid)
        {
            if (!await ReadTables20Async(cancellationToken).ConfigureAwait(false))
            {
                return;
            }
        }
        --destUnpSize;
    }
    while (destUnpSize >= 0)
    {
        unpPtr &= PackDef.MAXWINMASK;
        // Refill the input buffer when the decoder nears its end.
        if (inAddr > readTop - 30)
        {
            if (!await unpReadBufAsync(cancellationToken).ConfigureAwait(false))
            {
                break;
            }
        }
        // Flush the window before the write pointer is overtaken.
        if (((wrPtr - unpPtr) & PackDef.MAXWINMASK) < 270 && wrPtr != unpPtr)
        {
            // FIX: flush through the async path instead of the blocking
            // oldUnpWriteBuf(), consistent with unpack15Async/Unpack29Async.
            await oldUnpWriteBufAsync(cancellationToken).ConfigureAwait(false);
            if (suspended)
            {
                return;
            }
        }
        // Audio block: symbols are per-channel delta-coded samples.
        if (UnpAudioBlock != 0)
        {
            var AudioNumber = this.decodeNumber(MD[UnpCurChannel]);
            if (AudioNumber == 256)
            {
                if (!await ReadTables20Async(cancellationToken).ConfigureAwait(false))
                {
                    break;
                }
                continue;
            }
            window[unpPtr++] = DecodeAudio(AudioNumber);
            if (++UnpCurChannel == UnpChannels)
            {
                UnpCurChannel = 0;
            }
            --destUnpSize;
            continue;
        }
        var Number = this.decodeNumber(LD);
        if (Number < 256)
        {
            // Literal byte.
            window[unpPtr++] = (byte)Number;
            --destUnpSize;
            continue;
        }
        if (Number > 269)
        {
            // Regular match: length slot plus extra bits, then distance slot.
            var Length = LDecode[Number -= 270] + 3;
            if ((Bits = LBits[Number]) > 0)
            {
                Length += Utility.URShift(GetBits(), (16 - Bits));
                AddBits(Bits);
            }
            var DistNumber = this.decodeNumber(DD);
            var Distance = DDecode[DistNumber] + 1;
            if ((Bits = DBits[DistNumber]) > 0)
            {
                Distance += Utility.URShift(GetBits(), (16 - Bits));
                AddBits(Bits);
            }
            // Long-distance matches implicitly gain length.
            if (Distance >= 0x2000)
            {
                Length++;
                if (Distance >= 0x40000L)
                {
                    Length++;
                }
            }
            CopyString20(Length, Distance);
            continue;
        }
        if (Number == 269)
        {
            // Table switch marker.
            if (!await ReadTables20Async(cancellationToken).ConfigureAwait(false))
            {
                break;
            }
            continue;
        }
        if (Number == 256)
        {
            // Repeat the last match verbatim.
            CopyString20(lastLength, lastDist);
            continue;
        }
        if (Number < 261)
        {
            // Match reusing one of the four most recent distances.
            var Distance = oldDist[(oldDistPtr - (Number - 256)) & 3];
            var LengthNumber = this.decodeNumber(RD);
            var Length = LDecode[LengthNumber] + 2;
            if ((Bits = LBits[LengthNumber]) > 0)
            {
                Length += Utility.URShift(GetBits(), (16 - Bits));
                AddBits(Bits);
            }
            if (Distance >= 0x101)
            {
                Length++;
                if (Distance >= 0x2000)
                {
                    Length++;
                    if (Distance >= 0x40000)
                    {
                        Length++;
                    }
                }
            }
            CopyString20(Length, Distance);
            continue;
        }
        if (Number < 270)
        {
            // Short match (length 2) with a small-distance table.
            var Distance = SDDecode[Number -= 261] + 1;
            if ((Bits = SDBits[Number]) > 0)
            {
                Distance += Utility.URShift(GetBits(), (16 - Bits));
                AddBits(Bits);
            }
            CopyString20(2, Distance);
        }
    }
    ReadLastTables();
    // FIX: final flush also goes through the async write path.
    await oldUnpWriteBufAsync(cancellationToken).ConfigureAwait(false);
}
// Reads the RAR 2.0 Huffman table definitions from the bit stream and rebuilds
// the decode tables (BD pre-table, then LD/DD/RD or per-channel MD tables).
// Returns false on input underrun.
private async Task<bool> ReadTables20Async(CancellationToken cancellationToken = default)
{
byte[] BitLength = new byte[PackDef.BC20];
byte[] Table = new byte[PackDef.MC20 * 4];
int TableSize,
N,
I;
// Make sure enough input is buffered before parsing the header bits.
if (inAddr > readTop - 25)
{
if (!await unpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return false;
}
}
var BitField = GetBits();
// Bit 15: multimedia (audio) block. Bit 14: keep previous table for deltas;
// when clear, the old table is zeroed before delta decoding.
UnpAudioBlock = (BitField & 0x8000);
if (0 == (BitField & 0x4000))
{
new Span<byte>(UnpOldTable20).Clear();
}
AddBits(2);
if (UnpAudioBlock != 0)
{
// Bits 12-13: channel count minus one.
UnpChannels = ((Utility.URShift(BitField, 12)) & 3) + 1;
if (UnpCurChannel >= UnpChannels)
{
UnpCurChannel = 0;
}
AddBits(2);
TableSize = PackDef.MC20 * UnpChannels;
}
else
{
TableSize = PackDef.NC20 + PackDef.DC20 + PackDef.RC20;
}
// Read the 4-bit code lengths of the pre-table and build its decode table.
for (I = 0; I < PackDef.BC20; I++)
{
BitLength[I] = (byte)(Utility.URShift(GetBits(), 12));
AddBits(4);
}
UnpackUtility.makeDecodeTables(BitLength, 0, BD, PackDef.BC20);
I = 0;
while (I < TableSize)
{
if (inAddr > readTop - 5)
{
if (!await unpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return false;
}
}
var Number = this.decodeNumber(BD);
if (Number < 16)
{
// Literal code length, delta-coded against the previous table.
Table[I] = (byte)((Number + UnpOldTable20[I]) & 0xf);
I++;
}
else if (Number == 16)
{
// Code 16: repeat the previous length 3..6 times.
N = (Utility.URShift(GetBits(), 14)) + 3;
AddBits(2);
while (N-- > 0 && I < TableSize)
{
Table[I] = Table[I - 1];
I++;
}
}
else
{
// Codes 17/18: runs of zero lengths (short and long form).
if (Number == 17)
{
N = (Utility.URShift(GetBits(), 13)) + 3;
AddBits(3);
}
else
{
N = (Utility.URShift(GetBits(), 9)) + 11;
AddBits(7);
}
while (N-- > 0 && I < TableSize)
{
Table[I++] = 0;
}
}
}
if (inAddr > readTop)
{
// Ran out of input mid-table; keep whatever tables are current.
return true;
}
if (UnpAudioBlock != 0)
{
for (I = 0; I < UnpChannels; I++)
{
UnpackUtility.makeDecodeTables(Table, I * PackDef.MC20, MD[I], PackDef.MC20);
}
}
else
{
UnpackUtility.makeDecodeTables(Table, 0, LD, PackDef.NC20);
UnpackUtility.makeDecodeTables(Table, PackDef.NC20, DD, PackDef.DC20);
UnpackUtility.makeDecodeTables(Table, PackDef.NC20 + PackDef.DC20, RD, PackDef.RC20);
}
// Remember this table so the next one can be delta-coded against it.
for (var i = 0; i < UnpOldTable20.Length; i++)
{
UnpOldTable20[i] = Table[i];
}
return true;
}
}

View File

@@ -368,6 +368,163 @@ internal partial class Unpack
oldUnpWriteBuf();
}
// Asynchronously decompresses a RAR 2.0 (method 2.x) stream into the sliding
// window. Same decode loop as the synchronous unpack20; input refills, table
// reads and window flushes are awaited.
private async System.Threading.Tasks.Task unpack20Async(
bool solid,
System.Threading.CancellationToken cancellationToken = default
)
{
int Bits;
if (suspended)
{
// Resuming a suspended unpack: restart decoding from the write pointer.
unpPtr = wrPtr;
}
else
{
UnpInitData(solid);
if (!await unpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return;
}
if (!solid)
{
// Non-solid data starts with a fresh set of Huffman tables.
if (!await ReadTables20Async(cancellationToken).ConfigureAwait(false))
{
return;
}
}
--destUnpSize;
}
while (destUnpSize >= 0)
{
unpPtr &= PackDef.MAXWINMASK;
// Refill input when fewer than 30 bytes remain unread.
if (inAddr > readTop - 30)
{
if (!await unpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
break;
}
}
// Flush the window once the decode pointer closes in on the write pointer.
if (((wrPtr - unpPtr) & PackDef.MAXWINMASK) < 270 && wrPtr != unpPtr)
{
await oldUnpWriteBufAsync(cancellationToken).ConfigureAwait(false);
if (suspended)
{
return;
}
}
if (UnpAudioBlock != 0)
{
// Multimedia block: each channel has its own decode table in MD.
var AudioNumber = this.decodeNumber(MD[UnpCurChannel]);
if (AudioNumber == 256)
{
// Code 256 inside an audio block signals a table switch.
if (!await ReadTables20Async(cancellationToken).ConfigureAwait(false))
{
break;
}
continue;
}
window[unpPtr++] = DecodeAudio(AudioNumber);
if (++UnpCurChannel == UnpChannels)
{
UnpCurChannel = 0;
}
--destUnpSize;
continue;
}
var Number = this.decodeNumber(LD);
if (Number < 256)
{
// Literal byte.
window[unpPtr++] = (byte)Number;
--destUnpSize;
continue;
}
if (Number > 269)
{
// Codes 270+: long match; length from LDecode/LBits, distance from DD.
var Length = LDecode[Number -= 270] + 3;
if ((Bits = LBits[Number]) > 0)
{
Length += Utility.URShift(GetBits(), (16 - Bits));
AddBits(Bits);
}
var DistNumber = this.decodeNumber(DD);
var Distance = DDecode[DistNumber] + 1;
if ((Bits = DBits[DistNumber]) > 0)
{
Distance += Utility.URShift(GetBits(), (16 - Bits));
AddBits(Bits);
}
// Larger distances carry an implicit minimum-length bump.
if (Distance >= 0x2000)
{
Length++;
if (Distance >= 0x40000L)
{
Length++;
}
}
CopyString20(Length, Distance);
continue;
}
if (Number == 269)
{
// Code 269: new Huffman tables follow in the stream.
if (!await ReadTables20Async(cancellationToken).ConfigureAwait(false))
{
break;
}
continue;
}
if (Number == 256)
{
// Code 256: repeat the previous match verbatim.
CopyString20(lastLength, lastDist);
continue;
}
if (Number < 261)
{
// Codes 257-260: reuse one of the four most recent distances.
var Distance = oldDist[(oldDistPtr - (Number - 256)) & 3];
var LengthNumber = this.decodeNumber(RD);
var Length = LDecode[LengthNumber] + 2;
if ((Bits = LBits[LengthNumber]) > 0)
{
Length += Utility.URShift(GetBits(), (16 - Bits));
AddBits(Bits);
}
if (Distance >= 0x101)
{
Length++;
if (Distance >= 0x2000)
{
Length++;
if (Distance >= 0x40000)
{
Length++;
}
}
}
CopyString20(Length, Distance);
continue;
}
if (Number < 270)
{
// Codes 261-269: two-byte match with a short-distance table (SDDecode).
var Distance = SDDecode[Number -= 261] + 1;
if ((Bits = SDBits[Number]) > 0)
{
Distance += Utility.URShift(GetBits(), (16 - Bits));
AddBits(Bits);
}
CopyString20(2, Distance);
}
}
ReadLastTables();
await oldUnpWriteBufAsync(cancellationToken).ConfigureAwait(false);
}
private void CopyString20(int Length, int Distance)
{
lastDist = oldDist[oldDistPtr++ & 3] = Distance;
@@ -534,6 +691,120 @@ internal partial class Unpack
return (true);
}
// Reads the RAR 2.0 Huffman table definitions from the bit stream and rebuilds
// the decode tables (BD pre-table, then LD/DD/RD or per-channel MD tables).
// Returns false on input underrun.
private async System.Threading.Tasks.Task<bool> ReadTables20Async(
System.Threading.CancellationToken cancellationToken = default
)
{
byte[] BitLength = new byte[PackDef.BC20];
byte[] Table = new byte[PackDef.MC20 * 4];
int TableSize,
N,
I;
// Make sure enough input is buffered before parsing the header bits.
if (inAddr > readTop - 25)
{
if (!await unpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return (false);
}
}
var BitField = GetBits();
// Bit 15: multimedia (audio) block. Bit 14: keep previous table for deltas;
// when clear, the old table is zeroed before delta decoding.
UnpAudioBlock = (BitField & 0x8000);
if (0 == (BitField & 0x4000))
{
new Span<byte>(UnpOldTable20).Clear();
}
AddBits(2);
if (UnpAudioBlock != 0)
{
// Bits 12-13: channel count minus one.
UnpChannels = ((Utility.URShift(BitField, 12)) & 3) + 1;
if (UnpCurChannel >= UnpChannels)
{
UnpCurChannel = 0;
}
AddBits(2);
TableSize = PackDef.MC20 * UnpChannels;
}
else
{
TableSize = PackDef.NC20 + PackDef.DC20 + PackDef.RC20;
}
// Read the 4-bit code lengths of the pre-table and build its decode table.
for (I = 0; I < PackDef.BC20; I++)
{
BitLength[I] = (byte)(Utility.URShift(GetBits(), 12));
AddBits(4);
}
UnpackUtility.makeDecodeTables(BitLength, 0, BD, PackDef.BC20);
I = 0;
while (I < TableSize)
{
if (inAddr > readTop - 5)
{
if (!await unpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return (false);
}
}
var Number = this.decodeNumber(BD);
if (Number < 16)
{
// Literal code length, delta-coded against the previous table.
Table[I] = (byte)((Number + UnpOldTable20[I]) & 0xf);
I++;
}
else if (Number == 16)
{
// Code 16: repeat the previous length 3..6 times.
N = (Utility.URShift(GetBits(), 14)) + 3;
AddBits(2);
while (N-- > 0 && I < TableSize)
{
Table[I] = Table[I - 1];
I++;
}
}
else
{
// Codes 17/18: runs of zero lengths (short and long form).
if (Number == 17)
{
N = (Utility.URShift(GetBits(), 13)) + 3;
AddBits(3);
}
else
{
N = (Utility.URShift(GetBits(), 9)) + 11;
AddBits(7);
}
while (N-- > 0 && I < TableSize)
{
Table[I++] = 0;
}
}
}
if (inAddr > readTop)
{
// Ran out of input mid-table; keep whatever tables are current.
return (true);
}
if (UnpAudioBlock != 0)
{
for (I = 0; I < UnpChannels; I++)
{
UnpackUtility.makeDecodeTables(Table, I * PackDef.MC20, MD[I], PackDef.MC20);
}
}
else
{
UnpackUtility.makeDecodeTables(Table, 0, LD, PackDef.NC20);
UnpackUtility.makeDecodeTables(Table, PackDef.NC20, DD, PackDef.DC20);
UnpackUtility.makeDecodeTables(Table, PackDef.NC20 + PackDef.DC20, RD, PackDef.RC20);
}
// Remember this table so the next one can be delta-coded against it.
for (var i = 0; i < UnpOldTable20.Length; i++)
{
UnpOldTable20[i] = Table[i];
}
return (true);
}
private void unpInitData20(bool Solid)
{
if (!Solid)

View File

@@ -1,321 +0,0 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.Rar.UnpackV1.Decode;
namespace SharpCompress.Compressors.Rar.UnpackV1;
internal partial class Unpack
{
// Refills the input buffer for the RAR5 decoder: compacts unread bytes to the
// front when the read position has passed half the buffer, reads more data,
// and updates ReadBorder / block-size bookkeeping. Returns false only when the
// underlying read reported -1.
private async Task<bool> UnpReadBufAsync(CancellationToken cancellationToken = default)
{
var DataSize = ReadTop - Inp.InAddr; // Data left to process.
if (DataSize < 0)
{
return false;
}
// Account for bytes consumed from the current block since the last refill.
BlockHeader.BlockSize -= Inp.InAddr - BlockHeader.BlockStart;
if (Inp.InAddr > MAX_SIZE / 2)
{
// Copy the remaining unread data to the buffer start to make room.
// NOTE(review): source offset uses lowercase 'inAddr' while the rest of
// this method uses 'Inp.InAddr' — presumably the same value; verify.
if (DataSize > 0)
{
Array.Copy(InBuf, inAddr, InBuf, 0, DataSize);
}
Inp.InAddr = 0;
ReadTop = DataSize;
}
else
{
DataSize = ReadTop;
}
var ReadCode = 0;
if (MAX_SIZE != DataSize)
{
ReadCode = await readStream
.ReadAsync(InBuf, DataSize, MAX_SIZE - DataSize, cancellationToken)
.ConfigureAwait(false);
}
if (ReadCode > 0) // Can be also -1.
{
ReadTop += ReadCode;
}
// Leave a 30-byte safety margin so fixed-size bit reads cannot overrun.
ReadBorder = ReadTop - 30;
BlockHeader.BlockStart = Inp.InAddr;
if (BlockHeader.BlockSize != -1) // '-1' means not defined yet.
{
ReadBorder = Math.Min(ReadBorder, BlockHeader.BlockStart + BlockHeader.BlockSize - 1);
}
return ReadCode != -1;
}
// Asynchronous RAR5 main decode loop: reads block headers and Huffman tables,
// then decodes literals (<256), filters (256), last-length repeats (257),
// recent-distance matches (258-261) and full length/distance matches (>=262)
// into the sliding window.
public async Task Unpack5Async(bool Solid, CancellationToken cancellationToken = default)
{
FileExtracted = true;
if (!Suspended)
{
UnpInitData(Solid);
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return;
}
// Check TablesRead5 to be sure that we read tables at least once
// regardless of current block header TablePresent flag.
// So we can safely use these tables below.
if (
!await ReadBlockHeaderAsync(cancellationToken).ConfigureAwait(false)
|| !ReadTables()
|| !TablesRead5
)
{
return;
}
}
while (true)
{
UnpPtr &= MaxWinMask;
if (Inp.InAddr >= ReadBorder)
{
var FileDone = false;
// We use 'while', because for empty block containing only Huffman table,
// we'll be on the block border once again just after reading the table.
while (
Inp.InAddr > BlockHeader.BlockStart + BlockHeader.BlockSize - 1
|| Inp.InAddr == BlockHeader.BlockStart + BlockHeader.BlockSize - 1
&& Inp.InBit >= BlockHeader.BlockBitSize
)
{
if (BlockHeader.LastBlockInFile)
{
FileDone = true;
break;
}
if (
!await ReadBlockHeaderAsync(cancellationToken).ConfigureAwait(false)
|| !ReadTables()
)
{
return;
}
}
if (FileDone || !await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
break;
}
}
// Flush the window when the decode pointer approaches the write border.
if (
((WriteBorder - UnpPtr) & MaxWinMask) < PackDef.MAX_LZ_MATCH + 3
&& WriteBorder != UnpPtr
)
{
UnpWriteBuf();
if (WrittenFileSize > DestUnpSize)
{
return;
}
if (Suspended)
{
FileExtracted = false;
return;
}
}
var MainSlot = this.DecodeNumber(LD);
if (MainSlot < 256)
{
// Literal byte.
Window[UnpPtr++] = (byte)MainSlot;
continue;
}
if (MainSlot >= 262)
{
// Full match: length slot then distance slot with extra bits.
var Length = SlotToLength(MainSlot - 262);
int DBits;
uint Distance = 1,
DistSlot = this.DecodeNumber(DD);
if (DistSlot < 4)
{
DBits = 0;
Distance += DistSlot;
}
else
{
DBits = (int)((DistSlot / 2) - 1);
Distance += (2 | (DistSlot & 1)) << DBits;
}
if (DBits > 0)
{
if (DBits >= 4)
{
// Wide distances: high bits from the stream, low 4 bits from
// the low-distance table LDD.
if (DBits > 4)
{
Distance += ((Inp.getbits() >> (36 - DBits)) << 4);
Inp.AddBits(DBits - 4);
}
var LowDist = this.DecodeNumber(LDD);
Distance += LowDist;
}
else
{
Distance += Inp.getbits() >> (32 - DBits);
Inp.AddBits(DBits);
}
}
// Larger distances carry an implicit minimum-length bump.
if (Distance > 0x100)
{
Length++;
if (Distance > 0x2000)
{
Length++;
if (Distance > 0x40000)
{
Length++;
}
}
}
InsertOldDist(Distance);
LastLength = Length;
CopyString(Length, Distance);
continue;
}
if (MainSlot == 256)
{
// Slot 256: a filter definition follows.
var Filter = new UnpackFilter();
if (
!await ReadFilterAsync(Filter, cancellationToken).ConfigureAwait(false)
|| !AddFilter(Filter)
)
{
break;
}
continue;
}
if (MainSlot == 257)
{
// Slot 257: repeat the last match length at the most recent distance.
if (LastLength != 0)
{
CopyString(LastLength, OldDistN(0));
}
continue;
}
if (MainSlot < 262)
{
// Slots 258-261: reuse one of the recent distances, moving it to front.
var DistNum = (int)(MainSlot - 258);
var Distance = OldDistN(DistNum);
for (var I = DistNum; I > 0; I--)
{
SetOldDistN(I, OldDistN(I - 1));
}
SetOldDistN(0, Distance);
var LengthSlot = this.DecodeNumber(RD);
var Length = SlotToLength(LengthSlot);
LastLength = Length;
CopyString(Length, Distance);
continue;
}
}
UnpWriteBuf();
}
// Parses a RAR5 compressed-block header: flags byte, 1-3 block-size bytes and
// a checksum byte (0x5a XOR flags XOR size bytes). Returns false on underrun,
// a malformed size byte count, or checksum mismatch.
private async Task<bool> ReadBlockHeaderAsync(CancellationToken cancellationToken = default)
{
Header.HeaderSize = 0;
if (!Inp.ExternalBuffer && Inp.InAddr > ReadTop - 7)
{
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return false;
}
}
// Align to the next byte boundary before reading header fields.
Inp.faddbits((uint)((8 - Inp.InBit) & 7));
var BlockFlags = (byte)(Inp.fgetbits() >> 8);
Inp.faddbits(8);
// Bits 3-4: number of block-size bytes minus one; 4 is invalid.
var ByteCount = (uint)(((BlockFlags >> 3) & 3) + 1);
if (ByteCount == 4)
{
return false;
}
Header.HeaderSize = (int)(2 + ByteCount);
// Bits 0-2: number of valid bits in the block's final byte, minus one.
Header.BlockBitSize = (BlockFlags & 7) + 1;
var SavedCheckSum = (byte)(Inp.fgetbits() >> 8);
Inp.faddbits(8);
var BlockSize = 0;
// Block size is little-endian across ByteCount bytes.
for (var I = 0; I < ByteCount; I++)
{
BlockSize += (int)(Inp.fgetbits() >> 8) << (I * 8);
Inp.AddBits(8);
}
Header.BlockSize = BlockSize;
var CheckSum = (byte)(0x5a ^ BlockFlags ^ BlockSize ^ (BlockSize >> 8) ^ (BlockSize >> 16));
if (CheckSum != SavedCheckSum)
{
return false;
}
Header.BlockStart = Inp.InAddr;
ReadBorder = Math.Min(ReadBorder, Header.BlockStart + Header.BlockSize - 1);
// Bit 6: last block of the file. Bit 7: Huffman tables follow.
Header.LastBlockInFile = (BlockFlags & 0x40) != 0;
Header.TablePresent = (BlockFlags & 0x80) != 0;
return true;
}
// Reads a RAR5 filter definition (block start/length, 3-bit filter type, and
// channel count for delta filters) from the bit stream into Filter.
private async Task<bool> ReadFilterAsync(
UnpackFilter Filter,
CancellationToken cancellationToken = default
)
{
if (!Inp.ExternalBuffer && Inp.InAddr > ReadTop - 16)
{
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return false;
}
}
Filter.uBlockStart = ReadFilterData();
Filter.uBlockLength = ReadFilterData();
// Oversized filter blocks are neutralized rather than rejected.
if (Filter.BlockLength > MAX_FILTER_BLOCK_SIZE)
{
Filter.BlockLength = 0;
}
Filter.Type = (byte)(Inp.fgetbits() >> 13);
Inp.faddbits(3);
if (Filter.Type == (byte)FilterType.FILTER_DELTA)
{
// Delta filter: 5 more bits encode the channel count minus one.
Filter.Channels = (byte)((Inp.fgetbits() >> 11) + 1);
Inp.faddbits(5);
}
return true;
}
}

View File

@@ -479,6 +479,354 @@ internal partial class Unpack
return ReadCode != -1;
}
// Refills the input buffer for the RAR5 decoder: compacts unread bytes to the
// front when the read position has passed half the buffer, reads more data,
// and updates ReadBorder / block-size bookkeeping. Returns false only when the
// underlying read reported -1.
private async System.Threading.Tasks.Task<bool> UnpReadBufAsync(
System.Threading.CancellationToken cancellationToken = default
)
{
var DataSize = ReadTop - Inp.InAddr; // Data left to process.
if (DataSize < 0)
{
return false;
}
// Account for bytes consumed from the current block since the last refill.
BlockHeader.BlockSize -= Inp.InAddr - BlockHeader.BlockStart;
if (Inp.InAddr > MAX_SIZE / 2)
{
// Copy the remaining unread data to the buffer start to make room.
// NOTE(review): source offset uses lowercase 'inAddr' while the rest of
// this method uses 'Inp.InAddr' — presumably the same value; verify.
if (DataSize > 0)
{
Array.Copy(InBuf, inAddr, InBuf, 0, DataSize);
}
Inp.InAddr = 0;
ReadTop = DataSize;
}
else
{
DataSize = ReadTop;
}
var ReadCode = 0;
if (MAX_SIZE != DataSize)
{
ReadCode = await readStream
.ReadAsync(InBuf, DataSize, MAX_SIZE - DataSize, cancellationToken)
.ConfigureAwait(false);
}
if (ReadCode > 0) // Can be also -1.
{
ReadTop += ReadCode;
}
// Leave a 30-byte safety margin so fixed-size bit reads cannot overrun.
ReadBorder = ReadTop - 30;
BlockHeader.BlockStart = Inp.InAddr;
if (BlockHeader.BlockSize != -1) // '-1' means not defined yet.
{
ReadBorder = Math.Min(ReadBorder, BlockHeader.BlockStart + BlockHeader.BlockSize - 1);
}
return ReadCode != -1;
}
// Asynchronous RAR5 main decode loop: reads block headers and Huffman tables,
// then decodes literals (<256), filters (256), last-length repeats (257),
// recent-distance matches (258-261) and full length/distance matches (>=262)
// into the sliding window. Commented-out lines are the original unrar C++
// source kept as a porting reference.
public async System.Threading.Tasks.Task Unpack5Async(
bool Solid,
System.Threading.CancellationToken cancellationToken = default
)
{
FileExtracted = true;
if (!Suspended)
{
UnpInitData(Solid);
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return;
}
// Check TablesRead5 to be sure that we read tables at least once
// regardless of current block header TablePresent flag.
// So we can safely use these tables below.
if (
!await ReadBlockHeaderAsync(cancellationToken).ConfigureAwait(false)
|| !ReadTables()
|| !TablesRead5
)
{
return;
}
}
while (true)
{
UnpPtr &= MaxWinMask;
if (Inp.InAddr >= ReadBorder)
{
var FileDone = false;
// We use 'while', because for empty block containing only Huffman table,
// we'll be on the block border once again just after reading the table.
while (
Inp.InAddr > BlockHeader.BlockStart + BlockHeader.BlockSize - 1
|| Inp.InAddr == BlockHeader.BlockStart + BlockHeader.BlockSize - 1
&& Inp.InBit >= BlockHeader.BlockBitSize
)
{
if (BlockHeader.LastBlockInFile)
{
FileDone = true;
break;
}
if (
!await ReadBlockHeaderAsync(cancellationToken).ConfigureAwait(false)
|| !ReadTables()
)
{
return;
}
}
if (FileDone || !await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
break;
}
}
// Flush the window when the decode pointer approaches the write border.
if (
((WriteBorder - UnpPtr) & MaxWinMask) < PackDef.MAX_LZ_MATCH + 3
&& WriteBorder != UnpPtr
)
{
UnpWriteBuf();
if (WrittenFileSize > DestUnpSize)
{
return;
}
if (Suspended)
{
FileExtracted = false;
return;
}
}
//uint MainSlot=DecodeNumber(Inp,LD);
var MainSlot = this.DecodeNumber(LD);
if (MainSlot < 256)
{
// Literal byte.
// if (Fragmented)
// FragWindow[UnpPtr++]=(byte)MainSlot;
// else
Window[UnpPtr++] = (byte)MainSlot;
continue;
}
if (MainSlot >= 262)
{
// Full match: length slot then distance slot with extra bits.
var Length = SlotToLength(MainSlot - 262);
//uint DBits,Distance=1,DistSlot=DecodeNumber(Inp,&BlockTables.DD);
int DBits;
uint Distance = 1,
DistSlot = this.DecodeNumber(DD);
if (DistSlot < 4)
{
DBits = 0;
Distance += DistSlot;
}
else
{
//DBits=DistSlot/2 - 1;
DBits = (int)((DistSlot / 2) - 1);
Distance += (2 | (DistSlot & 1)) << DBits;
}
if (DBits > 0)
{
if (DBits >= 4)
{
// Wide distances: high bits from the stream, low 4 bits from
// the low-distance table LDD.
if (DBits > 4)
{
Distance += ((Inp.getbits() >> (36 - DBits)) << 4);
Inp.AddBits(DBits - 4);
}
//uint LowDist=DecodeNumber(Inp,&BlockTables.LDD);
var LowDist = this.DecodeNumber(LDD);
Distance += LowDist;
}
else
{
Distance += Inp.getbits() >> (32 - DBits);
Inp.AddBits(DBits);
}
}
// Larger distances carry an implicit minimum-length bump.
if (Distance > 0x100)
{
Length++;
if (Distance > 0x2000)
{
Length++;
if (Distance > 0x40000)
{
Length++;
}
}
}
InsertOldDist(Distance);
LastLength = Length;
// if (Fragmented)
// FragWindow.CopyString(Length,Distance,UnpPtr,MaxWinMask);
// else
CopyString(Length, Distance);
continue;
}
if (MainSlot == 256)
{
// Slot 256: a filter definition follows.
var Filter = new UnpackFilter();
if (
!await ReadFilterAsync(Filter, cancellationToken).ConfigureAwait(false)
|| !AddFilter(Filter)
)
{
break;
}
continue;
}
if (MainSlot == 257)
{
// Slot 257: repeat the last match length at the most recent distance.
if (LastLength != 0)
// if (Fragmented)
// FragWindow.CopyString(LastLength,OldDist[0],UnpPtr,MaxWinMask);
// else
//CopyString(LastLength,OldDist[0]);
{
CopyString(LastLength, OldDistN(0));
}
continue;
}
if (MainSlot < 262)
{
// Slots 258-261: reuse one of the recent distances, moving it to front.
//uint DistNum=MainSlot-258;
var DistNum = (int)(MainSlot - 258);
//uint Distance=OldDist[DistNum];
var Distance = OldDistN(DistNum);
//for (uint I=DistNum;I>0;I--)
for (var I = DistNum; I > 0; I--)
//OldDistN[I]=OldDistN(I-1);
{
SetOldDistN(I, OldDistN(I - 1));
}
//OldDistN[0]=Distance;
SetOldDistN(0, Distance);
var LengthSlot = this.DecodeNumber(RD);
var Length = SlotToLength(LengthSlot);
LastLength = Length;
// if (Fragmented)
// FragWindow.CopyString(Length,Distance,UnpPtr,MaxWinMask);
// else
CopyString(Length, Distance);
continue;
}
}
UnpWriteBuf();
}
// Parses a RAR5 compressed-block header: flags byte, 1-3 block-size bytes and
// a checksum byte (0x5a XOR flags XOR size bytes). Returns false on underrun,
// a malformed size byte count, or checksum mismatch. Commented-out lines are
// the original unrar C++ source kept as a porting reference.
private async System.Threading.Tasks.Task<bool> ReadBlockHeaderAsync(
System.Threading.CancellationToken cancellationToken = default
)
{
Header.HeaderSize = 0;
if (!Inp.ExternalBuffer && Inp.InAddr > ReadTop - 7)
{
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return false;
}
}
// Align to the next byte boundary before reading header fields.
//Inp.faddbits((8-Inp.InBit)&7);
Inp.faddbits((uint)((8 - Inp.InBit) & 7));
var BlockFlags = (byte)(Inp.fgetbits() >> 8);
Inp.faddbits(8);
//uint ByteCount=((BlockFlags>>3)&3)+1; // Block size byte count.
var ByteCount = (uint)(((BlockFlags >> 3) & 3) + 1); // Block size byte count.
if (ByteCount == 4)
{
return false;
}
//Header.HeaderSize=2+ByteCount;
Header.HeaderSize = (int)(2 + ByteCount);
// Bits 0-2: number of valid bits in the block's final byte, minus one.
Header.BlockBitSize = (BlockFlags & 7) + 1;
var SavedCheckSum = (byte)(Inp.fgetbits() >> 8);
Inp.faddbits(8);
var BlockSize = 0;
// Block size is little-endian across ByteCount bytes.
//for (uint I=0;I<ByteCount;I++)
for (var I = 0; I < ByteCount; I++)
{
//BlockSize+=(Inp.fgetbits()>>8)<<(I*8);
BlockSize += (int)(Inp.fgetbits() >> 8) << (I * 8);
Inp.AddBits(8);
}
Header.BlockSize = BlockSize;
var CheckSum = (byte)(0x5a ^ BlockFlags ^ BlockSize ^ (BlockSize >> 8) ^ (BlockSize >> 16));
if (CheckSum != SavedCheckSum)
{
return false;
}
Header.BlockStart = Inp.InAddr;
ReadBorder = Math.Min(ReadBorder, Header.BlockStart + Header.BlockSize - 1);
// Bit 6: last block of the file. Bit 7: Huffman tables follow.
Header.LastBlockInFile = (BlockFlags & 0x40) != 0;
Header.TablePresent = (BlockFlags & 0x80) != 0;
return true;
}
// Reads a RAR5 filter definition (block start/length, 3-bit filter type, and
// channel count for delta filters) from the bit stream into Filter.
private async System.Threading.Tasks.Task<bool> ReadFilterAsync(
UnpackFilter Filter,
System.Threading.CancellationToken cancellationToken = default
)
{
if (!Inp.ExternalBuffer && Inp.InAddr > ReadTop - 16)
{
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return false;
}
}
Filter.uBlockStart = ReadFilterData();
Filter.uBlockLength = ReadFilterData();
// Oversized filter blocks are neutralized rather than rejected.
if (Filter.BlockLength > MAX_FILTER_BLOCK_SIZE)
{
Filter.BlockLength = 0;
}
//Filter.Type=Inp.fgetbits()>>13;
Filter.Type = (byte)(Inp.fgetbits() >> 13);
Inp.faddbits(3);
if (Filter.Type == (byte)FilterType.FILTER_DELTA)
{
// Delta filter: 5 more bits encode the channel count minus one.
//Filter.Channels=(Inp.fgetbits()>>11)+1;
Filter.Channels = (byte)((Inp.fgetbits() >> 11) + 1);
Inp.faddbits(5);
}
return true;
}
//?
// void UnpWriteBuf()
// {

View File

@@ -1,7 +1,5 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Rar.Headers;
using size_t = System.UInt32;
@@ -25,11 +23,11 @@ internal partial class Unpack : IRarUnpack
// NOTE: caller has logic to check for -1 for error we throw instead.
readStream.Read(buf, offset, count);
private async Task<int> UnpIO_UnpReadAsync(
private async System.Threading.Tasks.Task<int> UnpIO_UnpReadAsync(
byte[] buf,
int offset,
int count,
CancellationToken cancellationToken = default
System.Threading.CancellationToken cancellationToken = default
) =>
// NOTE: caller has logic to check for -1 for error we throw instead.
await readStream.ReadAsync(buf, offset, count, cancellationToken).ConfigureAwait(false);
@@ -37,11 +35,11 @@ internal partial class Unpack : IRarUnpack
private void UnpIO_UnpWrite(byte[] buf, size_t offset, uint count) =>
writeStream.Write(buf, checked((int)offset), checked((int)count));
private async Task UnpIO_UnpWriteAsync(
private async System.Threading.Tasks.Task UnpIO_UnpWriteAsync(
byte[] buf,
size_t offset,
uint count,
CancellationToken cancellationToken = default
System.Threading.CancellationToken cancellationToken = default
) =>
await writeStream
.WriteAsync(buf, checked((int)offset), checked((int)count), cancellationToken)
@@ -68,11 +66,11 @@ internal partial class Unpack : IRarUnpack
DoUnpack();
}
public async Task DoUnpackAsync(
public async System.Threading.Tasks.Task DoUnpackAsync(
FileHeader fileHeader,
Stream readStream,
Stream writeStream,
CancellationToken cancellationToken = default
System.Threading.CancellationToken cancellationToken = default
)
{
DestUnpSize = fileHeader.UncompressedSize;
@@ -99,7 +97,9 @@ internal partial class Unpack : IRarUnpack
}
}
public async Task DoUnpackAsync(CancellationToken cancellationToken = default)
public async System.Threading.Tasks.Task DoUnpackAsync(
System.Threading.CancellationToken cancellationToken = default
)
{
if (fileHeader.IsStored)
{
@@ -133,7 +133,9 @@ internal partial class Unpack : IRarUnpack
} while (!Suspended);
}
private async Task UnstoreFileAsync(CancellationToken cancellationToken = default)
private async System.Threading.Tasks.Task UnstoreFileAsync(
System.Threading.CancellationToken cancellationToken = default
)
{
var buffer = new byte[(int)Math.Min(0x10000, DestUnpSize)];
do

View File

@@ -1,100 +0,0 @@
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Compressors.Rar.UnpackV2017;
internal partial class Unpack
{
// Asynchronous RAR 1.5 decode loop. A rolling flag byte (FlagBuf) selects
// between LongLZ, HuffDecode and ShortLZ per symbol; the Nlzb/Nhfb counters
// bias which decoder the set flag bit maps to.
private async Task Unpack15Async(bool Solid, CancellationToken cancellationToken = default)
{
UnpInitData(Solid);
UnpInitData15(Solid);
await UnpReadBufAsync(cancellationToken).ConfigureAwait(false);
if (!Solid)
{
InitHuff();
UnpPtr = 0;
}
else
{
// Solid continuation: resume from the current write pointer.
UnpPtr = WrPtr;
}
--DestUnpSize;
if (DestUnpSize >= 0)
{
GetFlagsBuf();
FlagsCnt = 8;
}
while (DestUnpSize >= 0)
{
UnpPtr &= MaxWinMask;
// Refill input when fewer than 30 bytes remain unread.
if (
Inp.InAddr > ReadTop - 30
&& !await UnpReadBufAsync(cancellationToken).ConfigureAwait(false)
)
{
break;
}
// Flush the window once the decode pointer closes in on the write pointer.
if (((WrPtr - UnpPtr) & MaxWinMask) < 270 && WrPtr != UnpPtr)
{
UnpWriteBuf20();
}
if (StMode != 0)
{
// "Stored" Huffman mode: everything goes through HuffDecode.
HuffDecode();
continue;
}
if (--FlagsCnt < 0)
{
GetFlagsBuf();
FlagsCnt = 7;
}
if ((FlagBuf & 0x80) != 0)
{
FlagBuf <<= 1;
if (Nlzb > Nhfb)
{
LongLZ();
}
else
{
HuffDecode();
}
}
else
{
// Second flag bit picks between the opposite decoder pair or ShortLZ.
FlagBuf <<= 1;
if (--FlagsCnt < 0)
{
GetFlagsBuf();
FlagsCnt = 7;
}
if ((FlagBuf & 0x80) != 0)
{
FlagBuf <<= 1;
if (Nlzb > Nhfb)
{
HuffDecode();
}
else
{
LongLZ();
}
}
else
{
FlagBuf <<= 1;
ShortLZ();
}
}
}
UnpWriteBuf20();
}
}

View File

@@ -200,6 +200,102 @@ internal partial class Unpack
UnpWriteBuf20();
}
// Asynchronous RAR 1.5 decode loop. A rolling flag byte (FlagBuf) selects
// between LongLZ, HuffDecode and ShortLZ per symbol; the Nlzb/Nhfb counters
// bias which decoder the set flag bit maps to. Window flushes are awaited.
private async System.Threading.Tasks.Task Unpack15Async(
bool Solid,
System.Threading.CancellationToken cancellationToken = default
)
{
UnpInitData(Solid);
UnpInitData15(Solid);
await UnpReadBufAsync(cancellationToken).ConfigureAwait(false);
if (!Solid)
{
InitHuff();
UnpPtr = 0;
}
else
{
// Solid continuation: resume from the current write pointer.
UnpPtr = WrPtr;
}
--DestUnpSize;
if (DestUnpSize >= 0)
{
GetFlagsBuf();
FlagsCnt = 8;
}
while (DestUnpSize >= 0)
{
UnpPtr &= MaxWinMask;
// Refill input when fewer than 30 bytes remain unread.
if (
Inp.InAddr > ReadTop - 30
&& !await UnpReadBufAsync(cancellationToken).ConfigureAwait(false)
)
{
break;
}
// Flush the window once the decode pointer closes in on the write pointer.
if (((WrPtr - UnpPtr) & MaxWinMask) < 270 && WrPtr != UnpPtr)
{
await UnpWriteBuf20Async(cancellationToken).ConfigureAwait(false);
}
if (StMode != 0)
{
// "Stored" Huffman mode: everything goes through HuffDecode.
HuffDecode();
continue;
}
if (--FlagsCnt < 0)
{
GetFlagsBuf();
FlagsCnt = 7;
}
if ((FlagBuf & 0x80) != 0)
{
FlagBuf <<= 1;
if (Nlzb > Nhfb)
{
LongLZ();
}
else
{
HuffDecode();
}
}
else
{
// Second flag bit picks between the opposite decoder pair or ShortLZ.
FlagBuf <<= 1;
if (--FlagsCnt < 0)
{
GetFlagsBuf();
FlagsCnt = 7;
}
if ((FlagBuf & 0x80) != 0)
{
FlagBuf <<= 1;
if (Nlzb > Nhfb)
{
HuffDecode();
}
else
{
LongLZ();
}
}
else
{
FlagBuf <<= 1;
ShortLZ();
}
}
}
await UnpWriteBuf20Async(cancellationToken).ConfigureAwait(false);
}
//#define GetShortLen1(pos) ((pos)==1 ? Buf60+3:ShortLen1[pos])
// C# port of the unrar macro above: position 1 is adjusted by the Buf60 flag,
// all other positions come straight from the ShortLen1 table.
private uint GetShortLen1(uint pos) => ((pos) == 1 ? (uint)(Buf60 + 3) : ShortLen1[pos]);

View File

@@ -1,319 +0,0 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using static SharpCompress.Compressors.Rar.UnpackV2017.PackDef;
using static SharpCompress.Compressors.Rar.UnpackV2017.Unpack.Unpack20Local;
namespace SharpCompress.Compressors.Rar.UnpackV2017;
internal partial class Unpack
{
// Asynchronous RAR 2.0 decode loop (UnpackV2017 port): emits literals, copies
// LZ matches, and handles multimedia ("audio") blocks, re-reading Huffman
// tables on codes 269 / audio-256.
private async Task Unpack20Async(bool Solid, CancellationToken cancellationToken = default)
{
uint Bits;
if (Suspended)
{
// Resuming a suspended unpack: restart decoding from the write pointer.
UnpPtr = WrPtr;
}
else
{
UnpInitData(Solid);
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return;
}
// Tables are re-read unless this is a solid continuation that already
// has them (TablesRead2).
if (
(!Solid || !TablesRead2)
&& !await ReadTables20Async(cancellationToken).ConfigureAwait(false)
)
{
return;
}
--DestUnpSize;
}
while (DestUnpSize >= 0)
{
UnpPtr &= MaxWinMask;
// Refill input when fewer than 30 bytes remain unread.
if (Inp.InAddr > ReadTop - 30)
{
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
break;
}
}
// Flush the window once the decode pointer closes in on the write pointer.
if (((WrPtr - UnpPtr) & MaxWinMask) < 270 && WrPtr != UnpPtr)
{
UnpWriteBuf20();
if (Suspended)
{
return;
}
}
if (UnpAudioBlock)
{
// Multimedia block: each channel has its own decode table in MD.
var AudioNumber = DecodeNumber(Inp, MD[UnpCurChannel]);
if (AudioNumber == 256)
{
// Code 256 inside an audio block signals a table switch.
if (!await ReadTables20Async(cancellationToken).ConfigureAwait(false))
{
break;
}
continue;
}
Window[UnpPtr++] = DecodeAudio((int)AudioNumber);
if (++UnpCurChannel == UnpChannels)
{
UnpCurChannel = 0;
}
--DestUnpSize;
continue;
}
var Number = DecodeNumber(Inp, BlockTables.LD);
if (Number < 256)
{
// Literal byte.
Window[UnpPtr++] = (byte)Number;
--DestUnpSize;
continue;
}
if (Number > 269)
{
// Codes 270+: long match; length from LDecode/LBits, distance from DD.
var Length = (uint)(LDecode[Number -= 270] + 3);
if ((Bits = LBits[Number]) > 0)
{
Length += Inp.getbits() >> (int)(16 - Bits);
Inp.addbits(Bits);
}
var DistNumber = DecodeNumber(Inp, BlockTables.DD);
var Distance = DDecode[DistNumber] + 1;
if ((Bits = DBits[DistNumber]) > 0)
{
Distance += Inp.getbits() >> (int)(16 - Bits);
Inp.addbits(Bits);
}
// Larger distances carry an implicit minimum-length bump.
if (Distance >= 0x2000)
{
Length++;
if (Distance >= 0x40000L)
{
Length++;
}
}
CopyString20(Length, Distance);
continue;
}
if (Number == 269)
{
// Code 269: new Huffman tables follow in the stream.
if (!await ReadTables20Async(cancellationToken).ConfigureAwait(false))
{
break;
}
continue;
}
if (Number == 256)
{
// Code 256: repeat the previous match verbatim.
CopyString20(LastLength, LastDist);
continue;
}
if (Number < 261)
{
// Codes 257-260: reuse one of the four most recent distances.
var Distance = OldDist[(OldDistPtr - (Number - 256)) & 3];
var LengthNumber = DecodeNumber(Inp, BlockTables.RD);
var Length = (uint)(LDecode[LengthNumber] + 2);
if ((Bits = LBits[LengthNumber]) > 0)
{
Length += Inp.getbits() >> (int)(16 - Bits);
Inp.addbits(Bits);
}
if (Distance >= 0x101)
{
Length++;
if (Distance >= 0x2000)
{
Length++;
if (Distance >= 0x40000)
{
Length++;
}
}
}
CopyString20(Length, Distance);
continue;
}
if (Number < 270)
{
// Codes 261-269: two-byte match with a short-distance table (SDDecode).
var Distance = (uint)(SDDecode[Number -= 261] + 1);
if ((Bits = SDBits[Number]) > 0)
{
Distance += Inp.getbits() >> (int)(16 - Bits);
Inp.addbits(Bits);
}
CopyString20(2, Distance);
continue;
}
}
ReadLastTables();
UnpWriteBuf20();
}
// Flushes decoded window data to the output stream. Handles the wrap-around
// case (UnpPtr < WrPtr) by writing the tail of the window and then the head.
private async Task UnpWriteBuf20Async(CancellationToken cancellationToken = default)
{
if (UnpPtr != WrPtr)
{
UnpSomeRead = true;
}
if (UnpPtr < WrPtr)
{
// Decode pointer wrapped: write from WrPtr to the end of the window...
await UnpIO_UnpWriteAsync(
Window,
WrPtr,
(uint)(-(int)WrPtr & MaxWinMask),
cancellationToken
)
.ConfigureAwait(false);
// ...then the freshly decoded bytes at the start of the window.
await UnpIO_UnpWriteAsync(Window, 0, UnpPtr, cancellationToken).ConfigureAwait(false);
UnpAllBuf = true;
}
else
{
await UnpIO_UnpWriteAsync(Window, WrPtr, UnpPtr - WrPtr, cancellationToken)
.ConfigureAwait(false);
}
WrPtr = UnpPtr;
}
// Reads the RAR 2.0 Huffman table definitions (UnpackV2017 port) and rebuilds
// the decode tables (BD pre-table, then LD/DD/RD or per-channel MD tables).
// Returns false on input underrun or malformed RLE data.
private async Task<bool> ReadTables20Async(CancellationToken cancellationToken = default)
{
byte[] BitLength = new byte[checked((int)BC20)];
byte[] Table = new byte[checked((int)MC20 * 4)];
// Make sure enough input is buffered before parsing the header bits.
if (Inp.InAddr > ReadTop - 25)
{
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return false;
}
}
var BitField = Inp.getbits();
// Bit 15: multimedia (audio) block.
UnpAudioBlock = (BitField & 0x8000) != 0;
// NOTE(review): upstream unrar clears the old table when bit 0x4000 is NOT
// set; the UnpackV1 port in this repo does the same. This inverted check
// looks suspicious — verify against reference unrar before changing.
if ((BitField & 0x4000) != 0)
{
Array.Clear(UnpOldTable20, 0, UnpOldTable20.Length);
}
Inp.addbits(2);
uint TableSize;
if (UnpAudioBlock)
{
// Bits 12-13: channel count minus one.
UnpChannels = ((BitField >> 12) & 3) + 1;
if (UnpCurChannel >= UnpChannels)
{
UnpCurChannel = 0;
}
Inp.addbits(2);
TableSize = MC20 * UnpChannels;
}
else
{
TableSize = NC20 + DC20 + RC20;
}
// Read the 4-bit code lengths of the pre-table and build its decode table.
for (int I = 0; I < checked((int)BC20); I++)
{
BitLength[I] = (byte)(Inp.getbits() >> 12);
Inp.addbits(4);
}
MakeDecodeTables(BitLength, 0, BlockTables.BD, BC20);
for (int I = 0; I < checked((int)TableSize); )
{
if (Inp.InAddr > ReadTop - 5)
{
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return false;
}
}
var Number = DecodeNumber(Inp, BlockTables.BD);
if (Number < 16)
{
// Literal code length, delta-coded against the previous table.
Table[I] = (byte)((Number + UnpOldTable20[I]) & 0xf);
I++;
}
else if (Number == 16)
{
// Code 16: repeat the previous length 3..6 times.
var N = (Inp.getbits() >> 14) + 3;
Inp.addbits(2);
if (I == 0)
{
return false; // We cannot have "repeat previous" code at the first position.
}
else
{
while (N-- > 0 && I < TableSize)
{
Table[I] = Table[I - 1];
I++;
}
}
}
else
{
// Codes 17/18: runs of zero lengths (short and long form).
uint N;
if (Number == 17)
{
N = (Inp.getbits() >> 13) + 3;
Inp.addbits(3);
}
else
{
N = (Inp.getbits() >> 9) + 11;
Inp.addbits(7);
}
while (N-- > 0 && I < TableSize)
{
Table[I++] = 0;
}
}
}
TablesRead2 = true;
if (Inp.InAddr > ReadTop)
{
// Ran out of input mid-table; keep whatever tables are current.
return true;
}
if (UnpAudioBlock)
{
for (int I = 0; I < UnpChannels; I++)
{
MakeDecodeTables(Table, (int)(I * MC20), MD[I], MC20);
}
}
else
{
MakeDecodeTables(Table, 0, BlockTables.LD, NC20);
MakeDecodeTables(Table, (int)NC20, BlockTables.DD, DC20);
MakeDecodeTables(Table, (int)(NC20 + DC20), BlockTables.RD, RC20);
}
// Remember this table so the next one can be delta-coded against it.
Array.Copy(Table, 0, this.UnpOldTable20, 0, UnpOldTable20.Length);
return true;
}
}

View File

@@ -342,6 +342,170 @@ internal partial class Unpack
UnpWriteBuf20();
}
// Asynchronous RAR 2.0 decode loop (UnpackV2017 port): emits literals, copies
// LZ matches, and handles multimedia ("audio") blocks, re-reading Huffman
// tables on codes 269 / audio-256. Window flushes are awaited.
private async System.Threading.Tasks.Task Unpack20Async(
bool Solid,
System.Threading.CancellationToken cancellationToken = default
)
{
uint Bits;
if (Suspended)
{
// Resuming a suspended unpack: restart decoding from the write pointer.
UnpPtr = WrPtr;
}
else
{
UnpInitData(Solid);
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return;
}
// Tables are re-read unless this is a solid continuation that already
// has them (TablesRead2).
if (
(!Solid || !TablesRead2)
&& !await ReadTables20Async(cancellationToken).ConfigureAwait(false)
)
{
return;
}
--DestUnpSize;
}
while (DestUnpSize >= 0)
{
UnpPtr &= MaxWinMask;
// Refill input when fewer than 30 bytes remain unread.
if (Inp.InAddr > ReadTop - 30)
{
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
break;
}
}
// Flush the window once the decode pointer closes in on the write pointer.
if (((WrPtr - UnpPtr) & MaxWinMask) < 270 && WrPtr != UnpPtr)
{
await UnpWriteBuf20Async(cancellationToken).ConfigureAwait(false);
if (Suspended)
{
return;
}
}
if (UnpAudioBlock)
{
// Multimedia block: each channel has its own decode table in MD.
var AudioNumber = DecodeNumber(Inp, MD[UnpCurChannel]);
if (AudioNumber == 256)
{
// Code 256 inside an audio block signals a table switch.
if (!await ReadTables20Async(cancellationToken).ConfigureAwait(false))
{
break;
}
continue;
}
Window[UnpPtr++] = DecodeAudio((int)AudioNumber);
if (++UnpCurChannel == UnpChannels)
{
UnpCurChannel = 0;
}
--DestUnpSize;
continue;
}
var Number = DecodeNumber(Inp, BlockTables.LD);
if (Number < 256)
{
// Literal byte.
Window[UnpPtr++] = (byte)Number;
--DestUnpSize;
continue;
}
if (Number > 269)
{
// Codes 270+: long match; length from LDecode/LBits, distance from DD.
var Length = (uint)(LDecode[Number -= 270] + 3);
if ((Bits = LBits[Number]) > 0)
{
Length += Inp.getbits() >> (int)(16 - Bits);
Inp.addbits(Bits);
}
var DistNumber = DecodeNumber(Inp, BlockTables.DD);
var Distance = DDecode[DistNumber] + 1;
if ((Bits = DBits[DistNumber]) > 0)
{
Distance += Inp.getbits() >> (int)(16 - Bits);
Inp.addbits(Bits);
}
// Larger distances carry an implicit minimum-length bump.
if (Distance >= 0x2000)
{
Length++;
if (Distance >= 0x40000L)
{
Length++;
}
}
CopyString20(Length, Distance);
continue;
}
if (Number == 269)
{
// Code 269: new Huffman tables follow in the stream.
if (!await ReadTables20Async(cancellationToken).ConfigureAwait(false))
{
break;
}
continue;
}
if (Number == 256)
{
// Code 256: repeat the previous match verbatim.
CopyString20(LastLength, LastDist);
continue;
}
if (Number < 261)
{
// Codes 257-260: reuse one of the four most recent distances.
var Distance = OldDist[(OldDistPtr - (Number - 256)) & 3];
var LengthNumber = DecodeNumber(Inp, BlockTables.RD);
var Length = (uint)(LDecode[LengthNumber] + 2);
if ((Bits = LBits[LengthNumber]) > 0)
{
Length += Inp.getbits() >> (int)(16 - Bits);
Inp.addbits(Bits);
}
if (Distance >= 0x101)
{
Length++;
if (Distance >= 0x2000)
{
Length++;
if (Distance >= 0x40000)
{
Length++;
}
}
}
CopyString20(Length, Distance);
continue;
}
if (Number < 270)
{
// Codes 261-269: two-byte match with a short-distance table (SDDecode).
var Distance = (uint)(SDDecode[Number -= 261] + 1);
if ((Bits = SDBits[Number]) > 0)
{
Distance += Inp.getbits() >> (int)(16 - Bits);
Inp.addbits(Bits);
}
CopyString20(2, Distance);
continue;
}
}
ReadLastTables();
await UnpWriteBuf20Async(cancellationToken).ConfigureAwait(false);
}
private void UnpWriteBuf20()
{
if (UnpPtr != WrPtr)
@@ -363,6 +527,36 @@ internal partial class Unpack
WrPtr = UnpPtr;
}
/// <summary>
/// Async flush of the decoded sliding window between <c>WrPtr</c> and
/// <c>UnpPtr</c> to the output, handling the window wrap-around case.
/// </summary>
private async System.Threading.Tasks.Task UnpWriteBuf20Async(
    System.Threading.CancellationToken cancellationToken = default
)
{
    if (UnpPtr != WrPtr)
    {
        UnpSomeRead = true;
    }
    if (UnpPtr < WrPtr)
    {
        // The unpack pointer wrapped: write tail (WrPtr..end) then head (0..UnpPtr).
        await UnpIO_UnpWriteAsync(
            Window,
            WrPtr,
            (uint)(-(int)WrPtr & MaxWinMask),
            cancellationToken
        )
            .ConfigureAwait(false);
        await UnpIO_UnpWriteAsync(Window, 0, UnpPtr, cancellationToken).ConfigureAwait(false);
        UnpAllBuf = true;
    }
    else
    {
        await UnpIO_UnpWriteAsync(Window, WrPtr, UnpPtr - WrPtr, cancellationToken)
            .ConfigureAwait(false);
    }
    WrPtr = UnpPtr;
}
private bool ReadTables20()
{
Span<byte> BitLength = stackalloc byte[checked((int)BC20)];
@@ -483,6 +677,130 @@ internal partial class Unpack
return true;
}
/// <summary>
/// Async read of the RAR 2.0 Huffman tables (LZ or per-channel audio tables)
/// from the bit input, updating <c>UnpOldTable20</c> for delta-coded lengths.
/// </summary>
/// <returns>False on truncated/invalid input; true when tables were built.</returns>
private async System.Threading.Tasks.Task<bool> ReadTables20Async(
    System.Threading.CancellationToken cancellationToken = default
)
{
    byte[] BitLength = new byte[checked((int)BC20)];
    byte[] Table = new byte[checked((int)MC20 * 4)];
    // Ensure at least the table header bits are buffered.
    if (Inp.InAddr > ReadTop - 25)
    {
        if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
        {
            return false;
        }
    }
    var BitField = Inp.getbits();
    UnpAudioBlock = (BitField & 0x8000) != 0;
    // Bit 0x4000 clears the old table, disabling delta coding for this block.
    if ((BitField & 0x4000) != 0)
    {
        Array.Clear(UnpOldTable20, 0, UnpOldTable20.Length);
    }
    Inp.addbits(2);
    uint TableSize;
    if (UnpAudioBlock)
    {
        UnpChannels = ((BitField >> 12) & 3) + 1;
        if (UnpCurChannel >= UnpChannels)
        {
            UnpCurChannel = 0;
        }
        Inp.addbits(2);
        TableSize = MC20 * UnpChannels;
    }
    else
    {
        TableSize = NC20 + DC20 + RC20;
    }
    // First decode the bit lengths of the "pre-table" (BD) used to decode the real table.
    for (int I = 0; I < checked((int)BC20); I++)
    {
        BitLength[I] = (byte)(Inp.getbits() >> 12);
        Inp.addbits(4);
    }
    MakeDecodeTables(BitLength, 0, BlockTables.BD, BC20);
    for (int I = 0; I < checked((int)TableSize); )
    {
        if (Inp.InAddr > ReadTop - 5)
        {
            if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
            {
                return false;
            }
        }
        var Number = DecodeNumber(Inp, BlockTables.BD);
        if (Number < 16)
        {
            // Literal code length, delta-coded against the previous block's table.
            Table[I] = (byte)((Number + UnpOldTable20[I]) & 0xF);
            I++;
        }
        else if (Number < 18)
        {
            // Repeat the previous length N times.
            uint N;
            if (Number == 16)
            {
                N = (Inp.getbits() >> 14) + 3;
                Inp.addbits(2);
            }
            else
            {
                N = (Inp.getbits() >> 13) + 11;
                Inp.addbits(3);
            }
            // "Repeat previous" at position 0 has no previous entry — corrupt data.
            if (I == 0)
            {
                return false;
            }
            while (N-- > 0 && I < checked((int)TableSize))
            {
                Table[I] = Table[I - 1];
                I++;
            }
        }
        else
        {
            // Run of N zero lengths.
            uint N;
            if (Number == 18)
            {
                N = (Inp.getbits() >> 13) + 3;
                Inp.addbits(3);
            }
            else
            {
                N = (Inp.getbits() >> 9) + 11;
                Inp.addbits(7);
            }
            while (N-- > 0 && I < checked((int)TableSize))
            {
                Table[I++] = 0;
            }
        }
    }
    if (UnpAudioBlock)
    {
        // One decode table per audio channel.
        for (int I = 0; I < UnpChannels; I++)
        {
            MakeDecodeTables(Table, (int)(I * MC20), MD[I], MC20);
        }
    }
    else
    {
        MakeDecodeTables(Table, 0, BlockTables.LD, NC20);
        MakeDecodeTables(Table, (int)NC20, BlockTables.DD, DC20);
        MakeDecodeTables(Table, (int)(NC20 + DC20), BlockTables.RD, RC20);
    }
    // Keep the raw lengths for delta coding of the next block's table.
    Array.Copy(Table, 0, this.UnpOldTable20, 0, UnpOldTable20.Length);
    return true;
}
private void ReadLastTables()
{
if (ReadTop >= Inp.InAddr + 5)

View File

@@ -1,709 +0,0 @@
#nullable disable
using System;
using System.Threading;
using System.Threading.Tasks;
using static SharpCompress.Compressors.Rar.UnpackV2017.PackDef;
using static SharpCompress.Compressors.Rar.UnpackV2017.UnpackGlobal;
using size_t = System.UInt32;
namespace SharpCompress.Compressors.Rar.UnpackV2017;
internal partial class Unpack
{
/// <summary>
/// Async main decode loop for the RAR 5.0 method: block-structured LZ stream with
/// filters, supporting both the contiguous <c>Window</c> and the
/// <c>Fragmented</c> window, and suspend/resume via <c>Suspended</c>.
/// </summary>
/// <param name="Solid">True when continuing a solid stream.</param>
/// <param name="cancellationToken">Propagated to the async read/write helpers.</param>
private async Task Unpack5Async(bool Solid, CancellationToken cancellationToken = default)
{
    FileExtracted = true;
    if (!Suspended)
    {
        UnpInitData(Solid);
        if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
        {
            return;
        }
        // Check TablesRead5 to be sure that we read tables at least once
        // regardless of current block header TablePresent flag.
        // So we can safefly use these tables below.
        if (
            !await ReadBlockHeaderAsync(Inp, cancellationToken).ConfigureAwait(false)
            || !await ReadTablesAsync(Inp, cancellationToken).ConfigureAwait(false)
            || !TablesRead5
        )
        {
            return;
        }
    }
    while (true)
    {
        UnpPtr &= MaxWinMask;
        if (Inp.InAddr >= ReadBorder)
        {
            var FileDone = false;
            // We use 'while', because for empty block containing only Huffman table,
            // we'll be on the block border once again just after reading the table.
            while (
                Inp.InAddr > BlockHeader.BlockStart + BlockHeader.BlockSize - 1
                || Inp.InAddr == BlockHeader.BlockStart + BlockHeader.BlockSize - 1
                && Inp.InBit >= BlockHeader.BlockBitSize
            )
            {
                if (BlockHeader.LastBlockInFile)
                {
                    FileDone = true;
                    break;
                }
                if (
                    !await ReadBlockHeaderAsync(Inp, cancellationToken).ConfigureAwait(false)
                    || !await ReadTablesAsync(Inp, cancellationToken).ConfigureAwait(false)
                )
                {
                    return;
                }
            }
            if (FileDone || !await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
            {
                break;
            }
        }
        // Flush when a maximal match could cross the write border.
        if (((WriteBorder - UnpPtr) & MaxWinMask) < MAX_LZ_MATCH + 3 && WriteBorder != UnpPtr)
        {
            // NOTE(review): this await omits ConfigureAwait(false), unlike the
            // other awaits in this method — confirm whether that is intentional.
            await UnpWriteBufAsync(cancellationToken);
            if (WrittenFileSize > DestUnpSize)
            {
                return;
            }
            if (Suspended)
            {
                FileExtracted = false;
                return;
            }
        }
        var MainSlot = DecodeNumber(Inp, BlockTables.LD);
        if (MainSlot < 256)
        {
            // Literal byte.
            if (Fragmented)
            {
                FragWindow[UnpPtr++] = (byte)MainSlot;
            }
            else
            {
                Window[UnpPtr++] = (byte)MainSlot;
            }
            continue;
        }
        if (MainSlot >= 262)
        {
            // Regular match: decode length, then distance slot + extra bits.
            var Length = SlotToLength(Inp, MainSlot - 262);
            uint DBits,
                Distance = 1,
                DistSlot = DecodeNumber(Inp, BlockTables.DD);
            if (DistSlot < 4)
            {
                DBits = 0;
                Distance += DistSlot;
            }
            else
            {
                DBits = (DistSlot / 2) - 1;
                Distance += (2 | (DistSlot & 1)) << (int)DBits;
            }
            if (DBits > 0)
            {
                if (DBits >= 4)
                {
                    if (DBits > 4)
                    {
                        // High extra bits come raw; low 4 bits come from the LDD table.
                        Distance += ((Inp.getbits32() >> (int)(36 - DBits)) << 4);
                        Inp.addbits(DBits - 4);
                    }
                    var LowDist = DecodeNumber(Inp, BlockTables.LDD);
                    Distance += LowDist;
                }
                else
                {
                    Distance += Inp.getbits32() >> (int)(32 - DBits);
                    Inp.addbits(DBits);
                }
            }
            // Longer distances imply longer minimum match lengths.
            if (Distance > 0x100)
            {
                Length++;
                if (Distance > 0x2000)
                {
                    Length++;
                    if (Distance > 0x40000)
                    {
                        Length++;
                    }
                }
            }
            InsertOldDist(Distance);
            LastLength = Length;
            if (Fragmented)
            {
                FragWindow.CopyString(Length, Distance, ref UnpPtr, MaxWinMask);
            }
            else
            {
                CopyString(Length, Distance);
            }
            continue;
        }
        if (MainSlot == 256)
        {
            // Filter definition record.
            var Filter = new UnpackFilter();
            if (
                !await ReadFilterAsync(Inp, Filter, cancellationToken).ConfigureAwait(false)
                || !AddFilter(Filter)
            )
            {
                break;
            }
            continue;
        }
        if (MainSlot == 257)
        {
            // Repeat the previous match (most recent distance).
            if (LastLength != 0)
            {
                if (Fragmented)
                {
                    FragWindow.CopyString(LastLength, OldDist[0], ref UnpPtr, MaxWinMask);
                }
                else
                {
                    CopyString(LastLength, OldDist[0]);
                }
            }
            continue;
        }
        if (MainSlot < 262)
        {
            // Match reusing one of the recent distances; move it to the front.
            var DistNum = MainSlot - 258;
            var Distance = OldDist[DistNum];
            for (var I = DistNum; I > 0; I--)
            {
                OldDist[I] = OldDist[I - 1];
            }
            OldDist[0] = Distance;
            var LengthSlot = DecodeNumber(Inp, BlockTables.RD);
            var Length = SlotToLength(Inp, LengthSlot);
            LastLength = Length;
            if (Fragmented)
            {
                FragWindow.CopyString(Length, Distance, ref UnpPtr, MaxWinMask);
            }
            else
            {
                CopyString(Length, Distance);
            }
            continue;
        }
    }
    // NOTE(review): final flush also omits ConfigureAwait(false) — confirm intent.
    await UnpWriteBufAsync(cancellationToken);
}
/// <summary>
/// Async read of a filter record from the bit input into <paramref name="Filter"/>:
/// block start, length (clamped to 0 when above MAX_FILTER_BLOCK_SIZE), type,
/// and — for delta filters — the channel count.
/// </summary>
/// <returns>False only when the input buffer could not be refilled.</returns>
private async Task<bool> ReadFilterAsync(
    BitInput Inp,
    UnpackFilter Filter,
    CancellationToken cancellationToken = default
)
{
    // Refill unless the input is an external (pre-sized) buffer.
    if (!Inp.ExternalBuffer && Inp.InAddr > ReadTop - 16)
    {
        if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
        {
            return false;
        }
    }
    Filter.BlockStart = ReadFilterData(Inp);
    Filter.BlockLength = ReadFilterData(Inp);
    // Oversized filter blocks are neutralized rather than rejected.
    if (Filter.BlockLength > MAX_FILTER_BLOCK_SIZE)
    {
        Filter.BlockLength = 0;
    }
    Filter.Type = (byte)(Inp.fgetbits() >> 13);
    Inp.faddbits(3);
    if (Filter.Type == FILTER_DELTA)
    {
        Filter.Channels = (byte)((Inp.fgetbits() >> 11) + 1);
        Inp.faddbits(5);
    }
    return true;
}
/// <summary>
/// Async refill of the input buffer: compacts unread data to the buffer start
/// when past the halfway point, reads more input, and recomputes
/// <c>ReadBorder</c> and the current block start.
/// </summary>
/// <returns>False when the reader reported an error (read code -1).</returns>
private async Task<bool> UnpReadBufAsync(CancellationToken cancellationToken = default)
{
    var DataSize = ReadTop - Inp.InAddr; // Data left to process.
    if (DataSize < 0)
    {
        return false;
    }
    // Account for input already consumed inside the current block.
    BlockHeader.BlockSize -= Inp.InAddr - BlockHeader.BlockStart;
    if (Inp.InAddr > MAX_SIZE / 2)
    {
        // Shift the unread remainder to the front to make room for new input.
        if (DataSize > 0)
        {
            Buffer.BlockCopy(Inp.InBuf, Inp.InAddr, Inp.InBuf, 0, DataSize);
        }
        Inp.InAddr = 0;
        ReadTop = DataSize;
    }
    else
    {
        DataSize = ReadTop;
    }
    var ReadCode = 0;
    if (MAX_SIZE != DataSize)
    {
        ReadCode = await UnpIO_UnpRead Async(
            Inp.InBuf,
            DataSize,
            MAX_SIZE - DataSize,
            cancellationToken
        )
            .ConfigureAwait(false);
    }
    if (ReadCode > 0) // Can be also -1.
    {
        ReadTop += ReadCode;
    }
    // Keep a 30-byte safety margin so symbol decoding never reads past the buffer.
    ReadBorder = ReadTop - 30;
    BlockHeader.BlockStart = Inp.InAddr;
    if (BlockHeader.BlockSize != -1) // '-1' means not defined yet.
    {
        ReadBorder = Math.Min(ReadBorder, BlockHeader.BlockStart + BlockHeader.BlockSize - 1);
    }
    return ReadCode != -1;
}
/// <summary>
/// Async flush of decoded data from the window to the output, applying any
/// pending filters whose block falls inside the writable region, compacting the
/// filter list, and recomputing <c>WriteBorder</c>.
/// </summary>
private async Task UnpWriteBufAsync(CancellationToken cancellationToken = default)
{
    var WrittenBorder = WrPtr;
    var FullWriteSize = (UnpPtr - WrittenBorder) & MaxWinMask;
    var WriteSizeLeft = FullWriteSize;
    var NotAllFiltersProcessed = false;
    for (var I = 0; I < Filters.Count; I++)
    {
        var flt = Filters[I];
        if (flt.Type == FILTER_NONE)
        {
            continue;
        }
        if (flt.NextWindow)
        {
            // Filter was deferred to the next window pass; re-arm it once its
            // block start falls inside the current writable range.
            if (((flt.BlockStart - WrPtr) & MaxWinMask) <= FullWriteSize)
            {
                flt.NextWindow = false;
            }
            continue;
        }
        var BlockStart = flt.BlockStart;
        var BlockLength = flt.BlockLength;
        if (((BlockStart - WrittenBorder) & MaxWinMask) < WriteSizeLeft)
        {
            // Write unfiltered data preceding the filter block first.
            if (WrittenBorder != BlockStart)
            {
                await UnpWriteAreaAsync(WrittenBorder, BlockStart, cancellationToken)
                    .ConfigureAwait(false);
                WrittenBorder = BlockStart;
                WriteSizeLeft = (UnpPtr - WrittenBorder) & MaxWinMask;
            }
            if (BlockLength <= WriteSizeLeft)
            {
                if (BlockLength > 0)
                {
                    var BlockEnd = (BlockStart + BlockLength) & MaxWinMask;
                    FilterSrcMemory = EnsureCapacity(
                        FilterSrcMemory,
                        checked((int)BlockLength)
                    );
                    var Mem = FilterSrcMemory;
                    // Copy the filter's source bytes out of the (possibly
                    // fragmented, possibly wrapping) window into linear memory.
                    if (BlockStart < BlockEnd || BlockEnd == 0)
                    {
                        if (Fragmented)
                        {
                            FragWindow.CopyData(Mem, 0, BlockStart, BlockLength);
                        }
                        else
                        {
                            Buffer.BlockCopy(Window, (int)BlockStart, Mem, 0, (int)BlockLength);
                        }
                    }
                    else
                    {
                        // Block wraps around the window end: two-part copy.
                        var FirstPartLength = MaxWinSize - BlockStart;
                        if (Fragmented)
                        {
                            FragWindow.CopyData(Mem, 0, BlockStart, FirstPartLength);
                            FragWindow.CopyData(Mem, FirstPartLength, 0, BlockEnd);
                        }
                        else
                        {
                            Buffer.BlockCopy(
                                Window,
                                (int)BlockStart,
                                Mem,
                                0,
                                (int)FirstPartLength
                            );
                            Buffer.BlockCopy(
                                Window,
                                0,
                                Mem,
                                (int)FirstPartLength,
                                (int)BlockEnd
                            );
                        }
                    }
                    var OutMem = ApplyFilter(Mem, BlockLength, flt);
                    Filters[I].Type = FILTER_NONE;
                    if (OutMem != null)
                    {
                        await UnpIO_UnpWriteAsync(OutMem, 0, BlockLength, cancellationToken)
                            .ConfigureAwait(false);
                        WrittenFileSize += BlockLength;
                    }
                    WrittenBorder = BlockEnd;
                    WriteSizeLeft = (UnpPtr - WrittenBorder) & MaxWinMask;
                }
            }
            else
            {
                // Filter block is not fully decoded yet: postpone it and every
                // later filter that also starts in this window pass.
                NotAllFiltersProcessed = true;
                for (var J = I; J < Filters.Count; J++)
                {
                    var fltj = Filters[J];
                    if (
                        fltj.Type != FILTER_NONE
                        && fltj.NextWindow == false
                        && ((fltj.BlockStart - WrPtr) & MaxWinMask) < FullWriteSize
                    )
                    {
                        fltj.NextWindow = true;
                    }
                }
                break;
            }
        }
    }
    // Compact the list: shift live filters over consumed (FILTER_NONE) slots.
    var EmptyCount = 0;
    for (var I = 0; I < Filters.Count; I++)
    {
        if (EmptyCount > 0)
        {
            Filters[I - EmptyCount] = Filters[I];
        }
        if (Filters[I].Type == FILTER_NONE)
        {
            EmptyCount++;
        }
    }
    if (EmptyCount > 0)
    {
        Filters.RemoveRange(Filters.Count - EmptyCount, EmptyCount);
    }
    if (!NotAllFiltersProcessed)
    {
        await UnpWriteAreaAsync(WrittenBorder, UnpPtr, cancellationToken).ConfigureAwait(false);
        WrPtr = UnpPtr;
    }
    WriteBorder = (UnpPtr + Math.Min(MaxWinSize, UNPACK_MAX_WRITE)) & MaxWinMask;
    if (
        WriteBorder == UnpPtr
        || WrPtr != UnpPtr
        && ((WrPtr - UnpPtr) & MaxWinMask) < ((WriteBorder - UnpPtr) & MaxWinMask)
    )
    {
        WriteBorder = WrPtr;
    }
}
/// <summary>
/// Async write of the window region [StartPtr, EndPtr) to the output,
/// iterating fragment blocks when <c>Fragmented</c> and splitting the write
/// when the region wraps around the window end.
/// </summary>
private async Task UnpWriteAreaAsync(
    size_t StartPtr,
    size_t EndPtr,
    CancellationToken cancellationToken = default
)
{
    if (EndPtr != StartPtr)
    {
        UnpSomeRead = true;
    }
    if (EndPtr < StartPtr)
    {
        UnpAllBuf = true;
    }
    if (Fragmented)
    {
        // Walk the fragmented window block by block.
        var SizeToWrite = (EndPtr - StartPtr) & MaxWinMask;
        while (SizeToWrite > 0)
        {
            var BlockSize = FragWindow.GetBlockSize(StartPtr, SizeToWrite);
            FragWindow.GetBuffer(StartPtr, out var __buffer, out var __offset);
            await UnpWriteDataAsync(__buffer, __offset, BlockSize, cancellationToken)
                .ConfigureAwait(false);
            SizeToWrite -= BlockSize;
            StartPtr = (StartPtr + BlockSize) & MaxWinMask;
        }
    }
    else if (EndPtr < StartPtr)
    {
        // Wrapped region: write tail (StartPtr..end) then head (0..EndPtr).
        await UnpWriteDataAsync(Window, StartPtr, MaxWinSize - StartPtr, cancellationToken)
            .ConfigureAwait(false);
        await UnpWriteDataAsync(Window, 0, EndPtr, cancellationToken).ConfigureAwait(false);
    }
    else
    {
        await UnpWriteDataAsync(Window, StartPtr, EndPtr - StartPtr, cancellationToken)
            .ConfigureAwait(false);
    }
}
/// <summary>
/// Async write of up to <paramref name="Size"/> bytes from
/// <paramref name="Data"/> to the output, clamping the write so
/// <c>WrittenFileSize</c> never exceeds <c>DestUnpSize</c>.
/// </summary>
private async Task UnpWriteDataAsync(
    byte[] Data,
    size_t offset,
    size_t Size,
    CancellationToken cancellationToken = default
)
{
    if (WrittenFileSize >= DestUnpSize)
    {
        return;
    }
    var WriteSize = Size;
    var LeftToWrite = DestUnpSize - WrittenFileSize;
    if (WriteSize > LeftToWrite)
    {
        WriteSize = (size_t)LeftToWrite;
    }
    await UnpIO_UnpWriteAsync(Data, offset, WriteSize, cancellationToken).ConfigureAwait(false);
    // NOTE(review): the counter advances by the requested Size, not the clamped
    // WriteSize — confirm this over-count past DestUnpSize is intentional.
    WrittenFileSize += Size;
}
/// <summary>
/// Async read and validation of a RAR 5.0 block header into
/// <c>BlockHeader</c>: flags, bit size, byte-count-prefixed block size, and an
/// XOR checksum over the header fields.
/// </summary>
/// <returns>False on refill failure, invalid byte count, or checksum mismatch.</returns>
private async Task<bool> ReadBlockHeaderAsync(
    BitInput Inp,
    CancellationToken cancellationToken = default
)
{
    BlockHeader.HeaderSize = 0;
    if (!Inp.ExternalBuffer && Inp.InAddr > ReadTop - 7)
    {
        if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
        {
            return false;
        }
    }
    // Align to the next byte boundary before reading header bytes.
    Inp.faddbits((uint)((8 - Inp.InBit) & 7));
    var BlockFlags = (byte)(Inp.fgetbits() >> 8);
    Inp.faddbits(8);
    var ByteCount = (uint)(((BlockFlags >> 3) & 3) + 1); // Block size byte count.
    if (ByteCount == 4)
    {
        return false;
    }
    BlockHeader.HeaderSize = (int)(2 + ByteCount);
    BlockHeader.BlockBitSize = (BlockFlags & 7) + 1;
    var SavedCheckSum = (byte)(Inp.fgetbits() >> 8);
    Inp.faddbits(8);
    // Block size is stored little-endian in ByteCount bytes.
    var BlockSize = 0;
    for (uint I = 0; I < ByteCount; I++)
    {
        BlockSize += (int)((Inp.fgetbits() >> 8) << (int)(I * 8));
        Inp.addbits(8);
    }
    BlockHeader.BlockSize = BlockSize;
    var CheckSum = (byte)(0x5a ^ BlockFlags ^ BlockSize ^ (BlockSize >> 8) ^ (BlockSize >> 16));
    if (CheckSum != SavedCheckSum)
    {
        return false;
    }
    BlockHeader.BlockStart = Inp.InAddr;
    ReadBorder = Math.Min(ReadBorder, BlockHeader.BlockStart + BlockHeader.BlockSize - 1);
    BlockHeader.LastBlockInFile = (BlockFlags & 0x40) != 0;
    BlockHeader.TablePresent = (BlockFlags & 0x80) != 0;
    return true;
}
/// <summary>
/// Async read of the RAR 5.0 Huffman tables (LD/DD/LDD/RD) when the current
/// block header declares them present; sets <c>TablesRead5</c> on success.
/// </summary>
/// <returns>False on truncated or corrupt table data; true otherwise.</returns>
private async Task<bool> ReadTablesAsync(
    BitInput Inp,
    CancellationToken cancellationToken = default
)
{
    if (!BlockHeader.TablePresent)
    {
        // Block reuses the previously read tables.
        return true;
    }
    if (!Inp.ExternalBuffer && Inp.InAddr > ReadTop - 25)
    {
        if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
        {
            return false;
        }
    }
    // First decode the bit lengths of the "pre-table" (BD), with 15 acting as
    // an escape for runs of zeros.
    var BitLength = new byte[checked((int)BC)];
    for (int I = 0; I < BC; I++)
    {
        uint Length = (byte)(Inp.fgetbits() >> 12);
        Inp.faddbits(4);
        if (Length == 15)
        {
            uint ZeroCount = (byte)(Inp.fgetbits() >> 12);
            Inp.faddbits(4);
            if (ZeroCount == 0)
            {
                BitLength[I] = 15;
            }
            else
            {
                ZeroCount += 2;
                while (ZeroCount-- > 0 && I < BitLength.Length)
                {
                    BitLength[I++] = 0;
                }
                I--;
            }
        }
        else
        {
            BitLength[I] = (byte)Length;
        }
    }
    MakeDecodeTables(BitLength, 0, BlockTables.BD, BC);
    var Table = new byte[checked((int)HUFF_TABLE_SIZE)];
    const int TableSize = checked((int)HUFF_TABLE_SIZE);
    for (int I = 0; I < TableSize; )
    {
        if (!Inp.ExternalBuffer && Inp.InAddr > ReadTop - 5)
        {
            if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
            {
                return false;
            }
        }
        var Number = DecodeNumber(Inp, BlockTables.BD);
        if (Number < 16)
        {
            // Literal code length.
            Table[I] = (byte)Number;
            I++;
        }
        else if (Number < 18)
        {
            // Repeat the previous length N times.
            uint N;
            if (Number == 16)
            {
                N = (Inp.fgetbits() >> 13) + 3;
                Inp.faddbits(3);
            }
            else
            {
                N = (Inp.fgetbits() >> 9) + 11;
                Inp.faddbits(7);
            }
            if (I == 0)
            {
                // We cannot have "repeat previous" code at the first position.
                // Multiple such codes would shift Inp position without changing I,
                // which can lead to reading beyond of Inp boundary in mutithreading
                // mode, where Inp.ExternalBuffer disables bounds check and we just
                // reserve a lot of buffer space to not need such check normally.
                return false;
            }
            else
            {
                while (N-- > 0 && I < TableSize)
                {
                    Table[I] = Table[I - 1];
                    I++;
                }
            }
        }
        else
        {
            // Run of N zero lengths.
            uint N;
            if (Number == 18)
            {
                N = (Inp.fgetbits() >> 13) + 3;
                Inp.faddbits(3);
            }
            else
            {
                N = (Inp.fgetbits() >> 9) + 11;
                Inp.faddbits(7);
            }
            while (N-- > 0 && I < TableSize)
            {
                Table[I++] = 0;
            }
        }
    }
    TablesRead5 = true;
    if (!Inp.ExternalBuffer && Inp.InAddr > ReadTop)
    {
        return false;
    }
    // Slice the combined length array into the four decode tables.
    MakeDecodeTables(Table, 0, BlockTables.LD, NC);
    MakeDecodeTables(Table, (int)NC, BlockTables.DD, DC);
    MakeDecodeTables(Table, (int)(NC + DC), BlockTables.LDD, LDC);
    MakeDecodeTables(Table, (int)(NC + DC + LDC), BlockTables.RD, RC);
    return true;
}
}

View File

@@ -24,7 +24,11 @@ internal partial class Unpack
// Check TablesRead5 to be sure that we read tables at least once
// regardless of current block header TablePresent flag.
// So we can safefly use these tables below.
if (!ReadBlockHeader(Inp) || !ReadTables(Inp) || !TablesRead5)
if (
!ReadBlockHeader(Inp, ref BlockHeader)
|| !ReadTables(Inp, ref BlockHeader, ref BlockTables)
|| !TablesRead5
)
{
return;
}
@@ -51,7 +55,10 @@ internal partial class Unpack
FileDone = true;
break;
}
if (!ReadBlockHeader(Inp) || !ReadTables(Inp))
if (
!ReadBlockHeader(Inp, ref BlockHeader)
|| !ReadTables(Inp, ref BlockHeader, ref BlockTables)
)
{
return;
}
@@ -209,6 +216,180 @@ internal partial class Unpack
UnpWriteBuf();
}
/// <summary>
/// Async main decode loop for the RAR 5.0 method (this variant uses the
/// 16-bit <c>getbits</c> accessor and synchronous block-header/table reads).
/// Reads block-structured LZ symbols and emits bytes into <c>Window</c> or
/// <c>FragWindow</c>.
/// </summary>
/// <param name="Solid">True when continuing a solid stream.</param>
/// <param name="cancellationToken">Propagated to the async read/write helpers.</param>
private async System.Threading.Tasks.Task Unpack5Async(
    bool Solid,
    System.Threading.CancellationToken cancellationToken = default
)
{
    FileExtracted = true;
    if (!Suspended)
    {
        UnpInitData(Solid);
        if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
        {
            return;
        }
        // TablesRead5 guarantees tables were read at least once, regardless of
        // the current block's TablePresent flag.
        if (
            !ReadBlockHeader(Inp, ref BlockHeader)
            || !ReadTables(Inp, ref BlockHeader, ref BlockTables)
            || !TablesRead5
        )
        {
            return;
        }
    }
    while (true)
    {
        UnpPtr &= MaxWinMask;
        if (Inp.InAddr >= ReadBorder)
        {
            var FileDone = false;
            // 'while', because an empty block containing only a Huffman table
            // leaves us on the block border again right after reading it.
            while (
                Inp.InAddr > BlockHeader.BlockStart + BlockHeader.BlockSize - 1
                || Inp.InAddr == BlockHeader.BlockStart + BlockHeader.BlockSize - 1
                && Inp.InBit >= BlockHeader.BlockBitSize
            )
            {
                if (BlockHeader.LastBlockInFile)
                {
                    FileDone = true;
                    break;
                }
                if (
                    !ReadBlockHeader(Inp, ref BlockHeader)
                    || !ReadTables(Inp, ref BlockHeader, ref BlockTables)
                )
                {
                    return;
                }
            }
            if (FileDone || !await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
            {
                break;
            }
        }
        // Flush when a maximal match could cross the write border.
        if (((WriteBorder - UnpPtr) & MaxWinMask) < MAX_LZ_MATCH + 3 && WriteBorder != UnpPtr)
        {
            await UnpWriteBufAsync(cancellationToken).ConfigureAwait(false);
            if (WrittenFileSize > DestUnpSize)
            {
                return;
            }
        }
        uint MainSlot = DecodeNumber(Inp, BlockTables.LD);
        if (MainSlot < 256)
        {
            // Literal byte.
            if (Fragmented)
            {
                FragWindow[UnpPtr++] = (byte)MainSlot;
            }
            else
            {
                Window[UnpPtr++] = (byte)MainSlot;
            }
            continue;
        }
        if (MainSlot >= 262)
        {
            // Regular match: decode length, then distance slot + extra bits.
            uint Length = SlotToLength(Inp, MainSlot - 262);
            uint DBits,
                Distance = 1,
                DistSlot = DecodeNumber(Inp, BlockTables.DD);
            if (DistSlot < 4)
            {
                DBits = 0;
                Distance += DistSlot;
            }
            else
            {
                DBits = (DistSlot / 2) - 1;
                Distance += (2 | (DistSlot & 1)) << (int)DBits;
            }
            if (DBits > 0)
            {
                if (DBits >= 4)
                {
                    if (DBits > 4)
                    {
                        // High extra bits raw; low 4 bits come from the LDD table.
                        Distance += ((Inp.getbits() >> (int)(20 - DBits)) << 4);
                        Inp.addbits(DBits - 4);
                    }
                    uint LowDist = DecodeNumber(Inp, BlockTables.LDD);
                    Distance += LowDist;
                }
                else
                {
                    Distance += Inp.getbits() >> (int)(16 - DBits);
                    Inp.addbits(DBits);
                }
            }
            // Longer distances imply longer minimum match lengths.
            if (Distance > 0x100)
            {
                Length++;
                if (Distance > 0x2000)
                {
                    Length++;
                    if (Distance > 0x40000)
                    {
                        Length++;
                    }
                }
            }
            InsertOldDist(Distance);
            LastLength = Length;
            CopyString(Length, Distance);
            continue;
        }
        if (MainSlot == 256)
        {
            // Filter definition record.
            var Filter = new UnpackFilter();
            if (!ReadFilter(Inp, Filter) || !AddFilter(Filter))
            {
                break;
            }
            continue;
        }
        if (MainSlot == 257)
        {
            // Repeat the previous match (most recent distance).
            if (LastLength != 0)
            {
                CopyString((uint)LastLength, (uint)OldDist[0]);
            }
            continue;
        }
        if (MainSlot < 262)
        {
            // Match reusing one of the recent distances; move it to the front.
            uint DistNum = MainSlot - 258;
            uint Distance = (uint)OldDist[(int)DistNum];
            for (var I = (int)DistNum; I > 0; I--)
            {
                OldDist[I] = OldDist[I - 1];
            }
            OldDist[0] = Distance;
            uint LengthSlot = DecodeNumber(Inp, BlockTables.RD);
            uint Length = SlotToLength(Inp, LengthSlot);
            LastLength = Length;
            CopyString(Length, Distance);
            continue;
        }
    }
    await UnpWriteBufAsync(cancellationToken).ConfigureAwait(false);
}
private uint ReadFilterData(BitInput Inp)
{
var ByteCount = (Inp.fgetbits() >> 14) + 1;
@@ -326,6 +507,58 @@ internal partial class Unpack
return ReadCode != -1;
}
/// <summary>
/// Async refill of the input buffer: compacts unread data when past the
/// halfway point, reads more input, and recomputes <c>ReadBorder</c> and the
/// current block start.
/// </summary>
/// <returns>False when the reader reported an error (read code -1).</returns>
private async System.Threading.Tasks.Task<bool> UnpReadBufAsync(
    System.Threading.CancellationToken cancellationToken = default
)
{
    var DataSize = ReadTop - Inp.InAddr; // Data left to process.
    if (DataSize < 0)
    {
        return false;
    }
    // Account for input already consumed inside the current block.
    BlockHeader.BlockSize -= Inp.InAddr - BlockHeader.BlockStart;
    if (Inp.InAddr > MAX_SIZE / 2)
    {
        // Shift the unread remainder to the front to make room for new input.
        if (DataSize > 0)
        {
            Buffer.BlockCopy(Inp.InBuf, Inp.InAddr, Inp.InBuf, 0, DataSize);
        }
        Inp.InAddr = 0;
        ReadTop = DataSize;
    }
    else
    {
        DataSize = ReadTop;
    }
    var ReadCode = 0;
    if (MAX_SIZE != DataSize)
    {
        ReadCode = await UnpIO_UnpReadAsync(
            Inp.InBuf,
            DataSize,
            MAX_SIZE - DataSize,
            cancellationToken
        )
            .ConfigureAwait(false);
    }
    if (ReadCode > 0) // Can be also -1.
    {
        ReadTop += ReadCode;
    }
    // Keep a 30-byte safety margin so symbol decoding never reads past the buffer.
    ReadBorder = ReadTop - 30;
    BlockHeader.BlockStart = Inp.InAddr;
    if (BlockHeader.BlockSize != -1) // '-1' means not defined yet.
    {
        ReadBorder = Math.Min(ReadBorder, BlockHeader.BlockStart + BlockHeader.BlockSize - 1);
    }
    return ReadCode != -1;
}
private void UnpWriteBuf()
{
var WrittenBorder = WrPtr;
@@ -520,6 +753,163 @@ internal partial class Unpack
}
}
/// <summary>
/// Async flush of decoded data from the window to the output, applying any
/// pending filters whose block falls inside the writable region, compacting
/// the filter list, and recomputing <c>WriteBorder</c>.
/// </summary>
private async System.Threading.Tasks.Task UnpWriteBufAsync(
    System.Threading.CancellationToken cancellationToken = default
)
{
    var WrittenBorder = WrPtr;
    var FullWriteSize = (UnpPtr - WrittenBorder) & MaxWinMask;
    var WriteSizeLeft = FullWriteSize;
    var NotAllFiltersProcessed = false;
    for (var I = 0; I < Filters.Count; I++)
    {
        var flt = Filters[I];
        if (flt.Type == FILTER_NONE)
        {
            continue;
        }
        if (flt.NextWindow)
        {
            // Filter was deferred to the next window pass; re-arm it once its
            // block start falls inside the current writable range.
            if (((flt.BlockStart - WrPtr) & MaxWinMask) <= FullWriteSize)
            {
                flt.NextWindow = false;
            }
            continue;
        }
        var BlockStart = flt.BlockStart;
        var BlockLength = flt.BlockLength;
        if (((BlockStart - WrittenBorder) & MaxWinMask) < WriteSizeLeft)
        {
            // Write unfiltered data preceding the filter block first.
            if (WrittenBorder != BlockStart)
            {
                await UnpWriteAreaAsync(WrittenBorder, BlockStart, cancellationToken)
                    .ConfigureAwait(false);
                WrittenBorder = BlockStart;
                WriteSizeLeft = (UnpPtr - WrittenBorder) & MaxWinMask;
            }
            if (BlockLength <= WriteSizeLeft)
            {
                if (BlockLength > 0)
                {
                    var BlockEnd = (BlockStart + BlockLength) & MaxWinMask;
                    FilterSrcMemory = EnsureCapacity(
                        FilterSrcMemory,
                        checked((int)BlockLength)
                    );
                    var Mem = FilterSrcMemory;
                    // Copy the filter's source bytes out of the (possibly
                    // fragmented, possibly wrapping) window into linear memory.
                    if (BlockStart < BlockEnd || BlockEnd == 0)
                    {
                        if (Fragmented)
                        {
                            FragWindow.CopyData(Mem, 0, BlockStart, BlockLength);
                        }
                        else
                        {
                            Buffer.BlockCopy(Window, (int)BlockStart, Mem, 0, (int)BlockLength);
                        }
                    }
                    else
                    {
                        // Block wraps around the window end: two-part copy.
                        var FirstPartLength = MaxWinSize - BlockStart;
                        if (Fragmented)
                        {
                            FragWindow.CopyData(Mem, 0, BlockStart, FirstPartLength);
                            FragWindow.CopyData(Mem, FirstPartLength, 0, BlockEnd);
                        }
                        else
                        {
                            Buffer.BlockCopy(
                                Window,
                                (int)BlockStart,
                                Mem,
                                0,
                                (int)FirstPartLength
                            );
                            Buffer.BlockCopy(
                                Window,
                                0,
                                Mem,
                                (int)FirstPartLength,
                                (int)BlockEnd
                            );
                        }
                    }
                    var OutMem = ApplyFilter(Mem, BlockLength, flt);
                    Filters[I].Type = FILTER_NONE;
                    if (OutMem != null)
                    {
                        await UnpIO_UnpWriteAsync(OutMem, 0, BlockLength, cancellationToken)
                            .ConfigureAwait(false);
                        WrittenFileSize += BlockLength;
                    }
                    WrittenBorder = BlockEnd;
                    WriteSizeLeft = (UnpPtr - WrittenBorder) & MaxWinMask;
                }
            }
            else
            {
                // Filter block is not fully decoded yet: postpone it and every
                // later filter that also starts in this window pass.
                NotAllFiltersProcessed = true;
                for (var J = I; J < Filters.Count; J++)
                {
                    var fltj = Filters[J];
                    if (
                        fltj.Type != FILTER_NONE
                        && fltj.NextWindow == false
                        && ((fltj.BlockStart - WrPtr) & MaxWinMask) < FullWriteSize
                    )
                    {
                        fltj.NextWindow = true;
                    }
                }
                break;
            }
        }
    }
    // Compact the list: shift live filters over consumed (FILTER_NONE) slots.
    var EmptyCount = 0;
    for (var I = 0; I < Filters.Count; I++)
    {
        if (EmptyCount > 0)
        {
            Filters[I - EmptyCount] = Filters[I];
        }
        if (Filters[I].Type == FILTER_NONE)
        {
            EmptyCount++;
        }
    }
    if (EmptyCount > 0)
    {
        Filters.RemoveRange(Filters.Count - EmptyCount, EmptyCount);
    }
    if (!NotAllFiltersProcessed)
    {
        await UnpWriteAreaAsync(WrittenBorder, UnpPtr, cancellationToken).ConfigureAwait(false);
        WrPtr = UnpPtr;
    }
    WriteBorder = (UnpPtr + Math.Min(MaxWinSize, UNPACK_MAX_WRITE)) & MaxWinMask;
    if (
        WriteBorder == UnpPtr
        || WrPtr != UnpPtr
        && ((WrPtr - UnpPtr) & MaxWinMask) < ((WriteBorder - UnpPtr) & MaxWinMask)
    )
    {
        WriteBorder = WrPtr;
    }
}
private byte[] ApplyFilter(byte[] __d, uint DataSize, UnpackFilter Flt)
{
var Data = 0;
@@ -651,6 +1041,48 @@ internal partial class Unpack
}
}
/// <summary>
/// Async write of the window region [StartPtr, EndPtr) to the output,
/// iterating fragment blocks when <c>Fragmented</c> and splitting the write
/// when the region wraps around the window end.
/// </summary>
private async System.Threading.Tasks.Task UnpWriteAreaAsync(
    size_t StartPtr,
    size_t EndPtr,
    System.Threading.CancellationToken cancellationToken = default
)
{
    if (EndPtr != StartPtr)
    {
        UnpSomeRead = true;
    }
    if (EndPtr < StartPtr)
    {
        UnpAllBuf = true;
    }
    if (Fragmented)
    {
        // Walk the fragmented window block by block.
        var SizeToWrite = (EndPtr - StartPtr) & MaxWinMask;
        while (SizeToWrite > 0)
        {
            var BlockSize = FragWindow.GetBlockSize(StartPtr, SizeToWrite);
            FragWindow.GetBuffer(StartPtr, out var __buffer, out var __offset);
            await UnpWriteDataAsync(__buffer, __offset, BlockSize, cancellationToken)
                .ConfigureAwait(false);
            SizeToWrite -= BlockSize;
            StartPtr = (StartPtr + BlockSize) & MaxWinMask;
        }
    }
    else if (EndPtr < StartPtr)
    {
        // Wrapped region: write tail (StartPtr..end) then head (0..EndPtr).
        await UnpWriteDataAsync(Window, StartPtr, MaxWinSize - StartPtr, cancellationToken)
            .ConfigureAwait(false);
        await UnpWriteDataAsync(Window, 0, EndPtr, cancellationToken).ConfigureAwait(false);
    }
    else
    {
        await UnpWriteDataAsync(Window, StartPtr, EndPtr - StartPtr, cancellationToken)
            .ConfigureAwait(false);
    }
}
private void UnpWriteData(byte[] Data, size_t offset, size_t Size)
{
if (WrittenFileSize >= DestUnpSize)
@@ -669,6 +1101,29 @@ internal partial class Unpack
WrittenFileSize += Size;
}
/// <summary>
/// Async write of up to <paramref name="Size"/> bytes from
/// <paramref name="Data"/> to the output, clamping the write so
/// <c>WrittenFileSize</c> never exceeds <c>DestUnpSize</c>.
/// </summary>
private async System.Threading.Tasks.Task UnpWriteDataAsync(
    byte[] Data,
    size_t offset,
    size_t Size,
    System.Threading.CancellationToken cancellationToken = default
)
{
    if (WrittenFileSize >= DestUnpSize)
    {
        return;
    }
    var WriteSize = Size;
    var LeftToWrite = DestUnpSize - WrittenFileSize;
    if (WriteSize > LeftToWrite)
    {
        WriteSize = (size_t)LeftToWrite;
    }
    await UnpIO_UnpWriteAsync(Data, offset, WriteSize, cancellationToken).ConfigureAwait(false);
    // NOTE(review): the counter advances by the requested Size, not the clamped
    // WriteSize — confirm this over-count past DestUnpSize is intentional.
    WrittenFileSize += Size;
}
private void UnpInitData50(bool Solid)
{
if (!Solid)
@@ -677,9 +1132,9 @@ internal partial class Unpack
}
}
private bool ReadBlockHeader(BitInput Inp)
private bool ReadBlockHeader(BitInput Inp, ref UnpackBlockHeader Header)
{
BlockHeader.HeaderSize = 0;
Header.HeaderSize = 0;
if (!Inp.ExternalBuffer && Inp.InAddr > ReadTop - 7)
{
@@ -700,9 +1155,9 @@ internal partial class Unpack
return false;
}
BlockHeader.HeaderSize = (int)(2 + ByteCount);
Header.HeaderSize = (int)(2 + ByteCount);
BlockHeader.BlockBitSize = (BlockFlags & 7) + 1;
Header.BlockBitSize = (BlockFlags & 7) + 1;
var SavedCheckSum = (byte)(Inp.fgetbits() >> 8);
Inp.faddbits(8);
@@ -714,24 +1169,28 @@ internal partial class Unpack
Inp.addbits(8);
}
BlockHeader.BlockSize = BlockSize;
Header.BlockSize = BlockSize;
var CheckSum = (byte)(0x5a ^ BlockFlags ^ BlockSize ^ (BlockSize >> 8) ^ (BlockSize >> 16));
if (CheckSum != SavedCheckSum)
{
return false;
}
BlockHeader.BlockStart = Inp.InAddr;
ReadBorder = Math.Min(ReadBorder, BlockHeader.BlockStart + BlockHeader.BlockSize - 1);
Header.BlockStart = Inp.InAddr;
ReadBorder = Math.Min(ReadBorder, Header.BlockStart + Header.BlockSize - 1);
BlockHeader.LastBlockInFile = (BlockFlags & 0x40) != 0;
BlockHeader.TablePresent = (BlockFlags & 0x80) != 0;
Header.LastBlockInFile = (BlockFlags & 0x40) != 0;
Header.TablePresent = (BlockFlags & 0x80) != 0;
return true;
}
private bool ReadTables(BitInput Inp)
private bool ReadTables(
BitInput Inp,
ref UnpackBlockHeader Header,
ref UnpackBlockTables Tables
)
{
if (!BlockHeader.TablePresent)
if (!Header.TablePresent)
{
return true;
}
@@ -774,7 +1233,7 @@ internal partial class Unpack
}
}
MakeDecodeTables(BitLength, 0, BlockTables.BD, BC);
MakeDecodeTables(BitLength, 0, Tables.BD, BC);
Span<byte> Table = stackalloc byte[checked((int)HUFF_TABLE_SIZE)];
const int TableSize = checked((int)HUFF_TABLE_SIZE);
@@ -788,7 +1247,7 @@ internal partial class Unpack
}
}
var Number = DecodeNumber(Inp, BlockTables.BD);
var Number = DecodeNumber(Inp, Tables.BD);
if (Number < 16)
{
Table[I] = (byte)Number;
@@ -850,10 +1309,10 @@ internal partial class Unpack
return false;
}
MakeDecodeTables(Table, 0, BlockTables.LD, NC);
MakeDecodeTables(Table, (int)NC, BlockTables.DD, DC);
MakeDecodeTables(Table, (int)(NC + DC), BlockTables.LDD, LDC);
MakeDecodeTables(Table, (int)(NC + DC + LDC), BlockTables.RD, RC);
MakeDecodeTables(Table, 0, Tables.LD, NC);
MakeDecodeTables(Table, (int)NC, Tables.DD, DC);
MakeDecodeTables(Table, (int)(NC + DC), Tables.LDD, LDC);
MakeDecodeTables(Table, (int)(NC + DC + LDC), Tables.RD, RC);
return true;
}

View File

@@ -2,8 +2,6 @@
using System;
using System.Buffers;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using static SharpCompress.Compressors.Rar.UnpackV2017.PackDef;
using static SharpCompress.Compressors.Rar.UnpackV2017.UnpackGlobal;
@@ -198,10 +196,10 @@ internal sealed partial class Unpack : BitInput
}
}
private async Task DoUnpackAsync(
private async System.Threading.Tasks.Task DoUnpackAsync(
uint Method,
bool Solid,
CancellationToken cancellationToken = default
System.Threading.CancellationToken cancellationToken = default
)
{
// Methods <50 will crash in Fragmented mode when accessing NULL Window.

View File

@@ -63,26 +63,12 @@ public class GZipFactory
GZipArchive.OpenArchive(stream, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsyncArchive(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncArchive)OpenArchive(stream, readerOptions));
}
public IAsyncArchive OpenAsyncArchive(Stream stream, ReaderOptions? readerOptions = null) =>
(IAsyncArchive)OpenArchive(stream, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncArchive)OpenArchive(fileInfo, readerOptions));
}
public IAsyncArchive OpenAsyncArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
(IAsyncArchive)OpenArchive(fileInfo, readerOptions);
#endregion

View File

@@ -87,7 +87,7 @@ public class LzwFactory : Factory, IReaderFactory
)
{
cancellationToken.ThrowIfCancellationRequested();
return LzwReader.OpenAsyncReader(stream, options);
return new(LzwReader.OpenAsyncReader(stream, options));
}
#endregion

View File

@@ -54,30 +54,16 @@ public class RarFactory : Factory, IArchiveFactory, IMultiArchiveFactory, IReade
RarArchive.OpenArchive(stream, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsyncArchive(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncArchive)OpenArchive(stream, readerOptions));
}
public IAsyncArchive OpenAsyncArchive(Stream stream, ReaderOptions? readerOptions = null) =>
(IAsyncArchive)OpenArchive(stream, readerOptions);
/// <inheritdoc/>
public IArchive OpenArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
RarArchive.OpenArchive(fileInfo, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncArchive)OpenArchive(fileInfo, readerOptions));
}
public IAsyncArchive OpenAsyncArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
(IAsyncArchive)OpenArchive(fileInfo, readerOptions);
#endregion

View File

@@ -49,30 +49,16 @@ public class SevenZipFactory : Factory, IArchiveFactory, IMultiArchiveFactory
SevenZipArchive.OpenArchive(stream, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsyncArchive(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncArchive)OpenArchive(stream, readerOptions));
}
public IAsyncArchive OpenAsyncArchive(Stream stream, ReaderOptions? readerOptions = null) =>
SevenZipArchive.OpenAsyncArchive(stream, readerOptions);
/// <inheritdoc/>
public IArchive OpenArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
SevenZipArchive.OpenArchive(fileInfo, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncArchive)OpenArchive(fileInfo, readerOptions));
}
public IAsyncArchive OpenAsyncArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
SevenZipArchive.OpenAsyncArchive(fileInfo, readerOptions);
#endregion
@@ -88,7 +74,7 @@ public class SevenZipFactory : Factory, IArchiveFactory, IMultiArchiveFactory
public IAsyncArchive OpenAsyncArchive(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null
) => (IAsyncArchive)OpenArchive(streams, readerOptions);
) => SevenZipArchive.OpenAsyncArchive(streams, readerOptions);
/// <inheritdoc/>
public IArchive OpenArchive(
@@ -100,7 +86,7 @@ public class SevenZipFactory : Factory, IArchiveFactory, IMultiArchiveFactory
public IAsyncArchive OpenAsyncArchive(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
) => (IAsyncArchive)OpenArchive(fileInfos, readerOptions);
) => SevenZipArchive.OpenAsyncArchive(fileInfos, readerOptions);
#endregion

View File

@@ -109,80 +109,156 @@ public class TarFactory
#endregion
public static CompressionType GetCompressionType(Stream stream)
{
stream.Seek(0, SeekOrigin.Begin);
foreach (var wrapper in TarWrapper.Wrappers)
{
stream.Seek(0, SeekOrigin.Begin);
if (wrapper.IsMatch(stream))
{
stream.Seek(0, SeekOrigin.Begin);
var decompressedStream = wrapper.CreateStream(stream);
if (TarArchive.IsTarFile(decompressedStream))
{
return wrapper.CompressionType;
}
}
}
throw new InvalidFormatException("Not a tar file.");
}
public static async ValueTask<CompressionType> GetCompressionTypeAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
stream.Seek(0, SeekOrigin.Begin);
foreach (var wrapper in TarWrapper.Wrappers)
{
stream.Seek(0, SeekOrigin.Begin);
if (wrapper.IsMatch(stream))
{
stream.Seek(0, SeekOrigin.Begin);
var decompressedStream = wrapper.CreateStream(stream);
if (
await TarArchive
.IsTarFileAsync(decompressedStream, cancellationToken)
.ConfigureAwait(false)
)
{
return wrapper.CompressionType;
}
}
}
throw new InvalidFormatException("Not a tar file.");
}
#region IArchiveFactory
/// <inheritdoc/>
public IArchive OpenArchive(Stream stream, ReaderOptions? readerOptions = null) =>
TarArchive.OpenArchive(stream, readerOptions);
public IArchive OpenArchive(Stream stream, ReaderOptions? readerOptions = null)
{
stream.NotNull(nameof(stream));
readerOptions ??= new ReaderOptions();
// Try to detect compressed TAR formats
// For async-only streams, skip detection and assume uncompressed
bool canDoSyncDetection = true;
try
{
// Test if we can do synchronous reads
var testBuffer = new byte[1];
var pos = stream.Position;
stream.Read(testBuffer, 0, 0); // Try a zero-length read
stream.Position = pos;
}
catch (NotSupportedException)
{
// Stream doesn't support synchronous reads
canDoSyncDetection = false;
}
if (!canDoSyncDetection)
{
// For async-only streams, we can't do format detection
// Assume it's an uncompressed TAR
return TarArchive.OpenArchive(stream, readerOptions);
}
var sharpCompressStream = new SharpCompressStream(stream);
sharpCompressStream.StartRecording();
foreach (var wrapper in TarWrapper.Wrappers)
{
sharpCompressStream.Rewind();
if (wrapper.IsMatch(sharpCompressStream))
{
sharpCompressStream.Rewind();
var decompressedStream = wrapper.CreateStream(sharpCompressStream);
if (TarArchive.IsTarFile(decompressedStream))
{
sharpCompressStream.StopRecording();
// For compressed TAR files, we need to decompress to a seekable stream
// since Archive API requires seekable streams
if (wrapper.CompressionType != CompressionType.None)
{
// Rewind and create a fresh decompression stream
sharpCompressStream.Rewind();
decompressedStream = wrapper.CreateStream(sharpCompressStream);
// Decompress to a MemoryStream to make it seekable
var memoryStream = new MemoryStream();
decompressedStream.CopyTo(memoryStream);
memoryStream.Position = 0;
// If we shouldn't leave the stream open, close the original
if (!readerOptions.LeaveStreamOpen)
{
stream.Dispose();
}
// Open the decompressed TAR with LeaveStreamOpen = false
// so the MemoryStream gets cleaned up with the archive
return TarArchive.OpenArchive(
memoryStream,
readerOptions with
{
LeaveStreamOpen = false,
}
);
}
// For uncompressed TAR, use the original stream directly
sharpCompressStream.Rewind();
return TarArchive.OpenArchive(stream, readerOptions);
}
}
}
// Fallback: try opening as uncompressed TAR
sharpCompressStream.StopRecording();
return TarArchive.OpenArchive(stream, readerOptions);
}
/// <inheritdoc/>
public async ValueTask<IAsyncArchive> OpenAsyncArchive(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) =>
await TarArchive
.OpenAsyncArchive(stream, readerOptions, cancellationToken)
.ConfigureAwait(false);
public IAsyncArchive OpenAsyncArchive(Stream stream, ReaderOptions? readerOptions = null) =>
(IAsyncArchive)OpenArchive(stream, readerOptions);
/// <inheritdoc/>
public IArchive OpenArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
TarArchive.OpenArchive(fileInfo, readerOptions);
public IArchive OpenArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.NotNull(nameof(fileInfo));
readerOptions ??= new ReaderOptions();
// Open the file and check if it's compressed
using var testStream = fileInfo.OpenRead();
var sharpCompressStream = new SharpCompressStream(testStream);
sharpCompressStream.StartRecording();
foreach (var wrapper in TarWrapper.Wrappers)
{
sharpCompressStream.Rewind();
if (wrapper.IsMatch(sharpCompressStream))
{
sharpCompressStream.Rewind();
var decompressedStream = wrapper.CreateStream(sharpCompressStream);
if (TarArchive.IsTarFile(decompressedStream))
{
sharpCompressStream.StopRecording();
// For compressed TAR files, decompress to memory
if (wrapper.CompressionType != CompressionType.None)
{
// Reopen file and decompress
using var fileStream = fileInfo.OpenRead();
var compressedStream = new SharpCompressStream(fileStream);
compressedStream.StartRecording();
var decompStream = wrapper.CreateStream(compressedStream);
var memoryStream = new MemoryStream();
decompStream.CopyTo(memoryStream);
memoryStream.Position = 0;
// Open with LeaveStreamOpen = false so MemoryStream gets cleaned up
return TarArchive.OpenArchive(
memoryStream,
readerOptions with
{
LeaveStreamOpen = false,
}
);
}
// Uncompressed, can use TarArchive's FileInfo overload directly
break;
}
}
}
// Open as regular TAR file
return TarArchive.OpenArchive(fileInfo, readerOptions);
}
/// <inheritdoc/>
public async ValueTask<IAsyncArchive> OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) =>
await TarArchive
.OpenAsyncArchive(fileInfo, readerOptions, cancellationToken)
.ConfigureAwait(false);
public IAsyncArchive OpenAsyncArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
(IAsyncArchive)OpenArchive(fileInfo, readerOptions);
#endregion

View File

@@ -127,30 +127,16 @@ public class ZipFactory
ZipArchive.OpenArchive(stream, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsyncArchive(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncArchive)OpenArchive(stream, readerOptions));
}
public IAsyncArchive OpenAsyncArchive(Stream stream, ReaderOptions? readerOptions = null) =>
(IAsyncArchive)OpenArchive(stream, readerOptions);
/// <inheritdoc/>
public IArchive OpenArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
ZipArchive.OpenArchive(fileInfo, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncArchive)OpenArchive(fileInfo, readerOptions));
}
public IAsyncArchive OpenAsyncArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
(IAsyncArchive)OpenArchive(fileInfo, readerOptions);
#endregion

View File

@@ -1,7 +1,5 @@
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Readers.Ace;
@@ -35,25 +33,15 @@ public partial class AceReader
return new MultiVolumeAceReader(streams, options ?? new ReaderOptions());
}
public static ValueTask<IAsyncReader> OpenAsyncReader(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
public static IAsyncReader OpenAsyncReader(string path, ReaderOptions? readerOptions = null)
{
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty(nameof(path));
return new((IAsyncReader)OpenReader(new FileInfo(path), readerOptions));
return (IAsyncReader)OpenReader(new FileInfo(path), readerOptions);
}
public static ValueTask<IAsyncReader> OpenAsyncReader(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
public static IAsyncReader OpenAsyncReader(Stream stream, ReaderOptions? readerOptions = null)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncReader)OpenReader(stream, readerOptions));
return (IAsyncReader)OpenReader(stream, readerOptions);
}
public static IAsyncReader OpenAsyncReader(
@@ -65,14 +53,12 @@ public partial class AceReader
return new MultiVolumeAceReader(streams, options ?? new ReaderOptions());
}
public static ValueTask<IAsyncReader> OpenAsyncReader(
public static IAsyncReader OpenAsyncReader(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncReader)OpenReader(fileInfo, readerOptions));
return (IAsyncReader)OpenReader(fileInfo, readerOptions);
}
public static IReader OpenReader(string filePath, ReaderOptions? readerOptions = null)

View File

@@ -1,42 +1,28 @@
#if NET8_0_OR_GREATER
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Readers.Arc;
public partial class ArcReader : IReaderOpenable
{
public static ValueTask<IAsyncReader> OpenAsyncReader(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
public static IAsyncReader OpenAsyncReader(string path, ReaderOptions? readerOptions = null)
{
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty(nameof(path));
return new((IAsyncReader)OpenReader(new FileInfo(path), readerOptions));
return (IAsyncReader)OpenReader(new FileInfo(path), readerOptions);
}
public static ValueTask<IAsyncReader> OpenAsyncReader(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
public static IAsyncReader OpenAsyncReader(Stream stream, ReaderOptions? readerOptions = null)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncReader)OpenReader(stream, readerOptions));
return (IAsyncReader)OpenReader(stream, readerOptions);
}
public static ValueTask<IAsyncReader> OpenAsyncReader(
public static IAsyncReader OpenAsyncReader(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncReader)OpenReader(fileInfo, readerOptions));
return (IAsyncReader)OpenReader(fileInfo, readerOptions);
}
public static IReader OpenReader(string filePath, ReaderOptions? readerOptions = null)

View File

@@ -1,42 +1,28 @@
#if NET8_0_OR_GREATER
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Readers.Arj;
public partial class ArjReader : IReaderOpenable
{
public static ValueTask<IAsyncReader> OpenAsyncReader(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
public static IAsyncReader OpenAsyncReader(string path, ReaderOptions? readerOptions = null)
{
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty(nameof(path));
return new((IAsyncReader)OpenReader(new FileInfo(path), readerOptions));
return (IAsyncReader)OpenReader(new FileInfo(path), readerOptions);
}
public static ValueTask<IAsyncReader> OpenAsyncReader(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
public static IAsyncReader OpenAsyncReader(Stream stream, ReaderOptions? readerOptions = null)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncReader)OpenReader(stream, readerOptions));
return (IAsyncReader)OpenReader(stream, readerOptions);
}
public static ValueTask<IAsyncReader> OpenAsyncReader(
public static IAsyncReader OpenAsyncReader(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncReader)OpenReader(fileInfo, readerOptions));
return (IAsyncReader)OpenReader(fileInfo, readerOptions);
}
public static IReader OpenReader(string filePath, ReaderOptions? readerOptions = null)

View File

@@ -1,6 +1,4 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Readers.GZip;
@@ -9,35 +7,23 @@ public partial class GZipReader
: IReaderOpenable
#endif
{
public static ValueTask<IAsyncReader> OpenAsyncReader(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
public static IAsyncReader OpenAsyncReader(string path, ReaderOptions? readerOptions = null)
{
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty(nameof(path));
return new((IAsyncReader)OpenReader(new FileInfo(path), readerOptions));
return (IAsyncReader)OpenReader(new FileInfo(path), readerOptions);
}
public static ValueTask<IAsyncReader> OpenAsyncReader(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
public static IAsyncReader OpenAsyncReader(Stream stream, ReaderOptions? readerOptions = null)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncReader)OpenReader(stream, readerOptions));
return (IAsyncReader)OpenReader(stream, readerOptions);
}
public static ValueTask<IAsyncReader> OpenAsyncReader(
public static IAsyncReader OpenAsyncReader(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncReader)OpenReader(fileInfo, readerOptions));
return (IAsyncReader)OpenReader(fileInfo, readerOptions);
}
public static IReader OpenReader(string filePath, ReaderOptions? readerOptions = null)

View File

@@ -9,18 +9,18 @@ public interface IReaderFactory : Factories.IFactory
/// <summary>
/// Opens a Reader for Non-seeking usage.
/// </summary>
/// <param name="stream">An open, readable stream.</param>
/// <param name="options">Reader options.</param>
/// <returns>The opened reader.</returns>
/// <param name="stream"></param>
/// <param name="options"></param>
/// <returns></returns>
IReader OpenReader(Stream stream, ReaderOptions? options);
/// <summary>
/// Opens a Reader for Non-seeking usage asynchronously.
/// </summary>
/// <param name="stream">An open, readable stream.</param>
/// <param name="options">Reader options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>A <see cref="ValueTask{TResult}"/> containing the opened async reader.</returns>
/// <param name="stream"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
/// <returns></returns>
ValueTask<IAsyncReader> OpenAsyncReader(
Stream stream,
ReaderOptions? options,

View File

@@ -1,7 +1,6 @@
#if NET8_0_OR_GREATER
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Readers;
@@ -16,22 +15,19 @@ public interface IReaderOpenable
public static abstract IReader OpenReader(Stream stream, ReaderOptions? readerOptions = null);
public static abstract ValueTask<IAsyncReader> OpenAsyncReader(
public static abstract IAsyncReader OpenAsyncReader(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
);
public static abstract ValueTask<IAsyncReader> OpenAsyncReader(
public static abstract IAsyncReader OpenAsyncReader(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
);
public static abstract ValueTask<IAsyncReader> OpenAsyncReader(
public static abstract IAsyncReader OpenAsyncReader(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
);
}
#endif

View File

@@ -1,6 +1,4 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Readers.Lzw;
@@ -9,35 +7,23 @@ public partial class LzwReader
: IReaderOpenable
#endif
{
public static ValueTask<IAsyncReader> OpenAsyncReader(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
public static IAsyncReader OpenAsyncReader(string path, ReaderOptions? readerOptions = null)
{
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty(nameof(path));
return new((IAsyncReader)OpenReader(new FileInfo(path), readerOptions));
return (IAsyncReader)OpenReader(new FileInfo(path), readerOptions);
}
public static ValueTask<IAsyncReader> OpenAsyncReader(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
public static IAsyncReader OpenAsyncReader(Stream stream, ReaderOptions? readerOptions = null)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncReader)OpenReader(stream, readerOptions));
return (IAsyncReader)OpenReader(stream, readerOptions);
}
public static ValueTask<IAsyncReader> OpenAsyncReader(
public static IAsyncReader OpenAsyncReader(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncReader)OpenReader(fileInfo, readerOptions));
return (IAsyncReader)OpenReader(fileInfo, readerOptions);
}
public static IReader OpenReader(string filePath, ReaderOptions? readerOptions = null)

View File

@@ -1,42 +1,28 @@
#if NET8_0_OR_GREATER
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Readers.Rar;
public partial class RarReader : IReaderOpenable
{
public static ValueTask<IAsyncReader> OpenAsyncReader(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
public static IAsyncReader OpenAsyncReader(string path, ReaderOptions? readerOptions = null)
{
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty(nameof(path));
return new((IAsyncReader)OpenReader(new FileInfo(path), readerOptions));
return (IAsyncReader)OpenReader(new FileInfo(path), readerOptions);
}
public static ValueTask<IAsyncReader> OpenAsyncReader(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
public static IAsyncReader OpenAsyncReader(Stream stream, ReaderOptions? readerOptions = null)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncReader)OpenReader(stream, readerOptions));
return (IAsyncReader)OpenReader(stream, readerOptions);
}
public static ValueTask<IAsyncReader> OpenAsyncReader(
public static IAsyncReader OpenAsyncReader(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncReader)OpenReader(fileInfo, readerOptions));
return (IAsyncReader)OpenReader(fileInfo, readerOptions);
}
}
#endif

View File

@@ -1,6 +1,8 @@
using System;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Factories;
using SharpCompress.IO;

View File

@@ -1,6 +1,16 @@
using System.Collections.Generic;
using System.IO;
using SharpCompress.Archives.GZip;
using SharpCompress.Archives.Tar;
using SharpCompress.Common;
using SharpCompress.Common.Tar;
using SharpCompress.Compressors;
using SharpCompress.Compressors.BZip2;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.Lzw;
using SharpCompress.Compressors.Xz;
using SharpCompress.Compressors.ZStandard;
using SharpCompress.IO;
namespace SharpCompress.Readers.Tar;

View File

@@ -1,15 +1,5 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives.GZip;
using SharpCompress.Archives.Tar;
using SharpCompress.Common;
using SharpCompress.Compressors;
using SharpCompress.Compressors.BZip2;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.ZStandard;
using SharpCompress.IO;
namespace SharpCompress.Readers.Tar;
@@ -18,117 +8,23 @@ public partial class TarReader
: IReaderOpenable
#endif
{
public static ValueTask<IAsyncReader> OpenAsyncReader(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
public static IAsyncReader OpenAsyncReader(string path, ReaderOptions? readerOptions = null)
{
path.NotNullOrEmpty(nameof(path));
return OpenAsyncReader(new FileInfo(path), readerOptions, cancellationToken);
return (IAsyncReader)OpenReader(new FileInfo(path), readerOptions);
}
public static async ValueTask<IAsyncReader> OpenAsyncReader(
Stream stream,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
public static IAsyncReader OpenAsyncReader(Stream stream, ReaderOptions? readerOptions = null)
{
cancellationToken.ThrowIfCancellationRequested();
stream.NotNull(nameof(stream));
options ??= new ReaderOptions();
var sharpCompressStream = SharpCompressStream.Create(
stream,
bufferSize: options.RewindableBufferSize
);
long pos = sharpCompressStream.Position;
if (
await GZipArchive
.IsGZipFileAsync(sharpCompressStream, cancellationToken)
.ConfigureAwait(false)
)
{
sharpCompressStream.Position = pos;
var testStream = new GZipStream(sharpCompressStream, CompressionMode.Decompress);
if (
await TarArchive.IsTarFileAsync(testStream, cancellationToken).ConfigureAwait(false)
)
{
sharpCompressStream.Position = pos;
return new TarReader(sharpCompressStream, options, CompressionType.GZip);
}
throw new InvalidFormatException("Not a tar file.");
}
sharpCompressStream.Position = pos;
if (
await BZip2Stream
.IsBZip2Async(sharpCompressStream, cancellationToken)
.ConfigureAwait(false)
)
{
sharpCompressStream.Position = pos;
var testStream = BZip2Stream.Create(
sharpCompressStream,
CompressionMode.Decompress,
false
);
if (
await TarArchive.IsTarFileAsync(testStream, cancellationToken).ConfigureAwait(false)
)
{
sharpCompressStream.Position = pos;
return new TarReader(sharpCompressStream, options, CompressionType.BZip2);
}
throw new InvalidFormatException("Not a tar file.");
}
sharpCompressStream.Position = pos;
if (
await ZStandardStream
.IsZStandardAsync(sharpCompressStream, cancellationToken)
.ConfigureAwait(false)
)
{
sharpCompressStream.Position = pos;
var testStream = new ZStandardStream(sharpCompressStream);
if (
await TarArchive.IsTarFileAsync(testStream, cancellationToken).ConfigureAwait(false)
)
{
sharpCompressStream.Position = pos;
return new TarReader(sharpCompressStream, options, CompressionType.ZStandard);
}
throw new InvalidFormatException("Not a tar file.");
}
sharpCompressStream.Position = pos;
if (
await LZipStream
.IsLZipFileAsync(sharpCompressStream, cancellationToken)
.ConfigureAwait(false)
)
{
sharpCompressStream.Position = pos;
var testStream = new LZipStream(sharpCompressStream, CompressionMode.Decompress);
if (
await TarArchive.IsTarFileAsync(testStream, cancellationToken).ConfigureAwait(false)
)
{
sharpCompressStream.Position = pos;
return new TarReader(sharpCompressStream, options, CompressionType.LZip);
}
throw new InvalidFormatException("Not a tar file.");
}
sharpCompressStream.Position = pos;
return new TarReader(sharpCompressStream, options, CompressionType.None);
return (IAsyncReader)OpenReader(stream, readerOptions);
}
public static ValueTask<IAsyncReader> OpenAsyncReader(
public static IAsyncReader OpenAsyncReader(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
)
{
readerOptions ??= new ReaderOptions() { LeaveStreamOpen = false };
return OpenAsyncReader(fileInfo.OpenRead(), readerOptions, cancellationToken);
return (IAsyncReader)OpenReader(fileInfo, readerOptions);
}
public static IReader OpenReader(string filePath, ReaderOptions? readerOptions = null)
@@ -140,77 +36,6 @@ public partial class TarReader
public static IReader OpenReader(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.NotNull(nameof(fileInfo));
readerOptions ??= new ReaderOptions() { LeaveStreamOpen = false };
return OpenReader(fileInfo.OpenRead(), readerOptions);
}
/// <summary>
/// Opens a TarReader for Non-seeking usage with a single volume
/// </summary>
/// <param name="stream"></param>
/// <param name="options"></param>
/// <returns></returns>
public static IReader OpenReader(Stream stream, ReaderOptions? options = null)
{
stream.NotNull(nameof(stream));
options ??= new ReaderOptions();
var sharpCompressStream = SharpCompressStream.Create(
stream,
bufferSize: options.RewindableBufferSize
);
long pos = sharpCompressStream.Position;
if (GZipArchive.IsGZipFile(sharpCompressStream))
{
sharpCompressStream.Position = pos;
var testStream = new GZipStream(sharpCompressStream, CompressionMode.Decompress);
if (TarArchive.IsTarFile(testStream))
{
sharpCompressStream.Position = pos;
return new TarReader(sharpCompressStream, options, CompressionType.GZip);
}
throw new InvalidFormatException("Not a tar file.");
}
sharpCompressStream.Position = pos;
if (BZip2Stream.IsBZip2(sharpCompressStream))
{
sharpCompressStream.Position = pos;
var testStream = BZip2Stream.Create(
sharpCompressStream,
CompressionMode.Decompress,
false
);
if (TarArchive.IsTarFile(testStream))
{
sharpCompressStream.Position = pos;
return new TarReader(sharpCompressStream, options, CompressionType.BZip2);
}
throw new InvalidFormatException("Not a tar file.");
}
sharpCompressStream.Position = pos;
if (ZStandardStream.IsZStandard(sharpCompressStream))
{
sharpCompressStream.Position = pos;
var testStream = new ZStandardStream(sharpCompressStream);
if (TarArchive.IsTarFile(testStream))
{
sharpCompressStream.Position = pos;
return new TarReader(sharpCompressStream, options, CompressionType.ZStandard);
}
throw new InvalidFormatException("Not a tar file.");
}
sharpCompressStream.Position = pos;
if (LZipStream.IsLZipFile(sharpCompressStream))
{
sharpCompressStream.Position = pos;
var testStream = new LZipStream(sharpCompressStream, CompressionMode.Decompress);
if (TarArchive.IsTarFile(testStream))
{
sharpCompressStream.Position = pos;
return new TarReader(sharpCompressStream, options, CompressionType.LZip);
}
throw new InvalidFormatException("Not a tar file.");
}
sharpCompressStream.Position = pos;
return new TarReader(sharpCompressStream, options, CompressionType.None);
}
}

View File

@@ -1,6 +1,8 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Archives.GZip;
using SharpCompress.Archives.Tar;
using SharpCompress.Common;
using SharpCompress.Common.Tar;
using SharpCompress.Compressors;
@@ -43,6 +45,80 @@ public partial class TarReader : AbstractReader<TarEntry, TarVolume>
};
}
#region OpenReader
/// <summary>
/// Opens a TarReader for Non-seeking usage with a single volume
/// </summary>
/// <param name="stream"></param>
/// <param name="options"></param>
/// <returns></returns>
public static IReader OpenReader(Stream stream, ReaderOptions? options = null)
{
stream.NotNull(nameof(stream));
options = options ?? new ReaderOptions();
var sharpCompressStream = SharpCompressStream.Create(
stream,
bufferSize: options.RewindableBufferSize
);
long pos = sharpCompressStream.Position;
if (GZipArchive.IsGZipFile(sharpCompressStream))
{
sharpCompressStream.Position = pos;
var testStream = new GZipStream(sharpCompressStream, CompressionMode.Decompress);
if (TarArchive.IsTarFile(testStream))
{
sharpCompressStream.Position = pos;
return new TarReader(sharpCompressStream, options, CompressionType.GZip);
}
throw new InvalidFormatException("Not a tar file.");
}
sharpCompressStream.Position = pos;
if (BZip2Stream.IsBZip2(sharpCompressStream))
{
sharpCompressStream.Position = pos;
var testStream = BZip2Stream.Create(
sharpCompressStream,
CompressionMode.Decompress,
false
);
if (TarArchive.IsTarFile(testStream))
{
sharpCompressStream.Position = pos;
return new TarReader(sharpCompressStream, options, CompressionType.BZip2);
}
throw new InvalidFormatException("Not a tar file.");
}
sharpCompressStream.Position = pos;
if (ZStandardStream.IsZStandard(sharpCompressStream))
{
sharpCompressStream.Position = pos;
var testStream = new ZStandardStream(sharpCompressStream);
if (TarArchive.IsTarFile(testStream))
{
sharpCompressStream.Position = pos;
return new TarReader(sharpCompressStream, options, CompressionType.ZStandard);
}
throw new InvalidFormatException("Not a tar file.");
}
sharpCompressStream.Position = pos;
if (LZipStream.IsLZipFile(sharpCompressStream))
{
sharpCompressStream.Position = pos;
var testStream = new LZipStream(sharpCompressStream, CompressionMode.Decompress);
if (TarArchive.IsTarFile(testStream))
{
sharpCompressStream.Position = pos;
return new TarReader(sharpCompressStream, options, CompressionType.LZip);
}
throw new InvalidFormatException("Not a tar file.");
}
sharpCompressStream.Position = pos;
return new TarReader(sharpCompressStream, options, CompressionType.None);
}
#endregion OpenReader
protected override IEnumerable<TarEntry> GetEntries(Stream stream) =>
TarEntry.GetEntries(
StreamingMode.Streaming,
@@ -51,4 +127,6 @@ public partial class TarReader : AbstractReader<TarEntry, TarVolume>
Options.ArchiveEncoding,
Options
);
// GetEntriesAsync moved to TarReader.Async.cs
}

View File

@@ -1,42 +1,28 @@
#if NET8_0_OR_GREATER
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Readers.Zip;
public partial class ZipReader : IReaderOpenable
{
public static ValueTask<IAsyncReader> OpenAsyncReader(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
public static IAsyncReader OpenAsyncReader(string path, ReaderOptions? readerOptions = null)
{
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty(nameof(path));
return new((IAsyncReader)OpenReader(new FileInfo(path), readerOptions));
return (IAsyncReader)OpenReader(new FileInfo(path), readerOptions);
}
public static ValueTask<IAsyncReader> OpenAsyncReader(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
public static IAsyncReader OpenAsyncReader(Stream stream, ReaderOptions? readerOptions = null)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncReader)OpenReader(stream, readerOptions));
return (IAsyncReader)OpenReader(stream, readerOptions);
}
public static ValueTask<IAsyncReader> OpenAsyncReader(
public static IAsyncReader OpenAsyncReader(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
ReaderOptions? readerOptions = null
)
{
cancellationToken.ThrowIfCancellationRequested();
return new((IAsyncReader)OpenReader(fileInfo, readerOptions));
return (IAsyncReader)OpenReader(fileInfo, readerOptions);
}
public static IReader OpenReader(string filePath, ReaderOptions? readerOptions = null)

View File

@@ -216,9 +216,9 @@
"net10.0": {
"Microsoft.NET.ILLink.Tasks": {
"type": "Direct",
"requested": "[10.0.0, )",
"resolved": "10.0.0",
"contentHash": "kICGrGYEzCNI3wPzfEXcwNHgTvlvVn9yJDhSdRK+oZQy4jvYH529u7O0xf5ocQKzOMjfS07+3z9PKRIjrFMJDA=="
"requested": "[10.0.2, )",
"resolved": "10.0.2",
"contentHash": "sXdDtMf2qcnbygw9OdE535c2lxSxrZP8gO4UhDJ0xiJbl1wIqXS1OTcTDFTIJPOFd6Mhcm8gPEthqWGUxBsTqw=="
},
"Microsoft.NETFramework.ReferenceAssemblies": {
"type": "Direct",
@@ -264,9 +264,9 @@
"net8.0": {
"Microsoft.NET.ILLink.Tasks": {
"type": "Direct",
"requested": "[8.0.22, )",
"resolved": "8.0.22",
"contentHash": "MhcMithKEiyyNkD2ZfbDZPmcOdi0GheGfg8saEIIEfD/fol3iHmcV8TsZkD4ZYz5gdUuoX4YtlVySUU7Sxl9SQ=="
"requested": "[8.0.23, )",
"resolved": "8.0.23",
"contentHash": "GqHiB1HbbODWPbY/lc5xLQH8siEEhNA0ptpJCC6X6adtAYNEzu5ZlqV3YHA3Gh7fuEwgA8XqVwMtH2KNtuQM1Q=="
},
"Microsoft.NETFramework.ReferenceAssemblies": {
"type": "Direct",

View File

@@ -1,6 +1,5 @@
using System;
using System.IO;
using System.Threading.Tasks;
using BenchmarkDotNet.Attributes;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;
@@ -37,14 +36,6 @@ public class GZipBenchmarks
gzipStream.Write(_sourceData, 0, _sourceData.Length);
}
[Benchmark(Description = "GZip: Compress 100KB (Async)")]
public async Task GZipCompressAsync()
{
using var outputStream = new MemoryStream();
using var gzipStream = new GZipStream(outputStream, CompressionMode.Compress);
await gzipStream.WriteAsync(_sourceData, 0, _sourceData.Length).ConfigureAwait(false);
}
[Benchmark(Description = "GZip: Decompress 100KB")]
public void GZipDecompress()
{
@@ -52,12 +43,4 @@ public class GZipBenchmarks
using var gzipStream = new GZipStream(inputStream, CompressionMode.Decompress);
gzipStream.CopyTo(Stream.Null);
}
[Benchmark(Description = "GZip: Decompress 100KB (Async)")]
public async Task GZipDecompressAsync()
{
using var inputStream = new MemoryStream(_compressedData);
using var gzipStream = new GZipStream(inputStream, CompressionMode.Decompress);
await gzipStream.CopyToAsync(Stream.Null).ConfigureAwait(false);
}
}

View File

@@ -1,7 +1,6 @@
using System;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using BenchmarkDotNet.Attributes;
using SharpCompress.Archives.Rar;
using SharpCompress.Readers;
@@ -31,18 +30,6 @@ public class RarBenchmarks : ArchiveBenchmarkBase
}
}
[Benchmark(Description = "Rar: Extract all entries (Archive API, Async)")]
public async Task RarExtractArchiveApiAsync()
{
using var stream = new MemoryStream(_rarBytes);
await using var archive = await RarArchive.OpenAsyncArchive(stream).ConfigureAwait(false);
await foreach (var entry in archive.EntriesAsync.Where(e => !e.IsDirectory))
{
await using var entryStream = await entry.OpenEntryStreamAsync().ConfigureAwait(false);
await entryStream.CopyToAsync(Stream.Null).ConfigureAwait(false);
}
}
[Benchmark(Description = "Rar: Extract all entries (Reader API)")]
public void RarExtractReaderApi()
{
@@ -56,18 +43,4 @@ public class RarBenchmarks : ArchiveBenchmarkBase
}
}
}
[Benchmark(Description = "Rar: Extract all entries (Reader API, Async)")]
public async Task RarExtractReaderApiAsync()
{
using var stream = new MemoryStream(_rarBytes);
await using var reader = await ReaderFactory.OpenAsyncReader(stream).ConfigureAwait(false);
while (await reader.MoveToNextEntryAsync().ConfigureAwait(false))
{
if (!reader.Entry.IsDirectory)
{
await reader.WriteEntryToAsync(Stream.Null).ConfigureAwait(false);
}
}
}
}

View File

@@ -1,7 +1,6 @@
using System;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using BenchmarkDotNet.Attributes;
using SharpCompress.Archives.SevenZip;
@@ -32,20 +31,6 @@ public class SevenZipBenchmarks : ArchiveBenchmarkBase
}
}
[Benchmark(Description = "7Zip LZMA: Extract all entries (Async)")]
public async Task SevenZipLzmaExtractAsync()
{
using var stream = new MemoryStream(_lzmaBytes);
await using var archive = await SevenZipArchive
.OpenAsyncArchive(stream)
.ConfigureAwait(false);
await foreach (var entry in archive.EntriesAsync.Where(e => !e.IsDirectory))
{
await using var entryStream = await entry.OpenEntryStreamAsync().ConfigureAwait(false);
await entryStream.CopyToAsync(Stream.Null).ConfigureAwait(false);
}
}
[Benchmark(Description = "7Zip LZMA2: Extract all entries")]
public void SevenZipLzma2Extract()
{
@@ -57,46 +42,4 @@ public class SevenZipBenchmarks : ArchiveBenchmarkBase
entryStream.CopyTo(Stream.Null);
}
}
[Benchmark(Description = "7Zip LZMA2: Extract all entries (Async)")]
public async Task SevenZipLzma2ExtractAsync()
{
using var stream = new MemoryStream(_lzma2Bytes);
await using var archive = await SevenZipArchive
.OpenAsyncArchive(stream)
.ConfigureAwait(false);
await foreach (var entry in archive.EntriesAsync.Where(e => !e.IsDirectory))
{
await using var entryStream = await entry.OpenEntryStreamAsync().ConfigureAwait(false);
await entryStream.CopyToAsync(Stream.Null).ConfigureAwait(false);
}
}
[Benchmark(Description = "7Zip LZMA2 Reader: Extract all entries")]
public void SevenZipLzma2Extract_Reader()
{
using var stream = new MemoryStream(_lzma2Bytes);
using var archive = SevenZipArchive.OpenArchive(stream);
using var reader = archive.ExtractAllEntries();
foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
{
using var entryStream = entry.OpenEntryStream();
entryStream.CopyTo(Stream.Null);
}
}
[Benchmark(Description = "7Zip LZMA2 Reader: Extract all entries (Async)")]
public async Task SevenZipLzma2ExtractAsync_Reader()
{
using var stream = new MemoryStream(_lzma2Bytes);
await using var archive = await SevenZipArchive
.OpenAsyncArchive(stream)
.ConfigureAwait(false);
await using var reader = await archive.ExtractAllEntriesAsync();
while (await reader.MoveToNextEntryAsync().ConfigureAwait(false))
{
await using var entryStream = await reader.OpenEntryStreamAsync().ConfigureAwait(false);
await entryStream.CopyToAsync(Stream.Null).ConfigureAwait(false);
}
}
}

View File

@@ -1,7 +1,6 @@
using System;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using BenchmarkDotNet.Attributes;
using SharpCompress.Archives.Tar;
using SharpCompress.Common;
@@ -35,18 +34,6 @@ public class TarBenchmarks : ArchiveBenchmarkBase
}
}
[Benchmark(Description = "Tar: Extract all entries (Archive API, Async)")]
public async Task TarExtractArchiveApiAsync()
{
using var stream = new MemoryStream(_tarBytes);
await using var archive = await TarArchive.OpenAsyncArchive(stream).ConfigureAwait(false);
await foreach (var entry in archive.EntriesAsync.Where(e => !e.IsDirectory))
{
await using var entryStream = await entry.OpenEntryStreamAsync().ConfigureAwait(false);
await entryStream.CopyToAsync(Stream.Null).ConfigureAwait(false);
}
}
[Benchmark(Description = "Tar: Extract all entries (Reader API)")]
public void TarExtractReaderApi()
{
@@ -61,20 +48,6 @@ public class TarBenchmarks : ArchiveBenchmarkBase
}
}
[Benchmark(Description = "Tar: Extract all entries (Reader API, Async)")]
public async Task TarExtractReaderApiAsync()
{
using var stream = new MemoryStream(_tarBytes);
await using var reader = await ReaderFactory.OpenAsyncReader(stream).ConfigureAwait(false);
while (await reader.MoveToNextEntryAsync().ConfigureAwait(false))
{
if (!reader.Entry.IsDirectory)
{
await reader.WriteEntryToAsync(Stream.Null).ConfigureAwait(false);
}
}
}
[Benchmark(Description = "Tar.GZip: Extract all entries")]
public void TarGzipExtract()
{
@@ -87,18 +60,6 @@ public class TarBenchmarks : ArchiveBenchmarkBase
}
}
[Benchmark(Description = "Tar.GZip: Extract all entries (Async)")]
public async Task TarGzipExtractAsync()
{
using var stream = new MemoryStream(_tarGzBytes);
await using var archive = await TarArchive.OpenAsyncArchive(stream).ConfigureAwait(false);
await foreach (var entry in archive.EntriesAsync.Where(e => !e.IsDirectory))
{
await using var entryStream = await entry.OpenEntryStreamAsync().ConfigureAwait(false);
await entryStream.CopyToAsync(Stream.Null).ConfigureAwait(false);
}
}
[Benchmark(Description = "Tar: Create archive with small files")]
public void TarCreateSmallFiles()
{
@@ -117,22 +78,4 @@ public class TarBenchmarks : ArchiveBenchmarkBase
writer.Write($"file{i}.txt", entryStream);
}
}
[Benchmark(Description = "Tar: Create archive with small files (Async)")]
public async Task TarCreateSmallFilesAsync()
{
using var outputStream = new MemoryStream();
await using var writer = WriterFactory.OpenAsyncWriter(
outputStream,
ArchiveType.Tar,
new WriterOptions(CompressionType.None) { LeaveStreamOpen = true }
);
for (int i = 0; i < 10; i++)
{
var data = new byte[1024];
using var entryStream = new MemoryStream(data);
await writer.WriteAsync($"file{i}.txt", entryStream).ConfigureAwait(false);
}
}
}

View File

@@ -1,7 +1,6 @@
using System;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using BenchmarkDotNet.Attributes;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
@@ -35,18 +34,6 @@ public class ZipBenchmarks : ArchiveBenchmarkBase
}
}
[Benchmark(Description = "Zip: Extract all entries (Archive API, Async)")]
public async Task ZipExtractArchiveApiAsync()
{
using var stream = new MemoryStream(_archiveBytes);
await using var archive = await ZipArchive.OpenAsyncArchive(stream).ConfigureAwait(false);
await foreach (var entry in archive.EntriesAsync.Where(e => !e.IsDirectory))
{
await using var entryStream = await entry.OpenEntryStreamAsync().ConfigureAwait(false);
await entryStream.CopyToAsync(Stream.Null).ConfigureAwait(false);
}
}
[Benchmark(Description = "Zip: Extract all entries (Reader API)")]
public void ZipExtractReaderApi()
{
@@ -61,20 +48,6 @@ public class ZipBenchmarks : ArchiveBenchmarkBase
}
}
[Benchmark(Description = "Zip: Extract all entries (Reader API, Async)")]
public async Task ZipExtractReaderApiAsync()
{
using var stream = new MemoryStream(_archiveBytes);
await using var reader = await ReaderFactory.OpenAsyncReader(stream).ConfigureAwait(false);
while (await reader.MoveToNextEntryAsync().ConfigureAwait(false))
{
if (!reader.Entry.IsDirectory)
{
await reader.WriteEntryToAsync(Stream.Null).ConfigureAwait(false);
}
}
}
[Benchmark(Description = "Zip: Create archive with small files")]
public void ZipCreateSmallFiles()
{
@@ -93,22 +66,4 @@ public class ZipBenchmarks : ArchiveBenchmarkBase
writer.Write($"file{i}.txt", entryStream);
}
}
[Benchmark(Description = "Zip: Create archive with small files (Async)")]
public async Task ZipCreateSmallFilesAsync()
{
using var outputStream = new MemoryStream();
await using var writer = WriterFactory.OpenAsyncWriter(
outputStream,
ArchiveType.Zip,
new WriterOptions(CompressionType.Deflate) { LeaveStreamOpen = true }
);
for (int i = 0; i < 10; i++)
{
var data = new byte[1024];
using var entryStream = new MemoryStream(data);
await writer.WriteAsync($"file{i}.txt", entryStream).ConfigureAwait(false);
}
}
}

View File

@@ -20,10 +20,10 @@ public class Program
// Default: Run BenchmarkDotNet
var config = DefaultConfig.Instance.AddJob(
Job.Default.WithToolchain(InProcessEmitToolchain.Instance)
.WithWarmupCount(5) // Minimal warmup iterations for CI
.WithIterationCount(30) // Minimal measurement iterations for CI
.WithInvocationCount(30)
.WithUnrollFactor(2)
.WithWarmupCount(3) // Minimal warmup iterations for CI
.WithIterationCount(10) // Minimal measurement iterations for CI
.WithInvocationCount(10)
.WithUnrollFactor(1)
);
BenchmarkSwitcher.FromAssembly(typeof(Program).Assembly).Run(args, config);

View File

@@ -5,11 +5,11 @@ This project contains performance benchmarks for SharpCompress using [BenchmarkD
## Overview
The benchmarks test all major archive formats supported by SharpCompress:
- **Zip**: Read (Archive & Reader API) and Write operations, each with sync and async variants
- **Tar**: Read (Archive & Reader API) and Write operations, including Tar.GZip, each with sync and async variants
- **Rar**: Read operations (Archive & Reader API), each with sync and async variants
- **7Zip**: Read operations for LZMA and LZMA2 compression, each with sync and async variants
- **GZip**: Compression and decompression, each with sync and async variants
- **Zip**: Read (Archive & Reader API) and Write operations
- **Tar**: Read (Archive & Reader API) and Write operations, including Tar.GZip
- **Rar**: Read operations (Archive & Reader API)
- **7Zip**: Read operations for LZMA and LZMA2 compression
- **GZip**: Compression and decompression
## Running Benchmarks

View File

@@ -1,49 +1,23 @@
| Method | Mean | Error | StdDev | Allocated |
|---------------------------- |---------:|---------:|---------:|----------:|
| SharpCompress_0_44_Original | 581.8 ms | 11.56 ms | 17.65 ms | 48.77 MB |
| Method | Mean | Error | StdDev | Median | Gen0 | Gen1 | Gen2 | Allocated |
|-------------------- |-----------:|----------:|----------:|-----------:|---------:|---------:|---------:|----------:|
| ZipArchiveRead | 959.2 μs | 52.16 μs | 153.78 μs | 928.7 μs | 27.3438 | 5.8594 | - | 345.75 KB |
| TarArchiveRead | 252.1 μs | 20.97 μs | 61.82 μs | 251.9 μs | 12.2070 | 5.8594 | - | 154.78 KB |
| TarGzArchiveRead | 600.9 μs | 19.25 μs | 53.98 μs | 607.8 μs | 16.6016 | 6.8359 | - | 204.95 KB |
| TarBz2ArchiveRead | NA | NA | NA | NA | NA | NA | NA | NA |
| SevenZipArchiveRead | 8,354.4 μs | 273.01 μs | 747.35 μs | 8,093.2 μs | 109.3750 | 109.3750 | 109.3750 | 787.99 KB |
| RarArchiveRead | 1,648.6 μs | 131.91 μs | 388.94 μs | 1,617.6 μs | 17.5781 | 5.8594 | - | 222.62 KB |
| Method | Mean | Error | StdDev | Gen0 | Gen1 | Gen2 | Allocated |
|--------------------------------- |-----------:|--------:|---------:|--------:|--------:|--------:|----------:|
| &#39;GZip: Compress 100KB&#39; | 3,317.1 μs | 7.15 μs | 10.02 μs | 33.3333 | 33.3333 | 33.3333 | 519.31 KB |
| &#39;GZip: Compress 100KB (Async)&#39; | 3,280.3 μs | 8.30 μs | 11.63 μs | 33.3333 | 33.3333 | 33.3333 | 519.46 KB |
| &#39;GZip: Decompress 100KB&#39; | 432.5 μs | 2.43 μs | 3.56 μs | - | - | - | 33.92 KB |
| &#39;GZip: Decompress 100KB (Async)&#39; | 442.8 μs | 1.20 μs | 1.76 μs | - | - | - | 34.24 KB |
| Method | Mean | Error | StdDev | Gen0 | Gen1 | Gen2 | Allocated |
|------------------------------------------------ |-----------:|----------:|----------:|---------:|---------:|---------:|-----------:|
| &#39;Rar: Extract all entries (Archive API)&#39; | 908.2 μs | 12.42 μs | 17.01 μs | - | - | - | 90.68 KB |
| &#39;Rar: Extract all entries (Archive API, Async)&#39; | 1,175.4 μs | 118.74 μs | 177.72 μs | - | - | - | 96.09 KB |
| &#39;Rar: Extract all entries (Reader API)&#39; | 1,215.1 μs | 2.26 μs | 3.09 μs | - | - | - | 148.85 KB |
| &#39;Rar: Extract all entries (Reader API, Async)&#39; | 1,592.0 μs | 22.58 μs | 33.10 μs | 500.0000 | 500.0000 | 500.0000 | 4776.76 KB |
| Method | Mean | Error | StdDev | Gen0 | Gen1 | Gen2 | Allocated |
|------------------------------------------------- |----------:|----------:|----------:|---------:|---------:|---------:|-----------:|
| &#39;7Zip LZMA: Extract all entries&#39; | 7.723 ms | 0.0111 ms | 0.0152 ms | 33.3333 | 33.3333 | 33.3333 | 272.68 KB |
| &#39;7Zip LZMA: Extract all entries (Async)&#39; | 35.827 ms | 0.0381 ms | 0.0546 ms | 200.0000 | 33.3333 | 33.3333 | 3402.82 KB |
| &#39;7Zip LZMA2: Extract all entries&#39; | 7.758 ms | 0.0074 ms | 0.0104 ms | 33.3333 | 33.3333 | 33.3333 | 272.46 KB |
| &#39;7Zip LZMA2: Extract all entries (Async)&#39; | 36.317 ms | 0.0345 ms | 0.0506 ms | 200.0000 | 33.3333 | 33.3333 | 3409.72 KB |
| &#39;7Zip LZMA2 Reader: Extract all entries&#39; | 7.706 ms | 0.0114 ms | 0.0163 ms | 33.3333 | 33.3333 | 33.3333 | 273.03 KB |
| &#39;7Zip LZMA2 Reader: Extract all entries (Async)&#39; | 22.951 ms | 0.0973 ms | 0.1426 ms | 100.0000 | 100.0000 | 100.0000 | 2420.81 KB |
| Method | Mean | Error | StdDev | Gen0 | Gen1 | Gen2 | Allocated |
|------------------------------------------------ |----------:|---------:|---------:|--------:|--------:|--------:|----------:|
| &#39;Tar: Extract all entries (Archive API)&#39; | 40.82 μs | 0.292 μs | 0.427 μs | - | - | - | 16.36 KB |
| &#39;Tar: Extract all entries (Archive API, Async)&#39; | 105.12 μs | 6.183 μs | 9.254 μs | - | - | - | 14.57 KB |
| &#39;Tar: Extract all entries (Reader API)&#39; | 187.89 μs | 1.571 μs | 2.254 μs | 66.6667 | 66.6667 | 66.6667 | 341.24 KB |
| &#39;Tar: Extract all entries (Reader API, Async)&#39; | 229.78 μs | 4.852 μs | 6.802 μs | 66.6667 | 66.6667 | 66.6667 | 376.64 KB |
| &#39;Tar.GZip: Extract all entries&#39; | NA | NA | NA | NA | NA | NA | NA |
| &#39;Tar.GZip: Extract all entries (Async)&#39; | NA | NA | NA | NA | NA | NA | NA |
| &#39;Tar: Create archive with small files&#39; | 46.98 μs | 0.287 μs | 0.394 μs | - | - | - | 68.11 KB |
| &#39;Tar: Create archive with small files (Async)&#39; | 53.14 μs | 0.352 μs | 0.493 μs | - | - | - | 68.11 KB |
| Method | Mean | Error | StdDev | Gen0 | Gen1 | Allocated |
|------------------------------------------------ |---------:|---------:|---------:|---------:|--------:|-----------:|
| &#39;Zip: Extract all entries (Archive API)&#39; | 556.7 μs | 3.38 μs | 4.74 μs | - | - | 180.22 KB |
| &#39;Zip: Extract all entries (Archive API, Async)&#39; | 615.7 μs | 15.98 μs | 22.92 μs | - | - | 125.52 KB |
| &#39;Zip: Extract all entries (Reader API)&#39; | 542.2 μs | 1.10 μs | 1.46 μs | - | - | 121.04 KB |
| &#39;Zip: Extract all entries (Reader API, Async)&#39; | 562.8 μs | 2.42 μs | 3.55 μs | - | - | 123.34 KB |
| &#39;Zip: Create archive with small files&#39; | 271.1 μs | 12.93 μs | 18.95 μs | 166.6667 | 33.3333 | 2806.28 KB |
| &#39;Zip: Create archive with small files (Async)&#39; | 394.3 μs | 25.59 μs | 36.71 μs | 166.6667 | 33.3333 | 2811.42 KB |
| Method | Mean | Error | StdDev | Allocated |
|------------------------- |-----------:|---------:|---------:|----------:|
| &#39;GZip: Compress 100KB&#39; | 3,268.7 μs | 28.50 μs | 16.96 μs | 519.2 KB |
| &#39;GZip: Decompress 100KB&#39; | 436.6 μs | 3.23 μs | 1.69 μs | 34.18 KB |
| Method | Mean | Error | StdDev | Allocated |
|----------------------------------------- |---------:|----------:|----------:|----------:|
| &#39;Rar: Extract all entries (Archive API)&#39; | 2.054 ms | 0.3927 ms | 0.2598 ms | 91.09 KB |
| &#39;Rar: Extract all entries (Reader API)&#39; | 2.235 ms | 0.0253 ms | 0.0132 ms | 149.48 KB |
| Method | Mean | Error | StdDev | Allocated |
|---------------------------------- |---------:|----------:|----------:|----------:|
| &#39;7Zip LZMA: Extract all entries&#39; | 9.124 ms | 2.1930 ms | 1.4505 ms | 272.8 KB |
| &#39;7Zip LZMA2: Extract all entries&#39; | 7.810 ms | 0.1323 ms | 0.0788 ms | 272.58 KB |
| Method | Mean | Error | StdDev | Allocated |
|----------------------------------------- |----------:|---------:|---------:|----------:|
| &#39;Tar: Extract all entries (Archive API)&#39; | 56.36 μs | 3.312 μs | 1.971 μs | 16.65 KB |
| &#39;Tar: Extract all entries (Reader API)&#39; | 175.34 μs | 2.616 μs | 1.557 μs | 213.36 KB |
| &#39;Tar.GZip: Extract all entries&#39; | NA | NA | NA | NA |
| &#39;Tar: Create archive with small files&#39; | 51.38 μs | 2.349 μs | 1.398 μs | 68.7 KB |
| Method | Mean | Error | StdDev | Gen0 | Allocated |
|----------------------------------------- |-----------:|---------:|---------:|---------:|-----------:|
| &#39;Zip: Extract all entries (Archive API)&#39; | 1,188.4 μs | 28.62 μs | 14.97 μs | - | 181.66 KB |
| &#39;Zip: Extract all entries (Reader API)&#39; | 1,137.0 μs | 5.58 μs | 2.92 μs | - | 123.19 KB |
| &#39;Zip: Create archive with small files&#39; | 258.2 μs | 8.98 μs | 4.70 μs | 100.0000 | 2806.93 KB |

View File

@@ -614,7 +614,7 @@ public class ArchiveTests : ReaderTests
{
using (var stream = SharpCompressStream.CreateNonDisposing(File.OpenRead(path)))
await using (
var archive = await archiveFactory.OpenAsyncArchive(
var archive = archiveFactory.OpenAsyncArchive(
new AsyncOnlyStream(stream),
readerOptions
)

View File

@@ -72,7 +72,7 @@ public class AsyncTests : TestBase
public async ValueTask Archive_Entry_Async_Open_Stream()
{
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz");
await using var archive = await GZipArchive.OpenAsyncArchive(
await using var archive = GZipArchive.OpenAsyncArchive(
new AsyncOnlyStream(File.OpenRead(testArchive))
);
@@ -123,7 +123,7 @@ public class AsyncTests : TestBase
// Verify the archive was created and contains the entry
Assert.True(File.Exists(outputPath));
await using var archive = await ZipArchive.OpenAsyncArchive(outputPath);
await using var archive = ZipArchive.OpenAsyncArchive(outputPath);
Assert.Single(await archive.EntriesAsync.Where(e => !e.IsDirectory).ToListAsync());
}

View File

@@ -24,7 +24,7 @@ public class GZipArchiveAsyncTests : ArchiveTests
#else
await using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz")))
#endif
await using (var archive = await GZipArchive.OpenAsyncArchive(new AsyncOnlyStream(stream)))
await using (var archive = GZipArchive.OpenAsyncArchive(new AsyncOnlyStream(stream)))
{
var entry = await archive.EntriesAsync.FirstAsync();
await entry.WriteToFileAsync(Path.Combine(SCRATCH_FILES_PATH, entry.Key.NotNull()));
@@ -51,9 +51,7 @@ public class GZipArchiveAsyncTests : ArchiveTests
await using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz")))
#endif
{
await using (
var archive = await GZipArchive.OpenAsyncArchive(new AsyncOnlyStream(stream))
)
await using (var archive = GZipArchive.OpenAsyncArchive(new AsyncOnlyStream(stream)))
{
var entry = await archive.EntriesAsync.FirstAsync();
await entry.WriteToFileAsync(Path.Combine(SCRATCH_FILES_PATH, entry.Key.NotNull()));
@@ -81,7 +79,7 @@ public class GZipArchiveAsyncTests : ArchiveTests
#else
await using Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"));
#endif
await using (var archive = await GZipArchive.OpenAsyncArchive(new AsyncOnlyStream(stream)))
await using (var archive = GZipArchive.OpenAsyncArchive(new AsyncOnlyStream(stream)))
{
await Assert.ThrowsAsync<NotSupportedException>(async () =>
await archive.AddEntryAsync("jpg\\test.jpg", File.OpenRead(jpg), closeStream: true)
@@ -107,9 +105,7 @@ public class GZipArchiveAsyncTests : ArchiveTests
inputStream.Position = 0;
}
await using var archive = await GZipArchive.OpenAsyncArchive(
new AsyncOnlyStream(inputStream)
);
await using var archive = GZipArchive.OpenAsyncArchive(new AsyncOnlyStream(inputStream));
var archiveEntry = await archive.EntriesAsync.FirstAsync();
MemoryStream tarStream;
@@ -163,7 +159,7 @@ public class GZipArchiveAsyncTests : ArchiveTests
#else
await using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"));
#endif
await using var archive = await GZipArchive.OpenAsyncArchive(new AsyncOnlyStream(stream));
await using var archive = GZipArchive.OpenAsyncArchive(new AsyncOnlyStream(stream));
await foreach (var entry in archive.EntriesAsync.Where(entry => !entry.IsDirectory))
{
Assert.InRange(entry.Crc, 0L, 0xFFFFFFFFL);
@@ -178,7 +174,7 @@ public class GZipArchiveAsyncTests : ArchiveTests
#else
await using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"));
#endif
await using var archive = await GZipArchive.OpenAsyncArchive(new AsyncOnlyStream(stream));
await using var archive = GZipArchive.OpenAsyncArchive(new AsyncOnlyStream(stream));
Assert.Equal(archive.Type, ArchiveType.GZip);
}
}

View File

@@ -22,7 +22,7 @@ public class GZipReaderAsyncTests : ReaderTests
{
//read only as GZip item
using Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"));
await using var reader = await GZipReader.OpenAsyncReader(new AsyncOnlyStream(stream));
await using var reader = GZipReader.OpenAsyncReader(new AsyncOnlyStream(stream));
while (await reader.MoveToNextEntryAsync())
{
Assert.NotEqual(0, reader.Entry.Size);

View File

@@ -69,7 +69,7 @@ public class RarArchiveAsyncTests : ArchiveTests
{
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, testArchive)))
await using (
var archive = await RarArchive.OpenAsyncArchive(
var archive = RarArchive.OpenAsyncArchive(
stream,
new ReaderOptions { Password = password, LeaveStreamOpen = true }
)
@@ -691,7 +691,7 @@ public class RarArchiveAsyncTests : ArchiveTests
{
var testFile = "Rar.issue1050.rar";
using var fileStream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, testFile));
await using var archive = await RarArchive.OpenAsyncArchive(fileStream);
await using var archive = RarArchive.OpenAsyncArchive(fileStream);
// Extract using archive.WriteToDirectoryAsync without explicit options
await archive.WriteToDirectoryAsync(SCRATCH_FILES_PATH);

View File

@@ -178,8 +178,7 @@ public abstract class ReaderTests : TestBase
await using (
var reader = await ReaderFactory.OpenAsyncReader(
new AsyncOnlyStream(testStream),
options,
cancellationToken
options
)
)
{

View File

@@ -233,7 +233,7 @@ public class SevenZipArchiveAsyncTests : ArchiveTests
// This test verifies that solid archives iterate entries as contiguous streams
// rather than recreating the decompression stream for each entry
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "7Zip.solid.7z");
await using var archive = await SevenZipArchive.OpenAsyncArchive(testArchive);
await using var archive = SevenZipArchive.OpenAsyncArchive(testArchive);
Assert.True(((SevenZipArchive)archive).IsSolid);
await using var reader = await archive.ExtractAllEntriesAsync();
@@ -254,7 +254,7 @@ public class SevenZipArchiveAsyncTests : ArchiveTests
// This test verifies that the folder stream is reused within each folder
// and not recreated for each entry in solid archives
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "7Zip.solid.7z");
await using var archive = await SevenZipArchive.OpenAsyncArchive(testArchive);
await using var archive = SevenZipArchive.OpenAsyncArchive(testArchive);
Assert.True(((SevenZipArchive)archive).IsSolid);
await using var reader = await archive.ExtractAllEntriesAsync();

View File

@@ -55,7 +55,7 @@ public class TarArchiveAsyncTests : ArchiveTests
// Step 2: check if the written tar file can be read correctly
var unmodified = Path.Combine(SCRATCH2_FILES_PATH, archive);
await using (
var archive2 = await TarArchive.OpenAsyncArchive(
var archive2 = TarArchive.OpenAsyncArchive(
new AsyncOnlyStream(File.OpenRead(unmodified)),
new ReaderOptions() { LeaveStreamOpen = false }
)
@@ -113,7 +113,7 @@ public class TarArchiveAsyncTests : ArchiveTests
// Step 2: check if the written tar file can be read correctly
var unmodified = Path.Combine(SCRATCH2_FILES_PATH, archive);
await using (
var archive2 = await TarArchive.OpenAsyncArchive(
var archive2 = TarArchive.OpenAsyncArchive(
new AsyncOnlyStream(File.OpenRead(unmodified)),
new ReaderOptions() { LeaveStreamOpen = false }
)
@@ -145,7 +145,7 @@ public class TarArchiveAsyncTests : ArchiveTests
var scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Tar.tar");
var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.noEmptyDirs.tar");
await using (var archive = await TarArchive.CreateAsyncArchive())
await using (var archive = TarArchive.CreateAsyncArchive())
{
await archive.AddAllFromDirectoryAsync(ORIGINAL_FILES_PATH);
var twopt = new TarWriterOptions(CompressionType.None, true)
@@ -165,7 +165,7 @@ public class TarArchiveAsyncTests : ArchiveTests
var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.mod.tar");
var modified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.noEmptyDirs.tar");
await using (var archive = await TarArchive.OpenAsyncArchive(unmodified))
await using (var archive = TarArchive.OpenAsyncArchive(unmodified))
{
await archive.AddEntryAsync("jpg\\test.jpg", jpg);
await archive.SaveToAsync(
@@ -183,7 +183,7 @@ public class TarArchiveAsyncTests : ArchiveTests
var modified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.mod.tar");
var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.noEmptyDirs.tar");
await using (var archive = await TarArchive.OpenAsyncArchive(unmodified))
await using (var archive = TarArchive.OpenAsyncArchive(unmodified))
{
var entry = await archive.EntriesAsync.SingleAsync(x =>
x.Key.NotNull().EndsWith("jpg", StringComparison.OrdinalIgnoreCase)
@@ -253,9 +253,7 @@ public class TarArchiveAsyncTests : ArchiveTests
var numberOfEntries = 0;
await using (
var archiveFactory = await ArchiveFactory.OpenAsyncArchive(
new AsyncOnlyStream(memoryStream)
)
var archiveFactory = TarArchive.OpenAsyncArchive(new AsyncOnlyStream(memoryStream))
)
{
await foreach (var entry in archiveFactory.EntriesAsync)

View File

@@ -7,7 +7,6 @@ using SharpCompress.Archives.Tar;
using SharpCompress.Common;
using SharpCompress.Readers;
using SharpCompress.Readers.Tar;
using SharpCompress.Test.Mocks;
using SharpCompress.Writers;
using SharpCompress.Writers.Tar;
using Xunit;
@@ -24,16 +23,6 @@ public class TarArchiveTests : ArchiveTests
[Fact]
public void TarArchivePathRead() => ArchiveFileRead("Tar.tar");
[Fact]
public void TarArchiveStreamRead_Throws_On_NonSeekable_Stream()
{
using Stream stream = new ForwardOnlyStream(
File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar"))
);
Assert.Throws<ArgumentException>(() => ArchiveFactory.OpenArchive(stream));
}
[Fact]
public void Tar_FileName_Exactly_100_Characters()
{
@@ -64,7 +53,7 @@ public class TarArchiveTests : ArchiveTests
// Step 2: check if the written tar file can be read correctly
var unmodified = Path.Combine(SCRATCH2_FILES_PATH, archive);
using (var archive2 = ArchiveFactory.OpenArchive(unmodified))
using (var archive2 = TarArchive.OpenArchive(unmodified))
{
Assert.Equal(1, archive2.Entries.Count());
Assert.Contains(filename, archive2.Entries.Select(entry => entry.Key));
@@ -83,7 +72,7 @@ public class TarArchiveTests : ArchiveTests
public void Tar_NonUstarArchiveWithLongNameDoesNotSkipEntriesAfterTheLongOne()
{
var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "very long filename.tar");
using var archive = ArchiveFactory.OpenArchive(unmodified);
using var archive = TarArchive.OpenArchive(unmodified);
Assert.Equal(5, archive.Entries.Count());
Assert.Contains("very long filename/", archive.Entries.Select(entry => entry.Key));
Assert.Contains(
@@ -130,7 +119,7 @@ public class TarArchiveTests : ArchiveTests
// Step 2: check if the written tar file can be read correctly
var unmodified = Path.Combine(SCRATCH2_FILES_PATH, archive);
using (var archive2 = ArchiveFactory.OpenArchive(unmodified))
using (var archive2 = TarArchive.OpenArchive(unmodified))
{
Assert.Equal(1, archive2.Entries.Count());
Assert.Contains(longFilename, archive2.Entries.Select(entry => entry.Key));
@@ -149,7 +138,7 @@ public class TarArchiveTests : ArchiveTests
public void Tar_UstarArchivePathReadLongName()
{
var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "ustar with long names.tar");
using var archive = ArchiveFactory.OpenArchive(unmodified);
using var archive = TarArchive.OpenArchive(unmodified);
Assert.Equal(6, archive.Entries.Count());
Assert.Contains("Directory/", archive.Entries.Select(entry => entry.Key));
Assert.Contains(
@@ -296,9 +285,9 @@ public class TarArchiveTests : ArchiveTests
var numberOfEntries = 0;
using (var archive = ArchiveFactory.OpenArchive(memoryStream))
using (var archiveFactory = TarArchive.OpenArchive(memoryStream))
{
foreach (var entry in archive.Entries)
foreach (var entry in archiveFactory.Entries)
{
++numberOfEntries;
@@ -320,23 +309,38 @@ public class TarArchiveTests : ArchiveTests
Assert.False(isTar);
}
[Fact]
public void TarArchiveStreamRead_Autodetect_CompressedTar()
[Theory]
[InlineData("Tar.tar.gz")]
[InlineData("Tar.tar.bz2")]
[InlineData("Tar.tar.xz")]
[InlineData("Tar.tar.lz")]
[InlineData("Tar.tar.zst")]
[InlineData("Tar.tar.Z")]
[InlineData("Tar.oldgnu.tar.gz")]
[InlineData("TarWithSymlink.tar.gz")]
public void Tar_Compressed_Archive_Factory(string filename)
{
using Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"));
var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, filename);
using Stream stream = File.OpenRead(archiveFullPath);
using var archive = ArchiveFactory.OpenArchive(stream);
Assert.Equal(ArchiveType.Tar, archive.Type);
Assert.NotEmpty(archive.Entries);
Assert.True(archive.Type == ArchiveType.Tar);
Assert.True(archive.Entries.Any());
}
[Fact]
public void TarReaderStreamRead_Autodetect_CompressedTar()
[Theory]
[InlineData("Tar.tar.gz")]
[InlineData("Tar.tar.bz2")]
[InlineData("Tar.tar.xz")]
[InlineData("Tar.tar.lz")]
[InlineData("Tar.tar.zst")]
[InlineData("Tar.tar.Z")]
[InlineData("Tar.oldgnu.tar.gz")]
[InlineData("TarWithSymlink.tar.gz")]
public void Tar_Compressed_Archive_Factory_FromFile(string filename)
{
using Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"));
using var reader = ReaderFactory.OpenReader(stream);
Assert.Equal(ArchiveType.Tar, reader.ArchiveType);
Assert.True(reader.MoveToNextEntry());
var archiveFullPath = Path.Combine(TEST_ARCHIVES_PATH, filename);
using var archive = ArchiveFactory.OpenArchive(archiveFullPath);
Assert.True(archive.Type == ArchiveType.Tar);
Assert.True(archive.Entries.Any());
}
}

View File

@@ -74,7 +74,7 @@ public class TarReaderAsyncTests : ReaderTests
public async ValueTask Tar_BZip2_Entry_Stream_Async()
{
using Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.bz2"));
await using var reader = await TarReader.OpenAsyncReader(stream);
await using var reader = TarReader.OpenAsyncReader(stream);
while (await reader.MoveToNextEntryAsync())
{
if (!reader.Entry.IsDirectory)
@@ -135,7 +135,7 @@ public class TarReaderAsyncTests : ReaderTests
public async ValueTask Tar_BZip2_Skip_Entry_Stream_Async()
{
using Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.bz2"));
await using var reader = await TarReader.OpenAsyncReader(stream);
await using var reader = TarReader.OpenAsyncReader(stream);
var names = new List<string>();
while (await reader.MoveToNextEntryAsync())
{

View File

@@ -126,7 +126,7 @@ public class ZipArchiveAsyncTests : ArchiveTests
var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.noEmptyDirs.zip");
var modified = Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.mod.zip");
await using (var archive = await ZipArchive.OpenAsyncArchive(unmodified))
await using (var archive = ZipArchive.OpenAsyncArchive(unmodified))
{
var entry = await archive.EntriesAsync.SingleAsync(x =>
x.Key.NotNull().EndsWith("jpg", StringComparison.OrdinalIgnoreCase)
@@ -151,7 +151,7 @@ public class ZipArchiveAsyncTests : ArchiveTests
var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.mod.zip");
var modified = Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.noEmptyDirs.zip");
await using (var archive = await ZipArchive.OpenAsyncArchive(unmodified))
await using (var archive = ZipArchive.OpenAsyncArchive(unmodified))
{
await archive.AddEntryAsync("jpg\\test.jpg", jpg);
@@ -171,7 +171,7 @@ public class ZipArchiveAsyncTests : ArchiveTests
var scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Zip.deflate.noEmptyDirs.zip");
var unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.noEmptyDirs.zip");
await using (var archive = (ZipArchive)await ZipArchive.CreateAsyncArchive())
await using (var archive = (ZipArchive)ZipArchive.CreateAsyncArchive())
{
archive.DeflateCompressionLevel = CompressionLevel.BestSpeed;
archive.AddAllFromDirectory(ORIGINAL_FILES_PATH);
@@ -191,7 +191,7 @@ public class ZipArchiveAsyncTests : ArchiveTests
{
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.zip")))
{
IAsyncArchive archive = await ZipArchive.OpenAsyncArchive(new AsyncOnlyStream(stream));
IAsyncArchive archive = ZipArchive.OpenAsyncArchive(new AsyncOnlyStream(stream));
try
{
await foreach (var entry in archive.EntriesAsync.Where(entry => !entry.IsDirectory))
@@ -212,7 +212,7 @@ public class ZipArchiveAsyncTests : ArchiveTests
{
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.zip")))
{
IAsyncArchive archive = await ZipArchive.OpenAsyncArchive(new AsyncOnlyStream(stream));
IAsyncArchive archive = ZipArchive.OpenAsyncArchive(new AsyncOnlyStream(stream));
try
{
await archive.WriteToDirectoryAsync(SCRATCH_FILES_PATH);
@@ -239,7 +239,7 @@ public class ZipArchiveAsyncTests : ArchiveTests
)
#endif
{
await using IAsyncArchive archive = await ZipArchive.OpenAsyncArchive(
await using IAsyncArchive archive = ZipArchive.OpenAsyncArchive(
new AsyncOnlyStream(stream)
);
await archive.WriteToDirectoryAsync(SCRATCH_FILES_PATH, progress);

View File

@@ -491,56 +491,6 @@ public class ZipArchiveTests : ArchiveTests
}
}
[Fact]
public void Zip_WinzipAES_CompressionType()
{
// Test that WinZip AES encrypted entries correctly report their compression type
using var deflateArchive = ZipArchive.OpenArchive(
Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.WinzipAES.zip"),
new ReaderOptions { Password = "test" }
);
foreach (var entry in deflateArchive.Entries.Where(x => !x.IsDirectory))
{
Assert.True(entry.IsEncrypted);
Assert.Equal(CompressionType.Deflate, entry.CompressionType);
}
using var lzmaArchive = ZipArchive.OpenArchive(
Path.Combine(TEST_ARCHIVES_PATH, "Zip.lzma.WinzipAES.zip"),
new ReaderOptions { Password = "test" }
);
foreach (var entry in lzmaArchive.Entries.Where(x => !x.IsDirectory))
{
Assert.True(entry.IsEncrypted);
Assert.Equal(CompressionType.LZMA, entry.CompressionType);
}
}
[Fact]
public void Zip_Pkware_CompressionType()
{
// Test that Pkware encrypted entries correctly report their compression type
using var deflateArchive = ZipArchive.OpenArchive(
Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.pkware.zip"),
new ReaderOptions { Password = "test" }
);
foreach (var entry in deflateArchive.Entries.Where(x => !x.IsDirectory))
{
Assert.True(entry.IsEncrypted);
Assert.Equal(CompressionType.Deflate, entry.CompressionType);
}
using var bzip2Archive = ZipArchive.OpenArchive(
Path.Combine(TEST_ARCHIVES_PATH, "Zip.bzip2.pkware.zip"),
new ReaderOptions { Password = "test" }
);
foreach (var entry in bzip2Archive.Entries.Where(x => !x.IsDirectory))
{
Assert.True(entry.IsEncrypted);
Assert.Equal(CompressionType.BZip2, entry.CompressionType);
}
}
[Fact]
public void Zip_Read_Volume_Comment()
{

View File

@@ -224,7 +224,7 @@ public class ZipReaderAsyncTests : ReaderTests
{
if (!reader.Entry.IsDirectory)
{
Assert.Equal(CompressionType.LZMA, reader.Entry.CompressionType);
Assert.Equal(CompressionType.Unknown, reader.Entry.CompressionType);
await reader.WriteEntryToDirectoryAsync(SCRATCH_FILES_PATH);
}
}
@@ -252,7 +252,7 @@ public class ZipReaderAsyncTests : ReaderTests
{
if (!reader.Entry.IsDirectory)
{
Assert.Equal(CompressionType.Deflate, reader.Entry.CompressionType);
Assert.Equal(CompressionType.Unknown, reader.Entry.CompressionType);
await reader.WriteEntryToDirectoryAsync(SCRATCH_FILES_PATH);
}
}

View File

@@ -201,7 +201,7 @@ public class ZipReaderTests : ReaderTests
{
if (!reader.Entry.IsDirectory)
{
Assert.Equal(CompressionType.LZMA, reader.Entry.CompressionType);
Assert.Equal(CompressionType.Unknown, reader.Entry.CompressionType);
reader.WriteEntryToDirectory(SCRATCH_FILES_PATH);
}
}
@@ -223,7 +223,7 @@ public class ZipReaderTests : ReaderTests
{
if (!reader.Entry.IsDirectory)
{
Assert.Equal(CompressionType.Deflate, reader.Entry.CompressionType);
Assert.Equal(CompressionType.Unknown, reader.Entry.CompressionType);
reader.WriteEntryToDirectory(SCRATCH_FILES_PATH);
}
}