Compare commits

..

56 Commits

Author SHA1 Message Date
copilot-swe-agent[bot]
921cff00a5 Fix async test method naming: rename Sync to Async
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-12 12:08:09 +00:00
copilot-swe-agent[bot]
0f37049aad Initial plan 2026-01-12 12:05:04 +00:00
Adam Hathcock
3fb07d129f Use async dispose always 2026-01-12 10:19:01 +00:00
Adam Hathcock
a35e65ee42 use ifdefs for creating files? 2026-01-08 16:52:23 +00:00
Adam Hathcock
d1fcf31f7e fmt 2026-01-08 16:31:11 +00:00
Adam Hathcock
17cd934b5b use async methods where we can 2026-01-08 16:24:11 +00:00
Adam Hathcock
ae614cd3fe update references 2026-01-08 16:14:40 +00:00
Adam Hathcock
ef0b9d525c merge conflicts 2026-01-08 15:37:55 +00:00
Adam Hathcock
01e6e04a78 Merge branch 'master' into adam/async
# Conflicts:
#	src/SharpCompress/Common/Zip/Headers/LocalEntryHeader.cs
#	src/SharpCompress/Common/Zip/Headers/ZipFileEntry.cs
2026-01-08 15:36:41 +00:00
Adam Hathcock
a7d6d6493e add version numbers again to get them 2026-01-08 14:22:54 +00:00
Adam Hathcock
b6cc95af73 Merge pull request #1122 from adamhathcock/adam/netstandard-20-readd
Readd netstandard 2.0
2026-01-08 14:03:30 +00:00
Adam Hathcock
bdcc1d32c2 fix scratch dir creation 2026-01-08 14:01:35 +00:00
Adam Hathcock
90d91cc7c2 Merge pull request #1117 from adamhathcock/adam/rework-archive-encoding
Change ArchiveEncoding to interface.
2026-01-08 13:39:30 +00:00
Adam Hathcock
ec83cf588f Readd netstandard 2.0 2026-01-08 13:33:36 +00:00
Adam Hathcock
4f0a2e3c95 disable zip64 tests 2026-01-08 12:55:16 +00:00
Adam Hathcock
3747a27109 Task to ValueTask 2026-01-08 12:35:12 +00:00
Adam Hathcock
b501bac54a better names for new interfaces 2026-01-08 12:02:26 +00:00
Adam Hathcock
7aec98d652 read async interface for reader 2026-01-08 11:28:15 +00:00
Adam Hathcock
406b198e0e can't dispose before returning 2026-01-08 10:24:33 +00:00
Adam Hathcock
8e42296c3a switch Task to ValueTask 2026-01-08 10:22:53 +00:00
Adam Hathcock
60e5220bd0 fmt 2026-01-08 09:41:48 +00:00
Adam Hathcock
0f37cbfd0b archive async path uses new async interface 2026-01-08 09:39:04 +00:00
Adam Hathcock
541fd136d5 IArchiveAsync 2026-01-08 09:14:46 +00:00
Adam Hathcock
60d42ca9c3 fmt 2026-01-07 16:38:48 +00:00
Adam Hathcock
ac0716ddeb write testing 2026-01-07 15:01:04 +00:00
Adam Hathcock
b9792ca491 fix async zip decompression 2026-01-07 14:54:32 +00:00
Adam Hathcock
c3fd42057a Pass more Zip tests 2026-01-07 14:47:20 +00:00
Adam Hathcock
39d85ff4f6 conflicts from merge 2026-01-07 12:18:14 +00:00
Adam Hathcock
fbce3e77ba Merge branch 'master' into adam/async
# Conflicts:
#	src/SharpCompress/Utility.cs
2026-01-07 12:11:19 +00:00
Adam Hathcock
66e9de2685 fixed comment 2026-01-07 11:26:42 +00:00
Adam Hathcock
321520408b fmt 2026-01-07 11:12:02 +00:00
Adam Hathcock
68451bd75f Use explicit enum, add comments 2026-01-07 11:10:15 +00:00
Adam Hathcock
486fdf118b move to own files and refactor UTF8 usage 2026-01-07 10:39:18 +00:00
Adam Hathcock
bd3cda0617 Some restoring of functionality 2026-01-07 10:32:02 +00:00
Adam Hathcock
725503d1ce Change ArchiveEncoding to interface. Simplify class. Question what to do about Forced and complex access 2026-01-07 08:44:12 +00:00
Adam Hathcock
b825e15406 Merge pull request #1100 from adamhathcock/copilot/fix-read-method-implementations
Consolidate stream extension methods and simplify with framework methods
2026-01-05 17:24:54 +00:00
copilot-swe-agent[bot]
9bd86f64c9 Replace manual TransferTo implementation with Stream.CopyTo framework methods
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-03 18:41:02 +00:00
copilot-swe-agent[bot]
77015224f6 Add input validation for ReadBytesAsync count parameter
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-03 17:19:55 +00:00
copilot-swe-agent[bot]
372ecb77d0 Use threshold-based ArrayPool strategy for BinaryReaderExtensions
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-03 17:17:59 +00:00
copilot-swe-agent[bot]
05642cbdc6 Use ArrayPool for temporary buffers in BinaryReaderExtensions
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-03 17:12:43 +00:00
copilot-swe-agent[bot]
1a71c01fd4 Consolidate ReadExact and ReadFully methods into Utility.cs
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-03 15:58:27 +00:00
Adam Hathcock
54640548ed Consolidate reads 2026-01-03 15:47:10 +00:00
copilot-swe-agent[bot]
ea02d31096 Add IsArchiveAsync overloads for Zip and GZip factories
- Added IsArchiveAsync interface method to IFactory
- Implemented async versions of IsZipFile, IsZipMulti, IsGZipFile
- Updated ZipFactory and GZipFactory to override IsArchiveAsync
- Updated ReaderFactory.OpenAsync to use IsArchiveAsync
- Fixed Zip_Reader_Disposal_Test2_Async to use ReaderFactory.OpenAsync
- Fixed TestStream to properly forward ReadAsync calls
- Removed BufferedStream wrapping from AsyncBinaryReader as it uses sync Read
- Added default implementation in Factory base class

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-02 18:10:54 +00:00
Adam Hathcock
d04830ba90 Add some OpenAsync 2026-01-02 17:52:18 +00:00
Adam Hathcock
8533b09091 start of implementing zip reading async 2025-12-31 14:53:55 +00:00
Adam Hathcock
44b7955d85 reader tests 2025-12-31 14:43:15 +00:00
Adam Hathcock
038b9f18c6 Merge remote-tracking branch 'origin/master' into copilot/add-buffered-stream-async-read 2025-12-31 14:24:31 +00:00
copilot-swe-agent[bot]
6e0e20ba6e Fix zip64_locator to use ReadUInt32Async instead of ReadUInt16Async
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-30 11:50:41 +00:00
copilot-swe-agent[bot]
ec31cb9987 Fix Zip headers to support both sync and async reading
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-30 11:47:31 +00:00
copilot-swe-agent[bot]
39a0b4ce78 Use BufferedStream for async reading in AsyncBinaryReader
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-30 11:23:55 +00:00
Adam Hathcock
af719707bf Merge branch 'adam/async-binary-reader' into copilot/add-buffered-stream-async-read 2025-12-30 11:17:16 +00:00
copilot-swe-agent[bot]
8415a19912 Initial plan 2025-12-30 11:15:28 +00:00
Adam Hathcock
1607d2768e Merge branch 'master' into adam/async-binary-reader 2025-12-30 11:13:14 +00:00
Adam Hathcock
fb76bd82f2 first commit of async reader 2025-11-26 08:09:20 +00:00
Adam Hathcock
3bdaba46a9 fmt 2025-11-25 15:39:43 +00:00
Adam Hathcock
7c3c94ed7f Add ArcReaderAsync tests 2025-11-25 14:44:03 +00:00
171 changed files with 5033 additions and 2470 deletions

View File

@@ -368,6 +368,9 @@ dotnet_diagnostic.NX0001.severity = error
dotnet_diagnostic.NX0002.severity = silent
dotnet_diagnostic.NX0003.severity = silent
dotnet_diagnostic.VSTHRD110.severity = error
dotnet_diagnostic.VSTHRD107.severity = error
##########################################
# Styles
##########################################

View File

@@ -13,7 +13,11 @@
<PackageVersion Include="System.Memory" Version="4.6.3" />
<PackageVersion Include="xunit" Version="2.9.3" />
<PackageVersion Include="xunit.runner.visualstudio" Version="3.1.5" />
<GlobalPackageReference Include="Microsoft.SourceLink.GitHub" Version="10.0.102" />
<GlobalPackageReference Include="Microsoft.SourceLink.GitHub" Version="8.0.0" />
<GlobalPackageReference Include="Microsoft.NETFramework.ReferenceAssemblies" Version="1.0.3" />
<GlobalPackageReference
Include="Microsoft.VisualStudio.Threading.Analyzers"
Version="17.14.15"
/>
</ItemGroup>
</Project>

View File

@@ -230,7 +230,7 @@ static async Task<(string version, bool isPrerelease)> GetVersion()
}
else
{
// Not tagged - create prerelease version
// Not tagged - create prerelease version based on next minor version
var allTags = (await GetGitOutput("tag", "--list"))
.Split('\n', StringSplitOptions.RemoveEmptyEntries)
.Where(tag => Regex.IsMatch(tag.Trim(), @"^\d+\.\d+\.\d+$"))
@@ -240,22 +240,8 @@ static async Task<(string version, bool isPrerelease)> GetVersion()
var lastTag = allTags.OrderBy(tag => Version.Parse(tag)).LastOrDefault() ?? "0.0.0";
var lastVersion = Version.Parse(lastTag);
// Determine version increment based on branch
var currentBranch = await GetCurrentBranch();
Version nextVersion;
if (currentBranch == "release")
{
// Release branch: increment patch version
nextVersion = new Version(lastVersion.Major, lastVersion.Minor, lastVersion.Build + 1);
Console.WriteLine($"Building prerelease for release branch (patch increment)");
}
else
{
// Master or other branches: increment minor version
nextVersion = new Version(lastVersion.Major, lastVersion.Minor + 1, 0);
Console.WriteLine($"Building prerelease for {currentBranch} branch (minor increment)");
}
// Increment minor version for next release
var nextVersion = new Version(lastVersion.Major, lastVersion.Minor + 1, 0);
// Use commit count since the last version tag if available; otherwise, fall back to total count
var revListArgs = allTags.Any() ? $"--count {lastTag}..HEAD" : "--count HEAD";
@@ -267,28 +253,6 @@ static async Task<(string version, bool isPrerelease)> GetVersion()
}
}
static async Task<string> GetCurrentBranch()
{
// In GitHub Actions, GITHUB_REF_NAME contains the branch name
var githubRefName = Environment.GetEnvironmentVariable("GITHUB_REF_NAME");
if (!string.IsNullOrEmpty(githubRefName))
{
return githubRefName;
}
// Fallback to git command for local builds
try
{
var (output, _) = await ReadAsync("git", "branch --show-current");
return output.Trim();
}
catch (Exception ex)
{
Console.WriteLine($"Warning: Could not determine current branch: {ex.Message}");
return "unknown";
}
}
static async Task<string> GetGitOutput(string command, string args)
{
try

View File

@@ -25,14 +25,20 @@
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[10.0.102, )",
"resolved": "10.0.102",
"contentHash": "Oxq3RCIJSdtpIU4hLqO7XaDe/Ra3HS9Wi8rJl838SAg6Zu1iQjerA0+xXWBgUFYbgknUGCLOU0T+lzMLkvY9Qg==",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==",
"dependencies": {
"Microsoft.Build.Tasks.Git": "10.0.102",
"Microsoft.SourceLink.Common": "10.0.102"
"Microsoft.Build.Tasks.Git": "8.0.0",
"Microsoft.SourceLink.Common": "8.0.0"
}
},
"Microsoft.VisualStudio.Threading.Analyzers": {
"type": "Direct",
"requested": "[17.14.15, )",
"resolved": "17.14.15",
"contentHash": "mXQPJsbuUD2ydq4/ffd8h8tSOFCXec+2xJOVNCvXjuMOq/+5EKHq3D2m2MC2+nUaXeFMSt66VS/J4HdKBixgcw=="
},
"SimpleExec": {
"type": "Direct",
"requested": "[13.0.0, )",
@@ -41,8 +47,8 @@
},
"Microsoft.Build.Tasks.Git": {
"type": "Transitive",
"resolved": "10.0.102",
"contentHash": "0i81LYX31U6UiXz4NOLbvc++u+/mVDmOt+PskrM/MygpDxkv9THKQyRUmavBpLK6iBV0abNWnn+CQgSRz//Pwg=="
"resolved": "8.0.0",
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
},
"Microsoft.NETFramework.ReferenceAssemblies.net461": {
"type": "Transitive",
@@ -51,8 +57,8 @@
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "10.0.102",
"contentHash": "Mk1IMb9q5tahC2NltxYXFkLBtuBvfBoCQ3pIxYQWfzbCE9o1OB9SsHe0hnNGo7lWgTA/ePbFAJLWu6nLL9K17A=="
"resolved": "8.0.0",
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
}
}
}

View File

@@ -1,14 +1,13 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
public abstract class AbstractArchive<TEntry, TVolume> : IArchive
public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IAsyncArchive
where TEntry : IArchiveEntry
where TVolume : IVolume
{
@@ -26,6 +25,12 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive
_sourceStream = sourceStream;
_lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(_sourceStream));
_lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
_lazyVolumesAsync = new LazyAsyncReadOnlyCollection<TVolume>(
LoadVolumesAsync(_sourceStream)
);
_lazyEntriesAsync = new LazyAsyncReadOnlyCollection<TEntry>(
LoadEntriesAsync(_lazyVolumesAsync)
);
}
internal AbstractArchive(ArchiveType type)
@@ -34,19 +39,16 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive
ReaderOptions = new();
_lazyVolumes = new LazyReadOnlyCollection<TVolume>(Enumerable.Empty<TVolume>());
_lazyEntries = new LazyReadOnlyCollection<TEntry>(Enumerable.Empty<TEntry>());
_lazyVolumesAsync = new LazyAsyncReadOnlyCollection<TVolume>(
AsyncEnumerableEx.Empty<TVolume>()
);
_lazyEntriesAsync = new LazyAsyncReadOnlyCollection<TEntry>(
AsyncEnumerableEx.Empty<TEntry>()
);
}
public ArchiveType Type { get; }
private static Stream CheckStreams(Stream stream)
{
if (!stream.CanSeek || !stream.CanRead)
{
throw new ArchiveException("Archive streams must be Readable and Seekable");
}
return stream;
}
/// <summary>
/// Returns a ReadOnlyCollection of all the RarArchiveEntries across the one or many parts of the RarArchive.
/// </summary>
@@ -72,6 +74,19 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive
protected abstract IEnumerable<TVolume> LoadVolumes(SourceStream sourceStream);
protected abstract IEnumerable<TEntry> LoadEntries(IEnumerable<TVolume> volumes);
protected virtual IAsyncEnumerable<TVolume> LoadVolumesAsync(SourceStream sourceStream) =>
LoadVolumes(sourceStream).ToAsyncEnumerable();
protected virtual async IAsyncEnumerable<TEntry> LoadEntriesAsync(
IAsyncEnumerable<TVolume> volumes
)
{
foreach (var item in LoadEntries(await volumes.ToListAsync()))
{
yield return item;
}
}
IEnumerable<IArchiveEntry> IArchive.Entries => Entries.Cast<IArchiveEntry>();
IEnumerable<IVolume> IArchive.Volumes => _lazyVolumes.Cast<IVolume>();
@@ -118,6 +133,7 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive
}
protected abstract IReader CreateReaderForSolidExtraction();
protected abstract ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync();
/// <summary>
/// Archive is SOLID (this means the Archive saved bytes by reusing information which helps for archives containing many small files).
@@ -140,4 +156,67 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive
return Entries.All(x => x.IsComplete);
}
}
#region Async Support
private readonly LazyAsyncReadOnlyCollection<TVolume> _lazyVolumesAsync;
private readonly LazyAsyncReadOnlyCollection<TEntry> _lazyEntriesAsync;
public virtual async ValueTask DisposeAsync()
{
if (!_disposed)
{
await foreach (var v in _lazyVolumesAsync)
{
v.Dispose();
}
foreach (var v in _lazyEntriesAsync.GetLoaded().Cast<Entry>())
{
v.Close();
}
_sourceStream?.Dispose();
_disposed = true;
}
}
private async ValueTask EnsureEntriesLoadedAsync()
{
await _lazyEntriesAsync.EnsureFullyLoaded();
await _lazyVolumesAsync.EnsureFullyLoaded();
}
public virtual IAsyncEnumerable<TEntry> EntriesAsync => _lazyEntriesAsync;
IAsyncEnumerable<IArchiveEntry> IAsyncArchive.EntriesAsync =>
EntriesAsync.Cast<TEntry, IArchiveEntry>();
public IAsyncEnumerable<IVolume> VolumesAsync => _lazyVolumesAsync.Cast<TVolume, IVolume>();
public async ValueTask<IAsyncReader> ExtractAllEntriesAsync()
{
if (!IsSolid && Type != ArchiveType.SevenZip)
{
throw new SharpCompressException(
"ExtractAllEntries can only be used on solid archives or 7Zip archives (which require random access)."
);
}
await EnsureEntriesLoadedAsync();
return await CreateReaderForSolidExtractionAsync();
}
public virtual ValueTask<bool> IsSolidAsync() => new(false);
public async ValueTask<bool> IsCompleteAsync()
{
await EnsureEntriesLoadedAsync();
return await EntriesAsync.All(x => x.IsComplete);
}
public async ValueTask<long> TotalSizeAsync() =>
await EntriesAsync.Aggregate(0L, (total, cf) => total + cf.CompressedSize);
public async ValueTask<long> TotalUncompressSizeAsync() =>
await EntriesAsync.Aggregate(0L, (total, cf) => total + cf.Size);
#endregion
}
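
The default LoadVolumesAsync/LoadEntriesAsync overrides above bridge the existing synchronous loaders into IAsyncEnumerable. A minimal standalone sketch of that bridging pattern, assuming the System.Linq.Async package supplies ToAsyncEnumerable/ToListAsync (the Fake* types are placeholders for illustration, not SharpCompress types):

// Illustrative sketch only: how a synchronous loader can back an async
// enumeration surface, mirroring the default overrides in the diff above.
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;

internal sealed class FakeVolume { }

internal sealed class FakeEntry { public string Name = ""; }

internal static class SyncToAsyncBridgeSketch
{
    // Existing synchronous loaders, standing in for LoadVolumes/LoadEntries.
    private static IEnumerable<FakeVolume> LoadVolumes() => new[] { new FakeVolume() };

    private static IEnumerable<FakeEntry> LoadEntries(IEnumerable<FakeVolume> volumes) =>
        new[] { new FakeEntry { Name = "file.txt" } };

    // Default async override: wrap the sync enumerable (System.Linq.Async).
    public static IAsyncEnumerable<FakeVolume> LoadVolumesAsync() =>
        LoadVolumes().ToAsyncEnumerable();

    // Materialize the volumes asynchronously, then reuse the sync entry loader.
    public static async IAsyncEnumerable<FakeEntry> LoadEntriesAsync(
        IAsyncEnumerable<FakeVolume> volumes)
    {
        foreach (var entry in LoadEntries(await volumes.ToListAsync()))
        {
            yield return entry;
        }
    }
}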

View File

@@ -162,7 +162,7 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
SaveTo(stream, options, OldEntries, newEntries);
}
public async Task SaveToAsync(
public async ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
CancellationToken cancellationToken = default
@@ -208,7 +208,7 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
IEnumerable<TEntry> newEntries
);
protected abstract Task SaveToAsync(
protected abstract ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
IEnumerable<TEntry> oldEntries,

View File

@@ -2,6 +2,8 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Factories;
using SharpCompress.IO;
@@ -24,6 +26,28 @@ public static class ArchiveFactory
return FindFactory<IArchiveFactory>(stream).Open(stream, readerOptions);
}
/// <summary>
/// Opens an Archive for random access asynchronously
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
/// <returns></returns>
public static async ValueTask<IAsyncArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
readerOptions ??= new ReaderOptions();
stream = SharpCompressStream.Create(stream, bufferSize: readerOptions.BufferSize);
var factory = await FindFactoryAsync<IArchiveFactory>(stream, cancellationToken)
.ConfigureAwait(false);
return await factory
.OpenAsync(stream, readerOptions, cancellationToken)
.ConfigureAwait(false);
}
public static IWritableArchive Create(ArchiveType type)
{
var factory = Factory
@@ -49,6 +73,22 @@ public static class ArchiveFactory
return Open(new FileInfo(filePath), options);
}
/// <summary>
/// Opens an Archive from a filepath asynchronously.
/// </summary>
/// <param name="filePath"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
string filePath,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
filePath.NotNullOrEmpty(nameof(filePath));
return OpenAsync(new FileInfo(filePath), options, cancellationToken);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
@@ -61,6 +101,25 @@ public static class ArchiveFactory
return FindFactory<IArchiveFactory>(fileInfo).Open(fileInfo, options);
}
/// <summary>
/// Opens an Archive from a FileInfo object asynchronously.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
public static async ValueTask<IAsyncArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
options ??= new ReaderOptions { LeaveStreamOpen = false };
var factory = await FindFactoryAsync<IArchiveFactory>(fileInfo, cancellationToken)
.ConfigureAwait(false);
return await factory.OpenAsync(fileInfo, options, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Constructor with IEnumerable FileInfo objects, multi and split support.
/// </summary>
@@ -87,6 +146,40 @@ public static class ArchiveFactory
return FindFactory<IMultiArchiveFactory>(fileInfo).Open(filesArray, options);
}
/// <summary>
/// Opens a multi-part archive from files asynchronously.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
public static async ValueTask<IAsyncArchive> OpenAsync(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
fileInfos.NotNull(nameof(fileInfos));
var filesArray = fileInfos.ToArray();
if (filesArray.Length == 0)
{
throw new InvalidOperationException("No files to open");
}
var fileInfo = filesArray[0];
if (filesArray.Length == 1)
{
return await OpenAsync(fileInfo, options, cancellationToken).ConfigureAwait(false);
}
fileInfo.NotNull(nameof(fileInfo));
options ??= new ReaderOptions { LeaveStreamOpen = false };
var factory = FindFactory<IMultiArchiveFactory>(fileInfo);
return await factory
.OpenAsync(filesArray, options, cancellationToken)
.ConfigureAwait(false);
}
/// <summary>
/// Constructor with IEnumerable FileInfo objects, multi and split support.
/// </summary>
@@ -113,6 +206,41 @@ public static class ArchiveFactory
return FindFactory<IMultiArchiveFactory>(firstStream).Open(streamsArray, options);
}
/// <summary>
/// Opens a multi-part archive from streams asynchronously.
/// </summary>
/// <param name="streams"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
public static async ValueTask<IAsyncArchive> OpenAsync(
IEnumerable<Stream> streams,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
streams.NotNull(nameof(streams));
var streamsArray = streams.ToArray();
if (streamsArray.Length == 0)
{
throw new InvalidOperationException("No streams");
}
var firstStream = streamsArray[0];
if (streamsArray.Length == 1)
{
return await OpenAsync(firstStream, options, cancellationToken).ConfigureAwait(false);
}
firstStream.NotNull(nameof(firstStream));
options ??= new ReaderOptions();
var factory = FindFactory<IMultiArchiveFactory>(firstStream);
return await factory
.OpenAsync(streamsArray, options, cancellationToken)
.ConfigureAwait(false);
}
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
@@ -166,14 +294,68 @@ public static class ArchiveFactory
);
}
public static bool IsArchive(string filePath, out ArchiveType? type)
private static async ValueTask<T> FindFactoryAsync<T>(
FileInfo finfo,
CancellationToken cancellationToken
)
where T : IFactory
{
finfo.NotNull(nameof(finfo));
using Stream stream = finfo.OpenRead();
return await FindFactoryAsync<T>(stream, cancellationToken);
}
private static async ValueTask<T> FindFactoryAsync<T>(
Stream stream,
CancellationToken cancellationToken
)
where T : IFactory
{
stream.NotNull(nameof(stream));
if (!stream.CanRead || !stream.CanSeek)
{
throw new ArgumentException("Stream should be readable and seekable");
}
var factories = Factory.Factories.OfType<T>();
var startPosition = stream.Position;
foreach (var factory in factories)
{
stream.Seek(startPosition, SeekOrigin.Begin);
if (await factory.IsArchiveAsync(stream, cancellationToken: cancellationToken))
{
stream.Seek(startPosition, SeekOrigin.Begin);
return factory;
}
}
var extensions = string.Join(", ", factories.Select(item => item.Name));
throw new InvalidOperationException(
$"Cannot determine compressed stream type. Supported Archive Formats: {extensions}"
);
}
public static bool IsArchive(
string filePath,
out ArchiveType? type,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
filePath.NotNullOrEmpty(nameof(filePath));
using Stream s = File.OpenRead(filePath);
return IsArchive(s, out type);
return IsArchive(s, out type, bufferSize);
}
public static bool IsArchive(Stream stream, out ArchiveType? type)
public static bool IsArchive(
Stream stream,
out ArchiveType? type,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
type = null;
stream.NotNull(nameof(stream));
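
Combined with the IAsyncArchive surface added elsewhere in this branch, the OpenAsync overloads above enable an end-to-end async open path. A caller-side sketch, assuming the API lands as shown in this diff (the path and console output are illustrative):

// Open an archive asynchronously and list its entries.
using System;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;

internal static class OpenAsyncSample
{
    public static async Task ListEntriesAsync(string archivePath, CancellationToken ct = default)
    {
        // OpenAsync probes the registered factories with IsArchiveAsync, then
        // hands back an IAsyncArchive that must be disposed asynchronously.
        await using var archive = await ArchiveFactory.OpenAsync(archivePath, cancellationToken: ct);

        await foreach (var entry in archive.EntriesAsync.WithCancellation(ct))
        {
            if (!entry.IsDirectory)
            {
                Console.WriteLine($"{entry.Key} ({entry.Size} bytes)");
            }
        }

        Console.WriteLine($"Compressed total: {await archive.TotalSizeAsync()} bytes");
    }
}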

View File

@@ -1,12 +1,14 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
class AutoArchiveFactory : IArchiveFactory
internal class AutoArchiveFactory : IArchiveFactory
{
public string Name => nameof(AutoArchiveFactory);
@@ -14,14 +16,36 @@ class AutoArchiveFactory : IArchiveFactory
public IEnumerable<string> GetSupportedExtensions() => throw new NotSupportedException();
public bool IsArchive(Stream stream, string? password = null) =>
throw new NotSupportedException();
public bool IsArchive(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => throw new NotSupportedException();
public ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
) => throw new NotSupportedException();
public FileInfo? GetFilePart(int index, FileInfo part1) => throw new NotSupportedException();
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
ArchiveFactory.Open(stream, readerOptions);
public async ValueTask<IAsyncArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => await ArchiveFactory.OpenAsync(stream, readerOptions, cancellationToken);
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
ArchiveFactory.Open(fileInfo, readerOptions);
public async ValueTask<IAsyncArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => await ArchiveFactory.OpenAsync(fileInfo, readerOptions, cancellationToken);
}

View File

@@ -102,6 +102,70 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
);
}
/// <summary>
/// Opens a GZipArchive asynchronously from a stream.
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(stream, readerOptions));
}
/// <summary>
/// Opens a GZipArchive asynchronously from a FileInfo.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(fileInfo, readerOptions));
}
/// <summary>
/// Opens a GZipArchive asynchronously from multiple streams.
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(streams, readerOptions));
}
/// <summary>
/// Opens a GZipArchive asynchronously from multiple FileInfo objects.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(fileInfos, readerOptions));
}
public static GZipArchive Create() => new();
/// <summary>
@@ -138,10 +202,13 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
SaveTo(stream, new WriterOptions(CompressionType.GZip));
}
public Task SaveToAsync(string filePath, CancellationToken cancellationToken = default) =>
public ValueTask SaveToAsync(string filePath, CancellationToken cancellationToken = default) =>
SaveToAsync(new FileInfo(filePath), cancellationToken);
public async Task SaveToAsync(FileInfo fileInfo, CancellationToken cancellationToken = default)
public async ValueTask SaveToAsync(
FileInfo fileInfo,
CancellationToken cancellationToken = default
)
{
using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
await SaveToAsync(stream, new WriterOptions(CompressionType.GZip), cancellationToken)
@@ -167,6 +234,28 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
return true;
}
public static async ValueTask<bool> IsGZipFileAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
// read the header on the first read
byte[] header = new byte[10];
// workitem 8501: handle edge case (decompress empty stream)
if (!await stream.ReadFullyAsync(header, cancellationToken).ConfigureAwait(false))
{
return false;
}
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
{
return false;
}
return true;
}
internal GZipArchive()
: base(ArchiveType.GZip) { }
@@ -213,7 +302,7 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
}
}
protected override async Task SaveToAsync(
protected override async ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
IEnumerable<GZipArchiveEntry> oldEntries,
@@ -250,4 +339,11 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
stream.Position = 0;
return GZipReader.Open(stream);
}
protected override ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync()
{
var stream = Volumes.Single().Stream;
stream.Position = 0;
return new(GZipReader.Open(stream));
}
}

View File

@@ -23,10 +23,12 @@ public class GZipArchiveEntry : GZipEntry, IArchiveEntry
return Parts.Single().GetCompressedStream().NotNull();
}
public virtual Task<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
public async ValueTask<Stream> OpenEntryStreamAsync(
CancellationToken cancellationToken = default
)
{
// GZip synchronous implementation is fast enough, just wrap it
return Task.FromResult(OpenEntryStream());
return OpenEntryStream();
}
#region IArchiveEntry Members

View File

@@ -17,7 +17,7 @@ public interface IArchiveEntry : IEntry
/// Opens the current entry as a stream that will decompress as it is read asynchronously.
/// Read the entire stream or use SkipEntry on EntryStream.
/// </summary>
Task<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default);
ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default);
/// <summary>
/// The archive can find all the parts of the archive needed to extract this entry.

View File

@@ -9,6 +9,8 @@ namespace SharpCompress.Archives;
public static class IArchiveEntryExtensions
{
private const int BufferSize = 81920;
/// <param name="archiveEntry">The archive entry to extract.</param>
extension(IArchiveEntry archiveEntry)
{
@@ -26,7 +28,7 @@ public static class IArchiveEntryExtensions
using var entryStream = archiveEntry.OpenEntryStream();
var sourceStream = WrapWithProgress(entryStream, archiveEntry, progress);
sourceStream.CopyTo(streamToWriteTo, Constants.BufferSize);
sourceStream.CopyTo(streamToWriteTo, BufferSize);
}
/// <summary>
@@ -35,7 +37,7 @@ public static class IArchiveEntryExtensions
/// <param name="streamToWriteTo">The stream to write the entry content to.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <param name="progress">Optional progress reporter for tracking extraction progress.</param>
public async Task WriteToAsync(
public async ValueTask WriteToAsync(
Stream streamToWriteTo,
IProgress<ProgressReport>? progress = null,
CancellationToken cancellationToken = default
@@ -49,7 +51,7 @@ public static class IArchiveEntryExtensions
using var entryStream = await archiveEntry.OpenEntryStreamAsync(cancellationToken);
var sourceStream = WrapWithProgress(entryStream, archiveEntry, progress);
await sourceStream
.CopyToAsync(streamToWriteTo, Constants.BufferSize, cancellationToken)
.CopyToAsync(streamToWriteTo, BufferSize, cancellationToken)
.ConfigureAwait(false);
}
}
@@ -108,18 +110,20 @@ public static class IArchiveEntryExtensions
/// <summary>
/// Extract to specific directory asynchronously, retaining filename
/// </summary>
public Task WriteToDirectoryAsync(
public async ValueTask WriteToDirectoryAsync(
string destinationDirectory,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default
) =>
ExtractionMethods.WriteEntryToDirectoryAsync(
entry,
destinationDirectory,
options,
entry.WriteToFileAsync,
cancellationToken
);
await ExtractionMethods
.WriteEntryToDirectoryAsync(
entry,
destinationDirectory,
options,
entry.WriteToFileAsync,
cancellationToken
)
.ConfigureAwait(false);
/// <summary>
/// Extract to specific file
@@ -139,21 +143,23 @@ public static class IArchiveEntryExtensions
/// <summary>
/// Extract to specific file asynchronously
/// </summary>
public Task WriteToFileAsync(
public async ValueTask WriteToFileAsync(
string destinationFileName,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default
) =>
ExtractionMethods.WriteEntryToFileAsync(
entry,
destinationFileName,
options,
async (x, fm, ct) =>
{
using var fs = File.Open(destinationFileName, fm);
await entry.WriteToAsync(fs, null, ct).ConfigureAwait(false);
},
cancellationToken
);
await ExtractionMethods
.WriteEntryToFileAsync(
entry,
destinationFileName,
options,
async (x, fm, ct) =>
{
using var fs = File.Open(destinationFileName, fm);
await entry.WriteToAsync(fs, null, ct).ConfigureAwait(false);
},
cancellationToken
)
.ConfigureAwait(false);
}
}
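
A caller-side sketch for the reworked per-entry async members (assuming the extension surface above lands as shown; the destination path is illustrative):

// Extract a single entry asynchronously into a file.
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;

internal static class SingleEntrySample
{
    public static async ValueTask ExtractEntryAsync(
        IArchiveEntry entry,
        string destinationPath,
        CancellationToken ct = default)
    {
        // WriteToAsync copies the decompressed entry into any writable stream;
        // WriteToFileAsync/WriteToDirectoryAsync build on the same path.
        using var output = File.Create(destinationPath);
        await entry.WriteToAsync(output, progress: null, cancellationToken: ct);
    }
}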

View File

@@ -1,8 +1,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
@@ -80,89 +78,5 @@ public static class IArchiveExtensions
);
}
}
/// <summary>
/// Extract to specific directory asynchronously with progress reporting and cancellation support
/// </summary>
/// <param name="destinationDirectory">The folder to extract into.</param>
/// <param name="options">Extraction options.</param>
/// <param name="progress">Optional progress reporter for tracking extraction progress.</param>
/// <param name="cancellationToken">Optional cancellation token.</param>
public async Task WriteToDirectoryAsync(
string destinationDirectory,
ExtractionOptions? options = null,
IProgress<ProgressReport>? progress = null,
CancellationToken cancellationToken = default
)
{
// For solid archives (Rar, 7Zip), use the optimized reader-based approach
if (archive.IsSolid || archive.Type == ArchiveType.SevenZip)
{
using var reader = archive.ExtractAllEntries();
await reader.WriteAllToDirectoryAsync(
destinationDirectory,
options,
cancellationToken
);
}
else
{
// For non-solid archives, extract entries directly
await archive.WriteToDirectoryAsyncInternal(
destinationDirectory,
options,
progress,
cancellationToken
);
}
}
private async Task WriteToDirectoryAsyncInternal(
string destinationDirectory,
ExtractionOptions? options,
IProgress<ProgressReport>? progress,
CancellationToken cancellationToken
)
{
// Prepare for progress reporting
var totalBytes = archive.TotalUncompressSize;
var bytesRead = 0L;
// Tracking for created directories.
var seenDirectories = new HashSet<string>();
// Extract
foreach (var entry in archive.Entries)
{
cancellationToken.ThrowIfCancellationRequested();
if (entry.IsDirectory)
{
var dirPath = Path.Combine(
destinationDirectory,
entry.Key.NotNull("Entry Key is null")
);
if (
Path.GetDirectoryName(dirPath + "/") is { } parentDirectory
&& seenDirectories.Add(dirPath)
)
{
Directory.CreateDirectory(parentDirectory);
}
continue;
}
// Use the entry's WriteToDirectoryAsync method which respects ExtractionOptions
await entry
.WriteToDirectoryAsync(destinationDirectory, options, cancellationToken)
.ConfigureAwait(false);
// Update progress
bytesRead += entry.Size;
progress?.Report(
new ProgressReport(entry.Key ?? string.Empty, bytesRead, totalBytes)
);
}
}
}
}

View File

@@ -1,4 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Factories;
using SharpCompress.Readers;
@@ -26,10 +28,34 @@ public interface IArchiveFactory : IFactory
/// <param name="readerOptions">reading options.</param>
IArchive Open(Stream stream, ReaderOptions? readerOptions = null);
/// <summary>
/// Opens an Archive for random access asynchronously.
/// </summary>
/// <param name="stream">An open, readable and seekable stream.</param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
ValueTask<IAsyncArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo">the file to open.</param>
/// <param name="readerOptions">reading options.</param>
IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null);
/// <summary>
/// Opens an Archive from a FileInfo object asynchronously.
/// </summary>
/// <param name="fileInfo">the file to open.</param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
ValueTask<IAsyncArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
}

View File

@@ -0,0 +1,43 @@
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
public interface IAsyncArchive : IAsyncDisposable
{
IAsyncEnumerable<IArchiveEntry> EntriesAsync { get; }
IAsyncEnumerable<IVolume> VolumesAsync { get; }
ArchiveType Type { get; }
/// <summary>
/// Use this method to extract all entries in an archive in order.
/// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
/// extracted sequentially for the best performance.
/// </summary>
ValueTask<IAsyncReader> ExtractAllEntriesAsync();
/// <summary>
/// Archive is SOLID (this means the Archive saved bytes by reusing information which helps for archives containing many small files).
/// Rar Archives can be SOLID while all 7Zip archives are considered SOLID.
/// </summary>
ValueTask<bool> IsSolidAsync();
/// <summary>
/// This checks to see if all the known entries have IsComplete = true
/// </summary>
ValueTask<bool> IsCompleteAsync();
/// <summary>
/// The total size of the files compressed in the archive.
/// </summary>
ValueTask<long> TotalSizeAsync();
/// <summary>
/// The total size of the files as uncompressed in the archive.
/// </summary>
ValueTask<long> TotalUncompressSizeAsync();
}

View File

@@ -0,0 +1,93 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
public static class IAsyncArchiveExtensions
{
/// <summary>
/// Extract to specific directory asynchronously with progress reporting and cancellation support
/// </summary>
/// <param name="archive">The archive to extract.</param>
/// <param name="destinationDirectory">The folder to extract into.</param>
/// <param name="options">Extraction options.</param>
/// <param name="progress">Optional progress reporter for tracking extraction progress.</param>
/// <param name="cancellationToken">Optional cancellation token.</param>
public static async Task WriteToDirectoryAsync(
this IAsyncArchive archive,
string destinationDirectory,
ExtractionOptions? options = null,
IProgress<ProgressReport>? progress = null,
CancellationToken cancellationToken = default
)
{
// For solid archives (Rar, 7Zip), use the optimized reader-based approach
if (await archive.IsSolidAsync() || archive.Type == ArchiveType.SevenZip)
{
await using var reader = await archive.ExtractAllEntriesAsync();
await reader.WriteAllToDirectoryAsync(destinationDirectory, options, cancellationToken);
}
else
{
// For non-solid archives, extract entries directly
await archive.WriteToDirectoryAsyncInternal(
destinationDirectory,
options,
progress,
cancellationToken
);
}
}
private static async Task WriteToDirectoryAsyncInternal(
this IAsyncArchive archive,
string destinationDirectory,
ExtractionOptions? options,
IProgress<ProgressReport>? progress,
CancellationToken cancellationToken
)
{
// Prepare for progress reporting
var totalBytes = await archive.TotalUncompressSizeAsync();
var bytesRead = 0L;
// Tracking for created directories.
var seenDirectories = new HashSet<string>();
// Extract
await foreach (var entry in archive.EntriesAsync.WithCancellation(cancellationToken))
{
cancellationToken.ThrowIfCancellationRequested();
if (entry.IsDirectory)
{
var dirPath = Path.Combine(
destinationDirectory,
entry.Key.NotNull("Entry Key is null")
);
if (
Path.GetDirectoryName(dirPath + "/") is { } parentDirectory
&& seenDirectories.Add(dirPath)
)
{
Directory.CreateDirectory(parentDirectory);
}
continue;
}
// Use the entry's WriteToDirectoryAsync method which respects ExtractionOptions
await entry
.WriteToDirectoryAsync(destinationDirectory, options, cancellationToken)
.ConfigureAwait(false);
// Update progress
bytesRead += entry.Size;
progress?.Report(new ProgressReport(entry.Key ?? string.Empty, bytesRead, totalBytes));
}
}
}
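
A possible call site for this extension, wiring up cancellation alongside the existing ExtractionOptions flags (a sketch assuming ArchiveFactory.OpenAsync from this branch; the paths, timeout, and option values are illustrative):

// Extract an entire async archive to disk with cancellation support.
using System;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Common;

internal static class ExtractAllSample
{
    public static async Task ExtractAsync(string archivePath, string targetDirectory)
    {
        using var cts = new CancellationTokenSource(TimeSpan.FromMinutes(5));

        await using var archive = await ArchiveFactory.OpenAsync(
            archivePath,
            cancellationToken: cts.Token);

        // ExtractFullPath/Overwrite are the existing ExtractionOptions flags.
        var options = new ExtractionOptions { ExtractFullPath = true, Overwrite = true };

        await archive.WriteToDirectoryAsync(
            targetDirectory,
            options,
            progress: null,
            cancellationToken: cts.Token);
    }
}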

View File

@@ -1,5 +1,7 @@
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Factories;
using SharpCompress.Readers;
@@ -27,10 +29,34 @@ public interface IMultiArchiveFactory : IFactory
/// <param name="readerOptions">reading options.</param>
IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null);
/// <summary>
/// Opens a multi-part archive from streams asynchronously.
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
/// <summary>
/// Constructor with IEnumerable Stream objects, multi and split support.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions">reading options.</param>
IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null);
/// <summary>
/// Opens a multi-part archive from files asynchronously.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
}

View File

@@ -22,7 +22,7 @@ public interface IWritableArchive : IArchive
void SaveTo(Stream stream, WriterOptions options);
Task SaveToAsync(
ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
CancellationToken cancellationToken = default

View File

@@ -44,14 +44,14 @@ public static class IWritableArchiveExtensions
writableArchive.SaveTo(stream, options);
}
public static Task SaveToAsync(
public static ValueTask SaveToAsync(
this IWritableArchive writableArchive,
string filePath,
WriterOptions options,
CancellationToken cancellationToken = default
) => writableArchive.SaveToAsync(new FileInfo(filePath), options, cancellationToken);
public static async Task SaveToAsync(
public static async ValueTask SaveToAsync(
this IWritableArchive writableArchive,
FileInfo fileInfo,
WriterOptions options,

View File

@@ -2,6 +2,8 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
@@ -65,7 +67,13 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
return new StreamRarArchiveVolume(sourceStream, ReaderOptions, i++).AsEnumerable();
}
protected override IReader CreateReaderForSolidExtraction()
protected override IReader CreateReaderForSolidExtraction() =>
CreateReaderForSolidExtractionInternal();
protected override ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync() =>
new(CreateReaderForSolidExtractionInternal());
private RarReader CreateReaderForSolidExtractionInternal()
{
if (this.IsMultipartVolume())
{
@@ -181,6 +189,70 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
);
}
/// <summary>
/// Opens a RarArchive asynchronously from a stream.
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(stream, readerOptions));
}
/// <summary>
/// Opens a RarArchive asynchronously from a FileInfo.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(fileInfo, readerOptions));
}
/// <summary>
/// Opens a RarArchive asynchronously from multiple streams.
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(streams, readerOptions));
}
/// <summary>
/// Opens a RarArchive asynchronously from multiple FileInfo objects.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(fileInfos, readerOptions));
}
public static bool IsRarFile(string filePath) => IsRarFile(new FileInfo(filePath));
public static bool IsRarFile(FileInfo fileInfo)

View File

@@ -92,7 +92,9 @@ public class RarArchiveEntry : RarEntry, IArchiveEntry
return stream;
}
public async Task<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
public async ValueTask<Stream> OpenEntryStreamAsync(
CancellationToken cancellationToken = default
)
{
RarStream stream;
if (IsRarV3)

View File

@@ -105,6 +105,70 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
);
}
/// <summary>
/// Opens a SevenZipArchive asynchronously from a stream.
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(stream, readerOptions));
}
/// <summary>
/// Opens a SevenZipArchive asynchronously from a FileInfo.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(fileInfo, readerOptions));
}
/// <summary>
/// Opens a SevenZipArchive asynchronously from multiple streams.
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(streams, readerOptions));
}
/// <summary>
/// Opens a SevenZipArchive asynchronously from multiple FileInfo objects.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(fileInfos, readerOptions));
}
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
@@ -201,6 +265,9 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
protected override IReader CreateReaderForSolidExtraction() =>
new SevenZipReader(ReaderOptions, this);
protected override ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync() =>
new(new SevenZipReader(ReaderOptions, this));
public override bool IsSolid =>
Entries
.Where(x => !x.IsDirectory)
@@ -212,31 +279,10 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
public override long TotalSize =>
_database?._packSizes.Aggregate(0L, (total, packSize) => total + packSize) ?? 0;
internal sealed class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
private sealed class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
{
private readonly SevenZipArchive _archive;
private SevenZipEntry? _currentEntry;
private Stream? _currentFolderStream;
private CFolder? _currentFolder;
/// <summary>
/// Enables internal diagnostics for tests.
/// When disabled (default), diagnostics properties return null to avoid exposing internal state.
/// </summary>
internal bool DiagnosticsEnabled { get; set; }
/// <summary>
/// Current folder instance used to decide whether the solid folder stream should be reused.
/// Only available when <see cref="DiagnosticsEnabled"/> is true.
/// </summary>
internal object? DiagnosticsCurrentFolder => DiagnosticsEnabled ? _currentFolder : null;
/// <summary>
/// Current shared folder stream instance.
/// Only available when <see cref="DiagnosticsEnabled"/> is true.
/// </summary>
internal Stream? DiagnosticsCurrentFolderStream =>
DiagnosticsEnabled ? _currentFolderStream : null;
internal SevenZipReader(ReaderOptions readerOptions, SevenZipArchive archive)
: base(readerOptions, ArchiveType.SevenZip) => this._archive = archive;
@@ -252,10 +298,9 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
_currentEntry = dir;
yield return dir;
}
// For solid archives (entries in the same folder share a compressed stream),
// we must iterate entries sequentially and maintain the folder stream state
// across entries in the same folder to avoid recreating the decompression
// stream for each file, which breaks contiguous streaming.
// For non-directory entries, yield them without creating shared streams
// Each call to GetEntryStream() will create a fresh decompression stream
// to avoid state corruption issues with async operations
foreach (var entry in entries.Where(x => !x.IsDirectory))
{
_currentEntry = entry;
@@ -265,53 +310,19 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
protected override EntryStream GetEntryStream()
{
// Create a fresh decompression stream for each file (no state sharing).
// However, the LZMA decoder has bugs in its async implementation that cause
// state corruption even on fresh streams. The SyncOnlyStream wrapper
// works around these bugs by forcing async operations to use sync equivalents.
//
// TODO: Fix the LZMA decoder async bugs (in LzmaStream, Decoder, OutWindow)
// so this wrapper is no longer necessary.
var entry = _currentEntry.NotNull("currentEntry is not null");
if (entry.IsDirectory)
{
return CreateEntryStream(Stream.Null);
}
var filePart = (SevenZipFilePart)entry.FilePart;
if (!filePart.Header.HasStream)
{
// Entries with no underlying stream (e.g., empty files or anti-items)
// should return an empty stream, matching previous behavior.
return CreateEntryStream(Stream.Null);
}
var folder = filePart.Folder;
// Check if we're starting a new folder - dispose old folder stream if needed
if (folder != _currentFolder)
{
_currentFolderStream?.Dispose();
_currentFolderStream = null;
_currentFolder = folder;
}
// Create the folder stream once per folder
if (_currentFolderStream is null)
{
_currentFolderStream = _archive._database!.GetFolderStream(
_archive.Volumes.Single().Stream,
folder!,
_archive._database.PasswordProvider
);
}
// Wrap with SyncOnlyStream to work around LZMA async bugs
// Return a ReadOnlySubStream that reads from the shared folder stream
return CreateEntryStream(
new SyncOnlyStream(
new ReadOnlySubStream(_currentFolderStream, entry.Size, leaveOpen: true)
)
);
}
public override void Dispose()
{
_currentFolderStream?.Dispose();
_currentFolderStream = null;
base.Dispose();
return CreateEntryStream(new SyncOnlyStream(entry.FilePart.GetCompressedStream()));
}
}
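
The replacement body above leans on the existing SyncOnlyStream wrapper to keep LZMA decompression on the synchronous code path. That class's source is not part of this hunk; purely to illustrate the pattern the comments describe, a wrapper of that shape would route async reads through the synchronous Read, roughly like this (hypothetical sketch, not the SharpCompress implementation):

// Hypothetical "sync-only" stream wrapper: async reads are funneled through
// the synchronous Read to sidestep async bugs in the underlying decoder.
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;

internal sealed class SyncOnlyStreamSketch : Stream
{
    private readonly Stream _inner;

    public SyncOnlyStreamSketch(Stream inner) => _inner = inner;

    public override bool CanRead => _inner.CanRead;
    public override bool CanSeek => false;
    public override bool CanWrite => false;
    public override long Length => _inner.Length;

    public override long Position
    {
        get => _inner.Position;
        set => throw new NotSupportedException();
    }

    public override int Read(byte[] buffer, int offset, int count) =>
        _inner.Read(buffer, offset, count);

    // Force async callers onto the synchronous implementation.
    public override Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
    {
        cancellationToken.ThrowIfCancellationRequested();
        return Task.FromResult(Read(buffer, offset, count));
    }

    public override void Flush() { }
    public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
    public override void SetLength(long value) => throw new NotSupportedException();
    public override void Write(byte[] buffer, int offset, int count) => throw new NotSupportedException();

    protected override void Dispose(bool disposing)
    {
        if (disposing)
        {
            _inner.Dispose();
        }
        base.Dispose(disposing);
    }
}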

View File

@@ -12,8 +12,9 @@ public class SevenZipArchiveEntry : SevenZipEntry, IArchiveEntry
public Stream OpenEntryStream() => FilePart.GetCompressedStream();
public Task<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default) =>
Task.FromResult(OpenEntryStream());
public async ValueTask<Stream> OpenEntryStreamAsync(
CancellationToken cancellationToken = default
) => OpenEntryStream();
public IArchive Archive { get; }

View File

@@ -103,6 +103,70 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
);
}
/// <summary>
/// Opens a TarArchive asynchronously from a stream.
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(stream, readerOptions));
}
/// <summary>
/// Opens a TarArchive asynchronously from a FileInfo.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(fileInfo, readerOptions));
}
/// <summary>
/// Opens a TarArchive asynchronously from multiple streams.
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(streams, readerOptions));
}
/// <summary>
/// Opens a TarArchive asynchronously from multiple FileInfo objects.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(fileInfos, readerOptions));
}
public static bool IsTarFile(string filePath) => IsTarFile(new FileInfo(filePath));
public static bool IsTarFile(FileInfo fileInfo)
@@ -180,7 +244,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
using (var entryStream = entry.OpenEntryStream())
{
using var memoryStream = new MemoryStream();
entryStream.CopyTo(memoryStream, Constants.BufferSize);
entryStream.CopyTo(memoryStream);
memoryStream.Position = 0;
var bytes = memoryStream.ToArray();
@@ -259,7 +323,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
}
}
protected override async Task SaveToAsync(
protected override async ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
IEnumerable<TarArchiveEntry> oldEntries,
@@ -302,4 +366,11 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
stream.Position = 0;
return TarReader.Open(stream);
}
protected override ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync()
{
var stream = Volumes.Single().Stream;
stream.Position = 0;
return new(TarReader.Open(stream));
}
}

View File

@@ -14,9 +14,9 @@ public class TarArchiveEntry : TarEntry, IArchiveEntry
public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream().NotNull();
public virtual Task<Stream> OpenEntryStreamAsync(
public async ValueTask<Stream> OpenEntryStreamAsync(
CancellationToken cancellationToken = default
) => Task.FromResult(OpenEntryStream());
) => OpenEntryStream();
#region IArchiveEntry Members

View File

@@ -124,27 +124,102 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
);
}
public static bool IsZipFile(string filePath, string? password = null) =>
IsZipFile(new FileInfo(filePath), password);
/// <summary>
/// Opens a ZipArchive asynchronously from a stream.
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(stream, readerOptions));
}
public static bool IsZipFile(FileInfo fileInfo, string? password = null)
/// <summary>
/// Opens a ZipArchive asynchronously from a FileInfo.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(fileInfo, readerOptions));
}
/// <summary>
/// Opens a ZipArchive asynchronously from multiple streams.
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(streams, readerOptions));
}
/// <summary>
/// Opens a ZipArchive asynchronously from multiple FileInfo objects.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(fileInfos, readerOptions));
}
public static bool IsZipFile(
string filePath,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => IsZipFile(new FileInfo(filePath), password, bufferSize);
public static bool IsZipFile(
FileInfo fileInfo,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return IsZipFile(stream, password);
return IsZipFile(stream, password, bufferSize);
}
public static bool IsZipFile(Stream stream, string? password = null)
public static bool IsZipFile(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: Constants.BufferSize);
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
}
var header = headerFactory
@@ -166,14 +241,18 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
}
}
public static bool IsZipMulti(Stream stream, string? password = null)
public static bool IsZipMulti(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: Constants.BufferSize);
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
}
var header = headerFactory
@@ -184,7 +263,95 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
if (stream.CanSeek) //could be multipart. Test for central directory - might not be z64 safe
{
var z = new SeekableZipHeaderFactory(password, new ArchiveEncoding());
var x = z.ReadSeekableHeader(stream).FirstOrDefault();
var x = z.ReadSeekableHeader(stream, useSync: true).FirstOrDefault();
return x?.ZipHeaderType == ZipHeaderType.DirectoryEntry;
}
else
{
return false;
}
}
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
return true;
}
catch
{
return false;
}
}
public static async ValueTask<bool> IsZipFileAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
}
var header = await headerFactory
.ReadStreamHeaderAsync(stream)
.Where(x => x.ZipHeaderType != ZipHeaderType.Split)
.FirstOrDefaultAsync();
if (header is null)
{
return false;
}
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
return true;
}
catch
{
return false;
}
}
public static async ValueTask<bool> IsZipMultiAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
}
var header = headerFactory
.ReadStreamHeader(stream)
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
if (header is null)
{
if (stream.CanSeek) //could be multipart. Test for central directory - might not be z64 safe
{
var z = new SeekableZipHeaderFactory(password, new ArchiveEncoding());
ZipHeader? x = null;
await foreach (
var h in z.ReadSeekableHeaderAsync(stream)
.WithCancellation(cancellationToken)
)
{
x = h;
break;
}
return x?.ZipHeaderType == ZipHeaderType.DirectoryEntry;
}
else
@@ -214,7 +381,7 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
if (streams.Count() > 1) //test part 2 - true = multipart not split
{
streams[1].Position += 4; //skip the POST_DATA_DESCRIPTOR to prevent an exception
var isZip = IsZipFile(streams[1], ReaderOptions.Password);
var isZip = IsZipFile(streams[1], ReaderOptions.Password, ReaderOptions.BufferSize);
streams[1].Position -= 4;
if (isZip)
{
@@ -239,7 +406,9 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
protected override IEnumerable<ZipArchiveEntry> LoadEntries(IEnumerable<ZipVolume> volumes)
{
var vols = volumes.ToArray();
foreach (var h in headerFactory.NotNull().ReadSeekableHeader(vols.Last().Stream))
foreach (
var h in headerFactory.NotNull().ReadSeekableHeader(vols.Last().Stream, useSync: true)
)
{
if (h != null)
{
@@ -283,6 +452,59 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
}
}
protected override async IAsyncEnumerable<ZipArchiveEntry> LoadEntriesAsync(
IAsyncEnumerable<ZipVolume> volumes
)
{
var vols = await volumes.ToListAsync();
var volsArray = vols.ToArray();
await foreach (
var h in headerFactory.NotNull().ReadSeekableHeaderAsync(volsArray.Last().Stream)
)
{
if (h != null)
{
switch (h.ZipHeaderType)
{
case ZipHeaderType.DirectoryEntry:
{
var deh = (DirectoryEntryHeader)h;
Stream s;
if (
deh.RelativeOffsetOfEntryHeader + deh.CompressedSize
> volsArray[deh.DiskNumberStart].Stream.Length
)
{
var v = volsArray.Skip(deh.DiskNumberStart).ToArray();
s = new SourceStream(
v[0].Stream,
i => i < v.Length ? v[i].Stream : null,
new ReaderOptions() { LeaveStreamOpen = true }
);
}
else
{
s = volsArray[deh.DiskNumberStart].Stream;
}
yield return new ZipArchiveEntry(
this,
new SeekableZipFilePart(headerFactory.NotNull(), deh, s)
);
}
break;
case ZipHeaderType.DirectoryEnd:
{
var bytes = ((DirectoryEndHeader)h).Comment ?? Array.Empty<byte>();
volsArray.Last().Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
yield break;
}
}
}
}
}
public void SaveTo(Stream stream) => SaveTo(stream, new WriterOptions(CompressionType.Deflate));
protected override void SaveTo(
@@ -314,7 +536,7 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
}
}
protected override async Task SaveToAsync(
protected override async ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
IEnumerable<ZipArchiveEntry> oldEntries,
@@ -370,4 +592,11 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
((IStreamStack)stream).StackSeek(0);
return ZipReader.Open(stream, ReaderOptions, Entries);
}
protected override ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync()
{
var stream = Volumes.Single().Stream;
stream.Position = 0;
return new(ZipReader.Open(stream));
}
}
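A hedged caller sketch (assumed, not from the changeset) combining the new async format probe with the async open path; IAsyncArchive is again assumed to implement IAsyncDisposable.

using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives.Zip;
using SharpCompress.Readers;

public static class ZipOpenAsyncExample
{
    public static async Task<bool> TryOpenZipAsync(Stream stream, CancellationToken ct = default)
    {
        // IsZipFileAsync reads header bytes through the streaming header factory,
        // so rewind afterwards (this sketch assumes a seekable stream).
        if (!await ZipArchive.IsZipFileAsync(stream, cancellationToken: ct))
        {
            return false;
        }
        stream.Position = 0;

        await using var archive = await ZipArchive.OpenAsync(stream, new ReaderOptions(), ct);
        return true;
    }
}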

View File

@@ -13,9 +13,17 @@ public class ZipArchiveEntry : ZipEntry, IArchiveEntry
public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream().NotNull();
public virtual Task<Stream> OpenEntryStreamAsync(
public async ValueTask<Stream> OpenEntryStreamAsync(
CancellationToken cancellationToken = default
) => Task.FromResult(OpenEntryStream());
)
{
var part = Parts.Single();
if (part is SeekableZipFilePart seekablePart)
{
return (await seekablePart.GetCompressedStreamAsync(cancellationToken)).NotNull();
}
return OpenEntryStream();
}
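A small consumer sketch (assumptions: the entry is obtained elsewhere, e.g. from the archive's entry collection, and the target runtime's Stream implements IAsyncDisposable):

using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives.Zip;

public static class ZipEntryReadExample
{
    public static async Task<byte[]> ReadEntryBytesAsync(ZipArchiveEntry entry, CancellationToken ct = default)
    {
        // Seekable parts take the async local-header path shown above; other parts fall back to the sync stream.
        await using var entryStream = await entry.OpenEntryStreamAsync(ct);
        using var buffer = new MemoryStream();
        await entryStream.CopyToAsync(buffer, 81920, ct);
        return buffer.ToArray();
    }
}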
#region IArchiveEntry Members

View File

@@ -46,7 +46,7 @@ namespace SharpCompress.Common.Ace.Headers
}
}
public AceFileHeader(ArchiveEncoding archiveEncoding)
public AceFileHeader(IArchiveEncoding archiveEncoding)
: base(archiveEncoding, AceHeaderType.FILE) { }
/// <summary>

View File

@@ -31,13 +31,13 @@ namespace SharpCompress.Common.Ace.Headers
(byte)'*',
];
public AceHeader(ArchiveEncoding archiveEncoding, AceHeaderType type)
public AceHeader(IArchiveEncoding archiveEncoding, AceHeaderType type)
{
AceHeaderType = type;
ArchiveEncoding = archiveEncoding;
}
public ArchiveEncoding ArchiveEncoding { get; }
public IArchiveEncoding ArchiveEncoding { get; }
public AceHeaderType AceHeaderType { get; }
public ushort HeaderFlags { get; set; }

View File

@@ -22,7 +22,7 @@ namespace SharpCompress.Common.Ace.Headers
public List<byte> Comment { get; set; } = new();
public byte AceVersion { get; private set; }
public AceMainHeader(ArchiveEncoding archiveEncoding)
public AceMainHeader(IArchiveEncoding archiveEncoding)
: base(archiveEncoding, AceHeaderType.MAIN) { }
/// <summary>

View File

@@ -7,7 +7,7 @@ namespace SharpCompress.Common.Arc
{
public class ArcEntryHeader
{
public ArchiveEncoding ArchiveEncoding { get; }
public IArchiveEncoding ArchiveEncoding { get; }
public CompressionType CompressionMethod { get; private set; }
public string? Name { get; private set; }
public long CompressedSize { get; private set; }
@@ -16,7 +16,7 @@ namespace SharpCompress.Common.Arc
public long OriginalSize { get; private set; }
public long DataStartPosition { get; private set; }
public ArcEntryHeader(ArchiveEncoding archiveEncoding)
public ArcEntryHeader(IArchiveEncoding archiveEncoding)
{
this.ArchiveEncoding = archiveEncoding;
}

View File

@@ -3,55 +3,11 @@ using System.Text;
namespace SharpCompress.Common;
public class ArchiveEncoding
public class ArchiveEncoding : IArchiveEncoding
{
/// <summary>
/// Default encoding to use when archive format doesn't specify one.
/// </summary>
public Encoding? Default { get; set; }
/// <summary>
/// ArchiveEncoding used by encryption schemes which don't comply with RFC 2898.
/// </summary>
public Encoding? Password { get; set; }
/// <summary>
/// Set this encoding when you want to force it for all encoding operations.
/// </summary>
public Encoding Default { get; set; } = Encoding.Default;
public Encoding Password { get; set; } = Encoding.Default;
public Encoding UTF8 { get; set; } = Encoding.UTF8;
public Encoding? Forced { get; set; }
/// <summary>
/// Set this when you want to use a custom method for all decoding operations.
/// </summary>
/// <returns>string Func(bytes, index, length)</returns>
public Func<byte[], int, int, string>? CustomDecoder { get; set; }
public ArchiveEncoding()
: this(Encoding.Default, Encoding.Default) { }
public ArchiveEncoding(Encoding def, Encoding password)
{
Default = def;
Password = password;
}
#if !NETFRAMEWORK
static ArchiveEncoding() => Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
#endif
public string Decode(byte[] bytes) => Decode(bytes, 0, bytes.Length);
public string Decode(byte[] bytes, int start, int length) =>
GetDecoder().Invoke(bytes, start, length);
public string DecodeUTF8(byte[] bytes) => Encoding.UTF8.GetString(bytes, 0, bytes.Length);
public byte[] Encode(string str) => GetEncoding().GetBytes(str);
public Encoding GetEncoding() => Forced ?? Default ?? Encoding.UTF8;
public Encoding GetPasswordEncoding() => Password ?? Encoding.UTF8;
public Func<byte[], int, int, string> GetDecoder() =>
CustomDecoder ?? ((bytes, index, count) => GetEncoding().GetString(bytes, index, count));
public Func<byte[], int, int, EncodingType, string>? CustomDecoder { get; set; }
}

View File

@@ -0,0 +1,87 @@
using System;
using System.Text;
namespace SharpCompress.Common;
/// <summary>
/// Specifies the type of encoding to use.
/// </summary>
public enum EncodingType
{
/// <summary>
/// Uses the default encoding.
/// </summary>
Default,
/// <summary>
/// Uses UTF-8 encoding.
/// </summary>
UTF8,
}
/// <summary>
/// Provides extension methods for archive encoding.
/// </summary>
public static class ArchiveEncodingExtensions
{
#if !NETFRAMEWORK
/// <summary>
/// Registers the code pages encoding provider.
/// </summary>
static ArchiveEncodingExtensions() =>
Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
#endif
extension(IArchiveEncoding encoding)
{
/// <summary>
/// Gets the encoding based on the archive encoding settings.
/// </summary>
/// <param name="useUtf8">Whether to use UTF-8.</param>
/// <returns>The encoding.</returns>
public Encoding GetEncoding(bool useUtf8 = false) =>
encoding.Forced ?? (useUtf8 ? encoding.UTF8 : encoding.Default);
/// <summary>
/// Gets the decoder function for the archive encoding.
/// </summary>
/// <returns>The decoder function.</returns>
public Func<byte[], int, int, EncodingType, string> GetDecoder() =>
encoding.CustomDecoder
?? (
(bytes, index, count, type) =>
encoding.GetEncoding(type == EncodingType.UTF8).GetString(bytes, index, count)
);
/// <summary>
/// Encodes a string using the default encoding.
/// </summary>
/// <param name="str">The string to encode.</param>
/// <returns>The encoded bytes.</returns>
public byte[] Encode(string str) => encoding.Default.GetBytes(str);
/// <summary>
/// Decodes bytes using the specified encoding type.
/// </summary>
/// <param name="bytes">The bytes to decode.</param>
/// <param name="type">The encoding type.</param>
/// <returns>The decoded string.</returns>
public string Decode(byte[] bytes, EncodingType type = EncodingType.Default) =>
encoding.Decode(bytes, 0, bytes.Length, type);
/// <summary>
/// Decodes a portion of bytes using the specified encoding type.
/// </summary>
/// <param name="bytes">The bytes to decode.</param>
/// <param name="start">The start index.</param>
/// <param name="length">The length.</param>
/// <param name="type">The encoding type.</param>
/// <returns>The decoded string.</returns>
public string Decode(
byte[] bytes,
int start,
int length,
EncodingType type = EncodingType.Default
) => encoding.GetDecoder()(bytes, start, length, type);
}
}
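A behavioural sketch of the extension members, assuming the built-in ArchiveEncoding as the IArchiveEncoding implementation (not taken from the changeset):

using System.Text;
using SharpCompress.Common;

public static class ArchiveEncodingExample
{
    public static void Demo()
    {
        IArchiveEncoding encoding = new ArchiveEncoding
        {
            Default = Encoding.ASCII, // real archives would typically use a registered code page here
        };

        var bytes = encoding.Encode("file.txt");                  // encodes with Default
        var name = encoding.Decode(bytes);                        // EncodingType.Default -> Default encoding
        var utf8Name = encoding.Decode(bytes, EncodingType.UTF8); // selects the UTF8 encoding instead

        encoding.Forced = Encoding.UTF8;
        var forced = encoding.GetEncoding(useUtf8: false);        // Forced wins over Default/UTF8
    }
}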

View File

@@ -0,0 +1,95 @@
using System;
using System.Buffers.Binary;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common
{
public sealed class AsyncBinaryReader : IDisposable
{
private readonly Stream _stream;
private readonly Stream _originalStream;
private readonly bool _leaveOpen;
private readonly byte[] _buffer = new byte[8];
private bool _disposed;
public AsyncBinaryReader(Stream stream, bool leaveOpen = false, int bufferSize = 4096)
{
_originalStream = stream ?? throw new ArgumentNullException(nameof(stream));
_leaveOpen = leaveOpen;
// Use the stream directly without wrapping in BufferedStream
// BufferedStream uses synchronous Read internally which doesn't work with async-only streams
// SharpCompress uses SharpCompressStream for buffering which supports true async reads
_stream = stream;
}
public Stream BaseStream => _stream;
public async ValueTask<byte> ReadByteAsync(CancellationToken ct = default)
{
await _stream.ReadExactAsync(_buffer, 0, 1, ct).ConfigureAwait(false);
return _buffer[0];
}
public async ValueTask<ushort> ReadUInt16Async(CancellationToken ct = default)
{
await _stream.ReadExactAsync(_buffer, 0, 2, ct).ConfigureAwait(false);
return BinaryPrimitives.ReadUInt16LittleEndian(_buffer);
}
public async ValueTask<uint> ReadUInt32Async(CancellationToken ct = default)
{
await _stream.ReadExactAsync(_buffer, 0, 4, ct).ConfigureAwait(false);
return BinaryPrimitives.ReadUInt32LittleEndian(_buffer);
}
public async ValueTask<ulong> ReadUInt64Async(CancellationToken ct = default)
{
await _stream.ReadExactAsync(_buffer, 0, 8, ct).ConfigureAwait(false);
return BinaryPrimitives.ReadUInt64LittleEndian(_buffer);
}
public async ValueTask<byte[]> ReadBytesAsync(int count, CancellationToken ct = default)
{
var result = new byte[count];
await _stream.ReadExactAsync(result, 0, count, ct).ConfigureAwait(false);
return result;
}
public void Dispose()
{
if (_disposed)
{
return;
}
_disposed = true;
// Dispose the original stream if we own it
if (!_leaveOpen)
{
_originalStream.Dispose();
}
}
#if NET6_0_OR_GREATER
public async ValueTask DisposeAsync()
{
if (_disposed)
{
return;
}
_disposed = true;
// Dispose the original stream if we own it
if (!_leaveOpen)
{
await _originalStream.DisposeAsync().ConfigureAwait(false);
}
}
#endif
}
}
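A standalone sketch of the reader against a hypothetical record layout (not a SharpCompress format): each ReadXxxAsync call fills the internal 8-byte buffer via ReadExactAsync and decodes little-endian.

using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;

public static class AsyncBinaryReaderExample
{
    public static async Task<byte[]> ReadLengthPrefixedAsync(Stream stream, CancellationToken ct = default)
    {
        // leaveOpen: true keeps the caller responsible for the stream's lifetime.
        using var reader = new AsyncBinaryReader(stream, leaveOpen: true);

        _ = await reader.ReadUInt32Async(ct);            // skip a 4-byte little-endian signature field
        var length = await reader.ReadUInt16Async(ct);   // 2-byte little-endian length prefix
        return await reader.ReadBytesAsync(length, ct);  // payload of 'length' bytes
    }
}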

View File

@@ -1,10 +0,0 @@
namespace SharpCompress.Common;
public static class Constants
{
/// <summary>
/// The default buffer size for stream operations, matching .NET's Stream.CopyTo default of 81920 bytes.
/// This can be modified globally at runtime.
/// </summary>
public static int BufferSize { get; set; } = 81920;
}

View File

@@ -56,7 +56,7 @@ public class EntryStream : Stream, IStreamStack
/// <summary>
/// Asynchronously skip the rest of the entry stream.
/// </summary>
public async Task SkipEntryAsync(CancellationToken cancellationToken = default)
public async ValueTask SkipEntryAsync(CancellationToken cancellationToken = default)
{
await this.SkipAsync(cancellationToken).ConfigureAwait(false);
_completed = true;

View File

@@ -124,11 +124,11 @@ internal static class ExtractionMethods
}
}
public static async Task WriteEntryToDirectoryAsync(
public static async ValueTask WriteEntryToDirectoryAsync(
IEntry entry,
string destinationDirectory,
ExtractionOptions? options,
Func<string, ExtractionOptions?, CancellationToken, Task> writeAsync,
Func<string, ExtractionOptions?, CancellationToken, ValueTask> writeAsync,
CancellationToken cancellationToken = default
)
{
@@ -197,11 +197,11 @@ internal static class ExtractionMethods
}
}
public static async Task WriteEntryToFileAsync(
public static async ValueTask WriteEntryToFileAsync(
IEntry entry,
string destinationFileName,
ExtractionOptions? options,
Func<string, FileMode, CancellationToken, Task> openAndWriteAsync,
Func<string, FileMode, CancellationToken, ValueTask> openAndWriteAsync,
CancellationToken cancellationToken = default
)
{

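These helpers are internal; the sketch below (purely illustrative names) only shows the delegate shape implied by the signature change: an async lambda now has to bind to a ValueTask-returning Func instead of a Task-returning one.

using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;

public static class ValueTaskDelegateExample
{
    // Shape of the openAndWriteAsync callback: Func<string, FileMode, CancellationToken, ValueTask>.
    public static readonly Func<string, FileMode, CancellationToken, ValueTask> OpenAndWriteAsync =
        async (destinationFileName, fileMode, ct) =>
        {
            await using var output = new FileStream(
                destinationFileName,
                fileMode,
                FileAccess.Write,
                FileShare.None,
                bufferSize: 81920,
                useAsync: true
            );
            // The real callback copies the entry's stream here; this placeholder just flushes.
            await output.FlushAsync(ct);
        };
}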
View File

@@ -1,12 +1,14 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common;
public abstract class FilePart
{
protected FilePart(ArchiveEncoding archiveEncoding) => ArchiveEncoding = archiveEncoding;
protected FilePart(IArchiveEncoding archiveEncoding) => ArchiveEncoding = archiveEncoding;
internal ArchiveEncoding ArchiveEncoding { get; }
internal IArchiveEncoding ArchiveEncoding { get; }
internal abstract string? FilePartName { get; }
public int Index { get; set; }
@@ -14,4 +16,8 @@ public abstract class FilePart
internal abstract Stream? GetCompressedStream();
internal abstract Stream? GetRawStream();
internal bool Skipped { get; set; }
internal virtual ValueTask<Stream?> GetCompressedStreamAsync(
CancellationToken cancellationToken = default
) => new(GetCompressedStream());
}

View File

@@ -13,7 +13,7 @@ internal sealed class GZipFilePart : FilePart
private string? _name;
private readonly Stream _stream;
internal GZipFilePart(Stream stream, ArchiveEncoding archiveEncoding)
internal GZipFilePart(Stream stream, IArchiveEncoding archiveEncoding)
: base(archiveEncoding)
{
_stream = stream;

View File

@@ -0,0 +1,36 @@
using System;
using System.Text;
namespace SharpCompress.Common;
/// <summary>
/// Defines the encoding settings for archives.
/// </summary>
public interface IArchiveEncoding
{
/// <summary>
/// Default encoding to use when archive format doesn't specify one. Required and defaults to Encoding.Default.
/// </summary>
public Encoding Default { get; set; }
/// <summary>
/// ArchiveEncoding used by encryption schemes which don't comply with RFC 2898. Required and defaults to Encoding.Default.
/// </summary>
public Encoding Password { get; set; }
/// <summary>
/// Default encoding to use when archive format specifies UTF-8 encoding. Required and defaults to Encoding.UTF8.
/// </summary>
public Encoding UTF8 { get; set; }
/// <summary>
/// Set this encoding when you want to force it for all encoding operations.
/// </summary>
public Encoding? Forced { get; set; }
/// <summary>
/// Set this when you want to use a custom method for all decoding operations.
/// </summary>
/// <returns>string Func(bytes, index, length, EncodingType)</returns>
public Func<byte[], int, int, EncodingType, string>? CustomDecoder { get; set; }
}

View File

@@ -7,5 +7,5 @@ public class OptionsBase
/// </summary>
public bool LeaveStreamOpen { get; set; } = true;
public ArchiveEncoding ArchiveEncoding { get; set; } = new();
public IArchiveEncoding ArchiveEncoding { get; set; } = new ArchiveEncoding();
}
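Because the options now hold IArchiveEncoding, a caller can either keep the built-in ArchiveEncoding or supply its own implementation; a hypothetical one is sketched below.

using System;
using System.Text;
using SharpCompress.Common;
using SharpCompress.Readers;

// Hypothetical implementation that forces UTF-8 for every operation.
public sealed class ForcedUtf8ArchiveEncoding : IArchiveEncoding
{
    public Encoding Default { get; set; } = Encoding.UTF8;
    public Encoding Password { get; set; } = Encoding.UTF8;
    public Encoding UTF8 { get; set; } = Encoding.UTF8;
    public Encoding? Forced { get; set; } = Encoding.UTF8;
    public Func<byte[], int, int, EncodingType, string>? CustomDecoder { get; set; }
}

public static class ReaderOptionsEncodingExample
{
    public static ReaderOptions CreateUtf8Options() =>
        new ReaderOptions { ArchiveEncoding = new ForcedUtf8ArchiveEncoding() };
}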

View File

@@ -13,7 +13,7 @@ internal class RarHeader : IRarHeader
internal static RarHeader? TryReadBase(
RarCrcBinaryReader reader,
bool isRar5,
ArchiveEncoding archiveEncoding
IArchiveEncoding archiveEncoding
)
{
try
@@ -26,7 +26,7 @@ internal class RarHeader : IRarHeader
}
}
private RarHeader(RarCrcBinaryReader reader, bool isRar5, ArchiveEncoding archiveEncoding)
private RarHeader(RarCrcBinaryReader reader, bool isRar5, IArchiveEncoding archiveEncoding)
{
_headerType = HeaderType.Null;
_isRar5 = isRar5;
@@ -115,7 +115,7 @@ internal class RarHeader : IRarHeader
protected int HeaderSize { get; }
internal ArchiveEncoding ArchiveEncoding { get; }
internal IArchiveEncoding ArchiveEncoding { get; }
/// <summary>
/// Extra header size.

View File

@@ -15,7 +15,7 @@ internal class SevenZipFilePart : FilePart
ArchiveDatabase database,
int index,
CFileItem fileEntry,
ArchiveEncoding archiveEncoding
IArchiveEncoding archiveEncoding
)
: base(archiveEncoding)
{

View File

@@ -11,7 +11,7 @@ internal sealed class TarHeader
internal static readonly DateTime EPOCH = new(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
public TarHeader(
ArchiveEncoding archiveEncoding,
IArchiveEncoding archiveEncoding,
TarHeaderWriteFormat writeFormat = TarHeaderWriteFormat.GNU_TAR_LONG_LINK
)
{
@@ -30,7 +30,7 @@ internal sealed class TarHeader
internal DateTime LastModifiedTime { get; set; }
internal EntryType EntryType { get; set; }
internal Stream? PackedStream { get; set; }
internal ArchiveEncoding ArchiveEncoding { get; }
internal IArchiveEncoding ArchiveEncoding { get; }
internal const int BLOCK_SIZE = 512;

View File

@@ -54,7 +54,7 @@ public class TarEntry : Entry
StreamingMode mode,
Stream stream,
CompressionType compressionType,
ArchiveEncoding archiveEncoding
IArchiveEncoding archiveEncoding
)
{
foreach (var header in TarHeaderFactory.ReadHeader(mode, stream, archiveEncoding))

View File

@@ -10,7 +10,7 @@ internal static class TarHeaderFactory
internal static IEnumerable<TarHeader?> ReadHeader(
StreamingMode mode,
Stream stream,
ArchiveEncoding archiveEncoding
IArchiveEncoding archiveEncoding
)
{
while (true)

View File

@@ -1,4 +1,5 @@
using System.IO;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
@@ -19,6 +20,18 @@ internal class DirectoryEndHeader : ZipHeader
Comment = reader.ReadBytes(CommentLength);
}
internal override async ValueTask Read(AsyncBinaryReader reader)
{
VolumeNumber = await reader.ReadUInt16Async();
FirstVolumeWithDirectory = await reader.ReadUInt16Async();
TotalNumberOfEntriesInDisk = await reader.ReadUInt16Async();
TotalNumberOfEntries = await reader.ReadUInt16Async();
DirectorySize = await reader.ReadUInt32Async();
DirectoryStartOffsetRelativeToDisk = await reader.ReadUInt32Async();
CommentLength = await reader.ReadUInt16Async();
Comment = await reader.ReadBytesAsync(CommentLength);
}
public ushort VolumeNumber { get; private set; }
public ushort FirstVolumeWithDirectory { get; private set; }

View File

@@ -1,11 +1,12 @@
using System.IO;
using System.Linq;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
internal class DirectoryEntryHeader : ZipFileEntry
{
public DirectoryEntryHeader(ArchiveEncoding archiveEncoding)
public DirectoryEntryHeader(IArchiveEncoding archiveEncoding)
: base(ZipHeaderType.DirectoryEntry, archiveEncoding) { }
internal override void Read(BinaryReader reader)
@@ -31,7 +32,37 @@ internal class DirectoryEntryHeader : ZipFileEntry
var extra = reader.ReadBytes(extraLength);
var comment = reader.ReadBytes(commentLength);
// According to .ZIP File Format Specification
ProcessReadData(name, extra, comment);
}
internal override async ValueTask Read(AsyncBinaryReader reader)
{
Version = await reader.ReadUInt16Async();
VersionNeededToExtract = await reader.ReadUInt16Async();
Flags = (HeaderFlags)await reader.ReadUInt16Async();
CompressionMethod = (ZipCompressionMethod)await reader.ReadUInt16Async();
OriginalLastModifiedTime = LastModifiedTime = await reader.ReadUInt16Async();
OriginalLastModifiedDate = LastModifiedDate = await reader.ReadUInt16Async();
Crc = await reader.ReadUInt32Async();
CompressedSize = await reader.ReadUInt32Async();
UncompressedSize = await reader.ReadUInt32Async();
var nameLength = await reader.ReadUInt16Async();
var extraLength = await reader.ReadUInt16Async();
var commentLength = await reader.ReadUInt16Async();
DiskNumberStart = await reader.ReadUInt16Async();
InternalFileAttributes = await reader.ReadUInt16Async();
ExternalFileAttributes = await reader.ReadUInt32Async();
RelativeOffsetOfEntryHeader = await reader.ReadUInt32Async();
var name = await reader.ReadBytesAsync(nameLength);
var extra = await reader.ReadBytesAsync(extraLength);
var comment = await reader.ReadBytesAsync(commentLength);
ProcessReadData(name, extra, comment);
}
private void ProcessReadData(byte[] name, byte[] extra, byte[] comment)
{
//
// For example: https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT
//
@@ -41,8 +72,8 @@ internal class DirectoryEntryHeader : ZipFileEntry
if (Flags.HasFlag(HeaderFlags.Efs))
{
Name = ArchiveEncoding.DecodeUTF8(name);
Comment = ArchiveEncoding.DecodeUTF8(comment);
Name = ArchiveEncoding.Decode(name, EncodingType.UTF8);
Comment = ArchiveEncoding.Decode(comment, EncodingType.UTF8);
}
else
{

View File

@@ -1,4 +1,5 @@
using System.IO;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
@@ -8,4 +9,6 @@ internal class IgnoreHeader : ZipHeader
: base(type) { }
internal override void Read(BinaryReader reader) { }
internal override ValueTask Read(AsyncBinaryReader reader) => default;
}

View File

@@ -1,13 +1,12 @@
using System.IO;
using System.Linq;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
internal class LocalEntryHeader : ZipFileEntry
internal class LocalEntryHeader(IArchiveEncoding archiveEncoding)
: ZipFileEntry(ZipHeaderType.LocalEntry, archiveEncoding)
{
public LocalEntryHeader(ArchiveEncoding archiveEncoding)
: base(ZipHeaderType.LocalEntry, archiveEncoding) { }
internal override void Read(BinaryReader reader)
{
Version = reader.ReadUInt16();
@@ -23,7 +22,29 @@ internal class LocalEntryHeader : ZipFileEntry
var name = reader.ReadBytes(nameLength);
var extra = reader.ReadBytes(extraLength);
// According to .ZIP File Format Specification
ProcessReadData(name, extra);
}
internal override async ValueTask Read(AsyncBinaryReader reader)
{
Version = await reader.ReadUInt16Async();
Flags = (HeaderFlags)await reader.ReadUInt16Async();
CompressionMethod = (ZipCompressionMethod)await reader.ReadUInt16Async();
OriginalLastModifiedTime = LastModifiedTime = await reader.ReadUInt16Async();
OriginalLastModifiedDate = LastModifiedDate = await reader.ReadUInt16Async();
Crc = await reader.ReadUInt32Async();
CompressedSize = await reader.ReadUInt32Async();
UncompressedSize = await reader.ReadUInt32Async();
var nameLength = await reader.ReadUInt16Async();
var extraLength = await reader.ReadUInt16Async();
var name = await reader.ReadBytesAsync(nameLength);
var extra = await reader.ReadBytesAsync(extraLength);
ProcessReadData(name, extra);
}
private void ProcessReadData(byte[] name, byte[] extra)
{
//
// For example: https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT
//
@@ -33,7 +54,7 @@ internal class LocalEntryHeader : ZipFileEntry
if (Flags.HasFlag(HeaderFlags.Efs))
{
Name = ArchiveEncoding.DecodeUTF8(name);
Name = ArchiveEncoding.Decode(name, EncodingType.UTF8);
}
else
{

View File

@@ -1,5 +1,6 @@
using System;
using System.IO;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
@@ -9,4 +10,7 @@ internal class SplitHeader : ZipHeader
: base(ZipHeaderType.Split) { }
internal override void Read(BinaryReader reader) => throw new NotImplementedException();
internal override ValueTask Read(AsyncBinaryReader reader) =>
throw new NotImplementedException();
}

View File

@@ -1,4 +1,5 @@
using System.IO;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
@@ -26,6 +27,25 @@ internal class Zip64DirectoryEndHeader : ZipHeader
);
}
internal override async ValueTask Read(AsyncBinaryReader reader)
{
SizeOfDirectoryEndRecord = (long)await reader.ReadUInt64Async();
VersionMadeBy = await reader.ReadUInt16Async();
VersionNeededToExtract = await reader.ReadUInt16Async();
VolumeNumber = await reader.ReadUInt32Async();
FirstVolumeWithDirectory = await reader.ReadUInt32Async();
TotalNumberOfEntriesInDisk = (long)await reader.ReadUInt64Async();
TotalNumberOfEntries = (long)await reader.ReadUInt64Async();
DirectorySize = (long)await reader.ReadUInt64Async();
DirectoryStartOffsetRelativeToDisk = (long)await reader.ReadUInt64Async();
DataSector = await reader.ReadBytesAsync(
(int)(
SizeOfDirectoryEndRecord
- SIZE_OF_FIXED_HEADER_DATA_EXCEPT_SIGNATURE_AND_SIZE_FIELDS
)
);
}
private const int SIZE_OF_FIXED_HEADER_DATA_EXCEPT_SIGNATURE_AND_SIZE_FIELDS = 44;
public long SizeOfDirectoryEndRecord { get; private set; }

View File

@@ -1,12 +1,10 @@
using System.IO;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
internal class Zip64DirectoryEndLocatorHeader : ZipHeader
internal class Zip64DirectoryEndLocatorHeader() : ZipHeader(ZipHeaderType.Zip64DirectoryEndLocator)
{
public Zip64DirectoryEndLocatorHeader()
: base(ZipHeaderType.Zip64DirectoryEndLocator) { }
internal override void Read(BinaryReader reader)
{
FirstVolumeWithDirectory = reader.ReadUInt32();
@@ -14,6 +12,13 @@ internal class Zip64DirectoryEndLocatorHeader : ZipHeader
TotalNumberOfVolumes = reader.ReadUInt32();
}
internal override async ValueTask Read(AsyncBinaryReader reader)
{
FirstVolumeWithDirectory = await reader.ReadUInt32Async();
RelativeOffsetOfTheEndOfDirectoryRecord = (long)await reader.ReadUInt64Async();
TotalNumberOfVolumes = await reader.ReadUInt32Async();
}
public uint FirstVolumeWithDirectory { get; private set; }
public long RelativeOffsetOfTheEndOfDirectoryRecord { get; private set; }

View File

@@ -2,18 +2,14 @@ using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
internal abstract class ZipFileEntry : ZipHeader
internal abstract class ZipFileEntry(ZipHeaderType type, IArchiveEncoding archiveEncoding)
: ZipHeader(type)
{
protected ZipFileEntry(ZipHeaderType type, ArchiveEncoding archiveEncoding)
: base(type)
{
Extra = new List<ExtraData>();
ArchiveEncoding = archiveEncoding;
}
internal bool IsDirectory
{
get
@@ -30,7 +26,7 @@ internal abstract class ZipFileEntry : ZipHeader
internal Stream? PackedStream { get; set; }
internal ArchiveEncoding ArchiveEncoding { get; }
internal IArchiveEncoding ArchiveEncoding { get; } = archiveEncoding;
internal string? Name { get; set; }
@@ -44,7 +40,7 @@ internal abstract class ZipFileEntry : ZipHeader
internal long UncompressedSize { get; set; }
internal List<ExtraData> Extra { get; set; }
internal List<ExtraData> Extra { get; set; } = new();
public string? Password { get; set; }
@@ -63,6 +59,24 @@ internal abstract class ZipFileEntry : ZipHeader
return encryptionData;
}
internal async ValueTask<PkwareTraditionalEncryptionData> ComposeEncryptionDataAsync(
Stream archiveStream,
CancellationToken cancellationToken = default
)
{
if (archiveStream is null)
{
throw new ArgumentNullException(nameof(archiveStream));
}
var buffer = new byte[12];
await archiveStream.ReadFullyAsync(buffer, 0, 12, cancellationToken).ConfigureAwait(false);
var encryptionData = PkwareTraditionalEncryptionData.ForRead(Password!, this, buffer);
return encryptionData;
}
internal WinzipAesEncryptionData? WinzipAesEncryptionData { get; set; }
/// <summary>

View File

@@ -1,18 +1,14 @@
using System.IO;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
internal abstract class ZipHeader
internal abstract class ZipHeader(ZipHeaderType type)
{
protected ZipHeader(ZipHeaderType type)
{
ZipHeaderType = type;
HasData = true;
}
internal ZipHeaderType ZipHeaderType { get; }
internal ZipHeaderType ZipHeaderType { get; } = type;
internal abstract void Read(BinaryReader reader);
internal abstract ValueTask Read(AsyncBinaryReader reader);
internal bool HasData { get; set; }
internal bool HasData { get; set; } = true;
}

View File

@@ -8,9 +8,9 @@ internal class PkwareTraditionalEncryptionData
{
private static readonly CRC32 CRC32 = new();
private readonly uint[] _keys = { 0x12345678, 0x23456789, 0x34567890 };
private readonly ArchiveEncoding _archiveEncoding;
private readonly IArchiveEncoding _archiveEncoding;
private PkwareTraditionalEncryptionData(string password, ArchiveEncoding archiveEncoding)
private PkwareTraditionalEncryptionData(string password, IArchiveEncoding archiveEncoding)
{
_archiveEncoding = archiveEncoding;
Initialize(password);
@@ -103,7 +103,7 @@ internal class PkwareTraditionalEncryptionData
internal byte[] StringToByteArray(string value)
{
var a = _archiveEncoding.GetPasswordEncoding().GetBytes(value);
var a = _archiveEncoding.Password.GetBytes(value);
return a;
}

View File

@@ -1,4 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
namespace SharpCompress.Common.Zip;
@@ -25,9 +27,24 @@ internal class SeekableZipFilePart : ZipFilePart
return base.GetCompressedStream();
}
internal override async ValueTask<Stream?> GetCompressedStreamAsync(
CancellationToken cancellationToken = default
)
{
if (!_isLocalHeaderLoaded)
{
await LoadLocalHeaderAsync(cancellationToken);
_isLocalHeaderLoaded = true;
}
return await base.GetCompressedStreamAsync(cancellationToken);
}
private void LoadLocalHeader() =>
Header = _headerFactory.GetLocalHeader(BaseStream, (DirectoryEntryHeader)Header);
private async ValueTask LoadLocalHeaderAsync(CancellationToken cancellationToken = default) =>
Header = await _headerFactory.GetLocalHeaderAsync(BaseStream, (DirectoryEntryHeader)Header);
protected override Stream CreateBaseStream()
{
BaseStream.Position = Header.DataStartPosition.NotNull();

View File

@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
@@ -15,10 +16,77 @@ internal sealed class SeekableZipHeaderFactory : ZipHeaderFactory
private const int MAX_SEARCH_LENGTH_FOR_EOCD = 65557;
private bool _zip64;
internal SeekableZipHeaderFactory(string? password, ArchiveEncoding archiveEncoding)
internal SeekableZipHeaderFactory(string? password, IArchiveEncoding archiveEncoding)
: base(StreamingMode.Seekable, password, archiveEncoding) { }
internal IEnumerable<ZipHeader> ReadSeekableHeader(Stream stream)
internal async IAsyncEnumerable<ZipHeader> ReadSeekableHeaderAsync(Stream stream)
{
var reader = new AsyncBinaryReader(stream);
await SeekBackToHeaderAsync(stream, reader);
var eocd_location = stream.Position;
var entry = new DirectoryEndHeader();
await entry.Read(reader);
if (entry.IsZip64)
{
_zip64 = true;
// ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR should be before the EOCD
stream.Seek(eocd_location - ZIP64_EOCD_LENGTH - 4, SeekOrigin.Begin);
uint zip64_locator = await reader.ReadUInt32Async();
if (zip64_locator != ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR)
{
throw new ArchiveException("Failed to locate the Zip64 Directory Locator");
}
var zip64Locator = new Zip64DirectoryEndLocatorHeader();
await zip64Locator.Read(reader);
stream.Seek(zip64Locator.RelativeOffsetOfTheEndOfDirectoryRecord, SeekOrigin.Begin);
var zip64Signature = await reader.ReadUInt32Async();
if (zip64Signature != ZIP64_END_OF_CENTRAL_DIRECTORY)
{
throw new ArchiveException("Failed to locate the Zip64 Header");
}
var zip64Entry = new Zip64DirectoryEndHeader();
await zip64Entry.Read(reader);
stream.Seek(zip64Entry.DirectoryStartOffsetRelativeToDisk, SeekOrigin.Begin);
}
else
{
stream.Seek(entry.DirectoryStartOffsetRelativeToDisk, SeekOrigin.Begin);
}
var position = stream.Position;
while (true)
{
stream.Position = position;
var signature = await reader.ReadUInt32Async();
var nextHeader = await ReadHeader(signature, reader, _zip64);
position = stream.Position;
if (nextHeader is null)
{
yield break;
}
if (nextHeader is DirectoryEntryHeader entryHeader)
{
//entry could be zero bytes so we need to know that.
entryHeader.HasData = entryHeader.CompressedSize != 0;
yield return entryHeader;
}
else if (nextHeader is DirectoryEndHeader endHeader)
{
yield return endHeader;
}
}
}
internal IEnumerable<ZipHeader> ReadSeekableHeader(Stream stream, bool useSync)
{
var reader = new BinaryReader(stream);
@@ -85,6 +153,73 @@ internal sealed class SeekableZipHeaderFactory : ZipHeaderFactory
}
}
internal async IAsyncEnumerable<ZipHeader> ReadSeekableHeaderAsync(Stream stream, bool useSync)
{
var reader = new AsyncBinaryReader(stream);
await SeekBackToHeaderAsync(stream, reader);
var eocd_location = stream.Position;
var entry = new DirectoryEndHeader();
await entry.Read(reader);
if (entry.IsZip64)
{
_zip64 = true;
// ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR should be before the EOCD
stream.Seek(eocd_location - ZIP64_EOCD_LENGTH - 4, SeekOrigin.Begin);
var zip64_locator = await reader.ReadUInt32Async();
if (zip64_locator != ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR)
{
throw new ArchiveException("Failed to locate the Zip64 Directory Locator");
}
var zip64Locator = new Zip64DirectoryEndLocatorHeader();
await zip64Locator.Read(reader);
stream.Seek(zip64Locator.RelativeOffsetOfTheEndOfDirectoryRecord, SeekOrigin.Begin);
var zip64Signature = await reader.ReadUInt32Async();
if (zip64Signature != ZIP64_END_OF_CENTRAL_DIRECTORY)
{
throw new ArchiveException("Failed to locate the Zip64 Header");
}
var zip64Entry = new Zip64DirectoryEndHeader();
await zip64Entry.Read(reader);
stream.Seek(zip64Entry.DirectoryStartOffsetRelativeToDisk, SeekOrigin.Begin);
}
else
{
stream.Seek(entry.DirectoryStartOffsetRelativeToDisk, SeekOrigin.Begin);
}
var position = stream.Position;
while (true)
{
stream.Position = position;
var signature = await reader.ReadUInt32Async();
var nextHeader = await ReadHeader(signature, reader, _zip64);
position = stream.Position;
if (nextHeader is null)
{
yield break;
}
if (nextHeader is DirectoryEntryHeader entryHeader)
{
//entry could be zero bytes so we need to know that.
entryHeader.HasData = entryHeader.CompressedSize != 0;
yield return entryHeader;
}
else if (nextHeader is DirectoryEndHeader endHeader)
{
yield return endHeader;
}
}
}
private static bool IsMatch(byte[] haystack, int position, byte[] needle)
{
for (var i = 0; i < needle.Length; i++)
@@ -98,6 +233,45 @@ internal sealed class SeekableZipHeaderFactory : ZipHeaderFactory
return true;
}
private static async ValueTask SeekBackToHeaderAsync(Stream stream, AsyncBinaryReader reader)
{
// Minimum EOCD length
if (stream.Length < MINIMUM_EOCD_LENGTH)
{
throw new ArchiveException(
"Could not find Zip file Directory at the end of the file. File may be corrupted."
);
}
var len =
stream.Length < MAX_SEARCH_LENGTH_FOR_EOCD
? (int)stream.Length
: MAX_SEARCH_LENGTH_FOR_EOCD;
// We search for the marker in reverse to find the first occurrence
byte[] needle = { 0x06, 0x05, 0x4b, 0x50 };
stream.Seek(-len, SeekOrigin.End);
var seek = await reader.ReadBytesAsync(len);
// Search in reverse
Array.Reverse(seek);
// don't exclude the minimum eocd region, otherwise you fail to locate the header in empty zip files
var max_search_area = len; // - MINIMUM_EOCD_LENGTH;
for (var pos_from_end = 0; pos_from_end < max_search_area; ++pos_from_end)
{
if (IsMatch(seek, pos_from_end, needle))
{
stream.Seek(-pos_from_end, SeekOrigin.End);
return;
}
}
throw new ArchiveException("Failed to locate the Zip Header");
}
private static void SeekBackToHeader(Stream stream, BinaryReader reader)
{
// Minimum EOCD length
@@ -163,4 +337,31 @@ internal sealed class SeekableZipHeaderFactory : ZipHeaderFactory
}
return localEntryHeader;
}
internal async ValueTask<LocalEntryHeader> GetLocalHeaderAsync(
Stream stream,
DirectoryEntryHeader directoryEntryHeader
)
{
stream.Seek(directoryEntryHeader.RelativeOffsetOfEntryHeader, SeekOrigin.Begin);
var reader = new AsyncBinaryReader(stream);
var signature = await reader.ReadUInt32Async();
if (await ReadHeader(signature, reader, _zip64) is not LocalEntryHeader localEntryHeader)
{
throw new InvalidOperationException();
}
// populate fields only known from the DirectoryEntryHeader
localEntryHeader.HasData = directoryEntryHeader.HasData;
localEntryHeader.ExternalFileAttributes = directoryEntryHeader.ExternalFileAttributes;
localEntryHeader.Comment = directoryEntryHeader.Comment;
if (FlagUtility.HasFlag(localEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor))
{
localEntryHeader.Crc = directoryEntryHeader.Crc;
localEntryHeader.CompressedSize = directoryEntryHeader.CompressedSize;
localEntryHeader.UncompressedSize = directoryEntryHeader.UncompressedSize;
}
return localEntryHeader;
}
}
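The factory is internal, so the sketch below only illustrates how the async enumeration above is consumed inside the library; the surrounding method and its name are assumptions, not code from this changeset.

using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip;
using SharpCompress.Common.Zip.Headers;

internal static class SeekableHeaderEnumerationExample
{
    internal static async ValueTask<int> CountDirectoryEntriesAsync(
        SeekableZipHeaderFactory factory,
        Stream archiveStream,
        CancellationToken ct = default
    )
    {
        var count = 0;
        await foreach (var header in factory.ReadSeekableHeaderAsync(archiveStream).WithCancellation(ct))
        {
            if (header.ZipHeaderType == ZipHeaderType.DirectoryEntry)
            {
                count++;
            }
            else if (header.ZipHeaderType == ZipHeaderType.DirectoryEnd)
            {
                break; // the end-of-central-directory record terminates the listing
            }
        }
        return count;
    }
}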

View File

@@ -1,4 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors.Deflate;
using SharpCompress.IO;
@@ -31,6 +33,28 @@ internal sealed class StreamingZipFilePart : ZipFilePart
return _decompressionStream;
}
internal override async ValueTask<Stream?> GetCompressedStreamAsync(
CancellationToken cancellationToken = default
)
{
if (!Header.HasData)
{
return Stream.Null;
}
_decompressionStream = await CreateDecompressionStreamAsync(
await GetCryptoStreamAsync(CreateBaseStream(), cancellationToken)
.ConfigureAwait(false),
Header.CompressionMethod,
cancellationToken
)
.ConfigureAwait(false);
if (LeaveStreamOpen)
{
return SharpCompressStream.Create(_decompressionStream, leaveOpen: true);
}
return _decompressionStream;
}
internal BinaryReader FixStreamedFileLocation(ref SharpCompressStream rewindableStream)
{
if (Header.IsDirectory)

View File

@@ -2,6 +2,9 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
@@ -13,7 +16,7 @@ internal class StreamingZipHeaderFactory : ZipHeaderFactory
internal StreamingZipHeaderFactory(
string? password,
ArchiveEncoding archiveEncoding,
IArchiveEncoding archiveEncoding,
IEnumerable<ZipEntry>? entries
)
: base(StreamingMode.Streaming, password, archiveEncoding) => _entries = entries;
@@ -200,4 +203,331 @@ internal class StreamingZipHeaderFactory : ZipHeaderFactory
yield return header;
}
}
/// <summary>
/// Reads ZIP headers asynchronously for streams that do not support synchronous reads.
/// </summary>
internal IAsyncEnumerable<ZipHeader> ReadStreamHeaderAsync(Stream stream) =>
new StreamHeaderAsyncEnumerable(this, stream);
/// <summary>
/// Invokes the shared async header parsing logic on the base factory.
/// </summary>
private ValueTask<ZipHeader?> ReadHeaderAsyncInternal(
uint headerBytes,
AsyncBinaryReader reader
) => ReadHeader(headerBytes, reader);
/// <summary>
/// Exposes the last parsed local entry header to the async enumerator so it can handle streaming data descriptors.
/// </summary>
private LocalEntryHeader? LastEntryHeader
{
get => _lastEntryHeader;
set => _lastEntryHeader = value;
}
/// <summary>
/// Produces an async enumerator for streaming ZIP headers.
/// </summary>
private sealed class StreamHeaderAsyncEnumerable : IAsyncEnumerable<ZipHeader>
{
private readonly StreamingZipHeaderFactory _headerFactory;
private readonly Stream _stream;
public StreamHeaderAsyncEnumerable(StreamingZipHeaderFactory headerFactory, Stream stream)
{
_headerFactory = headerFactory;
_stream = stream;
}
public IAsyncEnumerator<ZipHeader> GetAsyncEnumerator(
CancellationToken cancellationToken = default
) => new StreamHeaderAsyncEnumerator(_headerFactory, _stream, cancellationToken);
}
/// <summary>
/// Async implementation of <see cref="ReadStreamHeader"/> using <see cref="AsyncBinaryReader"/> to avoid sync reads.
/// </summary>
private sealed class StreamHeaderAsyncEnumerator : IAsyncEnumerator<ZipHeader>, IDisposable
{
private readonly StreamingZipHeaderFactory _headerFactory;
private readonly SharpCompressStream _rewindableStream;
private readonly AsyncBinaryReader _reader;
private readonly CancellationToken _cancellationToken;
private bool _completed;
public StreamHeaderAsyncEnumerator(
StreamingZipHeaderFactory headerFactory,
Stream stream,
CancellationToken cancellationToken
)
{
_headerFactory = headerFactory;
_rewindableStream = EnsureSharpCompressStream(stream);
_reader = new AsyncBinaryReader(_rewindableStream, leaveOpen: true);
_cancellationToken = cancellationToken;
}
private ZipHeader? _current;
public ZipHeader Current =>
_current ?? throw new InvalidOperationException("No current header is available.");
/// <summary>
/// Advances to the next ZIP header in the stream, honoring streaming data descriptors where applicable.
/// </summary>
public async ValueTask<bool> MoveNextAsync()
{
if (_completed)
{
return false;
}
while (true)
{
_cancellationToken.ThrowIfCancellationRequested();
uint headerBytes;
var lastEntryHeader = _headerFactory.LastEntryHeader;
if (
lastEntryHeader != null
&& FlagUtility.HasFlag(lastEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor)
)
{
if (lastEntryHeader.Part is null)
{
continue;
}
var pos = _rewindableStream.CanSeek ? (long?)_rewindableStream.Position : null;
var crc = await _reader
.ReadUInt32Async(_cancellationToken)
.ConfigureAwait(false);
if (crc == POST_DATA_DESCRIPTOR)
{
crc = await _reader
.ReadUInt32Async(_cancellationToken)
.ConfigureAwait(false);
}
lastEntryHeader.Crc = crc;
//attempt 32bit read
ulong compressedSize = await _reader
.ReadUInt32Async(_cancellationToken)
.ConfigureAwait(false);
ulong uncompressedSize = await _reader
.ReadUInt32Async(_cancellationToken)
.ConfigureAwait(false);
headerBytes = await _reader
.ReadUInt32Async(_cancellationToken)
.ConfigureAwait(false);
//check for zip64 sentinel or unexpected header
bool isSentinel =
compressedSize == 0xFFFFFFFF || uncompressedSize == 0xFFFFFFFF;
bool isHeader = headerBytes == 0x04034b50 || headerBytes == 0x02014b50;
if (!isHeader && !isSentinel)
{
//reshuffle into 64-bit values
compressedSize = (uncompressedSize << 32) | compressedSize;
uncompressedSize =
((ulong)headerBytes << 32)
| await _reader
.ReadUInt32Async(_cancellationToken)
.ConfigureAwait(false);
headerBytes = await _reader
.ReadUInt32Async(_cancellationToken)
.ConfigureAwait(false);
}
else if (isSentinel)
{
//standards-compliant zip64 descriptor
compressedSize = await _reader
.ReadUInt64Async(_cancellationToken)
.ConfigureAwait(false);
uncompressedSize = await _reader
.ReadUInt64Async(_cancellationToken)
.ConfigureAwait(false);
}
lastEntryHeader.CompressedSize = (long)compressedSize;
lastEntryHeader.UncompressedSize = (long)uncompressedSize;
if (pos.HasValue)
{
lastEntryHeader.DataStartPosition = pos - lastEntryHeader.CompressedSize;
}
}
else if (lastEntryHeader != null && lastEntryHeader.IsZip64)
{
if (lastEntryHeader.Part is null)
{
continue;
}
var pos = _rewindableStream.CanSeek ? (long?)_rewindableStream.Position : null;
headerBytes = await _reader
.ReadUInt32Async(_cancellationToken)
.ConfigureAwait(false);
_ = await _reader.ReadUInt16Async(_cancellationToken).ConfigureAwait(false); // version
_ = await _reader.ReadUInt16Async(_cancellationToken).ConfigureAwait(false); // flags
_ = await _reader.ReadUInt16Async(_cancellationToken).ConfigureAwait(false); // compressionMethod
_ = await _reader.ReadUInt16Async(_cancellationToken).ConfigureAwait(false); // lastModifiedDate
_ = await _reader.ReadUInt16Async(_cancellationToken).ConfigureAwait(false); // lastModifiedTime
var crc = await _reader
.ReadUInt32Async(_cancellationToken)
.ConfigureAwait(false);
if (crc == POST_DATA_DESCRIPTOR)
{
crc = await _reader
.ReadUInt32Async(_cancellationToken)
.ConfigureAwait(false);
}
lastEntryHeader.Crc = crc;
// The DataDescriptor can be either 64bit or 32bit
var compressedSize = await _reader
.ReadUInt32Async(_cancellationToken)
.ConfigureAwait(false);
var uncompressedSize = await _reader
.ReadUInt32Async(_cancellationToken)
.ConfigureAwait(false);
// Check if we have a header or a 64-bit DataDescriptor
var testHeader = !(headerBytes == 0x04034b50 || headerBytes == 0x02014b50);
var test64Bit = ((long)uncompressedSize << 32) | compressedSize;
if (test64Bit == lastEntryHeader.CompressedSize && testHeader)
{
lastEntryHeader.UncompressedSize =
(
(long)
await _reader
.ReadUInt32Async(_cancellationToken)
.ConfigureAwait(false) << 32
) | headerBytes;
headerBytes = await _reader
.ReadUInt32Async(_cancellationToken)
.ConfigureAwait(false);
}
else
{
lastEntryHeader.UncompressedSize = uncompressedSize;
}
if (pos.HasValue)
{
lastEntryHeader.DataStartPosition = pos - lastEntryHeader.CompressedSize;
// 4 = First 4 bytes of the entry header (i.e. 50 4B 03 04)
_rewindableStream.Position = pos.Value + 4;
}
}
else
{
headerBytes = await _reader
.ReadUInt32Async(_cancellationToken)
.ConfigureAwait(false);
}
_headerFactory.LastEntryHeader = null;
var header = await _headerFactory
.ReadHeaderAsyncInternal(headerBytes, _reader)
.ConfigureAwait(false);
if (header is null)
{
_completed = true;
return false;
}
//entry could be zero bytes so we need to know that.
if (header.ZipHeaderType == ZipHeaderType.LocalEntry)
{
var localHeader = (LocalEntryHeader)header;
var directoryHeader = _headerFactory._entries?.FirstOrDefault(entry =>
entry.Key == localHeader.Name
&& localHeader.CompressedSize == 0
&& localHeader.UncompressedSize == 0
&& localHeader.Crc == 0
&& localHeader.IsDirectory == false
);
if (directoryHeader != null)
{
localHeader.UncompressedSize = directoryHeader.Size;
localHeader.CompressedSize = directoryHeader.CompressedSize;
localHeader.Crc = (uint)directoryHeader.Crc;
}
// If we have CompressedSize, there is data to be read
if (localHeader.CompressedSize > 0)
{
header.HasData = true;
} // Check if the zip is streaming (Length is 0 and is declared in the PostDataDescriptor)
else if (localHeader.Flags.HasFlag(HeaderFlags.UsePostDataDescriptor))
{
var nextHeaderBytes = await _reader
.ReadUInt32Async(_cancellationToken)
.ConfigureAwait(false);
((IStreamStack)_rewindableStream).Rewind(sizeof(uint));
// Check if next data is PostDataDescriptor, streamed file with 0 length
header.HasData = !IsHeader(nextHeaderBytes);
}
else // We are not streaming and compressed size is 0, we have no data
{
header.HasData = false;
}
}
_current = header;
return true;
}
}
public ValueTask DisposeAsync()
{
Dispose();
return default;
}
/// <summary>
/// Disposes the underlying reader (without closing the archive stream).
/// </summary>
public void Dispose()
{
_reader.Dispose();
}
/// <summary>
/// Ensures the stream is a <see cref="SharpCompressStream"/> so header parsing can use rewind/buffer helpers.
/// </summary>
private static SharpCompressStream EnsureSharpCompressStream(Stream stream)
{
if (stream is SharpCompressStream sharpCompressStream)
{
return sharpCompressStream;
}
// Ensure the stream is already a SharpCompressStream so the buffer/size is set.
// The original code wrapped this with RewindableStream; use SharpCompressStream so we can get the buffer size.
if (stream is SourceStream src)
{
return new SharpCompressStream(
stream,
src.ReaderOptions.LeaveStreamOpen,
bufferSize: src.ReaderOptions.BufferSize
);
}
throw new ArgumentException("Stream must be a SharpCompressStream", nameof(stream));
}
}
}
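For readers unfamiliar with hand-rolled async enumerators, a minimal generic equivalent of the enumerable/enumerator pair used above (no ZIP specifics, names invented for illustration) looks like this:

using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;

// Generic illustration of the hand-rolled IAsyncEnumerable/IAsyncEnumerator shape.
public sealed class CountingAsyncEnumerable : IAsyncEnumerable<int>
{
    private readonly int _count;

    public CountingAsyncEnumerable(int count) => _count = count;

    public IAsyncEnumerator<int> GetAsyncEnumerator(CancellationToken cancellationToken = default) =>
        new Enumerator(_count, cancellationToken);

    private sealed class Enumerator : IAsyncEnumerator<int>
    {
        private readonly int _count;
        private readonly CancellationToken _cancellationToken;
        private int _next;

        public Enumerator(int count, CancellationToken cancellationToken)
        {
            _count = count;
            _cancellationToken = cancellationToken;
        }

        public int Current { get; private set; }

        public async ValueTask<bool> MoveNextAsync()
        {
            _cancellationToken.ThrowIfCancellationRequested();
            if (_next >= _count)
            {
                return false; // mirrors the _completed flag in the enumerator above
            }
            await Task.Yield(); // stand-in for the awaited header reads in the real enumerator
            Current = _next++;
            return true;
        }

        public ValueTask DisposeAsync() => default; // the real enumerator disposes its AsyncBinaryReader here
    }
}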

View File

@@ -2,6 +2,8 @@ using System;
using System.Buffers.Binary;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors;
using SharpCompress.Compressors.BZip2;
@@ -264,4 +266,244 @@ internal abstract class ZipFilePart : FilePart
}
return plainStream;
}
internal override async ValueTask<Stream?> GetCompressedStreamAsync(
CancellationToken cancellationToken = default
)
{
if (!Header.HasData)
{
return Stream.Null;
}
var decompressionStream = await CreateDecompressionStreamAsync(
await GetCryptoStreamAsync(CreateBaseStream(), cancellationToken)
.ConfigureAwait(false),
Header.CompressionMethod,
cancellationToken
)
.ConfigureAwait(false);
if (LeaveStreamOpen)
{
return SharpCompressStream.Create(decompressionStream, leaveOpen: true);
}
return decompressionStream;
}
protected async Task<Stream> GetCryptoStreamAsync(
Stream plainStream,
CancellationToken cancellationToken = default
)
{
var isFileEncrypted = FlagUtility.HasFlag(Header.Flags, HeaderFlags.Encrypted);
if (Header.CompressedSize == 0 && isFileEncrypted)
{
throw new NotSupportedException("Cannot encrypt file with unknown size at start.");
}
if (
(
Header.CompressedSize == 0
&& FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor)
) || Header.IsZip64
)
{
plainStream = SharpCompressStream.Create(plainStream, leaveOpen: true); //make sure AES doesn't close
}
else
{
plainStream = new ReadOnlySubStream(plainStream, Header.CompressedSize); //make sure AES doesn't close
}
if (isFileEncrypted)
{
switch (Header.CompressionMethod)
{
case ZipCompressionMethod.None:
case ZipCompressionMethod.Shrink:
case ZipCompressionMethod.Reduce1:
case ZipCompressionMethod.Reduce2:
case ZipCompressionMethod.Reduce3:
case ZipCompressionMethod.Reduce4:
case ZipCompressionMethod.Deflate:
case ZipCompressionMethod.Deflate64:
case ZipCompressionMethod.BZip2:
case ZipCompressionMethod.LZMA:
case ZipCompressionMethod.PPMd:
{
return new PkwareTraditionalCryptoStream(
plainStream,
await Header
.ComposeEncryptionDataAsync(plainStream, cancellationToken)
.ConfigureAwait(false),
CryptoMode.Decrypt
);
}
case ZipCompressionMethod.WinzipAes:
{
if (Header.WinzipAesEncryptionData != null)
{
return new WinzipAesCryptoStream(
plainStream,
Header.WinzipAesEncryptionData,
Header.CompressedSize - 10
);
}
return plainStream;
}
default:
{
throw new InvalidOperationException("Header.CompressionMethod is invalid");
}
}
}
return plainStream;
}
protected async Task<Stream> CreateDecompressionStreamAsync(
Stream stream,
ZipCompressionMethod method,
CancellationToken cancellationToken = default
)
{
switch (method)
{
case ZipCompressionMethod.None:
{
if (Header.CompressedSize is 0)
{
return new DataDescriptorStream(stream);
}
return stream;
}
case ZipCompressionMethod.Shrink:
{
return new ShrinkStream(
stream,
CompressionMode.Decompress,
Header.CompressedSize,
Header.UncompressedSize
);
}
case ZipCompressionMethod.Reduce1:
{
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 1);
}
case ZipCompressionMethod.Reduce2:
{
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 2);
}
case ZipCompressionMethod.Reduce3:
{
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 3);
}
case ZipCompressionMethod.Reduce4:
{
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 4);
}
case ZipCompressionMethod.Explode:
{
return new ExplodeStream(
stream,
Header.CompressedSize,
Header.UncompressedSize,
Header.Flags
);
}
case ZipCompressionMethod.Deflate:
{
return new DeflateStream(stream, CompressionMode.Decompress);
}
case ZipCompressionMethod.Deflate64:
{
return new Deflate64Stream(stream, CompressionMode.Decompress);
}
case ZipCompressionMethod.BZip2:
{
return new BZip2Stream(stream, CompressionMode.Decompress, false);
}
case ZipCompressionMethod.LZMA:
{
if (FlagUtility.HasFlag(Header.Flags, HeaderFlags.Encrypted))
{
throw new NotSupportedException("LZMA with pkware encryption.");
}
var buffer = new byte[4];
await stream.ReadFullyAsync(buffer, 0, 4, cancellationToken).ConfigureAwait(false);
var version = BinaryPrimitives.ReadUInt16LittleEndian(buffer.AsSpan(0, 2));
var propsSize = BinaryPrimitives.ReadUInt16LittleEndian(buffer.AsSpan(2, 2));
var props = new byte[propsSize];
await stream
.ReadFullyAsync(props, 0, propsSize, cancellationToken)
.ConfigureAwait(false);
return new LzmaStream(
props,
stream,
Header.CompressedSize > 0 ? Header.CompressedSize - 4 - props.Length : -1,
FlagUtility.HasFlag(Header.Flags, HeaderFlags.Bit1)
? -1
: Header.UncompressedSize
);
}
case ZipCompressionMethod.Xz:
{
return new XZStream(stream);
}
case ZipCompressionMethod.ZStandard:
{
return new DecompressionStream(stream);
}
case ZipCompressionMethod.PPMd:
{
var props = new byte[2];
await stream.ReadFullyAsync(props, 0, 2, cancellationToken).ConfigureAwait(false);
return new PpmdStream(new PpmdProperties(props), stream, false);
}
case ZipCompressionMethod.WinzipAes:
{
var data = Header.Extra.SingleOrDefault(x => x.Type == ExtraDataType.WinZipAes);
if (data is null)
{
throw new InvalidFormatException("No Winzip AES extra data found.");
}
if (data.Length != 7)
{
throw new InvalidFormatException("Winzip data length is not 7.");
}
var compressedMethod = BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes);
if (compressedMethod != 0x01 && compressedMethod != 0x02)
{
throw new InvalidFormatException(
"Unexpected vendor version number for WinZip AES metadata"
);
}
var vendorId = BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes.AsSpan(2));
if (vendorId != 0x4541)
{
throw new InvalidFormatException(
"Unexpected vendor ID for WinZip AES metadata"
);
}
return await CreateDecompressionStreamAsync(
stream,
(ZipCompressionMethod)
BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes.AsSpan(5)),
cancellationToken
).ConfigureAwait(false);
}
default:
{
throw new NotSupportedException("CompressionMethod: " + Header.CompressionMethod);
}
}
}
}
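
Editor's note: the internal async decompression path above surfaces through the archive-level API. A minimal consumer sketch follows; ZipArchive.OpenAsync is added by the factories in this change, while the Entries property, the IAsyncDisposable behaviour, and the OpenEntryStreamAsync name are assumptions about the async entry surface and may differ from the final API.

using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives.Zip;

public static class AsyncZipSample
{
    // Sketch only: ZipArchive.OpenAsync comes from this change; the entry-level
    // async stream method name below is hypothetical.
    public static async Task ExtractFirstFileAsync(string path, Stream destination, CancellationToken ct)
    {
        await using var archive = await ZipArchive.OpenAsync(File.OpenRead(path), null, ct);
        foreach (var entry in archive.Entries) // assumes the async archive still exposes Entries
        {
            if (entry.IsDirectory)
            {
                continue;
            }
            await using var entryStream = await entry.OpenEntryStreamAsync(ct); // hypothetical name
            await entryStream.CopyToAsync(destination, 81920, ct);
            break;
        }
    }
}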

View File

@@ -1,6 +1,8 @@
using System;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
@@ -21,12 +23,12 @@ internal class ZipHeaderFactory
protected LocalEntryHeader? _lastEntryHeader;
private readonly string? _password;
private readonly StreamingMode _mode;
private readonly ArchiveEncoding _archiveEncoding;
private readonly IArchiveEncoding _archiveEncoding;
protected ZipHeaderFactory(
StreamingMode mode,
string? password,
ArchiveEncoding archiveEncoding
IArchiveEncoding archiveEncoding
)
{
_mode = mode;
@@ -34,6 +36,82 @@ internal class ZipHeaderFactory
_archiveEncoding = archiveEncoding;
}
protected async ValueTask<ZipHeader?> ReadHeader(
uint headerBytes,
AsyncBinaryReader reader,
bool zip64 = false
)
{
switch (headerBytes)
{
case ENTRY_HEADER_BYTES:
{
var entryHeader = new LocalEntryHeader(_archiveEncoding);
await entryHeader.Read(reader);
await LoadHeaderAsync(entryHeader, reader.BaseStream).ConfigureAwait(false);
_lastEntryHeader = entryHeader;
return entryHeader;
}
case DIRECTORY_START_HEADER_BYTES:
{
var entry = new DirectoryEntryHeader(_archiveEncoding);
await entry.Read(reader);
return entry;
}
case POST_DATA_DESCRIPTOR:
{
if (
_lastEntryHeader != null
&& FlagUtility.HasFlag(
_lastEntryHeader.NotNull().Flags,
HeaderFlags.UsePostDataDescriptor
)
)
{
_lastEntryHeader.Crc = await reader.ReadUInt32Async();
_lastEntryHeader.CompressedSize = zip64
? (long)await reader.ReadUInt64Async()
: await reader.ReadUInt32Async();
_lastEntryHeader.UncompressedSize = zip64
? (long)await reader.ReadUInt64Async()
: await reader.ReadUInt32Async();
}
else
{
await reader.ReadBytesAsync(zip64 ? 20 : 12);
}
return null;
}
case DIGITAL_SIGNATURE:
return null;
case DIRECTORY_END_HEADER_BYTES:
{
var entry = new DirectoryEndHeader();
await entry.Read(reader);
return entry;
}
case SPLIT_ARCHIVE_HEADER_BYTES:
{
return new SplitHeader();
}
case ZIP64_END_OF_CENTRAL_DIRECTORY:
{
var entry = new Zip64DirectoryEndHeader();
await entry.Read(reader);
return entry;
}
case ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR:
{
var entry = new Zip64DirectoryEndLocatorHeader();
await entry.Read(reader);
return entry;
}
default:
return null;
}
}
protected ZipHeader? ReadHeader(uint headerBytes, BinaryReader reader, bool zip64 = false)
{
switch (headerBytes)
@@ -205,4 +283,82 @@ internal class ZipHeaderFactory
//}
}
/// <summary>
/// Loads encryption metadata and stream positioning for a header using async reads where needed.
/// </summary>
private async ValueTask LoadHeaderAsync(ZipFileEntry entryHeader, Stream stream)
{
if (FlagUtility.HasFlag(entryHeader.Flags, HeaderFlags.Encrypted))
{
if (
!entryHeader.IsDirectory
&& entryHeader.CompressedSize == 0
&& FlagUtility.HasFlag(entryHeader.Flags, HeaderFlags.UsePostDataDescriptor)
)
{
throw new NotSupportedException(
"SharpCompress cannot currently read non-seekable Zip Streams with encrypted data that has been written in a non-seekable manner."
);
}
if (_password is null)
{
throw new CryptographicException("No password supplied for encrypted zip.");
}
entryHeader.Password = _password;
if (entryHeader.CompressionMethod == ZipCompressionMethod.WinzipAes)
{
var data = entryHeader.Extra.SingleOrDefault(x =>
x.Type == ExtraDataType.WinZipAes
);
if (data != null)
{
var keySize = (WinzipAesKeySize)data.DataBytes[4];
var salt = new byte[WinzipAesEncryptionData.KeyLengthInBytes(keySize) / 2];
var passwordVerifyValue = new byte[2];
await stream.ReadExactAsync(salt, 0, salt.Length).ConfigureAwait(false);
await stream.ReadExactAsync(passwordVerifyValue, 0, 2).ConfigureAwait(false);
entryHeader.WinzipAesEncryptionData = new WinzipAesEncryptionData(
keySize,
salt,
passwordVerifyValue,
_password
);
entryHeader.CompressedSize -= (uint)(salt.Length + 2);
}
}
}
if (entryHeader.IsDirectory)
{
return;
}
switch (_mode)
{
case StreamingMode.Seekable:
{
entryHeader.DataStartPosition = stream.Position;
stream.Position += entryHeader.CompressedSize;
break;
}
case StreamingMode.Streaming:
{
entryHeader.PackedStream = stream;
break;
}
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
}
}
}
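
Editor's note: for reference on the POST_DATA_DESCRIPTOR branch above, the descriptor that trails an entry's data is CRC-32 plus compressed and uncompressed sizes; each size is 4 bytes in the classic format and 8 bytes under Zip64, which is why the skip path reads 12 or 20 bytes. The 4-byte signature itself has already been consumed as headerBytes before the switch runs. A standalone sketch of that layout, reusing the AsyncBinaryReader calls shown above; the record type is illustrative, not part of the library.

internal readonly record struct DataDescriptor(uint Crc, long CompressedSize, long UncompressedSize);

internal static class DataDescriptorSample
{
    // Mirrors the layout consumed by the POST_DATA_DESCRIPTOR branch above.
    internal static async ValueTask<DataDescriptor> ReadAsync(AsyncBinaryReader reader, bool zip64)
    {
        var crc = await reader.ReadUInt32Async();               // 4 bytes
        var compressed = zip64
            ? (long)await reader.ReadUInt64Async()              // 8 bytes under Zip64
            : await reader.ReadUInt32Async();                   // 4 bytes otherwise
        var uncompressed = zip64
            ? (long)await reader.ReadUInt64Async()
            : await reader.ReadUInt32Async();
        return new DataDescriptor(crc, compressed, uncompressed); // 12 or 20 bytes total
    }
}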

View File

@@ -104,7 +104,7 @@ public static class ADCBase
/// <param name="bufferSize">Max size for decompressed data</param>
/// <param name="cancellationToken">Cancellation token</param>
/// <returns>Result containing bytes read and decompressed data</returns>
public static async Task<AdcDecompressResult> DecompressAsync(
public static async ValueTask<AdcDecompressResult> DecompressAsync(
byte[] input,
int bufferSize = 262144,
CancellationToken cancellationToken = default
@@ -117,7 +117,7 @@ public static class ADCBase
/// <param name="bufferSize">Max size for decompressed data</param>
/// <param name="cancellationToken">Cancellation token</param>
/// <returns>Result containing bytes read and decompressed data</returns>
public static async Task<AdcDecompressResult> DecompressAsync(
public static async ValueTask<AdcDecompressResult> DecompressAsync(
Stream input,
int bufferSize = 262144,
CancellationToken cancellationToken = default

View File

@@ -30,7 +30,6 @@ public sealed class BZip2Stream : Stream, IStreamStack
private readonly Stream stream;
private bool isDisposed;
private readonly bool leaveOpen;
/// <summary>
/// Create a BZip2Stream
@@ -38,30 +37,19 @@ public sealed class BZip2Stream : Stream, IStreamStack
/// <param name="stream">The stream to read from</param>
/// <param name="compressionMode">Compression Mode</param>
/// <param name="decompressConcatenated">Decompress Concatenated</param>
/// <param name="leaveOpen">Leave the stream open after disposing</param>
public BZip2Stream(
Stream stream,
CompressionMode compressionMode,
bool decompressConcatenated,
bool leaveOpen = false
)
public BZip2Stream(Stream stream, CompressionMode compressionMode, bool decompressConcatenated)
{
#if DEBUG_STREAMS
this.DebugConstruct(typeof(BZip2Stream));
#endif
this.leaveOpen = leaveOpen;
Mode = compressionMode;
if (Mode == CompressionMode.Compress)
{
this.stream = new CBZip2OutputStream(stream, 9, leaveOpen);
this.stream = new CBZip2OutputStream(stream);
}
else
{
this.stream = new CBZip2InputStream(
stream,
decompressConcatenated,
leaveOpen: leaveOpen
);
this.stream = new CBZip2InputStream(stream, decompressConcatenated);
}
}
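
Editor's note: with the leaveOpen parameter removed, BZip2Stream now always disposes the stream it wraps. A hedged sketch of how a caller that previously relied on leaveOpen could keep its own stream alive, using the SharpCompressStream.Create(..., leaveOpen: true) wrapper that appears elsewhere in this change; whether that wrapper is the intended replacement is an assumption.

using System.IO;
using SharpCompress.Compressors;
using SharpCompress.Compressors.BZip2;
using SharpCompress.IO;

// Sketch: keep ownership of baseStream now that BZip2Stream disposes whatever it wraps.
using var baseStream = File.OpenRead("data.bz2");
var nonOwning = SharpCompressStream.Create(baseStream, leaveOpen: true); // wrapper used elsewhere in this change
using (var bzip2 = new BZip2Stream(nonOwning, CompressionMode.Decompress, decompressConcatenated: false))
{
    bzip2.CopyTo(Stream.Null); // disposing bzip2 disposes the wrapper, not baseStream
}
// baseStream is still open and usable here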

View File

@@ -168,7 +168,6 @@ internal class CBZip2InputStream : Stream, IStreamStack
private int computedBlockCRC,
computedCombinedCRC;
private readonly bool decompressConcatenated;
private readonly bool leaveOpen;
private int i2,
count,
@@ -182,10 +181,9 @@ internal class CBZip2InputStream : Stream, IStreamStack
private char z;
private bool isDisposed;
public CBZip2InputStream(Stream zStream, bool decompressConcatenated, bool leaveOpen = false)
public CBZip2InputStream(Stream zStream, bool decompressConcatenated)
{
this.decompressConcatenated = decompressConcatenated;
this.leaveOpen = leaveOpen;
ll8 = null;
tt = null;
BsSetStream(zStream);
@@ -209,10 +207,7 @@ internal class CBZip2InputStream : Stream, IStreamStack
this.DebugDispose(typeof(CBZip2InputStream));
#endif
base.Dispose(disposing);
if (!leaveOpen)
{
bsStream?.Dispose();
}
bsStream?.Dispose();
}
internal static int[][] InitIntArray(int n1, int n2)
@@ -403,10 +398,7 @@ internal class CBZip2InputStream : Stream, IStreamStack
private void BsFinishedWithStream()
{
if (!leaveOpen)
{
bsStream?.Dispose();
}
bsStream?.Dispose();
bsStream = null;
}

View File

@@ -341,14 +341,12 @@ internal sealed class CBZip2OutputStream : Stream, IStreamStack
private int currentChar = -1;
private int runLength;
private readonly bool leaveOpen;
public CBZip2OutputStream(Stream inStream, bool leaveOpen = false)
: this(inStream, 9, leaveOpen) { }
public CBZip2OutputStream(Stream inStream)
: this(inStream, 9) { }
public CBZip2OutputStream(Stream inStream, int inBlockSize, bool leaveOpen = false)
public CBZip2OutputStream(Stream inStream, int inBlockSize)
{
this.leaveOpen = leaveOpen;
block = null;
quadrant = null;
zptr = null;
@@ -483,10 +481,7 @@ internal sealed class CBZip2OutputStream : Stream, IStreamStack
this.DebugDispose(typeof(CBZip2OutputStream));
#endif
Dispose();
if (!leaveOpen)
{
bsStream?.Dispose();
}
bsStream?.Dispose();
bsStream = null;
}
}

View File

@@ -400,7 +400,7 @@ internal class ZlibBaseStream : Stream, IStreamStack
}
}
private async Task finishAsync(CancellationToken cancellationToken = default)
private async ValueTask finishAsync(CancellationToken cancellationToken = default)
{
if (_z is null)
{
@@ -586,13 +586,7 @@ internal class ZlibBaseStream : Stream, IStreamStack
public override void Flush()
{
// Only flush the underlying stream when in write mode
// Flushing input streams during read operations is not meaningful
// and can cause issues with forward-only/non-seekable streams
if (_streamMode == StreamMode.Writer)
{
_stream.Flush();
}
_stream.Flush();
//rewind the buffer
((IStreamStack)this).Rewind(z.AvailableBytesIn); //unused
z.AvailableBytesIn = 0;
@@ -600,13 +594,7 @@ internal class ZlibBaseStream : Stream, IStreamStack
public override async Task FlushAsync(CancellationToken cancellationToken)
{
// Only flush the underlying stream when in write mode
// Flushing input streams during read operations is not meaningful
// and can cause issues with forward-only/non-seekable streams
if (_streamMode == StreamMode.Writer)
{
await _stream.FlushAsync(cancellationToken).ConfigureAwait(false);
}
await _stream.FlushAsync(cancellationToken).ConfigureAwait(false);
//rewind the buffer
((IStreamStack)this).Rewind(z.AvailableBytesIn); //unused
z.AvailableBytesIn = 0;
@@ -658,7 +646,9 @@ internal class ZlibBaseStream : Stream, IStreamStack
return _encoding.GetString(buffer, 0, buffer.Length);
}
private async Task<string> ReadZeroTerminatedStringAsync(CancellationToken cancellationToken)
private async ValueTask<string> ReadZeroTerminatedStringAsync(
CancellationToken cancellationToken
)
{
var list = new List<byte>();
var done = false;
@@ -741,7 +731,9 @@ internal class ZlibBaseStream : Stream, IStreamStack
return totalBytesRead;
}
private async Task<int> _ReadAndValidateGzipHeaderAsync(CancellationToken cancellationToken)
private async ValueTask<int> _ReadAndValidateGzipHeaderAsync(
CancellationToken cancellationToken
)
{
var totalBytesRead = 0;

View File

@@ -87,7 +87,7 @@ internal class OutWindow : IDisposable
_stream = null;
}
public async Task ReleaseStreamAsync(CancellationToken cancellationToken = default)
public async ValueTask ReleaseStreamAsync(CancellationToken cancellationToken = default)
{
await FlushAsync(cancellationToken).ConfigureAwait(false);
_stream = null;
@@ -112,7 +112,7 @@ internal class OutWindow : IDisposable
_streamPos = _pos;
}
private async Task FlushAsync(CancellationToken cancellationToken = default)
private async ValueTask FlushAsync(CancellationToken cancellationToken = default)
{
if (_stream is null)
{
@@ -303,7 +303,7 @@ internal class OutWindow : IDisposable
return len - size;
}
public async Task<int> CopyStreamAsync(
public async ValueTask<int> CopyStreamAsync(
Stream stream,
int len,
CancellationToken cancellationToken = default

View File

@@ -46,13 +46,11 @@ public sealed class LZipStream : Stream, IStreamStack
private long _writeCount;
private readonly Stream? _originalStream;
private readonly bool _leaveOpen;
public LZipStream(Stream stream, CompressionMode mode, bool leaveOpen = false)
public LZipStream(Stream stream, CompressionMode mode)
{
Mode = mode;
_originalStream = stream;
_leaveOpen = leaveOpen;
if (mode == CompressionMode.Decompress)
{
@@ -62,7 +60,7 @@ public sealed class LZipStream : Stream, IStreamStack
throw new InvalidFormatException("Not an LZip stream");
}
var properties = GetProperties(dSize);
_stream = new LzmaStream(properties, stream, leaveOpen: leaveOpen);
_stream = new LzmaStream(properties, stream);
}
else
{
@@ -129,7 +127,7 @@ public sealed class LZipStream : Stream, IStreamStack
{
Finish();
_stream.Dispose();
if (Mode == CompressionMode.Compress && !_leaveOpen)
if (Mode == CompressionMode.Compress)
{
_originalStream?.Dispose();
}

View File

@@ -35,7 +35,6 @@ public class LzmaStream : Stream, IStreamStack
private readonly Stream _inputStream;
private readonly long _inputSize;
private readonly long _outputSize;
private readonly bool _leaveOpen;
private readonly int _dictionarySize;
private readonly OutWindow _outWindow = new();
@@ -57,28 +56,14 @@ public class LzmaStream : Stream, IStreamStack
private readonly Encoder _encoder;
private bool _isDisposed;
public LzmaStream(byte[] properties, Stream inputStream, bool leaveOpen = false)
: this(properties, inputStream, -1, -1, null, properties.Length < 5, leaveOpen) { }
public LzmaStream(byte[] properties, Stream inputStream)
: this(properties, inputStream, -1, -1, null, properties.Length < 5) { }
public LzmaStream(byte[] properties, Stream inputStream, long inputSize, bool leaveOpen = false)
: this(properties, inputStream, inputSize, -1, null, properties.Length < 5, leaveOpen) { }
public LzmaStream(byte[] properties, Stream inputStream, long inputSize)
: this(properties, inputStream, inputSize, -1, null, properties.Length < 5) { }
public LzmaStream(
byte[] properties,
Stream inputStream,
long inputSize,
long outputSize,
bool leaveOpen = false
)
: this(
properties,
inputStream,
inputSize,
outputSize,
null,
properties.Length < 5,
leaveOpen
) { }
public LzmaStream(byte[] properties, Stream inputStream, long inputSize, long outputSize)
: this(properties, inputStream, inputSize, outputSize, null, properties.Length < 5) { }
public LzmaStream(
byte[] properties,
@@ -86,15 +71,13 @@ public class LzmaStream : Stream, IStreamStack
long inputSize,
long outputSize,
Stream presetDictionary,
bool isLzma2,
bool leaveOpen = false
bool isLzma2
)
{
_inputStream = inputStream;
_inputSize = inputSize;
_outputSize = outputSize;
_isLzma2 = isLzma2;
_leaveOpen = leaveOpen;
#if DEBUG_STREAMS
this.DebugConstruct(typeof(LzmaStream));
@@ -196,10 +179,7 @@ public class LzmaStream : Stream, IStreamStack
{
_position = _encoder.Code(null, true);
}
if (!_leaveOpen)
{
_inputStream?.Dispose();
}
_inputStream?.Dispose();
_outWindow.Dispose();
}
base.Dispose(disposing);
@@ -449,7 +429,7 @@ public class LzmaStream : Stream, IStreamStack
{
var controlBuffer = new byte[1];
await _inputStream
.ReadExactlyAsync(controlBuffer, 0, 1, cancellationToken)
.ReadExactAsync(controlBuffer, 0, 1, cancellationToken)
.ConfigureAwait(false);
var control = controlBuffer[0];
_inputPosition++;
@@ -478,13 +458,13 @@ public class LzmaStream : Stream, IStreamStack
_availableBytes = (control & 0x1F) << 16;
var buffer = new byte[2];
await _inputStream
.ReadExactlyAsync(buffer, 0, 2, cancellationToken)
.ReadExactAsync(buffer, 0, 2, cancellationToken)
.ConfigureAwait(false);
_availableBytes += (buffer[0] << 8) + buffer[1] + 1;
_inputPosition += 2;
await _inputStream
.ReadExactlyAsync(buffer, 0, 2, cancellationToken)
.ReadExactAsync(buffer, 0, 2, cancellationToken)
.ConfigureAwait(false);
_rangeDecoderLimit = (buffer[0] << 8) + buffer[1] + 1;
_inputPosition += 2;
@@ -493,7 +473,7 @@ public class LzmaStream : Stream, IStreamStack
{
_needProps = false;
await _inputStream
.ReadExactlyAsync(controlBuffer, 0, 1, cancellationToken)
.ReadExactAsync(controlBuffer, 0, 1, cancellationToken)
.ConfigureAwait(false);
Properties[0] = controlBuffer[0];
_inputPosition++;
@@ -522,7 +502,7 @@ public class LzmaStream : Stream, IStreamStack
_uncompressedChunk = true;
var buffer = new byte[2];
await _inputStream
.ReadExactlyAsync(buffer, 0, 2, cancellationToken)
.ReadExactAsync(buffer, 0, 2, cancellationToken)
.ConfigureAwait(false);
_availableBytes = (buffer[0] << 8) + buffer[1] + 1;
_inputPosition += 2;

View File

@@ -53,39 +53,4 @@ internal static class Utils
throw new InvalidOperationException("Assertion failed.");
}
}
public static void ReadExact(this Stream stream, byte[] buffer, int offset, int length)
{
if (stream is null)
{
throw new ArgumentNullException(nameof(stream));
}
if (buffer is null)
{
throw new ArgumentNullException(nameof(buffer));
}
if (offset < 0 || offset > buffer.Length)
{
throw new ArgumentOutOfRangeException(nameof(offset));
}
if (length < 0 || length > buffer.Length - offset)
{
throw new ArgumentOutOfRangeException(nameof(length));
}
while (length > 0)
{
var fetched = stream.Read(buffer, offset, length);
if (fetched <= 0)
{
throw new EndOfStreamException();
}
offset += fetched;
length -= fetched;
}
}
}
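
Editor's note: the removed ReadExact helper guarded against short reads by looping until the requested length was available. Callers on .NET 7+ can lean on the framework's Stream.ReadExactly; on older targets the same loop is easy to restate. A minimal sketch, not part of the library:

using System.IO;

internal static class ReadExactSample
{
    // Equivalent of the removed helper: read exactly `length` bytes or throw at end of stream.
    internal static void ReadExact(Stream stream, byte[] buffer, int offset, int length)
    {
#if NET7_0_OR_GREATER
        stream.ReadExactly(buffer, offset, length); // framework method, .NET 7+
#else
        while (length > 0)
        {
            var fetched = stream.Read(buffer, offset, length);
            if (fetched <= 0)
            {
                throw new EndOfStreamException();
            }
            offset += fetched;
            length -= fetched;
        }
#endif
    }
}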

View File

@@ -68,7 +68,7 @@ internal class RarStream : Stream, IStreamStack
_position = 0;
}
public async Task InitializeAsync(CancellationToken cancellationToken = default)
public async ValueTask InitializeAsync(CancellationToken cancellationToken = default)
{
fetch = true;
await unpack.DoUnpackAsync(fileHeader, readStream, this, cancellationToken);

View File

@@ -58,7 +58,7 @@ internal static class MultiByteIntegers
MaxBytes = 9;
}
var LastByte = await ReadByteAsync(reader, cancellationToken).ConfigureAwait(false);
var LastByte = await reader.ReadByteAsync(cancellationToken).ConfigureAwait(false);
var Output = (ulong)LastByte & 0x7F;
var i = 0;
@@ -69,7 +69,7 @@ internal static class MultiByteIntegers
throw new InvalidFormatException();
}
LastByte = await ReadByteAsync(reader, cancellationToken).ConfigureAwait(false);
LastByte = await reader.ReadByteAsync(cancellationToken).ConfigureAwait(false);
if (LastByte == 0)
{
throw new InvalidFormatException();
@@ -79,37 +79,4 @@ internal static class MultiByteIntegers
}
return Output;
}
public static async Task<byte> ReadByteAsync(
this BinaryReader reader,
CancellationToken cancellationToken = default
)
{
var buffer = new byte[1];
var bytesRead = await reader
.BaseStream.ReadAsync(buffer, 0, 1, cancellationToken)
.ConfigureAwait(false);
if (bytesRead != 1)
{
throw new EndOfStreamException();
}
return buffer[0];
}
public static async Task<byte[]> ReadBytesAsync(
this BinaryReader reader,
int count,
CancellationToken cancellationToken = default
)
{
var buffer = new byte[count];
var bytesRead = await reader
.BaseStream.ReadAsync(buffer, 0, count, cancellationToken)
.ConfigureAwait(false);
if (bytesRead != count)
{
throw new EndOfStreamException();
}
return buffer;
}
}
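
Editor's note: the reader above consumes XZ's variable-length integer encoding: each byte contributes its low 7 bits, least-significant group first, and the high bit marks a continuation. The full loop is only partially visible in this diff, so the sketch below illustrates the format rather than copying the implementation; the library additionally caps the length at 9 bytes and rejects non-minimal encodings.

using System;

internal static class XzIntegerSample
{
    // 7 bits per byte, LSB group first, 0x80 set on every byte except the last.
    // Example: { 0xC8, 0x01 } -> 0x48 | (0x01 << 7) = 200.
    internal static ulong Decode(ReadOnlySpan<byte> bytes)
    {
        var output = (ulong)bytes[0] & 0x7F;
        var i = 0;
        while ((bytes[i] & 0x80) != 0)
        {
            i++;
            output |= ((ulong)bytes[i] & 0x7F) << (i * 7);
        }
        return output;
    }
}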

View File

@@ -132,7 +132,7 @@ public sealed class XZBlock : XZReadOnlyStream
_paddingSkipped = true;
}
private async Task SkipPaddingAsync(CancellationToken cancellationToken = default)
private async ValueTask SkipPaddingAsync(CancellationToken cancellationToken = default)
{
var bytes = (BaseStream.Position - _startPosition) % 4;
if (bytes > 0)
@@ -158,7 +158,7 @@ public sealed class XZBlock : XZReadOnlyStream
_crcChecked = true;
}
private async Task CheckCrcAsync(CancellationToken cancellationToken = default)
private async ValueTask CheckCrcAsync(CancellationToken cancellationToken = default)
{
var crc = new byte[_checkSize];
await BaseStream.ReadAsync(crc, 0, _checkSize, cancellationToken).ConfigureAwait(false);
@@ -194,7 +194,7 @@ public sealed class XZBlock : XZReadOnlyStream
HeaderIsLoaded = true;
}
private async Task LoadHeaderAsync(CancellationToken cancellationToken = default)
private async ValueTask LoadHeaderAsync(CancellationToken cancellationToken = default)
{
await ReadHeaderSizeAsync(cancellationToken).ConfigureAwait(false);
var headerCache = await CacheHeaderAsync(cancellationToken).ConfigureAwait(false);
@@ -218,7 +218,7 @@ public sealed class XZBlock : XZReadOnlyStream
}
}
private async Task ReadHeaderSizeAsync(CancellationToken cancellationToken = default)
private async ValueTask ReadHeaderSizeAsync(CancellationToken cancellationToken = default)
{
var buffer = new byte[1];
await BaseStream.ReadAsync(buffer, 0, 1, cancellationToken).ConfigureAwait(false);
@@ -249,7 +249,7 @@ public sealed class XZBlock : XZReadOnlyStream
return blockHeaderWithoutCrc;
}
private async Task<byte[]> CacheHeaderAsync(CancellationToken cancellationToken = default)
private async ValueTask<byte[]> CacheHeaderAsync(CancellationToken cancellationToken = default)
{
var blockHeaderWithoutCrc = new byte[BlockHeaderSize - 4];
blockHeaderWithoutCrc[0] = _blockHeaderSizeByte;

View File

@@ -62,7 +62,7 @@ public class XZFooter
}
}
public async Task ProcessAsync(CancellationToken cancellationToken = default)
public async ValueTask ProcessAsync(CancellationToken cancellationToken = default)
{
var crc = await _reader
.BaseStream.ReadLittleEndianUInt32Async(cancellationToken)

View File

@@ -41,7 +41,7 @@ public class XZHeader
ProcessStreamFlags();
}
public async Task ProcessAsync(CancellationToken cancellationToken = default)
public async ValueTask ProcessAsync(CancellationToken cancellationToken = default)
{
CheckMagicBytes(await _reader.ReadBytesAsync(6, cancellationToken).ConfigureAwait(false));
await ProcessStreamFlagsAsync(cancellationToken).ConfigureAwait(false);
@@ -65,7 +65,7 @@ public class XZHeader
}
}
private async Task ProcessStreamFlagsAsync(CancellationToken cancellationToken = default)
private async ValueTask ProcessStreamFlagsAsync(CancellationToken cancellationToken = default)
{
var streamFlags = await _reader.ReadBytesAsync(2, cancellationToken).ConfigureAwait(false);
var crc = await _reader

View File

@@ -41,7 +41,7 @@ public class XZIndex
return index;
}
public static async Task<XZIndex> FromStreamAsync(
public static async ValueTask<XZIndex> FromStreamAsync(
Stream stream,
bool indexMarkerAlreadyVerified,
CancellationToken cancellationToken = default
@@ -71,7 +71,7 @@ public class XZIndex
VerifyCrc32();
}
public async Task ProcessAsync(CancellationToken cancellationToken = default)
public async ValueTask ProcessAsync(CancellationToken cancellationToken = default)
{
if (!_indexMarkerAlreadyVerified)
{
@@ -100,7 +100,7 @@ public class XZIndex
}
}
private async Task VerifyIndexMarkerAsync(CancellationToken cancellationToken = default)
private async ValueTask VerifyIndexMarkerAsync(CancellationToken cancellationToken = default)
{
var marker = await _reader.ReadByteAsync(cancellationToken).ConfigureAwait(false);
if (marker != 0)
@@ -122,7 +122,7 @@ public class XZIndex
}
}
private async Task SkipPaddingAsync(CancellationToken cancellationToken = default)
private async ValueTask SkipPaddingAsync(CancellationToken cancellationToken = default)
{
var bytes = (int)(_reader.BaseStream.Position - StreamStartPosition) % 4;
if (bytes > 0)
@@ -143,7 +143,7 @@ public class XZIndex
// TODO verify this matches
}
private async Task VerifyCrc32Async(CancellationToken cancellationToken = default)
private async ValueTask VerifyCrc32Async(CancellationToken cancellationToken = default)
{
var crc = await _reader
.BaseStream.ReadLittleEndianUInt32Async(cancellationToken)

View File

@@ -142,7 +142,7 @@ public sealed class XZStream : XZReadOnlyStream, IStreamStack
HeaderIsRead = true;
}
private async Task ReadHeaderAsync(CancellationToken cancellationToken = default)
private async ValueTask ReadHeaderAsync(CancellationToken cancellationToken = default)
{
Header = await XZHeader
.FromStreamAsync(BaseStream, cancellationToken)
@@ -153,7 +153,7 @@ public sealed class XZStream : XZReadOnlyStream, IStreamStack
private void ReadIndex() => Index = XZIndex.FromStream(BaseStream, true);
private async Task ReadIndexAsync(CancellationToken cancellationToken = default) =>
private async ValueTask ReadIndexAsync(CancellationToken cancellationToken = default) =>
Index = await XZIndex
.FromStreamAsync(BaseStream, true, cancellationToken)
.ConfigureAwait(false);
@@ -162,7 +162,7 @@ public sealed class XZStream : XZReadOnlyStream, IStreamStack
private void ReadFooter() => Footer = XZFooter.FromStream(BaseStream);
// TODO verify footer
private async Task ReadFooterAsync(CancellationToken cancellationToken = default) =>
private async ValueTask ReadFooterAsync(CancellationToken cancellationToken = default) =>
Footer = await XZFooter
.FromStreamAsync(BaseStream, cancellationToken)
.ConfigureAwait(false);
@@ -202,7 +202,7 @@ public sealed class XZStream : XZReadOnlyStream, IStreamStack
return bytesRead;
}
private async Task<int> ReadBlocksAsync(
private async ValueTask<int> ReadBlocksAsync(
byte[] buffer,
int offset,
int count,

View File

@@ -77,7 +77,7 @@ public class CompressionStream : Stream
#if !NETSTANDARD2_0 && !NETFRAMEWORK
public override async ValueTask DisposeAsync()
#else
public async Task DisposeAsync()
public async ValueTask DisposeAsync()
#endif
{
if (compressor == null)
@@ -137,7 +137,7 @@ public class CompressionStream : Stream
private void FlushInternal(ZSTD_EndDirective directive) => WriteInternal(null, directive);
private async Task FlushInternalAsync(
private async ValueTask FlushInternalAsync(
ZSTD_EndDirective directive,
CancellationToken cancellationToken = default
) => await WriteInternalAsync(null, directive, cancellationToken).ConfigureAwait(false);
@@ -183,7 +183,7 @@ public class CompressionStream : Stream
CancellationToken cancellationToken = default
)
#else
private async Task WriteInternalAsync(
private async ValueTask WriteInternalAsync(
ReadOnlyMemory<byte>? buffer,
ZSTD_EndDirective directive,
CancellationToken cancellationToken = default
@@ -235,14 +235,16 @@ public class CompressionStream : Stream
.ConfigureAwait(false);
#else
public override Task WriteAsync(
public override async Task WriteAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
) => WriteAsync(new ReadOnlyMemory<byte>(buffer, offset, count), cancellationToken);
) =>
await WriteAsync(new ReadOnlyMemory<byte>(buffer, offset, count), cancellationToken)
.ConfigureAwait(false);
public async Task WriteAsync(
public async ValueTask WriteAsync(
ReadOnlyMemory<byte> buffer,
CancellationToken cancellationToken = default
) =>

View File

@@ -177,9 +177,9 @@ public class DecompressionStream : Stream
int offset,
int count,
CancellationToken cancellationToken
) => ReadAsync(new Memory<byte>(buffer, offset, count), cancellationToken);
) => ReadAsync(new Memory<byte>(buffer, offset, count), cancellationToken).AsTask();
public async Task<int> ReadAsync(
public async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
)

View File

@@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Ace.Headers;
@@ -22,12 +23,25 @@ namespace SharpCompress.Factories
yield return "ace";
}
public override bool IsArchive(Stream stream, string? password = null)
{
return AceHeader.IsArchive(stream);
}
public override bool IsArchive(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => AceHeader.IsArchive(stream);
public IReader OpenReader(Stream stream, ReaderOptions? options) =>
AceReader.Open(stream, options);
public ValueTask<IAsyncReader> OpenReaderAsync(
Stream stream,
ReaderOptions? options,
CancellationToken cancellationToken = default
) => new(AceReader.Open(stream, options));
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => new(IsArchive(stream, password, bufferSize));
}
}

View File

@@ -4,6 +4,7 @@ using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
@@ -23,7 +24,11 @@ namespace SharpCompress.Factories
yield return "arc";
}
public override bool IsArchive(Stream stream, string? password = null)
public override bool IsArchive(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
//You may have to use some(paranoid) checks to ensure that you actually are
//processing an ARC file, since other archivers also adopted the idea of putting
@@ -38,5 +43,17 @@ namespace SharpCompress.Factories
public IReader OpenReader(Stream stream, ReaderOptions? options) =>
ArcReader.Open(stream, options);
public ValueTask<IAsyncReader> OpenReaderAsync(
Stream stream,
ReaderOptions? options,
CancellationToken cancellationToken = default
) => new(ArcReader.Open(stream, options));
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => new(IsArchive(stream, password, bufferSize));
}
}

View File

@@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Arj.Headers;
@@ -22,12 +23,28 @@ namespace SharpCompress.Factories
yield return "arj";
}
public override bool IsArchive(Stream stream, string? password = null)
public override bool IsArchive(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
return ArjHeader.IsArchive(stream);
}
public IReader OpenReader(Stream stream, ReaderOptions? options) =>
ArjReader.Open(stream, options);
public ValueTask<IAsyncReader> OpenReaderAsync(
Stream stream,
ReaderOptions? options,
CancellationToken cancellationToken = default
) => new(ArjReader.Open(stream, options));
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => new(IsArchive(stream, password, bufferSize));
}
}

View File

@@ -1,6 +1,8 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -51,7 +53,29 @@ public abstract class Factory : IFactory
public abstract IEnumerable<string> GetSupportedExtensions();
/// <inheritdoc/>
public abstract bool IsArchive(Stream stream, string? password = null);
public abstract bool IsArchive(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
);
public abstract ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
);
/// <inheritdoc/>
public virtual ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(IsArchive(stream, password, bufferSize));
}
/// <inheritdoc/>
public virtual FileInfo? GetFilePart(int index, FileInfo part1) => null;
@@ -78,7 +102,7 @@ public abstract class Factory : IFactory
{
long pos = ((IStreamStack)stream).GetPosition();
if (IsArchive(stream, options.Password))
if (IsArchive(stream, options.Password, options.BufferSize))
{
((IStreamStack)stream).StackSeek(pos);
reader = readerFactory.OpenReader(stream, options);
@@ -88,4 +112,34 @@ public abstract class Factory : IFactory
return false;
}
internal virtual async ValueTask<(bool, IAsyncReader?)> TryOpenReaderAsync(
SharpCompressStream stream,
ReaderOptions options,
CancellationToken cancellationToken
)
{
if (this is IReaderFactory readerFactory)
{
long pos = ((IStreamStack)stream).GetPosition();
if (
await IsArchiveAsync(
stream,
options.Password,
options.BufferSize,
cancellationToken
)
)
{
((IStreamStack)stream).StackSeek(pos);
return (
true,
await readerFactory.OpenReaderAsync(stream, options, cancellationToken)
);
}
}
return (false, null);
}
}
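
Editor's note: TryOpenReaderAsync above is the internal side of the new detection flow; from the outside the same pattern is IsArchiveAsync followed by OpenReaderAsync. A hedged consumer sketch against a single known factory; the rewind handling here is simplified glue, whereas the internal path goes through SharpCompressStream/IStreamStack, and the IAsyncReader namespace is assumed.

using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Factories;
using SharpCompress.Readers;

internal static class DetectionSample
{
    // Sketch: probe a seekable stream with the ZipFactory added in this change.
    internal static async ValueTask<IAsyncReader?> TryOpenZipReaderAsync(Stream stream, CancellationToken ct)
    {
        var factory = new ZipFactory();
        var start = stream.CanSeek ? stream.Position : -1;
        if (!await factory.IsArchiveAsync(stream, null, ReaderOptions.DefaultBufferSize, ct))
        {
            return null;
        }
        if (stream.CanSeek)
        {
            stream.Position = start; // detection may have advanced the stream
        }
        return await factory.OpenReaderAsync(stream, null, ct);
    }
}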

View File

@@ -1,6 +1,8 @@
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.GZip;
using SharpCompress.Archives.Tar;
@@ -40,8 +42,19 @@ public class GZipFactory
}
/// <inheritdoc/>
public override bool IsArchive(Stream stream, string? password = null) =>
GZipArchive.IsGZipFile(stream);
public override bool IsArchive(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => GZipArchive.IsGZipFile(stream);
/// <inheritdoc/>
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
) => GZipArchive.IsGZipFileAsync(stream, cancellationToken);
#endregion
@@ -51,10 +64,30 @@ public class GZipFactory
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
GZipArchive.Open(stream, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => GZipArchive.OpenAsync(stream, readerOptions, cancellationToken);
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => new(IsArchive(stream, password, bufferSize));
/// <inheritdoc/>
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
GZipArchive.Open(fileInfo, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => GZipArchive.OpenAsync(fileInfo, readerOptions, cancellationToken);
#endregion
#region IMultiArchiveFactory
@@ -63,10 +96,24 @@ public class GZipFactory
public IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null) =>
GZipArchive.Open(streams, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => GZipArchive.OpenAsync(streams, readerOptions, cancellationToken);
/// <inheritdoc/>
public IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null) =>
GZipArchive.Open(fileInfos, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => GZipArchive.OpenAsync(fileInfos, readerOptions, cancellationToken);
#endregion
#region IReaderFactory
@@ -105,6 +152,17 @@ public class GZipFactory
public IReader OpenReader(Stream stream, ReaderOptions? options) =>
GZipReader.Open(stream, options);
/// <inheritdoc/>
public ValueTask<IAsyncReader> OpenReaderAsync(
Stream stream,
ReaderOptions? options,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(GZipReader.Open(stream, options));
}
#endregion
#region IWriterFactory
@@ -119,6 +177,17 @@ public class GZipFactory
return new GZipWriter(stream, new GZipWriterOptions(writerOptions));
}
/// <inheritdoc/>
public ValueTask<IWriter> OpenAsync(
Stream stream,
WriterOptions writerOptions,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(stream, writerOptions));
}
#endregion
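
Editor's note: several of the new async factory members above are thin wrappers — check the token, run the existing synchronous open, and hand the result back in a ValueTask so the synchronous path allocates no Task. The generic shape of that pattern in isolation (illustrative, not library code):

using System;
using System.Threading;
using System.Threading.Tasks;

internal static class ValueTaskWrapperSample
{
    // Run the synchronous open, return an already-completed ValueTask.
    internal static ValueTask<T> FromSync<T>(Func<T> open, CancellationToken ct)
    {
        ct.ThrowIfCancellationRequested(); // honor cancellation before doing any work
        return new ValueTask<T>(open());   // completed ValueTask; awaiting it never yields
    }
}

// e.g. await ValueTaskWrapperSample.FromSync(() => GZipReader.Open(stream, options), ct);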
#region IWriteableArchiveFactory

View File

@@ -1,5 +1,7 @@
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Readers;
namespace SharpCompress.Factories;
@@ -36,7 +38,25 @@ public interface IFactory
/// </summary>
/// <param name="stream">A stream, pointing to the beginning of the archive.</param>
/// <param name="password">optional password</param>
bool IsArchive(Stream stream, string? password = null);
bool IsArchive(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
);
/// <summary>
/// Asynchronously returns true if the stream represents an archive of the format defined by this type.
/// </summary>
/// <param name="stream">A stream, pointing to the beginning of the archive.</param>
/// <param name="password">optional password</param>
/// <param name="bufferSize">buffer size for reading</param>
/// <param name="cancellationToken">cancellation token</param>
ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
);
/// <summary>
/// From a passed in archive (zip, rar, 7z, 001), return all parts.

View File

@@ -1,5 +1,7 @@
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Rar;
using SharpCompress.Common;
@@ -29,8 +31,11 @@ public class RarFactory : Factory, IArchiveFactory, IMultiArchiveFactory, IReade
}
/// <inheritdoc/>
public override bool IsArchive(Stream stream, string? password = null) =>
RarArchive.IsRarFile(stream);
public override bool IsArchive(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => RarArchive.IsRarFile(stream);
/// <inheritdoc/>
public override FileInfo? GetFilePart(int index, FileInfo part1) =>
@@ -44,10 +49,30 @@ public class RarFactory : Factory, IArchiveFactory, IMultiArchiveFactory, IReade
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
RarArchive.Open(stream, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => RarArchive.OpenAsync(stream, readerOptions, cancellationToken);
/// <inheritdoc/>
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
RarArchive.Open(fileInfo, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => RarArchive.OpenAsync(fileInfo, readerOptions, cancellationToken);
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => new(IsArchive(stream, password, bufferSize));
#endregion
#region IMultiArchiveFactory
@@ -56,10 +81,24 @@ public class RarFactory : Factory, IArchiveFactory, IMultiArchiveFactory, IReade
public IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null) =>
RarArchive.Open(streams, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => RarArchive.OpenAsync(streams, readerOptions, cancellationToken);
/// <inheritdoc/>
public IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null) =>
RarArchive.Open(fileInfos, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => RarArchive.OpenAsync(fileInfos, readerOptions, cancellationToken);
#endregion
#region IReaderFactory
@@ -68,5 +107,16 @@ public class RarFactory : Factory, IArchiveFactory, IMultiArchiveFactory, IReade
public IReader OpenReader(Stream stream, ReaderOptions? options) =>
RarReader.Open(stream, options);
/// <inheritdoc/>
public ValueTask<IAsyncReader> OpenReaderAsync(
Stream stream,
ReaderOptions? options,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(RarReader.Open(stream, options));
}
#endregion
}
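
Editor's note: RarFactory now also exposes the multi-part overloads asynchronously. A hedged sketch of opening a split RAR set through the FileInfo-list overload added above; only RarArchive.OpenAsync comes from this change, and the assumption that the async archive type is IAsyncDisposable belongs to the sketch, not the source.

using System.Collections.Generic;
using System.IO;
using System.Threading;
using SharpCompress.Archives.Rar;

// Sketch: open archive.part1.rar, .part2.rar, ... as one archive via the new async overload.
var parts = new List<FileInfo>
{
    new("archive.part1.rar"),
    new("archive.part2.rar"),
    new("archive.part3.rar"),
};
await using var archive = await RarArchive.OpenAsync(parts, null, CancellationToken.None);
// entries can then be enumerated much like the synchronous API (exact async surface not shown here)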

View File

@@ -1,5 +1,7 @@
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.SevenZip;
using SharpCompress.Common;
@@ -28,8 +30,11 @@ public class SevenZipFactory : Factory, IArchiveFactory, IMultiArchiveFactory
}
/// <inheritdoc/>
public override bool IsArchive(Stream stream, string? password = null) =>
SevenZipArchive.IsSevenZipFile(stream);
public override bool IsArchive(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => SevenZipArchive.IsSevenZipFile(stream);
#endregion
@@ -39,10 +44,30 @@ public class SevenZipFactory : Factory, IArchiveFactory, IMultiArchiveFactory
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
SevenZipArchive.Open(stream, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => SevenZipArchive.OpenAsync(stream, readerOptions, cancellationToken);
/// <inheritdoc/>
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
SevenZipArchive.Open(fileInfo, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => SevenZipArchive.OpenAsync(fileInfo, readerOptions, cancellationToken);
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => new(IsArchive(stream, password, bufferSize));
#endregion
#region IMultiArchiveFactory
@@ -51,10 +76,24 @@ public class SevenZipFactory : Factory, IArchiveFactory, IMultiArchiveFactory
public IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null) =>
SevenZipArchive.Open(streams, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => SevenZipArchive.OpenAsync(streams, readerOptions, cancellationToken);
/// <inheritdoc/>
public IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null) =>
SevenZipArchive.Open(fileInfos, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => SevenZipArchive.OpenAsync(fileInfos, readerOptions, cancellationToken);
#endregion
#region reader

View File

@@ -2,6 +2,8 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Tar;
using SharpCompress.Common;
@@ -53,8 +55,17 @@ public class TarFactory
}
/// <inheritdoc/>
public override bool IsArchive(Stream stream, string? password = null) =>
TarArchive.IsTarFile(stream);
public override bool IsArchive(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => TarArchive.IsTarFile(stream);
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => new(IsArchive(stream, password, bufferSize));
#endregion
@@ -64,10 +75,24 @@ public class TarFactory
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
TarArchive.Open(stream, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => TarArchive.OpenAsync(stream, readerOptions, cancellationToken);
/// <inheritdoc/>
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
TarArchive.Open(fileInfo, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => TarArchive.OpenAsync(fileInfo, readerOptions, cancellationToken);
#endregion
#region IMultiArchiveFactory
@@ -76,10 +101,24 @@ public class TarFactory
public IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null) =>
TarArchive.Open(streams, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => TarArchive.OpenAsync(streams, readerOptions, cancellationToken);
/// <inheritdoc/>
public IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null) =>
TarArchive.Open(fileInfos, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => TarArchive.OpenAsync(fileInfos, readerOptions, cancellationToken);
#endregion
#region IReaderFactory
@@ -231,6 +270,17 @@ public class TarFactory
public IReader OpenReader(Stream stream, ReaderOptions? options) =>
TarReader.Open(stream, options);
/// <inheritdoc/>
public ValueTask<IAsyncReader> OpenReaderAsync(
Stream stream,
ReaderOptions? options,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(TarReader.Open(stream, options));
}
#endregion
#region IWriterFactory
@@ -239,6 +289,17 @@ public class TarFactory
public IWriter Open(Stream stream, WriterOptions writerOptions) =>
new TarWriter(stream, new TarWriterOptions(writerOptions));
/// <inheritdoc/>
public ValueTask<IWriter> OpenAsync(
Stream stream,
WriterOptions writerOptions,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(stream, writerOptions));
}
#endregion
#region IWriteableArchiveFactory

View File

@@ -20,6 +20,15 @@ internal class ZStandardFactory : Factory
yield return "zstd";
}
public override bool IsArchive(Stream stream, string? password = null) =>
ZStandardStream.IsZStandard(stream);
public override bool IsArchive(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => ZStandardStream.IsZStandard(stream);
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => new(IsArchive(stream, password, bufferSize));
}

View File

@@ -1,5 +1,7 @@
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
@@ -39,7 +41,11 @@ public class ZipFactory
}
/// <inheritdoc/>
public override bool IsArchive(Stream stream, string? password = null)
public override bool IsArchive(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
var startPosition = stream.CanSeek ? stream.Position : -1;
@@ -47,10 +53,10 @@ public class ZipFactory
if (stream is not SharpCompressStream) // wrap to provide buffer bef
{
stream = new SharpCompressStream(stream, bufferSize: Constants.BufferSize);
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
}
if (ZipArchive.IsZipFile(stream, password))
if (ZipArchive.IsZipFile(stream, password, bufferSize))
{
return true;
}
@@ -65,7 +71,56 @@ public class ZipFactory
stream.Position = startPosition;
//test the zip (last) file of a multipart zip
if (ZipArchive.IsZipMulti(stream, password))
if (ZipArchive.IsZipMulti(stream, password, bufferSize))
{
return true;
}
stream.Position = startPosition;
return false;
}
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => new(IsArchive(stream, password, bufferSize));
/// <inheritdoc/>
public override async ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
var startPosition = stream.CanSeek ? stream.Position : -1;
// probe for single volume zip
if (stream is not SharpCompressStream) // wrap to provide buffer bef
{
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
}
if (await ZipArchive.IsZipFileAsync(stream, password, bufferSize, cancellationToken))
{
return true;
}
// probe for a multipart zip
if (!stream.CanSeek)
{
return false;
}
stream.Position = startPosition;
//test the zip (last) file of a multipart zip
if (await ZipArchive.IsZipMultiAsync(stream, password, bufferSize, cancellationToken))
{
return true;
}
@@ -87,10 +142,24 @@ public class ZipFactory
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
ZipArchive.Open(stream, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => ZipArchive.OpenAsync(stream, readerOptions, cancellationToken);
/// <inheritdoc/>
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
ZipArchive.Open(fileInfo, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => ZipArchive.OpenAsync(fileInfo, readerOptions, cancellationToken);
#endregion
#region IMultiArchiveFactory
@@ -99,10 +168,24 @@ public class ZipFactory
public IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null) =>
ZipArchive.Open(streams, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => ZipArchive.OpenAsync(streams, readerOptions, cancellationToken);
/// <inheritdoc/>
public IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null) =>
ZipArchive.Open(fileInfos, readerOptions);
/// <inheritdoc/>
public ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => ZipArchive.OpenAsync(fileInfos, readerOptions, cancellationToken);
#endregion
#region IReaderFactory
@@ -111,6 +194,17 @@ public class ZipFactory
public IReader OpenReader(Stream stream, ReaderOptions? options) =>
ZipReader.Open(stream, options);
/// <inheritdoc/>
public ValueTask<IAsyncReader> OpenReaderAsync(
Stream stream,
ReaderOptions? options,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(ZipReader.Open(stream, options));
}
#endregion
#region IWriterFactory
@@ -119,6 +213,17 @@ public class ZipFactory
public IWriter Open(Stream stream, WriterOptions writerOptions) =>
new ZipWriter(stream, new ZipWriterOptions(writerOptions));
/// <inheritdoc/>
public ValueTask<IWriter> OpenAsync(
Stream stream,
WriterOptions writerOptions,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(stream, writerOptions));
}
#endregion
#region IWriteableArchiveFactory

View File

@@ -1,5 +1,4 @@
using System;
using System.Buffers;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
@@ -29,25 +28,14 @@ internal class BufferedSubStream : SharpCompressStream, IStreamStack
#if DEBUG_STREAMS
this.DebugDispose(typeof(BufferedSubStream));
#endif
if (_isDisposed)
{
return;
}
_isDisposed = true;
if (disposing && _cache is not null)
{
ArrayPool<byte>.Shared.Return(_cache);
_cache = null;
}
if (disposing) { }
base.Dispose(disposing);
}
private int _cacheOffset;
private int _cacheLength;
private byte[]? _cache = ArrayPool<byte>.Shared.Rent(81920);
private readonly byte[] _cache = new byte[32 << 10];
private long origin;
private bool _isDisposed;
private long BytesLeftToRead { get; set; }
@@ -69,26 +57,14 @@ internal class BufferedSubStream : SharpCompressStream, IStreamStack
private void RefillCache()
{
if (_isDisposed)
{
throw new ObjectDisposedException(nameof(BufferedSubStream));
}
var count = (int)Math.Min(BytesLeftToRead, _cache!.Length);
var count = (int)Math.Min(BytesLeftToRead, _cache.Length);
_cacheOffset = 0;
if (count == 0)
{
_cacheLength = 0;
return;
}
// Only seek if we're not already at the correct position
// This avoids expensive seek operations when reading sequentially
if (Stream.CanSeek && Stream.Position != origin)
{
Stream.Position = origin;
}
Stream.Position = origin;
_cacheLength = Stream.Read(_cache, 0, count);
origin += _cacheLength;
BytesLeftToRead -= _cacheLength;
@@ -96,24 +72,14 @@ internal class BufferedSubStream : SharpCompressStream, IStreamStack
private async ValueTask RefillCacheAsync(CancellationToken cancellationToken)
{
if (_isDisposed)
{
throw new ObjectDisposedException(nameof(BufferedSubStream));
}
var count = (int)Math.Min(BytesLeftToRead, _cache!.Length);
var count = (int)Math.Min(BytesLeftToRead, _cache.Length);
_cacheOffset = 0;
if (count == 0)
{
_cacheLength = 0;
return;
}
// Only seek if we're not already at the correct position
// This avoids expensive seek operations when reading sequentially
if (Stream.CanSeek && Stream.Position != origin)
{
Stream.Position = origin;
}
Stream.Position = origin;
_cacheLength = await Stream
.ReadAsync(_cache, 0, count, cancellationToken)
.ConfigureAwait(false);
@@ -136,7 +102,7 @@ internal class BufferedSubStream : SharpCompressStream, IStreamStack
}
count = Math.Min(count, _cacheLength - _cacheOffset);
Buffer.BlockCopy(_cache!, _cacheOffset, buffer, offset, count);
Buffer.BlockCopy(_cache, _cacheOffset, buffer, offset, count);
_cacheOffset += count;
}
@@ -154,7 +120,7 @@ internal class BufferedSubStream : SharpCompressStream, IStreamStack
}
}
return _cache![_cacheOffset++];
return _cache[_cacheOffset++];
}
public override async Task<int> ReadAsync(
@@ -177,7 +143,7 @@ internal class BufferedSubStream : SharpCompressStream, IStreamStack
}
count = Math.Min(count, _cacheLength - _cacheOffset);
Buffer.BlockCopy(_cache!, _cacheOffset, buffer, offset, count);
Buffer.BlockCopy(_cache, _cacheOffset, buffer, offset, count);
_cacheOffset += count;
}
@@ -204,7 +170,7 @@ internal class BufferedSubStream : SharpCompressStream, IStreamStack
}
count = Math.Min(count, _cacheLength - _cacheOffset);
_cache!.AsSpan(_cacheOffset, count).CopyTo(buffer.Span);
_cache.AsSpan(_cacheOffset, count).CopyTo(buffer.Span);
_cacheOffset += count;
}

View File

@@ -206,11 +206,11 @@ public class SharpCompressStream : Stream, IStreamStack
{
ValidateBufferState();
// Fill buffer if needed, handling short reads from underlying stream
// Fill buffer if needed
if (_bufferedLength == 0)
{
_bufferedLength = Stream.Read(_buffer!, 0, _bufferSize);
_bufferPosition = 0;
_bufferedLength = FillBuffer(_buffer!, 0, _bufferSize);
}
int available = _bufferedLength - _bufferPosition;
int toRead = Math.Min(count, available);
@@ -222,8 +222,11 @@ public class SharpCompressStream : Stream, IStreamStack
return toRead;
}
// If buffer exhausted, refill
int r = Stream.Read(_buffer!, 0, _bufferSize);
if (r == 0)
return 0;
_bufferedLength = r;
_bufferPosition = 0;
_bufferedLength = FillBuffer(_buffer!, 0, _bufferSize);
if (_bufferedLength == 0)
{
return 0;
@@ -247,31 +250,6 @@ public class SharpCompressStream : Stream, IStreamStack
}
}
/// <summary>
/// Fills the buffer by reading from the underlying stream, handling short reads.
/// Implements the ReadFully pattern: reads in a loop until buffer is full or EOF is reached.
/// </summary>
/// <param name="buffer">Buffer to fill</param>
/// <param name="offset">Offset in buffer (always 0 in current usage)</param>
/// <param name="count">Number of bytes to read</param>
/// <returns>Total number of bytes read (may be less than count if EOF is reached)</returns>
private int FillBuffer(byte[] buffer, int offset, int count)
{
// Implement ReadFully pattern but return the actual count read
// This is the same logic as Utility.ReadFully but returns count instead of bool
var total = 0;
int read;
while ((read = Stream.Read(buffer, offset + total, count - total)) > 0)
{
total += read;
if (total >= count)
{
return total;
}
}
return total;
}
public override long Seek(long offset, SeekOrigin origin)
{
if (_bufferingEnabled)
@@ -279,6 +257,7 @@ public class SharpCompressStream : Stream, IStreamStack
ValidateBufferState();
}
long orig = _internalPosition;
long targetPos;
// Calculate the absolute target position based on origin
switch (origin)
@@ -346,12 +325,13 @@ public class SharpCompressStream : Stream, IStreamStack
{
ValidateBufferState();
// Fill buffer if needed, handling short reads from underlying stream
// Fill buffer if needed
if (_bufferedLength == 0)
{
_bufferPosition = 0;
_bufferedLength = await FillBufferAsync(_buffer!, 0, _bufferSize, cancellationToken)
_bufferedLength = await Stream
.ReadAsync(_buffer!, 0, _bufferSize, cancellationToken)
.ConfigureAwait(false);
_bufferPosition = 0;
}
int available = _bufferedLength - _bufferPosition;
int toRead = Math.Min(count, available);
@@ -363,9 +343,13 @@ public class SharpCompressStream : Stream, IStreamStack
return toRead;
}
// If buffer exhausted, refill
_bufferPosition = 0;
_bufferedLength = await FillBufferAsync(_buffer!, 0, _bufferSize, cancellationToken)
int r = await Stream
.ReadAsync(_buffer!, 0, _bufferSize, cancellationToken)
.ConfigureAwait(false);
if (r == 0)
return 0;
_bufferedLength = r;
_bufferPosition = 0;
if (_bufferedLength == 0)
{
return 0;
@@ -386,38 +370,6 @@ public class SharpCompressStream : Stream, IStreamStack
}
}
/// <summary>
/// Async version of FillBuffer. Implements the ReadFullyAsync pattern.
/// Reads in a loop until buffer is full or EOF is reached.
/// </summary>
private async Task<int> FillBufferAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
// Implement ReadFullyAsync pattern but return the actual count read
// This is the same logic as Utility.ReadFullyAsync but returns count instead of bool
var total = 0;
int read;
while (
(
read = await Stream
.ReadAsync(buffer, offset + total, count - total, cancellationToken)
.ConfigureAwait(false)
) > 0
)
{
total += read;
if (total >= count)
{
return total;
}
}
return total;
}
public override async Task WriteAsync(
byte[] buffer,
int offset,
@@ -448,15 +400,13 @@ public class SharpCompressStream : Stream, IStreamStack
{
ValidateBufferState();
// Fill buffer if needed, handling short reads from underlying stream
// Fill buffer if needed
if (_bufferedLength == 0)
{
_bufferPosition = 0;
_bufferedLength = await FillBufferMemoryAsync(
_buffer.AsMemory(0, _bufferSize),
cancellationToken
)
_bufferedLength = await Stream
.ReadAsync(_buffer.AsMemory(0, _bufferSize), cancellationToken)
.ConfigureAwait(false);
_bufferPosition = 0;
}
int available = _bufferedLength - _bufferPosition;
int toRead = Math.Min(buffer.Length, available);
@@ -468,12 +418,13 @@ public class SharpCompressStream : Stream, IStreamStack
return toRead;
}
// If buffer exhausted, refill
_bufferPosition = 0;
_bufferedLength = await FillBufferMemoryAsync(
_buffer.AsMemory(0, _bufferSize),
cancellationToken
)
int r = await Stream
.ReadAsync(_buffer.AsMemory(0, _bufferSize), cancellationToken)
.ConfigureAwait(false);
if (r == 0)
return 0;
_bufferedLength = r;
_bufferPosition = 0;
if (_bufferedLength == 0)
{
return 0;
@@ -492,35 +443,6 @@ public class SharpCompressStream : Stream, IStreamStack
}
}
/// <summary>
/// Async version of FillBuffer for Memory{byte}. Implements the ReadFullyAsync pattern.
/// Reads in a loop until buffer is full or EOF is reached.
/// </summary>
private async ValueTask<int> FillBufferMemoryAsync(
Memory<byte> buffer,
CancellationToken cancellationToken
)
{
// Implement ReadFullyAsync pattern but return the actual count read
var total = 0;
int read;
while (
(
read = await Stream
.ReadAsync(buffer.Slice(total), cancellationToken)
.ConfigureAwait(false)
) > 0
)
{
total += read;
if (total >= buffer.Length)
{
return total;
}
}
return total;
}
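
For context on the FillBuffer/FillBufferAsync/FillBufferMemoryAsync helpers removed above: Stream.Read and Stream.ReadAsync may legitimately return fewer bytes than requested, so a "read fully" loop keeps reading until the buffer is full or the stream ends. The generic shape of that loop is sketched below as background only; the PR itself now refills the buffer with a single Read call instead.

    // Generic read-until-full loop (the pattern the removed helpers implemented).
    static int ReadAtLeast(Stream stream, byte[] buffer, int offset, int count)
    {
        var total = 0;
        while (total < count)
        {
            var read = stream.Read(buffer, offset + total, count - total);
            if (read == 0)
            {
                break; // end of stream before 'count' bytes were available
            }
            total += read;
        }
        return total;
    }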
public override async ValueTask WriteAsync(
ReadOnlyMemory<byte> buffer,
CancellationToken cancellationToken = default

View File

@@ -222,26 +222,8 @@ public class SourceStream : Stream, IStreamStack
SetStream(0);
while (_prevSize + Current.Length < pos)
{
var currentLength = Current.Length;
_prevSize += currentLength;
if (!SetStream(_stream + 1))
{
// No more streams available, cannot seek to requested position
throw new InvalidOperationException(
$"Cannot seek to position {pos}. End of stream reached at position {_prevSize}."
);
}
// Safety check: if we have a zero-length stream and we're still not
// making progress toward the target position, we're in an invalid state
if (currentLength == 0 && Current.Length == 0)
{
// Both old and new stream have zero length - cannot make progress
throw new InvalidOperationException(
$"Cannot seek to position {pos}. Encountered zero-length streams at position {_prevSize}."
);
}
_prevSize += Current.Length;
SetStream(_stream + 1);
}
}

View File

@@ -0,0 +1,103 @@
#nullable disable
using System;
using System.Collections;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress;
internal sealed class LazyAsyncReadOnlyCollection<T>(IAsyncEnumerable<T> source)
: IAsyncEnumerable<T>
{
private readonly List<T> backing = new();
private readonly IAsyncEnumerator<T> source = source.GetAsyncEnumerator();
private bool fullyLoaded;
private class LazyLoader(
LazyAsyncReadOnlyCollection<T> lazyReadOnlyCollection,
CancellationToken cancellationToken
) : IAsyncEnumerator<T>
{
private bool disposed;
private int index = -1;
public ValueTask DisposeAsync()
{
if (!disposed)
{
disposed = true;
}
return default;
}
public async ValueTask<bool> MoveNextAsync()
{
cancellationToken.ThrowIfCancellationRequested();
if (index + 1 < lazyReadOnlyCollection.backing.Count)
{
index++;
return true;
}
if (
!lazyReadOnlyCollection.fullyLoaded
&& await lazyReadOnlyCollection.source.MoveNextAsync()
)
{
lazyReadOnlyCollection.backing.Add(lazyReadOnlyCollection.source.Current);
index++;
return true;
}
lazyReadOnlyCollection.fullyLoaded = true;
return false;
}
#region IEnumerator<T> Members
public T Current => lazyReadOnlyCollection.backing[index];
#endregion
#region IDisposable Members
public void Dispose()
{
if (!disposed)
{
disposed = true;
}
}
#endregion
}
internal async ValueTask EnsureFullyLoaded()
{
if (!fullyLoaded)
{
var loader = new LazyLoader(this, CancellationToken.None);
while (await loader.MoveNextAsync())
{
// Intentionally empty
}
fullyLoaded = true;
}
}
internal IEnumerable<T> GetLoaded() => backing;
#region ICollection<T> Members
public void Add(T item) => throw new NotSupportedException();
public void Clear() => throw new NotSupportedException();
public bool IsReadOnly => true;
public bool Remove(T item) => throw new NotSupportedException();
#endregion
public IAsyncEnumerator<T> GetAsyncEnumerator(CancellationToken cancellationToken = default) =>
new LazyLoader(this, cancellationToken);
}
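
LazyAsyncReadOnlyCollection<T> pulls items from the wrapped IAsyncEnumerable<T> on demand and appends them to a backing list, so later enumerations replay the cached items instead of re-reading the source. The type is internal to SharpCompress; the sketch below only illustrates that behavior with a made-up producer (usings for System, System.Collections.Generic and System.Threading.Tasks omitted).

    // Hypothetical usage: the source iterator runs once; the second loop is served from the cache.
    var lazy = new LazyAsyncReadOnlyCollection<int>(ProduceAsync());

    await foreach (var n in lazy) { Console.WriteLine(n); } // pulls from ProduceAsync, caching each item
    await foreach (var n in lazy) { Console.WriteLine(n); } // replays the cached items

    static async IAsyncEnumerable<int> ProduceAsync()
    {
        for (var i = 0; i < 3; i++)
        {
            await Task.Yield();
            yield return i;
        }
    }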

View File

@@ -0,0 +1,101 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace SharpCompress;
public static class AsyncEnumerableEx
{
public static async IAsyncEnumerable<T> Empty<T>()
where T : notnull
{
await Task.CompletedTask;
yield break;
}
}
public static class EnumerableExtensions
{
public static async IAsyncEnumerable<T> ToAsyncEnumerable<T>(this IEnumerable<T> source)
{
await Task.CompletedTask;
foreach (var item in source)
{
yield return item;
}
}
}
public static class AsyncEnumerableExtensions
{
extension<T>(IAsyncEnumerable<T> source)
where T : notnull
{
public async ValueTask<List<T>> ToListAsync()
{
var list = new List<T>();
await foreach (var item in source)
{
list.Add(item);
}
return list;
}
public async IAsyncEnumerable<TResult> Cast<TResult>()
where TResult : class
{
await foreach (var item in source)
{
yield return (item as TResult).NotNull();
}
}
public async ValueTask<bool> All(Func<T, bool> predicate)
{
await foreach (var item in source)
{
if (!predicate(item))
{
return false;
}
}
return true;
}
public async IAsyncEnumerable<T> Where(Func<T, bool> predicate)
{
await foreach (var item in source)
{
if (predicate(item))
{
yield return item;
}
}
}
public async ValueTask<T?> FirstOrDefaultAsync()
{
await foreach (var item in source)
{
return item; // Returns the very first item found
}
return default; // Returns null/default if the stream is empty
}
public async ValueTask<TAccumulate> Aggregate<TAccumulate>(
TAccumulate seed,
Func<TAccumulate, T, TAccumulate> func
)
{
TAccumulate result = seed;
await foreach (var element in source)
{
result = func(result, element);
}
return result;
}
}
}
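
The extension members above provide the small subset of LINQ-style operators the async code paths need (ToListAsync, Cast, All, Where, FirstOrDefaultAsync, Aggregate) over IAsyncEnumerable<T>, written as C# extension blocks. A hedged usage sketch with invented values:

    // Hypothetical usage of the helpers defined above (values are made up for illustration).
    var numbers = new[] { 1, 2, 3, 4, 5 }.ToAsyncEnumerable();

    var evens = await numbers.Where(n => n % 2 == 0).ToListAsync(); // [2, 4]
    var allPositive = await numbers.All(n => n > 0);                // true
    var first = await numbers.FirstOrDefaultAsync();                // 1
    var sum = await numbers.Aggregate(0, (acc, n) => acc + n);      // 15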

View File

@@ -0,0 +1,65 @@
using System;
using System.Buffers;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress;
public static class BinaryReaderExtensions
{
extension(BinaryReader reader)
{
public async ValueTask<byte> ReadByteAsync(CancellationToken cancellationToken = default)
{
var buffer = new byte[1];
await reader
.BaseStream.ReadExactAsync(buffer, 0, 1, cancellationToken)
.ConfigureAwait(false);
return buffer[0];
}
public async ValueTask<byte[]> ReadBytesAsync(
int count,
CancellationToken cancellationToken = default
)
{
if (count < 0)
{
throw new ArgumentOutOfRangeException(nameof(count), "Count must be non-negative.");
}
if (count == 0)
{
return Array.Empty<byte>();
}
// For small allocations, direct allocation is more efficient than pooling
// due to ArrayPool overhead and the need to copy data into the return array
if (count <= 256)
{
var bytes = new byte[count];
await reader
.BaseStream.ReadExactAsync(bytes, 0, count, cancellationToken)
.ConfigureAwait(false);
return bytes;
}
// For larger allocations, use ArrayPool to reduce GC pressure
var buffer = ArrayPool<byte>.Shared.Rent(count);
try
{
await reader
.BaseStream.ReadExactAsync(buffer, 0, count, cancellationToken)
.ConfigureAwait(false);
var bytes = new byte[count];
Array.Copy(buffer, 0, bytes, 0, count);
return bytes;
}
finally
{
ArrayPool<byte>.Shared.Return(buffer);
}
}
}
}
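
ReadBytesAsync switches allocation strategy on size: requests of 256 bytes or fewer allocate directly, because renting, copying into a fresh result array, and returning the pooled buffer would cost more than the allocation it avoids, while larger requests stage through ArrayPool to reduce GC pressure. A hedged usage sketch follows; the file name is a placeholder and the usual usings are omitted.

    // Hypothetical usage of the async BinaryReader helpers added above.
    using var reader = new BinaryReader(File.OpenRead("archive.bin")); // placeholder path

    var marker = await reader.ReadByteAsync();            // single byte
    var header = await reader.ReadBytesAsync(64);         // small: plain allocation
    var block = await reader.ReadBytesAsync(128 * 1024);  // large: staged through ArrayPool, copied out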

View File

@@ -1,5 +1,3 @@
#if NETFRAMEWORK || NETSTANDARD2_0
using System;
using System.Buffers;
using System.IO;
@@ -8,63 +6,62 @@ using System.Threading.Tasks;
namespace SharpCompress;
internal static class StreamExtensions
public static class StreamExtensions
{
internal static int Read(this Stream stream, Span<byte> buffer)
extension(Stream stream)
{
var temp = ArrayPool<byte>.Shared.Rent(buffer.Length);
try
public void Skip(long advanceAmount)
{
var read = stream.Read(temp, 0, buffer.Length);
temp.AsSpan(0, read).CopyTo(buffer);
return read;
}
finally
{
ArrayPool<byte>.Shared.Return(temp);
}
}
internal static void Write(this Stream stream, ReadOnlySpan<byte> buffer)
{
var temp = ArrayPool<byte>.Shared.Rent(buffer.Length);
buffer.CopyTo(temp);
try
{
stream.Write(temp, 0, buffer.Length);
}
finally
{
ArrayPool<byte>.Shared.Return(temp);
}
}
internal static async Task ReadExactlyAsync(
this Stream stream,
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
var totalRead = 0;
while (totalRead < count)
{
var read = await stream
.ReadAsync(buffer, offset + totalRead, count - totalRead, cancellationToken)
.ConfigureAwait(false);
if (read == 0)
if (stream.CanSeek)
{
throw new EndOfStreamException();
stream.Position += advanceAmount;
return;
}
using var readOnlySubStream = new IO.ReadOnlySubStream(stream, advanceAmount);
readOnlySubStream.CopyTo(Stream.Null);
}
public void Skip() => stream.CopyTo(Stream.Null);
public Task SkipAsync(CancellationToken cancellationToken = default)
{
cancellationToken.ThrowIfCancellationRequested();
return stream.CopyToAsync(Stream.Null);
}
internal int Read(Span<byte> buffer)
{
var temp = ArrayPool<byte>.Shared.Rent(buffer.Length);
try
{
var read = stream.Read(temp, 0, buffer.Length);
temp.AsSpan(0, read).CopyTo(buffer);
return read;
}
finally
{
ArrayPool<byte>.Shared.Return(temp);
}
}
internal void Write(ReadOnlySpan<byte> buffer)
{
var temp = ArrayPool<byte>.Shared.Rent(buffer.Length);
buffer.CopyTo(temp);
try
{
stream.Write(temp, 0, buffer.Length);
}
finally
{
ArrayPool<byte>.Shared.Return(temp);
}
totalRead += read;
}
}
}
#endif
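
The reworked Skip(long) advances the position directly when the stream is seekable and otherwise drains a ReadOnlySubStream of that length into Stream.Null, so callers no longer branch on CanSeek themselves. A hedged sketch of how a caller might use these helpers (the method name and header length are invented):

    // Hypothetical usage; works for both seekable and non-seekable inputs.
    static void SkipLocalHeader(Stream input, long headerLength)
    {
        input.Skip(headerLength); // seeks if CanSeek, otherwise reads and discards headerLength bytes
    }

    // Draining whatever remains of a stream:
    //   input.Skip();                 // copies the rest to Stream.Null
    //   await input.SkipAsync(token); // async variant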

View File

@@ -12,12 +12,13 @@ namespace SharpCompress.Readers;
/// <summary>
/// A generic push reader that reads unseekable compressed streams.
/// </summary>
public abstract class AbstractReader<TEntry, TVolume> : IReader
public abstract class AbstractReader<TEntry, TVolume> : IReader, IAsyncReader
where TEntry : Entry
where TVolume : Volume
{
private bool _completed;
private IEnumerator<TEntry>? _entriesForCurrentReadStream;
private IAsyncEnumerator<TEntry>? _entriesForCurrentReadStreamAsync;
private bool _wroteCurrentEntry;
internal AbstractReader(ReaderOptions options, ArchiveType archiveType)
@@ -36,9 +37,19 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader
public abstract TVolume? Volume { get; }
/// <summary>
/// Current file entry
/// Current file entry (from either sync or async enumeration).
/// </summary>
public TEntry Entry => _entriesForCurrentReadStream.NotNull().Current;
public TEntry Entry
{
get
{
if (_entriesForCurrentReadStreamAsync is not null)
{
return _entriesForCurrentReadStreamAsync.Current;
}
return _entriesForCurrentReadStream.NotNull().Current;
}
}
#region IDisposable Members
@@ -48,6 +59,15 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader
Volume?.Dispose();
}
public virtual async ValueTask DisposeAsync()
{
if (_entriesForCurrentReadStreamAsync is not null)
{
await _entriesForCurrentReadStreamAsync.DisposeAsync();
}
Volume?.Dispose();
}
#endregion
public bool Cancelled { get; private set; }
@@ -67,6 +87,12 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader
public bool MoveToNextEntry()
{
if (_entriesForCurrentReadStreamAsync is not null)
{
throw new InvalidOperationException(
$"{nameof(MoveToNextEntry)} cannot be used after {nameof(MoveToNextEntryAsync)} has been used."
);
}
if (_completed)
{
return false;
@@ -92,7 +118,7 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader
return false;
}
public async Task<bool> MoveToNextEntryAsync(CancellationToken cancellationToken = default)
public async ValueTask<bool> MoveToNextEntryAsync(CancellationToken cancellationToken = default)
{
if (_completed)
{
@@ -102,16 +128,16 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader
{
throw new ReaderCancelledException("Reader has been cancelled.");
}
if (_entriesForCurrentReadStream is null)
if (_entriesForCurrentReadStreamAsync is null)
{
return LoadStreamForReading(RequestInitialStream());
return await LoadStreamForReadingAsync(RequestInitialStream());
}
if (!_wroteCurrentEntry)
{
await SkipEntryAsync(cancellationToken).ConfigureAwait(false);
}
_wroteCurrentEntry = false;
if (NextEntryForCurrentStream())
if (await NextEntryForCurrentStreamAsync(cancellationToken))
{
return true;
}
@@ -121,6 +147,12 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader
protected bool LoadStreamForReading(Stream stream)
{
if (_entriesForCurrentReadStreamAsync is not null)
{
throw new InvalidOperationException(
$"{nameof(LoadStreamForReading)} cannot be used after {nameof(LoadStreamForReadingAsync)} has been used."
);
}
_entriesForCurrentReadStream?.Dispose();
if (stream is null || !stream.CanRead)
{
@@ -134,14 +166,59 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader
return _entriesForCurrentReadStream.MoveNext();
}
protected async ValueTask<bool> LoadStreamForReadingAsync(Stream stream)
{
if (_entriesForCurrentReadStreamAsync is not null)
{
await _entriesForCurrentReadStreamAsync.DisposeAsync();
}
if (stream is null || !stream.CanRead)
{
throw new MultipartStreamRequiredException(
"File is split into multiple archives: '"
+ Entry.Key
+ "'. A new readable stream is required. Use Cancel if it was intended."
);
}
_entriesForCurrentReadStreamAsync = GetEntriesAsync(stream).GetAsyncEnumerator();
return await _entriesForCurrentReadStreamAsync.MoveNextAsync();
}
protected virtual Stream RequestInitialStream() =>
Volume.NotNull("Volume isn't loaded.").Stream;
internal virtual bool NextEntryForCurrentStream() =>
_entriesForCurrentReadStream.NotNull().MoveNext();
internal virtual ValueTask<bool> NextEntryForCurrentStreamAsync() =>
_entriesForCurrentReadStreamAsync.NotNull().MoveNextAsync();
/// <summary>
/// Moves the current async enumerator to the next entry.
/// </summary>
internal virtual ValueTask<bool> NextEntryForCurrentStreamAsync(
CancellationToken cancellationToken
)
{
if (_entriesForCurrentReadStreamAsync is not null)
{
return _entriesForCurrentReadStreamAsync.MoveNextAsync();
}
return new ValueTask<bool>(NextEntryForCurrentStream());
}
protected abstract IEnumerable<TEntry> GetEntries(Stream stream);
protected virtual async IAsyncEnumerable<TEntry> GetEntriesAsync(Stream stream)
{
await Task.CompletedTask;
foreach (var entry in GetEntries(stream))
{
yield return entry;
}
}
#region Entry Skip/Write
private void SkipEntry()
@@ -152,7 +229,7 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader
}
}
private async Task SkipEntryAsync(CancellationToken cancellationToken)
private async ValueTask SkipEntryAsync(CancellationToken cancellationToken)
{
if (!Entry.IsDirectory)
{
@@ -182,7 +259,7 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader
s.SkipEntry();
}
private async Task SkipAsync(CancellationToken cancellationToken)
private async ValueTask SkipAsync(CancellationToken cancellationToken)
{
var part = Entry.Parts.First();
@@ -231,7 +308,7 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader
_wroteCurrentEntry = true;
}
public async Task WriteEntryToAsync(
public async ValueTask WriteEntryToAsync(
Stream writableStream,
CancellationToken cancellationToken = default
)
@@ -262,23 +339,19 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader
{
using Stream s = OpenEntryStream();
var sourceStream = WrapWithProgress(s, Entry);
sourceStream.CopyTo(writeStream, Constants.BufferSize);
sourceStream.CopyTo(writeStream, 81920);
}
internal async Task WriteAsync(Stream writeStream, CancellationToken cancellationToken)
internal async ValueTask WriteAsync(Stream writeStream, CancellationToken cancellationToken)
{
#if NETFRAMEWORK || NETSTANDARD2_0
using Stream s = OpenEntryStream();
using Stream s = await OpenEntryStreamAsync(cancellationToken).ConfigureAwait(false);
var sourceStream = WrapWithProgress(s, Entry);
await sourceStream
.CopyToAsync(writeStream, Constants.BufferSize, cancellationToken)
.ConfigureAwait(false);
await sourceStream.CopyToAsync(writeStream, 81920, cancellationToken).ConfigureAwait(false);
#else
await using Stream s = OpenEntryStream();
await using Stream s = await OpenEntryStreamAsync(cancellationToken).ConfigureAwait(false);
var sourceStream = WrapWithProgress(s, Entry);
await sourceStream
.CopyToAsync(writeStream, Constants.BufferSize, cancellationToken)
.ConfigureAwait(false);
await sourceStream.CopyToAsync(writeStream, 81920, cancellationToken).ConfigureAwait(false);
#endif
}
@@ -327,7 +400,7 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader
return stream;
}
public async Task<EntryStream> OpenEntryStreamAsync(
public async ValueTask<EntryStream> OpenEntryStreamAsync(
CancellationToken cancellationToken = default
)
{
@@ -351,11 +424,19 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader
protected virtual EntryStream GetEntryStream() =>
CreateEntryStream(Entry.Parts.First().GetCompressedStream());
protected virtual Task<EntryStream> GetEntryStreamAsync(
protected virtual async Task<EntryStream> GetEntryStreamAsync(
CancellationToken cancellationToken = default
) => Task.FromResult(GetEntryStream());
)
{
var stream = await Entry
.Parts.First()
.GetCompressedStreamAsync(cancellationToken)
.ConfigureAwait(false);
return CreateEntryStream(stream);
}
#endregion
IEntry IReader.Entry => Entry;
IEntry IAsyncReader.Entry => Entry;
}
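
Taken together, these changes give AbstractReader a parallel async path: MoveToNextEntryAsync drives an IAsyncEnumerator of entries (and guards against mixing it with the synchronous enumerator), while OpenEntryStreamAsync and WriteEntryToAsync stream the current entry out as ValueTask-based operations. A hedged sketch of the consumption loop this enables follows; it assumes IAsyncReader exposes the members shown above, since that interface's definition is not part of this excerpt, and omits usings.

    // Illustrative only; assumes IAsyncReader mirrors the async members shown in the diff.
    static async ValueTask ExtractAllAsync(IAsyncReader reader, string outputDir, CancellationToken token)
    {
        while (await reader.MoveToNextEntryAsync(token))
        {
            if (reader.Entry.IsDirectory)
            {
                continue; // an entry that is never written is skipped on the next move
            }
            var path = Path.Combine(outputDir, reader.Entry.Key ?? "unnamed"); // sanitize Key in real code
            await using var output = File.Create(path);
            await reader.WriteEntryToAsync(output, token);
        }
    }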

View File

@@ -25,7 +25,7 @@ namespace SharpCompress.Readers.Ace
/// </remarks>
public abstract class AceReader : AbstractReader<AceEntry, AceVolume>
{
private readonly ArchiveEncoding _archiveEncoding;
private readonly IArchiveEncoding _archiveEncoding;
internal AceReader(ReaderOptions options)
: base(options, ArchiveType.Ace)

Some files were not shown because too many files have changed in this diff.