Compare commits


18 Commits

Author SHA1 Message Date
Adam Hathcock
d915fbf6d6 first pass of Open Async 2025-11-30 11:53:57 +00:00
Adam Hathcock
e44a43d2b1 Merge pull request #1054 from adamhathcock/copilot/fix-zipreader-directory-entry
Document ZipReader DirectoryEntry behavior and add verification test
2025-11-29 15:46:51 +00:00
Adam Hathcock
8997f00b9b Merge pull request #1049 from adamhathcock/copilot/drop-dotnet-6-support
Drop .NET 6, .NET Standard 2.0, .NET 4.8.1, add .NET 10 support
2025-11-29 15:46:27 +00:00
copilot-swe-agent[bot]
c5da416764 Apply code review feedback: improve documentation structure and add explicit DirectoryEntry case
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-29 15:39:36 +00:00
Adam Hathcock
840e58fc03 Update tests/SharpCompress.Test/Zip/ZipReaderTests.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-29 15:36:46 +00:00
copilot-swe-agent[bot]
7f911c5219 Add documentation about ZipReader directory and central directory handling
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-29 15:29:07 +00:00
copilot-swe-agent[bot]
a887390c23 Add multi-volume ZIP documentation to FORMATS.md
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-29 15:25:35 +00:00
copilot-swe-agent[bot]
f4dddcec8e Changes before error encountered
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-29 14:06:03 +00:00
copilot-swe-agent[bot]
0d9d82d7e6 Initial plan 2025-11-29 13:42:43 +00:00
Adam Hathcock
d34a47c148 update dependencies 2025-11-29 11:56:20 +00:00
copilot-swe-agent[bot]
5aa216bd21 Remove netstandard2.0 and net481 targets, keep net48, net8.0, net10.0
- Updated target frameworks to net48;net8.0;net10.0 only
- Removed netstandard2.0 and net481 package conditions
- Updated conditional compilation to use NETFRAMEWORK only
- Updated package description and README

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-29 11:42:05 +00:00
copilot-swe-agent[bot]
8af47548fe Fix .NET 10.0 formatting in README
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-29 11:04:54 +00:00
copilot-swe-agent[bot]
131bd2b7b8 Drop .NET 6 support and add .NET 10 support
- Updated SharpCompress.csproj target frameworks from net48;net481;netstandard2.0;net6.0;net8.0 to net48;net481;netstandard2.0;net8.0;net10.0
- Updated test and build projects to use .NET 10
- Updated global.json to .NET 10 SDK
- Updated CI workflow to use .NET 10
- Fixed deprecated Rfc2898DeriveBytes constructor for .NET 10 (SYSLIB0060)
- Updated package description and README to reflect new supported frameworks
- Updated package versions for .NET 10 compatibility

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-29 11:03:48 +00:00
copilot-swe-agent[bot]
1993673a22 Initial plan 2025-11-29 10:42:07 +00:00
Adam Hathcock
30e036f9ec Mark for 0.42.0 2025-11-28 13:24:03 +00:00
Adam Hathcock
095c871174 Merge pull request #1043 from adamhathcock/copilot/fix-divide-by-zero-exception
Fix DivideByZeroException when compressing empty files with BZip2
2025-11-27 16:47:39 +00:00
copilot-swe-agent[bot]
6d73c5b295 Fix DivideByZeroException when using BZip2 with empty files
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-27 15:59:39 +00:00
copilot-swe-agent[bot]
cc4d28193c Initial plan 2025-11-27 15:53:13 +00:00
65 changed files with 2670 additions and 469 deletions

View File

@@ -17,7 +17,7 @@ jobs:
- uses: actions/checkout@v6
- uses: actions/setup-dotnet@v5
with:
dotnet-version: 8.0.x
dotnet-version: 10.0.x
- run: dotnet run --project build/build.csproj
- uses: actions/upload-artifact@v5
with:

View File

@@ -4,17 +4,17 @@
<PackageVersion Include="AwesomeAssertions" Version="9.3.0" />
<PackageVersion Include="Glob" Version="1.1.9" />
<PackageVersion Include="JetBrains.Profiler.SelfApi" Version="2.5.14" />
<PackageVersion Include="Microsoft.Bcl.AsyncInterfaces" Version="8.0.0" />
<PackageVersion Include="Microsoft.NET.Test.Sdk" Version="18.0.0" />
<PackageVersion Include="Microsoft.Bcl.AsyncInterfaces" Version="10.0.0" />
<PackageVersion Include="Microsoft.NET.Test.Sdk" Version="18.0.1" />
<PackageVersion Include="Mono.Posix.NETStandard" Version="1.0.0" />
<PackageVersion Include="SimpleExec" Version="12.0.0" />
<PackageVersion Include="System.Buffers" Version="4.6.1" />
<PackageVersion Include="System.Memory" Version="4.6.3" />
<PackageVersion Include="System.Text.Encoding.CodePages" Version="8.0.0" />
<PackageVersion Include="System.Text.Encoding.CodePages" Version="10.0.0" />
<PackageVersion Include="xunit" Version="2.9.3" />
<PackageVersion Include="xunit.runner.visualstudio" Version="3.1.5" />
<PackageVersion Include="ZstdSharp.Port" Version="0.8.6" />
<PackageVersion Include="Microsoft.NET.ILLink.Tasks" Version="8.0.21" />
<PackageVersion Include="Microsoft.NET.ILLink.Tasks" Version="10.0.0" />
<PackageVersion Include="Microsoft.SourceLink.GitHub" Version="8.0.0" />
<PackageVersion Include="Microsoft.NETFramework.ReferenceAssemblies" Version="1.0.3" />
</ItemGroup>

View File

@@ -22,11 +22,16 @@
| 7Zip (4) | LZMA, LZMA2, BZip2, PPMd, BCJ, BCJ2, Deflate | Decompress | SevenZipArchive | N/A | N/A |
1. SOLID Rars are only supported in the RarReader API.
2. Zip format supports pkware and WinzipAES encryption. However, encrypted LZMA is not supported. Zip64 reading/writing is supported but only with seekable streams as the Zip spec doesn't support Zip64 data in post data descriptors. Deflate64 is only supported for reading.
2. Zip format supports pkware and WinzipAES encryption. However, encrypted LZMA is not supported. Zip64 reading/writing is supported but only with seekable streams as the Zip spec doesn't support Zip64 data in post data descriptors. Deflate64 is only supported for reading. See [Zip Format Notes](#zip-format-notes) for details on multi-volume archives and streaming behavior.
3. The Tar format requires a file size in the header. If no size is specified to the TarWriter and the stream is not seekable, then an exception will be thrown.
4. The 7Zip format doesn't allow for reading as a forward-only stream so 7Zip is only supported through the Archive API
5. LZip has no support for extra data like the file name or timestamp. There is a default filename used when looking at the entry Key on the archive.
### Zip Format Notes
- Multi-volume/split ZIP archives require ZipArchive (seekable streams) as ZipReader cannot seek across volume files.
- ZipReader processes entries from LocalEntry headers (which include directory entries ending with `/`) and intentionally skips DirectoryEntry headers from the central directory, as they are redundant in streaming mode - all entry data comes from LocalEntry headers which ZipReader has already processed.
## Compression Streams
For those who want to directly compress/decompress bits. The single file formats are represented here as well. However, BZip2, LZip and XZ have no metadata (GZip has a little) so using them without something like a Tar file makes little sense.
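
To illustrate the multi-volume note above, a minimal sketch (not part of the FORMATS.md diff) of opening a split ZIP set through the Archive API, since ZipReader cannot seek across volume files. The file names are hypothetical and their ordering depends on how the split set was produced.

```csharp
using System.IO;
using System.Linq;
using SharpCompress.Archives;
using SharpCompress.Common;

// Hypothetical volume files for a split ZIP set.
var parts = new[]
{
    new FileInfo("backup.zip"),
    new FileInfo("backup.z01"),
    new FileInfo("backup.z02"),
};

// All volumes go to the Archive API up front (seekable access across parts).
using var archive = ArchiveFactory.Open(parts);
foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
{
    entry.WriteToDirectory(
        "extracted",
        new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
    );
}
```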

View File

@@ -1,6 +1,6 @@
# SharpCompress
SharpCompress is a compression library in pure C# for .NET Framework 4.62, .NET Standard 2.1, .NET 6.0 and NET 8.0 that can unrar, un7zip, unzip, untar unbzip2, ungzip, unlzip, unzstd, unarc and unarj with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip are implemented.
SharpCompress is a compression library in pure C# for .NET Framework 4.8, .NET 8.0 and .NET 10.0 that can unrar, un7zip, unzip, untar unbzip2, ungzip, unlzip, unzstd, unarc and unarj with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip are implemented.
The major feature is support for non-seekable streams so large files can be processed on the fly (i.e. download stream).
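
As a quick illustration of that forward-only use case (not part of the README diff), a sketch that unpacks an archive straight from a download stream with the Reader API; the URL and output directory are hypothetical.

```csharp
using System.Net.Http;
using SharpCompress.Common;
using SharpCompress.Readers;

using var http = new HttpClient();
// A non-seekable network stream: the Reader API processes entries on the fly.
using var download = await http.GetStreamAsync("https://example.com/data.tar.gz");

using var reader = ReaderFactory.Open(download);
while (reader.MoveToNextEntry())
{
    if (!reader.Entry.IsDirectory)
    {
        reader.WriteEntryToDirectory(
            "extracted",
            new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
        );
    }
}
```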

View File

@@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net8.0</TargetFramework>
<TargetFramework>net10.0</TargetFramework>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Bullseye" />

View File

@@ -1,7 +1,7 @@
{
"version": 2,
"dependencies": {
"net8.0": {
"net10.0": {
"Bullseye": {
"type": "Direct",
"requested": "[6.0.0, )",

View File

@@ -1,6 +1,6 @@
{
"sdk": {
"version": "8.0.100",
"version": "10.0.100",
"rollForward": "latestFeature"
}
}

View File

@@ -1,7 +1,10 @@
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -12,10 +15,8 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
where TEntry : IArchiveEntry
where TVolume : IVolume
{
private readonly LazyReadOnlyCollection<TVolume> _lazyVolumes;
private readonly LazyReadOnlyCollection<TEntry> _lazyEntries;
private bool _disposed;
private readonly SourceStream? _sourceStream;
protected SourceStream? SourceStream { get; internal set; }
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionBegin;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionEnd;
@@ -24,21 +25,24 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
public event EventHandler<FilePartExtractionBeginEventArgs>? FilePartExtractionBegin;
protected ReaderOptions ReaderOptions { get; }
protected Lazy<IReadOnlyCollection<TVolume>> LazyVolumes { get; internal set; }
protected Lazy<IReadOnlyCollection<TEntry>> LazyEntries { get; internal set; }
internal AbstractArchive(ArchiveType type, SourceStream sourceStream)
{
Type = type;
ReaderOptions = sourceStream.ReaderOptions;
_sourceStream = sourceStream;
_lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(_sourceStream));
_lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
SourceStream = sourceStream;
LazyVolumes = new Lazy<IReadOnlyCollection<TVolume>>(() => LoadVolumes(SourceStream).ToList());
LazyEntries = new Lazy<IReadOnlyCollection<TEntry>>(() => LoadEntries(Volumes).ToList());
}
internal AbstractArchive(ArchiveType type)
{
Type = type;
ReaderOptions = new();
_lazyVolumes = new LazyReadOnlyCollection<TVolume>(Enumerable.Empty<TVolume>());
_lazyEntries = new LazyReadOnlyCollection<TEntry>(Enumerable.Empty<TEntry>());
LazyVolumes = new Lazy<IReadOnlyCollection<TVolume>>(() => Enumerable.Empty<TVolume>().ToList());
LazyEntries = new Lazy<IReadOnlyCollection<TEntry>>(() => Enumerable.Empty<TEntry>().ToList());
}
public ArchiveType Type { get; }
@@ -61,12 +65,12 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
/// <summary>
/// Returns an ReadOnlyCollection of all the RarArchiveEntries across the one or many parts of the RarArchive.
/// </summary>
public virtual ICollection<TEntry> Entries => _lazyEntries;
public virtual ICollection<TEntry> Entries => LazyEntries.Value;
/// <summary>
/// Returns an ReadOnlyCollection of all the RarArchiveVolumes across the one or many parts of the RarArchive.
/// </summary>
public ICollection<TVolume> Volumes => _lazyVolumes;
public ICollection<TVolume> Volumes => LazyVolumes.Value;
/// <summary>
/// The total size of the files compressed in the archive.
@@ -82,18 +86,26 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
protected abstract IEnumerable<TVolume> LoadVolumes(SourceStream sourceStream);
protected abstract IEnumerable<TEntry> LoadEntries(IEnumerable<TVolume> volumes);
protected virtual Task<IReadOnlyCollection<TEntry>> LoadEntriesAsync(IEnumerable<TVolume> volumes, CancellationToken cancellationToken)
{
return Task.FromResult<IReadOnlyCollection<TEntry>>(LoadEntries(volumes).ToList());
}
protected virtual Task<IReadOnlyCollection<TVolume>> LoadVolumesAsync(SourceStream sourceStream, CancellationToken cancellationToken)
{
return Task.FromResult<IReadOnlyCollection<TVolume>>(LoadVolumes(sourceStream).ToList());
}
IEnumerable<IArchiveEntry> IArchive.Entries => Entries.Cast<IArchiveEntry>();
IEnumerable<IVolume> IArchive.Volumes => _lazyVolumes.Cast<IVolume>();
IEnumerable<IVolume> IArchive.Volumes => LazyVolumes.Value.Cast<IVolume>();
public virtual void Dispose()
{
if (!_disposed)
{
_lazyVolumes.ForEach(v => v.Dispose());
_lazyEntries.GetLoaded().Cast<Entry>().ForEach(x => x.Close());
_sourceStream?.Dispose();
LazyVolumes.Value.ForEach(v => v.Dispose());
LazyEntries.Value.Cast<Entry>().ForEach(x => x.Close());
SourceStream?.Dispose();
_disposed = true;
}
@@ -101,8 +113,8 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
void IArchiveExtractionListener.EnsureEntriesLoaded()
{
_lazyEntries.EnsureFullyLoaded();
_lazyVolumes.EnsureFullyLoaded();
LazyEntries.Value.EnsureFullyLoaded();
LazyVolumes.Value.EnsureFullyLoaded();
}
void IExtractionListener.FireCompressedBytesRead(
@@ -172,9 +184,4 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
return Entries.All(x => x.IsComplete);
}
}
public virtual bool IsMultiVolume =>
_sourceStream?.Files.Count > 1 || _sourceStream?.Streams.Count > 1;
public virtual bool SupportsMultiThreading => false;
}

View File

@@ -2,6 +2,8 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Factories;
using SharpCompress.IO;
@@ -24,6 +26,25 @@ public static class ArchiveFactory
return FindFactory<IArchiveFactory>(stream).Open(stream, readerOptions);
}
/// <summary>
/// Opens an Archive for random access
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
/// <returns></returns>
public static async Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
readerOptions ??= new ReaderOptions();
stream = SharpCompressStream.Create(stream, bufferSize: readerOptions.BufferSize);
var factory = await FindFactoryAsync<IArchiveFactory>(stream, cancellationToken);
return await factory.OpenAsync(stream, readerOptions, cancellationToken);
}
public static IWritableArchive Create(ArchiveType type)
{
var factory = Factory
@@ -49,6 +70,22 @@ public static class ArchiveFactory
return Open(new FileInfo(filePath), options);
}
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
public static Task<IArchive> OpenAsync(
string filePath,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
filePath.NotNullOrEmpty(nameof(filePath));
return OpenAsync(new FileInfo(filePath), options, cancellationToken);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
@@ -61,6 +98,24 @@ public static class ArchiveFactory
return FindFactory<IArchiveFactory>(fileInfo).Open(fileInfo, options);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
options ??= new ReaderOptions { LeaveStreamOpen = false };
var factory = await FindFactoryAsync<IArchiveFactory>(fileInfo, cancellationToken);
return await factory.OpenAsync(fileInfo, options, cancellationToken);
}
/// <summary>
/// Constructor with IEnumerable FileInfo objects, multi and split support.
/// </summary>
@@ -87,6 +142,38 @@ public static class ArchiveFactory
return FindFactory<IMultiArchiveFactory>(fileInfo).Open(filesArray, options);
}
/// <summary>
/// Constructor with IEnumerable FileInfo objects, multi and split support.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
fileInfos.NotNull(nameof(fileInfos));
var filesArray = fileInfos.ToArray();
if (filesArray.Length == 0)
{
throw new InvalidOperationException("No files to open");
}
var fileInfo = filesArray[0];
if (filesArray.Length == 1)
{
return await OpenAsync(fileInfo, options, cancellationToken);
}
fileInfo.NotNull(nameof(fileInfo));
options ??= new ReaderOptions { LeaveStreamOpen = false };
var factory = await FindFactoryAsync<IMultiArchiveFactory>(fileInfo, cancellationToken);
return await factory.OpenAsync(filesArray, options, cancellationToken);
}
/// <summary>
/// Constructor with IEnumerable FileInfo objects, multi and split support.
/// </summary>
@@ -113,6 +200,38 @@ public static class ArchiveFactory
return FindFactory<IMultiArchiveFactory>(firstStream).Open(streamsArray, options);
}
/// <summary>
/// Constructor with IEnumerable FileInfo objects, multi and split support.
/// </summary>
/// <param name="streams"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
IEnumerable<Stream> streams,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
streams.NotNull(nameof(streams));
var streamsArray = streams.ToArray();
if (streamsArray.Length == 0)
{
throw new InvalidOperationException("No streams");
}
var firstStream = streamsArray[0];
if (streamsArray.Length == 1)
{
return await OpenAsync(firstStream, options, cancellationToken);
}
firstStream.NotNull(nameof(firstStream));
options ??= new ReaderOptions();
var factory = await FindFactoryAsync<IMultiArchiveFactory>(firstStream, cancellationToken);
return await factory.OpenAsync(streamsArray, options, cancellationToken);
}
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
@@ -126,6 +245,20 @@ public static class ArchiveFactory
archive.WriteToDirectory(destinationDirectory, options);
}
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static async Task WriteToDirectoryAsync(
string sourceArchive,
string destinationDirectory,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default
)
{
using var archive = await OpenAsync(sourceArchive, cancellationToken: cancellationToken);
await archive.WriteToDirectoryAsync(destinationDirectory, options, cancellationToken);
}
private static T FindFactory<T>(FileInfo finfo)
where T : IFactory
{
@@ -134,6 +267,17 @@ public static class ArchiveFactory
return FindFactory<T>(stream);
}
private static async Task<T> FindFactoryAsync<T>(
FileInfo finfo,
CancellationToken cancellationToken
)
where T : IFactory
{
finfo.NotNull(nameof(finfo));
using Stream stream = finfo.OpenRead();
return await FindFactoryAsync<T>(stream, cancellationToken);
}
private static T FindFactory<T>(Stream stream)
where T : IFactory
{
@@ -166,6 +310,38 @@ public static class ArchiveFactory
);
}
private static async Task<T> FindFactoryAsync<T>(Stream stream, CancellationToken cancellationToken)
where T : IFactory
{
stream.NotNull(nameof(stream));
if (!stream.CanRead || !stream.CanSeek)
{
throw new ArgumentException("Stream should be readable and seekable");
}
var factories = Factory.Factories.OfType<T>();
var startPosition = stream.Position;
foreach (var factory in factories)
{
stream.Seek(startPosition, SeekOrigin.Begin);
if (await factory.IsArchiveAsync(stream, cancellationToken).ConfigureAwait(false))
{
stream.Seek(startPosition, SeekOrigin.Begin);
return factory;
}
}
var extensions = string.Join(", ", factories.Select(item => item.Name));
throw new InvalidOperationException(
$"Cannot determine compressed stream type. Supported Archive Formats: {extensions}"
);
}
public static bool IsArchive(
string filePath,
out ArchiveType? type,
@@ -177,6 +353,17 @@ public static class ArchiveFactory
return IsArchive(s, out type, bufferSize);
}
public static async Task<(bool, ArchiveType?)> IsArchiveAsync(
string filePath,
CancellationToken cancellationToken = default,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
filePath.NotNullOrEmpty(nameof(filePath));
using Stream s = File.OpenRead(filePath);
return await IsArchiveAsync(s, cancellationToken, bufferSize);
}
public static bool IsArchive(
Stream stream,
out ArchiveType? type,
@@ -208,6 +395,36 @@ public static class ArchiveFactory
return false;
}
public static async Task<(bool, ArchiveType?)> IsArchiveAsync(
Stream stream,
CancellationToken cancellationToken = default,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
stream.NotNull(nameof(stream));
if (!stream.CanRead || !stream.CanSeek)
{
throw new ArgumentException("Stream should be readable and seekable");
}
var startPosition = stream.Position;
foreach (var factory in Factory.Factories)
{
var isArchive = await factory.IsArchiveAsync(stream, cancellationToken)
.ConfigureAwait(false);
stream.Position = startPosition;
if (isArchive)
{
return (true, factory.KnownArchiveType);
}
}
return (false, null);
}
/// <summary>
/// From a passed in archive (zip, rar, 7z, 001), return all parts.
/// </summary>
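
A usage sketch for the async entry points added in this file (OpenAsync, IsArchiveAsync, WriteToDirectoryAsync), assuming the signatures shown in the diff; the paths are hypothetical.

```csharp
using System;
using System.Linq;
using SharpCompress.Archives;

// IsArchiveAsync returns a (bool, ArchiveType?) tuple rather than using an out parameter.
var (isArchive, type) = await ArchiveFactory.IsArchiveAsync("input/sample.zip");
if (isArchive)
{
    Console.WriteLine($"Detected archive type: {type}");

    // Random-access open via the async factory lookup.
    using var archive = await ArchiveFactory.OpenAsync("input/sample.zip");
    Console.WriteLine($"{archive.Entries.Count()} entries");

    // Or the one-shot extraction helper added in the same change.
    await ArchiveFactory.WriteToDirectoryAsync("input/sample.zip", "extracted");
}
```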

View File

@@ -2,11 +2,13 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.GZip;
using SharpCompress.IO;
using SharpCompress.Polyfills;
using SharpCompress.Readers;
using SharpCompress.Readers.GZip;
using SharpCompress.Writers;
@@ -27,6 +29,22 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<GZipArchive> OpenAsync(
string filePath,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
filePath.NotNullOrEmpty(nameof(filePath));
return await OpenAsync(new FileInfo(filePath), readerOptions ?? new ReaderOptions(), cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
@@ -44,6 +62,32 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<GZipArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
fileInfo.NotNull(nameof(fileInfo));
var archive = new GZipArchive();
archive.SourceStream = new SourceStream(
fileInfo,
i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
readerOptions ?? new ReaderOptions()
);
archive.LazyVolumes = new Lazy<IReadOnlyCollection<GZipVolume>>(() => archive.LoadVolumes(archive.SourceStream).ToList());
archive.LazyEntries = new Lazy<IReadOnlyCollection<GZipArchiveEntry>>(
() => archive.LoadEntriesAsync(archive.Volumes, cancellationToken).GetAwaiter().GetResult().ToList()
);
return archive;
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
@@ -65,6 +109,33 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
);
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<GZipArchive> OpenAsync(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
var archive = new GZipArchive();
archive.SourceStream = new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions()
);
archive.LazyVolumes = new Lazy<IReadOnlyCollection<GZipVolume>>(() => archive.LoadVolumes(archive.SourceStream).ToList());
archive.LazyEntries = new Lazy<IReadOnlyCollection<GZipArchiveEntry>>(
() => archive.LoadEntriesAsync(archive.Volumes, cancellationToken).GetAwaiter().GetResult().ToList()
);
return archive;
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
@@ -83,6 +154,33 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
);
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<GZipArchive> OpenAsync(
IEnumerable<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
var archive = new GZipArchive();
archive.SourceStream = new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
);
archive.LazyVolumes = new Lazy<IReadOnlyCollection<GZipVolume>>(() => archive.LoadVolumes(archive.SourceStream).ToList());
archive.LazyEntries = new Lazy<IReadOnlyCollection<GZipArchiveEntry>>(
() => archive.LoadEntriesAsync(archive.Volumes, cancellationToken).GetAwaiter().GetResult().ToList()
);
return archive;
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
@@ -102,21 +200,62 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
);
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<GZipArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
stream.NotNull(nameof(stream));
if (stream is not { CanSeek: true })
{
throw new ArgumentException("Stream must be seekable", nameof(stream));
}
var archive = new GZipArchive();
archive.SourceStream = new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions());
archive.LazyVolumes = new Lazy<IReadOnlyCollection<GZipVolume>>(() => archive.LoadVolumes(archive.SourceStream).ToList());
archive.LazyEntries = new Lazy<IReadOnlyCollection<GZipArchiveEntry>>(
() => archive.LoadEntriesAsync(archive.Volumes, cancellationToken).GetAwaiter().GetResult().ToList()
);
return archive;
}
public static GZipArchive Create() => new();
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="sourceStream"></param>
private GZipArchive(SourceStream sourceStream)
: base(ArchiveType.GZip, sourceStream) { }
private GZipArchive()
: base(ArchiveType.GZip)
{
LazyVolumes = new Lazy<IReadOnlyCollection<GZipVolume>>(() => LoadVolumes(SourceStream!).ToList());
LazyEntries = new Lazy<IReadOnlyCollection<GZipArchiveEntry>>(() => LoadEntries(Volumes).ToList());
}
protected override IEnumerable<GZipVolume> LoadVolumes(SourceStream sourceStream)
{
sourceStream.LoadAllParts();
return sourceStream.Streams.Select(a => new GZipVolume(a, ReaderOptions, 0));
}
protected override async Task<IReadOnlyCollection<GZipVolume>> LoadVolumesAsync(SourceStream sourceStream, CancellationToken cancellationToken)
{
await sourceStream.LoadAllPartsAsync(cancellationToken).ConfigureAwait(false);
return sourceStream.Streams.Select(a => new GZipVolume(a, ReaderOptions, 0)).ToList();
}
public static bool IsGZipFile(string filePath) => IsGZipFile(new FileInfo(filePath));
public static bool IsGZipFile(FileInfo fileInfo)
@@ -130,6 +269,25 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
return IsGZipFile(stream);
}
public static Task<bool> IsGZipFileAsync(
string filePath,
CancellationToken cancellationToken = default
) => IsGZipFileAsync(new FileInfo(filePath), cancellationToken);
public static async Task<bool> IsGZipFileAsync(
FileInfo fileInfo,
CancellationToken cancellationToken = default
)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return await IsGZipFileAsync(stream, cancellationToken).ConfigureAwait(false);
}
public void SaveTo(string filePath) => SaveTo(new FileInfo(filePath));
public void SaveTo(FileInfo fileInfo)
@@ -167,6 +325,28 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
return true;
}
public static async Task<bool> IsGZipFileAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
// read the header on the first read
byte[] header = new byte[10];
// workitem 8501: handle edge case (decompress empty stream)
if (!await stream.ReadFullyAsync(header, cancellationToken).ConfigureAwait(false))
{
return false;
}
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
{
return false;
}
return true;
}
internal GZipArchive()
: base(ArchiveType.GZip) { }
@@ -244,6 +424,17 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
);
}
protected override async Task<IReadOnlyCollection<GZipArchiveEntry>> LoadEntriesAsync(IEnumerable<GZipVolume> volumes, CancellationToken cancellationToken)
{
var list = new List<GZipArchiveEntry>();
var stream = volumes.Single().Stream;
list.Add(new GZipArchiveEntry(
this,
new GZipFilePart(stream, ReaderOptions.ArchiveEncoding)
));
return list.AsReadOnly();
}
protected override IReader CreateReaderForSolidExtraction()
{
var stream = Volumes.Single().Stream;
@@ -251,3 +442,4 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
return GZipReader.Open(stream);
}
}
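
A brief sketch of the new GZip async helpers above; the file name is hypothetical. Note that in this first pass the entry list is still materialized synchronously inside the Lazy wrapper (LoadEntriesAsync is blocked on via GetAwaiter().GetResult()).

```csharp
using System;
using System.Linq;
using SharpCompress.Archives.GZip;

if (await GZipArchive.IsGZipFileAsync("logs.tar.gz"))
{
    using var archive = await GZipArchive.OpenAsync("logs.tar.gz");
    var entry = archive.Entries.Single(); // a GZip archive holds a single entry
    Console.WriteLine($"{entry.Key}: {entry.Size} bytes");
}
```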

View File

@@ -45,14 +45,4 @@ public interface IArchive : IDisposable
/// The total size of the files as uncompressed in the archive.
/// </summary>
long TotalUncompressSize { get; }
/// <summary>
/// Is the archive part of a multi-volume set.
/// </summary>
bool IsMultiVolume { get; }
/// <summary>
/// Does the archive support multi-threaded extraction.
/// </summary>
bool SupportsMultiThreading { get; }
}

View File

@@ -88,7 +88,7 @@ public static class IArchiveEntryExtensions
entry,
destinationDirectory,
options,
entry.WriteToFileAsync,
(x, opt) => entry.WriteToFileAsync(x, opt, cancellationToken),
cancellationToken
);
@@ -124,10 +124,10 @@ public static class IArchiveEntryExtensions
entry,
destinationFileName,
options,
async (x, fm, ct) =>
async (x, fm) =>
{
using var fs = File.Open(destinationFileName, fm);
await entry.WriteToAsync(fs, ct).ConfigureAwait(false);
await entry.WriteToAsync(fs, cancellationToken).ConfigureAwait(false);
},
cancellationToken
);

View File

@@ -1,4 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Factories;
using SharpCompress.Readers;
@@ -26,10 +28,34 @@ public interface IArchiveFactory : IFactory
/// <param name="readerOptions">reading options.</param>
IArchive Open(Stream stream, ReaderOptions? readerOptions = null);
/// <summary>
/// Opens an Archive for random access.
/// </summary>
/// <param name="stream">An open, readable and seekable stream.</param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">the cancellation token.</param>
Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo">the file to open.</param>
/// <param name="readerOptions">reading options.</param>
IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null);
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo">the file to open.</param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">the cancellation token.</param>
Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
}

View File

@@ -1,5 +1,7 @@
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Factories;
using SharpCompress.Readers;
@@ -27,10 +29,34 @@ public interface IMultiArchiveFactory : IFactory
/// <param name="readerOptions">reading options.</param>
IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null);
/// <summary>
/// Constructor with IEnumerable FileInfo objects, multi and split support.
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">the cancellation token.</param>
Task<IArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
/// <summary>
/// Constructor with IEnumerable Stream objects, multi and split support.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions">reading options.</param>
IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null);
/// <summary>
/// Constructor with IEnumerable Stream objects, multi and split support.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">the cancellation token.</param>
Task<IArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
}

View File

@@ -0,0 +1,39 @@
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Linq;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives.Rar;
/// <summary>
/// A rar part based on a FileInfo object
/// </summary>
internal class FileInfoRarArchiveVolume : RarVolume
{
internal FileInfoRarArchiveVolume(FileInfo fileInfo, ReaderOptions options, int index)
: base(StreamingMode.Seekable, fileInfo.OpenRead(), FixOptions(options), index)
{
FileInfo = fileInfo;
FileParts = GetVolumeFileParts().ToArray().ToReadOnly();
}
private static ReaderOptions FixOptions(ReaderOptions options)
{
//make sure we're closing streams with fileinfo
options.LeaveStreamOpen = false;
return options;
}
internal ReadOnlyCollection<RarFilePart> FileParts { get; }
internal FileInfo FileInfo { get; }
internal override RarFilePart CreateFilePart(MarkHeader markHeader, FileHeader fileHeader) =>
new FileInfoRarFilePart(this, ReaderOptions.Password, markHeader, fileHeader, FileInfo);
internal override IEnumerable<RarFilePart> ReadFileParts() => FileParts;
}

View File

@@ -0,0 +1,21 @@
using System.IO;
using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Archives.Rar;
internal sealed class FileInfoRarFilePart : SeekableFilePart
{
internal FileInfoRarFilePart(
FileInfoRarArchiveVolume volume,
string? password,
MarkHeader mh,
FileHeader fh,
FileInfo fi
)
: base(mh, fh, volume.Index, volume.Stream, password) => FileInfo = fi;
internal FileInfo FileInfo { get; }
internal override string FilePartName =>
"Rar File: " + FileInfo.FullName + " File Entry: " + FileHeader.FileName;
}

View File

@@ -2,6 +2,8 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
@@ -47,9 +49,9 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
{
sourceStream.LoadAllParts(); //request all streams
var streams = sourceStream.Streams.ToArray();
var i = 0;
if (streams.Length > 1 && IsRarFile(streams[1], ReaderOptions)) //test part 2 - true = multipart not split
{
var i = 0;
sourceStream.IsVolumes = true;
streams[1].Position = 0;
sourceStream.Position = 0;
@@ -57,18 +59,12 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
return sourceStream.Streams.Select(a => new StreamRarArchiveVolume(
a,
ReaderOptions,
i++,
IsMultiVolume
i++
));
}
//split mode or single file
return new StreamRarArchiveVolume(
sourceStream,
ReaderOptions,
0,
IsMultiVolume
).AsEnumerable();
return new StreamRarArchiveVolume(sourceStream, ReaderOptions, i++).AsEnumerable();
}
protected override IReader CreateReaderForSolidExtraction()
@@ -89,7 +85,6 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
}
public override bool IsSolid => Volumes.First().IsSolidArchive;
public override bool SupportsMultiThreading => !IsMultiVolume && !IsSolid;
public virtual int MinVersion => Volumes.First().MinVersion;
public virtual int MaxVersion => Volumes.First().MaxVersion;
@@ -198,6 +193,24 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
return IsRarFile(stream);
}
public static Task<bool> IsRarFileAsync(
string filePath,
CancellationToken cancellationToken = default
) => IsRarFileAsync(new FileInfo(filePath), cancellationToken);
public static async Task<bool> IsRarFileAsync(
FileInfo fileInfo,
CancellationToken cancellationToken = default
)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return await IsRarFileAsync(stream, cancellationToken).ConfigureAwait(false);
}
public static bool IsRarFile(Stream stream, ReaderOptions? options = null)
{
try
@@ -211,5 +224,23 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
}
}
public static async Task<bool> IsRarFileAsync(
Stream stream,
CancellationToken cancellationToken = default,
ReaderOptions? options = null
)
{
try
{
await MarkHeader.ReadAsync(stream, true, false, cancellationToken)
.ConfigureAwait(false);
return true;
}
catch
{
return false;
}
}
#endregion
}

View File

@@ -134,6 +134,4 @@ public class RarArchiveEntry : RarEntry, IArchiveEntry
);
}
}
public override bool SupportsMultiThreading => Parts.Single().SupportsMultiThreading;
}

View File

@@ -1,29 +1,25 @@
using System.IO;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
namespace SharpCompress.Archives.Rar;
internal class SeekableRarFilePart : RarFilePart
internal class SeekableFilePart : RarFilePart
{
private readonly Stream _stream;
private readonly string? _password;
private readonly bool _isMultiVolume;
internal SeekableRarFilePart(
internal SeekableFilePart(
MarkHeader mh,
FileHeader fh,
int index,
Stream stream,
string? password,
bool isMultiVolume
string? password
)
: base(mh, fh, index)
{
_stream = stream;
_password = password;
_isMultiVolume = isMultiVolume;
}
internal override Stream GetCompressedStream()
@@ -46,7 +42,4 @@ internal class SeekableRarFilePart : RarFilePart
}
internal override string FilePartName => "Unknown Stream - File Entry: " + FileHeader.FileName;
public override bool SupportsMultiThreading =>
!_isMultiVolume && _stream is SourceStream ss && ss.IsFileMode && ss.Files.Count == 1;
}

View File

@@ -9,28 +9,11 @@ namespace SharpCompress.Archives.Rar;
internal class StreamRarArchiveVolume : RarVolume
{
private readonly bool _isMultiVolume;
internal StreamRarArchiveVolume(
Stream stream,
ReaderOptions options,
int index,
bool isMultiVolume
)
: base(StreamingMode.Seekable, stream, options, index)
{
_isMultiVolume = isMultiVolume;
}
internal StreamRarArchiveVolume(Stream stream, ReaderOptions options, int index)
: base(StreamingMode.Seekable, stream, options, index) { }
internal override IEnumerable<RarFilePart> ReadFileParts() => GetVolumeFileParts();
internal override RarFilePart CreateFilePart(MarkHeader markHeader, FileHeader fileHeader) =>
new SeekableRarFilePart(
markHeader,
fileHeader,
Index,
Stream,
ReaderOptions.Password,
_isMultiVolume
);
new SeekableFilePart(markHeader, fileHeader, Index, Stream, ReaderOptions.Password);
}

View File

@@ -2,10 +2,14 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.SevenZip;
using SharpCompress.Compressors.LZMA.Utilites;
using SharpCompress.IO;
using SharpCompress.Polyfills;
using SharpCompress.Readers;
namespace SharpCompress.Archives.SevenZip;
@@ -25,6 +29,18 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static Task<SevenZipArchive> OpenAsync(
string filePath,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
@@ -42,6 +58,18 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static Task<SevenZipArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
@@ -63,6 +91,18 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
);
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static Task<SevenZipArchive> OpenAsync(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
@@ -84,6 +124,18 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
);
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static Task<SevenZipArchive> OpenAsync(
IEnumerable<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
@@ -103,6 +155,18 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
);
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static Task<SevenZipArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
@@ -118,6 +182,11 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
public static bool IsSevenZipFile(string filePath) => IsSevenZipFile(new FileInfo(filePath));
public static Task<bool> IsSevenZipFileAsync(
string filePath,
CancellationToken cancellationToken = default
) => IsSevenZipFileAsync(new FileInfo(filePath), cancellationToken);
public static bool IsSevenZipFile(FileInfo fileInfo)
{
if (!fileInfo.Exists)
@@ -128,6 +197,19 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
return IsSevenZipFile(stream);
}
public static async Task<bool> IsSevenZipFileAsync(
FileInfo fileInfo,
CancellationToken cancellationToken = default
)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return await IsSevenZipFileAsync(stream, cancellationToken).ConfigureAwait(false);
}
internal SevenZipArchive()
: base(ArchiveType.SevenZip) { }
@@ -163,6 +245,8 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
return entries;
}
protected override Task<IReadOnlyCollection<SevenZipArchiveEntry>> LoadEntriesAsync(IEnumerable<SevenZipVolume> volumes, CancellationToken cancellationToken) => throw new NotImplementedException();
private void LoadFactory(Stream stream)
{
if (_database is null)
@@ -174,6 +258,8 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
}
}
protected override Task<IReadOnlyCollection<SevenZipVolume>> LoadVolumesAsync(SourceStream sourceStream, CancellationToken cancellationToken) => throw new NotImplementedException();
public static bool IsSevenZipFile(Stream stream)
{
try
@@ -186,6 +272,21 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
}
}
public static async Task<bool> IsSevenZipFileAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
try
{
return await SignatureMatchAsync(stream, cancellationToken).ConfigureAwait(false);
}
catch
{
return false;
}
}
private static ReadOnlySpan<byte> Signature =>
new byte[] { (byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C };
@@ -196,6 +297,16 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
return signatureBytes.SequenceEqual(Signature);
}
private static async Task<bool> SignatureMatchAsync(
Stream stream,
CancellationToken cancellationToken
)
{
var signatureBytes = new byte[6];
await stream.ReadFullyAsync(signatureBytes, cancellationToken).ConfigureAwait(false);
return signatureBytes.SequenceEqual(Signature.ToArray());
}
protected override IReader CreateReaderForSolidExtraction() =>
new SevenZipReader(ReaderOptions, this);
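
In this first pass the SevenZipArchive.OpenAsync overloads throw NotImplementedException, while the signature probe IsSevenZipFileAsync is implemented, so only detection is usable asynchronously for 7z. A sketch with a hypothetical path:

```csharp
using System;
using SharpCompress.Archives.SevenZip;

// Async detection works; async open for 7z is not implemented yet in this pass.
if (await SevenZipArchive.IsSevenZipFileAsync("bundle.7z"))
{
    using var archive = SevenZipArchive.Open("bundle.7z"); // synchronous open still required
    Console.WriteLine($"{archive.Entries.Count} entries");
}
```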

View File

@@ -2,12 +2,14 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Tar;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
using SharpCompress.Polyfills;
using SharpCompress.Readers;
using SharpCompress.Readers.Tar;
using SharpCompress.Writers;
@@ -28,6 +30,22 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<TarArchive> OpenAsync(
string filePath,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
filePath.NotNullOrEmpty(nameof(filePath));
return await OpenAsync(new FileInfo(filePath), readerOptions ?? new ReaderOptions(), cancellationToken);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
@@ -45,6 +63,32 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<TarArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
fileInfo.NotNull(nameof(fileInfo));
var archive = new TarArchive();
archive.SourceStream = new SourceStream(
fileInfo,
i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
readerOptions ?? new ReaderOptions()
);
archive.LazyVolumes = new Lazy<IReadOnlyCollection<TarVolume>>(() => archive.LoadVolumes(archive.SourceStream).ToList());
archive.LazyEntries = new Lazy<IReadOnlyCollection<TarArchiveEntry>>(
() => archive.LoadEntriesAsync(archive.Volumes, cancellationToken).GetAwaiter().GetResult().ToList()
);
return archive;
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
@@ -66,6 +110,33 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
);
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<TarArchive> OpenAsync(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
var archive = new TarArchive();
archive.SourceStream = new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions()
);
archive.LazyVolumes = new Lazy<IReadOnlyCollection<TarVolume>>(() => archive.LoadVolumes(archive.SourceStream).ToList());
archive.LazyEntries = new Lazy<IReadOnlyCollection<TarArchiveEntry>>(
() => archive.LoadEntriesAsync(archive.Volumes, cancellationToken).GetAwaiter().GetResult().ToList()
);
return archive;
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
@@ -84,6 +155,33 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
);
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<TarArchive> OpenAsync(
IEnumerable<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
var archive = new TarArchive();
archive.SourceStream = new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
);
archive.LazyVolumes = new Lazy<IReadOnlyCollection<TarVolume>>(() => archive.LoadVolumes(archive.SourceStream).ToList());
archive.LazyEntries = new Lazy<IReadOnlyCollection<TarArchiveEntry>>(
() => archive.LoadEntriesAsync(archive.Volumes, cancellationToken).GetAwaiter().GetResult().ToList()
);
return archive;
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
@@ -103,6 +201,34 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
);
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<TarArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
stream.NotNull(nameof(stream));
if (stream is not { CanSeek: true })
{
throw new ArgumentException("Stream must be seekable", nameof(stream));
}
var archive = new TarArchive();
archive.SourceStream = new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions());
archive.LazyVolumes = new Lazy<IReadOnlyCollection<TarVolume>>(() => archive.LoadVolumes(archive.SourceStream).ToList());
archive.LazyEntries = new Lazy<IReadOnlyCollection<TarArchiveEntry>>(
() => archive.LoadEntriesAsync(archive.Volumes, cancellationToken).GetAwaiter().GetResult().ToList()
);
return archive;
}
public static bool IsTarFile(string filePath) => IsTarFile(new FileInfo(filePath));
public static bool IsTarFile(FileInfo fileInfo)
@@ -115,6 +241,24 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
return IsTarFile(stream);
}
public static async Task<bool> IsTarFileAsync(
string filePath,
CancellationToken cancellationToken = default
) => await IsTarFileAsync(new FileInfo(filePath), cancellationToken);
public static async Task<bool> IsTarFileAsync(
FileInfo fileInfo,
CancellationToken cancellationToken = default
)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return await IsTarFileAsync(stream, cancellationToken);
}
public static bool IsTarFile(Stream stream)
{
try
@@ -131,6 +275,26 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
return false;
}
public static async Task<bool> IsTarFileAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
try
{
var tarHeader = new TarHeader(new ArchiveEncoding());
var readSucceeded = await tarHeader.ReadAsync(stream, cancellationToken)
.ConfigureAwait(false);
var isEmptyArchive =
tarHeader.Name?.Length == 0
&& tarHeader.Size == 0
&& Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
return readSucceeded || isEmptyArchive;
}
catch { }
return false;
}
protected override IEnumerable<TarVolume> LoadVolumes(SourceStream sourceStream)
{
sourceStream.NotNull("SourceStream is null").LoadAllParts(); //request all streams
@@ -145,7 +309,11 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
: base(ArchiveType.Tar, sourceStream) { }
private TarArchive()
: base(ArchiveType.Tar) { }
: base(ArchiveType.Tar)
{
LazyVolumes = new Lazy<IReadOnlyCollection<TarVolume>>(() => LoadVolumes(SourceStream!).ToList());
LazyEntries = new Lazy<IReadOnlyCollection<TarArchiveEntry>>(() => LoadEntries(Volumes).ToList());
}
protected override IEnumerable<TarArchiveEntry> LoadEntries(IEnumerable<TarVolume> volumes)
{
@@ -205,6 +373,67 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
}
}
private async Task<IReadOnlyCollection<TarArchiveEntry>> LoadEntriesAsync(IEnumerable<TarVolume> volumes, CancellationToken cancellationToken)
{
var list = new List<TarArchiveEntry>();
var stream = volumes.Single().Stream;
TarHeader? previousHeader = null;
await foreach (
var header in TarHeaderFactory.ReadHeaderAsync(
StreamingMode.Seekable,
stream,
ReaderOptions.ArchiveEncoding,
cancellationToken
).WithCancellation(cancellationToken)
)
{
if (header != null)
{
if (header.EntryType == EntryType.LongName)
{
previousHeader = header;
}
else
{
if (previousHeader != null)
{
var entry = new TarArchiveEntry(
this,
new TarFilePart(previousHeader, stream),
CompressionType.None
);
var oldStreamPos = stream.Position;
using (var entryStream = entry.OpenEntryStream())
{
using var memoryStream = new MemoryStream();
await entryStream.CopyToAsync(memoryStream, cancellationToken).ConfigureAwait(false);
memoryStream.Position = 0;
var bytes = memoryStream.ToArray();
header.Name = ReaderOptions.ArchiveEncoding.Decode(bytes).TrimNulls();
}
stream.Position = oldStreamPos;
previousHeader = null;
}
list.Add(new TarArchiveEntry(
this,
new TarFilePart(header, stream),
CompressionType.None
));
}
}
else
{
throw new IncompleteArchiveException("Failed to read TAR header");
}
}
return list.AsReadOnly();
}
public static TarArchive Create() => new();
protected override TarArchiveEntry CreateEntryInternal(

View File

@@ -18,7 +18,7 @@ namespace SharpCompress.Archives.Zip;
public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
{
private readonly SeekableZipHeaderFactory? headerFactory;
protected internal SeekableZipHeaderFactory? headerFactory;
/// <summary>
/// Gets or sets the compression level applied to files added to the archive,
@@ -144,6 +144,14 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
return IsZipFile(stream, password, bufferSize);
}
// ... other using statements
namespace SharpCompress.Archives.Zip;
//... existing class
public static bool IsZipFile(
Stream stream,
string? password = null,
@@ -177,6 +185,41 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
}
}
public static async Task<bool> IsZipFileAsync(
Stream stream,
string? password = null,
CancellationToken cancellationToken = default,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
}
var header = await headerFactory
.ReadStreamHeaderAsync(stream, cancellationToken)
.FirstOrDefaultAsync(x => x.ZipHeaderType != ZipHeaderType.Split, cancellationToken)
.ConfigureAwait(false);
if (header is null)
{
return false;
}
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
return true;
}
catch
{
return false;
}
}
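A hedged usage sketch for the new async detection helper; the file-based wrapper below is illustrative and not part of this change.

using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives.Zip;

internal static class ZipDetectionSketch
{
    // Probe a file for a ZIP signature without blocking the caller (sketch).
    public static async Task<bool> LooksLikeZipAsync(string path, CancellationToken ct = default)
    {
        using var stream = File.OpenRead(path);
        // Password and buffer size fall back to their defaults.
        return await ZipArchive.IsZipFileAsync(stream, cancellationToken: ct);
    }
}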
public static bool IsZipMulti(
Stream stream,
string? password = null,
@@ -219,6 +262,50 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
}
}
public static async Task<bool> IsZipMultiAsync(
Stream stream,
string? password = null,
CancellationToken cancellationToken = default,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
}
var header = await headerFactory
.ReadStreamHeaderAsync(stream, cancellationToken)
.FirstOrDefaultAsync(x => x.ZipHeaderType != ZipHeaderType.Split, cancellationToken)
.ConfigureAwait(false);
if (header is null)
{
if (stream.CanSeek) //could be multi-part; test for the central directory (may not be Zip64-safe)
{
var z = new SeekableZipHeaderFactory(password, new ArchiveEncoding());
var x = z.ReadSeekableHeader(stream).FirstOrDefault();
return x?.ZipHeaderType == ZipHeaderType.DirectoryEntry;
}
else
{
return false;
}
}
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
return true;
}
catch
{
return false;
}
}
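As the seekable fallback above shows, the multi-part probe additionally scans for a central directory; a minimal call-site sketch, with the stream assumed to be seekable and the wrapper name illustrative:

using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives.Zip;

internal static class ZipMultiDetectionSketch
{
    // Sketch: returns true for split/multi-part ZIPs as well as ordinary ones with a readable header.
    public static Task<bool> LooksLikeMultiPartZipAsync(Stream seekableStream, CancellationToken ct = default) =>
        ZipArchive.IsZipMultiAsync(seekableStream, cancellationToken: ct);
}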
protected override IEnumerable<ZipVolume> LoadVolumes(SourceStream stream)
{
stream.LoadAllParts(); //request all streams
@@ -283,12 +370,7 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
yield return new ZipArchiveEntry(
this,
new SeekableZipFilePart(
headerFactory.NotNull(),
deh,
s,
IsMultiVolume
)
new SeekableZipFilePart(headerFactory.NotNull(), deh, s)
);
}
break;
@@ -390,6 +472,4 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
((IStreamStack)stream).StackSeek(0);
return ZipReader.Open(stream, ReaderOptions, Entries);
}
public override bool SupportsMultiThreading => !IsMultiVolume;
}

View File

@@ -23,7 +23,5 @@ public class ZipArchiveEntry : ZipEntry, IArchiveEntry
public bool IsComplete => true;
public override bool SupportsMultiThreading => Parts.Single().SupportsMultiThreading;
#endregion
}

View File

@@ -87,5 +87,4 @@ public abstract class Entry : IEntry
/// Entry file attribute.
/// </summary>
public virtual int? Attrib => throw new NotImplementedException();
public virtual bool SupportsMultiThreading => false;
}

View File

@@ -128,7 +128,7 @@ internal static class ExtractionMethods
IEntry entry,
string destinationDirectory,
ExtractionOptions? options,
Func<string, ExtractionOptions?, CancellationToken, Task> writeAsync,
Func<string, ExtractionOptions?, Task> writeAsync,
CancellationToken cancellationToken = default
)
{
@@ -189,7 +189,7 @@ internal static class ExtractionMethods
"Entry is trying to write a file outside of the destination directory."
);
}
await writeAsync(destinationFileName, options, cancellationToken).ConfigureAwait(false);
await writeAsync(destinationFileName, options).ConfigureAwait(false);
}
else if (options.ExtractFullPath && !Directory.Exists(destinationFileName))
{
@@ -201,7 +201,7 @@ internal static class ExtractionMethods
IEntry entry,
string destinationFileName,
ExtractionOptions? options,
Func<string, FileMode, CancellationToken, Task> openAndWriteAsync,
Func<string, FileMode, Task> openAndWriteAsync,
CancellationToken cancellationToken = default
)
{
@@ -225,8 +225,7 @@ internal static class ExtractionMethods
fm = FileMode.CreateNew;
}
await openAndWriteAsync(destinationFileName, fm, cancellationToken)
.ConfigureAwait(false);
await openAndWriteAsync(destinationFileName, fm).ConfigureAwait(false);
entry.PreserveExtractionOptions(destinationFileName, options);
}
}

View File

@@ -14,6 +14,4 @@ public abstract class FilePart
internal abstract Stream? GetCompressedStream();
internal abstract Stream? GetRawStream();
internal bool Skipped { get; set; }
public virtual bool SupportsMultiThreading => false;
}

View File

@@ -21,5 +21,4 @@ public interface IEntry
DateTime? LastModifiedTime { get; }
long Size { get; }
int? Attrib { get; }
bool SupportsMultiThreading { get; }
}

View File

@@ -1,5 +1,7 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Rar.Headers;
@@ -25,6 +27,20 @@ internal class MarkHeader : IRarHeader
throw new EndOfStreamException();
}
private static async Task<byte> GetByteAsync(
Stream stream,
CancellationToken cancellationToken
)
{
byte[] buffer = new byte[1];
var read = await stream.ReadAsync(buffer, 0, 1, cancellationToken).ConfigureAwait(false);
if (read == 1)
{
return buffer[0];
}
throw new EndOfStreamException();
}
public static MarkHeader Read(Stream stream, bool leaveStreamOpen, bool lookForHeader)
{
var maxScanIndex = lookForHeader ? MAX_SFX_SIZE : 0;
@@ -129,4 +145,115 @@ internal class MarkHeader : IRarHeader
throw new InvalidFormatException("Rar signature not found");
}
public static async Task<MarkHeader> ReadAsync(
Stream stream,
bool leaveStreamOpen,
bool lookForHeader,
CancellationToken cancellationToken
)
{
var maxScanIndex = lookForHeader ? MAX_SFX_SIZE : 0;
try
{
var start = -1;
var b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
while (start <= maxScanIndex)
{
cancellationToken.ThrowIfCancellationRequested();
// Rar old signature: 52 45 7E 5E
// Rar4 signature: 52 61 72 21 1A 07 00
// Rar5 signature: 52 61 72 21 1A 07 01 00
if (b == 0x52)
{
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b == 0x61)
{
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b != 0x72)
{
continue;
}
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b != 0x21)
{
continue;
}
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b != 0x1a)
{
continue;
}
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b != 0x07)
{
continue;
}
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b == 1)
{
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b != 0)
{
continue;
}
return new MarkHeader(true); // Rar5
}
else if (b == 0)
{
return new MarkHeader(false); // Rar4
}
}
else if (b == 0x45)
{
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b != 0x7e)
{
continue;
}
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b != 0x5e)
{
continue;
}
throw new InvalidFormatException(
"Rar format version pre-4 is unsupported."
);
}
}
else
{
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
}
}
}
catch (Exception e)
{
if (!leaveStreamOpen)
{
stream.Dispose();
}
throw new InvalidFormatException("Error trying to read rar signature.", e);
}
throw new InvalidFormatException("Rar signature not found");
}
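For reference, the signatures the scanner above matches byte-by-byte, restated from its inline comments; the array form is an illustration, not part of the change.

internal static class RarSignatureBytes
{
    public static readonly byte[] RarOld = { 0x52, 0x45, 0x7E, 0x5E }; // pre-RAR4, rejected as unsupported
    public static readonly byte[] Rar4 = { 0x52, 0x61, 0x72, 0x21, 0x1A, 0x07, 0x00 };
    public static readonly byte[] Rar5 = { 0x52, 0x61, 0x72, 0x21, 0x1A, 0x07, 0x01, 0x00 };
}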
}

View File

@@ -2,6 +2,8 @@ using System;
using System.Buffers.Binary;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Tar.Headers;
@@ -187,6 +189,92 @@ internal sealed class TarHeader
return true;
}
internal async Task<bool> ReadAsync(Stream stream, CancellationToken cancellationToken)
{
string? longName = null;
string? longLinkName = null;
var hasLongValue = true;
byte[] buffer;
EntryType entryType;
do
{
buffer = await ReadBlockAsync(stream, cancellationToken).ConfigureAwait(false);
if (buffer.Length == 0)
{
return false;
}
entryType = ReadEntryType(buffer);
// LongName and LongLink headers can follow each other and need
// to apply to the header that follows them.
if (entryType == EntryType.LongName)
{
longName = await ReadLongNameAsync(stream, buffer, cancellationToken)
.ConfigureAwait(false);
continue;
}
else if (entryType == EntryType.LongLink)
{
longLinkName = await ReadLongNameAsync(stream, buffer, cancellationToken)
.ConfigureAwait(false);
continue;
}
hasLongValue = false;
} while (hasLongValue);
// Check header checksum
if (!checkChecksum(buffer))
{
return false;
}
Name = longName ?? ArchiveEncoding.Decode(buffer, 0, 100).TrimNulls();
EntryType = entryType;
Size = ReadSize(buffer);
// for symlinks, additionally read the linkname
if (entryType == EntryType.SymLink || entryType == EntryType.HardLink)
{
LinkName = longLinkName ?? ArchiveEncoding.Decode(buffer, 157, 100).TrimNulls();
}
Mode = ReadAsciiInt64Base8(buffer, 100, 7);
if (entryType == EntryType.Directory)
{
Mode |= 0b1_000_000_000;
}
UserId = ReadAsciiInt64Base8oldGnu(buffer, 108, 7);
GroupId = ReadAsciiInt64Base8oldGnu(buffer, 116, 7);
var unixTimeStamp = ReadAsciiInt64Base8(buffer, 136, 11);
LastModifiedTime = EPOCH.AddSeconds(unixTimeStamp).ToLocalTime();
Magic = ArchiveEncoding.Decode(buffer, 257, 6).TrimNulls();
if (!string.IsNullOrEmpty(Magic) && "ustar".Equals(Magic))
{
var namePrefix = ArchiveEncoding.Decode(buffer, 345, 157).TrimNulls();
if (!string.IsNullOrEmpty(namePrefix))
{
Name = namePrefix + "/" + Name;
}
}
if (entryType != EntryType.LongName && Name.Length == 0)
{
return false;
}
return true;
}
private string ReadLongName(BinaryReader reader, byte[] buffer)
{
var size = ReadSize(buffer);
@@ -211,6 +299,39 @@ internal sealed class TarHeader
return ArchiveEncoding.Decode(nameBytes, 0, nameBytes.Length).TrimNulls();
}
private async Task<string> ReadLongNameAsync(
Stream stream,
byte[] buffer,
CancellationToken cancellationToken
)
{
var size = ReadSize(buffer);
// Validate size to prevent memory exhaustion from malformed headers
if (size < 0 || size > MAX_LONG_NAME_SIZE)
{
throw new InvalidFormatException(
$"Long name size {size} is invalid or exceeds maximum allowed size of {MAX_LONG_NAME_SIZE} bytes"
);
}
var nameLength = (int)size;
var nameBytes = new byte[nameLength];
await ReadFullyAsync(stream, nameBytes, 0, nameLength, cancellationToken)
.ConfigureAwait(false);
var remainingBytesToRead = BLOCK_SIZE - (nameLength % BLOCK_SIZE);
// Read the rest of the block and discard the data
if (remainingBytesToRead > 0 && remainingBytesToRead < BLOCK_SIZE)
{
var discardBuffer = new byte[remainingBytesToRead];
await ReadFullyAsync(stream, discardBuffer, 0, remainingBytesToRead, cancellationToken)
.ConfigureAwait(false);
}
return ArchiveEncoding.Decode(nameBytes, 0, nameBytes.Length).TrimNulls();
}
private static EntryType ReadEntryType(byte[] buffer) => (EntryType)buffer[156];
private long ReadSize(byte[] buffer)
@@ -234,6 +355,60 @@ internal sealed class TarHeader
return buffer;
}
private static async Task<byte[]> ReadBlockAsync(
Stream stream,
CancellationToken cancellationToken
)
{
var buffer = new byte[BLOCK_SIZE];
var bytesRead = 0;
while (bytesRead < BLOCK_SIZE)
{
var read = await stream.ReadAsync(
buffer,
bytesRead,
BLOCK_SIZE - bytesRead,
cancellationToken
).ConfigureAwait(false);
if (read == 0)
{
// End of stream: if nothing was read, return an empty array;
// a partial block is treated as an error.
if (bytesRead == 0)
{
return Array.Empty<byte>();
}
throw new InvalidFormatException("Buffer is invalid size");
}
bytesRead += read;
}
return buffer;
}
private static async Task ReadFullyAsync(
Stream stream,
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
var totalRead = 0;
while (totalRead < count)
{
cancellationToken.ThrowIfCancellationRequested();
var read = await stream
.ReadAsync(buffer, offset + totalRead, count - totalRead, cancellationToken)
.ConfigureAwait(false);
if (read == 0)
{
throw new EndOfStreamException("End of stream reached, but more bytes were expected.");
}
totalRead += read;
}
}
private static void WriteStringBytes(ReadOnlySpan<byte> name, Span<byte> buffer, int length)
{
name.CopyTo(buffer);

View File

@@ -1,5 +1,8 @@
using System.Collections.Generic;
using System.IO;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
@@ -54,6 +57,54 @@ internal static class TarHeaderFactory
}
}
internal static async IAsyncEnumerable<TarHeader?> ReadHeaderAsync(
StreamingMode mode,
Stream stream,
ArchiveEncoding archiveEncoding,
[EnumeratorCancellation] CancellationToken cancellationToken
)
{
while (true)
{
cancellationToken.ThrowIfCancellationRequested();
TarHeader? header = null;
try
{
header = new TarHeader(archiveEncoding);
if (!await header.ReadAsync(stream, cancellationToken).ConfigureAwait(false))
{
yield break;
}
switch (mode)
{
case StreamingMode.Seekable:
{
header.DataStartPosition = stream.Position;
//skip to nearest 512
stream.Position += PadTo512(header.Size);
}
break;
case StreamingMode.Streaming:
{
header.PackedStream = new TarReadOnlySubStream(stream, header.Size);
}
break;
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
}
}
catch
{
header = null;
}
yield return header;
}
}
private static long PadTo512(long size)
{
var zeros = (int)(size % 512);

View File

@@ -1,4 +1,7 @@
using System.Buffers.Binary;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
@@ -19,6 +22,22 @@ internal class DirectoryEndHeader : ZipHeader
Comment = reader.ReadBytes(CommentLength);
}
internal async Task ReadAsync(Stream stream, CancellationToken cancellationToken)
{
VolumeNumber = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
FirstVolumeWithDirectory = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken)
.ConfigureAwait(false);
TotalNumberOfEntriesInDisk = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken)
.ConfigureAwait(false);
TotalNumberOfEntries = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken)
.ConfigureAwait(false);
DirectorySize = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken).ConfigureAwait(false);
DirectoryStartOffsetRelativeToDisk = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken)
.ConfigureAwait(false);
CommentLength = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
Comment = await ZipHeaderFactory.ReadBytesAsync(stream, CommentLength, cancellationToken).ConfigureAwait(false);
}
public ushort VolumeNumber { get; private set; }
public ushort FirstVolumeWithDirectory { get; private set; }

View File

@@ -1,5 +1,8 @@
using System.Buffers.Binary;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
@@ -90,7 +93,114 @@ internal class DirectoryEntryHeader : ZipFileEntry
if (unixTimeExtra is not null)
{
// Tuple order is last modified time, last access time, and creation time.
var unixTimeTuple = ((UnixTimeExtraField)unixTimeExtra).UnicodeTimes;
if (unixTimeTuple.Item1.HasValue)
{
var dosTime = Utility.DateTimeToDosTime(unixTimeTuple.Item1.Value);
LastModifiedDate = (ushort)(dosTime >> 16);
LastModifiedTime = (ushort)(dosTime & 0x0FFFF);
}
else if (unixTimeTuple.Item2.HasValue)
{
var dosTime = Utility.DateTimeToDosTime(unixTimeTuple.Item2.Value);
LastModifiedDate = (ushort)(dosTime >> 16);
LastModifiedTime = (ushort)(dosTime & 0x0FFFF);
}
else if (unixTimeTuple.Item3.HasValue)
{
var dosTime = Utility.DateTimeToDosTime(unixTimeTuple.Item3.Value);
LastModifiedDate = (ushort)(dosTime >> 16);
LastModifiedTime = (ushort)(dosTime & 0x0FFFF);
}
}
}
internal async Task ReadAsync(Stream stream, CancellationToken cancellationToken)
{
Version = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
VersionNeededToExtract = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken)
.ConfigureAwait(false);
Flags = (HeaderFlags)await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
CompressionMethod = (ZipCompressionMethod)
await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
OriginalLastModifiedTime =
LastModifiedTime = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
OriginalLastModifiedDate =
LastModifiedDate = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
Crc = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken).ConfigureAwait(false);
CompressedSize = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken).ConfigureAwait(false);
UncompressedSize = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken).ConfigureAwait(false);
var nameLength = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
var extraLength = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
var commentLength = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
DiskNumberStart = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
InternalFileAttributes = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken)
.ConfigureAwait(false);
ExternalFileAttributes = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken)
.ConfigureAwait(false);
RelativeOffsetOfEntryHeader = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken)
.ConfigureAwait(false);
var name = await ZipHeaderFactory.ReadBytesAsync(stream, nameLength, cancellationToken).ConfigureAwait(false);
var extra = await ZipHeaderFactory.ReadBytesAsync(stream, extraLength, cancellationToken).ConfigureAwait(false);
var comment = await ZipHeaderFactory.ReadBytesAsync(stream, commentLength, cancellationToken)
.ConfigureAwait(false);
if (Flags.HasFlag(HeaderFlags.Efs))
{
Name = ArchiveEncoding.DecodeUTF8(name);
Comment = ArchiveEncoding.DecodeUTF8(comment);
}
else
{
Name = ArchiveEncoding.Decode(name);
Comment = ArchiveEncoding.Decode(comment);
}
LoadExtra(extra);
var unicodePathExtra = Extra.FirstOrDefault(u =>
u.Type == ExtraDataType.UnicodePathExtraField
);
if (unicodePathExtra != null && ArchiveEncoding.Forced == null)
{
Name = ((ExtraUnicodePathExtraField)unicodePathExtra).UnicodeName;
}
var zip64ExtraData = Extra.OfType<Zip64ExtendedInformationExtraField>().FirstOrDefault();
if (zip64ExtraData != null)
{
zip64ExtraData.Process(
UncompressedSize,
CompressedSize,
RelativeOffsetOfEntryHeader,
DiskNumberStart
);
if (CompressedSize == uint.MaxValue)
{
CompressedSize = zip64ExtraData.CompressedSize;
}
if (UncompressedSize == uint.MaxValue)
{
UncompressedSize = zip64ExtraData.UncompressedSize;
}
if (RelativeOffsetOfEntryHeader == uint.MaxValue)
{
RelativeOffsetOfEntryHeader = zip64ExtraData.RelativeOffsetOfEntryHeader;
}
}
var unixTimeExtra = Extra.FirstOrDefault(u => u.Type == ExtraDataType.UnixTimeExtraField);
if (unixTimeExtra is not null)
{
var unixTimeTuple = ((UnixTimeExtraField)unixTimeExtra).UnicodeTimes;
if (unixTimeTuple.Item1.HasValue)

View File

@@ -1,5 +1,8 @@
using System.Buffers.Binary;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
@@ -96,5 +99,87 @@ internal class LocalEntryHeader : ZipFileEntry
}
}
internal async Task ReadAsync(Stream stream, CancellationToken cancellationToken)
{
Version = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
Flags = (HeaderFlags)await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
CompressionMethod = (ZipCompressionMethod)
await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
OriginalLastModifiedTime =
LastModifiedTime = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
OriginalLastModifiedDate =
LastModifiedDate = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
Crc = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken).ConfigureAwait(false);
CompressedSize = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken).ConfigureAwait(false);
UncompressedSize = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken).ConfigureAwait(false);
var nameLength = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
var extraLength = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
var name = await ZipHeaderFactory.ReadBytesAsync(stream, nameLength, cancellationToken).ConfigureAwait(false);
var extra = await ZipHeaderFactory.ReadBytesAsync(stream, extraLength, cancellationToken).ConfigureAwait(false);
if (Flags.HasFlag(HeaderFlags.Efs))
{
Name = ArchiveEncoding.DecodeUTF8(name);
}
else
{
Name = ArchiveEncoding.Decode(name);
}
LoadExtra(extra);
var unicodePathExtra = Extra.FirstOrDefault(u =>
u.Type == ExtraDataType.UnicodePathExtraField
);
if (unicodePathExtra != null && ArchiveEncoding.Forced == null)
{
Name = ((ExtraUnicodePathExtraField)unicodePathExtra).UnicodeName;
}
var zip64ExtraData = Extra.OfType<Zip64ExtendedInformationExtraField>().FirstOrDefault();
if (zip64ExtraData != null)
{
zip64ExtraData.Process(UncompressedSize, CompressedSize, 0, 0);
if (CompressedSize == uint.MaxValue)
{
CompressedSize = zip64ExtraData.CompressedSize;
}
if (UncompressedSize == uint.MaxValue)
{
UncompressedSize = zip64ExtraData.UncompressedSize;
}
}
var unixTimeExtra = Extra.FirstOrDefault(u => u.Type == ExtraDataType.UnixTimeExtraField);
if (unixTimeExtra is not null)
{
var unixTimeTuple = ((UnixTimeExtraField)unixTimeExtra).UnicodeTimes;
if (unixTimeTuple.Item1.HasValue)
{
var dosTime = Utility.DateTimeToDosTime(unixTimeTuple.Item1.Value);
LastModifiedDate = (ushort)(dosTime >> 16);
LastModifiedTime = (ushort)(dosTime & 0x0FFFF);
}
else if (unixTimeTuple.Item2.HasValue)
{
var dosTime = Utility.DateTimeToDosTime(unixTimeTuple.Item2.Value);
LastModifiedDate = (ushort)(dosTime >> 16);
LastModifiedTime = (ushort)(dosTime & 0x0FFFF);
}
else if (unixTimeTuple.Item3.HasValue)
{
var dosTime = Utility.DateTimeToDosTime(unixTimeTuple.Item3.Value);
LastModifiedDate = (ushort)(dosTime >> 16);
LastModifiedTime = (ushort)(dosTime & 0x0FFFF);
}
}
}
internal ushort Version { get; private set; }
}

View File

@@ -1,4 +1,7 @@
using System.Buffers.Binary;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
@@ -26,6 +29,35 @@ internal class Zip64DirectoryEndHeader : ZipHeader
);
}
internal async Task ReadAsync(Stream stream, CancellationToken cancellationToken)
{
SizeOfDirectoryEndRecord = (long)
await ZipHeaderFactory.ReadUInt64Async(stream, cancellationToken).ConfigureAwait(false);
VersionMadeBy = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken).ConfigureAwait(false);
VersionNeededToExtract = await ZipHeaderFactory.ReadUInt16Async(stream, cancellationToken)
.ConfigureAwait(false);
VolumeNumber = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken).ConfigureAwait(false);
FirstVolumeWithDirectory = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken)
.ConfigureAwait(false);
TotalNumberOfEntriesInDisk = (long)
await ZipHeaderFactory.ReadUInt64Async(stream, cancellationToken).ConfigureAwait(false);
TotalNumberOfEntries = (long)
await ZipHeaderFactory.ReadUInt64Async(stream, cancellationToken).ConfigureAwait(false);
DirectorySize = (long)
await ZipHeaderFactory.ReadUInt64Async(stream, cancellationToken).ConfigureAwait(false);
DirectoryStartOffsetRelativeToDisk = (long)
await ZipHeaderFactory.ReadUInt64Async(stream, cancellationToken).ConfigureAwait(false);
DataSector = await ZipHeaderFactory.ReadBytesAsync(
stream,
(int)(
SizeOfDirectoryEndRecord
- SIZE_OF_FIXED_HEADER_DATA_EXCEPT_SIGNATURE_AND_SIZE_FIELDS
),
cancellationToken
)
.ConfigureAwait(false);
}
private const int SIZE_OF_FIXED_HEADER_DATA_EXCEPT_SIGNATURE_AND_SIZE_FIELDS = 44;
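A worked tally of that constant against the fields ReadAsync reads above (excluding the signature and the 8-byte record-size field itself):

// VersionMadeBy (2) + VersionNeededToExtract (2) + VolumeNumber (4) + FirstVolumeWithDirectory (4)
// + TotalNumberOfEntriesInDisk (8) + TotalNumberOfEntries (8) + DirectorySize (8)
// + DirectoryStartOffsetRelativeToDisk (8) = 44 bytes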
public long SizeOfDirectoryEndRecord { get; private set; }

View File

@@ -1,4 +1,7 @@
using System.Buffers.Binary;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
@@ -14,6 +17,16 @@ internal class Zip64DirectoryEndLocatorHeader : ZipHeader
TotalNumberOfVolumes = reader.ReadUInt32();
}
internal async Task ReadAsync(Stream stream, CancellationToken cancellationToken)
{
FirstVolumeWithDirectory = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken)
.ConfigureAwait(false);
RelativeOffsetOfTheEndOfDirectoryRecord = (long)
await ZipHeaderFactory.ReadUInt64Async(stream, cancellationToken).ConfigureAwait(false);
TotalNumberOfVolumes = await ZipHeaderFactory.ReadUInt32Async(stream, cancellationToken)
.ConfigureAwait(false);
}
public uint FirstVolumeWithDirectory { get; private set; }
public long RelativeOffsetOfTheEndOfDirectoryRecord { get; private set; }

View File

@@ -1,6 +1,5 @@
using System.IO;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
namespace SharpCompress.Common.Zip;
@@ -8,19 +7,13 @@ internal class SeekableZipFilePart : ZipFilePart
{
private bool _isLocalHeaderLoaded;
private readonly SeekableZipHeaderFactory _headerFactory;
private readonly bool _isMultiVolume;
internal SeekableZipFilePart(
SeekableZipHeaderFactory headerFactory,
DirectoryEntryHeader header,
Stream stream,
bool isMultiVolume
Stream stream
)
: base(header, stream)
{
_headerFactory = headerFactory;
_isMultiVolume = isMultiVolume;
}
: base(header, stream) => _headerFactory = headerFactory;
internal override Stream GetCompressedStream()
{
@@ -37,20 +30,8 @@ internal class SeekableZipFilePart : ZipFilePart
protected override Stream CreateBaseStream()
{
if (!_isMultiVolume && BaseStream is SourceStream ss)
{
if (ss.IsFileMode && ss.Files.Count == 1)
{
var fileStream = ss.CurrentFile.OpenRead();
fileStream.Position = Header.DataStartPosition.NotNull();
return fileStream;
}
}
BaseStream.Position = Header.DataStartPosition.NotNull();
return BaseStream;
}
public override bool SupportsMultiThreading =>
!_isMultiVolume && BaseStream is SourceStream ss && ss.IsFileMode && ss.Files.Count == 1;
}

View File

@@ -1,6 +1,9 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
@@ -85,6 +88,90 @@ internal sealed class SeekableZipHeaderFactory : ZipHeaderFactory
}
}
internal async IAsyncEnumerable<ZipHeader> ReadSeekableHeaderAsync(
Stream stream,
[EnumeratorCancellation] CancellationToken cancellationToken
)
{
// Still need BinaryReader for synchronous SeekBackToHeader, until SeekBackToHeaderAsync is implemented
var reader = new BinaryReader(stream);
SeekBackToHeader(stream, reader); // Synchronous due to stream.Seek and BinaryReader
var eocd_location = stream.Position;
var entry = new DirectoryEndHeader();
await entry.ReadAsync(stream, cancellationToken).ConfigureAwait(false);
if (entry.IsZip64)
{
_zip64 = true;
stream.Seek(eocd_location - ZIP64_EOCD_LENGTH - 4, SeekOrigin.Begin); // Synchronous seek
var zip64_locator = await ReadUInt32Async(stream, cancellationToken)
.ConfigureAwait(false);
if (zip64_locator != ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR)
{
throw new ArchiveException("Failed to locate the Zip64 Directory Locator");
}
var zip64Locator = new Zip64DirectoryEndLocatorHeader();
await zip64Locator.ReadAsync(stream, cancellationToken).ConfigureAwait(false);
stream.Seek(
zip64Locator.RelativeOffsetOfTheEndOfDirectoryRecord,
SeekOrigin.Begin
); // Synchronous seek
var zip64Signature = await ReadUInt32Async(stream, cancellationToken)
.ConfigureAwait(false);
if (zip64Signature != ZIP64_END_OF_CENTRAL_DIRECTORY)
{
throw new ArchiveException("Failed to locate the Zip64 Header");
}
var zip64Entry = new Zip64DirectoryEndHeader();
await zip64Entry.ReadAsync(stream, cancellationToken).ConfigureAwait(false);
stream.Seek(
zip64Entry.DirectoryStartOffsetRelativeToDisk,
SeekOrigin.Begin
); // Synchronous seek
}
else
{
stream.Seek(entry.DirectoryStartOffsetRelativeToDisk, SeekOrigin.Begin); // Synchronous seek
}
var position = stream.Position;
while (true)
{
cancellationToken.ThrowIfCancellationRequested();
stream.Position = position; // Synchronous seek
var signature = await ReadUInt32Async(stream, cancellationToken)
.ConfigureAwait(false);
var nextHeader = await ReadHeaderAsync(
signature,
stream,
_zip64,
cancellationToken
).ConfigureAwait(false);
position = stream.Position;
if (nextHeader is null)
{
yield break;
}
if (nextHeader is DirectoryEntryHeader entryHeader)
{
entryHeader.HasData = entryHeader.CompressedSize != 0;
yield return entryHeader;
}
else if (nextHeader is DirectoryEndHeader endHeader)
{
yield return endHeader;
}
}
}
private static bool IsMatch(byte[] haystack, int position, byte[] needle)
{
for (var i = 0; i < needle.Length; i++)

View File

@@ -1,7 +1,10 @@
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
@@ -200,4 +203,230 @@ internal class StreamingZipHeaderFactory : ZipHeaderFactory
yield return header;
}
}
internal async IAsyncEnumerable<ZipHeader> ReadStreamHeaderAsync(
Stream stream,
[System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken cancellationToken
)
{
if (stream is not SharpCompressStream)
{
if (stream is SourceStream src)
{
stream = new SharpCompressStream(
stream,
src.ReaderOptions.LeaveStreamOpen,
bufferSize: src.ReaderOptions.BufferSize
);
}
else
{
throw new ArgumentException("Stream must be a SharpCompressStream", nameof(stream));
}
}
var rewindableStream = (SharpCompressStream)stream;
while (true)
{
cancellationToken.ThrowIfCancellationRequested();
uint headerBytes = 0;
if (
_lastEntryHeader != null
&& FlagUtility.HasFlag(_lastEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor)
)
{
if (_lastEntryHeader.Part is null)
{
continue;
}
var pos = rewindableStream.CanSeek ? (long?)rewindableStream.Position : null;
var crc = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
if (crc == POST_DATA_DESCRIPTOR)
{
crc = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
}
_lastEntryHeader.Crc = crc;
ulong compSize = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
ulong uncompSize = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
headerBytes = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
bool isSentinel = compSize == 0xFFFFFFFF || uncompSize == 0xFFFFFFFF;
bool isHeader = headerBytes == 0x04034b50 || headerBytes == 0x02014b50;
if (!isHeader && !isSentinel)
{
compSize = (uncompSize << 32) | compSize;
uncompSize =
((ulong)headerBytes << 32)
| await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
headerBytes = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
}
else if (isSentinel)
{
compSize = await ReadUInt64Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
uncompSize = await ReadUInt64Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
}
_lastEntryHeader.CompressedSize = (long)compSize;
_lastEntryHeader.UncompressedSize = (long)uncompSize;
if (pos.HasValue)
{
_lastEntryHeader.DataStartPosition = pos - _lastEntryHeader.CompressedSize;
}
}
else if (_lastEntryHeader != null && _lastEntryHeader.IsZip64)
{
if (_lastEntryHeader.Part is null)
continue;
var pos = rewindableStream.CanSeek ? (long?)rewindableStream.Position : null;
headerBytes = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
await ReadBytesAsync(rewindableStream, 12, cancellationToken).ConfigureAwait(false); //skip a bunch of fields we don't care about
var crc = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
if (crc == POST_DATA_DESCRIPTOR)
{
crc = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
}
_lastEntryHeader.Crc = crc;
var compressed_size = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
var uncompressed_size = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
var test_header = !(headerBytes == 0x04034b50 || headerBytes == 0x02014b50);
var test_64bit = ((long)uncompressed_size << 32) | compressed_size;
if (test_64bit == _lastEntryHeader.CompressedSize && test_header)
{
_lastEntryHeader.UncompressedSize =
((long)await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false) << 32) | headerBytes;
headerBytes = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
}
else
{
_lastEntryHeader.UncompressedSize = uncompressed_size;
}
if (pos.HasValue)
{
_lastEntryHeader.DataStartPosition = pos - _lastEntryHeader.CompressedSize;
rewindableStream.Position = pos.Value + 4;
}
}
else
{
headerBytes = await ReadUInt32Async(rewindableStream, cancellationToken)
.ConfigureAwait(false);
}
_lastEntryHeader = null;
var header = await ReadHeaderAsync(
headerBytes,
rewindableStream,
cancellationToken: cancellationToken
).ConfigureAwait(false);
if (header is null)
{
yield break;
}
if (header.ZipHeaderType == ZipHeaderType.LocalEntry)
{
var local_header = ((LocalEntryHeader)header);
var dir_header = _entries?.FirstOrDefault(entry =>
entry.Key == local_header.Name
&& local_header.CompressedSize == 0
&& local_header.UncompressedSize == 0
&& local_header.Crc == 0
&& local_header.IsDirectory == false
);
if (dir_header != null)
{
local_header.UncompressedSize = dir_header.Size;
local_header.CompressedSize = dir_header.CompressedSize;
local_header.Crc = (uint)dir_header.Crc;
}
if (local_header.CompressedSize > 0)
{
header.HasData = true;
}
else if (local_header.Flags.HasFlag(HeaderFlags.UsePostDataDescriptor))
{
var nextHeaderBytes = await ReadUInt32Async(
rewindableStream,
cancellationToken
).ConfigureAwait(false);
((IStreamStack)rewindableStream).Rewind(sizeof(uint));
header.HasData = !IsHeader(nextHeaderBytes);
}
else
{
header.HasData = false;
}
}
yield return header;
}
}
private static async Task<byte[]> ReadBytesAsync(
Stream stream,
int count,
CancellationToken cancellationToken
)
{
var buffer = new byte[count];
var read = await stream.ReadAsync(buffer, 0, count, cancellationToken).ConfigureAwait(false);
if (read < count)
{
throw new EndOfStreamException();
}
return buffer;
}
private static async Task<uint> ReadUInt32Async(
Stream stream,
CancellationToken cancellationToken
)
{
var buffer = await ReadBytesAsync(stream, 4, cancellationToken).ConfigureAwait(false);
return BinaryPrimitives.ReadUInt32LittleEndian(buffer);
}
private static async Task<ushort> ReadUInt16Async(
Stream stream,
CancellationToken cancellationToken
)
{
var buffer = await ReadBytesAsync(stream, 2, cancellationToken).ConfigureAwait(false);
return BinaryPrimitives.ReadUInt16LittleEndian(buffer);
}
}

View File

@@ -1,6 +1,7 @@
using System;
using System.Buffers.Binary;
using System.Security.Cryptography;
using System.Text;
namespace SharpCompress.Common.Zip;
@@ -19,8 +20,24 @@ internal class WinzipAesEncryptionData
{
_keySize = keySize;
#if NETFRAMEWORK || NETSTANDARD2_0
#if NETFRAMEWORK
var rfc2898 = new Rfc2898DeriveBytes(password, salt, RFC2898_ITERATIONS);
KeyBytes = rfc2898.GetBytes(KeySizeInBytes);
IvBytes = rfc2898.GetBytes(KeySizeInBytes);
var generatedVerifyValue = rfc2898.GetBytes(2);
#elif NET10_0_OR_GREATER
var derivedKeySize = (KeySizeInBytes * 2) + 2;
var passwordBytes = Encoding.UTF8.GetBytes(password);
var derivedKey = Rfc2898DeriveBytes.Pbkdf2(
passwordBytes,
salt,
RFC2898_ITERATIONS,
HashAlgorithmName.SHA1,
derivedKeySize
);
KeyBytes = derivedKey.AsSpan(0, KeySizeInBytes).ToArray();
IvBytes = derivedKey.AsSpan(KeySizeInBytes, KeySizeInBytes).ToArray();
var generatedVerifyValue = derivedKey.AsSpan((KeySizeInBytes * 2), 2).ToArray();
#else
var rfc2898 = new Rfc2898DeriveBytes(
password,
@@ -28,11 +45,10 @@ internal class WinzipAesEncryptionData
RFC2898_ITERATIONS,
HashAlgorithmName.SHA1
);
#endif
KeyBytes = rfc2898.GetBytes(KeySizeInBytes); // 16 or 24 or 32 ???
KeyBytes = rfc2898.GetBytes(KeySizeInBytes);
IvBytes = rfc2898.GetBytes(KeySizeInBytes);
var generatedVerifyValue = rfc2898.GetBytes(2);
#endif
var verify = BinaryPrimitives.ReadInt16LittleEndian(passwordVerifyValue);
var generated = BinaryPrimitives.ReadInt16LittleEndian(generatedVerifyValue);
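A standalone sketch of the key-derivation layout shared by both branches above: the PBKDF2 output is sliced into key, IV, and a 2-byte password-verify value. The 32-byte key size and the 1000-iteration count are assumed example values (WinZip AES specifies 1000; the library's RFC2898_ITERATIONS constant is expected to match), and the class name is illustrative.

using System;
using System.Security.Cryptography;
using System.Text;

internal static class WinzipAesKeyLayoutSketch
{
    // Derive key material the same way as the NET10 branch above (sketch; assumed values noted in the lead-in).
    public static (byte[] Key, byte[] Iv, byte[] Verify) Derive(string password, byte[] salt, int keySizeInBytes = 32)
    {
        var derived = Rfc2898DeriveBytes.Pbkdf2(
            Encoding.UTF8.GetBytes(password),
            salt,
            1000,
            HashAlgorithmName.SHA1,
            (keySizeInBytes * 2) + 2);
        return (
            derived.AsSpan(0, keySizeInBytes).ToArray(),
            derived.AsSpan(keySizeInBytes, keySizeInBytes).ToArray(),
            derived.AsSpan(keySizeInBytes * 2, 2).ToArray());
    }
}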

View File

@@ -1,6 +1,9 @@
using System;
using System.Buffers.Binary;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
@@ -106,6 +109,129 @@ internal class ZipHeaderFactory
}
}
protected async Task<ZipHeader?> ReadHeaderAsync(
uint headerBytes,
Stream stream,
bool zip64 = false,
CancellationToken cancellationToken = default
)
{
switch (headerBytes)
{
case ENTRY_HEADER_BYTES:
{
var entryHeader = new LocalEntryHeader(_archiveEncoding);
await entryHeader.ReadAsync(stream, cancellationToken).ConfigureAwait(false);
await LoadHeaderAsync(entryHeader, stream, cancellationToken).ConfigureAwait(false);
_lastEntryHeader = entryHeader;
return entryHeader;
}
case DIRECTORY_START_HEADER_BYTES:
{
var entry = new DirectoryEntryHeader(_archiveEncoding);
await entry.ReadAsync(stream, cancellationToken).ConfigureAwait(false);
return entry;
}
case POST_DATA_DESCRIPTOR:
{
if (
_lastEntryHeader != null
&& FlagUtility.HasFlag(
_lastEntryHeader.NotNull().Flags,
HeaderFlags.UsePostDataDescriptor
)
)
{
_lastEntryHeader.Crc = await ReadUInt32Async(stream, cancellationToken)
.ConfigureAwait(false);
_lastEntryHeader.CompressedSize = zip64
? (long)await ReadUInt64Async(stream, cancellationToken)
.ConfigureAwait(false)
: await ReadUInt32Async(stream, cancellationToken).ConfigureAwait(false);
_lastEntryHeader.UncompressedSize = zip64
? (long)await ReadUInt64Async(stream, cancellationToken)
.ConfigureAwait(false)
: await ReadUInt32Async(stream, cancellationToken).ConfigureAwait(false);
}
else
{
await ReadBytesAsync(stream, zip64 ? 20 : 12, cancellationToken)
.ConfigureAwait(false);
}
return null;
}
case DIGITAL_SIGNATURE:
return null;
case DIRECTORY_END_HEADER_BYTES:
{
var entry = new DirectoryEndHeader();
await entry.ReadAsync(stream, cancellationToken).ConfigureAwait(false);
return entry;
}
case SPLIT_ARCHIVE_HEADER_BYTES:
{
return new SplitHeader();
}
case ZIP64_END_OF_CENTRAL_DIRECTORY:
{
var entry = new Zip64DirectoryEndHeader();
await entry.ReadAsync(stream, cancellationToken).ConfigureAwait(false);
return entry;
}
case ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR:
{
var entry = new Zip64DirectoryEndLocatorHeader();
await entry.ReadAsync(stream, cancellationToken).ConfigureAwait(false);
return entry;
}
default:
return null;
}
}
internal static async Task<byte[]> ReadBytesAsync(
Stream stream,
int count,
CancellationToken cancellationToken
)
{
var buffer = new byte[count];
var read = await stream.ReadAsync(buffer, 0, count, cancellationToken).ConfigureAwait(false);
if (read < count)
{
throw new EndOfStreamException();
}
return buffer;
}
internal static async Task<uint> ReadUInt32Async(
Stream stream,
CancellationToken cancellationToken
)
{
var buffer = await ReadBytesAsync(stream, 4, cancellationToken).ConfigureAwait(false);
return BinaryPrimitives.ReadUInt32LittleEndian(buffer);
}
internal static async Task<ushort> ReadUInt16Async(
Stream stream,
CancellationToken cancellationToken
)
{
var buffer = await ReadBytesAsync(stream, 2, cancellationToken).ConfigureAwait(false);
return BinaryPrimitives.ReadUInt16LittleEndian(buffer);
}
internal static async Task<ulong> ReadUInt64Async(
Stream stream,
CancellationToken cancellationToken
)
{
var buffer = await ReadBytesAsync(stream, 8, cancellationToken).ConfigureAwait(false);
return BinaryPrimitives.ReadUInt64LittleEndian(buffer);
}
internal static bool IsHeader(uint headerBytes)
{
switch (headerBytes)
@@ -176,12 +302,6 @@ internal class ZipHeaderFactory
return;
}
//if (FlagUtility.HasFlag(entryHeader.Flags, HeaderFlags.UsePostDataDescriptor))
//{
// entryHeader.PackedStream = new ReadOnlySubStream(stream);
//}
//else
//{
switch (_mode)
{
case StreamingMode.Seekable:
@@ -202,7 +322,87 @@ internal class ZipHeaderFactory
throw new InvalidFormatException("Invalid StreamingMode");
}
}
}
//}
private async Task LoadHeaderAsync(
ZipFileEntry entryHeader,
Stream stream,
CancellationToken cancellationToken
)
{
if (FlagUtility.HasFlag(entryHeader.Flags, HeaderFlags.Encrypted))
{
if (
!entryHeader.IsDirectory
&& entryHeader.CompressedSize == 0
&& FlagUtility.HasFlag(entryHeader.Flags, HeaderFlags.UsePostDataDescriptor)
)
{
throw new NotSupportedException(
"SharpCompress cannot currently read non-seekable Zip Streams with encrypted data that has been written in a non-seekable manner."
);
}
if (_password is null)
{
throw new CryptographicException("No password supplied for encrypted zip.");
}
entryHeader.Password = _password;
if (entryHeader.CompressionMethod == ZipCompressionMethod.WinzipAes)
{
var data = entryHeader.Extra.SingleOrDefault(x =>
x.Type == ExtraDataType.WinZipAes
);
if (data != null)
{
var keySize = (WinzipAesKeySize)data.DataBytes[4];
// Use the class's full-read helper so a short read surfaces as an
// EndOfStreamException instead of silently truncating the salt or verify value.
var salt = await ReadBytesAsync(
stream,
WinzipAesEncryptionData.KeyLengthInBytes(keySize) / 2,
cancellationToken
).ConfigureAwait(false);
var passwordVerifyValue = await ReadBytesAsync(stream, 2, cancellationToken)
.ConfigureAwait(false);
entryHeader.WinzipAesEncryptionData = new WinzipAesEncryptionData(
keySize,
salt,
passwordVerifyValue,
_password
);
entryHeader.CompressedSize -= (uint)(salt.Length + 2);
}
}
}
if (entryHeader.IsDirectory)
{
return;
}
switch (_mode)
{
case StreamingMode.Seekable:
{
entryHeader.DataStartPosition = stream.Position;
stream.Position += entryHeader.CompressedSize;
break;
}
case StreamingMode.Streaming:
{
entryHeader.PackedStream = stream;
break;
}
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
}
}
}

View File

@@ -544,6 +544,12 @@ internal sealed class CBZip2OutputStream : Stream, IStreamStack
private void EndBlock()
{
// Skip block processing for empty input (no data written)
if (last < 0)
{
return;
}
blockCRC = mCrc.GetFinalCRC();
combinedCRC = (combinedCRC << 1) | (int)(((uint)combinedCRC) >> 31);
combinedCRC ^= blockCRC;
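A minimal repro-style sketch for the guard above: compressing zero bytes should now complete cleanly. It assumes the library's public BZip2Stream(Stream, CompressionMode, bool) compressor wrapper; the class and method names are illustrative.

using System.IO;
using SharpCompress.Compressors;
using SharpCompress.Compressors.BZip2;

internal static class EmptyBZip2Sketch
{
    public static byte[] CompressEmpty()
    {
        using var output = new MemoryStream();
        // Dispose without writing anything: previously EndBlock ran with last < 0.
        using (new BZip2Stream(output, CompressionMode.Compress, false)) { }
        return output.ToArray();
    }
}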

View File

@@ -62,6 +62,10 @@ internal sealed class MultiVolumeReadOnlyStream : Stream, IStreamStack
base.Dispose(disposing);
if (disposing)
{
#if DEBUG_STREAMS
this.DebugDispose(typeof(MultiVolumeReadOnlyStream));
#endif
if (filePartEnumerator != null)
{
filePartEnumerator.Dispose();

View File

@@ -82,6 +82,9 @@ internal class RarStream : Stream, IStreamStack
{
if (disposing)
{
#if DEBUG_STREAMS
this.DebugDispose(typeof(RarStream));
#endif
ArrayPool<byte>.Shared.Return(this.tmpBuffer);
this.tmpBuffer = null;
}

View File

@@ -1,6 +1,9 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.GZip;
using SharpCompress.Archives.Tar;
@@ -46,6 +49,12 @@ public class GZipFactory
int bufferSize = ReaderOptions.DefaultBufferSize
) => GZipArchive.IsGZipFile(stream);
public override Task<bool> IsArchiveAsync(
Stream stream,
CancellationToken cancellationToken,
string? password = null
) => GZipArchive.IsGZipFileAsync(stream, cancellationToken);
#endregion
#region IArchiveFactory
@@ -54,10 +63,24 @@ public class GZipFactory
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
GZipArchive.Open(stream, readerOptions);
/// <inheritdoc/>
public async Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => await GZipArchive.OpenAsync(stream, readerOptions, cancellationToken).ConfigureAwait(false);
/// <inheritdoc/>
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
GZipArchive.Open(fileInfo, readerOptions);
/// <inheritdoc/>
public async Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => await GZipArchive.OpenAsync(fileInfo, readerOptions, cancellationToken).ConfigureAwait(false);
#endregion
#region IMultiArchiveFactory
@@ -66,10 +89,24 @@ public class GZipFactory
public IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null) =>
GZipArchive.Open(streams, readerOptions);
/// <inheritdoc/>
public async Task<IArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => await GZipArchive.OpenAsync(streams, readerOptions, cancellationToken).ConfigureAwait(false);
/// <inheritdoc/>
public IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null) =>
GZipArchive.Open(fileInfos, readerOptions);
/// <inheritdoc/>
public async Task<IArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => await GZipArchive.OpenAsync(fileInfos, readerOptions, cancellationToken).ConfigureAwait(false);
#endregion
#region IReaderFactory

View File

@@ -1,5 +1,7 @@
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Readers;
namespace SharpCompress.Factories;
@@ -42,6 +44,18 @@ public interface IFactory
int bufferSize = ReaderOptions.DefaultBufferSize
);
/// <summary>
/// Returns true if the stream represents an archive of the format defined by this type.
/// </summary>
/// <param name="stream">A stream, pointing to the beginning of the archive.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <param name="password">The optional password.</param>
Task<bool> IsArchiveAsync(
Stream stream,
CancellationToken cancellationToken,
string? password = null
);
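A small call-site sketch for the new interface member; how the IFactory instance is obtained is left open, and the helper below is illustrative only.

using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Factories;

internal static class FormatProbeSketch
{
    // Probe a single factory against a stream positioned at the start of the candidate archive.
    public static Task<bool> MatchesAsync(IFactory factory, Stream stream, CancellationToken ct = default) =>
        factory.IsArchiveAsync(stream, ct);
}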
/// <summary>
/// From a passed in archive (zip, rar, 7z, 001), return all parts.
/// </summary>

View File

@@ -1,5 +1,8 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Rar;
using SharpCompress.Common;
@@ -35,6 +38,12 @@ public class RarFactory : Factory, IArchiveFactory, IMultiArchiveFactory, IReade
int bufferSize = ReaderOptions.DefaultBufferSize
) => RarArchive.IsRarFile(stream);
public override Task<bool> IsArchiveAsync(
Stream stream,
CancellationToken cancellationToken,
string? password = null
) => RarArchive.IsRarFileAsync(stream, cancellationToken);
/// <inheritdoc/>
public override FileInfo? GetFilePart(int index, FileInfo part1) =>
RarArchiveVolumeFactory.GetFilePart(index, part1);
@@ -47,10 +56,24 @@ public class RarFactory : Factory, IArchiveFactory, IMultiArchiveFactory, IReade
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
RarArchive.Open(stream, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
/// <inheritdoc/>
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
RarArchive.Open(fileInfo, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
#endregion
#region IMultiArchiveFactory
@@ -59,10 +82,24 @@ public class RarFactory : Factory, IArchiveFactory, IMultiArchiveFactory, IReade
public IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null) =>
RarArchive.Open(streams, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
/// <inheritdoc/>
public IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null) =>
RarArchive.Open(fileInfos, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
#endregion
#region IReaderFactory

View File

@@ -1,5 +1,8 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.SevenZip;
using SharpCompress.Common;
@@ -34,6 +37,12 @@ public class SevenZipFactory : Factory, IArchiveFactory, IMultiArchiveFactory
int bufferSize = ReaderOptions.DefaultBufferSize
) => SevenZipArchive.IsSevenZipFile(stream);
public override Task<bool> IsArchiveAsync(
Stream stream,
CancellationToken cancellationToken,
string? password = null
) => SevenZipArchive.IsSevenZipFileAsync(stream, cancellationToken);
#endregion
#region IArchiveFactory
@@ -42,10 +51,24 @@ public class SevenZipFactory : Factory, IArchiveFactory, IMultiArchiveFactory
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
SevenZipArchive.Open(stream, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
/// <inheritdoc/>
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
SevenZipArchive.Open(fileInfo, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
#endregion
#region IMultiArchiveFactory
@@ -54,10 +77,24 @@ public class SevenZipFactory : Factory, IArchiveFactory, IMultiArchiveFactory
public IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null) =>
SevenZipArchive.Open(streams, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
/// <inheritdoc/>
public IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null) =>
SevenZipArchive.Open(fileInfos, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
#endregion
#region reader

View File

@@ -2,6 +2,8 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Tar;
using SharpCompress.Common;
@@ -59,6 +61,12 @@ public class TarFactory
int bufferSize = ReaderOptions.DefaultBufferSize
) => TarArchive.IsTarFile(stream);
public override Task<bool> IsArchiveAsync(
Stream stream,
CancellationToken cancellationToken,
string? password = null
) => TarArchive.IsTarFileAsync(stream, cancellationToken);
#endregion
#region IArchiveFactory
@@ -67,10 +75,24 @@ public class TarFactory
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
TarArchive.Open(stream, readerOptions);
/// <inheritdoc/>
public async Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => await TarArchive.OpenAsync(stream, readerOptions, cancellationToken).ConfigureAwait(false);
/// <inheritdoc/>
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
TarArchive.Open(fileInfo, readerOptions);
/// <inheritdoc/>
public async Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => await TarArchive.OpenAsync(fileInfo, readerOptions, cancellationToken).ConfigureAwait(false);
#endregion
#region IMultiArchiveFactory
@@ -79,10 +101,24 @@ public class TarFactory
public IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null) =>
TarArchive.Open(streams, readerOptions);
/// <inheritdoc/>
public async Task<IArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => await TarArchive.OpenAsync(streams, readerOptions, cancellationToken).ConfigureAwait(false);
/// <inheritdoc/>
public IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null) =>
TarArchive.Open(fileInfos, readerOptions);
/// <inheritdoc/>
public async Task<IArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => await TarArchive.OpenAsync(fileInfos, readerOptions, cancellationToken).ConfigureAwait(false);
#endregion
#region IReaderFactory
@@ -244,6 +280,7 @@ public class TarFactory
#endregion
#region IWriteableArchiveFactory
/// <inheritdoc/>

View File

@@ -1,5 +1,8 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
@@ -79,6 +82,44 @@ public class ZipFactory
return false;
}
public override async Task<bool> IsArchiveAsync(
Stream stream,
CancellationToken cancellationToken,
string? password = null
)
{
var startPosition = stream.CanSeek ? stream.Position : -1;
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(
stream,
bufferSize: ReaderOptions.DefaultBufferSize
);
}
if (await ZipArchive.IsZipFileAsync(stream, password, cancellationToken).ConfigureAwait(false))
{
return true;
}
if (!stream.CanSeek)
{
return false;
}
stream.Position = startPosition;
if (await ZipArchive.IsZipMultiAsync(stream, password, cancellationToken).ConfigureAwait(false))
{
return true;
}
stream.Position = startPosition;
return false;
}
/// <inheritdoc/>
public override FileInfo? GetFilePart(int index, FileInfo part1) =>
ZipArchiveVolumeFactory.GetFilePart(index, part1);
@@ -91,10 +132,22 @@ public class ZipFactory
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
ZipArchive.Open(stream, readerOptions);
public Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
/// <inheritdoc/>
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
ZipArchive.Open(fileInfo, readerOptions);
public Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
#endregion
#region IMultiArchiveFactory
@@ -103,10 +156,22 @@ public class ZipFactory
public IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null) =>
ZipArchive.Open(streams, readerOptions);
public Task<IArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
/// <inheritdoc/>
public IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null) =>
ZipArchive.Open(fileInfos, readerOptions);
public Task<IArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
#endregion
#region IReaderFactory

View File

@@ -15,7 +15,7 @@ public class SourceStream : Stream, IStreamStack
#endif
int IStreamStack.DefaultBufferSize { get; set; }
Stream IStreamStack.BaseStream() => _streams[_streamIndex];
Stream IStreamStack.BaseStream() => _streams[_stream];
int IStreamStack.BufferSize
{
@@ -35,7 +35,7 @@ public class SourceStream : Stream, IStreamStack
private readonly List<Stream> _streams;
private readonly Func<int, FileInfo?>? _getFilePart;
private readonly Func<int, Stream?>? _getStreamPart;
private int _streamIndex;
private int _stream;
public SourceStream(FileInfo file, Func<int, FileInfo?> getPart, ReaderOptions options)
: this(null, null, file, getPart, options) { }
@@ -59,7 +59,7 @@ public class SourceStream : Stream, IStreamStack
if (!IsFileMode)
{
_streams.Add(stream.NotNull("stream is null"));
_streams.Add(stream!);
_getStreamPart = getStreamPart;
_getFilePart = _ => null;
if (stream is FileStream fileStream)
@@ -69,12 +69,12 @@ public class SourceStream : Stream, IStreamStack
}
else
{
_files.Add(file.NotNull("file is null"));
_files.Add(file!);
_streams.Add(_files[0].OpenRead());
_getFilePart = getFilePart;
_getStreamPart = _ => null;
}
_streamIndex = 0;
_stream = 0;
_prevSize = 0;
#if DEBUG_STREAMS
@@ -93,12 +93,10 @@ public class SourceStream : Stream, IStreamStack
public ReaderOptions ReaderOptions { get; }
public bool IsFileMode { get; }
public IReadOnlyList<FileInfo> Files => _files;
public IReadOnlyList<Stream> Streams => _streams;
public IEnumerable<FileInfo> Files => _files;
public IEnumerable<Stream> Streams => _streams;
private Stream Current => _streams[_streamIndex];
public FileInfo CurrentFile => _files[_streamIndex];
private Stream Current => _streams[_stream];
public bool LoadStream(int index) //ensure all parts to id are loaded
{
@@ -109,7 +107,7 @@ public class SourceStream : Stream, IStreamStack
var f = _getFilePart.NotNull("GetFilePart is null")(_streams.Count);
if (f == null)
{
_streamIndex = _streams.Count - 1;
_stream = _streams.Count - 1;
return false;
}
//throw new Exception($"File part {idx} not available.");
@@ -121,7 +119,7 @@ public class SourceStream : Stream, IStreamStack
var s = _getStreamPart.NotNull("GetStreamPart is null")(_streams.Count);
if (s == null)
{
_streamIndex = _streams.Count - 1;
_stream = _streams.Count - 1;
return false;
}
//throw new Exception($"Stream part {idx} not available.");
@@ -139,10 +137,10 @@ public class SourceStream : Stream, IStreamStack
{
if (LoadStream(idx))
{
_streamIndex = idx;
_stream = idx;
}
return _streamIndex == idx;
return _stream == idx;
}
public override bool CanRead => true;
@@ -186,7 +184,7 @@ public class SourceStream : Stream, IStreamStack
var length = Current.Length;
// Load next file if present
if (!SetStream(_streamIndex + 1))
if (!SetStream(_stream + 1))
{
break;
}
@@ -225,7 +223,7 @@ public class SourceStream : Stream, IStreamStack
while (_prevSize + Current.Length < pos)
{
_prevSize += Current.Length;
SetStream(_streamIndex + 1);
SetStream(_stream + 1);
}
}
@@ -275,7 +273,7 @@ public class SourceStream : Stream, IStreamStack
var length = Current.Length;
// Load next file if present
if (!SetStream(_streamIndex + 1))
if (!SetStream(_stream + 1))
{
break;
}
@@ -324,7 +322,7 @@ public class SourceStream : Stream, IStreamStack
var length = Current.Length;
// Load next file if present
if (!SetStream(_streamIndex + 1))
if (!SetStream(_stream + 1))
{
break;
}
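The renamed _stream index drives the same part-advancing pattern seen in the Read and Seek hunks above: whenever a read or seek runs past the current part's length, SetStream(_stream + 1) loads (or fails to load) the next volume. A stripped-down sketch of that concatenation pattern, independent of SourceStream's actual fields; every name below is illustrative, not SharpCompress code:

// Simplified illustration of reading across concatenated part streams,
// mirroring the SetStream(_stream + 1) pattern above.
using System.Collections.Generic;
using System.IO;

internal static class ConcatReadSample
{
    public static int ReadAcrossParts(
        IReadOnlyList<Stream> parts,
        ref int index,
        byte[] buffer,
        int offset,
        int count
    )
    {
        var total = 0;
        while (count > 0 && index < parts.Count)
        {
            var read = parts[index].Read(buffer, offset, count);
            if (read == 0)
            {
                // Current part exhausted: advance to the next one, if any.
                index++;
                continue;
            }
            total += read;
            offset += read;
            count -= read;
        }
        return total;
    }
}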

View File

@@ -0,0 +1,38 @@
#if !NETFRAMEWORK && !NETSTANDARD2_0
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Polyfills;
internal static class AsyncEnumerableExtensions
{
public static async Task<T?> FirstOrDefaultAsync<T>(
this IAsyncEnumerable<T> source,
CancellationToken cancellationToken = default
)
{
await foreach (var item in source.WithCancellation(cancellationToken))
{
return item;
}
return default;
}
public static async Task<T?> FirstOrDefaultAsync<T>(
this IAsyncEnumerable<T> source,
Func<T, bool> predicate,
CancellationToken cancellationToken = default
)
{
await foreach (var item in source.WithCancellation(cancellationToken))
{
if (predicate(item))
{
return item;
}
}
return default;
}
}
#endif
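A usage sketch for the polyfill above. The helpers are internal and compiled only for the modern targets (the #if above excludes .NET Framework), so something like this would live inside the library or its tests; the async sequence here is purely illustrative:

// Illustrative use of the FirstOrDefaultAsync polyfill above.
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Polyfills;

internal static class FirstOrDefaultAsyncSample
{
    private static async IAsyncEnumerable<int> Numbers()
    {
        for (var i = 1; i <= 5; i++)
        {
            await Task.Yield();
            yield return i;
        }
    }

    public static async Task<int> FirstEvenAsync(CancellationToken ct) =>
        // Returns 2 for this sample sequence; default(int) if nothing matches.
        await Numbers().FirstOrDefaultAsync(n => n % 2 == 0, ct);
}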

View File

@@ -82,7 +82,7 @@ public static class IReaderExtensions
reader.Entry,
destinationDirectory,
options,
reader.WriteEntryToFileAsync,
(fileName, opts) => reader.WriteEntryToFileAsync(fileName, opts, cancellationToken),
cancellationToken
)
.ConfigureAwait(false);
@@ -101,10 +101,10 @@ public static class IReaderExtensions
reader.Entry,
destinationFileName,
options,
async (x, fm, ct) =>
async (x, fm) =>
{
using var fs = File.Open(destinationFileName, fm);
await reader.WriteEntryToAsync(fs, ct).ConfigureAwait(false);
await reader.WriteEntryToAsync(fs, cancellationToken).ConfigureAwait(false);
},
cancellationToken
)
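Both hunks make the same fix: the cancellationToken from the extension method's own parameter list is captured inside the delegate, instead of being threaded through an extra lambda parameter that the helper never supplies. A minimal illustration of that capture pattern; the helper and delegate shapes here are invented for the example:

// Illustrative only: capturing an outer CancellationToken in a delegate so the
// helper that invokes it does not need a token parameter of its own.
using System;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;

internal static class TokenCaptureSample
{
    public static Task ExtractAsync(
        string fileName,
        ExtractionOptions? options,
        CancellationToken cancellationToken
    )
    {
        // The delegate only knows about (name, opts); the token rides along
        // inside the closure, as in the IReaderExtensions change above.
        Func<string, ExtractionOptions?, Task> write = (name, opts) =>
            WriteEntryToFileAsync(name, opts, cancellationToken);
        return write(fileName, options);
    }

    // Hypothetical stand-in for the real reader.WriteEntryToFileAsync.
    private static Task WriteEntryToFileAsync(
        string name,
        ExtractionOptions? opts,
        CancellationToken ct
    ) => Task.CompletedTask;
}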

View File

@@ -75,6 +75,14 @@ public class ZipReader : AbstractReader<ZipEntry, ZipVolume>
);
}
break;
case ZipHeaderType.DirectoryEntry:
// DirectoryEntry headers in the central directory are intentionally skipped.
// In streaming mode, we can only read forward, and DirectoryEntry headers
// reference LocalEntry headers that have already been processed. The file
// data comes from LocalEntry headers, not DirectoryEntry headers.
// For multi-volume ZIPs where file data spans multiple files, use ZipArchive
// instead, which requires seekable streams.
break;
case ZipHeaderType.DirectoryEnd:
{
yield break;
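The comment above draws the practical line between the two APIs: ZipReader streams LocalEntry headers forward-only and skips the central directory, while ZipArchive reads the central directory, needs seekable input, and is the route for multi-volume sets. A short sketch of both, reusing calls that appear elsewhere in this diff; "sample.zip" is a placeholder path:

// Illustrative sketch: forward-only reading with ZipReader vs. random access
// with ZipArchive.
using System;
using System.IO;
using SharpCompress.Archives.Zip;
using SharpCompress.Readers.Zip;

internal static class ZipAccessSample
{
    public static void ListEntries()
    {
        // Streaming: only LocalEntry headers are surfaced; DirectoryEntry
        // headers from the central directory are skipped, as documented above.
        using (var stream = File.OpenRead("sample.zip"))
        using (var reader = ZipReader.Open(stream))
        {
            while (reader.MoveToNextEntry())
            {
                Console.WriteLine(reader.Entry.Key);
            }
        }

        // Seekable: ZipArchive works from the central directory and is the
        // API to use for multi-volume ZIPs.
        using (var archive = ZipArchive.Open("sample.zip"))
        {
            foreach (var entry in archive.Entries)
            {
                Console.WriteLine(entry.Key);
            }
        }
    }
}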

View File

@@ -2,11 +2,11 @@
<PropertyGroup>
<AssemblyTitle>SharpCompress - Pure C# Decompression/Compression</AssemblyTitle>
<NeutralLanguage>en-US</NeutralLanguage>
<VersionPrefix>0.41.0</VersionPrefix>
<AssemblyVersion>0.41.0</AssemblyVersion>
<FileVersion>0.41.0</FileVersion>
<VersionPrefix>0.42.0</VersionPrefix>
<AssemblyVersion>0.42.0</AssemblyVersion>
<FileVersion>0.42.0</FileVersion>
<Authors>Adam Hathcock</Authors>
<TargetFrameworks>net48;net481;netstandard2.0;net6.0;net8.0</TargetFrameworks>
<TargetFrameworks>net48;net8.0;net10.0</TargetFrameworks>
<AssemblyName>SharpCompress</AssemblyName>
<AssemblyOriginatorKeyFile>../../SharpCompress.snk</AssemblyOriginatorKeyFile>
<SignAssembly>true</SignAssembly>
@@ -17,7 +17,7 @@
<Copyright>Copyright (c) 2025 Adam Hathcock</Copyright>
<GenerateAssemblyTitleAttribute>false</GenerateAssemblyTitleAttribute>
<GenerateAssemblyProductAttribute>false</GenerateAssemblyProductAttribute>
<Description>SharpCompress is a compression library for NET Standard 2.0/NET 4.8/NET 4.8.1/NET 6.0/NET 8.0 that can unrar, decompress 7zip, decompress xz, zip/unzip, tar/untar lzip/unlzip, bzip2/unbzip2 and gzip/ungzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip is implemented.</Description>
<Description>SharpCompress is a compression library for NET 4.8/NET 8.0/NET 10.0 that can unrar, decompress 7zip, decompress xz, zip/unzip, tar/untar lzip/unlzip, bzip2/unbzip2 and gzip/ungzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip is implemented.</Description>
<PublishRepositoryUrl>true</PublishRepositoryUrl>
<IncludeSymbols>true</IncludeSymbols>
<DebugType>embedded</DebugType>
@@ -28,31 +28,29 @@
<EmbedUntrackedSources>true</EmbedUntrackedSources>
<AllowedOutputExtensionsInPackageBuildOutputFolder>$(AllowedOutputExtensionsInPackageBuildOutputFolder);.pdb</AllowedOutputExtensionsInPackageBuildOutputFolder>
</PropertyGroup>
<PropertyGroup Condition=" '$(TargetFramework)' == 'net8.0' ">
<PropertyGroup Condition=" '$(TargetFramework)' == 'net8.0' Or '$(TargetFramework)' == 'net10.0' ">
<IsTrimmable>true</IsTrimmable>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(TargetFramework)|$(Platform)'=='Debug|net8.0|AnyCPU'">
<DefineConstants>$(DefineConstants);DEBUG_STREAMS</DefineConstants>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(TargetFramework)|$(Platform)'=='Debug|net10.0|AnyCPU'">
<DefineConstants>$(DefineConstants);DEBUG_STREAMS</DefineConstants>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="ZstdSharp.Port" />
<PackageReference Include="Microsoft.SourceLink.GitHub" PrivateAssets="All" />
</ItemGroup>
<ItemGroup Condition=" '$(TargetFramework)' == 'net8.0'">
<ItemGroup Condition=" '$(TargetFramework)' == 'net8.0' Or '$(TargetFramework)' == 'net10.0' ">
<PackageReference Include="Microsoft.NET.ILLink.Tasks" PrivateAssets="All" />
</ItemGroup>
<ItemGroup Condition=" '$(TargetFramework)' == 'net48' Or '$(TargetFramework)' == 'net481' ">
<ItemGroup Condition=" '$(TargetFramework)' == 'net48' ">
<PackageReference Include="Microsoft.Bcl.AsyncInterfaces" />
<PackageReference Include="System.Text.Encoding.CodePages" />
<PackageReference Include="System.Buffers" />
<PackageReference Include="System.Memory" />
<PackageReference Include="Microsoft.NETFramework.ReferenceAssemblies" PrivateAssets="All" />
</ItemGroup>
<ItemGroup Condition=" '$(TargetFramework)' == 'netstandard2.0' ">
<PackageReference Include="Microsoft.Bcl.AsyncInterfaces" />
<PackageReference Include="System.Text.Encoding.CodePages" />
<PackageReference Include="System.Memory" />
</ItemGroup>
<ItemGroup>
<None Include="..\..\README.md" Pack="true" PackagePath="\" />
</ItemGroup>

View File

@@ -4,11 +4,11 @@
".NETFramework,Version=v4.8": {
"Microsoft.Bcl.AsyncInterfaces": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "3WA9q9yVqJp222P3x1wYIGDAkpjAku0TMUaaQV22g6L67AI0LdOIrVS7Ht2vJfLHGSPVuqN94vIr15qn+HEkHw==",
"requested": "[10.0.0, )",
"resolved": "10.0.0",
"contentHash": "vFuwSLj9QJBbNR0NeNO4YVASUbokxs+i/xbuu8B+Fs4FAZg5QaFa6eGrMaRqTzzNI5tAb97T7BhSxtLckFyiRA==",
"dependencies": {
"System.Threading.Tasks.Extensions": "4.5.4"
"System.Threading.Tasks.Extensions": "4.6.3"
}
},
"Microsoft.NETFramework.ReferenceAssemblies": {
@@ -49,12 +49,13 @@
},
"System.Text.Encoding.CodePages": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "OZIsVplFGaVY90G2SbpgU7EnCoOO5pw1t4ic21dBF3/1omrJFpAGoNAVpPyMVOC90/hvgkGG3VFqR13YgZMQfg==",
"requested": "[10.0.0, )",
"resolved": "10.0.0",
"contentHash": "QLP54mIATaBpjGlsZIxga38VPk1G9js0Kw651B+bvrXi2kSgGZYrxJSpM3whhTZCBK4HEBHX3fzfDQMw7CXHGQ==",
"dependencies": {
"System.Memory": "4.5.5",
"System.Runtime.CompilerServices.Unsafe": "6.0.0"
"System.Memory": "4.6.3",
"System.Runtime.CompilerServices.Unsafe": "6.1.2",
"System.ValueTuple": "4.6.1"
}
},
"ZstdSharp.Port": {
@@ -95,216 +96,25 @@
},
"System.Threading.Tasks.Extensions": {
"type": "Transitive",
"resolved": "4.5.4",
"contentHash": "zteT+G8xuGu6mS+mzDzYXbzS7rd3K6Fjb9RiZlYlJPam2/hU7JCBZBVEcywNuR+oZ1ncTvc/cq0faRr3P01OVg==",
"dependencies": {
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
}
}
},
".NETFramework,Version=v4.8.1": {
"Microsoft.Bcl.AsyncInterfaces": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "3WA9q9yVqJp222P3x1wYIGDAkpjAku0TMUaaQV22g6L67AI0LdOIrVS7Ht2vJfLHGSPVuqN94vIr15qn+HEkHw==",
"dependencies": {
"System.Threading.Tasks.Extensions": "4.5.4"
}
},
"Microsoft.NETFramework.ReferenceAssemblies": {
"type": "Direct",
"requested": "[1.0.3, )",
"resolved": "1.0.3",
"contentHash": "vUc9Npcs14QsyOD01tnv/m8sQUnGTGOw1BCmKcv77LBJY7OxhJ+zJF7UD/sCL3lYNFuqmQEVlkfS4Quif6FyYg==",
"dependencies": {
"Microsoft.NETFramework.ReferenceAssemblies.net481": "1.0.3"
}
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==",
"dependencies": {
"Microsoft.Build.Tasks.Git": "8.0.0",
"Microsoft.SourceLink.Common": "8.0.0"
}
},
"System.Buffers": {
"type": "Direct",
"requested": "[4.6.1, )",
"resolved": "4.6.1",
"contentHash": "N8GXpmiLMtljq7gwvyS+1QvKT/W2J8sNAvx+HVg4NGmsG/H+2k/y9QI23auLJRterrzCiDH+IWAw4V/GPwsMlw=="
},
"System.Memory": {
"type": "Direct",
"requested": "[4.6.3, )",
"resolved": "4.6.3",
"contentHash": "qdcDOgnFZY40+Q9876JUHnlHu7bosOHX8XISRoH94fwk6hgaeQGSgfZd8srWRZNt5bV9ZW2TljcegDNxsf+96A==",
"contentHash": "7sCiwilJLYbTZELaKnc7RecBBXWXA+xMLQWZKWawBxYjp6DBlSE3v9/UcvKBvr1vv2tTOhipiogM8rRmxlhrVA==",
"dependencies": {
"System.Buffers": "4.6.1",
"System.Numerics.Vectors": "4.6.1",
"System.Runtime.CompilerServices.Unsafe": "6.1.2"
}
},
"System.Text.Encoding.CodePages": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "OZIsVplFGaVY90G2SbpgU7EnCoOO5pw1t4ic21dBF3/1omrJFpAGoNAVpPyMVOC90/hvgkGG3VFqR13YgZMQfg==",
"dependencies": {
"System.Memory": "4.5.5",
"System.Runtime.CompilerServices.Unsafe": "6.0.0"
}
},
"ZstdSharp.Port": {
"type": "Direct",
"requested": "[0.8.6, )",
"resolved": "0.8.6",
"contentHash": "iP4jVLQoQmUjMU88g1WObiNr6YKZGvh4aOXn3yOJsHqZsflwRsxZPcIBvNXgjXO3vQKSLctXGLTpcBPLnWPS8A==",
"dependencies": {
"Microsoft.Bcl.AsyncInterfaces": "5.0.0",
"System.Memory": "4.5.5",
"System.Runtime.CompilerServices.Unsafe": "6.0.0"
}
},
"Microsoft.Build.Tasks.Git": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
},
"Microsoft.NETFramework.ReferenceAssemblies.net481": {
"type": "Transitive",
"resolved": "1.0.3",
"contentHash": "Vv/20vgHS7VglVOVh8J3Iz/MA+VYKVRp9f7r2qiKBMuzviTOmocG70yq0Q8T5OTmCONkEAIJwETD1zhEfLkAXQ=="
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
},
"System.Numerics.Vectors": {
"System.ValueTuple": {
"type": "Transitive",
"resolved": "4.6.1",
"contentHash": "sQxefTnhagrhoq2ReR0D/6K0zJcr9Hrd6kikeXsA1I8kOCboTavcUC4r7TSfpKFeE163uMuxZcyfO1mGO3EN8Q=="
},
"System.Runtime.CompilerServices.Unsafe": {
"type": "Transitive",
"resolved": "6.1.2",
"contentHash": "2hBr6zdbIBTDE3EhK7NSVNdX58uTK6iHW/P/Axmm9sl1xoGSLqDvMtpecn226TNwHByFokYwJmt/aQQNlO5CRw=="
},
"System.Threading.Tasks.Extensions": {
"type": "Transitive",
"resolved": "4.5.4",
"contentHash": "zteT+G8xuGu6mS+mzDzYXbzS7rd3K6Fjb9RiZlYlJPam2/hU7JCBZBVEcywNuR+oZ1ncTvc/cq0faRr3P01OVg==",
"dependencies": {
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
}
"contentHash": "+RJT4qaekpZ7DDLhf+LTjq+E48jieKiY9ulJ+BoxKmZblIJfIJT8Ufcaa/clQqnYvWs8jugfGSMu8ylS0caG0w=="
}
},
".NETStandard,Version=v2.0": {
"Microsoft.Bcl.AsyncInterfaces": {
"net10.0": {
"Microsoft.NET.ILLink.Tasks": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "3WA9q9yVqJp222P3x1wYIGDAkpjAku0TMUaaQV22g6L67AI0LdOIrVS7Ht2vJfLHGSPVuqN94vIr15qn+HEkHw==",
"dependencies": {
"System.Threading.Tasks.Extensions": "4.5.4"
}
"requested": "[10.0.0, )",
"resolved": "10.0.0",
"contentHash": "kICGrGYEzCNI3wPzfEXcwNHgTvlvVn9yJDhSdRK+oZQy4jvYH529u7O0xf5ocQKzOMjfS07+3z9PKRIjrFMJDA=="
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==",
"dependencies": {
"Microsoft.Build.Tasks.Git": "8.0.0",
"Microsoft.SourceLink.Common": "8.0.0"
}
},
"NETStandard.Library": {
"type": "Direct",
"requested": "[2.0.3, )",
"resolved": "2.0.3",
"contentHash": "st47PosZSHrjECdjeIzZQbzivYBJFv6P2nv4cj2ypdI204DO+vZ7l5raGMiX4eXMJ53RfOIg+/s4DHVZ54Nu2A==",
"dependencies": {
"Microsoft.NETCore.Platforms": "1.1.0"
}
},
"System.Memory": {
"type": "Direct",
"requested": "[4.6.3, )",
"resolved": "4.6.3",
"contentHash": "qdcDOgnFZY40+Q9876JUHnlHu7bosOHX8XISRoH94fwk6hgaeQGSgfZd8srWRZNt5bV9ZW2TljcegDNxsf+96A==",
"dependencies": {
"System.Buffers": "4.6.1",
"System.Numerics.Vectors": "4.6.1",
"System.Runtime.CompilerServices.Unsafe": "6.1.2"
}
},
"System.Text.Encoding.CodePages": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "OZIsVplFGaVY90G2SbpgU7EnCoOO5pw1t4ic21dBF3/1omrJFpAGoNAVpPyMVOC90/hvgkGG3VFqR13YgZMQfg==",
"dependencies": {
"System.Memory": "4.5.5",
"System.Runtime.CompilerServices.Unsafe": "6.0.0"
}
},
"ZstdSharp.Port": {
"type": "Direct",
"requested": "[0.8.6, )",
"resolved": "0.8.6",
"contentHash": "iP4jVLQoQmUjMU88g1WObiNr6YKZGvh4aOXn3yOJsHqZsflwRsxZPcIBvNXgjXO3vQKSLctXGLTpcBPLnWPS8A==",
"dependencies": {
"Microsoft.Bcl.AsyncInterfaces": "5.0.0",
"System.Memory": "4.5.5",
"System.Runtime.CompilerServices.Unsafe": "6.0.0"
}
},
"Microsoft.Build.Tasks.Git": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
},
"Microsoft.NETCore.Platforms": {
"type": "Transitive",
"resolved": "1.1.0",
"contentHash": "kz0PEW2lhqygehI/d6XsPCQzD7ff7gUJaVGPVETX611eadGsA3A877GdSlU0LRVMCTH/+P3o2iDTak+S08V2+A=="
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
},
"System.Numerics.Vectors": {
"type": "Transitive",
"resolved": "4.6.1",
"contentHash": "sQxefTnhagrhoq2ReR0D/6K0zJcr9Hrd6kikeXsA1I8kOCboTavcUC4r7TSfpKFeE163uMuxZcyfO1mGO3EN8Q=="
},
"System.Runtime.CompilerServices.Unsafe": {
"type": "Transitive",
"resolved": "6.1.2",
"contentHash": "2hBr6zdbIBTDE3EhK7NSVNdX58uTK6iHW/P/Axmm9sl1xoGSLqDvMtpecn226TNwHByFokYwJmt/aQQNlO5CRw=="
},
"System.Threading.Tasks.Extensions": {
"type": "Transitive",
"resolved": "4.5.4",
"contentHash": "zteT+G8xuGu6mS+mzDzYXbzS7rd3K6Fjb9RiZlYlJPam2/hU7JCBZBVEcywNuR+oZ1ncTvc/cq0faRr3P01OVg==",
"dependencies": {
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
}
},
"System.Buffers": {
"type": "CentralTransitive",
"requested": "[4.6.1, )",
"resolved": "4.6.1",
"contentHash": "N8GXpmiLMtljq7gwvyS+1QvKT/W2J8sNAvx+HVg4NGmsG/H+2k/y9QI23auLJRterrzCiDH+IWAw4V/GPwsMlw=="
}
},
"net6.0": {
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[8.0.0, )",
@@ -335,9 +145,9 @@
"net8.0": {
"Microsoft.NET.ILLink.Tasks": {
"type": "Direct",
"requested": "[8.0.21, )",
"resolved": "8.0.21",
"contentHash": "s8H5PZQs50OcNkaB6Si54+v3GWM7vzs6vxFRMlD3aXsbM+aPCtod62gmK0BYWou9diGzmo56j8cIf/PziijDqQ=="
"requested": "[10.0.0, )",
"resolved": "10.0.0",
"contentHash": "kICGrGYEzCNI3wPzfEXcwNHgTvlvVn9yJDhSdRK+oZQy4jvYH529u7O0xf5ocQKzOMjfS07+3z9PKRIjrFMJDA=="
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",

View File

@@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net8.0</TargetFramework>
<TargetFramework>net10.0</TargetFramework>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="JetBrains.Profiler.SelfApi" />

View File

@@ -1,7 +1,7 @@
{
"version": 2,
"dependencies": {
"net8.0": {
"net10.0": {
"JetBrains.Profiler.SelfApi": {
"type": "Direct",
"requested": "[2.5.14, )",

View File

@@ -134,7 +134,6 @@ public class ArchiveTests : ReaderTests
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
Assert.False(entry.SupportsMultiThreading);
entry.WriteToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
@@ -267,31 +266,6 @@ public class ArchiveTests : ReaderTests
VerifyFiles();
}
protected async Task ArchiveFileRead_Multithreaded(
IArchiveFactory archiveFactory,
string testArchive,
ReaderOptions? readerOptions = null
)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
var tasks = new List<Task>();
using (var archive = archiveFactory.Open(new FileInfo(testArchive), readerOptions))
{
Assert.True(archive.SupportsMultiThreading);
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
Assert.True(entry.SupportsMultiThreading);
var t = entry.WriteToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
tasks.Add(t);
}
}
await Task.WhenAll(tasks);
VerifyFiles();
}
protected void ArchiveFileRead(
IArchiveFactory archiveFactory,
string testArchive,
@@ -315,11 +289,6 @@ public class ArchiveTests : ReaderTests
protected void ArchiveFileRead(string testArchive, ReaderOptions? readerOptions = null) =>
ArchiveFileRead(ArchiveFactory.AutoFactory, testArchive, readerOptions);
protected Task ArchiveFileRead_Multithreaded(
string testArchive,
ReaderOptions? readerOptions = null
) => ArchiveFileRead_Multithreaded(ArchiveFactory.AutoFactory, testArchive, readerOptions);
protected void ArchiveFileSkip(
string testArchive,
string fileOrder,

View File

@@ -1,6 +1,5 @@
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Rar;
using SharpCompress.Common;
@@ -293,15 +292,9 @@ public class RarArchiveTests : ArchiveTests
[Fact]
public void Rar_ArchiveFileRead() => ArchiveFileRead("Rar.rar");
[Fact]
public Task Rar_ArchiveFileRead_Multithreaded() => ArchiveFileRead_Multithreaded("Rar.rar");
[Fact]
public void Rar5_ArchiveFileRead() => ArchiveFileRead("Rar5.rar");
[Fact]
public Task Rar5_ArchiveFileRead_Multithreaded() => ArchiveFileRead_Multithreaded("Rar5.rar");
[Fact]
public void Rar_ArchiveFileRead_HasDirectories() =>
DoRar_ArchiveFileRead_HasDirectories("Rar.rar");
@@ -366,9 +359,6 @@ public class RarArchiveTests : ArchiveTests
[Fact]
public void Rar2_ArchiveFileRead() => ArchiveFileRead("Rar2.rar");
[Fact]
public Task Rar2_ArchiveFileRead_Multithreaded() => ArchiveFileRead_Multithreaded("Rar2.rar");
[Fact]
public void Rar15_ArchiveFileRead()
{

View File

@@ -1,12 +1,12 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net8.0;net48</TargetFrameworks>
<TargetFrameworks>net10.0;net48</TargetFrameworks>
<AssemblyName>SharpCompress.Test</AssemblyName>
<PackageId>SharpCompress.Test</PackageId>
<AssemblyOriginatorKeyFile>SharpCompress.Test.snk</AssemblyOriginatorKeyFile>
<SignAssembly>true</SignAssembly>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(TargetFramework)|$(Platform)'=='Debug|net8.0|AnyCPU'">
<PropertyGroup Condition="'$(Configuration)|$(TargetFramework)|$(Platform)'=='Debug|net10.0|AnyCPU'">
<DefineConstants>$(DefineConstants);DEBUG_STREAMS</DefineConstants>
</PropertyGroup>
<PropertyGroup Condition="$([System.Runtime.InteropServices.RuntimeInformation]::IsOSPlatform($([System.Runtime.InteropServices.OSPlatform]::Windows)))">

View File

@@ -2,7 +2,6 @@ using System;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
@@ -89,10 +88,6 @@ public class ZipArchiveTests : ArchiveTests
[Fact]
public void Zip_Deflate_ArchiveFileRead() => ArchiveFileRead("Zip.deflate.zip");
[Fact]
public Task Zip_Deflate_ArchiveFileRead_Multithreaded() =>
ArchiveFileRead_Multithreaded("Zip.deflate.zip");
[Fact]
public void Zip_Deflate_ArchiveExtractToDirectory() =>
ArchiveExtractToDirectory("Zip.deflate.zip");

View File

@@ -1,5 +1,7 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Archives;
using SharpCompress.Common;
using SharpCompress.IO;
@@ -397,4 +399,41 @@ public class ZipReaderTests : ReaderTests
Assert.Equal("second.txt", reader.Entry.Key);
Assert.Equal(197, reader.Entry.Size);
}
[Fact]
public void ZipReader_Returns_Same_Entries_As_ZipArchive()
{
// Verifies that ZipReader and ZipArchive return the same entries
// for standard single-volume ZIP files. ZipReader processes LocalEntry
// headers sequentially, while ZipArchive uses DirectoryEntry headers
// from the central directory and seeks to LocalEntry headers for data.
var testFiles = new[] { "Zip.none.zip", "Zip.deflate.zip", "Zip.none.issue86.zip" };
foreach (var testFile in testFiles)
{
var path = Path.Combine(TEST_ARCHIVES_PATH, testFile);
var readerKeys = new List<string>();
using (var stream = File.OpenRead(path))
using (var reader = ZipReader.Open(stream))
{
while (reader.MoveToNextEntry())
{
readerKeys.Add(reader.Entry.Key!);
}
}
var archiveKeys = new List<string>();
using (var archive = Archives.Zip.ZipArchive.Open(path))
{
foreach (var entry in archive.Entries)
{
archiveKeys.Add(entry.Key!);
}
}
Assert.Equal(archiveKeys.Count, readerKeys.Count);
Assert.Equal(archiveKeys.OrderBy(k => k), readerKeys.OrderBy(k => k));
}
}
}

View File

@@ -1,5 +1,7 @@
using System.IO;
using System.Text;
using SharpCompress.Common;
using SharpCompress.Writers;
using Xunit;
namespace SharpCompress.Test.Zip;
@@ -9,6 +11,42 @@ public class ZipWriterTests : WriterTests
public ZipWriterTests()
: base(ArchiveType.Zip) { }
[Fact]
public void Zip_BZip2_Write_EmptyFile()
{
// Test that writing an empty file with BZip2 compression doesn't throw DivideByZeroException
using var memoryStream = new MemoryStream();
var options = new WriterOptions(CompressionType.BZip2)
{
ArchiveEncoding = new ArchiveEncoding { Default = new UTF8Encoding(false) },
};
using (var writer = WriterFactory.Open(memoryStream, ArchiveType.Zip, options))
{
writer.Write("test-folder/zero-byte-file.txt", Stream.Null);
}
Assert.True(memoryStream.Length > 0);
}
[Fact]
public void Zip_BZip2_Write_EmptyFolder()
{
// Test that writing an empty folder entry with BZip2 compression doesn't throw DivideByZeroException
using var memoryStream = new MemoryStream();
var options = new WriterOptions(CompressionType.BZip2)
{
ArchiveEncoding = new ArchiveEncoding { Default = new UTF8Encoding(false) },
};
using (var writer = WriterFactory.Open(memoryStream, ArchiveType.Zip, options))
{
writer.Write("test-empty-folder/", Stream.Null);
}
Assert.True(memoryStream.Length > 0);
}
[Fact]
public void Zip_Deflate_Write() =>
Write(

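As a complement to the length assertions above, a round-trip sketch that writes a zero-byte entry with BZip2 and reads it back; it reuses only APIs shown elsewhere in this diff, the entry name is arbitrary, and LeaveStreamOpen is set explicitly rather than relying on the default:

// Illustrative round-trip for the empty-entry case exercised above.
using System.IO;
using System.Linq;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.Writers;

internal static class EmptyBZip2RoundTripSample
{
    public static long WriteAndReadBack()
    {
        using var memoryStream = new MemoryStream();
        var options = new WriterOptions(CompressionType.BZip2) { LeaveStreamOpen = true };
        using (var writer = WriterFactory.Open(memoryStream, ArchiveType.Zip, options))
        {
            writer.Write("empty.txt", Stream.Null); // zero-byte payload
        }

        memoryStream.Position = 0;
        using var archive = ZipArchive.Open(memoryStream);
        // Expected to be 0 for the zero-byte entry written above.
        return archive.Entries.Single(e => e.Key == "empty.txt").Size;
    }
}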
View File

@@ -13,11 +13,11 @@
},
"Microsoft.NET.Test.Sdk": {
"type": "Direct",
"requested": "[18.0.0, )",
"resolved": "18.0.0",
"contentHash": "bvxj2Asb7nT+tqOFFerrhQeEjUYLwx0Poi0Rznu63WbqN+A4uDn1t5NWXfAOOQsF6lpmK6N2v+Vvgso7KWZS7g==",
"requested": "[18.0.1, )",
"resolved": "18.0.1",
"contentHash": "WNpu6vI2rA0pXY4r7NKxCN16XRWl5uHu6qjuyVLoDo6oYEggIQefrMjkRuibQHm/NslIUNCcKftvoWAN80MSAg==",
"dependencies": {
"Microsoft.CodeCoverage": "18.0.0"
"Microsoft.CodeCoverage": "18.0.1"
}
},
"Microsoft.NETFramework.ReferenceAssemblies": {
@@ -29,6 +29,12 @@
"Microsoft.NETFramework.ReferenceAssemblies.net48": "1.0.3"
}
},
"Mono.Posix.NETStandard": {
"type": "Direct",
"requested": "[1.0.0, )",
"resolved": "1.0.0",
"contentHash": "vSN/L1uaVwKsiLa95bYu2SGkF0iY3xMblTfxc8alSziPuVfJpj3geVqHGAA75J7cZkMuKpFVikz82Lo6y6LLdA=="
},
"xunit": {
"type": "Direct",
"requested": "[2.9.3, )",
@@ -51,8 +57,8 @@
},
"Microsoft.CodeCoverage": {
"type": "Transitive",
"resolved": "18.0.0",
"contentHash": "DFPhMrsIofgJ1DDU3ModqqRArDm15/bNl4ecmcuBspZkZ4ONYnCC0R8U27WzK7cYv6r8l6Q/fRmvg7cb+I/dJA=="
"resolved": "18.0.1",
"contentHash": "O+utSr97NAJowIQT/OVp3Lh9QgW/wALVTP4RG1m2AfFP4IyJmJz0ZBmFJUsRQiAPgq6IRC0t8AAzsiPIsaUDEA=="
},
"Microsoft.NETFramework.ReferenceAssemblies.net48": {
"type": "Transitive",
@@ -92,12 +98,17 @@
},
"System.Threading.Tasks.Extensions": {
"type": "Transitive",
"resolved": "4.5.4",
"contentHash": "zteT+G8xuGu6mS+mzDzYXbzS7rd3K6Fjb9RiZlYlJPam2/hU7JCBZBVEcywNuR+oZ1ncTvc/cq0faRr3P01OVg==",
"resolved": "4.6.3",
"contentHash": "7sCiwilJLYbTZELaKnc7RecBBXWXA+xMLQWZKWawBxYjp6DBlSE3v9/UcvKBvr1vv2tTOhipiogM8rRmxlhrVA==",
"dependencies": {
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
"System.Runtime.CompilerServices.Unsafe": "6.1.2"
}
},
"System.ValueTuple": {
"type": "Transitive",
"resolved": "4.6.1",
"contentHash": "+RJT4qaekpZ7DDLhf+LTjq+E48jieKiY9ulJ+BoxKmZblIJfIJT8Ufcaa/clQqnYvWs8jugfGSMu8ylS0caG0w=="
},
"xunit.abstractions": {
"type": "Transitive",
"resolved": "2.0.3",
@@ -141,20 +152,20 @@
"sharpcompress": {
"type": "Project",
"dependencies": {
"Microsoft.Bcl.AsyncInterfaces": "[8.0.0, )",
"Microsoft.Bcl.AsyncInterfaces": "[10.0.0, )",
"System.Buffers": "[4.6.1, )",
"System.Memory": "[4.6.3, )",
"System.Text.Encoding.CodePages": "[8.0.0, )",
"System.Text.Encoding.CodePages": "[10.0.0, )",
"ZstdSharp.Port": "[0.8.6, )"
}
},
"Microsoft.Bcl.AsyncInterfaces": {
"type": "CentralTransitive",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "3WA9q9yVqJp222P3x1wYIGDAkpjAku0TMUaaQV22g6L67AI0LdOIrVS7Ht2vJfLHGSPVuqN94vIr15qn+HEkHw==",
"requested": "[10.0.0, )",
"resolved": "10.0.0",
"contentHash": "vFuwSLj9QJBbNR0NeNO4YVASUbokxs+i/xbuu8B+Fs4FAZg5QaFa6eGrMaRqTzzNI5tAb97T7BhSxtLckFyiRA==",
"dependencies": {
"System.Threading.Tasks.Extensions": "4.5.4"
"System.Threading.Tasks.Extensions": "4.6.3"
}
},
"System.Buffers": {
@@ -176,12 +187,13 @@
},
"System.Text.Encoding.CodePages": {
"type": "CentralTransitive",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "OZIsVplFGaVY90G2SbpgU7EnCoOO5pw1t4ic21dBF3/1omrJFpAGoNAVpPyMVOC90/hvgkGG3VFqR13YgZMQfg==",
"requested": "[10.0.0, )",
"resolved": "10.0.0",
"contentHash": "QLP54mIATaBpjGlsZIxga38VPk1G9js0Kw651B+bvrXi2kSgGZYrxJSpM3whhTZCBK4HEBHX3fzfDQMw7CXHGQ==",
"dependencies": {
"System.Memory": "4.5.5",
"System.Runtime.CompilerServices.Unsafe": "6.0.0"
"System.Memory": "4.6.3",
"System.Runtime.CompilerServices.Unsafe": "6.1.2",
"System.ValueTuple": "4.6.1"
}
},
"ZstdSharp.Port": {
@@ -196,7 +208,7 @@
}
}
},
"net8.0": {
"net10.0": {
"AwesomeAssertions": {
"type": "Direct",
"requested": "[9.3.0, )",
@@ -205,12 +217,12 @@
},
"Microsoft.NET.Test.Sdk": {
"type": "Direct",
"requested": "[18.0.0, )",
"resolved": "18.0.0",
"contentHash": "bvxj2Asb7nT+tqOFFerrhQeEjUYLwx0Poi0Rznu63WbqN+A4uDn1t5NWXfAOOQsF6lpmK6N2v+Vvgso7KWZS7g==",
"requested": "[18.0.1, )",
"resolved": "18.0.1",
"contentHash": "WNpu6vI2rA0pXY4r7NKxCN16XRWl5uHu6qjuyVLoDo6oYEggIQefrMjkRuibQHm/NslIUNCcKftvoWAN80MSAg==",
"dependencies": {
"Microsoft.CodeCoverage": "18.0.0",
"Microsoft.TestPlatform.TestHost": "18.0.0"
"Microsoft.CodeCoverage": "18.0.1",
"Microsoft.TestPlatform.TestHost": "18.0.1"
}
},
"Microsoft.NETFramework.ReferenceAssemblies": {
@@ -247,8 +259,8 @@
},
"Microsoft.CodeCoverage": {
"type": "Transitive",
"resolved": "18.0.0",
"contentHash": "DFPhMrsIofgJ1DDU3ModqqRArDm15/bNl4ecmcuBspZkZ4ONYnCC0R8U27WzK7cYv6r8l6Q/fRmvg7cb+I/dJA=="
"resolved": "18.0.1",
"contentHash": "O+utSr97NAJowIQT/OVp3Lh9QgW/wALVTP4RG1m2AfFP4IyJmJz0ZBmFJUsRQiAPgq6IRC0t8AAzsiPIsaUDEA=="
},
"Microsoft.NETFramework.ReferenceAssemblies.net461": {
"type": "Transitive",
@@ -257,18 +269,18 @@
},
"Microsoft.TestPlatform.ObjectModel": {
"type": "Transitive",
"resolved": "18.0.0",
"contentHash": "Al/a99ymb8UdEEh6DKNiaoFn5i8fvX5PdM9LfU9Z/Q8NJrlyHHzF+LRHLbR+t89gRsJ2fFMpwYxgEn3eH1BQwA==",
"resolved": "18.0.1",
"contentHash": "qT/mwMcLF9BieRkzOBPL2qCopl8hQu6A1P7JWAoj/FMu5i9vds/7cjbJ/LLtaiwWevWLAeD5v5wjQJ/l6jvhWQ==",
"dependencies": {
"System.Reflection.Metadata": "8.0.0"
}
},
"Microsoft.TestPlatform.TestHost": {
"type": "Transitive",
"resolved": "18.0.0",
"contentHash": "aAxE8Thr9ZHGrljOYaDeLJqitQi75iE4xeEFn6CEGFirlHSn1KwpKPniuEn6zCLZ90Z3XqNlrC3ZJTuvBov45w==",
"resolved": "18.0.1",
"contentHash": "uDJKAEjFTaa2wHdWlfo6ektyoh+WD4/Eesrwb4FpBFKsLGehhACVnwwTI4qD3FrIlIEPlxdXg3SyrYRIcO+RRQ==",
"dependencies": {
"Microsoft.TestPlatform.ObjectModel": "18.0.0",
"Microsoft.TestPlatform.ObjectModel": "18.0.1",
"Newtonsoft.Json": "13.0.3"
}
},