Compare commits


9 Commits

Author SHA1 Message Date
copilot-swe-agent[bot]
4a6e5232ae Add opt-in multi-threading support with SupportsMultiThreadedExtraction flag
- Added IArchive.SupportsMultiThreadedExtraction property to indicate if multi-threading is supported
- Added ReaderOptions.EnableMultiThreadedExtraction option to opt-in to multi-threading
- Updated SeekableZipFilePart, TarFilePart, and SeekableFilePart to check the flag
- Added test to verify multi-threading flag behavior
- Multi-threading is now disabled by default for backward compatibility

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-18 16:27:59 +00:00
copilot-swe-agent[bot]
3e23a6e5a6 Add multi-threading support for file-based archives - sync test passing
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-18 16:09:54 +00:00
copilot-swe-agent[bot]
e0a43e9727 Initial plan 2026-01-18 15:56:08 +00:00
Adam Hathcock
f5d83c0e33 Merge pull request #1135 from adamhathcock/copilot/consolidate-compile-flags 2026-01-15 18:47:37 +00:00
copilot-swe-agent[bot]
d2cb792d91 Change NET6_0_OR_GREATER to NET8_0_OR_GREATER
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-15 18:31:06 +00:00
copilot-swe-agent[bot]
52fef492a5 Additional simplifications: Remove NETCF, fix NET60 typo, consolidate NETCOREAPP2_1 pattern
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-15 18:09:06 +00:00
copilot-swe-agent[bot]
a5300f3383 Replace NETFRAMEWORK and NETSTANDARD2_0 with LEGACY_DOTNET compile flag
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-15 18:05:14 +00:00
copilot-swe-agent[bot]
cab3e7d498 Initial analysis: Planning compile flags consolidation
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-15 17:55:37 +00:00
copilot-swe-agent[bot]
405dbb30cd Initial plan 2026-01-15 17:50:54 +00:00
67 changed files with 903 additions and 746 deletions

View File

@@ -1,7 +1,7 @@
// Copyright (c) Six Labors.
// Licensed under the Apache License, Version 2.0.
#if !NETSTANDARD2_0 && !NETSTANDARD2_1 && !NETFRAMEWORK
#if !LEGACY_DOTNET
#define SUPPORTS_RUNTIME_INTRINSICS
#define SUPPORTS_HOTPATH
#endif

View File

@@ -145,6 +145,19 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IAsyncArchive
/// </summary>
public virtual bool IsEncrypted => false;
/// <summary>
/// Returns whether multi-threaded extraction is supported for this archive.
/// Multi-threading is supported when:
/// 1. The archive is opened from a FileInfo or file path (not a stream)
/// 2. Multi-threading is explicitly enabled in ReaderOptions
/// 3. The archive is not SOLID (SOLID archives should use sequential extraction)
/// </summary>
public virtual bool SupportsMultiThreadedExtraction =>
_sourceStream is not null
&& _sourceStream.IsFileMode
&& ReaderOptions.EnableMultiThreadedExtraction
&& !IsSolid;
/// <summary>
/// The archive can find all the parts of the archive needed to fully extract the archive. This forces the parsing of the entire archive.
/// </summary>
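
To illustrate how the new SupportsMultiThreadedExtraction property and ReaderOptions.EnableMultiThreadedExtraction flag are meant to be consumed together, here is a minimal sketch (not part of this diff): the archive path and output directory are placeholders, and the parallel loop is one possible caller-side use of the flag, assuming entries can otherwise be extracted independently.

using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Readers;

var options = new ReaderOptions { EnableMultiThreadedExtraction = true };
using var archive = ArchiveFactory.OpenArchive(new FileInfo("archive.zip"), options);

if (archive.SupportsMultiThreadedExtraction)
{
    // File-based, non-SOLID archive with the opt-in flag set: each entry gets
    // its own independent stream, so entries can be written out from multiple threads.
    Parallel.ForEach(
        archive.Entries.Where(e => !e.IsDirectory),
        entry => entry.WriteToDirectory("output")
    );
}
else
{
    // Stream-based or SOLID archives fall back to sequential extraction.
    foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
    {
        entry.WriteToDirectory("output");
    }
}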

View File

@@ -78,7 +78,7 @@ public static class ArchiveFactory
options ??= new ReaderOptions { LeaveStreamOpen = false };
var factory = await FindFactoryAsync<IArchiveFactory>(fileInfo, cancellationToken);
return factory.OpenAsyncArchive(fileInfo, options);
return factory.OpenAsyncArchive(fileInfo, options, cancellationToken);
}
public static IArchive OpenArchive(

View File

@@ -40,6 +40,13 @@ internal class AutoArchiveFactory : IArchiveFactory
public IArchive OpenArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
ArchiveFactory.OpenArchive(fileInfo, readerOptions);
public IAsyncArchive OpenAsyncArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
(IAsyncArchive)OpenArchive(fileInfo, readerOptions);
public IAsyncArchive OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)OpenArchive(fileInfo, readerOptions);
}
}
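
A hedged sketch of calling the cancellation-aware overload that this change threads through the factories; the static ArchiveFactory signature is inferred from the hunk above, and the file name is illustrative.

using System;
using System.IO;
using System.Threading;
using SharpCompress.Archives;
using SharpCompress.Readers;

using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(30));
var options = new ReaderOptions { LeaveStreamOpen = false };

// Factory resolution honours the token; the open itself only checks the
// token up front, as the AutoArchiveFactory overload above shows.
await using var archive = await ArchiveFactory.OpenAsyncArchive(
    new FileInfo("archive.zip"),
    options,
    cts.Token
);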

View File

@@ -44,4 +44,12 @@ public interface IArchive : IDisposable
/// Returns whether the archive is encrypted.
/// </summary>
bool IsEncrypted { get; }
/// <summary>
/// Returns whether multi-threaded extraction is supported for this archive.
/// Multi-threading is supported when the archive is opened from a FileInfo or file path
/// (not a stream) and the format supports random access (e.g., Zip, Tar, Rar).
/// SOLID archives (some Rar, all 7Zip) should use sequential extraction for best performance.
/// </summary>
bool SupportsMultiThreadedExtraction { get; }
}

View File

@@ -47,5 +47,9 @@ public interface IArchiveFactory : IFactory
/// <param name="fileInfo">the file to open.</param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
IAsyncArchive OpenAsyncArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null);
IAsyncArchive OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
}

View File

@@ -3,7 +3,6 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
@@ -164,22 +163,4 @@ public partial class RarArchive
return false;
}
}
public static async ValueTask<bool> IsRarFileAsync(
Stream stream,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
MarkHeader.Read(stream, true, false);
return true;
}
catch
{
return false;
}
}
}

View File

@@ -1,6 +1,7 @@
using System.IO;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
namespace SharpCompress.Archives.Rar;
@@ -24,6 +25,76 @@ internal class SeekableFilePart : RarFilePart
internal override Stream GetCompressedStream()
{
Stream streamToUse;
// If the stream is a SourceStream in file mode with multi-threading enabled,
// create an independent stream to support concurrent extraction
if (
_stream is SourceStream sourceStream
&& sourceStream.IsFileMode
&& sourceStream.ReaderOptions.EnableMultiThreadedExtraction
)
{
var independentStream = sourceStream.CreateIndependentStream(0);
if (independentStream is not null)
{
streamToUse = independentStream;
streamToUse.Position = FileHeader.DataStartPosition;
if (FileHeader.R4Salt != null)
{
var cryptKey = new CryptKey3(_password!);
return new RarCryptoWrapper(streamToUse, FileHeader.R4Salt, cryptKey);
}
if (FileHeader.Rar5CryptoInfo != null)
{
var cryptKey = new CryptKey5(_password!, FileHeader.Rar5CryptoInfo);
return new RarCryptoWrapper(
streamToUse,
FileHeader.Rar5CryptoInfo.Salt,
cryptKey
);
}
return streamToUse;
}
}
// Check if the stream wraps a FileStream
Stream? underlyingStream = _stream;
if (_stream is IStreamStack streamStack)
{
underlyingStream = streamStack.BaseStream();
}
if (underlyingStream is FileStream fileStream)
{
// Create a new independent stream from the file
streamToUse = new FileStream(
fileStream.Name,
FileMode.Open,
FileAccess.Read,
FileShare.Read
);
streamToUse.Position = FileHeader.DataStartPosition;
if (FileHeader.R4Salt != null)
{
var cryptKey = new CryptKey3(_password!);
return new RarCryptoWrapper(streamToUse, FileHeader.R4Salt, cryptKey);
}
if (FileHeader.Rar5CryptoInfo != null)
{
var cryptKey = new CryptKey5(_password!, FileHeader.Rar5CryptoInfo);
return new RarCryptoWrapper(streamToUse, FileHeader.Rar5CryptoInfo.Salt, cryptKey);
}
return streamToUse;
}
// Fall back to existing behavior for stream-based sources
_stream.Position = FileHeader.DataStartPosition;
if (FileHeader.R4Salt != null)

View File

@@ -1,10 +1,12 @@
using System;
using System.Buffers;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.SevenZip;
using SharpCompress.Compressors.LZMA.Utilites;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -155,56 +157,13 @@ public partial class SevenZipArchive
}
}
public static async ValueTask<bool> IsSevenZipFileAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
return await SignatureMatchAsync(stream, cancellationToken);
}
catch
{
return false;
}
}
private static ReadOnlySpan<byte> Signature => [(byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C];
private static ReadOnlySpan<byte> Signature =>
new byte[] { (byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C };
private static bool SignatureMatch(Stream stream)
{
var buffer = ArrayPool<byte>.Shared.Rent(6);
try
{
stream.ReadExact(buffer, 0, 6);
return buffer.AsSpan().Slice(0, 6).SequenceEqual(Signature);
}
finally
{
ArrayPool<byte>.Shared.Return(buffer);
}
}
private static async ValueTask<bool> SignatureMatchAsync(
Stream stream,
CancellationToken cancellationToken
)
{
var buffer = ArrayPool<byte>.Shared.Rent(6);
try
{
if (!await stream.ReadFullyAsync(buffer, cancellationToken).ConfigureAwait(false))
{
return false;
}
return buffer.AsSpan().Slice(0, 6).SequenceEqual(Signature);
}
finally
{
ArrayPool<byte>.Shared.Return(buffer);
}
var reader = new BinaryReader(stream);
ReadOnlySpan<byte> signatureBytes = reader.ReadBytes(6);
return signatureBytes.SequenceEqual(Signature);
}
}

View File

@@ -32,56 +32,11 @@ public partial class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, Sev
IEnumerable<SevenZipVolume> volumes
)
{
foreach (var volume in volumes)
{
LoadFactory(volume.Stream);
if (_database is null)
{
yield break;
}
var entries = new SevenZipArchiveEntry[_database._files.Count];
for (var i = 0; i < _database._files.Count; i++)
{
var file = _database._files[i];
entries[i] = new SevenZipArchiveEntry(
this,
new SevenZipFilePart(
volume.Stream,
_database,
i,
file,
ReaderOptions.ArchiveEncoding
)
);
}
foreach (
var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder)
)
{
var isSolid = false;
foreach (var entry in group)
{
entry.IsSolid = isSolid;
isSolid = true;
}
}
foreach (var entry in entries)
{
yield return entry;
}
}
}
protected override async IAsyncEnumerable<SevenZipArchiveEntry> LoadEntriesAsync(
IAsyncEnumerable<SevenZipVolume> volumes
)
{
var stream = (await volumes.SingleAsync()).Stream;
await LoadFactoryAsync(stream);
var stream = volumes.Single().Stream;
LoadFactory(stream);
if (_database is null)
{
yield break;
return Enumerable.Empty<SevenZipArchiveEntry>();
}
var entries = new SevenZipArchiveEntry[_database._files.Count];
for (var i = 0; i < _database._files.Count; i++)
@@ -102,10 +57,7 @@ public partial class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, Sev
}
}
foreach (var entry in entries)
{
yield return entry;
}
return entries;
}
private void LoadFactory(Stream stream)
@@ -119,27 +71,6 @@ public partial class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, Sev
}
}
private async Task LoadFactoryAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
if (_database is null)
{
stream.Position = 0;
var reader = new ArchiveReader();
await reader.OpenAsync(
stream,
lookForHeader: ReaderOptions.LookForHeader,
cancellationToken
);
_database = await reader.ReadDatabaseAsync(
new PasswordProvider(ReaderOptions.Password),
cancellationToken
);
}
}
protected override IReader CreateReaderForSolidExtraction() =>
new SevenZipReader(ReaderOptions, this);
@@ -253,7 +184,7 @@ public partial class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, Sev
return Task.CompletedTask;
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !LEGACY_DOTNET
public override ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default

View File

@@ -165,27 +165,6 @@ public partial class TarArchive
return false;
}
public static async ValueTask<bool> IsTarFileAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
var tarHeader = new TarHeader(new ArchiveEncoding());
var reader = new BinaryReader(stream);
var readSucceeded = tarHeader.Read(reader);
var isEmptyArchive =
tarHeader.Name?.Length == 0
&& tarHeader.Size == 0
&& Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
return readSucceeded || isEmptyArchive;
}
catch { }
return false;
}
public static IWritableArchive CreateArchive() => new TarArchive();
public static IWritableAsyncArchive CreateAsyncArchive() => new TarArchive();

View File

@@ -1,7 +1,5 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Arj.Headers;
using SharpCompress.Crypto;
@@ -107,26 +105,6 @@ namespace SharpCompress.Common.Ace.Headers
return CheckMagicBytes(bytes, 7);
}
/// <summary>
/// Asynchronously checks if the stream is an ACE archive
/// </summary>
/// <param name="stream">The stream to read from</param>
/// <param name="cancellationToken">Cancellation token</param>
/// <returns>True if the stream is an ACE archive, false otherwise</returns>
public static async ValueTask<bool> IsArchiveAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
var bytes = new byte[14];
if (await stream.ReadAsync(bytes, 0, 14, cancellationToken) != 14)
{
return false;
}
return CheckMagicBytes(bytes, 7);
}
protected static bool CheckMagicBytes(byte[] headerBytes, int offset)
{
// Check for "**ACE**" at specified offset

View File

@@ -3,7 +3,6 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Crypto;
@@ -150,26 +149,6 @@ namespace SharpCompress.Common.Arj.Headers
return CheckMagicBytes(bytes);
}
/// <summary>
/// Asynchronously checks if the stream is an ARJ archive
/// </summary>
/// <param name="stream">The stream to read from</param>
/// <param name="cancellationToken">Cancellation token</param>
/// <returns>True if the stream is an ARJ archive, false otherwise</returns>
public static async ValueTask<bool> IsArchiveAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
var bytes = new byte[2];
if (await stream.ReadAsync(bytes, 0, 2, cancellationToken) != 2)
{
return false;
}
return CheckMagicBytes(bytes);
}
protected static bool CheckMagicBytes(byte[] headerBytes)
{
var magicValue = (ushort)(headerBytes[0] | headerBytes[1] << 8);

View File

@@ -82,7 +82,7 @@ namespace SharpCompress.Common
}
}
#if NET6_0_OR_GREATER
#if NET8_0_OR_GREATER
public async ValueTask DisposeAsync()
{
if (_disposed)

View File

@@ -93,7 +93,7 @@ public class EntryStream : Stream, IStreamStack
_stream.Dispose();
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !LEGACY_DOTNET
public override async ValueTask DisposeAsync()
{
if (_isDisposed)
@@ -171,7 +171,7 @@ public class EntryStream : Stream, IStreamStack
return read;
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !LEGACY_DOTNET
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default

View File

@@ -5,8 +5,6 @@ using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.LZMA.Utilites;
using SharpCompress.IO;
@@ -1272,46 +1270,6 @@ internal class ArchiveReader
_stream = stream;
}
public async Task OpenAsync(
Stream stream,
bool lookForHeader,
CancellationToken cancellationToken = default
)
{
Close();
_streamOrigin = stream.Position;
_streamEnding = stream.Length;
var canScan = lookForHeader ? 0x80000 - 20 : 0;
while (true)
{
// TODO: Check Signature!
_header = new byte[0x20];
await stream.ReadExactAsync(_header, 0, 0x20, cancellationToken);
if (
!lookForHeader
|| _header
.AsSpan(0, length: 6)
.SequenceEqual<byte>([0x37, 0x7A, 0xBC, 0xAF, 0x27, 0x1C])
)
{
break;
}
if (canScan == 0)
{
throw new InvalidFormatException("Unable to find 7z signature");
}
canScan--;
stream.Position = ++_streamOrigin;
}
_stream = stream;
}
public void Close()
{
_stream?.Dispose();
@@ -1425,110 +1383,6 @@ internal class ArchiveReader
return db;
}
public async Task<ArchiveDatabase> ReadDatabaseAsync(
IPasswordProvider pass,
CancellationToken cancellationToken = default
)
{
var db = new ArchiveDatabase(pass);
db.Clear();
db._majorVersion = _header[6];
db._minorVersion = _header[7];
if (db._majorVersion != 0)
{
throw new InvalidOperationException();
}
var crcFromArchive = DataReader.Get32(_header, 8);
var nextHeaderOffset = (long)DataReader.Get64(_header, 0xC);
var nextHeaderSize = (long)DataReader.Get64(_header, 0x14);
var nextHeaderCrc = DataReader.Get32(_header, 0x1C);
var crc = Crc.INIT_CRC;
crc = Crc.Update(crc, nextHeaderOffset);
crc = Crc.Update(crc, nextHeaderSize);
crc = Crc.Update(crc, nextHeaderCrc);
crc = Crc.Finish(crc);
if (crc != crcFromArchive)
{
throw new InvalidOperationException();
}
db._startPositionAfterHeader = _streamOrigin + 0x20;
// empty header is ok
if (nextHeaderSize == 0)
{
db.Fill();
return db;
}
if (nextHeaderOffset < 0 || nextHeaderSize < 0 || nextHeaderSize > int.MaxValue)
{
throw new InvalidOperationException();
}
if (nextHeaderOffset > _streamEnding - db._startPositionAfterHeader)
{
throw new InvalidOperationException("nextHeaderOffset is invalid");
}
_stream.Seek(nextHeaderOffset, SeekOrigin.Current);
var header = new byte[nextHeaderSize];
await _stream.ReadExactAsync(header, 0, header.Length, cancellationToken);
if (Crc.Finish(Crc.Update(Crc.INIT_CRC, header, 0, header.Length)) != nextHeaderCrc)
{
throw new InvalidOperationException();
}
using (var streamSwitch = new CStreamSwitch())
{
streamSwitch.Set(this, header);
var type = ReadId();
if (type != BlockType.Header)
{
if (type != BlockType.EncodedHeader)
{
throw new InvalidOperationException();
}
var dataVector = ReadAndDecodePackedStreams(
db._startPositionAfterHeader,
db.PasswordProvider
);
// compressed header without content is odd but ok
if (dataVector.Count == 0)
{
db.Fill();
return db;
}
if (dataVector.Count != 1)
{
throw new InvalidOperationException();
}
streamSwitch.Set(this, dataVector[0]);
if (ReadId() != BlockType.Header)
{
throw new InvalidOperationException();
}
}
ReadHeader(db, db.PasswordProvider);
}
db.Fill();
return db;
}
internal class CExtractFolderInfo
{
internal int _fileIndex;

View File

@@ -1,5 +1,6 @@
using System.IO;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
namespace SharpCompress.Common.Tar;
@@ -20,8 +21,45 @@ internal sealed class TarFilePart : FilePart
internal override Stream GetCompressedStream()
{
if (_seekableStream != null)
if (_seekableStream is not null)
{
// If the seekable stream is a SourceStream in file mode with multi-threading enabled,
// create an independent stream to support concurrent extraction
if (
_seekableStream is SourceStream sourceStream
&& sourceStream.IsFileMode
&& sourceStream.ReaderOptions.EnableMultiThreadedExtraction
)
{
var independentStream = sourceStream.CreateIndependentStream(0);
if (independentStream is not null)
{
independentStream.Position = Header.DataStartPosition ?? 0;
return new TarReadOnlySubStream(independentStream, Header.Size);
}
}
// Check if the seekable stream wraps a FileStream
Stream? underlyingStream = _seekableStream;
if (_seekableStream is IStreamStack streamStack)
{
underlyingStream = streamStack.BaseStream();
}
if (underlyingStream is FileStream fileStream)
{
// Create a new independent stream from the file
var independentStream = new FileStream(
fileStream.Name,
FileMode.Open,
FileAccess.Read,
FileShare.Read
);
independentStream.Position = Header.DataStartPosition ?? 0;
return new TarReadOnlySubStream(independentStream, Header.Size);
}
// Fall back to existing behavior for stream-based sources
_seekableStream.Position = Header.DataStartPosition ?? 0;
return new TarReadOnlySubStream(_seekableStream, Header.Size);
}
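
The TarFilePart change above, like the Rar SeekableFilePart and SourceStream.CreateIndependentStream changes elsewhere in this diff, relies on the same operating-system behaviour: the same file can be opened several times with FileShare.Read, and each handle keeps its own position. A minimal sketch of that idea outside SharpCompress; file name and offsets are illustrative.

using System.IO;
using System.Threading.Tasks;

static byte[] ReadRegion(string path, long offset, int length)
{
    // Each call opens its own handle, so concurrent callers never
    // disturb each other's stream position.
    using var fs = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read);
    fs.Position = offset;
    var buffer = new byte[length];
    var read = 0;
    while (read < length)
    {
        var n = fs.Read(buffer, read, length - read);
        if (n == 0)
        {
            break; // end of file reached before the requested length
        }
        read += n;
    }
    return buffer;
}

var first = Task.Run(() => ReadRegion("archive.tar", 0, 4096));
var second = Task.Run(() => ReadRegion("archive.tar", 65536, 4096));
Task.WaitAll(first, second);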

View File

@@ -66,7 +66,7 @@ internal class TarReadOnlySubStream : SharpCompressStream, IStreamStack
base.Dispose(disposing);
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !LEGACY_DOTNET
public override async System.Threading.Tasks.ValueTask DisposeAsync()
{
if (_isDisposed)
@@ -170,7 +170,7 @@ internal class TarReadOnlySubStream : SharpCompressStream, IStreamStack
return read;
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !LEGACY_DOTNET
public override async System.Threading.Tasks.ValueTask<int> ReadAsync(
System.Memory<byte> buffer,
System.Threading.CancellationToken cancellationToken = default

View File

@@ -2,13 +2,16 @@ using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
namespace SharpCompress.Common.Zip;
internal class SeekableZipFilePart : ZipFilePart
{
private bool _isLocalHeaderLoaded;
private volatile bool _isLocalHeaderLoaded;
private readonly SeekableZipHeaderFactory _headerFactory;
private readonly object _headerLock = new();
private readonly SemaphoreSlim _asyncHeaderSemaphore = new(1, 1);
internal SeekableZipFilePart(
SeekableZipHeaderFactory headerFactory,
@@ -21,8 +24,14 @@ internal class SeekableZipFilePart : ZipFilePart
{
if (!_isLocalHeaderLoaded)
{
LoadLocalHeader();
_isLocalHeaderLoaded = true;
lock (_headerLock)
{
if (!_isLocalHeaderLoaded)
{
LoadLocalHeader();
_isLocalHeaderLoaded = true;
}
}
}
return base.GetCompressedStream();
}
@@ -33,22 +42,173 @@ internal class SeekableZipFilePart : ZipFilePart
{
if (!_isLocalHeaderLoaded)
{
await LoadLocalHeaderAsync(cancellationToken);
_isLocalHeaderLoaded = true;
await _asyncHeaderSemaphore.WaitAsync(cancellationToken);
try
{
if (!_isLocalHeaderLoaded)
{
await LoadLocalHeaderAsync(cancellationToken);
_isLocalHeaderLoaded = true;
}
}
finally
{
_asyncHeaderSemaphore.Release();
}
}
return await base.GetCompressedStreamAsync(cancellationToken);
}
private void LoadLocalHeader() =>
Header = _headerFactory.GetLocalHeader(BaseStream, (DirectoryEntryHeader)Header);
private void LoadLocalHeader()
{
// Use an independent stream for loading the header if multi-threading is enabled
Stream streamToUse = BaseStream;
bool disposeStream = false;
private async ValueTask LoadLocalHeaderAsync(CancellationToken cancellationToken = default) =>
Header = await _headerFactory.GetLocalHeaderAsync(BaseStream, (DirectoryEntryHeader)Header);
if (
BaseStream is SourceStream sourceStream
&& sourceStream.IsFileMode
&& sourceStream.ReaderOptions.EnableMultiThreadedExtraction
)
{
var independentStream = sourceStream.CreateIndependentStream(0);
if (independentStream is not null)
{
streamToUse = independentStream;
disposeStream = true;
}
}
else
{
// Check if BaseStream wraps a FileStream
Stream? underlyingStream = BaseStream;
if (BaseStream is IStreamStack streamStack)
{
underlyingStream = streamStack.BaseStream();
}
if (underlyingStream is FileStream fileStream)
{
streamToUse = new FileStream(
fileStream.Name,
FileMode.Open,
FileAccess.Read,
FileShare.Read
);
disposeStream = true;
}
}
try
{
Header = _headerFactory.GetLocalHeader(streamToUse, (DirectoryEntryHeader)Header);
}
finally
{
if (disposeStream)
{
streamToUse.Dispose();
}
}
}
private async ValueTask LoadLocalHeaderAsync(CancellationToken cancellationToken = default)
{
// Use an independent stream for loading the header if multi-threading is enabled
Stream streamToUse = BaseStream;
bool disposeStream = false;
if (
BaseStream is SourceStream sourceStream
&& sourceStream.IsFileMode
&& sourceStream.ReaderOptions.EnableMultiThreadedExtraction
)
{
var independentStream = sourceStream.CreateIndependentStream(0);
if (independentStream is not null)
{
streamToUse = independentStream;
disposeStream = true;
}
}
else
{
// Check if BaseStream wraps a FileStream
Stream? underlyingStream = BaseStream;
if (BaseStream is IStreamStack streamStack)
{
underlyingStream = streamStack.BaseStream();
}
if (underlyingStream is FileStream fileStream)
{
streamToUse = new FileStream(
fileStream.Name,
FileMode.Open,
FileAccess.Read,
FileShare.Read
);
disposeStream = true;
}
}
try
{
Header = await _headerFactory.GetLocalHeaderAsync(
streamToUse,
(DirectoryEntryHeader)Header
);
}
finally
{
if (disposeStream)
{
streamToUse.Dispose();
}
}
}
protected override Stream CreateBaseStream()
{
BaseStream.Position = Header.DataStartPosition.NotNull();
// If BaseStream is a SourceStream in file mode with multi-threading enabled,
// create an independent stream to support concurrent extraction
if (
BaseStream is SourceStream sourceStream
&& sourceStream.IsFileMode
&& sourceStream.ReaderOptions.EnableMultiThreadedExtraction
)
{
// Create a new independent stream for this entry
var independentStream = sourceStream.CreateIndependentStream(0);
if (independentStream is not null)
{
independentStream.Position = Header.DataStartPosition.NotNull();
return independentStream;
}
}
// Check if BaseStream wraps a FileStream (for multi-volume archives)
Stream? underlyingStream = BaseStream;
if (BaseStream is IStreamStack streamStack)
{
underlyingStream = streamStack.BaseStream();
}
if (underlyingStream is FileStream fileStream)
{
// Create a new independent stream from the file
var independentStream = new FileStream(
fileStream.Name,
FileMode.Open,
FileAccess.Read,
FileShare.Read
);
independentStream.Position = Header.DataStartPosition.NotNull();
return independentStream;
}
// Fall back to existing behavior for stream-based sources
BaseStream.Position = Header.DataStartPosition.NotNull();
return BaseStream;
}
}
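
The SeekableZipFilePart change above guards lazy header loading with the classic double-checked pattern: a volatile flag plus a lock on the synchronous path, and a SemaphoreSlim on the asynchronous path, since a lock cannot be held across an await. A standalone sketch of the same pattern, with hypothetical names.

using System;
using System.Threading;
using System.Threading.Tasks;

internal sealed class LazyHeaderLoader
{
    private volatile bool _loaded;                 // read outside the lock, hence volatile
    private readonly object _gate = new();
    private readonly SemaphoreSlim _asyncGate = new(1, 1);

    public void EnsureLoaded(Action load)
    {
        if (_loaded)
        {
            return;                                // fast path: already loaded
        }
        lock (_gate)
        {
            if (!_loaded)                          // re-check under the lock
            {
                load();
                _loaded = true;
            }
        }
    }

    public async ValueTask EnsureLoadedAsync(Func<CancellationToken, ValueTask> load, CancellationToken ct)
    {
        if (_loaded)
        {
            return;
        }
        await _asyncGate.WaitAsync(ct);            // SemaphoreSlim is safe across await
        try
        {
            if (!_loaded)
            {
                await load(ct);
                _loaded = true;
            }
        }
        finally
        {
            _asyncGate.Release();
        }
    }
}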

View File

@@ -20,7 +20,7 @@ internal class WinzipAesEncryptionData
{
_keySize = keySize;
#if NETFRAMEWORK || NETSTANDARD2_0
#if LEGACY_DOTNET
var rfc2898 = new Rfc2898DeriveBytes(password, salt, RFC2898_ITERATIONS);
KeyBytes = rfc2898.GetBytes(KeySizeInBytes);
IvBytes = rfc2898.GetBytes(KeySizeInBytes);

View File

@@ -98,7 +98,7 @@ public sealed class BZip2Stream : Stream, IStreamStack
public override void SetLength(long value) => stream.SetLength(value);
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !LEGACY_DOTNET
public override int Read(Span<byte> buffer) => stream.Read(buffer);
@@ -149,25 +149,4 @@ public sealed class BZip2Stream : Stream, IStreamStack
}
return true;
}
/// <summary>
/// Asynchronously consumes two bytes to test if there is a BZip2 header
/// </summary>
/// <param name="stream"></param>
/// <param name="cancellationToken"></param>
/// <returns></returns>
public static async ValueTask<bool> IsBZip2Async(
Stream stream,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
var buffer = new byte[2];
var bytesRead = await stream.ReadAsync(buffer, 0, 2, cancellationToken);
if (bytesRead < 2 || buffer[0] != 'B' || buffer[1] != 'Z')
{
return false;
}
return true;
}
}

View File

@@ -299,7 +299,7 @@ public class DeflateStream : Stream, IStreamStack
await _baseStream.FlushAsync(cancellationToken).ConfigureAwait(false);
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !LEGACY_DOTNET
public override async ValueTask DisposeAsync()
{
if (_disposed)
@@ -370,7 +370,7 @@ public class DeflateStream : Stream, IStreamStack
.ConfigureAwait(false);
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !LEGACY_DOTNET
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
@@ -461,7 +461,7 @@ public class DeflateStream : Stream, IStreamStack
.ConfigureAwait(false);
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !LEGACY_DOTNET
public override async ValueTask WriteAsync(
ReadOnlyMemory<byte> buffer,
CancellationToken cancellationToken = default

View File

@@ -345,7 +345,7 @@ public class GZipStream : Stream, IStreamStack
return n;
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !LEGACY_DOTNET
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
@@ -454,7 +454,7 @@ public class GZipStream : Stream, IStreamStack
await BaseStream.WriteAsync(buffer, offset, count, cancellationToken).ConfigureAwait(false);
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !LEGACY_DOTNET
public override async ValueTask WriteAsync(
ReadOnlyMemory<byte> buffer,
CancellationToken cancellationToken = default

View File

@@ -552,7 +552,7 @@ internal class ZlibBaseStream : Stream, IStreamStack
}
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !LEGACY_DOTNET
public override async ValueTask DisposeAsync()
{
if (isDisposed)
@@ -1171,7 +1171,7 @@ internal class ZlibBaseStream : Stream, IStreamStack
return rc;
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !LEGACY_DOTNET
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default

View File

@@ -108,13 +108,9 @@ internal static class ZlibConstants
public const int Z_BUF_ERROR = -5;
/// <summary>
/// The size of the working buffer used in the ZlibCodec class. Defaults to 8192 bytes.
/// The size of the working buffer used in the ZlibCodec class. Defaults to 16384 bytes.
/// </summary>
#if NETCF
public const int WorkingBufferSizeDefault = 8192;
#else
public const int WorkingBufferSizeDefault = 16384;
#endif
/// <summary>
/// The minimum size of the working buffer used in the ZlibCodec class. Currently it is 128 bytes.

View File

@@ -277,7 +277,7 @@ public class ZlibStream : Stream, IStreamStack
await _baseStream.FlushAsync(cancellationToken).ConfigureAwait(false);
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !LEGACY_DOTNET
public override async ValueTask DisposeAsync()
{
if (_disposed)
@@ -347,7 +347,7 @@ public class ZlibStream : Stream, IStreamStack
.ConfigureAwait(false);
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !LEGACY_DOTNET
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
@@ -431,7 +431,7 @@ public class ZlibStream : Stream, IStreamStack
.ConfigureAwait(false);
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !LEGACY_DOTNET
public override async ValueTask WriteAsync(
ReadOnlyMemory<byte> buffer,
CancellationToken cancellationToken = default

View File

@@ -200,7 +200,7 @@ public sealed class Deflate64Stream : Stream, IStreamStack
return count - remainingCount;
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !LEGACY_DOTNET
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default

View File

@@ -163,7 +163,7 @@ public sealed class LZipStream : Stream, IStreamStack
public override void SetLength(long value) => throw new NotImplementedException();
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !LEGACY_DOTNET
public override ValueTask<int> ReadAsync(
Memory<byte> buffer,
@@ -222,19 +222,6 @@ public sealed class LZipStream : Stream, IStreamStack
/// <returns><c>true</c> if the given stream is an LZip file, <c>false</c> otherwise.</returns>
public static bool IsLZipFile(Stream stream) => ValidateAndReadSize(stream) != 0;
/// <summary>
/// Asynchronously determines if the given stream is positioned at the start of a v1 LZip
/// file, as indicated by the ASCII characters "LZIP" and a version byte
/// of 1, followed by at least one byte.
/// </summary>
/// <param name="stream">The stream to read from. Must not be null.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns><c>true</c> if the given stream is an LZip file, <c>false</c> otherwise.</returns>
public static async ValueTask<bool> IsLZipFileAsync(
Stream stream,
CancellationToken cancellationToken = default
) => await ValidateAndReadSizeAsync(stream, cancellationToken) != 0;
/// <summary>
/// Reads the 6-byte header of the stream, and returns 0 if either the header
/// couldn't be read or it isn't a validate LZIP header, or the dictionary
@@ -268,44 +255,6 @@ public sealed class LZipStream : Stream, IStreamStack
return (1 << basePower) - (subtractionNumerator * (1 << (basePower - 4)));
}
/// <summary>
/// Asynchronously reads the 6-byte header of the stream, and returns 0 if either the header
/// couldn't be read or it isn't a validate LZIP header, or the dictionary
/// size if it *is* a valid LZIP file.
/// </summary>
public static async ValueTask<int> ValidateAndReadSizeAsync(
Stream stream,
CancellationToken cancellationToken
)
{
// Read the header
byte[] header = new byte[6];
var n = await stream
.ReadAsync(header, 0, header.Length, cancellationToken)
.ConfigureAwait(false);
// TODO: Handle reading only part of the header?
if (n != 6)
{
return 0;
}
if (
header[0] != 'L'
|| header[1] != 'Z'
|| header[2] != 'I'
|| header[3] != 'P'
|| header[4] != 1 /* version 1 */
)
{
return 0;
}
var basePower = header[5] & 0x1F;
var subtractionNumerator = (header[5] & 0xE0) >> 5;
return (1 << basePower) - (subtractionNumerator * (1 << (basePower - 4)));
}
private static readonly byte[] headerBytes =
[
(byte)'L',

View File

@@ -201,7 +201,7 @@ public class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Stream
}
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !LEGACY_DOTNET
[MemberNotNull(nameof(_outWindow))]
#endif
private void CreateDictionary()

View File

@@ -632,7 +632,7 @@ public class LzmaStream : Stream, IStreamStack
return total;
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !LEGACY_DOTNET
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default

View File

@@ -1,7 +1,5 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
@@ -97,45 +95,6 @@ namespace SharpCompress.Compressors.Lzw
return true;
}
/// <summary>
/// Asynchronously checks if the stream is an LZW stream
/// </summary>
/// <param name="stream">The stream to read from</param>
/// <param name="cancellationToken">Cancellation token</param>
/// <returns>True if the stream is an LZW stream, false otherwise</returns>
public static async ValueTask<bool> IsLzwStreamAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
try
{
byte[] hdr = new byte[LzwConstants.HDR_SIZE];
int result = await stream.ReadAsync(hdr, 0, hdr.Length, cancellationToken);
// Check the magic marker
if (result < 0)
throw new IncompleteArchiveException("Failed to read LZW header");
if (hdr[0] != (LzwConstants.MAGIC >> 8) || hdr[1] != (LzwConstants.MAGIC & 0xff))
{
throw new IncompleteArchiveException(
String.Format(
"Wrong LZW header. Magic bytes don't match. 0x{0:x2} 0x{1:x2}",
hdr[0],
hdr[1]
)
);
}
}
catch (Exception)
{
return false;
}
return true;
}
/// <summary>
/// Gets or sets a flag indicating ownership of underlying stream.
/// When the flag is true <see cref="Stream.Dispose()" /> will close the underlying stream also.

View File

@@ -185,7 +185,7 @@ internal sealed class MultiVolumeReadOnlyStream : Stream, IStreamStack
return totalRead;
}
#if NETCOREAPP2_1_OR_GREATER || NETSTANDARD2_1_OR_GREATER
#if !LEGACY_DOTNET
public override async System.Threading.Tasks.ValueTask<int> ReadAsync(
Memory<byte> buffer,
System.Threading.CancellationToken cancellationToken = default

View File

@@ -385,7 +385,7 @@ internal class RarBLAKE2spStream : RarStream, IStreamStack
return result;
}
#if NETCOREAPP2_1_OR_GREATER || NETSTANDARD2_1_OR_GREATER
#if !LEGACY_DOTNET
public override async System.Threading.Tasks.ValueTask<int> ReadAsync(
Memory<byte> buffer,
System.Threading.CancellationToken cancellationToken = default

View File

@@ -129,7 +129,7 @@ internal class RarCrcStream : RarStream, IStreamStack
return result;
}
#if NETCOREAPP2_1_OR_GREATER || NETSTANDARD2_1_OR_GREATER
#if !LEGACY_DOTNET
public override async System.Threading.Tasks.ValueTask<int> ReadAsync(
Memory<byte> buffer,
System.Threading.CancellationToken cancellationToken = default

View File

@@ -189,7 +189,7 @@ internal class RarStream : Stream, IStreamStack
return outTotal;
}
#if NETCOREAPP2_1_OR_GREATER || NETSTANDARD2_1_OR_GREATER
#if !LEGACY_DOTNET
public override async System.Threading.Tasks.ValueTask<int> ReadAsync(
Memory<byte> buffer,
System.Threading.CancellationToken cancellationToken = default

View File

@@ -60,22 +60,6 @@ public sealed class XZStream : XZReadOnlyStream, IStreamStack
}
}
public static async ValueTask<bool> IsXZStreamAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
return null != await XZHeader.FromStreamAsync(stream, cancellationToken);
}
catch (Exception)
{
return false;
}
}
private void AssertBlockCheckTypeIsSupported()
{
switch (Header.BlockCheckType)

View File

@@ -74,7 +74,7 @@ public class CompressionStream : Stream
~CompressionStream() => Dispose(false);
#if !NETSTANDARD2_0 && !NETFRAMEWORK
#if !LEGACY_DOTNET
public override async ValueTask DisposeAsync()
#else
public async ValueTask DisposeAsync()
@@ -145,7 +145,7 @@ public class CompressionStream : Stream
public override void Write(byte[] buffer, int offset, int count) =>
Write(new ReadOnlySpan<byte>(buffer, offset, count));
#if !NETSTANDARD2_0 && !NETFRAMEWORK
#if !LEGACY_DOTNET
public override void Write(ReadOnlySpan<byte> buffer) =>
WriteInternal(buffer, ZSTD_EndDirective.ZSTD_e_continue);
#else
@@ -176,7 +176,7 @@ public class CompressionStream : Stream
);
}
#if !NETSTANDARD2_0 && !NETFRAMEWORK
#if !LEGACY_DOTNET
private async ValueTask WriteInternalAsync(
ReadOnlyMemory<byte>? buffer,
ZSTD_EndDirective directive,
@@ -218,7 +218,7 @@ public class CompressionStream : Stream
);
}
#if !NETSTANDARD2_0 && !NETFRAMEWORK
#if !LEGACY_DOTNET
public override Task WriteAsync(
byte[] buffer,

View File

@@ -105,7 +105,7 @@ public class DecompressionStream : Stream
public override int Read(byte[] buffer, int offset, int count) =>
Read(new Span<byte>(buffer, offset, count));
#if !NETSTANDARD2_0 && !NETFRAMEWORK
#if !LEGACY_DOTNET
public override int Read(Span<byte> buffer)
#else
public int Read(Span<byte> buffer)
@@ -158,7 +158,7 @@ public class DecompressionStream : Stream
}
}
#if !NETSTANDARD2_0 && !NETFRAMEWORK
#if !LEGACY_DOTNET
public override Task<int> ReadAsync(
byte[] buffer,
int offset,
@@ -276,7 +276,7 @@ public class DecompressionStream : Stream
throw new ObjectDisposedException(nameof(DecompressionStream));
}
#if NETSTANDARD2_0 || NETFRAMEWORK
#if LEGACY_DOTNET
public virtual Task DisposeAsync()
{
try

View File

@@ -16,7 +16,7 @@ public static unsafe class UnsafeHelper
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static void* malloc(ulong size)
{
#if NET6_0_OR_GREATER
#if NET8_0_OR_GREATER
var ptr = NativeMemory.Alloc((nuint)size);
#else
var ptr = (void*)Marshal.AllocHGlobal((nint)size);
@@ -31,7 +31,7 @@ public static unsafe class UnsafeHelper
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static void* calloc(ulong num, ulong size)
{
#if NET6_0_OR_GREATER
#if NET8_0_OR_GREATER
return NativeMemory.AllocZeroed((nuint)num, (nuint)size);
#else
var total = num * size;
@@ -53,7 +53,7 @@ public static unsafe class UnsafeHelper
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static void free(void* ptr)
{
#if NET6_0_OR_GREATER
#if NET8_0_OR_GREATER
NativeMemory.Free(ptr);
#else
Marshal.FreeHGlobal((IntPtr)ptr);

View File

@@ -3,7 +3,6 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
@@ -44,27 +43,6 @@ internal class ZStandardStream : DecompressionStream, IStreamStack
return true;
}
internal static async ValueTask<bool> IsZStandardAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
var buffer = new byte[4];
var bytesRead = await stream.ReadAsync(buffer, 0, 4, cancellationToken);
if (bytesRead < 4)
{
return false;
}
var magic = BitConverter.ToUInt32(buffer, 0);
if (ZstandardConstants.MAGIC != magic)
{
return false;
}
return true;
}
public ZStandardStream(Stream baseInputStream)
: base(baseInputStream)
{

View File

@@ -70,7 +70,7 @@ public sealed class Crc32Stream : Stream, IStreamStack
public override void SetLength(long value) => throw new NotSupportedException();
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !LEGACY_DOTNET
public override void Write(ReadOnlySpan<byte> buffer)
{

View File

@@ -29,13 +29,6 @@ namespace SharpCompress.Factories
int bufferSize = ReaderOptions.DefaultBufferSize
) => AceHeader.IsArchive(stream);
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
) => AceHeader.IsArchiveAsync(stream, cancellationToken);
public IReader OpenReader(Stream stream, ReaderOptions? options) =>
AceReader.OpenReader(stream, options);
@@ -48,5 +41,11 @@ namespace SharpCompress.Factories
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncReader)AceReader.OpenReader(stream, options);
}
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => new(IsArchive(stream, password, bufferSize));
}
}

View File

@@ -1,5 +1,4 @@
using System;
using System.Buffers;
using System.Collections.Generic;
using System.IO;
using System.Linq;
@@ -37,16 +36,9 @@ namespace SharpCompress.Factories
//Hyper - archive, check the next two bytes for "HP" or "ST"(or look below for
//"HYP").Also the ZOO archiver also does put a 01Ah at the start of the file,
//see the ZOO entry below.
var buffer = ArrayPool<byte>.Shared.Rent(2);
try
{
stream.ReadExact(buffer, 0, 2);
return buffer[0] == 0x1A && buffer[1] < 10; //rather thin, but this is all we have
}
finally
{
ArrayPool<byte>.Shared.Return(buffer);
}
var bytes = new byte[2];
stream.Read(bytes, 0, 2);
return bytes[0] == 0x1A && bytes[1] < 10; //rather thin, but this is all we have
}
public IReader OpenReader(Stream stream, ReaderOptions? options) =>
@@ -62,29 +54,10 @@ namespace SharpCompress.Factories
return (IAsyncReader)ArcReader.OpenReader(stream, options);
}
public override async ValueTask<bool> IsArchiveAsync(
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
)
{
//You may have to use some(paranoid) checks to ensure that you actually are
//processing an ARC file, since other archivers also adopted the idea of putting
//a 01Ah byte at offset 0, namely the Hyper archiver. To check if you have a
//Hyper - archive, check the next two bytes for "HP" or "ST"(or look below for
//"HYP").Also the ZOO archiver also does put a 01Ah at the start of the file,
//see the ZOO entry below.
var buffer = ArrayPool<byte>.Shared.Rent(2);
try
{
await stream.ReadExactAsync(buffer, 0, 2, cancellationToken);
return buffer[0] == 0x1A && buffer[1] < 10; //rather thin, but this is all we have
}
finally
{
ArrayPool<byte>.Shared.Return(buffer);
}
}
int bufferSize = ReaderOptions.DefaultBufferSize
) => new(IsArchive(stream, password, bufferSize));
}
}

View File

@@ -27,14 +27,10 @@ namespace SharpCompress.Factories
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => ArjHeader.IsArchive(stream);
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
) => ArjHeader.IsArchiveAsync(stream, cancellationToken);
)
{
return ArjHeader.IsArchive(stream);
}
public IReader OpenReader(Stream stream, ReaderOptions? options) =>
ArjReader.OpenReader(stream, options);
@@ -48,5 +44,11 @@ namespace SharpCompress.Factories
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncReader)ArjReader.OpenReader(stream, options);
}
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => new(IsArchive(stream, password, bufferSize));
}
}

View File

@@ -60,11 +60,22 @@ public abstract class Factory : IFactory
);
public abstract ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
);
/// <inheritdoc/>
public virtual ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
);
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(IsArchive(stream, password, bufferSize));
}
/// <inheritdoc/>
public virtual FileInfo? GetFilePart(int index, FileInfo part1) => null;
@@ -101,4 +112,31 @@ public abstract class Factory : IFactory
return false;
}
internal virtual async ValueTask<(bool, IAsyncReader?)> TryOpenReaderAsync(
SharpCompressStream stream,
ReaderOptions options,
CancellationToken cancellationToken
)
{
if (this is IReaderFactory readerFactory)
{
long pos = ((IStreamStack)stream).GetPosition();
if (
await IsArchiveAsync(
stream,
options.Password,
options.BufferSize,
cancellationToken
)
)
{
((IStreamStack)stream).StackSeek(pos);
return (true, readerFactory.OpenAsyncReader(stream, options, cancellationToken));
}
}
return (false, null);
}
}
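
The TryOpenReaderAsync addition above follows a probe-and-rewind pattern: record the position, run the signature check (which consumes bytes), and seek back before handing the stream to the real reader. A generic sketch of that pattern over a plain seekable stream; the helper name, delegate shape, and input file are hypothetical.

using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;

static async ValueTask<bool> ProbeAndRewindAsync(
    Stream stream,
    Func<Stream, CancellationToken, ValueTask<bool>> isArchive,
    CancellationToken ct)
{
    var start = stream.Position;              // remember where the probe began
    try
    {
        return await isArchive(stream, ct);   // the signature check advances the stream
    }
    finally
    {
        stream.Position = start;              // always rewind so the real reader starts clean
    }
}

using var fs = File.OpenRead("archive.bin");  // illustrative input file
var looksLikeZip = await ProbeAndRewindAsync(
    fs,
    (s, _) => new ValueTask<bool>(s.ReadByte() == 'P' && s.ReadByte() == 'K'), // toy check: ZIP magic "PK"
    CancellationToken.None
);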

View File

@@ -68,9 +68,22 @@ public class GZipFactory
public IAsyncArchive OpenAsyncArchive(Stream stream, ReaderOptions? readerOptions = null) =>
(IAsyncArchive)OpenArchive(stream, readerOptions);
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => new(IsArchive(stream, password, bufferSize));
/// <inheritdoc/>
public IAsyncArchive OpenAsyncArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
(IAsyncArchive)OpenArchive(fileInfo, readerOptions);
public IAsyncArchive OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)OpenArchive(fileInfo, readerOptions);
}
#endregion

View File

@@ -37,14 +37,6 @@ public class RarFactory : Factory, IArchiveFactory, IMultiArchiveFactory, IReade
int bufferSize = ReaderOptions.DefaultBufferSize
) => RarArchive.IsRarFile(stream);
/// <inheritdoc/>
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
) => RarArchive.IsRarFileAsync(stream, cancellationToken: cancellationToken);
/// <inheritdoc/>
public override FileInfo? GetFilePart(int index, FileInfo part1) =>
RarArchiveVolumeFactory.GetFilePart(index, part1);
@@ -66,8 +58,21 @@ public class RarFactory : Factory, IArchiveFactory, IMultiArchiveFactory, IReade
RarArchive.OpenArchive(fileInfo, readerOptions);
/// <inheritdoc/>
public IAsyncArchive OpenAsyncArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
(IAsyncArchive)OpenArchive(fileInfo, readerOptions);
public IAsyncArchive OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)OpenArchive(fileInfo, readerOptions);
}
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => new(IsArchive(stream, password, bufferSize));
#endregion

View File

@@ -36,14 +36,6 @@ public class SevenZipFactory : Factory, IArchiveFactory, IMultiArchiveFactory
int bufferSize = ReaderOptions.DefaultBufferSize
) => SevenZipArchive.IsSevenZipFile(stream);
/// <inheritdoc/>
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
) => SevenZipArchive.IsSevenZipFileAsync(stream, cancellationToken);
#endregion
#region IArchiveFactory
@@ -54,15 +46,28 @@ public class SevenZipFactory : Factory, IArchiveFactory, IMultiArchiveFactory
/// <inheritdoc/>
public IAsyncArchive OpenAsyncArchive(Stream stream, ReaderOptions? readerOptions = null) =>
SevenZipArchive.OpenAsyncArchive(stream, readerOptions);
(IAsyncArchive)OpenArchive(stream, readerOptions);
/// <inheritdoc/>
public IArchive OpenArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
SevenZipArchive.OpenArchive(fileInfo, readerOptions);
/// <inheritdoc/>
public IAsyncArchive OpenAsyncArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
SevenZipArchive.OpenAsyncArchive(fileInfo, readerOptions);
public IAsyncArchive OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)OpenArchive(fileInfo, readerOptions);
}
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => new(IsArchive(stream, password, bufferSize));
#endregion

View File

@@ -61,13 +61,11 @@ public class TarFactory
int bufferSize = ReaderOptions.DefaultBufferSize
) => TarArchive.IsTarFile(stream);
/// <inheritdoc/>
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
) => TarArchive.IsTarFileAsync(stream, cancellationToken);
int bufferSize = ReaderOptions.DefaultBufferSize
) => new(IsArchive(stream, password, bufferSize));
#endregion
@@ -86,8 +84,15 @@ public class TarFactory
TarArchive.OpenArchive(fileInfo, readerOptions);
/// <inheritdoc/>
public IAsyncArchive OpenAsyncArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
(IAsyncArchive)OpenArchive(fileInfo, readerOptions);
public IAsyncArchive OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)OpenArchive(fileInfo, readerOptions);
}
#endregion

View File

@@ -3,7 +3,6 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Compressors.ZStandard;
@@ -30,7 +29,6 @@ internal class ZStandardFactory : Factory
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
) => ZStandardStream.IsZStandardAsync(stream, cancellationToken);
int bufferSize = ReaderOptions.DefaultBufferSize
) => new(IsArchive(stream, password, bufferSize));
}

View File

@@ -81,6 +81,12 @@ public class ZipFactory
return false;
}
public override ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => new(IsArchive(stream, password, bufferSize));
/// <inheritdoc/>
public override async ValueTask<bool> IsArchiveAsync(
Stream stream,
@@ -145,8 +151,15 @@ public class ZipFactory
ZipArchive.OpenArchive(fileInfo, readerOptions);
/// <inheritdoc/>
public IAsyncArchive OpenAsyncArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
(IAsyncArchive)OpenArchive(fileInfo, readerOptions);
public IAsyncArchive OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)OpenArchive(fileInfo, readerOptions);
}
#endregion

View File

@@ -154,7 +154,7 @@ internal class BufferedSubStream : SharpCompressStream, IStreamStack
return count;
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !LEGACY_DOTNET
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default

View File

@@ -64,7 +64,7 @@ internal sealed class ProgressReportingStream : Stream
return bytesRead;
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !LEGACY_DOTNET
public override int Read(Span<byte> buffer)
{
var bytesRead = _baseStream.Read(buffer);
@@ -95,7 +95,7 @@ internal sealed class ProgressReportingStream : Stream
return bytesRead;
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !LEGACY_DOTNET
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
@@ -147,7 +147,7 @@ internal sealed class ProgressReportingStream : Stream
base.Dispose(disposing);
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !LEGACY_DOTNET
public override async ValueTask DisposeAsync()
{
if (!_leaveOpen)

View File

@@ -81,7 +81,7 @@ internal class ReadOnlySubStream : SharpCompressStream, IStreamStack
return value;
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !LEGACY_DOTNET
public override int Read(Span<byte> buffer)
{
var sliceLen = BytesLeftToRead < buffer.Length ? BytesLeftToRead : buffer.Length;
@@ -117,7 +117,7 @@ internal class ReadOnlySubStream : SharpCompressStream, IStreamStack
return read;
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !LEGACY_DOTNET
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default

View File

@@ -384,7 +384,7 @@ public class SharpCompressStream : Stream, IStreamStack
await Stream.FlushAsync(cancellationToken).ConfigureAwait(false);
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !LEGACY_DOTNET
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,

View File

@@ -98,6 +98,30 @@ public class SourceStream : Stream, IStreamStack
private Stream Current => _streams[_stream];
/// <summary>
/// Creates an independent stream for the specified volume index.
/// This allows multiple threads to read from different positions concurrently.
/// Only works when IsFileMode is true.
/// </summary>
/// <param name="volumeIndex">The volume index to create a stream for</param>
/// <returns>A new independent FileStream, or null if not in file mode or volume doesn't exist</returns>
public Stream? CreateIndependentStream(int volumeIndex)
{
if (!IsFileMode)
{
return null;
}
// Ensure the volume is loaded
if (!LoadStream(volumeIndex))
{
return null;
}
// Create a new independent stream from the FileInfo
return _files[volumeIndex].OpenRead();
}
public bool LoadStream(int index) //ensure all parts to id are loaded
{
while (_streams.Count <= index)
@@ -289,7 +313,7 @@ public class SourceStream : Stream, IStreamStack
return total - count;
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !LEGACY_DOTNET
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,

View File

@@ -21,7 +21,7 @@ internal static class NotNullExtensions
return source.AsEnumerable();
}
#if NETFRAMEWORK || NETSTANDARD
#if LEGACY_DOTNET
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static T NotNull<T>(this T? obj, string? message = null)
where T : class

View File

@@ -1,4 +1,4 @@
#if NETFRAMEWORK || NETSTANDARD2_0
#if LEGACY_DOTNET
namespace SharpCompress;

View File

@@ -277,7 +277,7 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IAsyncReader
}
}
//don't know the size so we have to try to decompress to skip
#if NETFRAMEWORK || NETSTANDARD2_0
#if LEGACY_DOTNET
using var s = await OpenEntryStreamAsync(cancellationToken).ConfigureAwait(false);
await s.SkipEntryAsync(cancellationToken).ConfigureAwait(false);
#else
@@ -344,7 +344,7 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IAsyncReader
internal async ValueTask WriteAsync(Stream writeStream, CancellationToken cancellationToken)
{
#if NETFRAMEWORK || NETSTANDARD2_0
#if LEGACY_DOTNET
using Stream s = await OpenEntryStreamAsync(cancellationToken).ConfigureAwait(false);
var sourceStream = WrapWithProgress(s, Entry);
await sourceStream.CopyToAsync(writeStream, 81920, cancellationToken).ConfigureAwait(false);

View File

@@ -28,4 +28,12 @@ public class ReaderOptions : OptionsBase
/// When set, progress updates will be reported as entries are extracted.
/// </summary>
public IProgress<ProgressReport>? Progress { get; set; }
/// <summary>
/// Enable multi-threaded extraction support when the archive is opened from a FileInfo or file path.
/// When enabled, multiple threads can extract different entries concurrently by creating
/// independent file streams. This is only effective for archives opened from files, not streams.
/// Default is false for backward compatibility.
/// </summary>
public bool EnableMultiThreadedExtraction { get; set; }
}
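
Per the remark above that the option is only effective for file-based input, a short complementary sketch of that distinction (factory method names follow the surface shown elsewhere in this diff; the file name is illustrative).

using System;
using System.IO;
using SharpCompress.Archives;
using SharpCompress.Readers;

var options = new ReaderOptions { EnableMultiThreadedExtraction = true };

// Opened from a file: the archive can report multi-threaded support
// (assuming a non-SOLID format, per AbstractArchive.SupportsMultiThreadedExtraction).
using (var fromFile = ArchiveFactory.OpenArchive(new FileInfo("archive.zip"), options))
{
    Console.WriteLine(fromFile.SupportsMultiThreadedExtraction);
}

// Opened from a plain stream: the flag has no effect and the property stays false.
using (var stream = File.OpenRead("archive.zip"))
using (var fromStream = ArchiveFactory.OpenArchive(stream, options))
{
    Console.WriteLine(fromStream.SupportsMultiThreadedExtraction); // false
}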

View File

@@ -28,6 +28,9 @@
<EmbedUntrackedSources>true</EmbedUntrackedSources>
<AllowedOutputExtensionsInPackageBuildOutputFolder>$(AllowedOutputExtensionsInPackageBuildOutputFolder);.pdb</AllowedOutputExtensionsInPackageBuildOutputFolder>
</PropertyGroup>
<PropertyGroup Condition=" '$(TargetFramework)' == 'net48' Or '$(TargetFramework)' == 'netstandard20' ">
<DefineConstants>$(DefineConstants);LEGACY_DOTNET</DefineConstants>
</PropertyGroup>
<PropertyGroup Condition=" '$(TargetFramework)' == 'net8.0' Or '$(TargetFramework)' == 'net10.0' ">
<IsTrimmable>true</IsTrimmable>
</PropertyGroup>

View File

@@ -189,7 +189,7 @@ internal static class Utility
}
}
#if NET60_OR_GREATER
#if NET8_0_OR_GREATER
public bool ReadFully(byte[] buffer)
{
try

View File

@@ -651,4 +651,40 @@ public class ArchiveTests : ReaderTests
VerifyFiles();
}
}
[Fact]
public async Task ArchiveFactory_Open_WithPreWrappedStream()
{
// Test that ArchiveFactory.Open works correctly with a stream that's already wrapped
// This addresses the issue where ZIP files fail to open on Linux
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "Zip.bzip2.noEmptyDirs.zip");
// Open with a pre-wrapped stream
using (var fileStream = File.OpenRead(testArchive))
using (var wrappedStream = SharpCompressStream.Create(fileStream, bufferSize: 32768))
await using (
var archive = await ArchiveFactory.OpenAsyncArchive(new AsyncOnlyStream(wrappedStream))
)
{
Assert.Equal(ArchiveType.Zip, archive.Type);
Assert.Equal(3, await archive.EntriesAsync.CountAsync());
}
}
[Fact]
public async Task ArchiveFactory_Open_WithRawFileStream()
{
// Test that ArchiveFactory.Open works correctly with a raw FileStream
// This is the common use case reported in the issue
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "Zip.bzip2.noEmptyDirs.zip");
using (var stream = File.OpenRead(testArchive))
await using (
var archive = await ArchiveFactory.OpenAsyncArchive(new AsyncOnlyStream(stream))
)
{
Assert.Equal(ArchiveType.Zip, archive.Type);
Assert.Equal(3, await archive.EntriesAsync.CountAsync());
}
}
}

View File

@@ -2,16 +2,14 @@ using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Test.Mocks;
public class AsyncOnlyStream : SharpCompressStream
public class AsyncOnlyStream : Stream
{
private readonly Stream _stream;
public AsyncOnlyStream(Stream stream)
: base(stream)
{
_stream = stream;
// Console.WriteLine("AsyncOnlyStream created");

View File

@@ -3,22 +3,20 @@ using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Common;
using SharpCompress.Test.Mocks;
using Xunit;
namespace SharpCompress.Test.SevenZip;
#if !NETFRAMEWORK
public class SevenZipArchiveAsyncTests : ArchiveTests
{
[Fact]
public async Task SevenZipArchive_LZMA_AsyncStreamExtraction()
public async ValueTask SevenZipArchive_LZMA_AsyncStreamExtraction()
{
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "7Zip.LZMA.7z");
#if NETFRAMEWORK
using var stream = File.OpenRead(testArchive);
#else
await using var stream = File.OpenRead(testArchive);
#endif
await using var archive = await ArchiveFactory.OpenAsyncArchive(
new AsyncOnlyStream(stream)
);
@@ -33,35 +31,19 @@ public class SevenZipArchiveAsyncTests : ArchiveTests
Directory.CreateDirectory(targetDir);
}
#if NETFRAMEWORK
using var sourceStream = await entry.OpenEntryStreamAsync(CancellationToken.None);
#else
await using var sourceStream = await entry.OpenEntryStreamAsync(CancellationToken.None);
#endif
#if NETFRAMEWORK
using var targetStream = File.Create(targetPath);
#else
await using var targetStream = File.Create(targetPath);
#endif
#if NETFRAMEWORK
await sourceStream.CopyToAsync(targetStream, 81920, CancellationToken.None);
#else
await sourceStream.CopyToAsync(targetStream, CancellationToken.None);
#endif
}
VerifyFiles();
}
//[Fact]
public async Task SevenZipArchive_LZMA2_AsyncStreamExtraction()
[Fact]
public async ValueTask SevenZipArchive_LZMA2_AsyncStreamExtraction()
{
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "7Zip.LZMA2.7z");
#if NETFRAMEWORK
using var stream = File.OpenRead(testArchive);
#else
await using var stream = File.OpenRead(testArchive);
#endif
await using var archive = await ArchiveFactory.OpenAsyncArchive(
new AsyncOnlyStream(stream)
);
@@ -76,35 +58,19 @@ public class SevenZipArchiveAsyncTests : ArchiveTests
Directory.CreateDirectory(targetDir);
}
#if NETFRAMEWORK
using var sourceStream = await entry.OpenEntryStreamAsync(CancellationToken.None);
#else
await using var sourceStream = await entry.OpenEntryStreamAsync(CancellationToken.None);
#endif
#if NETFRAMEWORK
using var targetStream = File.Create(targetPath);
#else
await using var targetStream = File.Create(targetPath);
#endif
#if NETFRAMEWORK
await sourceStream.CopyToAsync(targetStream, 81920, CancellationToken.None);
#else
await sourceStream.CopyToAsync(targetStream, CancellationToken.None);
#endif
}
VerifyFiles();
}
[Fact]
public async Task SevenZipArchive_Solid_AsyncStreamExtraction()
public async ValueTask SevenZipArchive_Solid_AsyncStreamExtraction()
{
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "7Zip.solid.7z");
#if NETFRAMEWORK
using var stream = File.OpenRead(testArchive);
#else
await using var stream = File.OpenRead(testArchive);
#endif
await using var archive = await ArchiveFactory.OpenAsyncArchive(
new AsyncOnlyStream(stream)
);
@@ -119,35 +85,19 @@ public class SevenZipArchiveAsyncTests : ArchiveTests
Directory.CreateDirectory(targetDir);
}
#if NETFRAMEWORK
using var sourceStream = await entry.OpenEntryStreamAsync(CancellationToken.None);
#else
await using var sourceStream = await entry.OpenEntryStreamAsync(CancellationToken.None);
#endif
#if NETFRAMEWORK
using var targetStream = File.Create(targetPath);
#else
await using var targetStream = File.Create(targetPath);
#endif
#if NETFRAMEWORK
await sourceStream.CopyToAsync(targetStream, 81920, CancellationToken.None);
#else
await sourceStream.CopyToAsync(targetStream, CancellationToken.None);
#endif
}
VerifyFiles();
}
[Fact]
public async Task SevenZipArchive_BZip2_AsyncStreamExtraction()
public async ValueTask SevenZipArchive_BZip2_AsyncStreamExtraction()
{
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "7Zip.BZip2.7z");
#if NETFRAMEWORK
using var stream = File.OpenRead(testArchive);
#else
await using var stream = File.OpenRead(testArchive);
#endif
await using var archive = await ArchiveFactory.OpenAsyncArchive(
new AsyncOnlyStream(stream)
);
@@ -162,35 +112,19 @@ public class SevenZipArchiveAsyncTests : ArchiveTests
Directory.CreateDirectory(targetDir);
}
#if NETFRAMEWORK
using var sourceStream = await entry.OpenEntryStreamAsync(CancellationToken.None);
#else
await using var sourceStream = await entry.OpenEntryStreamAsync(CancellationToken.None);
#endif
#if NETFRAMEWORK
using var targetStream = File.Create(targetPath);
#else
await using var targetStream = File.Create(targetPath);
#endif
#if NETFRAMEWORK
await sourceStream.CopyToAsync(targetStream, 81920, CancellationToken.None);
#else
await sourceStream.CopyToAsync(targetStream, CancellationToken.None);
#endif
}
VerifyFiles();
}
[Fact]
public async Task SevenZipArchive_PPMd_AsyncStreamExtraction()
public async ValueTask SevenZipArchive_PPMd_AsyncStreamExtraction()
{
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "7Zip.PPMd.7z");
#if NETFRAMEWORK
using var stream = File.OpenRead(testArchive);
#else
await using var stream = File.OpenRead(testArchive);
#endif
await using var archive = await ArchiveFactory.OpenAsyncArchive(
new AsyncOnlyStream(stream)
);
@@ -205,23 +139,12 @@ public class SevenZipArchiveAsyncTests : ArchiveTests
Directory.CreateDirectory(targetDir);
}
#if NETFRAMEWORK
using var sourceStream = await entry.OpenEntryStreamAsync(CancellationToken.None);
#else
await using var sourceStream = await entry.OpenEntryStreamAsync(CancellationToken.None);
#endif
#if NETFRAMEWORK
using var targetStream = File.Create(targetPath);
#else
await using var targetStream = File.Create(targetPath);
#endif
#if NETFRAMEWORK
await sourceStream.CopyToAsync(targetStream, 81920, CancellationToken.None);
#else
await sourceStream.CopyToAsync(targetStream, CancellationToken.None);
#endif
}
VerifyFiles();
}
}
#endif

View File

@@ -0,0 +1,115 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Archives.Tar;
using SharpCompress.Common;
using Xunit;
namespace SharpCompress.Test.Tar;
public class TarMultiThreadTests : TestBase
{
[Fact]
public void Tar_Archive_Concurrent_Extraction_From_FileInfo()
{
// Test concurrent extraction of multiple entries from a Tar archive opened from FileInfo
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar");
var fileInfo = new FileInfo(testArchive);
var options = new SharpCompress.Readers.ReaderOptions
{
EnableMultiThreadedExtraction = true,
};
using var archive = TarArchive.OpenArchive(fileInfo, options);
// Verify multi-threading is supported
Assert.True(archive.SupportsMultiThreadedExtraction);
var entries = archive.Entries.Where(e => !e.IsDirectory).Take(5).ToList();
// Extract multiple entries concurrently
var tasks = new List<Task>();
var outputFiles = new List<string>();
foreach (var entry in entries)
{
var outputFile = Path.Combine(SCRATCH_FILES_PATH, entry.Key!);
outputFiles.Add(outputFile);
tasks.Add(
Task.Run(() =>
{
var dir = Path.GetDirectoryName(outputFile);
if (dir != null)
{
Directory.CreateDirectory(dir);
}
using var entryStream = entry.OpenEntryStream();
using var fileStream = File.Create(outputFile);
entryStream.CopyTo(fileStream);
})
);
}
Task.WaitAll(tasks.ToArray());
// Verify all files were extracted
Assert.Equal(entries.Count, outputFiles.Count);
foreach (var outputFile in outputFiles)
{
Assert.True(File.Exists(outputFile), $"File {outputFile} should exist");
}
}
[Fact]
public async Task Tar_Archive_Concurrent_Extraction_From_FileInfo_Async()
{
// Test concurrent async extraction of multiple entries from a Tar archive opened from FileInfo
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar");
var fileInfo = new FileInfo(testArchive);
var options = new SharpCompress.Readers.ReaderOptions
{
EnableMultiThreadedExtraction = true,
};
using var archive = TarArchive.OpenArchive(fileInfo, options);
var entries = archive.Entries.Where(e => !e.IsDirectory).Take(5).ToList();
// Extract multiple entries concurrently
var tasks = new List<Task>();
var outputFiles = new List<string>();
foreach (var entry in entries)
{
var outputFile = Path.Combine(SCRATCH_FILES_PATH, entry.Key!);
outputFiles.Add(outputFile);
tasks.Add(
Task.Run(async () =>
{
var dir = Path.GetDirectoryName(outputFile);
if (dir != null)
{
Directory.CreateDirectory(dir);
}
using var entryStream = await entry.OpenEntryStreamAsync();
using var fileStream = File.Create(outputFile);
await entryStream.CopyToAsync(fileStream);
})
);
}
await Task.WhenAll(tasks);
// Verify all files were extracted
Assert.Equal(entries.Count, outputFiles.Count);
foreach (var outputFile in outputFiles)
{
Assert.True(File.Exists(outputFile), $"File {outputFile} should exist");
}
}
}

View File

@@ -0,0 +1,192 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using Xunit;
namespace SharpCompress.Test.Zip;
public class ZipMultiThreadTests : TestBase
{
[Fact]
public void Zip_Archive_Without_MultiThreading_Enabled()
{
// Test that extraction still works when multi-threading is NOT enabled
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "Zip.none.zip");
var fileInfo = new FileInfo(testArchive);
// Default options - multi-threading disabled
using var archive = ZipArchive.OpenArchive(fileInfo);
// Verify multi-threading is NOT supported
Assert.False(archive.SupportsMultiThreadedExtraction);
var entry = archive.Entries.First(e => !e.IsDirectory);
var outputFile = Path.Combine(SCRATCH_FILES_PATH, entry.Key!);
var dir = Path.GetDirectoryName(outputFile);
if (dir != null)
{
Directory.CreateDirectory(dir);
}
using var entryStream = entry.OpenEntryStream();
using var fileStream = File.Create(outputFile);
entryStream.CopyTo(fileStream);
Assert.True(File.Exists(outputFile));
}
[Fact]
public void Zip_Archive_Concurrent_Extraction_From_FileInfo()
{
// Test concurrent extraction of multiple entries from a Zip archive opened from FileInfo
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "Zip.none.zip");
var fileInfo = new FileInfo(testArchive);
var options = new SharpCompress.Readers.ReaderOptions
{
EnableMultiThreadedExtraction = true,
};
using var archive = ZipArchive.OpenArchive(fileInfo, options);
// Verify multi-threading is supported
Assert.True(archive.SupportsMultiThreadedExtraction);
var entries = archive.Entries.Where(e => !e.IsDirectory).Take(5).ToList();
// Extract multiple entries concurrently
var tasks = new List<Task>();
var outputFiles = new List<string>();
foreach (var entry in entries)
{
var outputFile = Path.Combine(SCRATCH_FILES_PATH, entry.Key!);
outputFiles.Add(outputFile);
tasks.Add(
Task.Run(() =>
{
var dir = Path.GetDirectoryName(outputFile);
if (dir != null)
{
Directory.CreateDirectory(dir);
}
using var entryStream = entry.OpenEntryStream();
using var fileStream = File.Create(outputFile);
entryStream.CopyTo(fileStream);
})
);
}
Task.WaitAll(tasks.ToArray());
// Verify all files were extracted
Assert.Equal(entries.Count, outputFiles.Count);
foreach (var outputFile in outputFiles)
{
Assert.True(File.Exists(outputFile), $"File {outputFile} should exist");
}
}
[Fact]
public async Task Zip_Archive_Concurrent_Extraction_From_FileInfo_Async()
{
// Test concurrent async extraction of multiple entries from a Zip archive opened from FileInfo
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "Zip.none.zip");
var fileInfo = new FileInfo(testArchive);
var options = new SharpCompress.Readers.ReaderOptions
{
EnableMultiThreadedExtraction = true,
};
using var archive = ZipArchive.OpenArchive(fileInfo, options);
var entries = archive.Entries.Where(e => !e.IsDirectory).Take(5).ToList();
// Extract multiple entries concurrently
var tasks = new List<Task>();
var outputFiles = new List<string>();
foreach (var entry in entries)
{
var outputFile = Path.Combine(SCRATCH_FILES_PATH, entry.Key!);
outputFiles.Add(outputFile);
tasks.Add(
Task.Run(async () =>
{
var dir = Path.GetDirectoryName(outputFile);
if (dir != null)
{
Directory.CreateDirectory(dir);
}
using var entryStream = await entry.OpenEntryStreamAsync();
using var fileStream = File.Create(outputFile);
await entryStream.CopyToAsync(fileStream);
})
);
}
await Task.WhenAll(tasks);
// Verify all files were extracted
Assert.Equal(entries.Count, outputFiles.Count);
foreach (var outputFile in outputFiles)
{
Assert.True(File.Exists(outputFile), $"File {outputFile} should exist");
}
}
[Fact]
public void Zip_Archive_Concurrent_Extraction_From_Path()
{
// Test concurrent extraction when opening from path (should use FileInfo internally)
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "Zip.none.zip");
var options = new SharpCompress.Readers.ReaderOptions
{
EnableMultiThreadedExtraction = true,
};
using var archive = ZipArchive.OpenArchive(testArchive, options);
var entries = archive.Entries.Where(e => !e.IsDirectory).Take(5).ToList();
// Extract multiple entries concurrently
var tasks = new List<Task>();
var outputFiles = new List<string>();
foreach (var entry in entries)
{
var outputFile = Path.Combine(SCRATCH_FILES_PATH, entry.Key!);
outputFiles.Add(outputFile);
tasks.Add(
Task.Run(() =>
{
var dir = Path.GetDirectoryName(outputFile);
if (dir != null)
{
Directory.CreateDirectory(dir);
}
using var entryStream = entry.OpenEntryStream();
using var fileStream = File.Create(outputFile);
entryStream.CopyTo(fileStream);
})
);
}
Task.WaitAll(tasks.ToArray());
// Verify all files were extracted
Assert.Equal(entries.Count, outputFiles.Count);
foreach (var outputFile in outputFiles)
{
Assert.True(File.Exists(outputFile), $"File {outputFile} should exist");
}
}
}