Compare commits

...

7 Commits

Author SHA1 Message Date
Adam Hathcock
39d986a62c async loading for rar is better 2026-01-03 14:49:30 +00:00
Adam Hathcock
6ef1dd590f Merge pull request #1097 from adamhathcock/copilot/sub-pr-1096
Implement full async I/O support for RAR header reading pipeline
2026-01-03 14:26:42 +00:00
copilot-swe-agent[bot]
f967dd0d3d Implement full async RAR header reading pipeline and fix SharpCompressStream
- Added async read methods to MarkingBinaryReader (ReadByteAsync, ReadBytesAsync, ReadRarVIntAsync, etc.)
- Added async methods to RarCrcBinaryReader and RarCryptoBinaryReader
- Created RarHeader.TryReadBaseAsync with InitializeAsync helper
- Added TryReadNextHeaderAsync to RarHeaderFactory
- Updated ReadHeadersAsync to use fully async pipeline
- Fixed SharpCompressStream.Read to fallback to async when sync not supported
- Made RarHeader properties mutable to support async initialization

Still debugging stream position issues causing "Unknown Rar Header" errors.

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-03 14:16:12 +00:00
copilot-swe-agent[bot]
98e509df46 Add async support for RAR header reading (work in progress)
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-03 13:57:45 +00:00
copilot-swe-agent[bot]
676fb0ba52 Initial plan 2026-01-03 13:41:00 +00:00
Adam Hathcock
0ed66ae6d0 add rar async only with failures 2026-01-03 13:22:30 +00:00
Adam Hathcock
955e1d8742 add async testing 2026-01-03 13:22:20 +00:00
27 changed files with 1231 additions and 57 deletions
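The commits above describe an end-to-end async pipeline for RAR header reading. For orientation, a minimal consumption sketch based on the calls exercised in the test diffs further down (ReaderFactory.OpenAsync, MoveToNextEntryAsync, reader.Entry); the surrounding scaffolding and the decision to only list entries here are illustrative, not part of this change.

using System;
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Readers;

public static class AsyncRarListing
{
    // Lists entries using the async reader API exercised by this PR's tests.
    // The tests additionally call WriteEntryToDirectoryAsync(...) to extract each
    // entry; its option arguments are omitted here.
    public static async Task ListAsync(string archivePath)
    {
        using Stream stream = File.OpenRead(archivePath);
        using var reader = await ReaderFactory.OpenAsync(
            stream,
            new ReaderOptions { LookForHeader = true }
        );
        while (await reader.MoveToNextEntryAsync())
        {
            if (!reader.Entry.IsDirectory)
            {
                Console.WriteLine($"{reader.Entry.Key} ({reader.Entry.CompressionType})");
            }
        }
    }
}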

View File

@@ -272,5 +272,24 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
}
}
public static async Task<bool> IsRarFileAsync(
Stream stream,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
try
{
await MarkHeader
.ReadAsync(stream, true, false, cancellationToken)
.ConfigureAwait(false);
return true;
}
catch
{
return false;
}
}
#endregion
}
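A short usage sketch for the IsRarFileAsync probe added above; the method name, parameters, and namespace (SharpCompress.Archives.Rar, as seen in the test usings below) come from this diff, while the wrapper itself is illustrative. Note that the probe consumes bytes from the stream, so a caller that wants to open the archive afterwards should use a seekable stream and rewind it.

using System.IO;
using System.Threading.Tasks;
using SharpCompress.Archives.Rar;

public static class RarProbe
{
    // Returns true when the stream begins with a RAR4/RAR5 signature; IsRarFileAsync
    // swallows read/format errors and reports false instead of throwing.
    public static async Task<bool> LooksLikeRarAsync(string path)
    {
        using Stream stream = File.OpenRead(path);
        return await RarArchive.IsRarFileAsync(stream).ConfigureAwait(false);
    }
}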

View File

@@ -305,6 +305,31 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
}
}
protected override async IAsyncEnumerable<SevenZipEntry> GetEntriesAsync(
Stream stream,
[System.Runtime.CompilerServices.EnumeratorCancellation]
CancellationToken cancellationToken = default
)
{
var entries = _archive.Entries.ToList();
stream.Position = 0;
foreach (var dir in entries.Where(x => x.IsDirectory))
{
cancellationToken.ThrowIfCancellationRequested();
_currentEntry = dir;
yield return dir;
}
// For non-directory entries, yield them without creating shared streams
// Each call to GetEntryStream() will create a fresh decompression stream
// to avoid state corruption issues with async operations
foreach (var entry in entries.Where(x => !x.IsDirectory))
{
cancellationToken.ThrowIfCancellationRequested();
_currentEntry = entry;
yield return entry;
}
}
protected override EntryStream GetEntryStream()
{
// Create a fresh decompression stream for each file (no state sharing).

View File

@@ -1,5 +1,7 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Rar.Headers;
@@ -129,4 +131,130 @@ internal class MarkHeader : IRarHeader
throw new InvalidFormatException("Rar signature not found");
}
private static async Task<byte> GetByteAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
var buffer = new byte[1];
var bytesRead = await stream
.ReadAsync(buffer, 0, 1, cancellationToken)
.ConfigureAwait(false);
if (bytesRead == 1)
{
return buffer[0];
}
throw new EndOfStreamException();
}
public static async Task<MarkHeader> ReadAsync(
Stream stream,
bool leaveStreamOpen,
bool lookForHeader,
CancellationToken cancellationToken = default
)
{
var maxScanIndex = lookForHeader ? MAX_SFX_SIZE : 0;
try
{
var start = -1;
var b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
while (start <= maxScanIndex)
{
// Rar old signature: 52 45 7E 5E
// Rar4 signature: 52 61 72 21 1A 07 00
// Rar5 signature: 52 61 72 21 1A 07 01 00
if (b == 0x52)
{
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b == 0x61)
{
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b != 0x72)
{
continue;
}
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b != 0x21)
{
continue;
}
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b != 0x1a)
{
continue;
}
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b != 0x07)
{
continue;
}
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b == 1)
{
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b != 0)
{
continue;
}
return new MarkHeader(true); // Rar5
}
else if (b == 0)
{
return new MarkHeader(false); // Rar4
}
}
else if (b == 0x45)
{
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b != 0x7e)
{
continue;
}
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b != 0x5e)
{
continue;
}
throw new InvalidFormatException(
"Rar format version pre-4 is unsupported."
);
}
}
else
{
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
}
}
}
catch (Exception e)
{
if (!leaveStreamOpen)
{
stream.Dispose();
}
throw new InvalidFormatException("Error trying to read rar signature.", e);
}
throw new InvalidFormatException("Rar signature not found");
}
}
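For reference, the signatures the loop above scans for, expressed as a simple prefix check over an in-memory buffer. This is an illustrative condensation only; MarkHeader.ReadAsync performs the same comparison one byte at a time so it can also skip up to MAX_SFX_SIZE bytes of self-extractor stub when LookForHeader is set.

using System;

internal static class RarSignatureSketch
{
    private static readonly byte[] Rar4 = { 0x52, 0x61, 0x72, 0x21, 0x1A, 0x07, 0x00 };
    private static readonly byte[] Rar5 = { 0x52, 0x61, 0x72, 0x21, 0x1A, 0x07, 0x01, 0x00 };
    private static readonly byte[] PreRar4 = { 0x52, 0x45, 0x7E, 0x5E };

    // Returns true for RAR5, false for RAR4, and null when no supported signature
    // starts the buffer.
    public static bool? IsRar5(ReadOnlySpan<byte> buffer)
    {
        if (buffer.StartsWith(Rar5))
        {
            return true;
        }
        if (buffer.StartsWith(Rar4))
        {
            return false;
        }
        if (buffer.StartsWith(PreRar4))
        {
            return null; // MarkHeader.ReadAsync throws InvalidFormatException for pre-4 archives
        }
        return null;
    }
}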

View File

@@ -1,4 +1,6 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers;
@@ -8,7 +10,7 @@ namespace SharpCompress.Common.Rar.Headers;
internal class RarHeader : IRarHeader
{
private readonly HeaderType _headerType;
private readonly bool _isRar5;
private bool _isRar5;
internal static RarHeader? TryReadBase(
RarCrcBinaryReader reader,
@@ -26,6 +28,97 @@ internal class RarHeader : IRarHeader
}
}
internal static async Task<RarHeader?> TryReadBaseAsync(
RarCrcBinaryReader reader,
bool isRar5,
ArchiveEncoding archiveEncoding,
CancellationToken cancellationToken = default
)
{
try
{
return await CreateAsync(reader, isRar5, archiveEncoding, cancellationToken)
.ConfigureAwait(false);
}
catch (InvalidFormatException)
{
return null;
}
}
private static async Task<RarHeader> CreateAsync(
RarCrcBinaryReader reader,
bool isRar5,
ArchiveEncoding archiveEncoding,
CancellationToken cancellationToken
)
{
var header = new RarHeader();
await header
.InitializeAsync(reader, isRar5, archiveEncoding, cancellationToken)
.ConfigureAwait(false);
return header;
}
private RarHeader()
{
_headerType = HeaderType.Null;
ArchiveEncoding = new ArchiveEncoding();
}
private async Task InitializeAsync(
RarCrcBinaryReader reader,
bool isRar5,
ArchiveEncoding archiveEncoding,
CancellationToken cancellationToken
)
{
_isRar5 = isRar5;
ArchiveEncoding = archiveEncoding;
if (IsRar5)
{
HeaderCrc = await reader.ReadUInt32Async(cancellationToken).ConfigureAwait(false);
reader.ResetCrc();
HeaderSize = (int)
await reader.ReadRarVIntUInt32Async(3, cancellationToken).ConfigureAwait(false);
reader.Mark();
HeaderCode = await reader
.ReadRarVIntByteAsync(2, cancellationToken)
.ConfigureAwait(false);
HeaderFlags = await reader
.ReadRarVIntUInt16Async(2, cancellationToken)
.ConfigureAwait(false);
if (HasHeaderFlag(HeaderFlagsV5.HAS_EXTRA))
{
ExtraSize = await reader
.ReadRarVIntUInt32Async(5, cancellationToken)
.ConfigureAwait(false);
}
if (HasHeaderFlag(HeaderFlagsV5.HAS_DATA))
{
AdditionalDataSize = (long)
await reader.ReadRarVIntAsync(10, cancellationToken).ConfigureAwait(false);
}
}
else
{
reader.Mark();
HeaderCrc = await reader.ReadUInt16Async(cancellationToken).ConfigureAwait(false);
reader.ResetCrc();
HeaderCode = await reader.ReadByteAsync(cancellationToken).ConfigureAwait(false);
HeaderFlags = await reader.ReadUInt16Async(cancellationToken).ConfigureAwait(false);
HeaderSize = await reader.ReadInt16Async(cancellationToken).ConfigureAwait(false);
if (HasHeaderFlag(HeaderFlagsV4.HAS_DATA))
{
AdditionalDataSize = await reader
.ReadUInt32Async(cancellationToken)
.ConfigureAwait(false);
}
}
}
private RarHeader(RarCrcBinaryReader reader, bool isRar5, ArchiveEncoding archiveEncoding)
{
_headerType = HeaderType.Null;
@@ -105,25 +198,25 @@ internal class RarHeader : IRarHeader
protected bool IsRar5 => _isRar5;
protected uint HeaderCrc { get; }
protected uint HeaderCrc { get; private set; }
internal byte HeaderCode { get; }
internal byte HeaderCode { get; private set; }
protected ushort HeaderFlags { get; }
protected ushort HeaderFlags { get; private set; }
protected bool HasHeaderFlag(ushort flag) => (HeaderFlags & flag) == flag;
protected int HeaderSize { get; }
protected int HeaderSize { get; private set; }
internal ArchiveEncoding ArchiveEncoding { get; }
internal ArchiveEncoding ArchiveEncoding { get; private set; }
/// <summary>
/// Extra header size.
/// </summary>
protected uint ExtraSize { get; }
protected uint ExtraSize { get; private set; }
/// <summary>
/// Size of additional data (eg file contents)
/// </summary>
protected long AdditionalDataSize { get; }
protected long AdditionalDataSize { get; private set; }
}

View File

@@ -1,5 +1,7 @@
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -40,6 +42,34 @@ public class RarHeaderFactory
}
}
public async IAsyncEnumerable<IRarHeader> ReadHeadersAsync(
Stream stream,
[System.Runtime.CompilerServices.EnumeratorCancellation]
CancellationToken cancellationToken = default
)
{
var markHeader = await MarkHeader
.ReadAsync(stream, Options.LeaveStreamOpen, Options.LookForHeader, cancellationToken)
.ConfigureAwait(false);
_isRar5 = markHeader.IsRar5;
yield return markHeader;
RarHeader? header;
while (
(header = await TryReadNextHeaderAsync(stream, cancellationToken).ConfigureAwait(false))
!= null
)
{
yield return header;
if (header.HeaderType == HeaderType.EndArchive)
{
// End-of-archive marker. RAR reads nothing after this header, which allows third-party
// tools to append extra information (such as a digital signature) to the archive.
yield break;
}
}
}
private RarHeader? TryReadNextHeader(Stream stream)
{
RarCrcBinaryReader reader;
@@ -198,6 +228,169 @@ public class RarHeaderFactory
}
}
private async Task<RarHeader?> TryReadNextHeaderAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
RarCrcBinaryReader reader;
if (!IsEncrypted)
{
reader = new RarCrcBinaryReader(stream);
}
else
{
if (Options.Password is null)
{
throw new CryptographicException(
"Encrypted Rar archive has no password specified."
);
}
if (_isRar5 && _cryptInfo != null)
{
_cryptInfo.ReadInitV(new MarkingBinaryReader(stream));
var _headerKey = new CryptKey5(Options.Password!, _cryptInfo);
reader = new RarCryptoBinaryReader(stream, _headerKey, _cryptInfo.Salt);
}
else
{
var key = new CryptKey3(Options.Password);
reader = new RarCryptoBinaryReader(stream, key);
}
}
var header = await RarHeader
.TryReadBaseAsync(reader, _isRar5, Options.ArchiveEncoding, cancellationToken)
.ConfigureAwait(false);
if (header is null)
{
return null;
}
switch (header.HeaderCode)
{
case HeaderCodeV.RAR5_ARCHIVE_HEADER:
case HeaderCodeV.RAR4_ARCHIVE_HEADER:
{
var ah = new ArchiveHeader(header, reader);
if (ah.IsEncrypted == true)
{
//!!! rar5 we don't know yet
IsEncrypted = true;
}
return ah;
}
case HeaderCodeV.RAR4_PROTECT_HEADER:
{
var ph = new ProtectHeader(header, reader);
// skip the recovery record data, we do not use it.
switch (StreamingMode)
{
case StreamingMode.Seekable:
{
reader.BaseStream.Position += ph.DataSize;
}
break;
case StreamingMode.Streaming:
{
reader.BaseStream.Skip(ph.DataSize);
}
break;
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
}
return ph;
}
case HeaderCodeV.RAR5_SERVICE_HEADER:
{
var fh = new FileHeader(header, reader, HeaderType.Service);
if (fh.FileName == "CMT")
{
fh.PackedStream = new ReadOnlySubStream(reader.BaseStream, fh.CompressedSize);
}
else
{
SkipData(fh, reader);
}
return fh;
}
case HeaderCodeV.RAR4_NEW_SUB_HEADER:
{
var fh = new FileHeader(header, reader, HeaderType.NewSub);
SkipData(fh, reader);
return fh;
}
case HeaderCodeV.RAR5_FILE_HEADER:
case HeaderCodeV.RAR4_FILE_HEADER:
{
var fh = new FileHeader(header, reader, HeaderType.File);
switch (StreamingMode)
{
case StreamingMode.Seekable:
{
fh.DataStartPosition = reader.BaseStream.Position;
reader.BaseStream.Position += fh.CompressedSize;
}
break;
case StreamingMode.Streaming:
{
var ms = new ReadOnlySubStream(reader.BaseStream, fh.CompressedSize);
if (fh.R4Salt is null && fh.Rar5CryptoInfo is null)
{
fh.PackedStream = ms;
}
else
{
fh.PackedStream = new RarCryptoWrapper(
ms,
fh.R4Salt is null
? fh.Rar5CryptoInfo.NotNull().Salt
: fh.R4Salt,
fh.R4Salt is null
? new CryptKey5(
Options.Password,
fh.Rar5CryptoInfo.NotNull()
)
: new CryptKey3(Options.Password)
);
}
}
break;
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
}
return fh;
}
case HeaderCodeV.RAR5_END_ARCHIVE_HEADER:
case HeaderCodeV.RAR4_END_ARCHIVE_HEADER:
{
return new EndArchiveHeader(header, reader);
}
case HeaderCodeV.RAR5_ARCHIVE_ENCRYPTION_HEADER:
{
var cryptoHeader = new ArchiveCryptHeader(header, reader);
IsEncrypted = true;
_cryptInfo = cryptoHeader.CryptInfo;
return cryptoHeader;
}
default:
{
throw new InvalidFormatException("Unknown Rar Header: " + header.HeaderCode);
}
}
}
private void SkipData(FileHeader fh, RarCrcBinaryReader reader)
{
switch (StreamingMode)

View File

@@ -1,4 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.Rar;
using SharpCompress.IO;
@@ -32,4 +34,27 @@ internal class RarCrcBinaryReader : MarkingBinaryReader
_currentCrc = RarCRC.CheckCrc(_currentCrc, result, 0, result.Length);
return result;
}
// Async versions
public override async Task<byte> ReadByteAsync(CancellationToken cancellationToken = default)
{
var b = await base.ReadByteAsync(cancellationToken).ConfigureAwait(false);
_currentCrc = RarCRC.CheckCrc(_currentCrc, b);
return b;
}
public override async Task<byte[]> ReadBytesAsync(
int count,
CancellationToken cancellationToken = default
)
{
var result = await base.ReadBytesAsync(count, cancellationToken).ConfigureAwait(false);
_currentCrc = RarCRC.CheckCrc(_currentCrc, result, 0, result.Length);
return result;
}
public async Task<byte[]> ReadBytesNoCrcAsync(
int count,
CancellationToken cancellationToken = default
) => await base.ReadBytesAsync(count, cancellationToken).ConfigureAwait(false);
}

View File

@@ -2,6 +2,8 @@
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Crypto;
@@ -81,4 +83,49 @@ internal sealed class RarCryptoBinaryReader : RarCrcBinaryReader
BaseStream.Position = position + _data.Count;
ClearQueue();
}
// Async versions
public override async Task<byte> ReadByteAsync(CancellationToken cancellationToken = default) =>
(await ReadAndDecryptBytesAsync(1, cancellationToken).ConfigureAwait(false))[0];
public override async Task<byte[]> ReadBytesAsync(
int count,
CancellationToken cancellationToken = default
) => await ReadAndDecryptBytesAsync(count, cancellationToken).ConfigureAwait(false);
private async Task<byte[]> ReadAndDecryptBytesAsync(
int count,
CancellationToken cancellationToken
)
{
var queueSize = _data.Count;
var sizeToRead = count - queueSize;
if (sizeToRead > 0)
{
var alignedSize = sizeToRead + ((~sizeToRead + 1) & 0xf); // round up to whole 16-byte cipher blocks
for (var i = 0; i < alignedSize / 16; i++)
{
var cipherText = await ReadBytesNoCrcAsync(16, cancellationToken)
.ConfigureAwait(false);
var readBytes = _rijndael.ProcessBlock(cipherText);
foreach (var readByte in readBytes)
{
_data.Enqueue(readByte);
}
}
}
var decryptedBytes = new byte[count];
for (var i = 0; i < count; i++)
{
var b = _data.Dequeue();
decryptedBytes[i] = b;
UpdateCrc(b);
}
_readCount += count;
return decryptedBytes;
}
}
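The alignedSize expression above rounds the number of outstanding bytes up to a whole number of 16-byte cipher blocks before reading and decrypting. A tiny standalone check (illustrative only) showing that n + ((~n + 1) & 0xf) is the usual round-up-to-16, i.e. (n + 15) & ~15:

using System;

internal static class BlockAlignSketch
{
    public static void Main()
    {
        foreach (var n in new[] { 0, 1, 5, 16, 17, 31, 32 })
        {
            var viaComplement = n + ((~n + 1) & 0xf);
            var viaMask = (n + 15) & ~15;
            // e.g. 5 -> 16, 17 -> 32, 32 -> 32
            Console.WriteLine($"{n,2} -> {viaComplement,2} (equal: {viaComplement == viaMask})");
        }
    }
}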

View File

@@ -3,6 +3,8 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -26,6 +28,14 @@ public abstract class RarVolume : Volume
internal abstract IEnumerable<RarFilePart> ReadFileParts();
internal virtual IAsyncEnumerable<RarFilePart> ReadFilePartsAsync(
CancellationToken cancellationToken = default
)
{
// Default implementation throws; derived volume types override this to provide async support.
throw new NotImplementedException("Async read is not supported for this volume type");
}
internal abstract RarFilePart CreateFilePart(MarkHeader markHeader, FileHeader fileHeader);
internal IEnumerable<RarFilePart> GetVolumeFileParts()
@@ -71,6 +81,56 @@ public abstract class RarVolume : Volume
}
}
internal async IAsyncEnumerable<RarFilePart> GetVolumeFilePartsAsync(
[System.Runtime.CompilerServices.EnumeratorCancellation]
CancellationToken cancellationToken = default
)
{
MarkHeader? lastMarkHeader = null;
await foreach (
var header in _headerFactory
.ReadHeadersAsync(Stream, cancellationToken)
.ConfigureAwait(false)
)
{
switch (header.HeaderType)
{
case HeaderType.Mark:
{
lastMarkHeader = (MarkHeader)header;
}
break;
case HeaderType.Archive:
{
ArchiveHeader = (ArchiveHeader)header;
}
break;
case HeaderType.File:
{
var fh = (FileHeader)header;
if (_maxCompressionAlgorithm < fh.CompressionAlgorithm)
{
_maxCompressionAlgorithm = fh.CompressionAlgorithm;
}
yield return CreateFilePart(lastMarkHeader!, fh);
}
break;
case HeaderType.Service:
{
var fh = (FileHeader)header;
if (fh.FileName == "CMT")
{
var buffer = new byte[fh.CompressedSize];
fh.PackedStream.NotNull().ReadFully(buffer);
Comment = Encoding.UTF8.GetString(buffer, 0, buffer.Length - 1);
}
}
break;
}
}
}
private void EnsureArchiveHeaderLoaded()
{
if (ArchiveHeader is null)

View File

@@ -2,6 +2,7 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
@@ -200,4 +201,196 @@ internal class StreamingZipHeaderFactory : ZipHeaderFactory
yield return header;
}
}
internal async IAsyncEnumerable<ZipHeader> ReadStreamHeaderAsync(
Stream stream,
[System.Runtime.CompilerServices.EnumeratorCancellation]
CancellationToken cancellationToken = default
)
{
if (stream is not SharpCompressStream) // ensure the stream is a SharpCompressStream so the buffer size is already set
{
// the original code wrapped this in RewindableStream; wrap in SharpCompressStream instead so the buffer size can be set
if (stream is SourceStream src)
{
stream = new SharpCompressStream(
stream,
src.ReaderOptions.LeaveStreamOpen,
bufferSize: src.ReaderOptions.BufferSize
);
}
else
{
throw new ArgumentException("Stream must be a SharpCompressStream", nameof(stream));
}
}
var rewindableStream = (SharpCompressStream)stream;
while (true)
{
cancellationToken.ThrowIfCancellationRequested();
var reader = new AsyncBinaryReader(rewindableStream, leaveOpen: true);
uint headerBytes = 0;
if (
_lastEntryHeader != null
&& FlagUtility.HasFlag(_lastEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor)
)
{
if (_lastEntryHeader.Part is null)
{
continue;
}
// removed requirement for FixStreamedFileLocation()
var pos = rewindableStream.CanSeek ? (long?)rewindableStream.Position : null;
var crc = await reader.ReadUInt32Async().ConfigureAwait(false);
if (crc == POST_DATA_DESCRIPTOR)
{
crc = await reader.ReadUInt32Async().ConfigureAwait(false);
}
_lastEntryHeader.Crc = crc;
//attempt 32bit read
ulong compSize = await reader.ReadUInt32Async().ConfigureAwait(false);
ulong uncompSize = await reader.ReadUInt32Async().ConfigureAwait(false);
headerBytes = await reader.ReadUInt32Async().ConfigureAwait(false);
//check for zip64 sentinel or unexpected header
bool isSentinel = compSize == 0xFFFFFFFF || uncompSize == 0xFFFFFFFF;
bool isHeader = headerBytes == 0x04034b50 || headerBytes == 0x02014b50;
if (!isHeader && !isSentinel)
{
//reshuffle into 64-bit values
compSize = (uncompSize << 32) | compSize;
uncompSize =
((ulong)headerBytes << 32)
| await reader.ReadUInt32Async().ConfigureAwait(false);
headerBytes = await reader.ReadUInt32Async().ConfigureAwait(false);
}
else if (isSentinel)
{
//standards-compliant zip64 descriptor
compSize = await reader.ReadUInt64Async().ConfigureAwait(false);
uncompSize = await reader.ReadUInt64Async().ConfigureAwait(false);
}
_lastEntryHeader.CompressedSize = (long)compSize;
_lastEntryHeader.UncompressedSize = (long)uncompSize;
if (pos.HasValue)
{
_lastEntryHeader.DataStartPosition = pos - _lastEntryHeader.CompressedSize;
}
}
else if (_lastEntryHeader != null && _lastEntryHeader.IsZip64)
{
if (_lastEntryHeader.Part is null)
continue;
//reader = ((StreamingZipFilePart)_lastEntryHeader.Part).FixStreamedFileLocation(
// ref rewindableStream
//);
var pos = rewindableStream.CanSeek ? (long?)rewindableStream.Position : null;
headerBytes = await reader.ReadUInt32Async().ConfigureAwait(false);
var version = await reader.ReadUInt16Async().ConfigureAwait(false);
var flags = (HeaderFlags)await reader.ReadUInt16Async().ConfigureAwait(false);
var compressionMethod = (ZipCompressionMethod)
await reader.ReadUInt16Async().ConfigureAwait(false);
var lastModifiedDate = await reader.ReadUInt16Async().ConfigureAwait(false);
var lastModifiedTime = await reader.ReadUInt16Async().ConfigureAwait(false);
var crc = await reader.ReadUInt32Async().ConfigureAwait(false);
if (crc == POST_DATA_DESCRIPTOR)
{
crc = await reader.ReadUInt32Async().ConfigureAwait(false);
}
_lastEntryHeader.Crc = crc;
// The DataDescriptor can be either 64bit or 32bit
var compressed_size = await reader.ReadUInt32Async().ConfigureAwait(false);
var uncompressed_size = await reader.ReadUInt32Async().ConfigureAwait(false);
// Check if we have header or 64bit DataDescriptor
var test_header = !(headerBytes == 0x04034b50 || headerBytes == 0x02014b50);
var test_64bit = ((long)uncompressed_size << 32) | compressed_size;
if (test_64bit == _lastEntryHeader.CompressedSize && test_header)
{
_lastEntryHeader.UncompressedSize =
((long)await reader.ReadUInt32Async().ConfigureAwait(false) << 32)
| headerBytes;
headerBytes = await reader.ReadUInt32Async().ConfigureAwait(false);
}
else
{
_lastEntryHeader.UncompressedSize = uncompressed_size;
}
if (pos.HasValue)
{
_lastEntryHeader.DataStartPosition = pos - _lastEntryHeader.CompressedSize;
// 4 = First 4 bytes of the entry header (i.e. 50 4B 03 04)
rewindableStream.Position = pos.Value + 4;
}
}
else
{
headerBytes = await reader.ReadUInt32Async().ConfigureAwait(false);
}
_lastEntryHeader = null;
var header = await ReadHeader(headerBytes, reader).ConfigureAwait(false);
if (header is null)
{
yield break;
}
//entry could be zero bytes so we need to know that.
if (header.ZipHeaderType == ZipHeaderType.LocalEntry)
{
var local_header = ((LocalEntryHeader)header);
var dir_header = _entries?.FirstOrDefault(entry =>
entry.Key == local_header.Name
&& local_header.CompressedSize == 0
&& local_header.UncompressedSize == 0
&& local_header.Crc == 0
&& local_header.IsDirectory == false
);
if (dir_header != null)
{
local_header.UncompressedSize = dir_header.Size;
local_header.CompressedSize = dir_header.CompressedSize;
local_header.Crc = (uint)dir_header.Crc;
}
// If we have CompressedSize, there is data to be read
if (local_header.CompressedSize > 0)
{
header.HasData = true;
} // Check whether the entry is streamed (length is 0 and declared in the post-data descriptor)
else if (local_header.Flags.HasFlag(HeaderFlags.UsePostDataDescriptor))
{
var nextHeaderBytes = await reader.ReadUInt32Async().ConfigureAwait(false);
((IStreamStack)rewindableStream).Rewind(sizeof(uint));
// Check if next data is PostDataDescriptor, streamed file with 0 length
header.HasData = !IsHeader(nextHeaderBytes);
}
else // Not streaming and compressed size is 0, so there is no data
{
header.HasData = false;
}
}
yield return header;
}
}
}
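For context on the descriptor handling in ReadStreamHeaderAsync above: the post-data descriptor that follows a streamed entry is laid out roughly as sketched in the comments below. The 0x08074B50 signature value is the conventional one from the ZIP specification; in this code base the constant is named POST_DATA_DESCRIPTOR.

// Post-data descriptor, written after entries flagged with
// HeaderFlags.UsePostDataDescriptor:
//
//   optional signature    4 bytes  (0x08074B50, POST_DATA_DESCRIPTOR)
//   crc-32                4 bytes
//   compressed size       4 bytes, or 8 bytes for ZIP64 entries
//   uncompressed size     4 bytes, or 8 bytes for ZIP64 entries
//
// The width of the size fields cannot be known up front in a forward-only stream,
// so the reader above first reads 32-bit sizes and then checks whether the words
// that follow look like the next local/central header (0x04034B50 / 0x02014B50)
// or like the 0xFFFFFFFF ZIP64 sentinel before committing to a 32-bit or 64-bit
// interpretation.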

View File

@@ -37,6 +37,14 @@ public class RarFactory : Factory, IArchiveFactory, IMultiArchiveFactory, IReade
int bufferSize = ReaderOptions.DefaultBufferSize
) => RarArchive.IsRarFile(stream);
/// <inheritdoc/>
public override async Task<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
) => await RarArchive.IsRarFileAsync(stream, null, cancellationToken).ConfigureAwait(false);
/// <inheritdoc/>
public override FileInfo? GetFilePart(int index, FileInfo part1) =>
RarArchiveVolumeFactory.GetFilePart(index, part1);

View File

@@ -1,6 +1,8 @@
using System;
using System.Buffers.Binary;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.IO;
@@ -155,4 +157,168 @@ internal class MarkingBinaryReader : BinaryReader
throw new FormatException("malformed vint");
}
// Async versions of read methods
public virtual async Task<byte> ReadByteAsync(CancellationToken cancellationToken = default)
{
CurrentReadByteCount++;
var buffer = new byte[1];
var bytesRead = await BaseStream
.ReadAsync(buffer, 0, 1, cancellationToken)
.ConfigureAwait(false);
if (bytesRead != 1)
{
throw new EndOfStreamException();
}
return buffer[0];
}
public virtual async Task<byte[]> ReadBytesAsync(
int count,
CancellationToken cancellationToken = default
)
{
CurrentReadByteCount += count;
var bytes = new byte[count];
var totalRead = 0;
while (totalRead < count)
{
var bytesRead = await BaseStream
.ReadAsync(bytes, totalRead, count - totalRead, cancellationToken)
.ConfigureAwait(false);
if (bytesRead == 0)
{
throw new InvalidFormatException(
string.Format(
"Could not read the requested amount of bytes. End of stream reached. Requested: {0} Read: {1}",
count,
totalRead
)
);
}
totalRead += bytesRead;
}
return bytes;
}
public async Task<bool> ReadBooleanAsync(CancellationToken cancellationToken = default) =>
await ReadByteAsync(cancellationToken).ConfigureAwait(false) != 0;
public async Task<short> ReadInt16Async(CancellationToken cancellationToken = default) =>
BinaryPrimitives.ReadInt16LittleEndian(
await ReadBytesAsync(2, cancellationToken).ConfigureAwait(false)
);
public async Task<int> ReadInt32Async(CancellationToken cancellationToken = default) =>
BinaryPrimitives.ReadInt32LittleEndian(
await ReadBytesAsync(4, cancellationToken).ConfigureAwait(false)
);
public async Task<long> ReadInt64Async(CancellationToken cancellationToken = default) =>
BinaryPrimitives.ReadInt64LittleEndian(
await ReadBytesAsync(8, cancellationToken).ConfigureAwait(false)
);
public async Task<sbyte> ReadSByteAsync(CancellationToken cancellationToken = default) =>
(sbyte)await ReadByteAsync(cancellationToken).ConfigureAwait(false);
public async Task<ushort> ReadUInt16Async(CancellationToken cancellationToken = default) =>
BinaryPrimitives.ReadUInt16LittleEndian(
await ReadBytesAsync(2, cancellationToken).ConfigureAwait(false)
);
public async Task<uint> ReadUInt32Async(CancellationToken cancellationToken = default) =>
BinaryPrimitives.ReadUInt32LittleEndian(
await ReadBytesAsync(4, cancellationToken).ConfigureAwait(false)
);
public async Task<ulong> ReadUInt64Async(CancellationToken cancellationToken = default) =>
BinaryPrimitives.ReadUInt64LittleEndian(
await ReadBytesAsync(8, cancellationToken).ConfigureAwait(false)
);
public Task<ulong> ReadRarVIntAsync(
int maxBytes = 10,
CancellationToken cancellationToken = default
) => DoReadRarVIntAsync((maxBytes - 1) * 7, cancellationToken);
private async Task<ulong> DoReadRarVIntAsync(int maxShift, CancellationToken cancellationToken)
{
var shift = 0;
ulong result = 0;
do
{
var b0 = await ReadByteAsync(cancellationToken).ConfigureAwait(false);
var b1 = ((uint)b0) & 0x7f;
ulong n = b1;
var shifted = n << shift;
if (n != shifted >> shift)
{
// overflow
break;
}
result |= shifted;
if (b0 == b1)
{
return result;
}
shift += 7;
} while (shift <= maxShift);
throw new FormatException("malformed vint");
}
public Task<uint> ReadRarVIntUInt32Async(
int maxBytes = 5,
CancellationToken cancellationToken = default
) => DoReadRarVIntUInt32Async((maxBytes - 1) * 7, cancellationToken);
public async Task<ushort> ReadRarVIntUInt16Async(
int maxBytes = 3,
CancellationToken cancellationToken = default
) =>
checked(
(ushort)
await DoReadRarVIntUInt32Async((maxBytes - 1) * 7, cancellationToken)
.ConfigureAwait(false)
);
public async Task<byte> ReadRarVIntByteAsync(
int maxBytes = 2,
CancellationToken cancellationToken = default
) =>
checked(
(byte)
await DoReadRarVIntUInt32Async((maxBytes - 1) * 7, cancellationToken)
.ConfigureAwait(false)
);
private async Task<uint> DoReadRarVIntUInt32Async(
int maxShift,
CancellationToken cancellationToken
)
{
var shift = 0;
uint result = 0;
do
{
var b0 = await ReadByteAsync(cancellationToken).ConfigureAwait(false);
var b1 = ((uint)b0) & 0x7f;
var n = b1;
var shifted = n << shift;
if (n != shifted >> shift)
{
// overflow
break;
}
result |= shifted;
if (b0 == b1)
{
return result;
}
shift += 7;
} while (shift <= maxShift);
throw new FormatException("malformed vint");
}
}
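The two vint loops above implement the RAR 5 variable-length integer encoding: the low seven bits of each byte are payload and a set high bit means another byte follows (the b0 == b1 test is equivalent to checking that the high bit is clear). A small self-contained decoding sketch, illustrative only and without the overflow/maxBytes guards of the real code:

using System;

internal static class RarVIntSketch
{
    // Decodes one vint starting at `offset` and advances the offset past it.
    public static ulong Decode(byte[] data, ref int offset)
    {
        ulong result = 0;
        var shift = 0;
        while (true)
        {
            var b = data[offset++];
            result |= (ulong)(b & 0x7f) << shift; // low 7 bits are payload
            if ((b & 0x80) == 0)                  // high bit clear: last byte
            {
                return result;
            }
            shift += 7;
        }
    }

    public static void Main()
    {
        var offset = 0;
        // 0x96 = continuation bit + 0x16, 0x01 = final byte: 0x16 | (0x01 << 7) = 150
        Console.WriteLine(Decode(new byte[] { 0x96, 0x01 }, ref offset)); // 150
    }
}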

View File

@@ -245,8 +245,7 @@ public class SharpCompressStream : Stream, IStreamStack
{
return 0;
}
int read;
read = Stream.Read(buffer, offset, count);
int read = Stream.Read(buffer, offset, count);
_internalPosition += read;
return read;
}

View File

@@ -18,6 +18,7 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader
{
private bool _completed;
private IEnumerator<TEntry>? _entriesForCurrentReadStream;
private IAsyncEnumerator<TEntry>? _entriesForCurrentReadStreamAsync;
private bool _wroteCurrentEntry;
internal AbstractReader(ReaderOptions options, ArchiveType archiveType)
@@ -104,7 +105,9 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader
}
if (_entriesForCurrentReadStream is null)
{
return LoadStreamForReading(RequestInitialStream());
var loaded = await LoadStreamForReadingAsync(RequestInitialStream(), cancellationToken)
.ConfigureAwait(false);
return loaded;
}
if (!_wroteCurrentEntry)
{
@@ -121,7 +124,10 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader
protected bool LoadStreamForReading(Stream stream)
{
_entriesForCurrentReadStream?.Dispose();
if (_entriesForCurrentReadStream is not null)
{
_entriesForCurrentReadStream.Dispose();
}
if (stream is null || !stream.CanRead)
{
throw new MultipartStreamRequiredException(
@@ -134,6 +140,30 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader
return _entriesForCurrentReadStream.MoveNext();
}
protected virtual async Task<bool> LoadStreamForReadingAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
if (_entriesForCurrentReadStreamAsync is not null)
{
await _entriesForCurrentReadStreamAsync.DisposeAsync().ConfigureAwait(false);
}
if (stream is null || !stream.CanRead)
{
throw new MultipartStreamRequiredException(
"File is split into multiple archives: '"
+ Entry.Key
+ "'. A new readable stream is required. Use Cancel if it was intended."
);
}
// Default implementation enumerates entries via GetEntriesAsync
_entriesForCurrentReadStreamAsync = GetEntriesAsync(stream, cancellationToken)
.GetAsyncEnumerator(cancellationToken);
return await _entriesForCurrentReadStreamAsync.MoveNextAsync();
}
protected virtual Stream RequestInitialStream() =>
Volume.NotNull("Volume isn't loaded.").Stream;
@@ -142,6 +172,11 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader
protected abstract IEnumerable<TEntry> GetEntries(Stream stream);
protected abstract IAsyncEnumerable<TEntry> GetEntriesAsync(
Stream stream,
CancellationToken cancellationToken = default
);
#region Entry Skip/Write
private void SkipEntry()

View File

@@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Arc;
@@ -37,5 +38,20 @@ namespace SharpCompress.Readers.Arc
yield return new ArcEntry(new ArcFilePart(header, stream));
}
}
protected override async IAsyncEnumerable<ArcEntry> GetEntriesAsync(
Stream stream,
[System.Runtime.CompilerServices.EnumeratorCancellation]
CancellationToken cancellationToken = default
)
{
ArcEntryHeader headerReader = new ArcEntryHeader(Options.ArchiveEncoding);
ArcEntryHeader? header;
while ((header = headerReader.ReadHeader(stream)) != null)
{
cancellationToken.ThrowIfCancellationRequested();
yield return new ArcEntry(new ArcFilePart(header, stream));
}
}
}
}

View File

@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using SharpCompress.Common;
using SharpCompress.Common.Arj;
using SharpCompress.Common.Arj.Headers;
@@ -85,5 +86,46 @@ namespace SharpCompress.Readers.Arj
protected virtual IEnumerable<FilePart> CreateFilePartEnumerableForCurrentEntry() =>
Entry.Parts;
protected override async IAsyncEnumerable<ArjEntry> GetEntriesAsync(
Stream stream,
[System.Runtime.CompilerServices.EnumeratorCancellation]
CancellationToken cancellationToken = default
)
{
var encoding = new ArchiveEncoding();
var mainHeaderReader = new ArjMainHeader(encoding);
var localHeaderReader = new ArjLocalHeader(encoding);
var mainHeader = mainHeaderReader.Read(stream);
if (mainHeader?.IsVolume == true)
{
throw new MultiVolumeExtractionException(
"Multi volumes are currently not supported"
);
}
if (mainHeader?.IsGabled == true)
{
throw new CryptographicException(
"Password protected archives are currently not supported"
);
}
if (_volume == null)
{
_volume = new ArjVolume(stream, Options, 0);
ValidateArchive(_volume);
}
while (true)
{
cancellationToken.ThrowIfCancellationRequested();
var localHeader = localHeaderReader.Read(stream);
if (localHeader == null)
break;
yield return new ArjEntry(new ArjFilePart((ArjLocalHeader)localHeader, stream));
}
}
}
}

View File

@@ -6,6 +6,7 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Arj;

View File

@@ -1,31 +1,31 @@
using System;
using System.IO;
using System.Threading;
using SharpCompress.Common;
using SharpCompress.Common.Arj;
namespace SharpCompress.Readers.Arj
namespace SharpCompress.Readers.Arj;
internal class SingleVolumeArjReader : ArjReader
{
internal class SingleVolumeArjReader : ArjReader
private readonly Stream _stream;
internal SingleVolumeArjReader(Stream stream, ReaderOptions options)
: base(options)
{
private readonly Stream _stream;
stream.NotNull(nameof(stream));
_stream = stream;
}
internal SingleVolumeArjReader(Stream stream, ReaderOptions options)
: base(options)
protected override Stream RequestInitialStream() => _stream;
protected override void ValidateArchive(ArjVolume archive)
{
if (archive.IsMultiVolume)
{
stream.NotNull(nameof(stream));
_stream = stream;
}
protected override Stream RequestInitialStream() => _stream;
protected override void ValidateArchive(ArjVolume archive)
{
if (archive.IsMultiVolume)
{
throw new MultiVolumeExtractionException(
"Streamed archive is a Multi-volume archive. Use a different ArjReader method to extract."
);
}
throw new MultiVolumeExtractionException(
"Streamed archive is a Multi-volume archive. Use a different ArjReader method to extract."
);
}
}
}

View File

@@ -1,5 +1,6 @@
using System.Collections.Generic;
using System.IO;
using System.Threading;
using SharpCompress.Common;
using SharpCompress.Common.GZip;
@@ -30,4 +31,17 @@ public class GZipReader : AbstractReader<GZipEntry, GZipVolume>
protected override IEnumerable<GZipEntry> GetEntries(Stream stream) =>
GZipEntry.GetEntries(stream, Options);
protected override async IAsyncEnumerable<GZipEntry> GetEntriesAsync(
Stream stream,
[System.Runtime.CompilerServices.EnumeratorCancellation]
CancellationToken cancellationToken = default
)
{
foreach (var entry in GZipEntry.GetEntries(stream, Options))
{
cancellationToken.ThrowIfCancellationRequested();
yield return entry;
}
}
}

View File

@@ -2,6 +2,8 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Compressors.Rar;
@@ -97,6 +99,20 @@ public abstract class RarReader : AbstractReader<RarReaderEntry, RarVolume>
}
}
protected override async IAsyncEnumerable<RarReaderEntry> GetEntriesAsync(
Stream stream,
[System.Runtime.CompilerServices.EnumeratorCancellation]
CancellationToken cancellationToken = default
)
{
volume = new RarReaderVolume(stream, Options, 0);
await foreach (var fp in volume.ReadFilePartsAsync(cancellationToken).ConfigureAwait(false))
{
ValidateArchive(volume);
yield return new RarReaderEntry(volume.IsSolidArchive, fp);
}
}
protected virtual IEnumerable<FilePart> CreateFilePartEnumerableForCurrentEntry() =>
Entry.Parts;

View File

@@ -1,5 +1,6 @@
using System.Collections.Generic;
using System.IO;
using System.Threading;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
@@ -15,4 +16,8 @@ public class RarReaderVolume : RarVolume
new NonSeekableStreamFilePart(markHeader, fileHeader, Index);
internal override IEnumerable<RarFilePart> ReadFileParts() => GetVolumeFileParts();
internal override IAsyncEnumerable<RarFilePart> ReadFilePartsAsync(
CancellationToken cancellationToken = default
) => GetVolumeFilePartsAsync(cancellationToken);
}

View File

@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using SharpCompress.Archives.GZip;
using SharpCompress.Archives.Tar;
using SharpCompress.Common;
@@ -124,4 +125,24 @@ public class TarReader : AbstractReader<TarEntry, TarVolume>
compressionType,
Options.ArchiveEncoding
);
protected override async IAsyncEnumerable<TarEntry> GetEntriesAsync(
Stream stream,
[System.Runtime.CompilerServices.EnumeratorCancellation]
CancellationToken cancellationToken = default
)
{
foreach (
var entry in TarEntry.GetEntries(
StreamingMode.Streaming,
stream,
compressionType,
Options.ArchiveEncoding
)
)
{
cancellationToken.ThrowIfCancellationRequested();
yield return entry;
}
}
}

View File

@@ -1,5 +1,6 @@
using System.Collections.Generic;
using System.IO;
using System.Threading;
using SharpCompress.Common;
using SharpCompress.Common.Zip;
using SharpCompress.Common.Zip.Headers;
@@ -91,4 +92,40 @@ public class ZipReader : AbstractReader<ZipEntry, ZipVolume>
}
}
}
protected override async IAsyncEnumerable<ZipEntry> GetEntriesAsync(
Stream stream,
[System.Runtime.CompilerServices.EnumeratorCancellation]
CancellationToken cancellationToken = default
)
{
await foreach (var h in _headerFactory.ReadStreamHeaderAsync(stream, cancellationToken))
{
if (h != null)
{
switch (h.ZipHeaderType)
{
case ZipHeaderType.LocalEntry:
{
yield return new ZipEntry(
new StreamingZipFilePart((LocalEntryHeader)h, stream)
);
}
break;
case ZipHeaderType.DirectoryEntry:
// DirectoryEntry headers in the central directory are intentionally skipped.
// In streaming mode, we can only read forward, and DirectoryEntry headers
// reference LocalEntry headers that have already been processed. The file
// data comes from LocalEntry headers, not DirectoryEntry headers.
// For multi-volume ZIPs where file data spans multiple files, use ZipArchive
// instead, which requires seekable streams.
break;
case ZipHeaderType.DirectoryEnd:
{
yield break;
}
}
}
}
}
}

View File

@@ -7,6 +7,7 @@ using SharpCompress.Archives.Rar;
using SharpCompress.Common;
using SharpCompress.Readers;
using SharpCompress.Readers.Rar;
using SharpCompress.Test.Mocks;
using Xunit;
namespace SharpCompress.Test.Rar;
@@ -45,7 +46,7 @@ public class RarReaderAsyncTests : ReaderTests
)
)
{
while (reader.MoveToNextEntry())
while (await reader.MoveToNextEntryAsync())
{
await reader.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
@@ -73,12 +74,13 @@ public class RarReaderAsyncTests : ReaderTests
var reader = RarReader.Open(
archives
.Select(s => Path.Combine(TEST_ARCHIVES_PATH, s))
.Select(p => File.OpenRead(p)),
.Select(p => File.OpenRead(p))
.Select(x => new AsyncOnlyStream(x)),
new ReaderOptions { Password = "test" }
)
)
{
while (reader.MoveToNextEntry())
while (await reader.MoveToNextEntryAsync())
{
await reader.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
@@ -123,10 +125,11 @@ public class RarReaderAsyncTests : ReaderTests
var streams = archives
.Select(s => Path.Combine(SCRATCH2_FILES_PATH, s))
.Select(File.OpenRead)
.Select(x => new AsyncOnlyStream(x))
.ToList();
using (var reader = RarReader.Open(streams))
{
while (reader.MoveToNextEntry())
while (await reader.MoveToNextEntryAsync())
{
await reader.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
@@ -204,7 +207,7 @@ public class RarReaderAsyncTests : ReaderTests
private async Task DoRar_Entry_Stream_Async(string filename)
{
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, filename)))
using (var reader = ReaderFactory.Open(stream))
using (var reader = await ReaderFactory.OpenAsync(stream))
{
while (await reader.MoveToNextEntryAsync())
{
@@ -248,9 +251,14 @@ public class RarReaderAsyncTests : ReaderTests
using (
var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Rar.Audio_program.rar"))
)
using (var reader = ReaderFactory.Open(stream, new ReaderOptions { LookForHeader = true }))
using (
var reader = await ReaderFactory.OpenAsync(
stream,
new ReaderOptions { LookForHeader = true }
)
)
{
while (reader.MoveToNextEntry())
while (await reader.MoveToNextEntryAsync())
{
Assert.Equal(CompressionType.Rar, reader.Entry.CompressionType);
await reader.WriteEntryToDirectoryAsync(
@@ -271,7 +279,7 @@ public class RarReaderAsyncTests : ReaderTests
using (var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Rar.jpeg.jpg")))
using (var reader = RarReader.Open(stream, new ReaderOptions { LookForHeader = true }))
{
while (reader.MoveToNextEntry())
while (await reader.MoveToNextEntryAsync())
{
Assert.Equal(CompressionType.Rar, reader.Entry.CompressionType);
await reader.WriteEntryToDirectoryAsync(
@@ -310,8 +318,11 @@ public class RarReaderAsyncTests : ReaderTests
private async Task DoRar_Solid_Skip_Reader_Async(string filename)
{
using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, filename));
using var reader = ReaderFactory.Open(stream, new ReaderOptions { LookForHeader = true });
while (reader.MoveToNextEntry())
using var reader = await ReaderFactory.OpenAsync(
stream,
new ReaderOptions { LookForHeader = true }
);
while (await reader.MoveToNextEntryAsync())
{
if (reader.Entry.Key.NotNull().Contains("jpg"))
{
@@ -332,9 +343,14 @@ public class RarReaderAsyncTests : ReaderTests
private async Task DoRar_Reader_Skip_Async(string filename)
{
using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, filename));
using var reader = ReaderFactory.Open(stream, new ReaderOptions { LookForHeader = true });
while (reader.MoveToNextEntry())
using var stream = new AsyncOnlyStream(
File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, filename))
);
using var reader = await ReaderFactory.OpenAsync(
stream,
new ReaderOptions { LookForHeader = true }
);
while (await reader.MoveToNextEntryAsync())
{
if (reader.Entry.Key.NotNull().Contains("jpg"))
{
@@ -354,8 +370,11 @@ public class RarReaderAsyncTests : ReaderTests
)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
using Stream stream = File.OpenRead(testArchive);
using var reader = ReaderFactory.Open(stream, readerOptions ?? new ReaderOptions());
using Stream stream = new AsyncOnlyStream(File.OpenRead(testArchive));
using var reader = await ReaderFactory.OpenAsync(
stream,
readerOptions ?? new ReaderOptions()
);
while (await reader.MoveToNextEntryAsync())
{
if (!reader.Entry.IsDirectory)

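Several of the test changes above and below wrap their input in AsyncOnlyStream from SharpCompress.Test.Mocks, whose source is not part of this compare view. Presumably it is a pass-through wrapper that throws on synchronous reads, so a test fails if any code path silently falls back to sync I/O (synchronous writes apparently remain allowed, since some tests still fill the wrapped stream with BinaryWriter). A minimal sketch of such a wrapper, with the name and behaviour assumed rather than taken from the repository:

using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;

// Hypothetical stand-in for SharpCompress.Test.Mocks.AsyncOnlyStream.
public class AsyncOnlyStreamSketch : Stream
{
    private readonly Stream _inner;

    public AsyncOnlyStreamSketch(Stream inner) => _inner = inner;

    public override bool CanRead => _inner.CanRead;
    public override bool CanSeek => _inner.CanSeek;
    public override bool CanWrite => _inner.CanWrite;
    public override long Length => _inner.Length;
    public override long Position
    {
        get => _inner.Position;
        set => _inner.Position = value;
    }

    // Synchronous reads are rejected so regressions to sync I/O show up as test failures.
    public override int Read(byte[] buffer, int offset, int count) =>
        throw new NotSupportedException("Use ReadAsync.");

    public override Task<int> ReadAsync(
        byte[] buffer,
        int offset,
        int count,
        CancellationToken cancellationToken
    ) => _inner.ReadAsync(buffer, offset, count, cancellationToken);

    public override void Write(byte[] buffer, int offset, int count) =>
        _inner.Write(buffer, offset, count);

    public override Task WriteAsync(
        byte[] buffer,
        int offset,
        int count,
        CancellationToken cancellationToken
    ) => _inner.WriteAsync(buffer, offset, count, cancellationToken);

    public override void Flush() => _inner.Flush();

    public override long Seek(long offset, SeekOrigin origin) => _inner.Seek(offset, origin);

    public override void SetLength(long value) => _inner.SetLength(value);

    protected override void Dispose(bool disposing)
    {
        if (disposing)
        {
            _inner.Dispose();
        }
        base.Dispose(disposing);
    }
}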
View File

@@ -3,6 +3,7 @@ using System.Buffers;
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Test.Mocks;
using Xunit;
namespace SharpCompress.Test.Streams;
@@ -14,7 +15,7 @@ public class LzmaStreamAsyncTests
{
var properties = new byte[] { 0x01 };
var compressedData = new byte[] { 0x01, 0x00, 0x00, 0x58, 0x00 };
var lzma2Stream = new MemoryStream(compressedData);
var lzma2Stream = new AsyncOnlyStream(new MemoryStream(compressedData));
var decompressor = new LzmaStream(properties, lzma2Stream, 5, 1);
var buffer = new byte[1];

View File

@@ -1,6 +1,7 @@
using System.IO;
using System.Threading.Tasks;
using SharpCompress.IO;
using SharpCompress.Test.Mocks;
using Xunit;
namespace SharpCompress.Test.Streams;
@@ -10,7 +11,7 @@ public class RewindableStreamAsyncTest
[Fact]
public async Task TestRewindAsync()
{
var ms = new MemoryStream();
var ms = new AsyncOnlyStream(new MemoryStream());
var bw = new BinaryWriter(ms);
bw.Write(1);
bw.Write(2);
@@ -48,7 +49,7 @@ public class RewindableStreamAsyncTest
[Fact]
public async Task TestIncompleteRewindAsync()
{
var ms = new MemoryStream();
var ms = new AsyncOnlyStream(new MemoryStream());
var bw = new BinaryWriter(ms);
bw.Write(1);
bw.Write(2);

View File

@@ -6,6 +6,7 @@ using System.Text;
using System.Threading.Tasks;
using SharpCompress.Compressors.LZMA;
using SharpCompress.IO;
using SharpCompress.Test.Mocks;
using Xunit;
namespace SharpCompress.Test.Streams;
@@ -22,6 +23,7 @@ public class SharpCompressStreamAsyncTests
bw.Write(i);
}
}
ms.Flush();
ms.Position = 0;
}
@@ -30,11 +32,12 @@ public class SharpCompressStreamAsyncTests
{
byte[] data = new byte[0x100000];
byte[] test = new byte[0x1000];
using (MemoryStream ms = new MemoryStream(data))
using (var ms = new MemoryStream(data))
{
CreateData(ms);
using (SharpCompressStream scs = new SharpCompressStream(ms, true, false, 0x10000))
using var aos = new AsyncOnlyStream(ms);
using (SharpCompressStream scs = new SharpCompressStream(aos, true, false, 0x10000))
{
scs.Seek(0x1000, SeekOrigin.Begin);
Assert.Equal(0x1000, scs.Position); // position in the SharpCompressionStream
@@ -63,7 +66,8 @@ public class SharpCompressStreamAsyncTests
{
CreateData(ms);
using (SharpCompressStream scs = new SharpCompressStream(ms, true, false, 0x10000))
using var aos = new AsyncOnlyStream(ms);
using (SharpCompressStream scs = new SharpCompressStream(aos, true, false, 0x10000))
{
IStreamStack stack = (IStreamStack)scs;
@@ -99,7 +103,8 @@ public class SharpCompressStreamAsyncTests
{
CreateData(ms);
using (SharpCompressStream scs = new SharpCompressStream(ms, true, false, 0x10000))
using var aos = new AsyncOnlyStream(ms);
using (SharpCompressStream scs = new SharpCompressStream(aos, true, false, 0x10000))
{
// Read first chunk
await scs.ReadAsync(test1, 0, test1.Length);
@@ -123,7 +128,8 @@ public class SharpCompressStreamAsyncTests
{
CreateData(ms);
using (SharpCompressStream scs = new SharpCompressStream(ms, true, false, 0x10000))
using var aos = new AsyncOnlyStream(ms);
using (SharpCompressStream scs = new SharpCompressStream(aos, true, false, 0x10000))
{
for (int i = 0; i < 10; i++)
{

View File

@@ -5,6 +5,7 @@ using AwesomeAssertions;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;
using SharpCompress.IO;
using SharpCompress.Test.Mocks;
using Xunit;
namespace SharpCompress.Test.Streams;
@@ -40,7 +41,8 @@ public class ZLibBaseStreamAsyncTests
var buf = new byte[plainData.Length * 2];
var plainStream1 = new MemoryStream(plainData);
var compressor1 = new DeflateStream(plainStream1, CompressionMode.Compress);
using var aos = new AsyncOnlyStream(plainStream1);
var compressor1 = new DeflateStream(aos, CompressionMode.Compress);
// This is enough to read the entire data
var realCompressedSize = await compressor1
.ReadAsync(buf, 0, plainData.Length * 2)
@@ -67,14 +69,16 @@ public class ZLibBaseStreamAsyncTests
var bytes = Encoding.ASCII.GetBytes(message);
using var inputStream = new MemoryStream(bytes);
using var aos = new AsyncOnlyStream(inputStream);
using var compressedStream = new MemoryStream();
using var byteBufferStream = new BufferedStream(inputStream); // System.IO
using var byteBufferStream = new BufferedStream(aos); // System.IO
await CompressAsync(byteBufferStream, compressedStream, compressionLevel: 1)
.ConfigureAwait(false);
compressedStream.Position = 0;
using var decompressedStream = new MemoryStream();
await DecompressAsync(compressedStream, decompressedStream).ConfigureAwait(false);
using var aos2 = new AsyncOnlyStream(decompressedStream);
await DecompressAsync(compressedStream, aos2).ConfigureAwait(false);
byteBufferStream.Position = 0;
var result = Encoding.ASCII.GetString(