Merge pull request #1131 from adamhathcock/adam/async-again

More test fixes and some perf changes
This commit is contained in:
Adam Hathcock
2026-01-15 16:20:25 +00:00
committed by GitHub
24 changed files with 491 additions and 166 deletions

View File

@@ -48,6 +48,20 @@ public partial class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>, I
}
}
/// <summary>
/// Asynchronously disposes the archive: releases the lazily created V1 unpacker
/// (when it was materialized and is disposable) before delegating to the base
/// implementation. Safe to call multiple times; only the first call does work.
/// </summary>
public override async ValueTask DisposeAsync()
{
    if (!_disposed)
    {
        // Only touch the unpacker if it was actually created; forcing Lazy.Value
        // here would needlessly allocate one just to dispose it.
        if (UnpackV1.IsValueCreated && UnpackV1.Value is IDisposable unpackV1)
        {
            unpackV1.Dispose();
        }
        _disposed = true;
        // Library code: avoid capturing the caller's synchronization context,
        // consistent with the other awaits in this code base.
        await base.DisposeAsync().ConfigureAwait(false);
    }
}
protected override IEnumerable<RarArchiveEntry> LoadEntries(IEnumerable<RarVolume> volumes) =>
RarArchiveEntryFactory.GetEntries(this, volumes, ReaderOptions);

View File

@@ -51,11 +51,19 @@ namespace SharpCompress.Common
return BinaryPrimitives.ReadUInt64LittleEndian(_buffer);
}
public async ValueTask<byte[]> ReadBytesAsync(int count, CancellationToken ct = default)
public async ValueTask ReadBytesAsync(
byte[] bytes,
int offset,
int count,
CancellationToken ct = default
)
{
var result = new byte[count];
await _stream.ReadExactAsync(result, 0, count, ct).ConfigureAwait(false);
return result;
await _stream.ReadExactAsync(bytes, offset, count, ct).ConfigureAwait(false);
}
/// <summary>
/// Asynchronously advances the underlying stream past <paramref name="count"/> bytes.
/// </summary>
public async ValueTask SkipAsync(int count, CancellationToken ct = default)
{
    var pending = _stream.SkipAsync(count, ct);
    await pending.ConfigureAwait(false);
}
public void Dispose()

View File

@@ -29,7 +29,8 @@ internal class DirectoryEndHeader : ZipHeader
DirectorySize = await reader.ReadUInt32Async();
DirectoryStartOffsetRelativeToDisk = await reader.ReadUInt32Async();
CommentLength = await reader.ReadUInt16Async();
Comment = await reader.ReadBytesAsync(CommentLength);
Comment = new byte[CommentLength];
await reader.ReadBytesAsync(Comment, 0, CommentLength);
}
public ushort VolumeNumber { get; private set; }

View File

@@ -53,10 +53,12 @@ internal class DirectoryEntryHeader : ZipFileEntry
InternalFileAttributes = await reader.ReadUInt16Async();
ExternalFileAttributes = await reader.ReadUInt32Async();
RelativeOffsetOfEntryHeader = await reader.ReadUInt32Async();
var name = await reader.ReadBytesAsync(nameLength);
var extra = await reader.ReadBytesAsync(extraLength);
var comment = await reader.ReadBytesAsync(commentLength);
var name = new byte[nameLength];
var extra = new byte[extraLength];
var comment = new byte[commentLength];
await reader.ReadBytesAsync(name, 0, nameLength);
await reader.ReadBytesAsync(extra, 0, extraLength);
await reader.ReadBytesAsync(comment, 0, commentLength);
ProcessReadData(name, extra, comment);
}

View File

@@ -37,8 +37,10 @@ internal class LocalEntryHeader(IArchiveEncoding archiveEncoding)
UncompressedSize = await reader.ReadUInt32Async();
var nameLength = await reader.ReadUInt16Async();
var extraLength = await reader.ReadUInt16Async();
var name = await reader.ReadBytesAsync(nameLength);
var extra = await reader.ReadBytesAsync(extraLength);
var name = new byte[nameLength];
var extra = new byte[extraLength];
await reader.ReadBytesAsync(name, 0, nameLength);
await reader.ReadBytesAsync(extra, 0, extraLength);
ProcessReadData(name, extra);
}

View File

@@ -38,12 +38,11 @@ internal class Zip64DirectoryEndHeader : ZipHeader
TotalNumberOfEntries = (long)await reader.ReadUInt64Async();
DirectorySize = (long)await reader.ReadUInt64Async();
DirectoryStartOffsetRelativeToDisk = (long)await reader.ReadUInt64Async();
DataSector = await reader.ReadBytesAsync(
(int)(
SizeOfDirectoryEndRecord
- SIZE_OF_FIXED_HEADER_DATA_EXCEPT_SIGNATURE_AND_SIZE_FIELDS
)
var size = (int)(
SizeOfDirectoryEndRecord - SIZE_OF_FIXED_HEADER_DATA_EXCEPT_SIGNATURE_AND_SIZE_FIELDS
);
DataSector = new byte[size];
await reader.ReadBytesAsync(DataSector, 0, size);
}
private const int SIZE_OF_FIXED_HEADER_DATA_EXCEPT_SIGNATURE_AND_SIZE_FIELDS = 44;

View File

@@ -1,4 +1,5 @@
using System;
using System.Buffers;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
@@ -16,6 +17,8 @@ internal sealed class SeekableZipHeaderFactory : ZipHeaderFactory
private const int MAX_SEARCH_LENGTH_FOR_EOCD = 65557;
private bool _zip64;
private static readonly byte[] needle = { 0x06, 0x05, 0x4b, 0x50 };
internal SeekableZipHeaderFactory(string? password, IArchiveEncoding archiveEncoding)
: base(StreamingMode.Seekable, password, archiveEncoding) { }
@@ -153,74 +156,7 @@ internal sealed class SeekableZipHeaderFactory : ZipHeaderFactory
}
}
internal async IAsyncEnumerable<ZipHeader> ReadSeekableHeaderAsync(Stream stream, bool useSync)
{
var reader = new AsyncBinaryReader(stream);
await SeekBackToHeaderAsync(stream, reader);
var eocd_location = stream.Position;
var entry = new DirectoryEndHeader();
await entry.Read(reader);
if (entry.IsZip64)
{
_zip64 = true;
// ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR should be before the EOCD
stream.Seek(eocd_location - ZIP64_EOCD_LENGTH - 4, SeekOrigin.Begin);
var zip64_locator = await reader.ReadUInt32Async();
if (zip64_locator != ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR)
{
throw new ArchiveException("Failed to locate the Zip64 Directory Locator");
}
var zip64Locator = new Zip64DirectoryEndLocatorHeader();
await zip64Locator.Read(reader);
stream.Seek(zip64Locator.RelativeOffsetOfTheEndOfDirectoryRecord, SeekOrigin.Begin);
var zip64Signature = await reader.ReadUInt32Async();
if (zip64Signature != ZIP64_END_OF_CENTRAL_DIRECTORY)
{
throw new ArchiveException("Failed to locate the Zip64 Header");
}
var zip64Entry = new Zip64DirectoryEndHeader();
await zip64Entry.Read(reader);
stream.Seek(zip64Entry.DirectoryStartOffsetRelativeToDisk, SeekOrigin.Begin);
}
else
{
stream.Seek(entry.DirectoryStartOffsetRelativeToDisk, SeekOrigin.Begin);
}
var position = stream.Position;
while (true)
{
stream.Position = position;
var signature = await reader.ReadUInt32Async();
var nextHeader = await ReadHeader(signature, reader, _zip64);
position = stream.Position;
if (nextHeader is null)
{
yield break;
}
if (nextHeader is DirectoryEntryHeader entryHeader)
{
//entry could be zero bytes so we need to know that.
entryHeader.HasData = entryHeader.CompressedSize != 0;
yield return entryHeader;
}
else if (nextHeader is DirectoryEndHeader endHeader)
{
yield return endHeader;
}
}
}
private static bool IsMatch(byte[] haystack, int position, byte[] needle)
private static bool IsMatch(Span<byte> haystack, int position, byte[] needle)
{
for (var i = 0; i < needle.Length; i++)
{
@@ -247,29 +183,35 @@ internal sealed class SeekableZipHeaderFactory : ZipHeaderFactory
stream.Length < MAX_SEARCH_LENGTH_FOR_EOCD
? (int)stream.Length
: MAX_SEARCH_LENGTH_FOR_EOCD;
// We search for the marker in reverse to find the first occurrence
byte[] needle = { 0x06, 0x05, 0x4b, 0x50 };
stream.Seek(-len, SeekOrigin.End);
var seek = ArrayPool<byte>.Shared.Rent(len);
var seek = await reader.ReadBytesAsync(len);
// Search in reverse
Array.Reverse(seek);
// don't exclude the minimum eocd region, otherwise you fail to locate the header in empty zip files
var max_search_area = len; // - MINIMUM_EOCD_LENGTH;
for (var pos_from_end = 0; pos_from_end < max_search_area; ++pos_from_end)
try
{
if (IsMatch(seek, pos_from_end, needle))
{
stream.Seek(-pos_from_end, SeekOrigin.End);
return;
}
}
await reader.ReadBytesAsync(seek, 0, len, default);
var memory = new Memory<byte>(seek, 0, len);
var span = memory.Span;
span.Reverse();
throw new ArchiveException("Failed to locate the Zip Header");
// don't exclude the minimum eocd region, otherwise you fail to locate the header in empty zip files
var max_search_area = len; // - MINIMUM_EOCD_LENGTH;
for (var pos_from_end = 0; pos_from_end < max_search_area; ++pos_from_end)
{
if (IsMatch(span, pos_from_end, needle))
{
stream.Seek(-pos_from_end, SeekOrigin.End);
return;
}
}
throw new ArchiveException("Failed to locate the Zip Header");
}
finally
{
ArrayPool<byte>.Shared.Return(seek);
}
}
private static void SeekBackToHeader(Stream stream, BinaryReader reader)
@@ -286,9 +228,6 @@ internal sealed class SeekableZipHeaderFactory : ZipHeaderFactory
stream.Length < MAX_SEARCH_LENGTH_FOR_EOCD
? (int)stream.Length
: MAX_SEARCH_LENGTH_FOR_EOCD;
// We search for the marker in reverse to find the first occurrence
byte[] needle = { 0x06, 0x05, 0x4b, 0x50 };
stream.Seek(-len, SeekOrigin.End);
var seek = reader.ReadBytes(len);

View File

@@ -79,7 +79,7 @@ internal class ZipHeaderFactory
}
else
{
await reader.ReadBytesAsync(zip64 ? 20 : 12);
await reader.SkipAsync(zip64 ? 20 : 12);
}
return null;
}

View File

@@ -64,6 +64,7 @@
// -----------------------------------------------------------------------
using System;
using System.Buffers;
using SharpCompress.Algorithms;
namespace SharpCompress.Compressors.Deflate;
@@ -116,14 +117,14 @@ internal sealed class InflateBlocks
internal int readAt; // window read pointer
internal int table; // table lengths (14 bits)
internal int[] tb = new int[1]; // bit length decoding tree
internal byte[] window; // sliding window
internal IMemoryOwner<byte> window; // sliding window
internal int writeAt; // window write pointer
internal InflateBlocks(ZlibCodec codec, object checkfn, int w)
{
_codec = codec;
hufts = new int[MANY * 3];
window = new byte[w];
window = MemoryPool<byte>.Shared.Rent(w);
end = w;
this.checkfn = checkfn;
mode = InflateBlockMode.TYPE;
@@ -340,7 +341,7 @@ internal sealed class InflateBlocks
{
t = m;
}
Array.Copy(_codec.InputBuffer, p, window, q, t);
_codec.InputBuffer.AsSpan(p, t).CopyTo(window.Memory.Span.Slice(q));
p += t;
n -= t;
q += t;
@@ -715,13 +716,14 @@ internal sealed class InflateBlocks
internal void Free()
{
Reset();
window?.Dispose();
window = null;
hufts = null;
}
internal void SetDictionary(byte[] d, int start, int n)
{
Array.Copy(d, start, window, 0, n);
d.AsSpan(start, n).CopyTo(window.Memory.Span.Slice(0, n));
readAt = writeAt = n;
}
@@ -774,11 +776,16 @@ internal sealed class InflateBlocks
// update check information
if (checkfn != null)
{
_codec._adler32 = check = Adler32.Calculate(check, window.AsSpan(readAt, nBytes));
_codec._adler32 = check = Adler32.Calculate(
check,
window.Memory.Span.Slice(readAt, nBytes)
);
}
// copy as far as end of window
Array.Copy(window, readAt, _codec.OutputBuffer, _codec.NextOut, nBytes);
window
.Memory.Span.Slice(readAt, nBytes)
.CopyTo(_codec.OutputBuffer.AsSpan(_codec.NextOut));
_codec.NextOut += nBytes;
readAt += nBytes;
@@ -1213,7 +1220,7 @@ internal sealed class InflateCodes
}
}
blocks.window[q++] = blocks.window[f++];
blocks.window.Memory.Span[q++] = blocks.window.Memory.Span[f++];
m--;
if (f == blocks.end)
@@ -1259,7 +1266,7 @@ internal sealed class InflateCodes
}
r = ZlibConstants.Z_OK;
blocks.window[q++] = (byte)lit;
blocks.window.Memory.Span[q++] = (byte)lit;
m--;
mode = START;
@@ -1396,7 +1403,7 @@ internal sealed class InflateCodes
b >>= (tp[tp_index_t_3 + 1]);
k -= (tp[tp_index_t_3 + 1]);
s.window[q++] = (byte)tp[tp_index_t_3 + 2];
s.window.Memory.Span[q++] = (byte)tp[tp_index_t_3 + 2];
m--;
continue;
}
@@ -1461,13 +1468,14 @@ internal sealed class InflateCodes
r = q - d;
if (q - r > 0 && 2 > (q - r))
{
s.window[q++] = s.window[r++]; // minimum count is three,
s.window[q++] = s.window[r++]; // so unroll loop a little
s.window.Memory.Span[q++] = s.window.Memory.Span[r++]; // minimum count is three,
s.window.Memory.Span[q++] = s.window.Memory.Span[r++]; // so unroll loop a little
c -= 2;
}
else
{
Array.Copy(s.window, r, s.window, q, 2);
s.window.Memory.Span.Slice(r, 2)
.CopyTo(s.window.Memory.Span.Slice(q));
q += 2;
r += 2;
c -= 2;
@@ -1490,12 +1498,13 @@ internal sealed class InflateCodes
{
do
{
s.window[q++] = s.window[r++];
s.window.Memory.Span[q++] = s.window.Memory.Span[r++];
} while (--e != 0);
}
else
{
Array.Copy(s.window, r, s.window, q, e);
s.window.Memory.Span.Slice(r, e)
.CopyTo(s.window.Memory.Span.Slice(q));
q += e;
r += e;
e = 0;
@@ -1509,12 +1518,13 @@ internal sealed class InflateCodes
{
do
{
s.window[q++] = s.window[r++];
s.window.Memory.Span[q++] = s.window.Memory.Span[r++];
} while (--c != 0);
}
else
{
Array.Copy(s.window, r, s.window, q, c);
s.window.Memory.Span.Slice(r, c)
.CopyTo(s.window.Memory.Span.Slice(q));
q += c;
r += c;
c = 0;
@@ -1560,7 +1570,7 @@ internal sealed class InflateCodes
{
b >>= (tp[tp_index_t_3 + 1]);
k -= (tp[tp_index_t_3 + 1]);
s.window[q++] = (byte)tp[tp_index_t_3 + 2];
s.window.Memory.Span[q++] = (byte)tp[tp_index_t_3 + 2];
m--;
break;
}

View File

@@ -87,10 +87,10 @@ internal class RarStream : Stream, IStreamStack
#endif
ArrayPool<byte>.Shared.Return(this.tmpBuffer);
this.tmpBuffer = null;
readStream.Dispose();
}
isDisposed = true;
base.Dispose(disposing);
readStream.Dispose();
}
}

View File

@@ -126,17 +126,12 @@ internal sealed partial class Unpack : BitInput, IRarUnpack
private FileHeader fileHeader;
private void Init(byte[] window)
private void Init()
{
if (this.window is null && window is null)
if (this.window is null)
{
this.window = ArrayPool<byte>.Shared.Rent(PackDef.MAXWINSIZE);
}
else if (window is not null)
{
this.window = window;
externalWindow = true;
}
inAddr = 0;
UnpInitData(false);
}
@@ -149,7 +144,7 @@ internal sealed partial class Unpack : BitInput, IRarUnpack
this.writeStream = writeStream;
if (!fileHeader.IsSolid)
{
Init(null);
Init();
}
suspended = false;
DoUnpack();
@@ -168,7 +163,7 @@ internal sealed partial class Unpack : BitInput, IRarUnpack
this.writeStream = writeStream;
if (!fileHeader.IsSolid)
{
Init(null);
Init();
}
suspended = false;
await DoUnpackAsync(cancellationToken).ConfigureAwait(false);

View File

@@ -1,4 +1,5 @@
using System;
using System.Buffers;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
@@ -28,13 +29,16 @@ internal class BufferedSubStream : SharpCompressStream, IStreamStack
#if DEBUG_STREAMS
this.DebugDispose(typeof(BufferedSubStream));
#endif
if (disposing) { }
if (disposing)
{
ArrayPool<byte>.Shared.Return(_cache);
}
base.Dispose(disposing);
}
private int _cacheOffset;
private int _cacheLength;
private readonly byte[] _cache = new byte[32 << 10];
private readonly byte[] _cache = ArrayPool<byte>.Shared.Rent(32 << 10);
private long origin;
private long BytesLeftToRead { get; set; }

View File

@@ -1,8 +1,6 @@
using System;
using System.Buffers;
using System.Diagnostics;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;

View File

@@ -0,0 +1,145 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
using SharpCompress.Readers.Ace;
using SharpCompress.Test.Mocks;
using Xunit;
namespace SharpCompress.Test.Ace
{
    /// <summary>
    /// Async reader tests for ACE archives: stored (uncompressed) entries must
    /// extract, while compressed, encrypted and multi-volume inputs must throw.
    /// </summary>
    public class AceReaderAsyncTests : ReaderTests
    {
        public AceReaderAsyncTests()
        {
            // ACE fixtures are verified by extension, case-insensitively.
            UseExtensionInsteadOfNameToVerify = true;
            UseCaseInsensitiveToVerify = true;
        }

        [Fact]
        public async ValueTask Ace_Uncompressed_Read_Async() =>
            await ReadAsync("Ace.store.ace", CompressionType.None);

        [Fact]
        public async ValueTask Ace_Encrypted_Read_Async() =>
            // Encrypted ACE input is unsupported; the reader must throw.
            await Assert.ThrowsAsync<CryptographicException>(() =>
                ReadAsync("Ace.encrypted.ace")
            );

        [Theory]
        [InlineData("Ace.method1.ace", CompressionType.AceLZ77)]
        [InlineData("Ace.method1-solid.ace", CompressionType.AceLZ77)]
        [InlineData("Ace.method2.ace", CompressionType.AceLZ77)]
        [InlineData("Ace.method2-solid.ace", CompressionType.AceLZ77)]
        public async ValueTask Ace_Unsupported_ShouldThrow_Async(
            string fileName,
            CompressionType compressionType
        ) =>
            await Assert.ThrowsAsync<NotSupportedException>(() =>
                ReadAsync(fileName, compressionType)
            );

        [Theory]
        [InlineData("Ace.store.largefile.ace", CompressionType.None)]
        public async ValueTask Ace_LargeFileTest_Read_Async(
            string fileName,
            CompressionType compressionType
        ) => await ReadForBufferBoundaryCheckAsync(fileName, compressionType);

        [Fact]
        public async ValueTask Ace_Multi_Reader_Async() =>
            // A forward-only reader cannot follow split volumes.
            await Assert.ThrowsAsync<MultiVolumeExtractionException>(() =>
                DoMultiReaderAsync(new[] { "Ace.store.split.ace", "Ace.store.split.c01" })
            );

        /// <summary>
        /// Opens <paramref name="testArchive"/> through an async-only stream wrapper,
        /// extracts every file entry and verifies the results. When
        /// <paramref name="expectedCompression"/> is supplied, each entry's
        /// compression type is asserted as well.
        /// </summary>
        private async Task ReadAsync(
            string testArchive,
            CompressionType? expectedCompression = null
        )
        {
            testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
            using Stream stream = File.OpenRead(testArchive);
            await using var reader = ReaderFactory.OpenAsyncReader(
                new AsyncOnlyStream(stream),
                new ReaderOptions()
            );
            while (await reader.MoveToNextEntryAsync())
            {
                if (!reader.Entry.IsDirectory)
                {
                    if (expectedCompression.HasValue)
                    {
                        Assert.Equal(expectedCompression.Value, reader.Entry.CompressionType);
                    }
                    await reader.WriteEntryToDirectoryAsync(
                        SCRATCH_FILES_PATH,
                        new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
                    );
                }
            }
            VerifyFiles();
        }

        /// <summary>
        /// Same as <see cref="ReadAsync"/> but with header lookahead disabled so
        /// reads land exactly on internal buffer boundaries.
        /// </summary>
        private async Task ReadForBufferBoundaryCheckAsync(
            string testArchive,
            CompressionType expectedCompression
        )
        {
            testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
            using Stream stream = File.OpenRead(testArchive);
            await using var reader = ReaderFactory.OpenAsyncReader(
                new AsyncOnlyStream(stream),
                new ReaderOptions() { LookForHeader = false }
            );
            while (await reader.MoveToNextEntryAsync())
            {
                if (!reader.Entry.IsDirectory)
                {
                    Assert.Equal(expectedCompression, reader.Entry.CompressionType);
                    await reader.WriteEntryToDirectoryAsync(
                        SCRATCH_FILES_PATH,
                        new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
                    );
                }
            }
            VerifyFiles();
        }

        /// <summary>
        /// Extracts from the first of several split volumes; every opened stream is
        /// disposed even when extraction throws.
        /// </summary>
        private async Task DoMultiReaderAsync(string[] archiveNames)
        {
            var testArchives = archiveNames
                .Select(s => Path.Combine(TEST_ARCHIVES_PATH, s))
                .ToList();
            var streams = testArchives.Select(File.OpenRead).ToList();
            try
            {
                await using var reader = ReaderFactory.OpenAsyncReader(
                    new AsyncOnlyStream(streams.First())
                );
                while (await reader.MoveToNextEntryAsync())
                {
                    if (!reader.Entry.IsDirectory)
                    {
                        await reader.WriteEntryToDirectoryAsync(
                            SCRATCH_FILES_PATH,
                            new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
                        );
                    }
                }
            }
            finally
            {
                foreach (var stream in streams)
                {
                    stream.Dispose();
                }
            }
        }
    }
}

View File

@@ -0,0 +1,176 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
using SharpCompress.Readers.Arj;
using SharpCompress.Test.Mocks;
using Xunit;
using Xunit.Sdk;
namespace SharpCompress.Test.Arj
{
    /// <summary>
    /// Async reader tests for ARJ archives covering stored and LZ77-compressed
    /// methods, plus failure modes: encryption, unsupported large-file methods
    /// and multi-volume extraction.
    /// </summary>
    public class ArjReaderAsyncTests : ReaderTests
    {
        public ArjReaderAsyncTests()
        {
            // ARJ fixtures are verified by extension, case-insensitively.
            UseExtensionInsteadOfNameToVerify = true;
            UseCaseInsensitiveToVerify = true;
        }

        [Fact]
        public async ValueTask Arj_Uncompressed_Read_Async() =>
            await ReadAsync("Arj.store.arj", CompressionType.None);

        [Fact]
        public async ValueTask Arj_Method1_Read_Async() => await ReadAsync("Arj.method1.arj");

        [Fact]
        public async ValueTask Arj_Method2_Read_Async() => await ReadAsync("Arj.method2.arj");

        [Fact]
        public async ValueTask Arj_Method3_Read_Async() => await ReadAsync("Arj.method3.arj");

        [Fact]
        public async ValueTask Arj_Method4_Read_Async() => await ReadAsync("Arj.method4.arj");

        [Fact]
        public async ValueTask Arj_Encrypted_Read_Async() =>
            // Encrypted ARJ input is unsupported; the reader must throw.
            await Assert.ThrowsAsync<CryptographicException>(() =>
                ReadAsync("Arj.encrypted.arj")
            );

        [Fact]
        public async ValueTask Arj_Multi_Reader_Async() =>
            // A forward-only reader cannot follow split volumes.
            await Assert.ThrowsAsync<MultiVolumeExtractionException>(() =>
                DoMultiReaderAsync(
                    [
                        "Arj.store.split.arj",
                        "Arj.store.split.a01",
                        "Arj.store.split.a02",
                        "Arj.store.split.a03",
                        "Arj.store.split.a04",
                        "Arj.store.split.a05",
                    ],
                    streams => ReaderFactory.OpenAsyncReader(new AsyncOnlyStream(streams.First()))
                )
            );

        [Theory]
        [InlineData("Arj.method1.largefile.arj", CompressionType.ArjLZ77)]
        [InlineData("Arj.method2.largefile.arj", CompressionType.ArjLZ77)]
        [InlineData("Arj.method3.largefile.arj", CompressionType.ArjLZ77)]
        public async ValueTask Arj_LargeFile_ShouldThrow_Async(
            string fileName,
            CompressionType compressionType
        ) =>
            await Assert.ThrowsAsync<NotSupportedException>(() =>
                ReadForBufferBoundaryCheckAsync(fileName, compressionType)
            );

        [Theory]
        [InlineData("Arj.store.largefile.arj", CompressionType.None)]
        [InlineData("Arj.method4.largefile.arj", CompressionType.ArjLZ77)]
        public async ValueTask Arj_LargeFileTest_Read_Async(
            string fileName,
            CompressionType compressionType
        ) => await ReadForBufferBoundaryCheckAsync(fileName, compressionType);

        /// <summary>
        /// Opens <paramref name="testArchive"/> through an async-only stream wrapper,
        /// extracts every file entry and verifies the results. When
        /// <paramref name="expectedCompression"/> is supplied, each entry's
        /// compression type is asserted as well.
        /// </summary>
        private async Task ReadAsync(
            string testArchive,
            CompressionType? expectedCompression = null
        )
        {
            testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
            using Stream stream = File.OpenRead(testArchive);
            await using var reader = ReaderFactory.OpenAsyncReader(
                new AsyncOnlyStream(stream),
                new ReaderOptions()
            );
            while (await reader.MoveToNextEntryAsync())
            {
                if (!reader.Entry.IsDirectory)
                {
                    if (expectedCompression.HasValue)
                    {
                        Assert.Equal(expectedCompression.Value, reader.Entry.CompressionType);
                    }
                    await reader.WriteEntryToDirectoryAsync(
                        SCRATCH_FILES_PATH,
                        new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
                    );
                }
            }
            VerifyFiles();
        }

        /// <summary>
        /// Same as <see cref="ReadAsync"/> but with header lookahead disabled so
        /// reads land exactly on internal buffer boundaries.
        /// </summary>
        private async Task ReadForBufferBoundaryCheckAsync(
            string testArchive,
            CompressionType expectedCompression
        )
        {
            testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
            using Stream stream = File.OpenRead(testArchive);
            await using var reader = ReaderFactory.OpenAsyncReader(
                new AsyncOnlyStream(stream),
                new ReaderOptions() { LookForHeader = false }
            );
            while (await reader.MoveToNextEntryAsync())
            {
                if (!reader.Entry.IsDirectory)
                {
                    Assert.Equal(expectedCompression, reader.Entry.CompressionType);
                    await reader.WriteEntryToDirectoryAsync(
                        SCRATCH_FILES_PATH,
                        new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
                    );
                }
            }
            VerifyFiles();
        }

        /// <summary>
        /// Extracts via a reader produced by <paramref name="openReader"/> from the
        /// opened volume streams; every opened stream is disposed even when
        /// extraction throws.
        /// </summary>
        private async Task DoMultiReaderAsync(
            string[] archiveNames,
            Func<IEnumerable<Stream>, IAsyncReader> openReader
        )
        {
            var testArchives = archiveNames
                .Select(s => Path.Combine(TEST_ARCHIVES_PATH, s))
                .ToList();
            var streams = testArchives.Select(File.OpenRead).ToList();
            try
            {
                await using var reader = openReader(streams);
                while (await reader.MoveToNextEntryAsync())
                {
                    if (!reader.Entry.IsDirectory)
                    {
                        await reader.WriteEntryToDirectoryAsync(
                            SCRATCH_FILES_PATH,
                            new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
                        );
                    }
                }
            }
            finally
            {
                foreach (var stream in streams)
                {
                    stream.Dispose();
                }
            }
        }
    }
}

View File

@@ -4,6 +4,7 @@ using System.IO;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Compressors.BZip2;
using SharpCompress.Test.Mocks;
using Xunit;
namespace SharpCompress.Test.BZip2;
@@ -32,7 +33,7 @@ public class BZip2StreamAsyncTests
{
using (
var bzip2Stream = new BZip2Stream(
memoryStream,
new AsyncOnlyStream(memoryStream),
SharpCompress.Compressors.CompressionMode.Compress,
false
)
@@ -54,7 +55,7 @@ public class BZip2StreamAsyncTests
{
using (
var bzip2Stream = new BZip2Stream(
memoryStream,
new AsyncOnlyStream(memoryStream),
SharpCompress.Compressors.CompressionMode.Decompress,
false
)
@@ -93,7 +94,7 @@ public class BZip2StreamAsyncTests
{
using (
var bzip2Stream = new BZip2Stream(
memoryStream,
new AsyncOnlyStream(memoryStream),
SharpCompress.Compressors.CompressionMode.Compress,
false
)
@@ -110,7 +111,7 @@ public class BZip2StreamAsyncTests
{
using (
var bzip2Stream = new BZip2Stream(
memoryStream,
new AsyncOnlyStream(memoryStream),
SharpCompress.Compressors.CompressionMode.Decompress,
false
)
@@ -133,7 +134,7 @@ public class BZip2StreamAsyncTests
{
using (
var bzip2Stream = new BZip2Stream(
memoryStream,
new AsyncOnlyStream(memoryStream),
SharpCompress.Compressors.CompressionMode.Compress,
false
)
@@ -158,7 +159,7 @@ public class BZip2StreamAsyncTests
{
using (
var bzip2Stream = new BZip2Stream(
readStream,
new AsyncOnlyStream(memoryStream),
SharpCompress.Compressors.CompressionMode.Decompress,
false
)
@@ -189,7 +190,7 @@ public class BZip2StreamAsyncTests
{
using (
var bzip2Stream = new BZip2Stream(
memoryStream,
new AsyncOnlyStream(memoryStream),
SharpCompress.Compressors.CompressionMode.Compress,
false
)
@@ -207,7 +208,7 @@ public class BZip2StreamAsyncTests
{
using (
var bzip2Stream = new BZip2Stream(
memoryStream,
new AsyncOnlyStream(memoryStream),
SharpCompress.Compressors.CompressionMode.Decompress,
false
)

View File

@@ -70,7 +70,13 @@ public class GZipReaderAsyncTests : ReaderTests
bufferSize: options.BufferSize
);
using var testStream = new TestStream(protectedStream);
await using (var reader = ReaderFactory.OpenAsyncReader(testStream, options, default))
await using (
var reader = ReaderFactory.OpenAsyncReader(
new AsyncOnlyStream(testStream),
options,
default
)
)
{
await UseReaderAsync(reader, expectedCompression);
protectedStream.ThrowOnDispose = false;

View File

@@ -6,6 +6,7 @@ using SharpCompress.Archives.Rar;
using SharpCompress.Common;
using SharpCompress.Compressors.LZMA.Utilites;
using SharpCompress.Readers;
using SharpCompress.Test.Mocks;
using Xunit;
namespace SharpCompress.Test.Rar;
@@ -653,7 +654,9 @@ public class RarArchiveAsyncTests : ArchiveTests
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
using var stream = File.OpenRead(testArchive);
await using var archive = await ArchiveFactory.OpenAsyncArchive(stream);
await using var archive = await ArchiveFactory.OpenAsyncArchive(
new AsyncOnlyStream(stream)
);
Assert.True(await archive.IsSolidAsync());
await using (var reader = await archive.ExtractAllEntriesAsync())
{

View File

@@ -4,6 +4,7 @@ using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Common;
using SharpCompress.Test.Mocks;
using Xunit;
namespace SharpCompress.Test.SevenZip;
@@ -16,9 +17,11 @@ public class SevenZipArchiveAsyncTests : ArchiveTests
{
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "7Zip.LZMA.7z");
using var stream = File.OpenRead(testArchive);
using var archive = ArchiveFactory.OpenArchive(stream);
await using var archive = await ArchiveFactory.OpenAsyncArchive(
new AsyncOnlyStream(stream)
);
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
await foreach (var entry in archive.EntriesAsync.Where(entry => !entry.IsDirectory))
{
var targetPath = Path.Combine(SCRATCH_FILES_PATH, entry.Key!);
var targetDir = Path.GetDirectoryName(targetPath);
@@ -41,9 +44,11 @@ public class SevenZipArchiveAsyncTests : ArchiveTests
{
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "7Zip.LZMA2.7z");
using var stream = File.OpenRead(testArchive);
using var archive = ArchiveFactory.OpenArchive(stream);
await using var archive = await ArchiveFactory.OpenAsyncArchive(
new AsyncOnlyStream(stream)
);
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
await foreach (var entry in archive.EntriesAsync.Where(entry => !entry.IsDirectory))
{
var targetPath = Path.Combine(SCRATCH_FILES_PATH, entry.Key!);
var targetDir = Path.GetDirectoryName(targetPath);
@@ -66,9 +71,11 @@ public class SevenZipArchiveAsyncTests : ArchiveTests
{
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "7Zip.solid.7z");
using var stream = File.OpenRead(testArchive);
using var archive = ArchiveFactory.OpenArchive(stream);
await using var archive = await ArchiveFactory.OpenAsyncArchive(
new AsyncOnlyStream(stream)
);
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
await foreach (var entry in archive.EntriesAsync.Where(entry => !entry.IsDirectory))
{
var targetPath = Path.Combine(SCRATCH_FILES_PATH, entry.Key!);
var targetDir = Path.GetDirectoryName(targetPath);
@@ -91,9 +98,11 @@ public class SevenZipArchiveAsyncTests : ArchiveTests
{
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "7Zip.BZip2.7z");
using var stream = File.OpenRead(testArchive);
using var archive = ArchiveFactory.OpenArchive(stream);
await using var archive = await ArchiveFactory.OpenAsyncArchive(
new AsyncOnlyStream(stream)
);
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
await foreach (var entry in archive.EntriesAsync.Where(entry => !entry.IsDirectory))
{
var targetPath = Path.Combine(SCRATCH_FILES_PATH, entry.Key!);
var targetDir = Path.GetDirectoryName(targetPath);
@@ -116,9 +125,11 @@ public class SevenZipArchiveAsyncTests : ArchiveTests
{
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "7Zip.PPMd.7z");
using var stream = File.OpenRead(testArchive);
using var archive = ArchiveFactory.OpenArchive(stream);
await using var archive = await ArchiveFactory.OpenAsyncArchive(
new AsyncOnlyStream(stream)
);
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
await foreach (var entry in archive.EntriesAsync.Where(entry => !entry.IsDirectory))
{
var targetPath = Path.Combine(SCRATCH_FILES_PATH, entry.Key!);
var targetDir = Path.GetDirectoryName(targetPath);

View File

@@ -35,7 +35,7 @@ public class TarArchiveAsyncTests : ArchiveTests
using (Stream stream = File.OpenWrite(Path.Combine(SCRATCH2_FILES_PATH, archive)))
using (
var writer = WriterFactory.OpenAsyncWriter(
stream,
new AsyncOnlyStream(stream),
ArchiveType.Tar,
CompressionType.None
)
@@ -92,7 +92,7 @@ public class TarArchiveAsyncTests : ArchiveTests
using (Stream stream = File.OpenWrite(Path.Combine(SCRATCH2_FILES_PATH, archive)))
using (
var writer = WriterFactory.OpenAsyncWriter(
stream,
new AsyncOnlyStream(stream),
ArchiveType.Tar,
CompressionType.None
)
@@ -199,7 +199,9 @@ public class TarArchiveAsyncTests : ArchiveTests
using (var inputMemory = new MemoryStream(mstm.ToArray()))
{
var tropt = new ReaderOptions { ArchiveEncoding = enc };
await using (var tr = ReaderFactory.OpenAsyncReader(inputMemory, tropt))
await using (
var tr = ReaderFactory.OpenAsyncReader(new AsyncOnlyStream(inputMemory), tropt)
)
{
while (await tr.MoveToNextEntryAsync())
{

View File

@@ -73,7 +73,7 @@ public class TarReaderAsyncTests : ReaderTests
public async ValueTask Tar_BZip2_Entry_Stream_Async()
{
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.bz2")))
await using (var reader = ReaderFactory.OpenAsyncReader(stream))
await using (var reader = ReaderFactory.OpenAsyncReader(new AsyncOnlyStream(stream)))
{
while (await reader.MoveToNextEntryAsync())
{

View File

@@ -1,6 +1,7 @@
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Compressors.Xz;
using SharpCompress.Test.Mocks;
using Xunit;
namespace SharpCompress.Test.Xz;
@@ -11,7 +12,7 @@ public class XzStreamAsyncTests : XzTestsBase
public async ValueTask CanReadEmptyStreamAsync()
{
var xz = new XZStream(CompressedEmptyStream);
using var sr = new StreamReader(xz);
using var sr = new StreamReader(new AsyncOnlyStream(xz));
var uncompressed = await sr.ReadToEndAsync().ConfigureAwait(false);
Assert.Equal(OriginalEmpty, uncompressed);
}
@@ -20,7 +21,7 @@ public class XzStreamAsyncTests : XzTestsBase
public async ValueTask CanReadStreamAsync()
{
var xz = new XZStream(CompressedStream);
using var sr = new StreamReader(xz);
using var sr = new StreamReader(new AsyncOnlyStream(xz));
var uncompressed = await sr.ReadToEndAsync().ConfigureAwait(false);
Assert.Equal(Original, uncompressed);
}
@@ -29,7 +30,7 @@ public class XzStreamAsyncTests : XzTestsBase
public async ValueTask CanReadIndexedStreamAsync()
{
var xz = new XZStream(CompressedIndexedStream);
using var sr = new StreamReader(xz);
using var sr = new StreamReader(new AsyncOnlyStream(xz));
var uncompressed = await sr.ReadToEndAsync().ConfigureAwait(false);
Assert.Equal(OriginalIndexed, uncompressed);
}

View File

@@ -199,7 +199,10 @@ public class Zip64AsyncTests : WriterTests
ZipEntry? prev = null;
using (var fs = File.OpenRead(filename))
{
var rd = ReaderFactory.OpenAsyncReader(fs, new ReaderOptions { LookForHeader = false });
var rd = ReaderFactory.OpenAsyncReader(
new AsyncOnlyStream(fs),
new ReaderOptions { LookForHeader = false }
);
await using (rd)
{
while (await rd.MoveToNextEntryAsync())

View File

@@ -9,6 +9,7 @@ using SharpCompress.Common;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Compressors.Xz;
using SharpCompress.Crypto;
using SharpCompress.Test.Mocks;
using SharpCompress.Writers;
using SharpCompress.Writers.Zip;
using Xunit;
@@ -132,7 +133,11 @@ public class ZipTypesLevelsWithCrcRatioAsyncTests : ArchiveTests
};
using (
var writer = WriterFactory.OpenAsyncWriter(zipStream, ArchiveType.Zip, writerOptions)
var writer = WriterFactory.OpenAsyncWriter(
new AsyncOnlyStream(zipStream),
ArchiveType.Zip,
writerOptions
)
)
{
await writer.WriteAsync(