diff --git a/src/SharpCompress/Compressors/Deflate64/Deflate64Stream.cs b/src/SharpCompress/Compressors/Deflate64/Deflate64Stream.cs
index 8ef83ec4..e18e85b5 100644
--- a/src/SharpCompress/Compressors/Deflate64/Deflate64Stream.cs
+++ b/src/SharpCompress/Compressors/Deflate64/Deflate64Stream.cs
@@ -2,7 +2,6 @@
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
-#nullable disable
using System;
using System.Diagnostics;
@@ -39,7 +38,6 @@ public sealed class Deflate64Stream : Stream, IStreamStack
private const int DEFAULT_BUFFER_SIZE = 8192;
private Stream _stream;
- private CompressionMode _mode;
private InflaterManaged _inflater;
private byte[] _buffer;
@@ -62,61 +60,24 @@ public sealed class Deflate64Stream : Stream, IStreamStack
throw new ArgumentException("Deflate64: input stream is not readable", nameof(stream));
}
- InitializeInflater(stream, ZipCompressionMethod.Deflate64);
-#if DEBUG_STREAMS
- this.DebugConstruct(typeof(Deflate64Stream));
-#endif
- }
-
- /// <summary>
- /// Sets up this DeflateManagedStream to be used for Inflation/Decompression
- /// </summary>
- private void InitializeInflater(
- Stream stream,
- ZipCompressionMethod method = ZipCompressionMethod.Deflate
- )
- {
- Debug.Assert(stream != null);
- Debug.Assert(
- method == ZipCompressionMethod.Deflate || method == ZipCompressionMethod.Deflate64
- );
if (!stream.CanRead)
{
throw new ArgumentException("Deflate64: input stream is not readable", nameof(stream));
}
- _inflater = new InflaterManaged(method == ZipCompressionMethod.Deflate64);
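+ // This stream is decompression-only and always Deflate64, so the inflater's deflate64 flag is hardcoded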
+ _inflater = new InflaterManaged(true);
_stream = stream;
- _mode = CompressionMode.Decompress;
_buffer = new byte[DEFAULT_BUFFER_SIZE];
+#if DEBUG_STREAMS
+ this.DebugConstruct(typeof(Deflate64Stream));
+#endif
}
- public override bool CanRead
- {
- get
- {
- if (_stream is null)
- {
- return false;
- }
- return (_mode == CompressionMode.Decompress && _stream.CanRead);
- }
- }
+ public override bool CanRead => _stream.CanRead;
- public override bool CanWrite
- {
- get
- {
- if (_stream is null)
- {
- return false;
- }
-
- return (_mode == CompressionMode.Compress && _stream.CanWrite);
- }
- }
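+ // Decompression-only: writing is never supported, regardless of the underlying stream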
+ public override bool CanWrite => false;
public override bool CanSeek => false;
@@ -138,7 +99,6 @@ public sealed class Deflate64Stream : Stream, IStreamStack
public override int Read(byte[] array, int offset, int count)
{
- EnsureDecompressionMode();
ValidateParameters(array, offset, count);
EnsureNotDisposed();
@@ -220,25 +180,8 @@ public sealed class Deflate64Stream : Stream, IStreamStack
private static void ThrowStreamClosedException() =>
throw new ObjectDisposedException(null, "Deflate64: stream has been disposed");
- private void EnsureDecompressionMode()
- {
- if (_mode != CompressionMode.Decompress)
- {
- ThrowCannotReadFromDeflateManagedStreamException();
- }
- }
- [MethodImpl(MethodImplOptions.NoInlining)]
- private static void ThrowCannotReadFromDeflateManagedStreamException() =>
- throw new InvalidOperationException("Deflate64: cannot read from this stream");
- private void EnsureCompressionMode()
- {
- if (_mode != CompressionMode.Compress)
- {
- ThrowCannotWriteToDeflateManagedStreamException();
- }
- }
[MethodImpl(MethodImplOptions.NoInlining)]
private static void ThrowCannotWriteToDeflateManagedStreamException() =>
@@ -281,20 +224,18 @@ public sealed class Deflate64Stream : Stream, IStreamStack
#endif
if (disposing)
{
- _stream?.Dispose();
+ _stream.Dispose();
}
}
finally
{
- _stream = null;
try
{
- _inflater?.Dispose();
+ _inflater.Dispose();
}
finally
{
- _inflater = null;
base.Dispose(disposing);
}
}
diff --git a/tests/SharpCompress.Test/Zip/Zip64AsyncTests.cs b/tests/SharpCompress.Test/Zip/Zip64AsyncTests.cs
new file mode 100644
index 00000000..b939f068
--- /dev/null
+++ b/tests/SharpCompress.Test/Zip/Zip64AsyncTests.cs
@@ -0,0 +1,237 @@
+using System;
+using System.IO;
+using System.Linq;
+using System.Threading.Tasks;
+using SharpCompress.Archives;
+using SharpCompress.Common;
+using SharpCompress.Common.Zip;
+using SharpCompress.Compressors.Deflate;
+using SharpCompress.Readers;
+using SharpCompress.Readers.Zip;
+using SharpCompress.Test.Mocks;
+using SharpCompress.Writers;
+using SharpCompress.Writers.Zip;
+using Xunit;
+
+namespace SharpCompress.Test.Zip;
+
+public class Zip64AsyncTests : WriterTests
+{
+ public Zip64AsyncTests()
+ : base(ArchiveType.Zip) { }
+
+ // uint.MaxValue + 1 = 4GiB, the first size that no longer fits in the classic 32-bit zip fields
+ private const long FOUR_GB_LIMIT = ((long)uint.MaxValue) + 1;
+
+ [Fact]
+ [Trait("format", "zip64")]
+ public async Task Zip64_Single_Large_File_Async() =>
+ // One single file, requires zip64
+ await RunSingleTestAsync(1, FOUR_GB_LIMIT, setZip64: true, forwardOnly: false);
+
+ [Fact]
+ [Trait("format", "zip64")]
+ public async Task Zip64_Two_Large_Files_Async() =>
+ // Two large files, requires zip64
+ await RunSingleTestAsync(2, FOUR_GB_LIMIT, setZip64: true, forwardOnly: false);
+
+ [Fact]
+ [Trait("format", "zip64")]
+ public async Task Zip64_Two_Small_files_Async() =>
+ // Multiple files, does not require zip64
+ await RunSingleTestAsync(2, FOUR_GB_LIMIT / 2, setZip64: false, forwardOnly: false);
+
+ [Fact]
+ [Trait("format", "zip64")]
+ public async Task Zip64_Two_Small_files_stream_Async() =>
+ // Multiple files, does not require zip64, and works with streams
+ await RunSingleTestAsync(2, FOUR_GB_LIMIT / 2, setZip64: false, forwardOnly: true);
+
+ [Fact]
+ [Trait("format", "zip64")]
+ public async Task Zip64_Two_Small_Files_Zip64_Async() =>
+ // Multiple files, use zip64 even though it is not required
+ await RunSingleTestAsync(2, FOUR_GB_LIMIT / 2, setZip64: true, forwardOnly: false);
+
+ [Fact]
+ [Trait("format", "zip64")]
+ public async Task Zip64_Single_Large_File_Fail_Async()
+ {
+ try
+ {
+ // One single file, should fail
+ await RunSingleTestAsync(1, FOUR_GB_LIMIT, setZip64: false, forwardOnly: false);
+ throw new InvalidOperationException("Test did not fail?");
+ }
+ catch (NotSupportedException) { }
+ }
+
+ [Fact]
+ [Trait("zip64", "true")]
+ public async Task Zip64_Single_Large_File_Zip64_Streaming_Fail_Async()
+ {
+ try
+ {
+ // One single file, should fail (fast) with zip64
+ await RunSingleTestAsync(1, FOUR_GB_LIMIT, setZip64: true, forwardOnly: true);
+ throw new InvalidOperationException("Test did not fail?");
+ }
+ catch (NotSupportedException) { }
+ }
+
+ [Fact]
+ [Trait("zip64", "true")]
+ public async Task Zip64_Single_Large_File_Streaming_Fail_Async()
+ {
+ try
+ {
+ // One single file, should fail once the write discovers the problem
+ await RunSingleTestAsync(1, FOUR_GB_LIMIT, setZip64: false, forwardOnly: true);
+ throw new InvalidOperationException("Test did not fail?");
+ }
+ catch (NotSupportedException) { }
+ }
+
+ public async Task RunSingleTestAsync(
+ long files,
+ long filesize,
+ bool setZip64,
+ bool forwardOnly,
+ long writeChunkSize = 1024 * 1024,
+ string filename = "zip64-test-async.zip"
+ )
+ {
+ filename = Path.Combine(SCRATCH2_FILES_PATH, filename);
+
+ if (File.Exists(filename))
+ {
+ File.Delete(filename);
+ }
+
+ if (!File.Exists(filename))
+ {
+ await CreateZipArchiveAsync(
+ filename,
+ files,
+ filesize,
+ writeChunkSize,
+ setZip64,
+ forwardOnly
+ );
+ }
+
+ var resForward = await ReadForwardOnlyAsync(filename);
+ if (resForward.Item1 != files)
+ {
+ throw new InvalidOperationException(
+ $"Incorrect number of items reported: {resForward.Item1}, should have been {files}"
+ );
+ }
+
+ if (resForward.Item2 != files * filesize)
+ {
+ throw new InvalidOperationException(
+ $"Incorrect combined size reported: {resForward.Item2}, should have been {files * filesize}"
+ );
+ }
+
+ var resArchive = ReadArchive(filename);
+ if (resArchive.Item1 != files)
+ {
+ throw new InvalidOperationException(
+ $"Incorrect number of items reported: {resArchive.Item1}, should have been {files}"
+ );
+ }
+
+ if (resArchive.Item2 != files * filesize)
+ {
+ throw new InvalidOperationException(
+ $"Incorrect number of items reported: {resArchive.Item2}, should have been {files * filesize}"
+ );
+ }
+ }
+
+ public async Task CreateZipArchiveAsync(
+ string filename,
+ long files,
+ long filesize,
+ long chunksize,
+ bool setZip64,
+ bool forwardOnly
+ )
+ {
+ var data = new byte[chunksize];
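+ // A zeroed buffer is fine: with CompressionLevel.None below, only the size matters, not the content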
+
+ // Use the Deflate method so the archive writes quickly
+ var opts = new ZipWriterOptions(CompressionType.Deflate) { UseZip64 = setZip64 };
+
+ // Deflate with CompressionLevel.None keeps the output near the input size so the 4GiB limits are actually hit (it inflates slightly, but that is simpler than switching to method == Store)
+ var eo = new ZipWriterEntryOptions { DeflateCompressionLevel = CompressionLevel.None };
+
+ using var zip = File.OpenWrite(filename);
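+ // ForwardOnlyStream is a test mock that rejects seeking, forcing the writer onto its streaming code path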
+ using var st = forwardOnly ? (Stream)new ForwardOnlyStream(zip) : zip;
+ using var zipWriter = (ZipWriter)WriterFactory.Open(st, ArchiveType.Zip, opts);
+ for (var i = 0; i < files; i++)
+ {
+ using var str = zipWriter.WriteToStream(i.ToString(), eo);
+ var left = filesize;
+ while (left > 0)
+ {
+ var b = (int)Math.Min(left, data.Length);
+ // Use synchronous Write to match the sync version and avoid ForwardOnlyStream issues
+ str.Write(data, 0, b);
+ left -= b;
+ }
+ }
+ // No real awaits are needed since the writes are synchronous; satisfy the async signature
+ await Task.CompletedTask;
+ }
+
+ public async Task<Tuple<long, long>> ReadForwardOnlyAsync(string filename)
+ {
+ long count = 0;
+ long size = 0;
+ ZipEntry? prev = null;
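+ // When streaming, an entry's size may only be known from its trailing data descriptor, so it is
+ // only trusted after the entry has been fully read; 'prev' defers the size tally by one iteration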
+ using (var fs = File.OpenRead(filename))
+ using (var rd = ZipReader.Open(fs, new ReaderOptions { LookForHeader = false }))
+ {
+ while (await rd.MoveToNextEntryAsync())
+ {
+#if NETFRAMEWORK || NETSTANDARD2_0
+ using (var entryStream = await rd.OpenEntryStreamAsync())
+ {
+ await entryStream.SkipEntryAsync();
+ }
+#else
+ await using (var entryStream = await rd.OpenEntryStreamAsync())
+ {
+ await entryStream.SkipEntryAsync();
+ }
+#endif
+ count++;
+ if (prev != null)
+ {
+ size += prev.Size;
+ }
+
+ prev = rd.Entry;
+ }
+ }
+
+ if (prev != null)
+ {
+ size += prev.Size;
+ }
+
+ return new Tuple<long, long>(count, size);
+ }
+
+ public Tuple<long, long> ReadArchive(string filename)
+ {
+ using var archive = ArchiveFactory.Open(filename);
+ return new Tuple<long, long>(
+ archive.Entries.Count(),
+ archive.Entries.Select(x => x.Size).Sum()
+ );
+ }
+}
diff --git a/tests/SharpCompress.Test/Zip/ZipMemoryArchiveWithCrcAsyncTests.cs b/tests/SharpCompress.Test/Zip/ZipMemoryArchiveWithCrcAsyncTests.cs
new file mode 100644
index 00000000..eae1b22e
--- /dev/null
+++ b/tests/SharpCompress.Test/Zip/ZipMemoryArchiveWithCrcAsyncTests.cs
@@ -0,0 +1,281 @@
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.IO;
+using System.Linq;
+using System.Threading.Tasks;
+using SharpCompress.Archives.Zip;
+using SharpCompress.Common;
+using SharpCompress.Compressors.Deflate;
+using SharpCompress.Compressors.Xz;
+using SharpCompress.Crypto;
+using SharpCompress.Writers;
+using SharpCompress.Writers.Zip;
+using Xunit;
+
+namespace SharpCompress.Test.Zip;
+
+public class ZipTypesLevelsWithCrcRatioAsyncTests : ArchiveTests
+{
+ public ZipTypesLevelsWithCrcRatioAsyncTests() => UseExtensionInsteadOfNameToVerify = true;
+
+ [Theory]
+ [InlineData(CompressionType.Deflate, 1, 1, 0.11f)] // was 0.8f, actual 0.104
+ [InlineData(CompressionType.Deflate, 3, 1, 0.08f)] // was 0.8f, actual 0.078
+ [InlineData(CompressionType.Deflate, 6, 1, 0.05f)] // was 0.8f, actual ~0.042
+ [InlineData(CompressionType.Deflate, 9, 1, 0.04f)] // was 0.7f, actual 0.038
+ [InlineData(CompressionType.ZStandard, 1, 1, 0.025f)] // was 0.8f, actual 0.023
+ [InlineData(CompressionType.ZStandard, 3, 1, 0.015f)] // was 0.7f, actual 0.013
+ [InlineData(CompressionType.ZStandard, 9, 1, 0.006f)] // was 0.7f, actual 0.005
+ [InlineData(CompressionType.ZStandard, 22, 1, 0.005f)] // was 0.7f, actual 0.004
+ [InlineData(CompressionType.BZip2, 0, 1, 0.035f)] // was 0.8f, actual 0.033
+ [InlineData(CompressionType.LZMA, 0, 1, 0.005f)] // was 0.8f, actual 0.004
+ [InlineData(CompressionType.None, 0, 1, 1.001f)] // was 1.1f, actual 1.000
+ [InlineData(CompressionType.Deflate, 6, 2, 0.045f)] // was 0.8f, actual 0.042
+ [InlineData(CompressionType.ZStandard, 3, 2, 0.012f)] // was 0.7f, actual 0.010
+ [InlineData(CompressionType.BZip2, 0, 2, 0.035f)] // was 0.8f, actual 0.032
+ [InlineData(CompressionType.Deflate, 9, 3, 0.04f)] // was 0.7f, actual 0.038
+ [InlineData(CompressionType.ZStandard, 9, 3, 0.003f)] // was 0.7f, actual 0.002
+ public async Task Zip_Create_Archive_With_3_Files_Crc32_Test_Async(
+ CompressionType compressionType,
+ int compressionLevel,
+ int sizeMb,
+ float expectedRatio
+ )
+ {
+ const int OneMiB = 1024 * 1024;
+ var baseSize = sizeMb * OneMiB;
+
+ // Generate test content for files with sizes based on the sizeMb parameter
+ var file1Data = TestPseudoTextStream.Create(baseSize);
+ var file2Data = TestPseudoTextStream.Create(baseSize * 2);
+ var file3Data = TestPseudoTextStream.Create(baseSize * 3);
+
+ var expectedFiles = new Dictionary<string, (byte[] data, uint crc)>
+ {
+ [$"file1_{sizeMb}MiB.txt"] = (file1Data, CalculateCrc32(file1Data)),
+ [$"data/file2_{sizeMb * 2}MiB.txt"] = (file2Data, CalculateCrc32(file2Data)),
+ [$"deep/nested/file3_{sizeMb * 3}MiB.txt"] = (file3Data, CalculateCrc32(file3Data)),
+ };
+
+ // Create zip archive in memory
+ using var zipStream = new MemoryStream();
+ using (var writer = CreateWriterWithLevel(zipStream, compressionType, compressionLevel))
+ {
+ await writer.WriteAsync($"file1_{sizeMb}MiB.txt", new MemoryStream(file1Data));
+ await writer.WriteAsync($"data/file2_{sizeMb * 2}MiB.txt", new MemoryStream(file2Data));
+ await writer.WriteAsync(
+ $"deep/nested/file3_{sizeMb * 3}MiB.txt",
+ new MemoryStream(file3Data)
+ );
+ }
+
+ // Calculate and output actual compression ratio
+ var originalSize = file1Data.Length + file2Data.Length + file3Data.Length;
+ var actualRatio = (double)zipStream.Length / originalSize;
+ //Debug.WriteLine($"Zip_Create_Archive_With_3_Files_Crc32_Test_Async: {compressionType} Level={compressionLevel} Size={sizeMb}MB Expected={expectedRatio:F3} Actual={actualRatio:F3}");
+
+ // Verify compression occurred (except for None compression type)
+ if (compressionType != CompressionType.None)
+ {
+ Assert.True(
+ zipStream.Length < originalSize,
+ $"Compression failed: compressed={zipStream.Length}, original={originalSize}"
+ );
+ }
+
+ // Verify compression ratio
+ VerifyCompressionRatio(
+ originalSize,
+ zipStream.Length,
+ expectedRatio,
+ $"{compressionType} level {compressionLevel}"
+ );
+
+ // Verify archive content and CRC32
+ await VerifyArchiveContentAsync(zipStream, expectedFiles);
+
+ // Verify compression type is correctly set
+ VerifyCompressionType(zipStream, compressionType);
+ }
+
+ [Theory]
+ [InlineData(CompressionType.Deflate, 1, 4, 0.11f)] // was 0.8, actual 0.105
+ [InlineData(CompressionType.Deflate, 3, 4, 0.08f)] // was 0.8, actual 0.077
+ [InlineData(CompressionType.Deflate, 6, 4, 0.045f)] // was 0.8, actual 0.042
+ [InlineData(CompressionType.Deflate, 9, 4, 0.04f)] // was 0.8, actual 0.037
+ [InlineData(CompressionType.ZStandard, 1, 4, 0.025f)] // was 0.8, actual 0.022
+ [InlineData(CompressionType.ZStandard, 3, 4, 0.012f)] // was 0.8, actual 0.010
+ [InlineData(CompressionType.ZStandard, 9, 4, 0.003f)] // was 0.8, actual 0.002
+ [InlineData(CompressionType.ZStandard, 22, 4, 0.003f)] // was 0.8, actual 0.002
+ [InlineData(CompressionType.BZip2, 0, 4, 0.035f)] // was 0.8, actual 0.032
+ [InlineData(CompressionType.LZMA, 0, 4, 0.003f)] // was 0.8, actual 0.002
+ public async Task Zip_WriterFactory_Crc32_Test_Async(
+ CompressionType compressionType,
+ int compressionLevel,
+ int sizeMb,
+ float expectedRatio
+ )
+ {
+ var fileSize = sizeMb * 1024 * 1024;
+
+ var testData = TestPseudoTextStream.Create(fileSize);
+ var expectedCrc = CalculateCrc32(testData);
+
+ // Create archive with specified compression level
+ using var zipStream = new MemoryStream();
+ var writerOptions = new ZipWriterOptions(compressionType)
+ {
+ CompressionLevel = compressionLevel,
+ };
+
+ using (var writer = WriterFactory.Open(zipStream, ArchiveType.Zip, writerOptions))
+ {
+ await writer.WriteAsync(
+ $"{compressionType}_level_{compressionLevel}_{sizeMb}MiB.txt",
+ new MemoryStream(testData)
+ );
+ }
+
+ // Calculate and output actual compression ratio
+ var actualRatio = (double)zipStream.Length / testData.Length;
+ //Debug.WriteLine($"Zip_WriterFactory_Crc32_Test_Async: {compressionType} Level={compressionLevel} Size={sizeMb}MB Expected={expectedRatio:F3} Actual={actualRatio:F3}");
+
+ VerifyCompressionRatio(
+ testData.Length,
+ zipStream.Length,
+ expectedRatio,
+ $"{compressionType} level {compressionLevel}"
+ );
+
+ // Verify the archive
+ zipStream.Position = 0;
+ using var archive = ZipArchive.Open(zipStream);
+
+ var entry = archive.Entries.Single(e => !e.IsDirectory);
+ using var entryStream = entry.OpenEntryStream();
+ using var extractedStream = new MemoryStream();
+ await entryStream.CopyToAsync(extractedStream);
+
+ var extractedData = extractedStream.ToArray();
+ var actualCrc = CalculateCrc32(extractedData);
+
+ Assert.Equal(compressionType, entry.CompressionType);
+ Assert.Equal(expectedCrc, actualCrc);
+ Assert.Equal(testData.Length, extractedData.Length);
+ Assert.Equal(testData, extractedData);
+ }
+
+ [Theory]
+ [InlineData(CompressionType.Deflate, 1, 2, 0.11f)] // was 0.8, actual 0.104
+ [InlineData(CompressionType.Deflate, 3, 2, 0.08f)] // was 0.8, actual 0.077
+ [InlineData(CompressionType.Deflate, 6, 2, 0.045f)] // was 0.8, actual 0.042
+ [InlineData(CompressionType.Deflate, 9, 2, 0.04f)] // was 0.7, actual 0.038
+ [InlineData(CompressionType.ZStandard, 1, 2, 0.025f)] // was 0.8, actual 0.023
+ [InlineData(CompressionType.ZStandard, 3, 2, 0.015f)] // was 0.7, actual 0.012
+ [InlineData(CompressionType.ZStandard, 9, 2, 0.006f)] // was 0.7, actual 0.005
+ [InlineData(CompressionType.ZStandard, 22, 2, 0.005f)] // was 0.7, actual 0.004
+ [InlineData(CompressionType.BZip2, 0, 2, 0.035f)] // was 0.8, actual 0.032
+ [InlineData(CompressionType.LZMA, 0, 2, 0.005f)] // was 0.8, actual 0.004
+ public async Task Zip_ZipArchiveOpen_Crc32_Test_Async(
+ CompressionType compressionType,
+ int compressionLevel,
+ int sizeMb,
+ float expectedRatio
+ )
+ {
+ var fileSize = sizeMb * 1024 * 1024;
+
+ var testData = TestPseudoTextStream.Create(fileSize);
+ var expectedCrc = CalculateCrc32(testData);
+
+ // Create archive with specified compression and level
+ using var zipStream = new MemoryStream();
+ using (var writer = CreateWriterWithLevel(zipStream, compressionType, compressionLevel))
+ {
+ await writer.WriteAsync(
+ $"{compressionType}_{compressionLevel}_{sizeMb}MiB.txt",
+ new MemoryStream(testData)
+ );
+ }
+
+ // Calculate and output actual compression ratio
+ var actualRatio = (double)zipStream.Length / testData.Length;
+ //Debug.WriteLine($"Zip_ZipArchiveOpen_Crc32_Test_Async: {compressionType} Level={compressionLevel} Size={sizeMb}MB Expected={expectedRatio:F3} Actual={actualRatio:F3}");
+
+ // Verify the archive
+ zipStream.Position = 0;
+ using var archive = ZipArchive.Open(zipStream);
+
+ var entry = archive.Entries.Single(e => !e.IsDirectory);
+ using var entryStream = entry.OpenEntryStream();
+ using var extractedStream = new MemoryStream();
+ await entryStream.CopyToAsync(extractedStream);
+
+ var extractedData = extractedStream.ToArray();
+ var actualCrc = CalculateCrc32(extractedData);
+
+ Assert.Equal(compressionType, entry.CompressionType);
+ Assert.Equal(expectedCrc, actualCrc);
+ Assert.Equal(testData.Length, extractedData.Length);
+
+ // For smaller files, verify full content; for larger, spot check
+ if (testData.Length <= 2 * 1024 * 1024)
+ {
+ Assert.Equal(testData, extractedData);
+ }
+ else
+ {
+ VerifyDataSpotCheck(testData, extractedData);
+ }
+
+ VerifyCompressionRatio(
+ testData.Length,
+ zipStream.Length,
+ expectedRatio,
+ $"{compressionType} Level {compressionLevel}"
+ );
+ }
+
+ // Helper method for async archive content verification
+ private async Task VerifyArchiveContentAsync(
+ MemoryStream zipStream,
+ Dictionary<string, (byte[] data, uint crc)> expectedFiles
+ )
+ {
+ zipStream.Position = 0;
+ using var archive = ZipArchive.Open(zipStream);
+
+ foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
+ {
+ Assert.True(
+ expectedFiles.ContainsKey(entry.Key!),
+ $"Unexpected file in archive: {entry.Key}"
+ );
+
+ var expected = expectedFiles[entry.Key!];
+ using var entryStream = entry.OpenEntryStream();
+ using var extractedStream = new MemoryStream();
+ await entryStream.CopyToAsync(extractedStream);
+
+ var extractedData = extractedStream.ToArray();
+ var actualCrc = CalculateCrc32(extractedData);
+
+ Assert.Equal(expected.crc, actualCrc);
+ Assert.Equal(expected.data.Length, extractedData.Length);
+
+ // For larger files, just spot check, for smaller verify full content
+ var expectedData = expected.data;
+ if (expectedData.Length <= 2 * 1024 * 1024)
+ {
+ Assert.Equal(expectedData, extractedData);
+ }
+ else
+ {
+ VerifyDataSpotCheck(expectedData, extractedData);
+ }
+ }
+
+ Assert.Equal(expectedFiles.Count, archive.Entries.Count(e => !e.IsDirectory));
+ }
+}