Compare commits

...

25 Commits

Author SHA1 Message Date
Adam Hathcock
30e036f9ec Mark for 0.42.0 2025-11-28 13:24:03 +00:00
Adam Hathcock
095c871174 Merge pull request #1043 from adamhathcock/copilot/fix-divide-by-zero-exception
Fix DivideByZeroException when compressing empty files with BZip2
2025-11-27 16:47:39 +00:00
copilot-swe-agent[bot]
6d73c5b295 Fix DivideByZeroException when using BZip2 with empty files
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-27 15:59:39 +00:00
copilot-swe-agent[bot]
cc4d28193c Initial plan 2025-11-27 15:53:13 +00:00
Adam Hathcock
9433e06b93 Merge pull request #1023 from adamhathcock/copilot/fix-zip64-validation-issue
Fix version mismatch between Local File Header and Central Directory File Header in Zip archives
2025-11-25 15:30:48 +00:00
copilot-swe-agent[bot]
a92aaa51d5 Remove ZipCompressionMethod.None from version 63 check
None (stored) compression only requires version 10/20, not version 63.
Version 63 is specifically for advanced compression methods like LZMA,
PPMd, BZip2, and ZStandard.

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-25 14:54:13 +00:00
Adam Hathcock
d41908adeb fixes for clarity 2025-11-25 14:25:58 +00:00
Adam Hathcock
81ca15b567 Update src/SharpCompress/Writers/Zip/ZipCentralDirectoryEntry.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-25 14:13:10 +00:00
Adam Hathcock
b81d0fd730 Merge pull request #1009 from adamhathcock/dependabot/nuget/AwesomeAssertions-9.3.0
Bump AwesomeAssertions from 9.2.1 to 9.3.0
2025-11-25 11:55:41 +00:00
Adam Hathcock
3a1bb187e8 Merge pull request #1031 from adamhathcock/dependabot/github_actions/actions/checkout-6
Bump actions/checkout from 5 to 6
2025-11-25 11:55:21 +00:00
Adam Hathcock
3fee14a070 Merge pull request #1035 from adamhathcock/adam/update-csharpier
Update csharpier and reformat
2025-11-25 11:54:56 +00:00
Adam Hathcock
5bf789ac65 Update csharpier and reformat 2025-11-25 11:50:21 +00:00
dependabot[bot]
be06049db3 Bump actions/checkout from 5 to 6
Bumps [actions/checkout](https://github.com/actions/checkout) from 5 to 6.
- [Release notes](https://github.com/actions/checkout/releases)
- [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md)
- [Commits](https://github.com/actions/checkout/compare/v5...v6)

---
updated-dependencies:
- dependency-name: actions/checkout
  dependency-version: '6'
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-11-24 09:20:23 +00:00
Adam Hathcock
a0435f6a60 Merge pull request #1030 from TwanVanDongen/master
Added buffer boundary tests.
2025-11-23 15:53:36 +00:00
Twan van Dongen
2321e2c90b Added buffer boundary tests. Changed largefile to Alice29.txt as it's sufficient for the tests. 2025-11-22 12:32:25 +01:00
Adam Hathcock
97e98d8629 Merge pull request #1028 from TwanVanDongen/master
Buffer boundary tests
2025-11-21 08:35:54 +00:00
Twan van Dongen
d96e7362d2 Buffer boundary test for ARC's Squeezed method 2025-11-20 21:56:07 +01:00
Twan van Dongen
7dd46fe5ed More buffer boundary tests 2025-11-20 19:43:07 +01:00
Adam Hathcock
04c044cb2b Update tests/SharpCompress.Test/Zip/Zip64VersionConsistencyTests.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-19 15:49:55 +00:00
Adam Hathcock
cc10a12fbc Update tests/SharpCompress.Test/Zip/Zip64VersionConsistencyTests.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-19 15:49:27 +00:00
Adam Hathcock
8b0a1c699f Update tests/SharpCompress.Test/Zip/Zip64VersionConsistencyTests.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-19 15:49:17 +00:00
copilot-swe-agent[bot]
0fe48c647e Enhance fix to handle LZMA/PPMd/BZip2/ZStandard compression methods
Also fixes pre-existing version mismatch for advanced compression methods that require version 63. Added tests for LZMA and PPMd to verify version consistency.

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-19 11:29:43 +00:00
copilot-swe-agent[bot]
434ce05416 Fix Zip64 version mismatch between LFH and CDFH
When UseZip64=true but files are small, ensure Central Directory File Header uses version 45 to match Local File Header. This fixes validation failures in System.IO.Packaging and other strict readers.

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-19 11:14:06 +00:00
copilot-swe-agent[bot]
51237a34eb Initial plan 2025-11-19 11:02:28 +00:00
dependabot[bot]
4c61628078 Bump AwesomeAssertions from 9.2.1 to 9.3.0
---
updated-dependencies:
- dependency-name: AwesomeAssertions
  dependency-version: 9.3.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-11-10 11:57:30 +00:00
31 changed files with 4301 additions and 153 deletions

View File

@@ -3,7 +3,7 @@
"isRoot": true,
"tools": {
"csharpier": {
"version": "1.1.2",
"version": "1.2.1",
"commands": [
"csharpier"
],

View File

@@ -14,7 +14,7 @@ jobs:
os: [windows-latest, ubuntu-latest]
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- uses: actions/setup-dotnet@v5
with:
dotnet-version: 8.0.x

View File

@@ -1,7 +1,7 @@
<Project>
<ItemGroup>
<PackageVersion Include="Bullseye" Version="6.0.0" />
<PackageVersion Include="AwesomeAssertions" Version="9.2.1" />
<PackageVersion Include="AwesomeAssertions" Version="9.3.0" />
<PackageVersion Include="Glob" Version="1.1.9" />
<PackageVersion Include="JetBrains.Profiler.SelfApi" Version="2.5.14" />
<PackageVersion Include="Microsoft.Bcl.AsyncInterfaces" Version="8.0.0" />

View File

@@ -128,6 +128,7 @@ public static class IArchiveEntryExtensions
{
using var fs = File.Open(destinationFileName, fm);
await entry.WriteToAsync(fs, cancellationToken).ConfigureAwait(false);
}
},
cancellationToken
);
}

View File

@@ -57,7 +57,7 @@ namespace SharpCompress.Common.Arc
return value switch
{
1 or 2 => CompressionType.None,
3 => CompressionType.RLE90,
3 => CompressionType.Packed,
4 => CompressionType.Squeezed,
5 or 6 or 7 or 8 => CompressionType.Crunched,
9 => CompressionType.Squashed,

View File

@@ -44,7 +44,7 @@ namespace SharpCompress.Common.Arc
Header.CompressedSize
);
break;
case CompressionType.RLE90:
case CompressionType.Packed:
compressedStream = new RunLength90Stream(
_stream,
(int)Header.CompressedSize
@@ -54,6 +54,14 @@ namespace SharpCompress.Common.Arc
compressedStream = new SqueezeStream(_stream, (int)Header.CompressedSize);
break;
case CompressionType.Crunched:
if (Header.OriginalSize > 128 * 1024)
{
throw new NotSupportedException(
"CompressionMethod: "
+ Header.CompressionMethod
+ " with size > 128KB"
);
}
compressedStream = new ArcLzwStream(
_stream,
(int)Header.CompressedSize,

View File

@@ -41,7 +41,7 @@ namespace SharpCompress.Common.Arj
case CompressionMethod.CompressedMost:
case CompressionMethod.Compressed:
case CompressionMethod.CompressedFaster:
if (Header.CompressedSize > 128 * 1024)
if (Header.OriginalSize > 128 * 1024)
{
throw new NotSupportedException(
"CompressionMethod: "

View File

@@ -23,7 +23,7 @@ public enum CompressionType
Reduce4,
Explode,
Squeezed,
RLE90,
Packed,
Crunched,
Squashed,
Crushed,

View File

@@ -544,6 +544,12 @@ internal sealed class CBZip2OutputStream : Stream, IStreamStack
private void EndBlock()
{
// Skip block processing for empty input (no data written)
if (last < 0)
{
return;
}
blockCRC = mCrc.GetFinalCRC();
combinedCRC = (combinedCRC << 1) | (int)(((uint)combinedCRC) >> 31);
combinedCRC ^= blockCRC;

View File

@@ -2,9 +2,9 @@
<PropertyGroup>
<AssemblyTitle>SharpCompress - Pure C# Decompression/Compression</AssemblyTitle>
<NeutralLanguage>en-US</NeutralLanguage>
<VersionPrefix>0.41.0</VersionPrefix>
<AssemblyVersion>0.41.0</AssemblyVersion>
<FileVersion>0.41.0</FileVersion>
<VersionPrefix>0.42.0</VersionPrefix>
<AssemblyVersion>0.42.0</AssemblyVersion>
<FileVersion>0.42.0</FileVersion>
<Authors>Adam Hathcock</Authors>
<TargetFrameworks>net48;net481;netstandard2.0;net6.0;net8.0</TargetFrameworks>
<AssemblyName>SharpCompress</AssemblyName>

View File

@@ -48,7 +48,29 @@ internal class ZipCentralDirectoryEntry
var decompressedvalue = zip64 ? uint.MaxValue : (uint)Decompressed;
var headeroffsetvalue = zip64 ? uint.MaxValue : (uint)HeaderOffset;
var extralength = zip64 ? (2 + 2 + 8 + 8 + 8 + 4) : 0;
var version = (byte)(zip64 ? 45 : 20); // Version 20 required for deflate/encryption
// Determine version needed to extract:
// - Version 63 for LZMA, PPMd, BZip2, ZStandard (advanced compression methods)
// - Version 45 for Zip64 extensions (when Zip64HeaderOffset != 0 or actual sizes require it)
// - Version 20 for standard Deflate/None compression
byte version;
if (
compression == ZipCompressionMethod.LZMA
|| compression == ZipCompressionMethod.PPMd
|| compression == ZipCompressionMethod.BZip2
|| compression == ZipCompressionMethod.ZStandard
)
{
version = 63;
}
else if (zip64 || Zip64HeaderOffset != 0)
{
version = 45;
}
else
{
version = 20;
}
var flags = Equals(archiveEncoding.GetEncoding(), Encoding.UTF8)
? HeaderFlags.Efs

View File

@@ -27,5 +27,22 @@ namespace SharpCompress.Test.Arc
[Fact]
public void Arc_Crunched_Read() => Read("Arc.crunched.arc");
[Theory]
[InlineData("Arc.crunched.largefile.arc", CompressionType.Crunched)]
public void Arc_LargeFile_ShouldThrow(string fileName, CompressionType compressionType)
{
var exception = Assert.Throws<NotSupportedException>(() =>
ReadForBufferBoundaryCheck(fileName, compressionType)
);
}
[Theory]
[InlineData("Arc.uncompressed.largefile.arc", CompressionType.None)]
[InlineData("Arc.squeezed.largefile.arc", CompressionType.Squeezed)]
public void Arc_LargeFileTest_Read(string fileName, CompressionType compressionType)
{
ReadForBufferBoundaryCheck(fileName, compressionType);
}
}
}

View File

@@ -45,16 +45,14 @@ namespace SharpCompress.Test.Arj
public void Arj_Multi_Reader()
{
var exception = Assert.Throws<MultiVolumeExtractionException>(() =>
DoArj_Multi_Reader(
[
"Arj.store.split.arj",
"Arj.store.split.a01",
"Arj.store.split.a02",
"Arj.store.split.a03",
"Arj.store.split.a04",
"Arj.store.split.a05",
]
)
DoArj_Multi_Reader([
"Arj.store.split.arj",
"Arj.store.split.a01",
"Arj.store.split.a02",
"Arj.store.split.a03",
"Arj.store.split.a04",
"Arj.store.split.a05",
])
);
}
@@ -65,7 +63,7 @@ namespace SharpCompress.Test.Arj
public void Arj_LargeFile_ShouldThrow(string fileName, CompressionType compressionType)
{
var exception = Assert.Throws<NotSupportedException>(() =>
Arj_LargeFileTest_Read(fileName, compressionType)
ReadForBufferBoundaryCheck(fileName, compressionType)
);
}
@@ -74,24 +72,7 @@ namespace SharpCompress.Test.Arj
[InlineData("Arj.method4.largefile.arj", CompressionType.ArjLZ77)]
public void Arj_LargeFileTest_Read(string fileName, CompressionType compressionType)
{
using (var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, fileName)))
using (
var reader = ReaderFactory.Open(stream, new ReaderOptions { LookForHeader = true })
)
{
while (reader.MoveToNextEntry())
{
Assert.Equal(compressionType, reader.Entry.CompressionType);
reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
}
CompareFilesByPath(
Path.Combine(SCRATCH_FILES_PATH, "news.txt"),
Path.Combine(MISC_TEST_FILES_PATH, "news.txt")
);
ReadForBufferBoundaryCheck(fileName, compressionType);
}
private void DoArj_Multi_Reader(string[] archives)

View File

@@ -15,29 +15,25 @@ public class RarReaderAsyncTests : ReaderTests
{
[Fact]
public async Task Rar_Multi_Reader_Async() =>
await DoRar_Multi_Reader_Async(
[
"Rar.multi.part01.rar",
"Rar.multi.part02.rar",
"Rar.multi.part03.rar",
"Rar.multi.part04.rar",
"Rar.multi.part05.rar",
"Rar.multi.part06.rar",
]
);
await DoRar_Multi_Reader_Async([
"Rar.multi.part01.rar",
"Rar.multi.part02.rar",
"Rar.multi.part03.rar",
"Rar.multi.part04.rar",
"Rar.multi.part05.rar",
"Rar.multi.part06.rar",
]);
[Fact]
public async Task Rar5_Multi_Reader_Async() =>
await DoRar_Multi_Reader_Async(
[
"Rar5.multi.part01.rar",
"Rar5.multi.part02.rar",
"Rar5.multi.part03.rar",
"Rar5.multi.part04.rar",
"Rar5.multi.part05.rar",
"Rar5.multi.part06.rar",
]
);
await DoRar_Multi_Reader_Async([
"Rar5.multi.part01.rar",
"Rar5.multi.part02.rar",
"Rar5.multi.part03.rar",
"Rar5.multi.part04.rar",
"Rar5.multi.part05.rar",
"Rar5.multi.part06.rar",
]);
private async Task DoRar_Multi_Reader_Async(string[] archives)
{
@@ -95,29 +91,25 @@ public class RarReaderAsyncTests : ReaderTests
[Fact]
public async Task Rar_Multi_Reader_Delete_Files_Async() =>
await DoRar_Multi_Reader_Delete_Files_Async(
[
"Rar.multi.part01.rar",
"Rar.multi.part02.rar",
"Rar.multi.part03.rar",
"Rar.multi.part04.rar",
"Rar.multi.part05.rar",
"Rar.multi.part06.rar",
]
);
await DoRar_Multi_Reader_Delete_Files_Async([
"Rar.multi.part01.rar",
"Rar.multi.part02.rar",
"Rar.multi.part03.rar",
"Rar.multi.part04.rar",
"Rar.multi.part05.rar",
"Rar.multi.part06.rar",
]);
[Fact]
public async Task Rar5_Multi_Reader_Delete_Files_Async() =>
await DoRar_Multi_Reader_Delete_Files_Async(
[
"Rar5.multi.part01.rar",
"Rar5.multi.part02.rar",
"Rar5.multi.part03.rar",
"Rar5.multi.part04.rar",
"Rar5.multi.part05.rar",
"Rar5.multi.part06.rar",
]
);
await DoRar_Multi_Reader_Delete_Files_Async([
"Rar5.multi.part01.rar",
"Rar5.multi.part02.rar",
"Rar5.multi.part03.rar",
"Rar5.multi.part04.rar",
"Rar5.multi.part05.rar",
"Rar5.multi.part06.rar",
]);
private async Task DoRar_Multi_Reader_Delete_Files_Async(string[] archives)
{

View File

@@ -14,29 +14,25 @@ public class RarReaderTests : ReaderTests
{
[Fact]
public void Rar_Multi_Reader() =>
DoRar_Multi_Reader(
[
"Rar.multi.part01.rar",
"Rar.multi.part02.rar",
"Rar.multi.part03.rar",
"Rar.multi.part04.rar",
"Rar.multi.part05.rar",
"Rar.multi.part06.rar",
]
);
DoRar_Multi_Reader([
"Rar.multi.part01.rar",
"Rar.multi.part02.rar",
"Rar.multi.part03.rar",
"Rar.multi.part04.rar",
"Rar.multi.part05.rar",
"Rar.multi.part06.rar",
]);
[Fact]
public void Rar5_Multi_Reader() =>
DoRar_Multi_Reader(
[
"Rar5.multi.part01.rar",
"Rar5.multi.part02.rar",
"Rar5.multi.part03.rar",
"Rar5.multi.part04.rar",
"Rar5.multi.part05.rar",
"Rar5.multi.part06.rar",
]
);
DoRar_Multi_Reader([
"Rar5.multi.part01.rar",
"Rar5.multi.part02.rar",
"Rar5.multi.part03.rar",
"Rar5.multi.part04.rar",
"Rar5.multi.part05.rar",
"Rar5.multi.part06.rar",
]);
private void DoRar_Multi_Reader(string[] archives)
{
@@ -61,16 +57,14 @@ public class RarReaderTests : ReaderTests
[Fact]
public void Rar_Multi_Reader_Encrypted() =>
DoRar_Multi_Reader_Encrypted(
[
"Rar.EncryptedParts.part01.rar",
"Rar.EncryptedParts.part02.rar",
"Rar.EncryptedParts.part03.rar",
"Rar.EncryptedParts.part04.rar",
"Rar.EncryptedParts.part05.rar",
"Rar.EncryptedParts.part06.rar",
]
);
DoRar_Multi_Reader_Encrypted([
"Rar.EncryptedParts.part01.rar",
"Rar.EncryptedParts.part02.rar",
"Rar.EncryptedParts.part03.rar",
"Rar.EncryptedParts.part04.rar",
"Rar.EncryptedParts.part05.rar",
"Rar.EncryptedParts.part06.rar",
]);
private void DoRar_Multi_Reader_Encrypted(string[] archives) =>
Assert.Throws<InvalidFormatException>(() =>
@@ -97,29 +91,25 @@ public class RarReaderTests : ReaderTests
[Fact]
public void Rar_Multi_Reader_Delete_Files() =>
DoRar_Multi_Reader_Delete_Files(
[
"Rar.multi.part01.rar",
"Rar.multi.part02.rar",
"Rar.multi.part03.rar",
"Rar.multi.part04.rar",
"Rar.multi.part05.rar",
"Rar.multi.part06.rar",
]
);
DoRar_Multi_Reader_Delete_Files([
"Rar.multi.part01.rar",
"Rar.multi.part02.rar",
"Rar.multi.part03.rar",
"Rar.multi.part04.rar",
"Rar.multi.part05.rar",
"Rar.multi.part06.rar",
]);
[Fact]
public void Rar5_Multi_Reader_Delete_Files() =>
DoRar_Multi_Reader_Delete_Files(
[
"Rar5.multi.part01.rar",
"Rar5.multi.part02.rar",
"Rar5.multi.part03.rar",
"Rar5.multi.part04.rar",
"Rar5.multi.part05.rar",
"Rar5.multi.part06.rar",
]
);
DoRar_Multi_Reader_Delete_Files([
"Rar5.multi.part01.rar",
"Rar5.multi.part02.rar",
"Rar5.multi.part03.rar",
"Rar5.multi.part04.rar",
"Rar5.multi.part05.rar",
"Rar5.multi.part06.rar",
]);
private void DoRar_Multi_Reader_Delete_Files(string[] archives)
{
@@ -407,16 +397,14 @@ public class RarReaderTests : ReaderTests
Path.Combine("exe", "test.exe"),
}
);
using var reader = RarReader.Open(
[
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part01.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part02.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part03.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part04.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part05.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part06.rar"),
]
);
using var reader = RarReader.Open([
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part01.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part02.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part03.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part04.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part05.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part06.rar"),
]);
while (reader.MoveToNextEntry())
{
Assert.Equal(expectedOrder.Pop(), reader.Entry.Key);

View File

@@ -176,6 +176,27 @@ public abstract class ReaderTests : TestBase
}
}
protected void ReadForBufferBoundaryCheck(string fileName, CompressionType compressionType)
{
using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, fileName));
using var reader = ReaderFactory.Open(stream, new ReaderOptions { LookForHeader = true });
while (reader.MoveToNextEntry())
{
Assert.Equal(compressionType, reader.Entry.CompressionType);
reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
CompareFilesByPath(
Path.Combine(SCRATCH_FILES_PATH, "alice29.txt"),
Path.Combine(MISC_TEST_FILES_PATH, "alice29.txt")
);
}
protected void Iterate(
string testArchive,
string fileOrder,

View File

@@ -74,9 +74,21 @@ public class XzIndexAsyncTests : XzTestsBase
public async Task SkipsPaddingAsync()
{
// Index with 3-byte padding.
using Stream badStream = new MemoryStream(
[0x00, 0x01, 0x10, 0x80, 0x01, 0x00, 0x00, 0x00, 0xB1, 0x01, 0xD9, 0xC9, 0xFF]
);
using Stream badStream = new MemoryStream([
0x00,
0x01,
0x10,
0x80,
0x01,
0x00,
0x00,
0x00,
0xB1,
0x01,
0xD9,
0xC9,
0xFF,
]);
var br = new BinaryReader(badStream);
var index = new XZIndex(br, false);
await index.ProcessAsync().ConfigureAwait(false);

View File

@@ -71,9 +71,21 @@ public class XzIndexTests : XzTestsBase
public void SkipsPadding()
{
// Index with 3-byte padding.
using Stream badStream = new MemoryStream(
[0x00, 0x01, 0x10, 0x80, 0x01, 0x00, 0x00, 0x00, 0xB1, 0x01, 0xD9, 0xC9, 0xFF]
);
using Stream badStream = new MemoryStream([
0x00,
0x01,
0x10,
0x80,
0x01,
0x00,
0x00,
0x00,
0xB1,
0x01,
0xD9,
0xC9,
0xFF,
]);
var br = new BinaryReader(badStream);
var index = new XZIndex(br, false);
index.Process();

View File

@@ -0,0 +1,441 @@
using System;
using System.Buffers.Binary;
using System.IO;
using SharpCompress.Archives;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.Writers;
using SharpCompress.Writers.Zip;
using Xunit;
namespace SharpCompress.Test.Zip;
/// <summary>
/// Tests for verifying version consistency between Local File Header (LFH)
/// and Central Directory File Header (CDFH) when using Zip64.
/// </summary>
public class Zip64VersionConsistencyTests : WriterTests
{
public Zip64VersionConsistencyTests()
: base(ArchiveType.Zip) { }
[Fact]
public void Zip64_Small_File_With_UseZip64_Should_Have_Matching_Versions()
{
// Create a zip with UseZip64=true but with a small file
var filename = Path.Combine(SCRATCH2_FILES_PATH, "zip64_version_test.zip");
if (File.Exists(filename))
{
File.Delete(filename);
}
// Create archive with UseZip64=true
WriterOptions writerOptions = new ZipWriterOptions(CompressionType.Deflate)
{
LeaveStreamOpen = false,
UseZip64 = true,
};
ZipArchive zipArchive = ZipArchive.Create();
zipArchive.AddEntry("empty", new MemoryStream());
zipArchive.SaveTo(filename, writerOptions);
// Now read the raw bytes to verify version consistency
using var fs = File.OpenRead(filename);
using var br = new BinaryReader(fs);
// Read Local File Header
var lfhSignature = br.ReadUInt32();
Assert.Equal(0x04034b50u, lfhSignature); // Local file header signature
var lfhVersion = br.ReadUInt16();
// Skip to Central Directory
// Find Central Directory by searching from the end
fs.Seek(-22, SeekOrigin.End); // Min EOCD size
var eocdSignature = br.ReadUInt32();
if (eocdSignature != 0x06054b50u)
{
// Might have Zip64 EOCD, search backwards
fs.Seek(-100, SeekOrigin.End);
var buffer = new byte[100];
fs.Read(buffer, 0, 100);
// Find EOCD signature
for (int i = buffer.Length - 4; i >= 0; i--)
{
if (BinaryPrimitives.ReadUInt32LittleEndian(buffer.AsSpan(i)) == 0x06054b50u)
{
fs.Seek(-100 + i, SeekOrigin.End);
break;
}
}
}
// Read EOCD
fs.Seek(-22, SeekOrigin.End);
br.ReadUInt32(); // EOCD signature
br.ReadUInt16(); // disk number
br.ReadUInt16(); // disk with central dir
br.ReadUInt16(); // entries on this disk
br.ReadUInt16(); // total entries
br.ReadUInt32(); // central directory size (unused)
var cdOffset = br.ReadUInt32();
// If Zip64, need to read from Zip64 EOCD
if (cdOffset == 0xFFFFFFFF)
{
// Find Zip64 EOCD Locator
fs.Seek(-22 - 20, SeekOrigin.End);
var z64eocdlSig = br.ReadUInt32();
if (z64eocdlSig == 0x07064b50u)
{
br.ReadUInt32(); // disk number
var z64eocdOffset = br.ReadUInt64();
br.ReadUInt32(); // total disks
// Read Zip64 EOCD
fs.Seek((long)z64eocdOffset, SeekOrigin.Begin);
br.ReadUInt32(); // signature
br.ReadUInt64(); // size of EOCD64
br.ReadUInt16(); // version made by
br.ReadUInt16(); // version needed
br.ReadUInt32(); // disk number
br.ReadUInt32(); // disk with CD
br.ReadUInt64(); // entries on disk
br.ReadUInt64(); // total entries
br.ReadUInt64(); // CD size
cdOffset = (uint)br.ReadUInt64(); // CD offset
}
}
// Read Central Directory Header
fs.Seek(cdOffset, SeekOrigin.Begin);
var cdhSignature = br.ReadUInt32();
Assert.Equal(0x02014b50u, cdhSignature); // Central directory header signature
br.ReadUInt16(); // version made by
var cdhVersionNeeded = br.ReadUInt16();
// The versions should match when UseZip64 is true
Assert.Equal(lfhVersion, cdhVersionNeeded);
}
[Fact]
public void Zip64_Small_File_Without_UseZip64_Should_Have_Version_20()
{
// Create a zip without UseZip64
var filename = Path.Combine(SCRATCH2_FILES_PATH, "no_zip64_version_test.zip");
if (File.Exists(filename))
{
File.Delete(filename);
}
// Create archive without UseZip64
WriterOptions writerOptions = new ZipWriterOptions(CompressionType.Deflate)
{
LeaveStreamOpen = false,
UseZip64 = false,
};
ZipArchive zipArchive = ZipArchive.Create();
zipArchive.AddEntry("empty", new MemoryStream());
zipArchive.SaveTo(filename, writerOptions);
// Read the raw bytes
using var fs = File.OpenRead(filename);
using var br = new BinaryReader(fs);
// Read Local File Header version
var lfhSignature = br.ReadUInt32();
Assert.Equal(0x04034b50u, lfhSignature);
var lfhVersion = br.ReadUInt16();
// Read Central Directory Header version
fs.Seek(-22, SeekOrigin.End);
br.ReadUInt32(); // EOCD signature
br.ReadUInt16(); // disk number
br.ReadUInt16(); // disk with central dir
br.ReadUInt16(); // entries on this disk
br.ReadUInt16(); // total entries
br.ReadUInt32(); // CD size
var cdOffset = br.ReadUInt32();
fs.Seek(cdOffset, SeekOrigin.Begin);
var cdhSignature = br.ReadUInt32();
Assert.Equal(0x02014b50u, cdhSignature);
br.ReadUInt16(); // version made by
var cdhVersionNeeded = br.ReadUInt16();
// Both should be version 20 (or less)
Assert.True(lfhVersion <= 20);
Assert.Equal(lfhVersion, cdhVersionNeeded);
}
[Fact]
public void LZMA_Compression_Should_Use_Version_63()
{
// Create a zip with LZMA compression
var filename = Path.Combine(SCRATCH2_FILES_PATH, "lzma_version_test.zip");
if (File.Exists(filename))
{
File.Delete(filename);
}
WriterOptions writerOptions = new ZipWriterOptions(CompressionType.LZMA)
{
LeaveStreamOpen = false,
UseZip64 = false,
};
ZipArchive zipArchive = ZipArchive.Create();
var data = new byte[100];
new Random(42).NextBytes(data);
zipArchive.AddEntry("test.bin", new MemoryStream(data));
zipArchive.SaveTo(filename, writerOptions);
// Read the raw bytes
using var fs = File.OpenRead(filename);
using var br = new BinaryReader(fs);
// Read Local File Header version
var lfhSignature = br.ReadUInt32();
Assert.Equal(0x04034b50u, lfhSignature);
var lfhVersion = br.ReadUInt16();
// Read Central Directory Header version
fs.Seek(-22, SeekOrigin.End);
br.ReadUInt32(); // EOCD signature
br.ReadUInt16(); // disk number
br.ReadUInt16(); // disk with central dir
br.ReadUInt16(); // entries on this disk
br.ReadUInt16(); // total entries
br.ReadUInt32(); // CD size
var cdOffset = br.ReadUInt32();
fs.Seek(cdOffset, SeekOrigin.Begin);
var cdhSignature = br.ReadUInt32();
Assert.Equal(0x02014b50u, cdhSignature);
br.ReadUInt16(); // version made by
var cdhVersionNeeded = br.ReadUInt16();
// Both should be version 63 for LZMA
Assert.Equal(63, lfhVersion);
Assert.Equal(lfhVersion, cdhVersionNeeded);
}
[Fact]
public void PPMd_Compression_Should_Use_Version_63()
{
// Create a zip with PPMd compression
var filename = Path.Combine(SCRATCH2_FILES_PATH, "ppmd_version_test.zip");
if (File.Exists(filename))
{
File.Delete(filename);
}
WriterOptions writerOptions = new ZipWriterOptions(CompressionType.PPMd)
{
LeaveStreamOpen = false,
UseZip64 = false,
};
ZipArchive zipArchive = ZipArchive.Create();
var data = new byte[100];
new Random(42).NextBytes(data);
zipArchive.AddEntry("test.bin", new MemoryStream(data));
zipArchive.SaveTo(filename, writerOptions);
// Read the raw bytes
using var fs = File.OpenRead(filename);
using var br = new BinaryReader(fs);
// Read Local File Header version
var lfhSignature = br.ReadUInt32();
Assert.Equal(0x04034b50u, lfhSignature);
var lfhVersion = br.ReadUInt16();
// Read Central Directory Header version
fs.Seek(-22, SeekOrigin.End);
br.ReadUInt32(); // EOCD signature
br.ReadUInt16(); // disk number
br.ReadUInt16(); // disk with central dir
br.ReadUInt16(); // entries on this disk
br.ReadUInt16(); // total entries
br.ReadUInt32(); // CD size
var cdOffset = br.ReadUInt32();
fs.Seek(cdOffset, SeekOrigin.Begin);
var cdhSignature = br.ReadUInt32();
Assert.Equal(0x02014b50u, cdhSignature);
br.ReadUInt16(); // version made by
var cdhVersionNeeded = br.ReadUInt16();
// Both should be version 63 for PPMd
Assert.Equal(63, lfhVersion);
Assert.Equal(lfhVersion, cdhVersionNeeded);
}
[Fact]
public void Zip64_Multiple_Small_Files_With_UseZip64_Should_Have_Matching_Versions()
{
// Create a zip with UseZip64=true but with multiple small files
var filename = Path.Combine(SCRATCH2_FILES_PATH, "zip64_version_multiple_test.zip");
if (File.Exists(filename))
{
File.Delete(filename);
}
WriterOptions writerOptions = new ZipWriterOptions(CompressionType.Deflate)
{
LeaveStreamOpen = false,
UseZip64 = true,
};
ZipArchive zipArchive = ZipArchive.Create();
for (int i = 0; i < 5; i++)
{
var data = new byte[100];
new Random(i).NextBytes(data);
zipArchive.AddEntry($"file{i}.bin", new MemoryStream(data));
}
zipArchive.SaveTo(filename, writerOptions);
// Verify that all entries have matching versions
using var fs = File.OpenRead(filename);
using var br = new BinaryReader(fs);
// Read all LFH versions
var lfhVersions = new System.Collections.Generic.List<ushort>();
while (true)
{
var sig = br.ReadUInt32();
if (sig == 0x04034b50u) // LFH signature
{
var version = br.ReadUInt16();
lfhVersions.Add(version);
// Skip rest of LFH
br.ReadUInt16(); // flags
br.ReadUInt16(); // compression
br.ReadUInt32(); // mod time
br.ReadUInt32(); // crc
br.ReadUInt32(); // compressed size
br.ReadUInt32(); // uncompressed size
var fnLen = br.ReadUInt16();
var extraLen = br.ReadUInt16();
fs.Seek(fnLen + extraLen, SeekOrigin.Current);
// Skip compressed data by reading compressed size from extra field if zip64
// For simplicity in this test, we'll just find the next signature
var found = false;
while (fs.Position < fs.Length - 4)
{
var b = br.ReadByte();
if (b == 0x50)
{
var nextBytes = br.ReadBytes(3);
if (
(nextBytes[0] == 0x4b && nextBytes[1] == 0x03 && nextBytes[2] == 0x04)
|| // LFH
(nextBytes[0] == 0x4b && nextBytes[1] == 0x01 && nextBytes[2] == 0x02)
) // CDH
{
fs.Seek(-4, SeekOrigin.Current);
found = true;
break;
}
}
}
if (!found)
{
break;
}
}
else if (sig == 0x02014b50u) // CDH signature
{
break; // Reached central directory
}
else
{
break; // Unknown signature
}
}
// Find Central Directory
fs.Seek(-22, SeekOrigin.End);
br.ReadUInt32(); // EOCD signature
br.ReadUInt16(); // disk number
br.ReadUInt16(); // disk with central dir
br.ReadUInt16(); // entries on this disk
var totalEntries = br.ReadUInt16();
br.ReadUInt32(); // CD size
var cdOffset = br.ReadUInt32();
// Check if we need Zip64 EOCD
if (cdOffset == 0xFFFFFFFF)
{
fs.Seek(-22 - 20, SeekOrigin.End);
var z64eocdlSig = br.ReadUInt32();
if (z64eocdlSig == 0x07064b50u)
{
br.ReadUInt32(); // disk number
var z64eocdOffset = br.ReadUInt64();
fs.Seek((long)z64eocdOffset, SeekOrigin.Begin);
br.ReadUInt32(); // signature
br.ReadUInt64(); // size
br.ReadUInt16(); // version made by
br.ReadUInt16(); // version needed
br.ReadUInt32(); // disk number
br.ReadUInt32(); // disk with CD
br.ReadUInt64(); // entries on disk
totalEntries = (ushort)br.ReadUInt64(); // total entries
br.ReadUInt64(); // CD size
cdOffset = (uint)br.ReadUInt64(); // CD offset
}
}
// Read CDH versions
fs.Seek(cdOffset, SeekOrigin.Begin);
var cdhVersions = new System.Collections.Generic.List<ushort>();
for (int i = 0; i < totalEntries; i++)
{
var sig = br.ReadUInt32();
Assert.Equal(0x02014b50u, sig);
br.ReadUInt16(); // version made by
var version = br.ReadUInt16();
cdhVersions.Add(version);
// Skip rest of CDH
br.ReadUInt16(); // flags
br.ReadUInt16(); // compression
br.ReadUInt32(); // mod time
br.ReadUInt32(); // crc
br.ReadUInt32(); // compressed size
br.ReadUInt32(); // uncompressed size
var fnLen = br.ReadUInt16();
var extraLen = br.ReadUInt16();
var commentLen = br.ReadUInt16();
br.ReadUInt16(); // disk number start
br.ReadUInt16(); // internal attributes
br.ReadUInt32(); // external attributes
br.ReadUInt32(); // LFH offset
fs.Seek(fnLen + extraLen + commentLen, SeekOrigin.Current);
}
// Verify all versions match
Assert.Equal(lfhVersions.Count, cdhVersions.Count);
for (int i = 0; i < lfhVersions.Count; i++)
{
Assert.Equal(lfhVersions[i], cdhVersions[i]);
}
}
}

View File

@@ -1,5 +1,7 @@
using System.IO;
using System.Text;
using SharpCompress.Common;
using SharpCompress.Writers;
using Xunit;
namespace SharpCompress.Test.Zip;
@@ -9,6 +11,42 @@ public class ZipWriterTests : WriterTests
public ZipWriterTests()
: base(ArchiveType.Zip) { }
[Fact]
public void Zip_BZip2_Write_EmptyFile()
{
// Test that writing an empty file with BZip2 compression doesn't throw DivideByZeroException
using var memoryStream = new MemoryStream();
var options = new WriterOptions(CompressionType.BZip2)
{
ArchiveEncoding = new ArchiveEncoding { Default = new UTF8Encoding(false) },
};
using (var writer = WriterFactory.Open(memoryStream, ArchiveType.Zip, options))
{
writer.Write("test-folder/zero-byte-file.txt", Stream.Null);
}
Assert.True(memoryStream.Length > 0);
}
[Fact]
public void Zip_BZip2_Write_EmptyFolder()
{
// Test that writing an empty folder entry with BZip2 compression doesn't throw DivideByZeroException
using var memoryStream = new MemoryStream();
var options = new WriterOptions(CompressionType.BZip2)
{
ArchiveEncoding = new ArchiveEncoding { Default = new UTF8Encoding(false) },
};
using (var writer = WriterFactory.Open(memoryStream, ArchiveType.Zip, options))
{
writer.Write("test-empty-folder/", Stream.Null);
}
Assert.True(memoryStream.Length > 0);
}
[Fact]
public void Zip_Deflate_Write() =>
Write(

View File

@@ -4,9 +4,9 @@
".NETFramework,Version=v4.8": {
"AwesomeAssertions": {
"type": "Direct",
"requested": "[9.2.1, )",
"resolved": "9.2.1",
"contentHash": "lbwhyQNXxxEGx4oCbFqNfFy2DsywsvNhN6qoOjY4wwhMgI2L9+YrxjyF/M0io99yrvWV1Cjj12LP2QGcC43Uhw==",
"requested": "[9.3.0, )",
"resolved": "9.3.0",
"contentHash": "8lGLYap2ec2gNLgjf2xKZaKLpQ7j36oJvrYzBVVpNAumqnxRdevqqhEF66qxE92f8y2+zsbQ061DeHG61ZhzaQ==",
"dependencies": {
"System.Threading.Tasks.Extensions": "4.5.4"
}
@@ -199,9 +199,9 @@
"net8.0": {
"AwesomeAssertions": {
"type": "Direct",
"requested": "[9.2.1, )",
"resolved": "9.2.1",
"contentHash": "lbwhyQNXxxEGx4oCbFqNfFy2DsywsvNhN6qoOjY4wwhMgI2L9+YrxjyF/M0io99yrvWV1Cjj12LP2QGcC43Uhw=="
"requested": "[9.3.0, )",
"resolved": "9.3.0",
"contentHash": "8lGLYap2ec2gNLgjf2xKZaKLpQ7j36oJvrYzBVVpNAumqnxRdevqqhEF66qxE92f8y2+zsbQ061DeHG61ZhzaQ=="
},
"Microsoft.NET.Test.Sdk": {
"type": "Direct",

Binary file not shown.

Binary file not shown.

Binary file not shown.

File diff suppressed because it is too large Load Diff