Compare commits


73 Commits

Author SHA1 Message Date
Adam Hathcock
22d15f73f0 Merge pull request #1181 from adamhathcock/adam/add-aot
Add AOT to props and clean up in release
2026-02-03 16:55:59 +00:00
Adam Hathcock
4e0d78d6c8 update desc 2026-02-03 16:41:19 +00:00
Adam Hathcock
63a1927838 Merge pull request #1182 from adamhathcock/copilot/sub-pr-1181
[WIP] WIP address feedback on AOT props and cleanup
2026-02-03 16:32:02 +00:00
copilot-swe-agent[bot]
3d745bfa05 Fix invalid TFM: change netstandard20 to netstandard2.0
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-02-03 16:30:47 +00:00
copilot-swe-agent[bot]
ce26d50792 Initial plan 2026-02-03 16:28:28 +00:00
Adam Hathcock
01e162fcc4 properly add netstandard 2 support 2026-02-03 16:17:58 +00:00
Adam Hathcock
443f7b8b0c Add AOT to props and clean up in release 2026-02-03 16:13:15 +00:00
Adam Hathcock
df63e152c1 Merge pull request #1178 from adamhathcock/copilot/fix-infinite-loop-rar-archive-again
Fix infinite loop in SourceStream.Seek for malformed archives
2026-02-02 10:57:10 +00:00
copilot-swe-agent[bot]
ad7e64ba43 Fix test to use correct RarArchive API - all RAR tests passing
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-02-02 09:33:37 +00:00
copilot-swe-agent[bot]
8737b7a38e Apply infinite loop fix to SourceStream.cs and add test case
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-02-02 09:31:38 +00:00
copilot-swe-agent[bot]
13199fcfd1 Initial plan 2026-02-02 09:28:50 +00:00
Adam Hathcock
9a7bdd39e8 Merge pull request #1172 from adamhathcock/copilot/fix-sevenzip-contiguous-streams
Fix SevenZipReader to maintain contiguous stream state for solid archives
2026-01-28 08:35:28 +00:00
Adam Hathcock
484bc740d7 Update src/SharpCompress/Archives/SevenZip/SevenZipArchive.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-01-28 08:26:28 +00:00
Adam Hathcock
8a67d501a8 Don't use reflection in tests 2026-01-28 08:10:06 +00:00
copilot-swe-agent[bot]
3c87242bd0 Add test to verify folder stream reuse in solid archives
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-27 17:29:44 +00:00
copilot-swe-agent[bot]
999124e68e Remove unused _currentFolderIndex field
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-27 17:03:20 +00:00
copilot-swe-agent[bot]
db2f5c9cb9 Fix SevenZipReader to iterate entries as contiguous streams
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-27 17:01:18 +00:00
Adam Hathcock
af08a7cd54 Merge pull request #1169 from adamhathcock/copilot/fix-zip-parsing-regression
Fix ZIP parsing failure on non-seekable streams with short reads
2026-01-27 16:54:12 +00:00
copilot-swe-agent[bot]
72eaf66f05 Initial plan 2026-01-27 16:53:53 +00:00
Adam Hathcock
8a3be35d67 Update tests/SharpCompress.Test/Zip/ZipShortReadTests.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-01-27 16:43:13 +00:00
copilot-swe-agent[bot]
d59e4c2a0d Refactor FillBuffer to use ReadFully pattern
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-27 16:25:24 +00:00
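The "ReadFully pattern" named in this commit title is the standard guard against short reads: `Stream.Read` may return fewer bytes than requested even when more data is coming, so the read is looped until the requested count arrives or the stream ends. A generic sketch of such a loop (an illustration, not SharpCompress's actual `FillBuffer` code):

```csharp
using System.IO;

static class StreamReadExtensions
{
    // Loop Stream.Read until 'count' bytes arrive or the stream ends, so a
    // short read from a pipe or network stream cannot truncate a header.
    public static int ReadFully(this Stream stream, byte[] buffer, int offset, int count)
    {
        var total = 0;
        while (total < count)
        {
            var read = stream.Read(buffer, offset + total, count - total);
            if (read == 0)
            {
                break; // end of stream before the buffer was filled
            }
            total += read;
        }
        return total; // callers compare against 'count' to detect truncation
    }
}
```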
copilot-swe-agent[bot]
71655e04c4 Apply code formatting with CSharpier
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-27 16:02:26 +00:00
copilot-swe-agent[bot]
a706a9d725 Fix ZIP parsing regression with short reads on non-seekable streams
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-27 16:00:44 +00:00
copilot-swe-agent[bot]
970934a40b Initial plan 2026-01-27 15:51:50 +00:00
Adam Hathcock
a9c28a7b62 Merge pull request #1165 from adamhathcock/adam/buffer-size-consolidation
(Release) Buffer size consolidation
2026-01-27 14:41:14 +00:00
Adam Hathcock
4d31436740 constant should be a static property 2026-01-27 12:39:01 +00:00
Adam Hathcock
c82744c51c fmt 2026-01-27 12:15:31 +00:00
Adam Hathcock
f0eaddc6a6 Merge remote-tracking branch 'origin/adam/buffer-size-consolidation' into adam/buffer-size-consolidation 2026-01-27 12:14:17 +00:00
Adam Hathcock
d6156f0f1e release branch builds increment patch versions and master builds increment minor versions 2026-01-27 12:14:03 +00:00
Adam Hathcock
3c88c7fdd5 Merge pull request #1167 from adamhathcock/copilot/sub-pr-1165-again
Fix grammatical errors in ArcFactory comment documentation
2026-01-27 11:58:25 +00:00
Adam Hathcock
d11f6aefb0 Merge pull request #1166 from adamhathcock/copilot/sub-pr-1165
Add [Obsolete] attribute to ReaderOptions.DefaultBufferSize for backward compatibility
2026-01-27 11:57:54 +00:00
copilot-swe-agent[bot]
010a38bb73 Add clarifying comment about buffer size value difference
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-27 11:54:50 +00:00
copilot-swe-agent[bot]
53f12d75db Add [Obsolete] attribute to ReaderOptions.DefaultBufferSize
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-27 11:53:37 +00:00
copilot-swe-agent[bot]
6c866324b2 Fix grammatical errors in ArcFactory comments
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-27 11:49:29 +00:00
copilot-swe-agent[bot]
a114155189 Initial plan 2026-01-27 11:48:05 +00:00
copilot-swe-agent[bot]
014bbc3ea4 Initial plan 2026-01-27 11:47:52 +00:00
Adam Hathcock
d52facd4ab Remove change 2026-01-27 10:48:32 +00:00
Adam Hathcock
0a50386ada Using Constants class differently 2026-01-27 10:46:54 +00:00
Adam Hathcock
b9fc680548 Merge pull request #1160 from adamhathcock/adam/check-if-seek
add check to see if we need to seek before hand
2026-01-26 12:24:39 +00:00
Adam Hathcock
7dcc13c1f0 Merge pull request #1161 from adamhathcock/copilot/sub-pr-1160
Fix ArrayPool corruption from double-disposal in BufferedSubStream
2026-01-26 12:15:55 +00:00
copilot-swe-agent[bot]
56d3091688 Fix condition order to check CanSeek before Position
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-26 12:12:08 +00:00
copilot-swe-agent[bot]
a0af0604d1 Add disposal checks to RefillCache methods
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-26 12:11:16 +00:00
copilot-swe-agent[bot]
875c2d7694 Fix BufferedSubStream double-dispose issue with ArrayPool
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-26 12:10:19 +00:00
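The double-dispose fix above follows the usual `ArrayPool` discipline: a rented array must be returned exactly once, because a second `Return` hands the same array to two future renters and silently corrupts their data. A minimal sketch of that guard (hypothetical type, not the actual `BufferedSubStream` code):

```csharp
using System;
using System.Buffers;

sealed class PooledBuffer : IDisposable
{
    private byte[]? _buffer = ArrayPool<byte>.Shared.Rent(4096);

    public byte[] Buffer =>
        _buffer ?? throw new ObjectDisposedException(nameof(PooledBuffer));

    public void Dispose()
    {
        // Clear the field before returning so a second Dispose() is a no-op
        // instead of returning the same array to the pool twice.
        var toReturn = _buffer;
        _buffer = null;
        if (toReturn is not null)
        {
            ArrayPool<byte>.Shared.Return(toReturn);
        }
    }
}
```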
Adam Hathcock
8c95f863cb do CanSeek first 2026-01-26 12:06:57 +00:00
copilot-swe-agent[bot]
ddf37e82c2 Initial plan 2026-01-26 12:06:38 +00:00
Adam Hathcock
a82fda98d7 more testing and add pooling to cache 2026-01-26 11:45:25 +00:00
Adam Hathcock
44e4b1804e add check to see if we need to seek before hand 2026-01-26 09:41:13 +00:00
Adam Hathcock
4ca1a7713e Merge pull request #1157 from adamhathcock/adam/1154-release
Merge pull request #1156 from adamhathcock/copilot/fix-sharpcompress-…
2026-01-25 11:36:59 +00:00
Adam Hathcock
9caf7be928 Revert testing 2026-01-24 10:23:02 +00:00
Adam Hathcock
bf4217fde6 Merge pull request #1156 from adamhathcock/copilot/fix-sharpcompress-archive-iteration
Fix silent iteration failure when input stream throws on Flush()
# Conflicts:
#	src/SharpCompress/packages.lock.json
2026-01-24 10:18:02 +00:00
Adam Hathcock
d5a8c37113 Merge pull request #1154 from adamhathcock/adam/1151-release
Adam/1151 release cherry pick
2026-01-23 09:31:03 +00:00
Adam Hathcock
21ce9a38e6 fix up tests 2026-01-23 09:04:55 +00:00
Adam Hathcock
7732fbb698 Merge pull request #1151 from adamhathcock/copilot/fix-entrystream-flush-issue
Fix EntryStream.Dispose() throwing NotSupportedException on non-seekable streams
2026-01-23 08:59:56 +00:00
Adam Hathcock
97879f18b6 Merge pull request #1146 from adamhathcock/adam/pr-1145-release
Merge pull request #1145 from adamhathcock/copilot/add-leaveopen-para…
2026-01-19 10:35:33 +00:00
Adam Hathcock
d74454f7e9 Merge pull request #1145 from adamhathcock/copilot/add-leaveopen-parameter-lzipstream
Add leaveOpen parameter to LZipStream and BZip2Stream
2026-01-19 09:58:10 +00:00
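The shape of the new parameter is visible in the `BZip2Stream` diff at the bottom of this page; with it, disposing the wrapper no longer closes the stream it wraps. A usage sketch (file names are illustrative):

```csharp
using System.IO;
using SharpCompress.Compressors;
using SharpCompress.Compressors.BZip2;

using Stream input = File.OpenRead("archive.tar.bz2");
using Stream output = File.Create("archive.tar");

using (var decompressor = new BZip2Stream(
    input,
    CompressionMode.Decompress,
    decompressConcatenated: true,
    leaveOpen: true)) // dispose the wrapper, keep 'input' open
{
    decompressor.CopyTo(output);
}

// 'input' is still usable here because leaveOpen was requested.
```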
Adam Hathcock
5c947bccc7 Merge branch 'adam/update-docs' 2026-01-07 16:18:51 +00:00
Adam Hathcock
fbdefc17c1 updates from review 2026-01-07 16:18:27 +00:00
Adam Hathcock
1425c6ff0d Merge pull request #1120 from adamhathcock/adam/update-docs
Update docs
2026-01-07 16:12:51 +00:00
Adam Hathcock
e038aea694 move old changelog 2026-01-07 16:10:55 +00:00
Adam Hathcock
87ccbf329d moved examples to USAGE 2026-01-07 15:56:38 +00:00
Adam Hathcock
9dcf384263 update for progress reporting 2026-01-07 15:30:26 +00:00
Adam Hathcock
be045c4f15 Merge pull request #1114 from adamhathcock/copilot/fix-7z-file-decompression-error
Fix async decompression of .7z files by implementing Memory<byte> ReadAsync overload
2026-01-07 08:16:51 +00:00
Adam Hathcock
fd968b3f78 Update src/SharpCompress/IO/ReadOnlySubStream.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-01-06 16:33:03 +00:00
Adam Hathcock
833dd7b3a2 fix tests and fmt 2026-01-06 15:33:43 +00:00
Adam Hathcock
b9258ad496 use more ValueTask methods but types are still created because of state machine suspension 2026-01-06 15:26:49 +00:00
copilot-swe-agent[bot]
0678318dde Fix async decompression by implementing Memory<byte> ReadAsync overload
The issue was that .NET 10's ReadExactlyAsync calls the Memory<byte> overload of ReadAsync, which wasn't implemented in BufferedSubStream. This caused it to fall back to the base Stream implementation that uses synchronous reads, leading to cache state corruption.

Solution: Added ValueTask<int> ReadAsync(Memory<byte>, CancellationToken) overload for modern .NET versions.

All tests now passing including LZMA2 and Solid archives.

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-06 14:18:15 +00:00
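The fix hinges on `Stream` having two virtual `ReadAsync` overloads: unless a subclass overrides `ReadAsync(Memory<byte>, CancellationToken)`, callers such as `ReadExactlyAsync` land on the base implementation, which bridges to the array-based path. A minimal sketch of the overload pair on a hypothetical forwarding stream (not the actual `BufferedSubStream` code):

```csharp
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;

sealed class ForwardingStream : Stream
{
    private readonly Stream _inner;

    public ForwardingStream(Stream inner) => _inner = inner;

    // The array-based overload alone is not enough on modern TFMs...
    public override Task<int> ReadAsync(
        byte[] buffer, int offset, int count, CancellationToken cancellationToken) =>
        _inner.ReadAsync(buffer, offset, count, cancellationToken);

    // ...this is the overload ReadExactlyAsync actually calls; without it the
    // base Stream implementation may route through synchronous reads.
    public override ValueTask<int> ReadAsync(
        Memory<byte> buffer, CancellationToken cancellationToken = default) =>
        _inner.ReadAsync(buffer, cancellationToken);

    public override int Read(byte[] buffer, int offset, int count) =>
        _inner.Read(buffer, offset, count);

    public override bool CanRead => _inner.CanRead;
    public override bool CanSeek => false;
    public override bool CanWrite => false;
    public override long Length => throw new NotSupportedException();
    public override long Position
    {
        get => throw new NotSupportedException();
        set => throw new NotSupportedException();
    }
    public override void Flush() { }
    public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
    public override void SetLength(long value) => throw new NotSupportedException();
    public override void Write(byte[] buffer, int offset, int count) => throw new NotSupportedException();
}
```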
copilot-swe-agent[bot]
7116c0d098 Add async support to BufferedSubStream for 7zip decompression
- Implemented ReadAsync and RefillCacheAsync methods in BufferedSubStream
- Added async test cases for SevenZipArchive (LZMA, LZMA2, Solid, BZip2, PPMd)
- Tests show LZMA, BZip2, and PPMd working correctly
- LZMA2 and Solid archives still failing with Data Error - investigating cache state management

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-06 14:13:48 +00:00
copilot-swe-agent[bot]
2fde8436fb Initial plan 2026-01-06 14:00:03 +00:00
Adam Hathcock
61ecd6475f Merge pull request #1113 from adamhathcock/adam/fix-readonly-dispose
Fix a usage of ReadOnly that use dispose in 7Zip
2026-01-06 13:22:50 +00:00
Adam Hathcock
64b209a772 add disposal tests and fix lzipstream 2026-01-06 13:13:34 +00:00
Adam Hathcock
48dbdbfed5 fmt 2026-01-06 12:49:26 +00:00
Adam Hathcock
cf50311b9c Skip should use framework stuff 2026-01-06 12:45:13 +00:00
Adam Hathcock
e4d8582a2a 7zip streams always want to be disposed 2026-01-06 12:42:48 +00:00
72 changed files with 2218 additions and 1907 deletions

View File

@@ -13,8 +13,7 @@
<PackageVersion Include="System.Memory" Version="4.6.3" />
<PackageVersion Include="xunit" Version="2.9.3" />
<PackageVersion Include="xunit.runner.visualstudio" Version="3.1.5" />
<PackageVersion Include="Microsoft.NET.ILLink.Tasks" Version="10.0.0" />
<PackageVersion Include="Microsoft.SourceLink.GitHub" Version="8.0.0" />
<PackageVersion Include="Microsoft.NETFramework.ReferenceAssemblies" Version="1.0.3" />
<GlobalPackageReference Include="Microsoft.SourceLink.GitHub" Version="10.0.102" />
<GlobalPackageReference Include="Microsoft.NETFramework.ReferenceAssemblies" Version="1.0.3" />
</ItemGroup>
</Project>

View File

@@ -25,7 +25,7 @@
| 7Zip (4) | LZMA, LZMA2, BZip2, PPMd, BCJ, BCJ2, Deflate | Decompress | SevenZipArchive | N/A | N/A |
1. SOLID Rars are only supported in the RarReader API.
2. Zip format supports pkware and WinzipAES encryption. However, encrypted LZMA is not supported. Zip64 reading/writing is supported but only with seekable streams as the Zip spec doesn't support Zip64 data in post data descriptors. Deflate64 is only supported for reading. SOZip (Seek-Optimized ZIP) detection is supported for reading. See [Zip Format Notes](#zip-format-notes) for details on multi-volume archives and streaming behavior.
2. Zip format supports pkware and WinzipAES encryption. However, encrypted LZMA is not supported. Zip64 reading/writing is supported but only with seekable streams as the Zip spec doesn't support Zip64 data in post data descriptors. Deflate64 is only supported for reading. See [Zip Format Notes](#zip-format-notes) for details on multi-volume archives and streaming behavior.
3. The Tar format requires a file size in the header. If no size is specified to the TarWriter and the stream is not seekable, then an exception will be thrown.
4. The 7Zip format doesn't allow for reading as a forward-only stream so 7Zip is only supported through the Archive API. See [7Zip Format Notes](#7zip-format-notes) for details on async extraction behavior.
5. LZip has no support for extra data like the file name or timestamp. There is a default filename used when looking at the entry Key on the archive.
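Note 3 above is the one most likely to surprise: tar headers store each entry's size up front, so a source stream whose length cannot be determined needs the size supplied explicitly. A hedged sketch (assuming `TarWriter`'s overload that accepts an explicit size; the commented call is illustrative):

```csharp
using System;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Writers.Tar;

using Stream output = File.Create("out.tar");
using var writer = new TarWriter(
    output, new TarWriterOptions(CompressionType.None, finalizeArchiveOnClose: true));

// A seekable file stream is fine: its length can be read for the header.
using (Stream file = File.OpenRead("data.bin"))
{
    writer.Write("data.bin", file, DateTime.Now);
}

// A non-seekable source (e.g. a network stream) has no measurable length,
// so the size must be passed explicitly or an exception is thrown:
// writer.Write("remote.bin", networkStream, DateTime.Now, knownLength);
```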

OLD_CHANGELOG.md (new file)
View File

@@ -0,0 +1,142 @@
# Version Log
* [Releases](https://github.com/adamhathcock/sharpcompress/releases)
## Version 0.18
* [Now on Github releases](https://github.com/adamhathcock/sharpcompress/releases/tag/0.18)
## Version 0.17.1
* Fix - [Bug Fix for .NET Core on Windows](https://github.com/adamhathcock/sharpcompress/pull/257)
## Version 0.17.0
* New - Full LZip support! Can read and write LZip files and Tars inside LZip files. [Make LZip a first class citizen. #241](https://github.com/adamhathcock/sharpcompress/issues/241)
* New - XZ read support! Can read XZ files and Tars inside XZ files. [XZ in SharpCompress #91](https://github.com/adamhathcock/sharpcompress/issues/94)
* Fix - [Regression - zip file writing on seekable streams always assumed stream start was 0. Introduced with Zip64 writing.](https://github.com/adamhathcock/sharpcompress/issues/244)
* Fix - [Zip files with post-data descriptors can be properly skipped via decompression](https://github.com/adamhathcock/sharpcompress/issues/162)
## Version 0.16.2
* Fix [.NET 3.5 should support files and cryptography (was a regression from 0.16.0)](https://github.com/adamhathcock/sharpcompress/pull/251)
* Fix [Zip per entry compression customization wrote the wrong method into the zip archive](https://github.com/adamhathcock/sharpcompress/pull/249)
## Version 0.16.1
* Fix [Preserve compression method when getting a compressed stream](https://github.com/adamhathcock/sharpcompress/pull/235)
* Fix [RAR entry key normalization fix](https://github.com/adamhathcock/sharpcompress/issues/201)
## Version 0.16.0
* Breaking - [Progress Event Tracking rethink](https://github.com/adamhathcock/sharpcompress/pull/226)
* Update to VS2017 - [VS2017](https://github.com/adamhathcock/sharpcompress/pull/231) - Framework targets have been changed.
* New - [Add Zip64 writing](https://github.com/adamhathcock/sharpcompress/pull/211)
* [Fix invalid/mismatching Zip version flags.](https://github.com/adamhathcock/sharpcompress/issues/164) - This allows nuget/System.IO.Packaging to read zip files generated by SharpCompress
* [Fix 7Zip directory hiding](https://github.com/adamhathcock/sharpcompress/pull/215/files)
* [Verify RAR CRC headers](https://github.com/adamhathcock/sharpcompress/pull/220)
## Version 0.15.2
* [Fix invalid headers](https://github.com/adamhathcock/sharpcompress/pull/210) - fixes an issue creating large-ish zip archives that was introduced with zip64 reading.
## Version 0.15.1
* [Zip64 extending information and ZipReader](https://github.com/adamhathcock/sharpcompress/pull/206)
## Version 0.15.0
* [Add zip64 support for ZipArchive extraction](https://github.com/adamhathcock/sharpcompress/pull/205)
## Version 0.14.1
* [.NET Assemblies aren't strong named](https://github.com/adamhathcock/sharpcompress/issues/158)
* [Pkware encryption for Zip files didn't allow for multiple reads of an entry](https://github.com/adamhathcock/sharpcompress/issues/197)
* [GZip Entry couldn't be read multiple times](https://github.com/adamhathcock/sharpcompress/issues/198)
## Version 0.14.0
* [Support for LZip reading in for Tars](https://github.com/adamhathcock/sharpcompress/pull/191)
## Version 0.13.1
* [Fix null password on ReaderFactory. Fix null options on SevenZipArchive](https://github.com/adamhathcock/sharpcompress/pull/188)
* [Make PpmdProperties lazy to avoid unnecessary allocations.](https://github.com/adamhathcock/sharpcompress/pull/185)
## Version 0.13.0
* Breaking change: Big refactor of Options on API.
* 7Zip supports Deflate
## Version 0.12.4
* Forward only zip issue fix https://github.com/adamhathcock/sharpcompress/issues/160
* Try to fix frameworks again by copying targets from JSON.NET
## Version 0.12.3
* 7Zip fixes https://github.com/adamhathcock/sharpcompress/issues/73
* Maybe all profiles will work with project.json now
## Version 0.12.2
* Support Profile 259 again
## Version 0.12.1
* Support Silverlight 5
## Version 0.12.0
* .NET Core RTM!
* Bug fix for Tar long paths
## Version 0.11.6
* Bug fix for global header in Tar
* Writers now have a leaveOpen `bool` overload. They won't close streams if not-requested to.
## Version 0.11.5
* Bug fix in Skip method
## Version 0.11.4
* SharpCompress is now endian neutral (matters for Mono platforms)
* Fix for Inflate (need to change implementation)
* Fixes for RAR detection
## Version 0.11.1
* Added Cancel on IReader
* Removed .NET 2.0 support and LinqBridge dependency
## Version 0.11
* Been over a year, contains mainly fixes from contributors!
* Possible breaking change: ArchiveEncoding is UTF8 by default now.
* TAR supports writing long names using longlink
* RAR Protect Header added
## Version 0.10.3
* Finally fixed Disposal issue when creating a new archive with the Archive API
## Version 0.10.2
* Fixed Rar Header reading for invalid extended time headers.
* Windows Store assembly is now strong named
* Known issues with Long Tar names being worked on
* Updated to VS2013
* Portable targets SL5 and Windows Phone 8 (up from SL4 and WP7)
## Version 0.10.1
* Fixed 7Zip extraction performance problem
## Version 0.10:
* Added support for RAR Decryption (thanks to https://github.com/hrasyid)
* Embedded some BouncyCastle crypto classes to allow RAR Decryption and Winzip AES Decryption in Portable and Windows Store DLLs
* Built in Release (I think)

README.md
View File

@@ -4,7 +4,7 @@ SharpCompress is a compression library in pure C# for .NET Framework 4.8, .NET 8
The major feature is support for non-seekable streams so large files can be processed on the fly (i.e. download stream).
**NEW:** All I/O operations now support async/await for improved performance and scalability. See the [Async Usage](#async-usage) section below.
**NEW:** All I/O operations now support async/await for improved performance and scalability. See the [USAGE.md](USAGE.md#async-examples) for examples.
GitHub Actions Build -
[![SharpCompress](https://github.com/adamhathcock/sharpcompress/actions/workflows/dotnetcore.yml/badge.svg)](https://github.com/adamhathcock/sharpcompress/actions/workflows/dotnetcore.yml)
@@ -34,235 +34,11 @@ Hi everyone. I hope you're using SharpCompress and finding it useful. Please giv
Please do not email me directly to ask for help. If you think there is a real issue, please report it here.
## Async Usage
SharpCompress now provides full async/await support for all I/O operations, allowing for better performance and scalability in modern applications.
### Async Reading Examples
Extract entries asynchronously:
```csharp
using (Stream stream = File.OpenRead("archive.zip"))
using (var reader = ReaderFactory.Open(stream))
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
// Async extraction
await reader.WriteEntryToDirectoryAsync(
@"C:\temp",
new ExtractionOptions() { ExtractFullPath = true, Overwrite = true },
cancellationToken
);
}
}
}
```
Extract all entries to directory asynchronously:
```csharp
using (Stream stream = File.OpenRead("archive.tar.gz"))
using (var reader = ReaderFactory.Open(stream))
{
await reader.WriteAllToDirectoryAsync(
@"C:\temp",
new ExtractionOptions() { ExtractFullPath = true, Overwrite = true },
cancellationToken
);
}
```
Open entry stream asynchronously:
```csharp
using (var archive = ZipArchive.Open("archive.zip"))
{
foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
{
using (var entryStream = await entry.OpenEntryStreamAsync(cancellationToken))
{
// Process stream asynchronously
await entryStream.CopyToAsync(outputStream, cancellationToken);
}
}
}
```
### Async Writing Examples
Write files asynchronously:
```csharp
using (Stream stream = File.OpenWrite("output.zip"))
using (var writer = WriterFactory.Open(stream, ArchiveType.Zip, CompressionType.Deflate))
{
await writer.WriteAsync("file1.txt", fileStream, DateTime.Now, cancellationToken);
}
```
Write all files from directory asynchronously:
```csharp
using (Stream stream = File.OpenWrite("output.tar.gz"))
using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, new WriterOptions(CompressionType.GZip)))
{
await writer.WriteAllAsync(@"D:\files", "*", SearchOption.AllDirectories, cancellationToken);
}
```
All async methods support `CancellationToken` for graceful cancellation of long-running operations.
## Want to contribute?
I'm always looking for help or ideas. Please submit code or email with ideas. Unfortunately, just letting me know you'd like to help is not enough because I really have no overall plan of what needs to be done. I'll definitely accept code submissions and add you as a member of the project!
## TODOs (always lots)
* RAR 5 decryption crc check support
* 7Zip writing
* Zip64 (Need writing and extend Reading)
* Multi-volume Zip support.
* ZStandard writing
## Version Log
* [Releases](https://github.com/adamhathcock/sharpcompress/releases)
### Version 0.18
* [Now on Github releases](https://github.com/adamhathcock/sharpcompress/releases/tag/0.18)
### Version 0.17.1
* Fix - [Bug Fix for .NET Core on Windows](https://github.com/adamhathcock/sharpcompress/pull/257)
### Version 0.17.0
* New - Full LZip support! Can read and write LZip files and Tars inside LZip files. [Make LZip a first class citizen. #241](https://github.com/adamhathcock/sharpcompress/issues/241)
* New - XZ read support! Can read XZ files and Tars inside XZ files. [XZ in SharpCompress #91](https://github.com/adamhathcock/sharpcompress/issues/94)
* Fix - [Regression - zip file writing on seekable streams always assumed stream start was 0. Introduced with Zip64 writing.](https://github.com/adamhathcock/sharpcompress/issues/244)
* Fix - [Zip files with post-data descriptors can be properly skipped via decompression](https://github.com/adamhathcock/sharpcompress/issues/162)
### Version 0.16.2
* Fix [.NET 3.5 should support files and cryptography (was a regression from 0.16.0)](https://github.com/adamhathcock/sharpcompress/pull/251)
* Fix [Zip per entry compression customization wrote the wrong method into the zip archive](https://github.com/adamhathcock/sharpcompress/pull/249)
### Version 0.16.1
* Fix [Preserve compression method when getting a compressed stream](https://github.com/adamhathcock/sharpcompress/pull/235)
* Fix [RAR entry key normalization fix](https://github.com/adamhathcock/sharpcompress/issues/201)
### Version 0.16.0
* Breaking - [Progress Event Tracking rethink](https://github.com/adamhathcock/sharpcompress/pull/226)
* Update to VS2017 - [VS2017](https://github.com/adamhathcock/sharpcompress/pull/231) - Framework targets have been changed.
* New - [Add Zip64 writing](https://github.com/adamhathcock/sharpcompress/pull/211)
* [Fix invalid/mismatching Zip version flags.](https://github.com/adamhathcock/sharpcompress/issues/164) - This allows nuget/System.IO.Packaging to read zip files generated by SharpCompress
* [Fix 7Zip directory hiding](https://github.com/adamhathcock/sharpcompress/pull/215/files)
* [Verify RAR CRC headers](https://github.com/adamhathcock/sharpcompress/pull/220)
### Version 0.15.2
* [Fix invalid headers](https://github.com/adamhathcock/sharpcompress/pull/210) - fixes an issue creating large-ish zip archives that was introduced with zip64 reading.
### Version 0.15.1
* [Zip64 extending information and ZipReader](https://github.com/adamhathcock/sharpcompress/pull/206)
### Version 0.15.0
* [Add zip64 support for ZipArchive extraction](https://github.com/adamhathcock/sharpcompress/pull/205)
### Version 0.14.1
* [.NET Assemblies aren't strong named](https://github.com/adamhathcock/sharpcompress/issues/158)
* [Pkware encryption for Zip files didn't allow for multiple reads of an entry](https://github.com/adamhathcock/sharpcompress/issues/197)
* [GZip Entry couldn't be read multiple times](https://github.com/adamhathcock/sharpcompress/issues/198)
### Version 0.14.0
* [Support for LZip reading in for Tars](https://github.com/adamhathcock/sharpcompress/pull/191)
### Version 0.13.1
* [Fix null password on ReaderFactory. Fix null options on SevenZipArchive](https://github.com/adamhathcock/sharpcompress/pull/188)
* [Make PpmdProperties lazy to avoid unnecessary allocations.](https://github.com/adamhathcock/sharpcompress/pull/185)
### Version 0.13.0
* Breaking change: Big refactor of Options on API.
* 7Zip supports Deflate
### Version 0.12.4
* Forward only zip issue fix https://github.com/adamhathcock/sharpcompress/issues/160
* Try to fix frameworks again by copying targets from JSON.NET
### Version 0.12.3
* 7Zip fixes https://github.com/adamhathcock/sharpcompress/issues/73
* Maybe all profiles will work with project.json now
### Version 0.12.2
* Support Profile 259 again
### Version 0.12.1
* Support Silverlight 5
### Version 0.12.0
* .NET Core RTM!
* Bug fix for Tar long paths
### Version 0.11.6
* Bug fix for global header in Tar
* Writers now have a leaveOpen `bool` overload. They won't close streams if not-requested to.
### Version 0.11.5
* Bug fix in Skip method
### Version 0.11.4
* SharpCompress is now endian neutral (matters for Mono platforms)
* Fix for Inflate (need to change implementation)
* Fixes for RAR detection
### Version 0.11.1
* Added Cancel on IReader
* Removed .NET 2.0 support and LinqBridge dependency
### Version 0.11
* Been over a year, contains mainly fixes from contributors!
* Possible breaking change: ArchiveEncoding is UTF8 by default now.
* TAR supports writing long names using longlink
* RAR Protect Header added
### Version 0.10.3
* Finally fixed Disposal issue when creating a new archive with the Archive API
### Version 0.10.2
* Fixed Rar Header reading for invalid extended time headers.
* Windows Store assembly is now strong named
* Known issues with Long Tar names being worked on
* Updated to VS2013
* Portable targets SL5 and Windows Phone 8 (up from SL4 and WP7)
### Version 0.10.1
* Fixed 7Zip extraction performance problem
### Version 0.10:
* Added support for RAR Decryption (thanks to https://github.com/hrasyid)
* Embedded some BouncyCastle crypto classes to allow RAR Decryption and Winzip AES Decryption in Portable and Windows Store DLLs
* Built in Release (I think)
## Notes
XZ implementation based on: https://github.com/sambott/XZ.NET by @sambott

View File

@@ -113,38 +113,26 @@ using (var archive = RarArchive.Open("Test.rar"))
}
```
-### Extract solid Rar or 7Zip archives with manual progress reporting
+### Extract solid Rar or 7Zip archives with progress reporting
 `ExtractAllEntries` only works for solid archives (Rar) or 7Zip archives. For optimal performance with these archive types, use this method:
 ```C#
-using (var archive = RarArchive.Open("archive.rar")) // Must be solid Rar or 7Zip
-{
-    if (archive.IsSolid || archive.Type == ArchiveType.SevenZip)
-    {
-        // Calculate total size for progress reporting
-        double totalSize = archive.Entries.Where(e => !e.IsDirectory).Sum(e => e.Size);
-        long completed = 0;
-        using (var reader = archive.ExtractAllEntries())
-        {
-            while (reader.MoveToNextEntry())
-            {
-                if (!reader.Entry.IsDirectory)
-                {
-                    reader.WriteEntryToDirectory(@"D:\output", new ExtractionOptions()
-                    {
-                        ExtractFullPath = true,
-                        Overwrite = true
-                    });
-                    completed += reader.Entry.Size;
-                    double progress = completed / totalSize;
-                    Console.WriteLine($"Progress: {progress:P}");
-                }
-            }
-        }
-    }
-}
+using SharpCompress.Common;
+using SharpCompress.Readers;
+var progress = new Progress<ProgressReport>(report =>
+{
+    Console.WriteLine($"Extracting {report.EntryPath}: {report.PercentComplete}%");
+});
+using (var archive = RarArchive.Open("archive.rar", new ReaderOptions { Progress = progress })) // Must be solid Rar or 7Zip
+{
+    archive.WriteToDirectory(@"D:\output", new ExtractionOptions()
+    {
+        ExtractFullPath = true,
+        Overwrite = true
+    });
+}
 ```

View File

@@ -230,7 +230,7 @@ static async Task<(string version, bool isPrerelease)> GetVersion()
}
else
{
// Not tagged - create prerelease version based on next minor version
// Not tagged - create prerelease version
var allTags = (await GetGitOutput("tag", "--list"))
.Split('\n', StringSplitOptions.RemoveEmptyEntries)
.Where(tag => Regex.IsMatch(tag.Trim(), @"^\d+\.\d+\.\d+$"))
@@ -240,8 +240,22 @@ static async Task<(string version, bool isPrerelease)> GetVersion()
var lastTag = allTags.OrderBy(tag => Version.Parse(tag)).LastOrDefault() ?? "0.0.0";
var lastVersion = Version.Parse(lastTag);
// Increment minor version for next release
var nextVersion = new Version(lastVersion.Major, lastVersion.Minor + 1, 0);
// Determine version increment based on branch
var currentBranch = await GetCurrentBranch();
Version nextVersion;
if (currentBranch == "release")
{
// Release branch: increment patch version
nextVersion = new Version(lastVersion.Major, lastVersion.Minor, lastVersion.Build + 1);
Console.WriteLine($"Building prerelease for release branch (patch increment)");
}
else
{
// Master or other branches: increment minor version
nextVersion = new Version(lastVersion.Major, lastVersion.Minor + 1, 0);
Console.WriteLine($"Building prerelease for {currentBranch} branch (minor increment)");
}
// Use commit count since the last version tag if available; otherwise, fall back to total count
var revListArgs = allTags.Any() ? $"--count {lastTag}..HEAD" : "--count HEAD";
@@ -253,6 +267,28 @@ static async Task<(string version, bool isPrerelease)> GetVersion()
}
}
static async Task<string> GetCurrentBranch()
{
// In GitHub Actions, GITHUB_REF_NAME contains the branch name
var githubRefName = Environment.GetEnvironmentVariable("GITHUB_REF_NAME");
if (!string.IsNullOrEmpty(githubRefName))
{
return githubRefName;
}
// Fallback to git command for local builds
try
{
var (output, _) = await ReadAsync("git", "branch --show-current");
return output.Trim();
}
catch (Exception ex)
{
Console.WriteLine($"Warning: Could not determine current branch: {ex.Message}");
return "unknown";
}
}
static async Task<string> GetGitOutput(string command, string args)
{
try

View File

@@ -14,11 +14,45 @@
"resolved": "1.1.9",
"contentHash": "AfK5+ECWYTP7G3AAdnU8IfVj+QpGjrh9GC2mpdcJzCvtQ4pnerAGwHsxJ9D4/RnhDUz2DSzd951O/lQjQby2Sw=="
},
"Microsoft.NETFramework.ReferenceAssemblies": {
"type": "Direct",
"requested": "[1.0.3, )",
"resolved": "1.0.3",
"contentHash": "vUc9Npcs14QsyOD01tnv/m8sQUnGTGOw1BCmKcv77LBJY7OxhJ+zJF7UD/sCL3lYNFuqmQEVlkfS4Quif6FyYg==",
"dependencies": {
"Microsoft.NETFramework.ReferenceAssemblies.net461": "1.0.3"
}
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[10.0.102, )",
"resolved": "10.0.102",
"contentHash": "Oxq3RCIJSdtpIU4hLqO7XaDe/Ra3HS9Wi8rJl838SAg6Zu1iQjerA0+xXWBgUFYbgknUGCLOU0T+lzMLkvY9Qg==",
"dependencies": {
"Microsoft.Build.Tasks.Git": "10.0.102",
"Microsoft.SourceLink.Common": "10.0.102"
}
},
"SimpleExec": {
"type": "Direct",
"requested": "[13.0.0, )",
"resolved": "13.0.0",
"contentHash": "zcCR1pupa1wI1VqBULRiQKeHKKZOuJhi/K+4V5oO+rHJZlaOD53ViFo1c3PavDoMAfSn/FAXGAWpPoF57rwhYg=="
},
"Microsoft.Build.Tasks.Git": {
"type": "Transitive",
"resolved": "10.0.102",
"contentHash": "0i81LYX31U6UiXz4NOLbvc++u+/mVDmOt+PskrM/MygpDxkv9THKQyRUmavBpLK6iBV0abNWnn+CQgSRz//Pwg=="
},
"Microsoft.NETFramework.ReferenceAssemblies.net461": {
"type": "Transitive",
"resolved": "1.0.3",
"contentHash": "AmOJZwCqnOCNp6PPcf9joyogScWLtwy0M1WkqfEQ0M9nYwyDD7EX9ZjscKS5iYnyvteX7kzSKFCKt9I9dXA6mA=="
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "10.0.102",
"contentHash": "Mk1IMb9q5tahC2NltxYXFkLBtuBvfBoCQ3pIxYQWfzbCE9o1OB9SsHe0hnNGo7lWgTA/ePbFAJLWu6nLL9K17A=="
}
}
}

View File

@@ -166,22 +166,14 @@ public static class ArchiveFactory
);
}
public static bool IsArchive(
string filePath,
out ArchiveType? type,
int bufferSize = ReaderOptions.DefaultBufferSize
)
public static bool IsArchive(string filePath, out ArchiveType? type)
{
filePath.NotNullOrEmpty(nameof(filePath));
using Stream s = File.OpenRead(filePath);
return IsArchive(s, out type, bufferSize);
return IsArchive(s, out type);
}
public static bool IsArchive(
Stream stream,
out ArchiveType? type,
int bufferSize = ReaderOptions.DefaultBufferSize
)
public static bool IsArchive(Stream stream, out ArchiveType? type)
{
type = null;
stream.NotNull(nameof(stream));

View File

@@ -14,11 +14,8 @@ class AutoArchiveFactory : IArchiveFactory
public IEnumerable<string> GetSupportedExtensions() => throw new NotSupportedException();
public bool IsArchive(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => throw new NotSupportedException();
public bool IsArchive(Stream stream, string? password = null) =>
throw new NotSupportedException();
public FileInfo? GetFilePart(int index, FileInfo part1) => throw new NotSupportedException();

View File

@@ -9,8 +9,6 @@ namespace SharpCompress.Archives;
public static class IArchiveEntryExtensions
{
private const int BufferSize = 81920;
/// <param name="archiveEntry">The archive entry to extract.</param>
extension(IArchiveEntry archiveEntry)
{
@@ -28,7 +26,7 @@ public static class IArchiveEntryExtensions
using var entryStream = archiveEntry.OpenEntryStream();
var sourceStream = WrapWithProgress(entryStream, archiveEntry, progress);
sourceStream.CopyTo(streamToWriteTo, BufferSize);
sourceStream.CopyTo(streamToWriteTo, Constants.BufferSize);
}
/// <summary>
@@ -51,7 +49,7 @@ public static class IArchiveEntryExtensions
using var entryStream = await archiveEntry.OpenEntryStreamAsync(cancellationToken);
var sourceStream = WrapWithProgress(entryStream, archiveEntry, progress);
await sourceStream
.CopyToAsync(streamToWriteTo, BufferSize, cancellationToken)
.CopyToAsync(streamToWriteTo, Constants.BufferSize, cancellationToken)
.ConfigureAwait(false);
}
}

View File

@@ -212,10 +212,31 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
public override long TotalSize =>
_database?._packSizes.Aggregate(0L, (total, packSize) => total + packSize) ?? 0;
private sealed class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
internal sealed class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
{
private readonly SevenZipArchive _archive;
private SevenZipEntry? _currentEntry;
private Stream? _currentFolderStream;
private CFolder? _currentFolder;
/// <summary>
/// Enables internal diagnostics for tests.
/// When disabled (default), diagnostics properties return null to avoid exposing internal state.
/// </summary>
internal bool DiagnosticsEnabled { get; set; }
/// <summary>
/// Current folder instance used to decide whether the solid folder stream should be reused.
/// Only available when <see cref="DiagnosticsEnabled"/> is true.
/// </summary>
internal object? DiagnosticsCurrentFolder => DiagnosticsEnabled ? _currentFolder : null;
/// <summary>
/// Current shared folder stream instance.
/// Only available when <see cref="DiagnosticsEnabled"/> is true.
/// </summary>
internal Stream? DiagnosticsCurrentFolderStream =>
DiagnosticsEnabled ? _currentFolderStream : null;
internal SevenZipReader(ReaderOptions readerOptions, SevenZipArchive archive)
: base(readerOptions, ArchiveType.SevenZip) => this._archive = archive;
@@ -231,9 +252,10 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
_currentEntry = dir;
yield return dir;
}
// For non-directory entries, yield them without creating shared streams
// Each call to GetEntryStream() will create a fresh decompression stream
// to avoid state corruption issues with async operations
// For solid archives (entries in the same folder share a compressed stream),
// we must iterate entries sequentially and maintain the folder stream state
// across entries in the same folder to avoid recreating the decompression
// stream for each file, which breaks contiguous streaming.
foreach (var entry in entries.Where(x => !x.IsDirectory))
{
_currentEntry = entry;
@@ -243,19 +265,53 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
protected override EntryStream GetEntryStream()
{
// Create a fresh decompression stream for each file (no state sharing).
// However, the LZMA decoder has bugs in its async implementation that cause
// state corruption even on fresh streams. The SyncOnlyStream wrapper
// works around these bugs by forcing async operations to use sync equivalents.
//
// TODO: Fix the LZMA decoder async bugs (in LzmaStream, Decoder, OutWindow)
// so this wrapper is no longer necessary.
var entry = _currentEntry.NotNull("currentEntry is not null");
if (entry.IsDirectory)
{
return CreateEntryStream(Stream.Null);
}
return CreateEntryStream(new SyncOnlyStream(entry.FilePart.GetCompressedStream()));
var filePart = (SevenZipFilePart)entry.FilePart;
if (!filePart.Header.HasStream)
{
// Entries with no underlying stream (e.g., empty files or anti-items)
// should return an empty stream, matching previous behavior.
return CreateEntryStream(Stream.Null);
}
var folder = filePart.Folder;
// Check if we're starting a new folder - dispose old folder stream if needed
if (folder != _currentFolder)
{
_currentFolderStream?.Dispose();
_currentFolderStream = null;
_currentFolder = folder;
}
// Create the folder stream once per folder
if (_currentFolderStream is null)
{
_currentFolderStream = _archive._database!.GetFolderStream(
_archive.Volumes.Single().Stream,
folder!,
_archive._database.PasswordProvider
);
}
// Wrap with SyncOnlyStream to work around LZMA async bugs
// Return a ReadOnlySubStream that reads from the shared folder stream
return CreateEntryStream(
new SyncOnlyStream(
new ReadOnlySubStream(_currentFolderStream, entry.Size, leaveOpen: true)
)
);
}
public override void Dispose()
{
_currentFolderStream?.Dispose();
_currentFolderStream = null;
base.Dispose();
}
}
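With the reader holding one shared folder stream per solid folder, the intended contiguous behavior comes through the existing Archive API; a typical usage sketch (paths are illustrative):

```csharp
using SharpCompress.Archives.SevenZip;
using SharpCompress.Common;
using SharpCompress.Readers;

using (var archive = SevenZipArchive.Open("archive.7z"))
using (var reader = archive.ExtractAllEntries())
{
    // Entries in the same solid folder are decoded from one shared stream,
    // so they must be consumed in order, which MoveToNextEntry guarantees.
    while (reader.MoveToNextEntry())
    {
        if (!reader.Entry.IsDirectory)
        {
            reader.WriteEntryToDirectory(
                @"D:\output",
                new ExtractionOptions { ExtractFullPath = true, Overwrite = true });
        }
    }
}
```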

View File

@@ -180,7 +180,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
using (var entryStream = entry.OpenEntryStream())
{
using var memoryStream = new MemoryStream();
entryStream.CopyTo(memoryStream);
entryStream.CopyTo(memoryStream, Constants.BufferSize);
memoryStream.Position = 0;
var bytes = memoryStream.ToArray();

View File

@@ -124,38 +124,27 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
);
}
public static bool IsZipFile(
string filePath,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => IsZipFile(new FileInfo(filePath), password, bufferSize);
public static bool IsZipFile(string filePath, string? password = null) =>
IsZipFile(new FileInfo(filePath), password);
public static bool IsZipFile(
FileInfo fileInfo,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
)
public static bool IsZipFile(FileInfo fileInfo, string? password = null)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return IsZipFile(stream, password, bufferSize);
return IsZipFile(stream, password);
}
public static bool IsZipFile(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
)
public static bool IsZipFile(Stream stream, string? password = null)
{
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
stream = new SharpCompressStream(stream, bufferSize: Constants.BufferSize);
}
var header = headerFactory
@@ -177,18 +166,14 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
}
}
public static bool IsZipMulti(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
)
public static bool IsZipMulti(Stream stream, string? password = null)
{
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
stream = new SharpCompressStream(stream, bufferSize: Constants.BufferSize);
}
var header = headerFactory
@@ -229,7 +214,7 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
if (streams.Count() > 1) //test part 2 - true = multipart not split
{
streams[1].Position += 4; //skip the POST_DATA_DESCRIPTOR to prevent an exception
var isZip = IsZipFile(streams[1], ReaderOptions.Password, ReaderOptions.BufferSize);
var isZip = IsZipFile(streams[1], ReaderOptions.Password);
streams[1].Position -= 4;
if (isZip)
{

View File

@@ -0,0 +1,10 @@
namespace SharpCompress.Common;
public static class Constants
{
/// <summary>
/// The default buffer size for stream operations, matching .NET's Stream.CopyTo default of 81920 bytes.
/// This can be modified globally at runtime.
/// </summary>
public static int BufferSize { get; set; } = 81920;
}
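Because the property is settable, the copy-buffer size can be tuned once at startup rather than passed per call; for example:

```csharp
using SharpCompress.Common;

// One process-wide knob replaces the removed per-call bufferSize parameters;
// the default stays at 81920 bytes, matching Stream.CopyTo.
Constants.BufferSize = 256 * 1024;
```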

View File

@@ -55,7 +55,7 @@ internal class SevenZipFilePart : FilePart
{
folderStream.Skip(skipSize);
}
return new ReadOnlySubStream(folderStream, Header.Size);
return new ReadOnlySubStream(folderStream, Header.Size, leaveOpen: false);
}
public CompressionType CompressionType

View File

@@ -15,10 +15,6 @@ internal enum ExtraDataType : ushort
UnicodePathExtraField = 0x7075,
Zip64ExtendedInformationExtraField = 0x0001,
UnixTimeExtraField = 0x5455,
// SOZip (Seek-Optimized ZIP) extra field
// Used to link a main file to its SOZip index file
SOZip = 0x564B,
}
internal class ExtraData
@@ -237,44 +233,6 @@ internal sealed class UnixTimeExtraField : ExtraData
}
}
/// <summary>
/// SOZip (Seek-Optimized ZIP) extra field that links a main file to its index file.
/// The extra field contains the offset within the ZIP file where the index entry's
/// local header is located.
/// </summary>
internal sealed class SOZipExtraField : ExtraData
{
public SOZipExtraField(ExtraDataType type, ushort length, byte[] dataBytes)
: base(type, length, dataBytes) { }
/// <summary>
/// Gets the offset to the SOZip index file's local entry header within the ZIP archive.
/// </summary>
internal ulong IndexOffset
{
get
{
if (DataBytes is null || DataBytes.Length < 8)
{
return 0;
}
return BinaryPrimitives.ReadUInt64LittleEndian(DataBytes);
}
}
/// <summary>
/// Creates a SOZip extra field with the specified index offset
/// </summary>
/// <param name="indexOffset">The offset to the index file's local entry header</param>
/// <returns>A new SOZipExtraField instance</returns>
public static SOZipExtraField Create(ulong indexOffset)
{
var data = new byte[8];
BinaryPrimitives.WriteUInt64LittleEndian(data, indexOffset);
return new SOZipExtraField(ExtraDataType.SOZip, 8, data);
}
}
internal static class LocalEntryHeaderExtraFactory
{
internal static ExtraData Create(ExtraDataType type, ushort length, byte[] extraData) =>
@@ -288,7 +246,6 @@ internal static class LocalEntryHeaderExtraFactory
ExtraDataType.Zip64ExtendedInformationExtraField =>
new Zip64ExtendedInformationExtraField(type, length, extraData),
ExtraDataType.UnixTimeExtraField => new UnixTimeExtraField(type, length, extraData),
ExtraDataType.SOZip => new SOZipExtraField(type, length, extraData),
_ => new ExtraData(type, length, extraData),
};
}

View File

@@ -1,150 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;
namespace SharpCompress.Common.Zip.SOZip;
/// <summary>
/// A Deflate stream that inserts sync flush points at regular intervals
/// to enable random access (SOZip optimization).
/// </summary>
internal sealed class SOZipDeflateStream : Stream
{
private readonly DeflateStream _deflateStream;
private readonly Stream _baseStream;
private readonly uint _chunkSize;
private readonly List<ulong> _compressedOffsets = new();
private readonly long _baseOffset;
private long _uncompressedBytesWritten;
private long _nextSyncPoint;
private bool _disposed;
/// <summary>
/// Creates a new SOZip Deflate stream
/// </summary>
/// <param name="baseStream">The underlying stream to write to</param>
/// <param name="compressionLevel">The compression level</param>
/// <param name="chunkSize">The chunk size for sync flush points</param>
public SOZipDeflateStream(Stream baseStream, CompressionLevel compressionLevel, int chunkSize)
{
_baseStream = baseStream;
_chunkSize = (uint)chunkSize;
_baseOffset = baseStream.Position;
_nextSyncPoint = chunkSize;
// Record the first offset (start of compressed data)
_compressedOffsets.Add(0);
_deflateStream = new DeflateStream(baseStream, CompressionMode.Compress, compressionLevel);
}
/// <summary>
/// Gets the array of compressed offsets recorded during writing
/// </summary>
public ulong[] CompressedOffsets => _compressedOffsets.ToArray();
/// <summary>
/// Gets the total number of uncompressed bytes written
/// </summary>
public ulong UncompressedBytesWritten => (ulong)_uncompressedBytesWritten;
/// <summary>
/// Gets the total number of compressed bytes written
/// </summary>
public ulong CompressedBytesWritten => (ulong)(_baseStream.Position - _baseOffset);
/// <summary>
/// Gets the chunk size being used
/// </summary>
public uint ChunkSize => _chunkSize;
public override bool CanRead => false;
public override bool CanSeek => false;
public override bool CanWrite => !_disposed && _deflateStream.CanWrite;
public override long Length => throw new NotSupportedException();
public override long Position
{
get => throw new NotSupportedException();
set => throw new NotSupportedException();
}
public override void Flush() => _deflateStream.Flush();
public override int Read(byte[] buffer, int offset, int count) =>
throw new NotSupportedException();
public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
public override void SetLength(long value) => throw new NotSupportedException();
public override void Write(byte[] buffer, int offset, int count)
{
if (_disposed)
{
throw new ObjectDisposedException(nameof(SOZipDeflateStream));
}
var remaining = count;
var currentOffset = offset;
while (remaining > 0)
{
// Calculate how many bytes until the next sync point
var bytesUntilSync = (int)(_nextSyncPoint - _uncompressedBytesWritten);
if (bytesUntilSync <= 0)
{
// We've reached a sync point - perform sync flush
PerformSyncFlush();
continue;
}
// Write up to the next sync point
var bytesToWrite = Math.Min(remaining, bytesUntilSync);
_deflateStream.Write(buffer, currentOffset, bytesToWrite);
_uncompressedBytesWritten += bytesToWrite;
currentOffset += bytesToWrite;
remaining -= bytesToWrite;
}
}
private void PerformSyncFlush()
{
// Flush with Z_SYNC_FLUSH to create an independent block
var originalFlushMode = _deflateStream.FlushMode;
_deflateStream.FlushMode = FlushType.Sync;
_deflateStream.Flush();
_deflateStream.FlushMode = originalFlushMode;
// Record the compressed offset for this sync point
var compressedOffset = (ulong)(_baseStream.Position - _baseOffset);
_compressedOffsets.Add(compressedOffset);
// Set the next sync point
_nextSyncPoint += _chunkSize;
}
protected override void Dispose(bool disposing)
{
if (_disposed)
{
return;
}
_disposed = true;
if (disposing)
{
_deflateStream.Dispose();
}
base.Dispose(disposing);
}
}

View File

@@ -1,367 +0,0 @@
using System;
using System.Buffers.Binary;
using System.IO;
namespace SharpCompress.Common.Zip.SOZip;
/// <summary>
/// Represents a SOZip (Seek-Optimized ZIP) index that enables random access
/// within DEFLATE-compressed files by storing offsets to sync flush points.
/// </summary>
/// <remarks>
/// SOZip index files (.sozip.idx) contain a header followed by offset entries
/// that point to the beginning of independently decompressable DEFLATE blocks.
/// </remarks>
[CLSCompliant(false)]
public sealed class SOZipIndex
{
/// <summary>
/// SOZip index file magic number: "SOZo" (0x534F5A6F)
/// </summary>
public const uint SOZIP_MAGIC = 0x6F5A4F53; // "SOZo" little-endian
/// <summary>
/// Current SOZip specification version
/// </summary>
public const byte SOZIP_VERSION = 1;
/// <summary>
/// Index file extension suffix
/// </summary>
public const string INDEX_EXTENSION = ".sozip.idx";
/// <summary>
/// Default chunk size in bytes (32KB)
/// </summary>
public const uint DEFAULT_CHUNK_SIZE = 32768;
/// <summary>
/// The version of the SOZip index format
/// </summary>
public byte Version { get; private set; }
/// <summary>
/// Size of each uncompressed chunk in bytes
/// </summary>
public uint ChunkSize { get; private set; }
/// <summary>
/// Total uncompressed size of the file
/// </summary>
public ulong UncompressedSize { get; private set; }
/// <summary>
/// Total compressed size of the file
/// </summary>
public ulong CompressedSize { get; private set; }
/// <summary>
/// Number of offset entries in the index
/// </summary>
public uint OffsetCount { get; private set; }
/// <summary>
/// Array of compressed offsets for each chunk
/// </summary>
public ulong[] CompressedOffsets { get; private set; } = Array.Empty<ulong>();
/// <summary>
/// Creates a new empty SOZip index
/// </summary>
public SOZipIndex() { }
/// <summary>
/// Creates a new SOZip index with specified parameters
/// </summary>
/// <param name="chunkSize">Size of each uncompressed chunk</param>
/// <param name="uncompressedSize">Total uncompressed size</param>
/// <param name="compressedSize">Total compressed size</param>
/// <param name="compressedOffsets">Array of compressed offsets</param>
public SOZipIndex(
uint chunkSize,
ulong uncompressedSize,
ulong compressedSize,
ulong[] compressedOffsets
)
{
Version = SOZIP_VERSION;
ChunkSize = chunkSize;
UncompressedSize = uncompressedSize;
CompressedSize = compressedSize;
OffsetCount = (uint)compressedOffsets.Length;
CompressedOffsets = compressedOffsets;
}
/// <summary>
/// Reads a SOZip index from a stream
/// </summary>
/// <param name="stream">The stream containing the index data</param>
/// <returns>A parsed SOZipIndex instance</returns>
/// <exception cref="InvalidDataException">If the stream doesn't contain valid SOZip index data</exception>
public static SOZipIndex Read(Stream stream)
{
var index = new SOZipIndex();
Span<byte> header = stackalloc byte[4];
// Read magic number
if (stream.Read(header) != 4)
{
throw new InvalidDataException("Invalid SOZip index: unable to read magic number");
}
var magic = BinaryPrimitives.ReadUInt32LittleEndian(header);
if (magic != SOZIP_MAGIC)
{
throw new InvalidDataException(
$"Invalid SOZip index: magic number mismatch (expected 0x{SOZIP_MAGIC:X8}, got 0x{magic:X8})"
);
}
// Read version
var versionByte = stream.ReadByte();
if (versionByte < 0)
{
throw new InvalidDataException("Invalid SOZip index: unable to read version");
}
index.Version = (byte)versionByte;
if (index.Version != SOZIP_VERSION)
{
throw new InvalidDataException(
$"Unsupported SOZip index version: {index.Version} (expected {SOZIP_VERSION})"
);
}
// Read reserved byte (padding)
stream.ReadByte();
// Read chunk size (2 bytes)
Span<byte> buf2 = stackalloc byte[2];
if (stream.Read(buf2) != 2)
{
throw new InvalidDataException("Invalid SOZip index: unable to read chunk size");
}
// Chunk size is stored as (actual_size / 1024) - 1
var chunkSizeEncoded = BinaryPrimitives.ReadUInt16LittleEndian(buf2);
index.ChunkSize = ((uint)chunkSizeEncoded + 1) * 1024;
// Read uncompressed size (8 bytes)
Span<byte> buf8 = stackalloc byte[8];
if (stream.Read(buf8) != 8)
{
throw new InvalidDataException("Invalid SOZip index: unable to read uncompressed size");
}
index.UncompressedSize = BinaryPrimitives.ReadUInt64LittleEndian(buf8);
// Read compressed size (8 bytes)
if (stream.Read(buf8) != 8)
{
throw new InvalidDataException("Invalid SOZip index: unable to read compressed size");
}
index.CompressedSize = BinaryPrimitives.ReadUInt64LittleEndian(buf8);
// Read offset count (4 bytes)
if (stream.Read(header) != 4)
{
throw new InvalidDataException("Invalid SOZip index: unable to read offset count");
}
index.OffsetCount = BinaryPrimitives.ReadUInt32LittleEndian(header);
// Read offsets
index.CompressedOffsets = new ulong[index.OffsetCount];
for (uint i = 0; i < index.OffsetCount; i++)
{
if (stream.Read(buf8) != 8)
{
throw new InvalidDataException($"Invalid SOZip index: unable to read offset {i}");
}
index.CompressedOffsets[i] = BinaryPrimitives.ReadUInt64LittleEndian(buf8);
}
return index;
}
/// <summary>
/// Reads a SOZip index from a byte array
/// </summary>
/// <param name="data">The byte array containing the index data</param>
/// <returns>A parsed SOZipIndex instance</returns>
public static SOZipIndex Read(byte[] data)
{
using var stream = new MemoryStream(data);
return Read(stream);
}
/// <summary>
/// Writes this SOZip index to a stream
/// </summary>
/// <param name="stream">The stream to write to</param>
public void Write(Stream stream)
{
Span<byte> buf8 = stackalloc byte[8];
// Write magic number
BinaryPrimitives.WriteUInt32LittleEndian(buf8, SOZIP_MAGIC);
stream.Write(buf8.Slice(0, 4));
// Write version
stream.WriteByte(SOZIP_VERSION);
// Write reserved byte (padding)
stream.WriteByte(0);
// Write chunk size (encoded as (size/1024)-1)
var chunkSizeEncoded = (ushort)((ChunkSize / 1024) - 1);
BinaryPrimitives.WriteUInt16LittleEndian(buf8, chunkSizeEncoded);
stream.Write(buf8.Slice(0, 2));
// Write uncompressed size
BinaryPrimitives.WriteUInt64LittleEndian(buf8, UncompressedSize);
stream.Write(buf8);
// Write compressed size
BinaryPrimitives.WriteUInt64LittleEndian(buf8, CompressedSize);
stream.Write(buf8);
// Write offset count
BinaryPrimitives.WriteUInt32LittleEndian(buf8, OffsetCount);
stream.Write(buf8.Slice(0, 4));
// Write offsets
foreach (var offset in CompressedOffsets)
{
BinaryPrimitives.WriteUInt64LittleEndian(buf8, offset);
stream.Write(buf8);
}
}
/// <summary>
/// Converts this SOZip index to a byte array
/// </summary>
/// <returns>Byte array containing the serialized index</returns>
public byte[] ToByteArray()
{
using var stream = new MemoryStream();
Write(stream);
return stream.ToArray();
}
/// <summary>
/// Gets the index of the chunk that contains the specified uncompressed offset
/// </summary>
/// <param name="uncompressedOffset">The uncompressed byte offset</param>
/// <returns>The chunk index</returns>
public int GetChunkIndex(long uncompressedOffset)
{
if (uncompressedOffset < 0 || (ulong)uncompressedOffset >= UncompressedSize)
{
throw new ArgumentOutOfRangeException(
nameof(uncompressedOffset),
"Offset is out of range"
);
}
return (int)((ulong)uncompressedOffset / ChunkSize);
}
/// <summary>
/// Gets the compressed offset for the specified chunk index
/// </summary>
/// <param name="chunkIndex">The chunk index</param>
/// <returns>The compressed byte offset for the start of the chunk</returns>
public ulong GetCompressedOffset(int chunkIndex)
{
if (chunkIndex < 0 || chunkIndex >= CompressedOffsets.Length)
{
throw new ArgumentOutOfRangeException(
nameof(chunkIndex),
"Chunk index is out of range"
);
}
return CompressedOffsets[chunkIndex];
}
/// <summary>
/// Gets the uncompressed offset for the start of the specified chunk
/// </summary>
/// <param name="chunkIndex">The chunk index</param>
/// <returns>The uncompressed byte offset for the start of the chunk</returns>
public ulong GetUncompressedOffset(int chunkIndex)
{
if (chunkIndex < 0 || chunkIndex >= CompressedOffsets.Length)
{
throw new ArgumentOutOfRangeException(
nameof(chunkIndex),
"Chunk index is out of range"
);
}
return (ulong)chunkIndex * ChunkSize;
}
/// <summary>
/// Gets the name of the SOZip index file for a given entry name
/// </summary>
/// <param name="entryName">The main entry name</param>
/// <returns>The index file name (hidden with .sozip.idx extension)</returns>
public static string GetIndexFileName(string entryName)
{
var directory = Path.GetDirectoryName(entryName);
var fileName = Path.GetFileName(entryName);
// The index file is hidden (prefixed with .)
var indexFileName = $".{fileName}{INDEX_EXTENSION}";
if (string.IsNullOrEmpty(directory))
{
return indexFileName;
}
return Path.Combine(directory, indexFileName).Replace('\\', '/');
}
/// <summary>
/// Checks if a file name is a SOZip index file
/// </summary>
/// <param name="fileName">The file name to check</param>
/// <returns>True if the file is a SOZip index file</returns>
public static bool IsIndexFile(string fileName)
{
if (string.IsNullOrEmpty(fileName))
{
return false;
}
var name = Path.GetFileName(fileName);
return name.StartsWith(".", StringComparison.Ordinal)
&& name.EndsWith(INDEX_EXTENSION, StringComparison.OrdinalIgnoreCase);
}
/// <summary>
/// Gets the main file name from a SOZip index file name
/// </summary>
/// <param name="indexFileName">The index file name</param>
/// <returns>The main file name, or null if not a valid index file</returns>
public static string? GetMainFileName(string indexFileName)
{
if (!IsIndexFile(indexFileName))
{
return null;
}
var directory = Path.GetDirectoryName(indexFileName);
var name = Path.GetFileName(indexFileName);
// Remove leading '.' and trailing '.sozip.idx'
var mainName = name.Substring(1, name.Length - 1 - INDEX_EXTENSION.Length);
if (string.IsNullOrEmpty(directory))
{
return mainName;
}
return Path.Combine(directory, mainName).Replace('\\', '/');
}
}
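Putting the pieces together: the serialized layout is a 28-byte header (4-byte magic, version byte, reserved byte, 2-byte encoded chunk size, two 8-byte sizes, 4-byte offset count) followed by one 8-byte offset per chunk. A minimal round-trip sketch, assuming the SOZipIndex(chunkSize, uncompressedSize, compressedSize, compressedOffsets) constructor shape that appears later in this changeset:

var index = new SOZipIndex(
    chunkSize: 32768,                           // must be a multiple of 1024
    uncompressedSize: 65536,                    // two 32 KiB chunks
    compressedSize: 1200,
    compressedOffsets: new ulong[] { 0, 600 }   // compressed start of each chunk
);
var bytes = index.ToByteArray();                // 28-byte header + 2 * 8 offset bytes
var parsed = SOZipIndex.Read(bytes);
var chunk = parsed.GetChunkIndex(40_000);       // 40_000 / 32768 => chunk 1
var offset = parsed.GetCompressedOffset(chunk); // => 600
var idxName = SOZipIndex.GetIndexFileName("data.bin"); // => ".data.bin.sozip.idx"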

View File

@@ -20,7 +20,7 @@ internal class WinzipAesEncryptionData
{
_keySize = keySize;
#if NETFRAMEWORK
#if NETFRAMEWORK || NETSTANDARD2_0
var rfc2898 = new Rfc2898DeriveBytes(password, salt, RFC2898_ITERATIONS);
KeyBytes = rfc2898.GetBytes(KeySizeInBytes);
IvBytes = rfc2898.GetBytes(KeySizeInBytes);

View File

@@ -2,7 +2,6 @@ using System;
using System.Collections.Generic;
using System.Linq;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Common.Zip.SOZip;
namespace SharpCompress.Common.Zip;
@@ -12,7 +11,7 @@ public class ZipEntry : Entry
internal ZipEntry(ZipFilePart? filePart)
{
if (filePart is null)
if (filePart == null)
{
return;
}
@@ -89,24 +88,4 @@ public class ZipEntry : Entry
public override int? Attrib => (int?)_filePart?.Header.ExternalFileAttributes;
public string? Comment => _filePart?.Header.Comment;
/// <summary>
/// Gets a value indicating whether this entry has SOZip (Seek-Optimized ZIP) support.
/// A SOZip entry has an associated index file that enables random access within
/// the compressed data.
/// </summary>
public bool IsSozip => _filePart?.Header.Extra.Any(e => e.Type == ExtraDataType.SOZip) ?? false;
/// <summary>
/// Gets a value indicating whether this entry is a SOZip index file.
/// Index files are hidden files with a .sozip.idx extension that contain
/// offsets into the main compressed file.
/// </summary>
public bool IsSozipIndexFile => Key is not null && SOZipIndex.IsIndexFile(Key);
/// <summary>
/// Gets the SOZip extra field data, if present.
/// </summary>
internal SOZipExtraField? SOZipExtra =>
_filePart?.Header.Extra.OfType<SOZipExtraField>().FirstOrDefault();
}

View File

@@ -30,6 +30,7 @@ public sealed class BZip2Stream : Stream, IStreamStack
private readonly Stream stream;
private bool isDisposed;
private readonly bool leaveOpen;
/// <summary>
/// Create a BZip2Stream
@@ -37,19 +38,30 @@ public sealed class BZip2Stream : Stream, IStreamStack
/// <param name="stream">The stream to read from</param>
/// <param name="compressionMode">Compression Mode</param>
/// <param name="decompressConcatenated">Decompress Concatenated</param>
public BZip2Stream(Stream stream, CompressionMode compressionMode, bool decompressConcatenated)
/// <param name="leaveOpen">Leave the stream open after disposing</param>
public BZip2Stream(
Stream stream,
CompressionMode compressionMode,
bool decompressConcatenated,
bool leaveOpen = false
)
{
#if DEBUG_STREAMS
this.DebugConstruct(typeof(BZip2Stream));
#endif
this.leaveOpen = leaveOpen;
Mode = compressionMode;
if (Mode == CompressionMode.Compress)
{
this.stream = new CBZip2OutputStream(stream);
this.stream = new CBZip2OutputStream(stream, 9, leaveOpen);
}
else
{
this.stream = new CBZip2InputStream(stream, decompressConcatenated);
this.stream = new CBZip2InputStream(
stream,
decompressConcatenated,
leaveOpen: leaveOpen
);
}
}
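The practical effect of the new parameter, as a short sketch: with leaveOpen: true, disposing the BZip2Stream no longer disposes the stream it wraps.

using System.IO;
using SharpCompress.Compressors;
using SharpCompress.Compressors.BZip2;

using var backing = new MemoryStream();
using (var bz = new BZip2Stream(backing, CompressionMode.Compress, decompressConcatenated: false, leaveOpen: true))
{
    bz.Write(new byte[] { 1, 2, 3 }, 0, 3);
}
backing.Position = 0; // backing is still open and can be re-read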

View File

@@ -168,6 +168,7 @@ internal class CBZip2InputStream : Stream, IStreamStack
private int computedBlockCRC,
computedCombinedCRC;
private readonly bool decompressConcatenated;
private readonly bool leaveOpen;
private int i2,
count,
@@ -181,9 +182,10 @@ internal class CBZip2InputStream : Stream, IStreamStack
private char z;
private bool isDisposed;
public CBZip2InputStream(Stream zStream, bool decompressConcatenated)
public CBZip2InputStream(Stream zStream, bool decompressConcatenated, bool leaveOpen = false)
{
this.decompressConcatenated = decompressConcatenated;
this.leaveOpen = leaveOpen;
ll8 = null;
tt = null;
BsSetStream(zStream);
@@ -207,7 +209,10 @@ internal class CBZip2InputStream : Stream, IStreamStack
this.DebugDispose(typeof(CBZip2InputStream));
#endif
base.Dispose(disposing);
bsStream?.Dispose();
if (!leaveOpen)
{
bsStream?.Dispose();
}
}
internal static int[][] InitIntArray(int n1, int n2)
@@ -398,7 +403,10 @@ internal class CBZip2InputStream : Stream, IStreamStack
private void BsFinishedWithStream()
{
bsStream?.Dispose();
if (!leaveOpen)
{
bsStream?.Dispose();
}
bsStream = null;
}

View File

@@ -341,12 +341,14 @@ internal sealed class CBZip2OutputStream : Stream, IStreamStack
private int currentChar = -1;
private int runLength;
private readonly bool leaveOpen;
public CBZip2OutputStream(Stream inStream)
: this(inStream, 9) { }
public CBZip2OutputStream(Stream inStream, bool leaveOpen = false)
: this(inStream, 9, leaveOpen) { }
public CBZip2OutputStream(Stream inStream, int inBlockSize)
public CBZip2OutputStream(Stream inStream, int inBlockSize, bool leaveOpen = false)
{
this.leaveOpen = leaveOpen;
block = null;
quadrant = null;
zptr = null;
@@ -481,7 +483,10 @@ internal sealed class CBZip2OutputStream : Stream, IStreamStack
this.DebugDispose(typeof(CBZip2OutputStream));
#endif
Dispose();
bsStream?.Dispose();
if (!leaveOpen)
{
bsStream?.Dispose();
}
bsStream = null;
}
}

View File

@@ -586,7 +586,13 @@ internal class ZlibBaseStream : Stream, IStreamStack
public override void Flush()
{
_stream.Flush();
// Only flush the underlying stream when in write mode
// Flushing input streams during read operations is not meaningful
// and can cause issues with forward-only/non-seekable streams
if (_streamMode == StreamMode.Writer)
{
_stream.Flush();
}
//rewind the buffer
((IStreamStack)this).Rewind(z.AvailableBytesIn); //unused
z.AvailableBytesIn = 0;
@@ -594,7 +600,13 @@ internal class ZlibBaseStream : Stream, IStreamStack
public override async Task FlushAsync(CancellationToken cancellationToken)
{
await _stream.FlushAsync(cancellationToken).ConfigureAwait(false);
// Only flush the underlying stream when in write mode
// Flushing input streams during read operations is not meaningful
// and can cause issues with forward-only/non-seekable streams
if (_streamMode == StreamMode.Writer)
{
await _stream.FlushAsync(cancellationToken).ConfigureAwait(false);
}
//rewind the buffer
((IStreamStack)this).Rewind(z.AvailableBytesIn); //unused
z.AvailableBytesIn = 0;

View File

@@ -153,7 +153,7 @@ internal class OutWindow : IDisposable
_pendingLen = rem;
}
public async Task CopyPendingAsync(CancellationToken cancellationToken = default)
public async ValueTask CopyPendingAsync(CancellationToken cancellationToken = default)
{
if (_pendingLen < 1)
{
@@ -206,7 +206,7 @@ internal class OutWindow : IDisposable
_pendingDist = distance;
}
public async Task CopyBlockAsync(
public async ValueTask CopyBlockAsync(
int distance,
int len,
CancellationToken cancellationToken = default
@@ -253,7 +253,7 @@ internal class OutWindow : IDisposable
}
}
public async Task PutByteAsync(byte b, CancellationToken cancellationToken = default)
public async ValueTask PutByteAsync(byte b, CancellationToken cancellationToken = default)
{
_buffer[_pos++] = b;
_total++;
@@ -369,6 +369,28 @@ internal class OutWindow : IDisposable
return size;
}
public int Read(Memory<byte> buffer, int offset, int count)
{
if (_streamPos >= _pos)
{
return 0;
}
var size = _pos - _streamPos;
if (size > count)
{
size = count;
}
_buffer.AsMemory(_streamPos, size).CopyTo(buffer.Slice(offset, size));
_streamPos += size;
if (_streamPos >= _windowSize)
{
_pos = 0;
_streamPos = 0;
}
return size;
}
public int ReadByte()
{
if (_streamPos >= _pos)

View File

@@ -45,10 +45,14 @@ public sealed class LZipStream : Stream, IStreamStack
private bool _finished;
private long _writeCount;
private readonly Stream? _originalStream;
private readonly bool _leaveOpen;
public LZipStream(Stream stream, CompressionMode mode)
public LZipStream(Stream stream, CompressionMode mode, bool leaveOpen = false)
{
Mode = mode;
_originalStream = stream;
_leaveOpen = leaveOpen;
if (mode == CompressionMode.Decompress)
{
@@ -58,7 +62,7 @@ public sealed class LZipStream : Stream, IStreamStack
throw new InvalidFormatException("Not an LZip stream");
}
var properties = GetProperties(dSize);
_stream = new LzmaStream(properties, stream);
_stream = new LzmaStream(properties, stream, leaveOpen: leaveOpen);
}
else
{
@@ -125,6 +129,10 @@ public sealed class LZipStream : Stream, IStreamStack
{
Finish();
_stream.Dispose();
if (Mode == CompressionMode.Compress && !_leaveOpen)
{
_originalStream?.Dispose();
}
}
}

View File

@@ -3,6 +3,7 @@
using System;
using System.Diagnostics.CodeAnalysis;
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Compressors.LZMA.LZ;
using SharpCompress.Compressors.LZMA.RangeCoder;
@@ -475,7 +476,7 @@ public class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Stream
return false;
}
internal async System.Threading.Tasks.Task<bool> CodeAsync(
internal async ValueTask<bool> CodeAsync(
int dictionarySize,
OutWindow outWindow,
RangeCoder.Decoder rangeDecoder,

View File

@@ -35,6 +35,7 @@ public class LzmaStream : Stream, IStreamStack
private readonly Stream _inputStream;
private readonly long _inputSize;
private readonly long _outputSize;
private readonly bool _leaveOpen;
private readonly int _dictionarySize;
private readonly OutWindow _outWindow = new();
@@ -56,14 +57,28 @@ public class LzmaStream : Stream, IStreamStack
private readonly Encoder _encoder;
private bool _isDisposed;
public LzmaStream(byte[] properties, Stream inputStream)
: this(properties, inputStream, -1, -1, null, properties.Length < 5) { }
public LzmaStream(byte[] properties, Stream inputStream, bool leaveOpen = false)
: this(properties, inputStream, -1, -1, null, properties.Length < 5, leaveOpen) { }
public LzmaStream(byte[] properties, Stream inputStream, long inputSize)
: this(properties, inputStream, inputSize, -1, null, properties.Length < 5) { }
public LzmaStream(byte[] properties, Stream inputStream, long inputSize, bool leaveOpen = false)
: this(properties, inputStream, inputSize, -1, null, properties.Length < 5, leaveOpen) { }
public LzmaStream(byte[] properties, Stream inputStream, long inputSize, long outputSize)
: this(properties, inputStream, inputSize, outputSize, null, properties.Length < 5) { }
public LzmaStream(
byte[] properties,
Stream inputStream,
long inputSize,
long outputSize,
bool leaveOpen = false
)
: this(
properties,
inputStream,
inputSize,
outputSize,
null,
properties.Length < 5,
leaveOpen
) { }
public LzmaStream(
byte[] properties,
@@ -71,13 +86,15 @@ public class LzmaStream : Stream, IStreamStack
long inputSize,
long outputSize,
Stream presetDictionary,
bool isLzma2
bool isLzma2,
bool leaveOpen = false
)
{
_inputStream = inputStream;
_inputSize = inputSize;
_outputSize = outputSize;
_isLzma2 = isLzma2;
_leaveOpen = leaveOpen;
#if DEBUG_STREAMS
this.DebugConstruct(typeof(LzmaStream));
@@ -179,7 +196,10 @@ public class LzmaStream : Stream, IStreamStack
{
_position = _encoder.Code(null, true);
}
_inputStream?.Dispose();
if (!_leaveOpen)
{
_inputStream?.Dispose();
}
_outWindow.Dispose();
}
base.Dispose(disposing);
@@ -425,7 +445,7 @@ public class LzmaStream : Stream, IStreamStack
}
}
private async Task DecodeChunkHeaderAsync(CancellationToken cancellationToken = default)
private async ValueTask DecodeChunkHeaderAsync(CancellationToken cancellationToken = default)
{
var controlBuffer = new byte[1];
await _inputStream
@@ -632,6 +652,119 @@ public class LzmaStream : Stream, IStreamStack
return total;
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
)
{
if (_endReached)
{
return 0;
}
var total = 0;
var offset = 0;
var count = buffer.Length;
while (total < count)
{
cancellationToken.ThrowIfCancellationRequested();
if (_availableBytes == 0)
{
if (_isLzma2)
{
await DecodeChunkHeaderAsync(cancellationToken).ConfigureAwait(false);
}
else
{
_endReached = true;
}
if (_endReached)
{
break;
}
}
var toProcess = count - total;
if (toProcess > _availableBytes)
{
toProcess = (int)_availableBytes;
}
_outWindow.SetLimit(toProcess);
if (_uncompressedChunk)
{
_inputPosition += await _outWindow
.CopyStreamAsync(_inputStream, toProcess, cancellationToken)
.ConfigureAwait(false);
}
else if (
await _decoder
.CodeAsync(_dictionarySize, _outWindow, _rangeDecoder, cancellationToken)
.ConfigureAwait(false)
&& _outputSize < 0
)
{
_availableBytes = _outWindow.AvailableBytes;
}
var read = _outWindow.Read(buffer, offset, toProcess);
total += read;
offset += read;
_position += read;
_availableBytes -= read;
if (_availableBytes == 0 && !_uncompressedChunk)
{
if (
!_rangeDecoder.IsFinished
|| (_rangeDecoderLimit >= 0 && _rangeDecoder._total != _rangeDecoderLimit)
)
{
_outWindow.SetLimit(toProcess + 1);
if (
!await _decoder
.CodeAsync(
_dictionarySize,
_outWindow,
_rangeDecoder,
cancellationToken
)
.ConfigureAwait(false)
)
{
_rangeDecoder.ReleaseStream();
throw new DataErrorException();
}
}
_rangeDecoder.ReleaseStream();
_inputPosition += _rangeDecoder._total;
if (_outWindow.HasPending)
{
throw new DataErrorException();
}
}
}
if (_endReached)
{
if (_inputSize >= 0 && _inputPosition != _inputSize)
{
throw new DataErrorException();
}
if (_outputSize >= 0 && _position != _outputSize)
{
throw new DataErrorException();
}
}
return total;
}
#endif
public override Task WriteAsync(
byte[] buffer,
int offset,

View File

@@ -22,11 +22,7 @@ namespace SharpCompress.Factories
yield return "ace";
}
public override bool IsArchive(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
)
public override bool IsArchive(Stream stream, string? password = null)
{
return AceHeader.IsArchive(stream);
}
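The same simplification repeats across every factory below; existing call sites that passed a buffer size just drop the third argument. Before/after, where factory is any IFactory:

// before: factory.IsArchive(stream, password: null, bufferSize: ReaderOptions.DefaultBufferSize);
// after:
var isArchive = factory.IsArchive(stream, password: null);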

View File

@@ -23,11 +23,7 @@ namespace SharpCompress.Factories
yield return "arc";
}
public override bool IsArchive(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
)
public override bool IsArchive(Stream stream, string? password = null)
{
//You may have to use some(paranoid) checks to ensure that you actually are
//processing an ARC file, since other archivers also adopted the idea of putting

View File

@@ -22,11 +22,7 @@ namespace SharpCompress.Factories
yield return "arj";
}
public override bool IsArchive(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
)
public override bool IsArchive(Stream stream, string? password = null)
{
return ArjHeader.IsArchive(stream);
}

View File

@@ -51,11 +51,7 @@ public abstract class Factory : IFactory
public abstract IEnumerable<string> GetSupportedExtensions();
/// <inheritdoc/>
public abstract bool IsArchive(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
);
public abstract bool IsArchive(Stream stream, string? password = null);
/// <inheritdoc/>
public virtual FileInfo? GetFilePart(int index, FileInfo part1) => null;
@@ -82,7 +78,7 @@ public abstract class Factory : IFactory
{
long pos = ((IStreamStack)stream).GetPosition();
if (IsArchive(stream, options.Password, options.BufferSize))
if (IsArchive(stream, options.Password))
{
((IStreamStack)stream).StackSeek(pos);
reader = readerFactory.OpenReader(stream, options);

View File

@@ -40,11 +40,8 @@ public class GZipFactory
}
/// <inheritdoc/>
public override bool IsArchive(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => GZipArchive.IsGZipFile(stream);
public override bool IsArchive(Stream stream, string? password = null) =>
GZipArchive.IsGZipFile(stream);
#endregion

View File

@@ -36,11 +36,7 @@ public interface IFactory
/// </summary>
/// <param name="stream">A stream, pointing to the beginning of the archive.</param>
/// <param name="password">optional password</param>
bool IsArchive(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
);
bool IsArchive(Stream stream, string? password = null);
/// <summary>
/// From a passed in archive (zip, rar, 7z, 001), return all parts.

View File

@@ -29,11 +29,8 @@ public class RarFactory : Factory, IArchiveFactory, IMultiArchiveFactory, IReade
}
/// <inheritdoc/>
public override bool IsArchive(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => RarArchive.IsRarFile(stream);
public override bool IsArchive(Stream stream, string? password = null) =>
RarArchive.IsRarFile(stream);
/// <inheritdoc/>
public override FileInfo? GetFilePart(int index, FileInfo part1) =>

View File

@@ -28,11 +28,8 @@ public class SevenZipFactory : Factory, IArchiveFactory, IMultiArchiveFactory
}
/// <inheritdoc/>
public override bool IsArchive(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => SevenZipArchive.IsSevenZipFile(stream);
public override bool IsArchive(Stream stream, string? password = null) =>
SevenZipArchive.IsSevenZipFile(stream);
#endregion

View File

@@ -53,11 +53,8 @@ public class TarFactory
}
/// <inheritdoc/>
public override bool IsArchive(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => TarArchive.IsTarFile(stream);
public override bool IsArchive(Stream stream, string? password = null) =>
TarArchive.IsTarFile(stream);
#endregion

View File

@@ -20,9 +20,6 @@ internal class ZStandardFactory : Factory
yield return "zstd";
}
public override bool IsArchive(
Stream stream,
string? password = null,
int bufferSize = 65536
) => ZStandardStream.IsZStandard(stream);
public override bool IsArchive(Stream stream, string? password = null) =>
ZStandardStream.IsZStandard(stream);
}

View File

@@ -39,11 +39,7 @@ public class ZipFactory
}
/// <inheritdoc/>
public override bool IsArchive(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
)
public override bool IsArchive(Stream stream, string? password = null)
{
var startPosition = stream.CanSeek ? stream.Position : -1;
@@ -51,10 +47,10 @@ public class ZipFactory
if (stream is not SharpCompressStream) // wrap to provide buffering before checking
{
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
stream = new SharpCompressStream(stream, bufferSize: Constants.BufferSize);
}
if (ZipArchive.IsZipFile(stream, password, bufferSize))
if (ZipArchive.IsZipFile(stream, password))
{
return true;
}
@@ -69,7 +65,7 @@ public class ZipFactory
stream.Position = startPosition;
//test the zip (last) file of a multipart zip
if (ZipArchive.IsZipMulti(stream, password, bufferSize))
if (ZipArchive.IsZipMulti(stream, password))
{
return true;
}

View File

@@ -1,5 +1,8 @@
using System;
using System.Buffers;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.IO;
@@ -26,14 +29,25 @@ internal class BufferedSubStream : SharpCompressStream, IStreamStack
#if DEBUG_STREAMS
this.DebugDispose(typeof(BufferedSubStream));
#endif
if (disposing) { }
if (_isDisposed)
{
return;
}
_isDisposed = true;
if (disposing && _cache is not null)
{
ArrayPool<byte>.Shared.Return(_cache);
_cache = null;
}
base.Dispose(disposing);
}
private int _cacheOffset;
private int _cacheLength;
private readonly byte[] _cache = new byte[32 << 10];
private byte[]? _cache = ArrayPool<byte>.Shared.Rent(81920);
private long origin;
private bool _isDisposed;
private long BytesLeftToRead { get; set; }
@@ -55,19 +69,58 @@ internal class BufferedSubStream : SharpCompressStream, IStreamStack
private void RefillCache()
{
var count = (int)Math.Min(BytesLeftToRead, _cache.Length);
if (_isDisposed)
{
throw new ObjectDisposedException(nameof(BufferedSubStream));
}
var count = (int)Math.Min(BytesLeftToRead, _cache!.Length);
_cacheOffset = 0;
if (count == 0)
{
_cacheLength = 0;
return;
}
Stream.Position = origin;
// Only seek if we're not already at the correct position
// This avoids expensive seek operations when reading sequentially
if (Stream.CanSeek && Stream.Position != origin)
{
Stream.Position = origin;
}
_cacheLength = Stream.Read(_cache, 0, count);
origin += _cacheLength;
BytesLeftToRead -= _cacheLength;
}
private async ValueTask RefillCacheAsync(CancellationToken cancellationToken)
{
if (_isDisposed)
{
throw new ObjectDisposedException(nameof(BufferedSubStream));
}
var count = (int)Math.Min(BytesLeftToRead, _cache!.Length);
_cacheOffset = 0;
if (count == 0)
{
_cacheLength = 0;
return;
}
// Only seek if we're not already at the correct position
// This avoids expensive seek operations when reading sequentially
if (Stream.CanSeek && Stream.Position != origin)
{
Stream.Position = origin;
}
_cacheLength = await Stream
.ReadAsync(_cache, 0, count, cancellationToken)
.ConfigureAwait(false);
origin += _cacheLength;
BytesLeftToRead -= _cacheLength;
}
public override int Read(byte[] buffer, int offset, int count)
{
if (count > Length)
@@ -83,7 +136,7 @@ internal class BufferedSubStream : SharpCompressStream, IStreamStack
}
count = Math.Min(count, _cacheLength - _cacheOffset);
Buffer.BlockCopy(_cache, _cacheOffset, buffer, offset, count);
Buffer.BlockCopy(_cache!, _cacheOffset, buffer, offset, count);
_cacheOffset += count;
}
@@ -101,9 +154,64 @@ internal class BufferedSubStream : SharpCompressStream, IStreamStack
}
}
return _cache[_cacheOffset++];
return _cache![_cacheOffset++];
}
public override async Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
if (count > Length)
{
count = (int)Length;
}
if (count > 0)
{
if (_cacheOffset == _cacheLength)
{
await RefillCacheAsync(cancellationToken).ConfigureAwait(false);
}
count = Math.Min(count, _cacheLength - _cacheOffset);
Buffer.BlockCopy(_cache!, _cacheOffset, buffer, offset, count);
_cacheOffset += count;
}
return count;
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
)
{
var count = buffer.Length;
if (count > Length)
{
count = (int)Length;
}
if (count > 0)
{
if (_cacheOffset == _cacheLength)
{
await RefillCacheAsync(cancellationToken).ConfigureAwait(false);
}
count = Math.Min(count, _cacheLength - _cacheOffset);
_cache!.AsSpan(_cacheOffset, count).CopyTo(buffer.Span);
_cacheOffset += count;
}
return count;
}
#endif
public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
public override void SetLength(long value) => throw new NotSupportedException();
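The cache handling above is the standard rent/return lifecycle for pooled arrays. As an isolated sketch (the type name is illustrative, not part of SharpCompress):

using System;
using System.Buffers;

sealed class PooledBuffer : IDisposable
{
    // Rent may hand back a larger array than requested; honor its Length, not 81920.
    private byte[]? _cache = ArrayPool<byte>.Shared.Rent(81920);

    public void Dispose()
    {
        if (_cache is null)
        {
            return; // already disposed; an array must never be returned twice
        }
        ArrayPool<byte>.Shared.Return(_cache);
        _cache = null; // make use-after-dispose fail fast instead of touching pooled memory
    }
}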

View File

@@ -16,11 +16,11 @@ internal class ReadOnlySubStream : SharpCompressStream, IStreamStack
private long _position;
public ReadOnlySubStream(Stream stream, long bytesToRead)
: this(stream, null, bytesToRead) { }
public ReadOnlySubStream(Stream stream, long bytesToRead, bool leaveOpen = true)
: this(stream, null, bytesToRead, leaveOpen) { }
public ReadOnlySubStream(Stream stream, long? origin, long bytesToRead)
: base(stream, leaveOpen: true, throwOnDispose: false)
public ReadOnlySubStream(Stream stream, long? origin, long bytesToRead, bool leaveOpen = true)
: base(stream, leaveOpen, throwOnDispose: false)
{
if (origin != null && stream.Position != origin.Value)
{

View File

@@ -138,8 +138,6 @@ public class SharpCompressStream : Stream, IStreamStack
#endif
}
internal bool IsRecording { get; private set; }
protected override void Dispose(bool disposing)
{
#if DEBUG_STREAMS
@@ -208,11 +206,11 @@ public class SharpCompressStream : Stream, IStreamStack
{
ValidateBufferState();
// Fill buffer if needed
// Fill buffer if needed, handling short reads from underlying stream
if (_bufferedLength == 0)
{
_bufferedLength = Stream.Read(_buffer!, 0, _bufferSize);
_bufferPosition = 0;
_bufferedLength = FillBuffer(_buffer!, 0, _bufferSize);
}
int available = _bufferedLength - _bufferPosition;
int toRead = Math.Min(count, available);
@@ -224,11 +222,8 @@ public class SharpCompressStream : Stream, IStreamStack
return toRead;
}
// If buffer exhausted, refill
int r = Stream.Read(_buffer!, 0, _bufferSize);
if (r == 0)
return 0;
_bufferedLength = r;
_bufferPosition = 0;
_bufferedLength = FillBuffer(_buffer!, 0, _bufferSize);
if (_bufferedLength == 0)
{
return 0;
@@ -252,6 +247,31 @@ public class SharpCompressStream : Stream, IStreamStack
}
}
/// <summary>
/// Fills the buffer by reading from the underlying stream, handling short reads.
/// Implements the ReadFully pattern: reads in a loop until the buffer is full or EOF is reached.
/// </summary>
/// <param name="buffer">Buffer to fill</param>
/// <param name="offset">Offset in buffer (always 0 in current usage)</param>
/// <param name="count">Number of bytes to read</param>
/// <returns>Total number of bytes read (may be less than count if EOF is reached)</returns>
private int FillBuffer(byte[] buffer, int offset, int count)
{
// Implement ReadFully pattern but return the actual count read
// This is the same logic as Utility.ReadFully but returns count instead of bool
var total = 0;
int read;
while ((read = Stream.Read(buffer, offset + total, count - total)) > 0)
{
total += read;
if (total >= count)
{
return total;
}
}
return total;
}
public override long Seek(long offset, SeekOrigin origin)
{
if (_bufferingEnabled)
@@ -259,7 +279,6 @@ public class SharpCompressStream : Stream, IStreamStack
ValidateBufferState();
}
long orig = _internalPosition;
long targetPos;
// Calculate the absolute target position based on origin
switch (origin)
@@ -327,13 +346,12 @@ public class SharpCompressStream : Stream, IStreamStack
{
ValidateBufferState();
// Fill buffer if needed
// Fill buffer if needed, handling short reads from underlying stream
if (_bufferedLength == 0)
{
_bufferedLength = await Stream
.ReadAsync(_buffer!, 0, _bufferSize, cancellationToken)
.ConfigureAwait(false);
_bufferPosition = 0;
_bufferedLength = await FillBufferAsync(_buffer!, 0, _bufferSize, cancellationToken)
.ConfigureAwait(false);
}
int available = _bufferedLength - _bufferPosition;
int toRead = Math.Min(count, available);
@@ -345,13 +363,9 @@ public class SharpCompressStream : Stream, IStreamStack
return toRead;
}
// If buffer exhausted, refill
int r = await Stream
.ReadAsync(_buffer!, 0, _bufferSize, cancellationToken)
.ConfigureAwait(false);
if (r == 0)
return 0;
_bufferedLength = r;
_bufferPosition = 0;
_bufferedLength = await FillBufferAsync(_buffer!, 0, _bufferSize, cancellationToken)
.ConfigureAwait(false);
if (_bufferedLength == 0)
{
return 0;
@@ -372,6 +386,38 @@ public class SharpCompressStream : Stream, IStreamStack
}
}
/// <summary>
/// Async version of FillBuffer. Implements the ReadFullyAsync pattern.
/// Reads in a loop until the buffer is full or EOF is reached.
/// </summary>
private async Task<int> FillBufferAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
// Implement ReadFullyAsync pattern but return the actual count read
// This is the same logic as Utility.ReadFullyAsync but returns count instead of bool
var total = 0;
int read;
while (
(
read = await Stream
.ReadAsync(buffer, offset + total, count - total, cancellationToken)
.ConfigureAwait(false)
) > 0
)
{
total += read;
if (total >= count)
{
return total;
}
}
return total;
}
public override async Task WriteAsync(
byte[] buffer,
int offset,
@@ -402,13 +448,15 @@ public class SharpCompressStream : Stream, IStreamStack
{
ValidateBufferState();
// Fill buffer if needed
// Fill buffer if needed, handling short reads from underlying stream
if (_bufferedLength == 0)
{
_bufferedLength = await Stream
.ReadAsync(_buffer.AsMemory(0, _bufferSize), cancellationToken)
.ConfigureAwait(false);
_bufferPosition = 0;
_bufferedLength = await FillBufferMemoryAsync(
_buffer.AsMemory(0, _bufferSize),
cancellationToken
)
.ConfigureAwait(false);
}
int available = _bufferedLength - _bufferPosition;
int toRead = Math.Min(buffer.Length, available);
@@ -420,13 +468,12 @@ public class SharpCompressStream : Stream, IStreamStack
return toRead;
}
// If buffer exhausted, refill
int r = await Stream
.ReadAsync(_buffer.AsMemory(0, _bufferSize), cancellationToken)
.ConfigureAwait(false);
if (r == 0)
return 0;
_bufferedLength = r;
_bufferPosition = 0;
_bufferedLength = await FillBufferMemoryAsync(
_buffer.AsMemory(0, _bufferSize),
cancellationToken
)
.ConfigureAwait(false);
if (_bufferedLength == 0)
{
return 0;
@@ -445,6 +492,35 @@ public class SharpCompressStream : Stream, IStreamStack
}
}
/// <summary>
/// Async version of FillBuffer for Memory{byte}. Implements the ReadFullyAsync pattern.
/// Reads in a loop until the buffer is full or EOF is reached.
/// </summary>
private async ValueTask<int> FillBufferMemoryAsync(
Memory<byte> buffer,
CancellationToken cancellationToken
)
{
// Implement ReadFullyAsync pattern but return the actual count read
var total = 0;
int read;
while (
(
read = await Stream
.ReadAsync(buffer.Slice(total), cancellationToken)
.ConfigureAwait(false)
) > 0
)
{
total += read;
if (total >= buffer.Length)
{
return total;
}
}
return total;
}
public override async ValueTask WriteAsync(
ReadOnlyMemory<byte> buffer,
CancellationToken cancellationToken = default
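The ReadFully-style refill matters because Stream.Read may legally return fewer bytes than requested. A test-only sketch of a stream that dribbles one byte per call (illustrative, not part of the test suite) shows the failure mode the loop guards against:

using System;
using System.IO;

// Returns at most one byte per Read call, like a slow socket.
sealed class OneByteStream : Stream
{
    private readonly Stream inner;
    public OneByteStream(Stream inner) => this.inner = inner;

    public override int Read(byte[] buffer, int offset, int count) =>
        inner.Read(buffer, offset, Math.Min(1, count));

    public override bool CanRead => true;
    public override bool CanSeek => false;
    public override bool CanWrite => false;
    public override long Length => throw new NotSupportedException();
    public override long Position
    {
        get => throw new NotSupportedException();
        set => throw new NotSupportedException();
    }
    public override void Flush() { }
    public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
    public override void SetLength(long value) => throw new NotSupportedException();
    public override void Write(byte[] buffer, int offset, int count) => throw new NotSupportedException();
}

// A single Stream.Read into a 64 KiB buffer would yield 1 byte here; FillBuffer's
// loop keeps reading until the buffer is full or EOF, so header parsing that assumes
// a filled buffer no longer breaks on streams like this.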

View File

@@ -222,8 +222,26 @@ public class SourceStream : Stream, IStreamStack
SetStream(0);
while (_prevSize + Current.Length < pos)
{
_prevSize += Current.Length;
SetStream(_stream + 1);
var currentLength = Current.Length;
_prevSize += currentLength;
if (!SetStream(_stream + 1))
{
// No more streams available, cannot seek to requested position
throw new InvalidOperationException(
$"Cannot seek to position {pos}. End of stream reached at position {_prevSize}."
);
}
// Safety check: if we have a zero-length stream and we're still not
// making progress toward the target position, we're in an invalid state
if (currentLength == 0 && Current.Length == 0)
{
// Both old and new stream have zero length - cannot make progress
throw new InvalidOperationException(
$"Cannot seek to position {pos}. Encountered zero-length streams at position {_prevSize}."
);
}
}
}
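The shape of the fix is a progress guard: a loop that walks a list of parts must either reach its target or throw once no further progress is possible, instead of spinning. In isolation (parts and target are hypothetical names):

long covered = 0;
var i = 0;
while (covered < target)
{
    if (i >= parts.Count)
    {
        // Ran out of parts before reaching the target: malformed input, so fail
        // loudly rather than loop forever the way the old code could.
        throw new InvalidOperationException($"Cannot seek to position {target}.");
    }
    covered += parts[i++].Length; // zero-length parts are harmless because i still advances
}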

View File

@@ -262,7 +262,7 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader
{
using Stream s = OpenEntryStream();
var sourceStream = WrapWithProgress(s, Entry);
sourceStream.CopyTo(writeStream, 81920);
sourceStream.CopyTo(writeStream, Constants.BufferSize);
}
internal async Task WriteAsync(Stream writeStream, CancellationToken cancellationToken)
@@ -270,11 +270,15 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader
#if NETFRAMEWORK || NETSTANDARD2_0
using Stream s = OpenEntryStream();
var sourceStream = WrapWithProgress(s, Entry);
await sourceStream.CopyToAsync(writeStream, 81920, cancellationToken).ConfigureAwait(false);
await sourceStream
.CopyToAsync(writeStream, Constants.BufferSize, cancellationToken)
.ConfigureAwait(false);
#else
await using Stream s = OpenEntryStream();
var sourceStream = WrapWithProgress(s, Entry);
await sourceStream.CopyToAsync(writeStream, 81920, cancellationToken).ConfigureAwait(false);
await sourceStream
.CopyToAsync(writeStream, Constants.BufferSize, cancellationToken)
.ConfigureAwait(false);
#endif
}

View File

@@ -5,6 +5,14 @@ namespace SharpCompress.Readers;
public class ReaderOptions : OptionsBase
{
/// <summary>
/// The default buffer size for stream operations.
/// This value (65536 bytes) is preserved for backward compatibility.
/// New code should use Constants.BufferSize instead (81920 bytes), which matches .NET's Stream.CopyTo default.
/// </summary>
[Obsolete(
"Use Constants.BufferSize instead. This constant will be removed in a future version."
)]
public const int DefaultBufferSize = 0x10000;
/// <summary>
@@ -16,7 +24,7 @@ public class ReaderOptions : OptionsBase
public bool DisableCheckIncomplete { get; set; }
public int BufferSize { get; set; } = DefaultBufferSize;
public int BufferSize { get; set; } = Constants.BufferSize;
/// <summary>
/// Provide a hint for the extension of the archive being read; this can speed up finding the correct decoder. Give the extension without the leading period, e.g. tar.gz or zip
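Migration is mechanical; the obsolete constant keeps old code compiling while new code picks up the larger default. A sketch:

// before (now emits an Obsolete warning):
var oldOptions = new ReaderOptions { BufferSize = ReaderOptions.DefaultBufferSize }; // 65536
// after:
var newOptions = new ReaderOptions { BufferSize = SharpCompress.Common.Constants.BufferSize }; // 81920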

View File

@@ -2,11 +2,11 @@
<PropertyGroup>
<AssemblyTitle>SharpCompress - Pure C# Decompression/Compression</AssemblyTitle>
<NeutralLanguage>en-US</NeutralLanguage>
<VersionPrefix>0.42.0</VersionPrefix>
<AssemblyVersion>0.42.0</AssemblyVersion>
<FileVersion>0.42.0</FileVersion>
<VersionPrefix>0.0.0</VersionPrefix>
<AssemblyVersion>0.0.0.0</AssemblyVersion>
<FileVersion>0.0.0.0</FileVersion>
<Authors>Adam Hathcock</Authors>
<TargetFrameworks>net48;net8.0;net10.0</TargetFrameworks>
<TargetFrameworks>net48;netstandard2.0;net8.0;net10.0</TargetFrameworks>
<AssemblyName>SharpCompress</AssemblyName>
<AssemblyOriginatorKeyFile>../../SharpCompress.snk</AssemblyOriginatorKeyFile>
<SignAssembly>true</SignAssembly>
@@ -17,7 +17,7 @@
<Copyright>Copyright (c) 2025 Adam Hathcock</Copyright>
<GenerateAssemblyTitleAttribute>false</GenerateAssemblyTitleAttribute>
<GenerateAssemblyProductAttribute>false</GenerateAssemblyProductAttribute>
<Description>SharpCompress is a compression library for NET 4.8/NET 8.0/NET 10.0 that can unrar, decompress 7zip, decompress xz, zip/unzip, tar/untar lzip/unlzip, bzip2/unbzip2 and gzip/ungzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip is implemented.</Description>
<Description>SharpCompress is a compression library for NET 4.8/NET Standard 2.0/NET 8.0/NET 10.0 that can unrar, decompress 7zip, decompress xz, zip/unzip, tar/untar lzip/unlzip, bzip2/unbzip2 and gzip/ungzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip is implemented.</Description>
<PublishRepositoryUrl>true</PublishRepositoryUrl>
<IncludeSymbols>true</IncludeSymbols>
<DebugType>embedded</DebugType>
@@ -30,25 +30,13 @@
</PropertyGroup>
<PropertyGroup Condition=" '$(TargetFramework)' == 'net8.0' Or '$(TargetFramework)' == 'net10.0' ">
<IsTrimmable>true</IsTrimmable>
<IsAotCompatible>true</IsAotCompatible>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(TargetFramework)|$(Platform)'=='Debug|net8.0|AnyCPU'">
<DefineConstants>$(DefineConstants);DEBUG_STREAMS</DefineConstants>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(TargetFramework)|$(Platform)'=='Debug|net10.0|AnyCPU'">
<DefineConstants>$(DefineConstants);DEBUG_STREAMS</DefineConstants>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.SourceLink.GitHub" PrivateAssets="All" />
</ItemGroup>
<ItemGroup Condition=" '$(TargetFramework)' == 'net8.0' Or '$(TargetFramework)' == 'net10.0' ">
<PackageReference Include="Microsoft.NET.ILLink.Tasks" PrivateAssets="All" />
</ItemGroup>
<ItemGroup Condition=" '$(TargetFramework)' == 'net48' ">
<ItemGroup Condition=" '$(TargetFramework)' == 'net48' Or '$(TargetFramework)' == 'netstandard2.0' ">
<PackageReference Include="Microsoft.Bcl.AsyncInterfaces" />
<PackageReference Include="System.Text.Encoding.CodePages" />
<PackageReference Include="System.Buffers" />
<PackageReference Include="System.Memory" />
<PackageReference Include="Microsoft.NETFramework.ReferenceAssemblies" PrivateAssets="All" />
</ItemGroup>
<ItemGroup>
<None Include="..\..\README.md" Pack="true" PackagePath="\" />

View File

@@ -11,8 +11,6 @@ namespace SharpCompress;
internal static class Utility
{
//80kb is a good industry standard temporary buffer size
private const int TEMP_BUFFER_SIZE = 81920;
private static readonly HashSet<char> invalidChars = new(Path.GetInvalidFileNameChars());
public static ReadOnlyCollection<T> ToReadOnly<T>(this IList<T> items) => new(items);
@@ -71,48 +69,16 @@ internal static class Utility
return;
}
using var buffer = MemoryPool<byte>.Shared.Rent(TEMP_BUFFER_SIZE);
while (advanceAmount > 0)
{
var toRead = (int)Math.Min(buffer.Memory.Length, advanceAmount);
var read = source.Read(buffer.Memory.Slice(0, toRead).Span);
if (read <= 0)
{
break;
}
advanceAmount -= read;
}
using var readOnlySubStream = new IO.ReadOnlySubStream(source, advanceAmount);
readOnlySubStream.CopyTo(Stream.Null);
}
public static void Skip(this Stream source)
{
using var buffer = MemoryPool<byte>.Shared.Rent(TEMP_BUFFER_SIZE);
while (source.Read(buffer.Memory.Span) > 0) { }
}
public static void Skip(this Stream source) => source.CopyTo(Stream.Null);
public static async Task SkipAsync(
this Stream source,
CancellationToken cancellationToken = default
)
public static Task SkipAsync(this Stream source, CancellationToken cancellationToken = default)
{
var array = ArrayPool<byte>.Shared.Rent(TEMP_BUFFER_SIZE);
try
{
while (true)
{
var read = await source
.ReadAsync(array, 0, array.Length, cancellationToken)
.ConfigureAwait(false);
if (read <= 0)
{
break;
}
}
}
finally
{
ArrayPool<byte>.Shared.Return(array);
}
cancellationToken.ThrowIfCancellationRequested();
return source.CopyToAsync(Stream.Null);
}
public static DateTime DosDateToDateTime(ushort iDate, ushort iTime)
@@ -183,7 +149,7 @@ internal static class Utility
public static long TransferTo(this Stream source, Stream destination, long maxLength)
{
var array = ArrayPool<byte>.Shared.Rent(TEMP_BUFFER_SIZE);
var array = ArrayPool<byte>.Shared.Rent(Common.Constants.BufferSize);
try
{
var maxReadSize = array.Length;
@@ -222,7 +188,7 @@ internal static class Utility
CancellationToken cancellationToken = default
)
{
var array = ArrayPool<byte>.Shared.Rent(TEMP_BUFFER_SIZE);
var array = ArrayPool<byte>.Shared.Rent(Common.Constants.BufferSize);
try
{
var maxReadSize = array.Length;
@@ -300,7 +266,7 @@ internal static class Utility
return;
}
var array = ArrayPool<byte>.Shared.Rent(TEMP_BUFFER_SIZE);
var array = ArrayPool<byte>.Shared.Rent(Common.Constants.BufferSize);
try
{
while (advanceAmount > 0)

View File

@@ -48,7 +48,7 @@ public sealed class GZipWriter : AbstractWriter
stream.FileName = filename;
stream.LastModified = modificationTime;
var progressStream = WrapWithProgress(source, filename);
progressStream.CopyTo(stream);
progressStream.CopyTo(stream, Constants.BufferSize);
_wroteToStream = true;
}

View File

@@ -34,7 +34,6 @@ internal class ZipCentralDirectoryEntry
internal ulong Decompressed { get; set; }
internal ushort Zip64HeaderOffset { get; set; }
internal ulong HeaderOffset { get; }
internal string FileName => fileName;
internal uint Write(Stream outputStream)
{

View File

@@ -8,7 +8,6 @@ using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Zip;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Common.Zip.SOZip;
using SharpCompress.Compressors;
using SharpCompress.Compressors.BZip2;
using SharpCompress.Compressors.Deflate;
@@ -16,6 +15,7 @@ using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.PPMd;
using SharpCompress.Compressors.ZStandard;
using SharpCompress.IO;
using Constants = SharpCompress.Common.Constants;
namespace SharpCompress.Writers.Zip;
@@ -28,19 +28,12 @@ public class ZipWriter : AbstractWriter
private long streamPosition;
private PpmdProperties? ppmdProps;
private readonly bool isZip64;
private readonly bool enableSOZip;
private readonly int sozipChunkSize;
private readonly long sozipMinFileSize;
public ZipWriter(Stream destination, ZipWriterOptions zipWriterOptions)
: base(ArchiveType.Zip, zipWriterOptions)
{
zipComment = zipWriterOptions.ArchiveComment ?? string.Empty;
isZip64 = zipWriterOptions.UseZip64;
enableSOZip = zipWriterOptions.EnableSOZip;
sozipChunkSize = zipWriterOptions.SOZipChunkSize;
sozipMinFileSize = zipWriterOptions.SOZipMinFileSize;
if (destination.CanSeek)
{
streamPosition = destination.Position;
@@ -95,7 +88,7 @@ public class ZipWriter : AbstractWriter
{
using var output = WriteToStream(entryPath, zipWriterEntryOptions);
var progressStream = WrapWithProgress(source, entryPath);
progressStream.CopyTo(output);
progressStream.CopyTo(output, Constants.BufferSize);
}
public Stream WriteToStream(string entryPath, ZipWriterEntryOptions options)
@@ -125,21 +118,12 @@ public class ZipWriter : AbstractWriter
var headersize = (uint)WriteHeader(entryPath, options, entry, useZip64);
streamPosition += headersize;
// Determine if SOZip should be used for this entry
var useSozip =
(options.EnableSOZip ?? enableSOZip)
&& compression == ZipCompressionMethod.Deflate
&& OutputStream.CanSeek;
return new ZipWritingStream(
this,
OutputStream.NotNull(),
entry,
compression,
options.CompressionLevel ?? compressionLevel,
useSozip,
useSozip ? sozipChunkSize : 0
options.CompressionLevel ?? compressionLevel
);
}
@@ -321,64 +305,6 @@ public class ZipWriter : AbstractWriter
OutputStream.Write(intBuf);
}
private void WriteSozipIndexFile(
ZipCentralDirectoryEntry dataEntry,
SOZipDeflateStream sozipStream
)
{
var indexFileName = SOZipIndex.GetIndexFileName(dataEntry.FileName);
// Create the SOZip index
var index = new SOZipIndex(
chunkSize: sozipStream.ChunkSize,
uncompressedSize: sozipStream.UncompressedBytesWritten,
compressedSize: sozipStream.CompressedBytesWritten,
compressedOffsets: sozipStream.CompressedOffsets
);
var indexBytes = index.ToByteArray();
// Calculate CRC for index data
var crc = new CRC32();
crc.SlurpBlock(indexBytes, 0, indexBytes.Length);
var indexCrc = (uint)crc.Crc32Result;
// Write the index file as a stored (uncompressed) entry
var indexEntry = new ZipCentralDirectoryEntry(
ZipCompressionMethod.None,
indexFileName,
(ulong)streamPosition,
WriterOptions.ArchiveEncoding
)
{
ModificationTime = DateTime.Now,
};
// Write the local file header for index
var indexOptions = new ZipWriterEntryOptions { CompressionType = CompressionType.None };
var headerSize = (uint)WriteHeader(indexFileName, indexOptions, indexEntry, isZip64);
streamPosition += headerSize;
// Write the index data directly
OutputStream.Write(indexBytes, 0, indexBytes.Length);
// Finalize the index entry
indexEntry.Crc = indexCrc;
indexEntry.Compressed = (ulong)indexBytes.Length;
indexEntry.Decompressed = (ulong)indexBytes.Length;
if (OutputStream.CanSeek)
{
// Update the header with sizes and CRC
OutputStream.Position = (long)(indexEntry.HeaderOffset + 14);
WriteFooter(indexCrc, (uint)indexBytes.Length, (uint)indexBytes.Length);
OutputStream.Position = streamPosition + indexBytes.Length;
}
streamPosition += indexBytes.Length;
entries.Add(indexEntry);
}
private void WriteEndRecord(ulong size)
{
var zip64EndOfCentralDirectoryNeeded =
@@ -460,10 +386,7 @@ public class ZipWriter : AbstractWriter
private readonly ZipWriter writer;
private readonly ZipCompressionMethod zipCompressionMethod;
private readonly int compressionLevel;
private readonly bool useSozip;
private readonly int sozipChunkSize;
private SharpCompressStream? counting;
private SOZipDeflateStream? sozipStream;
private ulong decompressed;
// Flag to prevent throwing exceptions on Dispose
@@ -475,9 +398,7 @@ public class ZipWriter : AbstractWriter
Stream originalStream,
ZipCentralDirectoryEntry entry,
ZipCompressionMethod zipCompressionMethod,
int compressionLevel,
bool useSozip = false,
int sozipChunkSize = 0
int compressionLevel
)
{
this.writer = writer;
@@ -486,8 +407,6 @@ public class ZipWriter : AbstractWriter
this.entry = entry;
this.zipCompressionMethod = zipCompressionMethod;
this.compressionLevel = compressionLevel;
this.useSozip = useSozip;
this.sozipChunkSize = sozipChunkSize;
writeStream = GetWriteStream(originalStream);
}
@@ -517,15 +436,6 @@ public class ZipWriter : AbstractWriter
}
case ZipCompressionMethod.Deflate:
{
if (useSozip && sozipChunkSize > 0)
{
sozipStream = new SOZipDeflateStream(
counting,
(CompressionLevel)compressionLevel,
sozipChunkSize
);
return sozipStream;
}
return new DeflateStream(
counting,
CompressionMode.Compress,
@@ -672,18 +582,7 @@ public class ZipWriter : AbstractWriter
writer.WriteFooter(entry.Crc, compressedvalue, decompressedvalue);
writer.streamPosition += (long)entry.Compressed + 16;
}
writer.entries.Add(entry);
// Write SOZip index file if SOZip was used and file meets minimum size
if (
useSozip
&& sozipStream is not null
&& entry.Decompressed >= (ulong)writer.sozipMinFileSize
)
{
writer.WriteSozipIndexFile(entry, sozipStream);
}
}
}
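With the SOZip branches removed, the deflate write path is a single stream pipeline again, and typical usage is unchanged. A minimal sketch, assuming the ZipWriterOptions(CompressionType) constructor and a caller-supplied source stream:

using var destination = File.Create("out.zip");
using var writer = new ZipWriter(destination, new ZipWriterOptions(CompressionType.Deflate));
using (var entryStream = writer.WriteToStream("data.bin", new ZipWriterEntryOptions()))
{
    source.CopyTo(entryStream); // source is the hypothetical input stream
}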

View File

@@ -49,11 +49,4 @@ public class ZipWriterEntryOptions
/// This option is not supported with non-seekable streams.
/// </summary>
public bool? EnableZip64 { get; set; }
/// <summary>
/// Enable or disable SOZip (Seek-Optimized ZIP) for this entry.
/// When null, uses the archive's default setting.
/// SOZip is only applicable to Deflate-compressed files on seekable streams.
/// </summary>
public bool? EnableSOZip { get; set; }
}

View File

@@ -1,6 +1,5 @@
using System;
using SharpCompress.Common;
using SharpCompress.Common.Zip.SOZip;
using SharpCompress.Compressors.Deflate;
using D = SharpCompress.Compressors.Deflate;
@@ -25,9 +24,6 @@ public class ZipWriterOptions : WriterOptions
{
UseZip64 = writerOptions.UseZip64;
ArchiveComment = writerOptions.ArchiveComment;
EnableSOZip = writerOptions.EnableSOZip;
SOZipChunkSize = writerOptions.SOZipChunkSize;
SOZipMinFileSize = writerOptions.SOZipMinFileSize;
}
}
@@ -84,27 +80,4 @@ public class ZipWriterOptions : WriterOptions
/// are less than 4GiB in length.
/// </summary>
public bool UseZip64 { get; set; }
/// <summary>
/// Enables SOZip (Seek-Optimized ZIP) for Deflate-compressed files.
/// When enabled, files that meet the minimum size requirement will have
/// an accompanying index file that allows random access within the
/// compressed data. Requires a seekable output stream.
/// </summary>
public bool EnableSOZip { get; set; }
/// <summary>
/// The chunk size for SOZip index creation in bytes.
/// Must be a multiple of 1024 bytes. Default is 32KB (32768 bytes).
/// Smaller chunks allow for finer-grained random access but result
/// in larger index files and slightly less efficient compression.
/// </summary>
public int SOZipChunkSize { get; set; } = (int)SOZipIndex.DEFAULT_CHUNK_SIZE;
/// <summary>
/// Minimum file size (uncompressed) in bytes for SOZip optimization.
/// Files smaller than this size will not have SOZip index files created.
/// Default is 1MB (1048576 bytes).
/// </summary>
public long SOZipMinFileSize { get; set; } = 1048576;
}

View File

@@ -22,12 +22,12 @@
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==",
"requested": "[10.0.102, )",
"resolved": "10.0.102",
"contentHash": "Oxq3RCIJSdtpIU4hLqO7XaDe/Ra3HS9Wi8rJl838SAg6Zu1iQjerA0+xXWBgUFYbgknUGCLOU0T+lzMLkvY9Qg==",
"dependencies": {
"Microsoft.Build.Tasks.Git": "8.0.0",
"Microsoft.SourceLink.Common": "8.0.0"
"Microsoft.Build.Tasks.Git": "10.0.102",
"Microsoft.SourceLink.Common": "10.0.102"
}
},
"System.Buffers": {
@@ -60,8 +60,8 @@
},
"Microsoft.Build.Tasks.Git": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
"resolved": "10.0.102",
"contentHash": "0i81LYX31U6UiXz4NOLbvc++u+/mVDmOt+PskrM/MygpDxkv9THKQyRUmavBpLK6iBV0abNWnn+CQgSRz//Pwg=="
},
"Microsoft.NETFramework.ReferenceAssemblies.net48": {
"type": "Transitive",
@@ -70,8 +70,8 @@
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
"resolved": "10.0.102",
"contentHash": "Mk1IMb9q5tahC2NltxYXFkLBtuBvfBoCQ3pIxYQWfzbCE9o1OB9SsHe0hnNGo7lWgTA/ePbFAJLWu6nLL9K17A=="
},
"System.Numerics.Vectors": {
"type": "Transitive",
@@ -97,60 +97,192 @@
"contentHash": "+RJT4qaekpZ7DDLhf+LTjq+E48jieKiY9ulJ+BoxKmZblIJfIJT8Ufcaa/clQqnYvWs8jugfGSMu8ylS0caG0w=="
}
},
"net10.0": {
"Microsoft.NET.ILLink.Tasks": {
".NETStandard,Version=v2.0": {
"Microsoft.Bcl.AsyncInterfaces": {
"type": "Direct",
"requested": "[10.0.0, )",
"resolved": "10.0.0",
"contentHash": "kICGrGYEzCNI3wPzfEXcwNHgTvlvVn9yJDhSdRK+oZQy4jvYH529u7O0xf5ocQKzOMjfS07+3z9PKRIjrFMJDA=="
"contentHash": "vFuwSLj9QJBbNR0NeNO4YVASUbokxs+i/xbuu8B+Fs4FAZg5QaFa6eGrMaRqTzzNI5tAb97T7BhSxtLckFyiRA==",
"dependencies": {
"System.Threading.Tasks.Extensions": "4.6.3"
}
},
"Microsoft.NETFramework.ReferenceAssemblies": {
"type": "Direct",
"requested": "[1.0.3, )",
"resolved": "1.0.3",
"contentHash": "vUc9Npcs14QsyOD01tnv/m8sQUnGTGOw1BCmKcv77LBJY7OxhJ+zJF7UD/sCL3lYNFuqmQEVlkfS4Quif6FyYg==",
"dependencies": {
"Microsoft.NETFramework.ReferenceAssemblies.net461": "1.0.3"
}
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==",
"requested": "[10.0.102, )",
"resolved": "10.0.102",
"contentHash": "Oxq3RCIJSdtpIU4hLqO7XaDe/Ra3HS9Wi8rJl838SAg6Zu1iQjerA0+xXWBgUFYbgknUGCLOU0T+lzMLkvY9Qg==",
"dependencies": {
"Microsoft.Build.Tasks.Git": "8.0.0",
"Microsoft.SourceLink.Common": "8.0.0"
"Microsoft.Build.Tasks.Git": "10.0.102",
"Microsoft.SourceLink.Common": "10.0.102"
}
},
"NETStandard.Library": {
"type": "Direct",
"requested": "[2.0.3, )",
"resolved": "2.0.3",
"contentHash": "st47PosZSHrjECdjeIzZQbzivYBJFv6P2nv4cj2ypdI204DO+vZ7l5raGMiX4eXMJ53RfOIg+/s4DHVZ54Nu2A==",
"dependencies": {
"Microsoft.NETCore.Platforms": "1.1.0"
}
},
"System.Buffers": {
"type": "Direct",
"requested": "[4.6.1, )",
"resolved": "4.6.1",
"contentHash": "N8GXpmiLMtljq7gwvyS+1QvKT/W2J8sNAvx+HVg4NGmsG/H+2k/y9QI23auLJRterrzCiDH+IWAw4V/GPwsMlw=="
},
"System.Memory": {
"type": "Direct",
"requested": "[4.6.3, )",
"resolved": "4.6.3",
"contentHash": "qdcDOgnFZY40+Q9876JUHnlHu7bosOHX8XISRoH94fwk6hgaeQGSgfZd8srWRZNt5bV9ZW2TljcegDNxsf+96A==",
"dependencies": {
"System.Buffers": "4.6.1",
"System.Numerics.Vectors": "4.6.1",
"System.Runtime.CompilerServices.Unsafe": "6.1.2"
}
},
"System.Text.Encoding.CodePages": {
"type": "Direct",
"requested": "[10.0.0, )",
"resolved": "10.0.0",
"contentHash": "QLP54mIATaBpjGlsZIxga38VPk1G9js0Kw651B+bvrXi2kSgGZYrxJSpM3whhTZCBK4HEBHX3fzfDQMw7CXHGQ==",
"dependencies": {
"System.Memory": "4.6.3",
"System.Runtime.CompilerServices.Unsafe": "6.1.2"
}
},
"Microsoft.Build.Tasks.Git": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
"resolved": "10.0.102",
"contentHash": "0i81LYX31U6UiXz4NOLbvc++u+/mVDmOt+PskrM/MygpDxkv9THKQyRUmavBpLK6iBV0abNWnn+CQgSRz//Pwg=="
},
"Microsoft.NETCore.Platforms": {
"type": "Transitive",
"resolved": "1.1.0",
"contentHash": "kz0PEW2lhqygehI/d6XsPCQzD7ff7gUJaVGPVETX611eadGsA3A877GdSlU0LRVMCTH/+P3o2iDTak+S08V2+A=="
},
"Microsoft.NETFramework.ReferenceAssemblies.net461": {
"type": "Transitive",
"resolved": "1.0.3",
"contentHash": "AmOJZwCqnOCNp6PPcf9joyogScWLtwy0M1WkqfEQ0M9nYwyDD7EX9ZjscKS5iYnyvteX7kzSKFCKt9I9dXA6mA=="
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
"resolved": "10.0.102",
"contentHash": "Mk1IMb9q5tahC2NltxYXFkLBtuBvfBoCQ3pIxYQWfzbCE9o1OB9SsHe0hnNGo7lWgTA/ePbFAJLWu6nLL9K17A=="
},
"System.Numerics.Vectors": {
"type": "Transitive",
"resolved": "4.6.1",
"contentHash": "sQxefTnhagrhoq2ReR0D/6K0zJcr9Hrd6kikeXsA1I8kOCboTavcUC4r7TSfpKFeE163uMuxZcyfO1mGO3EN8Q=="
},
"System.Runtime.CompilerServices.Unsafe": {
"type": "Transitive",
"resolved": "6.1.2",
"contentHash": "2hBr6zdbIBTDE3EhK7NSVNdX58uTK6iHW/P/Axmm9sl1xoGSLqDvMtpecn226TNwHByFokYwJmt/aQQNlO5CRw=="
},
"System.Threading.Tasks.Extensions": {
"type": "Transitive",
"resolved": "4.6.3",
"contentHash": "7sCiwilJLYbTZELaKnc7RecBBXWXA+xMLQWZKWawBxYjp6DBlSE3v9/UcvKBvr1vv2tTOhipiogM8rRmxlhrVA==",
"dependencies": {
"System.Runtime.CompilerServices.Unsafe": "6.1.2"
}
}
},
"net10.0": {
"Microsoft.NET.ILLink.Tasks": {
"type": "Direct",
"requested": "[10.0.2, )",
"resolved": "10.0.2",
"contentHash": "sXdDtMf2qcnbygw9OdE535c2lxSxrZP8gO4UhDJ0xiJbl1wIqXS1OTcTDFTIJPOFd6Mhcm8gPEthqWGUxBsTqw=="
},
"Microsoft.NETFramework.ReferenceAssemblies": {
"type": "Direct",
"requested": "[1.0.3, )",
"resolved": "1.0.3",
"contentHash": "vUc9Npcs14QsyOD01tnv/m8sQUnGTGOw1BCmKcv77LBJY7OxhJ+zJF7UD/sCL3lYNFuqmQEVlkfS4Quif6FyYg==",
"dependencies": {
"Microsoft.NETFramework.ReferenceAssemblies.net461": "1.0.3"
}
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[10.0.102, )",
"resolved": "10.0.102",
"contentHash": "Oxq3RCIJSdtpIU4hLqO7XaDe/Ra3HS9Wi8rJl838SAg6Zu1iQjerA0+xXWBgUFYbgknUGCLOU0T+lzMLkvY9Qg==",
"dependencies": {
"Microsoft.Build.Tasks.Git": "10.0.102",
"Microsoft.SourceLink.Common": "10.0.102"
}
},
"Microsoft.Build.Tasks.Git": {
"type": "Transitive",
"resolved": "10.0.102",
"contentHash": "0i81LYX31U6UiXz4NOLbvc++u+/mVDmOt+PskrM/MygpDxkv9THKQyRUmavBpLK6iBV0abNWnn+CQgSRz//Pwg=="
},
"Microsoft.NETFramework.ReferenceAssemblies.net461": {
"type": "Transitive",
"resolved": "1.0.3",
"contentHash": "AmOJZwCqnOCNp6PPcf9joyogScWLtwy0M1WkqfEQ0M9nYwyDD7EX9ZjscKS5iYnyvteX7kzSKFCKt9I9dXA6mA=="
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "10.0.102",
"contentHash": "Mk1IMb9q5tahC2NltxYXFkLBtuBvfBoCQ3pIxYQWfzbCE9o1OB9SsHe0hnNGo7lWgTA/ePbFAJLWu6nLL9K17A=="
}
},
"net8.0": {
"Microsoft.NET.ILLink.Tasks": {
"type": "Direct",
"requested": "[10.0.0, )",
"resolved": "10.0.0",
"contentHash": "kICGrGYEzCNI3wPzfEXcwNHgTvlvVn9yJDhSdRK+oZQy4jvYH529u7O0xf5ocQKzOMjfS07+3z9PKRIjrFMJDA=="
"requested": "[8.0.23, )",
"resolved": "8.0.23",
"contentHash": "GqHiB1HbbODWPbY/lc5xLQH8siEEhNA0ptpJCC6X6adtAYNEzu5ZlqV3YHA3Gh7fuEwgA8XqVwMtH2KNtuQM1Q=="
},
"Microsoft.NETFramework.ReferenceAssemblies": {
"type": "Direct",
"requested": "[1.0.3, )",
"resolved": "1.0.3",
"contentHash": "vUc9Npcs14QsyOD01tnv/m8sQUnGTGOw1BCmKcv77LBJY7OxhJ+zJF7UD/sCL3lYNFuqmQEVlkfS4Quif6FyYg==",
"dependencies": {
"Microsoft.NETFramework.ReferenceAssemblies.net461": "1.0.3"
}
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==",
"requested": "[10.0.102, )",
"resolved": "10.0.102",
"contentHash": "Oxq3RCIJSdtpIU4hLqO7XaDe/Ra3HS9Wi8rJl838SAg6Zu1iQjerA0+xXWBgUFYbgknUGCLOU0T+lzMLkvY9Qg==",
"dependencies": {
"Microsoft.Build.Tasks.Git": "8.0.0",
"Microsoft.SourceLink.Common": "8.0.0"
"Microsoft.Build.Tasks.Git": "10.0.102",
"Microsoft.SourceLink.Common": "10.0.102"
}
},
"Microsoft.Build.Tasks.Git": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
"resolved": "10.0.102",
"contentHash": "0i81LYX31U6UiXz4NOLbvc++u+/mVDmOt+PskrM/MygpDxkv9THKQyRUmavBpLK6iBV0abNWnn+CQgSRz//Pwg=="
},
"Microsoft.NETFramework.ReferenceAssemblies.net461": {
"type": "Transitive",
"resolved": "1.0.3",
"contentHash": "AmOJZwCqnOCNp6PPcf9joyogScWLtwy0M1WkqfEQ0M9nYwyDD7EX9ZjscKS5iYnyvteX7kzSKFCKt9I9dXA6mA=="
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
"resolved": "10.0.102",
"contentHash": "Mk1IMb9q5tahC2NltxYXFkLBtuBvfBoCQ3pIxYQWfzbCE9o1OB9SsHe0hnNGo7lWgTA/ePbFAJLWu6nLL9K17A=="
}
}
}

View File

@@ -12,6 +12,25 @@
"JetBrains.Profiler.Api": "1.4.10"
}
},
"Microsoft.NETFramework.ReferenceAssemblies": {
"type": "Direct",
"requested": "[1.0.3, )",
"resolved": "1.0.3",
"contentHash": "vUc9Npcs14QsyOD01tnv/m8sQUnGTGOw1BCmKcv77LBJY7OxhJ+zJF7UD/sCL3lYNFuqmQEVlkfS4Quif6FyYg==",
"dependencies": {
"Microsoft.NETFramework.ReferenceAssemblies.net461": "1.0.3"
}
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[10.0.102, )",
"resolved": "10.0.102",
"contentHash": "Oxq3RCIJSdtpIU4hLqO7XaDe/Ra3HS9Wi8rJl838SAg6Zu1iQjerA0+xXWBgUFYbgknUGCLOU0T+lzMLkvY9Qg==",
"dependencies": {
"Microsoft.Build.Tasks.Git": "10.0.102",
"Microsoft.SourceLink.Common": "10.0.102"
}
},
"JetBrains.FormatRipper": {
"type": "Transitive",
"resolved": "2.4.0",
@@ -33,6 +52,21 @@
"JetBrains.HabitatDetector": "1.4.5"
}
},
"Microsoft.Build.Tasks.Git": {
"type": "Transitive",
"resolved": "10.0.102",
"contentHash": "0i81LYX31U6UiXz4NOLbvc++u+/mVDmOt+PskrM/MygpDxkv9THKQyRUmavBpLK6iBV0abNWnn+CQgSRz//Pwg=="
},
"Microsoft.NETFramework.ReferenceAssemblies.net461": {
"type": "Transitive",
"resolved": "1.0.3",
"contentHash": "AmOJZwCqnOCNp6PPcf9joyogScWLtwy0M1WkqfEQ0M9nYwyDD7EX9ZjscKS5iYnyvteX7kzSKFCKt9I9dXA6mA=="
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "10.0.102",
"contentHash": "Mk1IMb9q5tahC2NltxYXFkLBtuBvfBoCQ3pIxYQWfzbCE9o1OB9SsHe0hnNGo7lWgTA/ePbFAJLWu6nLL9K17A=="
},
"sharpcompress": {
"type": "Project"
}

View File

@@ -2,8 +2,8 @@ using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Test.Mocks;
@@ -31,8 +31,8 @@ public class ForwardOnlyStream : SharpCompressStream, IStreamStack
public bool IsDisposed { get; private set; }
public ForwardOnlyStream(Stream stream, int bufferSize = ReaderOptions.DefaultBufferSize)
: base(stream, bufferSize: bufferSize)
public ForwardOnlyStream(Stream stream, int? bufferSize = null)
: base(stream, bufferSize: bufferSize ?? Constants.BufferSize)
{
this.stream = stream;
#if DEBUG_STREAMS

View File

@@ -0,0 +1,73 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Test.Mocks;
/// <summary>
/// A stream wrapper that throws NotSupportedException on Flush() calls.
/// This is used to test that archive iteration handles streams that don't support flushing.
/// </summary>
public class ThrowOnFlushStream : Stream
{
private readonly Stream inner;
public ThrowOnFlushStream(Stream inner)
{
this.inner = inner;
}
public override bool CanRead => inner.CanRead;
public override bool CanSeek => false;
public override bool CanWrite => false;
public override long Length => throw new NotSupportedException();
public override long Position
{
get => throw new NotSupportedException();
set => throw new NotSupportedException();
}
public override void Flush() => throw new NotSupportedException("Flush not supported");
public override Task FlushAsync(CancellationToken cancellationToken) =>
throw new NotSupportedException("FlushAsync not supported");
public override int Read(byte[] buffer, int offset, int count) =>
inner.Read(buffer, offset, count);
public override Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
) => inner.ReadAsync(buffer, offset, count, cancellationToken);
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
) => inner.ReadAsync(buffer, cancellationToken);
#endif
public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
public override void SetLength(long value) => throw new NotSupportedException();
public override void Write(byte[] buffer, int offset, int count) =>
throw new NotSupportedException();
protected override void Dispose(bool disposing)
{
if (disposing)
{
inner.Dispose();
}
base.Dispose(disposing);
}
}
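A minimal usage sketch, mirroring the regression tests later in this diff: the mock wraps the archive stream so any internal Flush() call surfaces as NotSupportedException, the way some network streams behave ('path' and the archive file are assumed here):

// Hypothetical usage: iteration must survive Flush() throwing on the inner stream.
using var fileStream = File.OpenRead(path);
using Stream stream = new ThrowOnFlushStream(fileStream);
using var reader = ReaderFactory.Open(stream);
while (reader.MoveToNextEntry())
{
    // Entries should keep coming; the NotSupportedException from Flush()
    // must not silently terminate iteration after the first entry.
}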

View File

@@ -717,4 +717,37 @@ public class RarArchiveTests : ArchiveTests
// Verify the exception message matches our expectation
Assert.Contains("unpacked file size does not match header", exception.Message);
}
/// <summary>
/// Test case for malformed RAR archives that previously caused infinite loops.
/// This test verifies that attempting to read entries from a potentially malformed
/// 512-byte RAR archive throws an InvalidOperationException instead of looping infinitely.
/// See: https://github.com/adamhathcock/sharpcompress/issues/1176
/// </summary>
[Fact]
public void Rar_MalformedArchive_NoInfiniteLoop()
{
var testFile = "Rar.malformed_512byte.rar";
var readerOptions = new ReaderOptions { LookForHeader = true };
// This should throw InvalidOperationException, not hang in an infinite loop
var exception = Assert.Throws<InvalidOperationException>(() =>
{
using var fileStream = File.Open(
Path.Combine(TEST_ARCHIVES_PATH, testFile),
FileMode.Open
);
using var archive = RarArchive.Open(fileStream, readerOptions);
// Attempting to enumerate entries should throw an exception
// instead of looping infinitely
foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
{
// This line should not be reached due to the exception
}
});
// Verify that the exception is related to seeking beyond available data
Assert.Contains("Cannot seek to position", exception.Message);
}
}
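The fix itself lives in SourceStream.Seek and is not shown in this section; the asserted message suggests a bounds guard along these lines. A rough sketch under that assumption — names and structure are illustrative, not the actual implementation:

// Illustrative only: bail out instead of spinning when a malformed header
// asks for a position beyond the data the source can actually provide.
long reached = SkipForwardTo(position); // hypothetical helper
if (reached < position)
{
    throw new InvalidOperationException($"Cannot seek to position {position}");
}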

View File

@@ -0,0 +1,139 @@
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Common;
using Xunit;
namespace SharpCompress.Test.SevenZip;
#if !NETFRAMEWORK
public class SevenZipArchiveAsyncTests : ArchiveTests
{
[Fact]
public async Task SevenZipArchive_LZMA_AsyncStreamExtraction()
{
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "7Zip.LZMA.7z");
using var stream = File.OpenRead(testArchive);
using var archive = ArchiveFactory.Open(stream);
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
var targetPath = Path.Combine(SCRATCH_FILES_PATH, entry.Key!);
var targetDir = Path.GetDirectoryName(targetPath);
if (!string.IsNullOrEmpty(targetDir) && !Directory.Exists(targetDir))
{
Directory.CreateDirectory(targetDir);
}
using var sourceStream = await entry.OpenEntryStreamAsync(CancellationToken.None);
await using var targetStream = File.Create(targetPath);
await sourceStream.CopyToAsync(targetStream, CancellationToken.None);
}
VerifyFiles();
}
[Fact]
public async Task SevenZipArchive_LZMA2_AsyncStreamExtraction()
{
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "7Zip.LZMA2.7z");
using var stream = File.OpenRead(testArchive);
using var archive = ArchiveFactory.Open(stream);
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
var targetPath = Path.Combine(SCRATCH_FILES_PATH, entry.Key!);
var targetDir = Path.GetDirectoryName(targetPath);
if (!string.IsNullOrEmpty(targetDir) && !Directory.Exists(targetDir))
{
Directory.CreateDirectory(targetDir);
}
using var sourceStream = await entry.OpenEntryStreamAsync(CancellationToken.None);
await using var targetStream = File.Create(targetPath);
await sourceStream.CopyToAsync(targetStream, CancellationToken.None);
}
VerifyFiles();
}
[Fact]
public async Task SevenZipArchive_Solid_AsyncStreamExtraction()
{
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "7Zip.solid.7z");
using var stream = File.OpenRead(testArchive);
using var archive = ArchiveFactory.Open(stream);
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
var targetPath = Path.Combine(SCRATCH_FILES_PATH, entry.Key!);
var targetDir = Path.GetDirectoryName(targetPath);
if (!string.IsNullOrEmpty(targetDir) && !Directory.Exists(targetDir))
{
Directory.CreateDirectory(targetDir);
}
using var sourceStream = await entry.OpenEntryStreamAsync(CancellationToken.None);
await using var targetStream = File.Create(targetPath);
await sourceStream.CopyToAsync(targetStream, CancellationToken.None);
}
VerifyFiles();
}
[Fact]
public async Task SevenZipArchive_BZip2_AsyncStreamExtraction()
{
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "7Zip.BZip2.7z");
using var stream = File.OpenRead(testArchive);
using var archive = ArchiveFactory.Open(stream);
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
var targetPath = Path.Combine(SCRATCH_FILES_PATH, entry.Key!);
var targetDir = Path.GetDirectoryName(targetPath);
if (!string.IsNullOrEmpty(targetDir) && !Directory.Exists(targetDir))
{
Directory.CreateDirectory(targetDir);
}
using var sourceStream = await entry.OpenEntryStreamAsync(CancellationToken.None);
await using var targetStream = File.Create(targetPath);
await sourceStream.CopyToAsync(targetStream, CancellationToken.None);
}
VerifyFiles();
}
[Fact]
public async Task SevenZipArchive_PPMd_AsyncStreamExtraction()
{
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "7Zip.PPMd.7z");
using var stream = File.OpenRead(testArchive);
using var archive = ArchiveFactory.Open(stream);
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
var targetPath = Path.Combine(SCRATCH_FILES_PATH, entry.Key!);
var targetDir = Path.GetDirectoryName(targetPath);
if (!string.IsNullOrEmpty(targetDir) && !Directory.Exists(targetDir))
{
Directory.CreateDirectory(targetDir);
}
using var sourceStream = await entry.OpenEntryStreamAsync(CancellationToken.None);
await using var targetStream = File.Create(targetPath);
await sourceStream.CopyToAsync(targetStream, CancellationToken.None);
}
VerifyFiles();
}
}
#endif

View File

@@ -251,4 +251,98 @@ public class SevenZipArchiveTests : ArchiveTests
);
Assert.False(nonSolidArchive.IsSolid);
}
[Fact]
public void SevenZipArchive_Solid_ExtractAllEntries_Contiguous()
{
// This test verifies that solid archives iterate entries as contiguous streams
// rather than recreating the decompression stream for each entry
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "7Zip.solid.7z");
using var archive = SevenZipArchive.Open(testArchive);
Assert.True(archive.IsSolid);
using var reader = archive.ExtractAllEntries();
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
}
VerifyFiles();
}
[Fact]
public void SevenZipArchive_Solid_VerifyStreamReuse()
{
// This test verifies that the folder stream is reused within each folder
// and not recreated for each entry in solid archives
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "7Zip.solid.7z");
using var archive = SevenZipArchive.Open(testArchive);
Assert.True(archive.IsSolid);
using var reader = archive.ExtractAllEntries();
var sevenZipReader = Assert.IsType<SevenZipArchive.SevenZipReader>(reader);
sevenZipReader.DiagnosticsEnabled = true;
Stream? currentFolderStreamInstance = null;
object? currentFolder = null;
var entryCount = 0;
var entriesInCurrentFolder = 0;
var streamRecreationsWithinFolder = 0;
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
// Extract the entry to trigger GetEntryStream
using var entryStream = reader.OpenEntryStream();
var buffer = new byte[4096];
while (entryStream.Read(buffer, 0, buffer.Length) > 0)
{
// Read the stream to completion
}
entryCount++;
var folderStream = sevenZipReader.DiagnosticsCurrentFolderStream;
var folder = sevenZipReader.DiagnosticsCurrentFolder;
Assert.NotNull(folderStream); // Folder stream should exist
// Check if we're in a new folder
if (currentFolder == null || !ReferenceEquals(currentFolder, folder))
{
// Starting a new folder
currentFolder = folder;
currentFolderStreamInstance = folderStream;
entriesInCurrentFolder = 1;
}
else
{
// Same folder - verify stream wasn't recreated
entriesInCurrentFolder++;
if (!ReferenceEquals(currentFolderStreamInstance, folderStream))
{
// Stream was recreated within the same folder - this is the bug we're testing for!
streamRecreationsWithinFolder++;
}
currentFolderStreamInstance = folderStream;
}
}
}
// Verify we actually tested multiple entries
Assert.True(entryCount > 1, "Test should have multiple entries to verify stream reuse");
// The critical check: within a single folder, the stream should NEVER be recreated
Assert.Equal(0, streamRecreationsWithinFolder); // Folder stream should remain the same for all entries in the same folder
}
}

View File

@@ -6,8 +6,8 @@
<AssemblyOriginatorKeyFile>SharpCompress.Test.snk</AssemblyOriginatorKeyFile>
<SignAssembly>true</SignAssembly>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(TargetFramework)|$(Platform)'=='Debug|net10.0|AnyCPU'">
<DefineConstants>$(DefineConstants);DEBUG_STREAMS</DefineConstants>
<PropertyGroup Condition=" '$(TargetFramework)' == 'net48' ">
<DefineConstants>$(DefineConstants);LEGACY_DOTNET</DefineConstants>
</PropertyGroup>
<PropertyGroup Condition="$([System.Runtime.InteropServices.RuntimeInformation]::IsOSPlatform($([System.Runtime.InteropServices.OSPlatform]::Windows)))">
<DefineConstants>$(DefineConstants);WINDOWS</DefineConstants>
@@ -23,9 +23,8 @@
<PackageReference Include="Microsoft.NET.Test.Sdk" />
<PackageReference Include="xunit.runner.visualstudio" />
<PackageReference Include="xunit" />
<PackageReference Include="Microsoft.NETFramework.ReferenceAssemblies" PrivateAssets="All" />
</ItemGroup>
<ItemGroup Condition=" '$(VersionlessImplicitFrameworkDefine)' != 'NETFRAMEWORK' ">
<ItemGroup Condition="$([System.Runtime.InteropServices.RuntimeInformation]::IsOSPlatform($([System.Runtime.InteropServices.OSPlatform]::Linux)))">
<PackageReference Include="Mono.Posix.NETStandard" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,202 @@
using System;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Compressors;
using SharpCompress.Compressors.BZip2;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.Lzw;
using SharpCompress.Compressors.PPMd;
using SharpCompress.Compressors.Reduce;
using SharpCompress.Compressors.ZStandard;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Test.Mocks;
using Xunit;
namespace SharpCompress.Test.Streams;
public class DisposalTests
{
private void VerifyStreamDisposal(
Func<Stream, bool, Stream> createStream,
bool supportsLeaveOpen = true
)
{
// 1. Test Dispose behavior (should dispose inner stream)
{
using var innerStream = new TestStream(new MemoryStream());
// createStream(stream, leaveOpen: false)
var stream = createStream(innerStream, false);
stream.Dispose();
// Streams that never dispose the inner stream (e.g. PpmdStream) are covered by VerifyNeverDispose instead;
// every stream verified here must dispose the inner stream when leaveOpen is false.
Assert.True(
innerStream.IsDisposed,
"Stream should have been disposed when leaveOpen=false"
);
}
// 2. Test LeaveOpen behavior (should NOT dispose inner stream)
if (supportsLeaveOpen)
{
using var innerStream = new TestStream(new MemoryStream());
// createStream(stream, leaveOpen: true)
var stream = createStream(innerStream, true);
stream.Dispose();
Assert.False(
innerStream.IsDisposed,
"Stream should NOT have been disposed when leaveOpen=true"
);
}
}
private void VerifyAlwaysDispose(Func<Stream, Stream> createStream)
{
using var innerStream = new TestStream(new MemoryStream());
var stream = createStream(innerStream);
stream.Dispose();
Assert.True(innerStream.IsDisposed, "Stream should have been disposed (AlwaysDispose)");
}
private void VerifyNeverDispose(Func<Stream, Stream> createStream)
{
using var innerStream = new TestStream(new MemoryStream());
var stream = createStream(innerStream);
stream.Dispose();
Assert.False(innerStream.IsDisposed, "Stream should NOT have been disposed (NeverDispose)");
}
[Fact]
public void SourceStream_Disposal()
{
VerifyStreamDisposal(
(stream, leaveOpen) =>
new SourceStream(
stream,
i => null,
new ReaderOptions { LeaveStreamOpen = leaveOpen }
)
);
}
[Fact]
public void ProgressReportingStream_Disposal()
{
VerifyStreamDisposal(
(stream, leaveOpen) =>
new ProgressReportingStream(
stream,
new Progress<ProgressReport>(),
"",
0,
leaveOpen: leaveOpen
)
);
}
[Fact]
public void DataDescriptorStream_Disposal()
{
// DataDescriptorStream DOES dispose inner stream
VerifyAlwaysDispose(stream => new DataDescriptorStream(stream));
}
[Fact]
public void DeflateStream_Disposal()
{
// DeflateStream in SharpCompress always disposes inner stream
VerifyAlwaysDispose(stream => new DeflateStream(stream, CompressionMode.Compress));
}
[Fact]
public void GZipStream_Disposal()
{
// GZipStream in SharpCompress always disposes inner stream
VerifyAlwaysDispose(stream => new GZipStream(stream, CompressionMode.Compress));
}
[Fact]
public void LzwStream_Disposal()
{
VerifyStreamDisposal(
(stream, leaveOpen) =>
{
var lzw = new LzwStream(stream);
lzw.IsStreamOwner = !leaveOpen;
return lzw;
}
);
}
[Fact]
public void PpmdStream_Disposal()
{
// PpmdStream does not dispose the inner stream (per code inspection),
// so it is verified with VerifyNeverDispose. Its constructor needs a PpmdProperties instance.
var props = new PpmdProperties();
VerifyNeverDispose(stream => new PpmdStream(props, stream, false));
}
[Fact]
public void LzmaStream_Disposal()
{
// LzmaStream always disposes inner stream
// Need to provide valid properties to avoid crash in constructor (invalid window size)
// 5 bytes: 1 byte properties + 4 bytes dictionary size (little endian)
// Dictionary size = 1024 (0x400) -> 00 04 00 00
var lzmaProps = new byte[] { 0, 0, 4, 0, 0 };
VerifyAlwaysDispose(stream => new LzmaStream(lzmaProps, stream));
}
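The byte layout used above is the standard LZMA properties header: one packed (lc, lp, pb) byte followed by the dictionary size as a little-endian uint32. A small sketch of a hypothetical helper (not part of this diff) that builds such a header:

// Hypothetical helper illustrating the layout used above:
// byte 0: (pb * 5 + lp) * 9 + lc; bytes 1-4: dictionary size, little-endian.
static byte[] BuildLzmaProperties(byte lc, byte lp, byte pb, uint dictSize)
{
    var props = new byte[5];
    props[0] = (byte)((pb * 5 + lp) * 9 + lc);
    props[1] = (byte)(dictSize & 0xFF);
    props[2] = (byte)((dictSize >> 8) & 0xFF);
    props[3] = (byte)((dictSize >> 16) & 0xFF);
    props[4] = (byte)((dictSize >> 24) & 0xFF);
    return props;
}
// BuildLzmaProperties(0, 0, 0, 1024) yields { 0, 0, 4, 0, 0 }, as in the test above.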
[Fact]
public void LZipStream_Disposal()
{
// LZipStream now supports leaveOpen parameter
// Use Compress mode to avoid need for valid input header
VerifyStreamDisposal(
(stream, leaveOpen) => new LZipStream(stream, CompressionMode.Compress, leaveOpen)
);
}
[Fact]
public void BZip2Stream_Disposal()
{
// BZip2Stream now supports leaveOpen parameter
VerifyStreamDisposal(
(stream, leaveOpen) =>
new BZip2Stream(stream, CompressionMode.Compress, false, leaveOpen)
);
}
[Fact]
public void ReduceStream_Disposal()
{
// ReduceStream does not dispose inner stream
VerifyNeverDispose(stream => new ReduceStream(stream, 0, 0, 1));
}
[Fact]
public void ZStandard_CompressionStream_Disposal()
{
VerifyStreamDisposal(
(stream, leaveOpen) =>
new CompressionStream(stream, level: 0, bufferSize: 0, leaveOpen: leaveOpen)
);
}
[Fact]
public void ZStandard_DecompressionStream_Disposal()
{
VerifyStreamDisposal(
(stream, leaveOpen) =>
new DecompressionStream(
stream,
bufferSize: 0,
checkEndOfStream: false,
leaveOpen: leaveOpen
)
);
}
}
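The three helpers above correspond to the three disposal policies these streams exhibit. The conventional leaveOpen implementation they probe for looks roughly like this generic sketch (no specific SharpCompress stream implied):

// Generic leaveOpen pattern the disposal tests exercise:
private readonly Stream _inner;
private readonly bool _leaveOpen;

protected override void Dispose(bool disposing)
{
    if (disposing && !_leaveOpen)
    {
        _inner.Dispose(); // only owned streams are closed
    }
    base.Dispose(disposing);
}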

View File

@@ -0,0 +1,226 @@
using System;
using System.IO;
using System.Text;
using SharpCompress.Compressors;
using SharpCompress.Compressors.BZip2;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Test.Mocks;
using Xunit;
namespace SharpCompress.Test.Streams;
public class LeaveOpenBehaviorTests
{
private static byte[] CreateTestData() =>
Encoding.UTF8.GetBytes("The quick brown fox jumps over the lazy dog");
[Fact]
public void BZip2Stream_Compress_LeaveOpen_False()
{
using var innerStream = new TestStream(new MemoryStream());
using (
var bzip2 = new BZip2Stream(
innerStream,
CompressionMode.Compress,
false,
leaveOpen: false
)
)
{
bzip2.Write(CreateTestData(), 0, CreateTestData().Length);
bzip2.Finish();
}
Assert.True(innerStream.IsDisposed, "Inner stream should be disposed when leaveOpen=false");
}
[Fact]
public void BZip2Stream_Compress_LeaveOpen_True()
{
using var innerStream = new TestStream(new MemoryStream());
byte[] compressed;
using (
var bzip2 = new BZip2Stream(
innerStream,
CompressionMode.Compress,
false,
leaveOpen: true
)
)
{
bzip2.Write(CreateTestData(), 0, CreateTestData().Length);
bzip2.Finish();
}
Assert.False(
innerStream.IsDisposed,
"Inner stream should NOT be disposed when leaveOpen=true"
);
// Should be able to read the compressed data
innerStream.Position = 0;
compressed = new byte[innerStream.Length];
innerStream.Read(compressed, 0, compressed.Length);
Assert.True(compressed.Length > 0);
}
[Fact]
public void BZip2Stream_Decompress_LeaveOpen_False()
{
// First compress some data
var memStream = new MemoryStream();
using (var bzip2 = new BZip2Stream(memStream, CompressionMode.Compress, false, true))
{
bzip2.Write(CreateTestData(), 0, CreateTestData().Length);
bzip2.Finish();
}
memStream.Position = 0;
using var innerStream = new TestStream(memStream);
var decompressed = new byte[CreateTestData().Length];
using (
var bzip2 = new BZip2Stream(
innerStream,
CompressionMode.Decompress,
false,
leaveOpen: false
)
)
{
bzip2.Read(decompressed, 0, decompressed.Length);
}
Assert.True(innerStream.IsDisposed, "Inner stream should be disposed when leaveOpen=false");
Assert.Equal(CreateTestData(), decompressed);
}
[Fact]
public void BZip2Stream_Decompress_LeaveOpen_True()
{
// First compress some data
var memStream = new MemoryStream();
using (var bzip2 = new BZip2Stream(memStream, CompressionMode.Compress, false, true))
{
bzip2.Write(CreateTestData(), 0, CreateTestData().Length);
bzip2.Finish();
}
memStream.Position = 0;
using var innerStream = new TestStream(memStream);
var decompressed = new byte[CreateTestData().Length];
using (
var bzip2 = new BZip2Stream(
innerStream,
CompressionMode.Decompress,
false,
leaveOpen: true
)
)
{
bzip2.Read(decompressed, 0, decompressed.Length);
}
Assert.False(
innerStream.IsDisposed,
"Inner stream should NOT be disposed when leaveOpen=true"
);
Assert.Equal(CreateTestData(), decompressed);
// Should still be able to use the stream
innerStream.Position = 0;
Assert.True(innerStream.CanRead);
}
[Fact]
public void LZipStream_Compress_LeaveOpen_False()
{
using var innerStream = new TestStream(new MemoryStream());
using (var lzip = new LZipStream(innerStream, CompressionMode.Compress, leaveOpen: false))
{
lzip.Write(CreateTestData(), 0, CreateTestData().Length);
lzip.Finish();
}
Assert.True(innerStream.IsDisposed, "Inner stream should be disposed when leaveOpen=false");
}
[Fact]
public void LZipStream_Compress_LeaveOpen_True()
{
using var innerStream = new TestStream(new MemoryStream());
byte[] compressed;
using (var lzip = new LZipStream(innerStream, CompressionMode.Compress, leaveOpen: true))
{
lzip.Write(CreateTestData(), 0, CreateTestData().Length);
lzip.Finish();
}
Assert.False(
innerStream.IsDisposed,
"Inner stream should NOT be disposed when leaveOpen=true"
);
// Should be able to read the compressed data
innerStream.Position = 0;
compressed = new byte[innerStream.Length];
innerStream.Read(compressed, 0, compressed.Length);
Assert.True(compressed.Length > 0);
}
[Fact]
public void LZipStream_Decompress_LeaveOpen_False()
{
// First compress some data
var memStream = new MemoryStream();
using (var lzip = new LZipStream(memStream, CompressionMode.Compress, true))
{
lzip.Write(CreateTestData(), 0, CreateTestData().Length);
lzip.Finish();
}
memStream.Position = 0;
using var innerStream = new TestStream(memStream);
var decompressed = new byte[CreateTestData().Length];
using (var lzip = new LZipStream(innerStream, CompressionMode.Decompress, leaveOpen: false))
{
lzip.Read(decompressed, 0, decompressed.Length);
}
Assert.True(innerStream.IsDisposed, "Inner stream should be disposed when leaveOpen=false");
Assert.Equal(CreateTestData(), decompressed);
}
[Fact]
public void LZipStream_Decompress_LeaveOpen_True()
{
// First compress some data
var memStream = new MemoryStream();
using (var lzip = new LZipStream(memStream, CompressionMode.Compress, true))
{
lzip.Write(CreateTestData(), 0, CreateTestData().Length);
lzip.Finish();
}
memStream.Position = 0;
using var innerStream = new TestStream(memStream);
var decompressed = new byte[CreateTestData().Length];
using (var lzip = new LZipStream(innerStream, CompressionMode.Decompress, leaveOpen: true))
{
lzip.Read(decompressed, 0, decompressed.Length);
}
Assert.False(
innerStream.IsDisposed,
"Inner stream should NOT be disposed when leaveOpen=true"
);
Assert.Equal(CreateTestData(), decompressed);
// Should still be able to use the stream
innerStream.Position = 0;
Assert.True(innerStream.CanRead);
}
}

View File

@@ -5,6 +5,7 @@ using System.Linq;
using System.Text;
using SharpCompress.Compressors.LZMA;
using SharpCompress.IO;
using SharpCompress.Test.Mocks;
using Xunit;
namespace SharpCompress.Test.Streams;
@@ -64,7 +65,14 @@ public class SharpCompressStreamTests
{
createData(ms);
using (SharpCompressStream scs = new SharpCompressStream(ms, true, false, 0x10000))
using (
SharpCompressStream scs = new SharpCompressStream(
new ForwardOnlyStream(ms),
true,
false,
0x10000
)
)
{
IStreamStack stack = (IStreamStack)scs;
@@ -89,4 +97,25 @@ public class SharpCompressStreamTests
}
}
}
[Fact]
public void BufferedSubStream_DoubleDispose_DoesNotCorruptArrayPool()
{
// This test verifies that calling Dispose multiple times on BufferedSubStream
// doesn't return the same array to the pool twice, which would cause pool corruption
byte[] data = new byte[0x10000];
using (MemoryStream ms = new MemoryStream(data))
{
var stream = new BufferedSubStream(ms, 0, data.Length);
// First disposal
stream.Dispose();
// Second disposal should not throw or corrupt the pool
stream.Dispose();
}
// If we got here without an exception, the test passed
Assert.True(true);
}
}
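For context on the hazard: returning the same rented array to ArrayPool twice corrupts the pool, since the same buffer can then be handed out to two independent callers. A minimal sketch of the usual guard, assuming BufferedSubStream rents from ArrayPool&lt;byte&gt;.Shared — illustrative, not the library's actual code:

// Illustrative guard (requires using System.Buffers): null the pooled array
// before returning it, so a second Dispose() is a no-op, not a second return.
private byte[]? _buffer = ArrayPool<byte>.Shared.Rent(81920);

protected override void Dispose(bool disposing)
{
    if (disposing && _buffer is not null)
    {
        ArrayPool<byte>.Shared.Return(_buffer);
        _buffer = null; // the array can now be returned at most once
    }
    base.Dispose(disposing);
}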

View File

@@ -3,7 +3,6 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using SharpCompress.Common.Zip.SOZip;
using SharpCompress.Readers;
using Xunit;
@@ -46,7 +45,7 @@ public class TestBase : IDisposable
public void Dispose() => Directory.Delete(SCRATCH_BASE_PATH, true);
public void VerifyFiles(bool skipSoIndexes = false)
public void VerifyFiles()
{
if (UseExtensionInsteadOfNameToVerify)
{
@@ -54,7 +53,7 @@ public class TestBase : IDisposable
}
else
{
VerifyFilesByName(skipSoIndexes);
VerifyFilesByName();
}
}
@@ -73,23 +72,10 @@ public class TestBase : IDisposable
}
}
private void VerifyFilesByName(bool skipSoIndexes)
protected void VerifyFilesByName()
{
var extracted = Directory
.EnumerateFiles(SCRATCH_FILES_PATH, "*.*", SearchOption.AllDirectories)
.Where(x =>
{
if (
skipSoIndexes
&& Path.GetFileName(x)
.EndsWith(SOZipIndex.INDEX_EXTENSION, StringComparison.OrdinalIgnoreCase)
)
{
return false;
}
return true;
})
.ToLookup(path => path.Substring(SCRATCH_FILES_PATH.Length));
var original = Directory
.EnumerateFiles(ORIGINAL_FILES_PATH, "*.*", SearchOption.AllDirectories)

View File

@@ -1,257 +0,0 @@
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.Common.Zip.SOZip;
using SharpCompress.Readers.Zip;
using SharpCompress.Test.Mocks;
using SharpCompress.Writers;
using SharpCompress.Writers.Zip;
using Xunit;
namespace SharpCompress.Test.Zip;
public class SoZipReaderTests : TestBase
{
[Fact]
public async Task SOZip_Reader_RegularZip_NoSozipEntries()
{
// Regular zip files should not have SOZip entries
var path = Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.zip");
using Stream stream = new ForwardOnlyStream(File.OpenRead(path));
using var reader = ZipReader.Open(stream);
while (await reader.MoveToNextEntryAsync())
{
// Regular zip entries should NOT be SOZip
Assert.False(reader.Entry.IsSozip, $"Entry {reader.Entry.Key} should not be SOZip");
Assert.False(
reader.Entry.IsSozipIndexFile,
$"Entry {reader.Entry.Key} should not be a SOZip index file"
);
}
}
[Fact]
public void SOZip_Archive_RegularZip_NoSozipEntries()
{
// Regular zip files should not have SOZip entries
var path = Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.zip");
using Stream stream = File.OpenRead(path);
using var archive = ZipArchive.Open(stream);
foreach (var entry in archive.Entries)
{
// Regular zip entries should NOT be SOZip
Assert.False(entry.IsSozip, $"Entry {entry.Key} should not be SOZip");
Assert.False(
entry.IsSozipIndexFile,
$"Entry {entry.Key} should not be a SOZip index file"
);
}
}
[Fact]
public void SOZip_Archive_ReadSOZipFile()
{
// Read the SOZip test archive
var path = Path.Combine(TEST_ARCHIVES_PATH, "Zip.sozip.zip");
using Stream stream = File.OpenRead(path);
using var archive = ZipArchive.Open(stream);
var entries = archive.Entries.ToList();
// Should have 3 entries: data.txt, .data.txt.sozip.idx, and small.txt
Assert.Equal(3, entries.Count);
// Verify we have one SOZip index file
var indexFiles = entries.Where(e => e.IsSozipIndexFile).ToList();
Assert.Single(indexFiles);
Assert.Equal(".data.txt.sozip.idx", indexFiles[0].Key);
// Verify the index file is not compressed
Assert.Equal(CompressionType.None, indexFiles[0].CompressionType);
// Read and validate the index
using (var indexStream = indexFiles[0].OpenEntryStream())
{
using var memStream = new MemoryStream();
indexStream.CopyTo(memStream);
var indexBytes = memStream.ToArray();
var index = SOZipIndex.Read(indexBytes);
Assert.Equal(SOZipIndex.SOZIP_VERSION, index.Version);
Assert.Equal(1024u, index.ChunkSize); // As set in CreateSOZipTestArchive
Assert.True(index.UncompressedSize > 0);
Assert.True(index.OffsetCount > 0);
}
// Verify the data file can be read correctly
var dataEntry = entries.First(e => e.Key == "data.txt");
using (var dataStream = dataEntry.OpenEntryStream())
{
using var reader = new StreamReader(dataStream);
var content = reader.ReadToEnd();
Assert.Equal(5000, content.Length);
Assert.True(content.All(c => c == 'A'));
}
// Verify the small file
var smallEntry = entries.First(e => e.Key == "small.txt");
Assert.False(smallEntry.IsSozipIndexFile);
using (var smallStream = smallEntry.OpenEntryStream())
{
using var reader = new StreamReader(smallStream);
var content = reader.ReadToEnd();
Assert.Equal("Small content", content);
}
}
[Fact]
public async Task SOZip_Reader_ReadSOZipFile()
{
// Read the SOZip test archive with ZipReader
var path = Path.Combine(TEST_ARCHIVES_PATH, "Zip.sozip.zip");
using Stream stream = new ForwardOnlyStream(File.OpenRead(path));
using var reader = ZipReader.Open(stream);
var foundData = false;
var foundIndex = false;
var foundSmall = false;
while (await reader.MoveToNextEntryAsync())
{
if (reader.Entry.Key == "data.txt")
{
foundData = true;
Assert.False(reader.Entry.IsSozipIndexFile);
using var entryStream = reader.OpenEntryStream();
using var streamReader = new StreamReader(entryStream);
var content = streamReader.ReadToEnd();
Assert.Equal(5000, content.Length);
Assert.True(content.All(c => c == 'A'));
}
else if (reader.Entry.Key == ".data.txt.sozip.idx")
{
foundIndex = true;
Assert.True(reader.Entry.IsSozipIndexFile);
using var indexStream = reader.OpenEntryStream();
using var memStream = new MemoryStream();
await indexStream.CopyToAsync(memStream);
var indexBytes = memStream.ToArray();
var index = SOZipIndex.Read(indexBytes);
Assert.Equal(SOZipIndex.SOZIP_VERSION, index.Version);
}
else if (reader.Entry.Key == "small.txt")
{
foundSmall = true;
Assert.False(reader.Entry.IsSozipIndexFile);
}
}
Assert.True(foundData, "data.txt entry not found");
Assert.True(foundIndex, ".data.txt.sozip.idx entry not found");
Assert.True(foundSmall, "small.txt entry not found");
}
[Fact]
public void SOZip_Archive_DetectsIndexFileByName()
{
// Create a zip with a SOZip index file (by name pattern)
using var memoryStream = new MemoryStream();
using (
var writer = WriterFactory.Open(
memoryStream,
ArchiveType.Zip,
new ZipWriterOptions(CompressionType.Deflate) { LeaveStreamOpen = true }
)
)
{
// Write a regular file
writer.Write("test.txt", new MemoryStream(Encoding.UTF8.GetBytes("Hello World")));
// Write a file that looks like a SOZip index (by name pattern)
var indexData = new SOZipIndex(
chunkSize: 32768,
uncompressedSize: 100,
compressedSize: 50,
compressedOffsets: new ulong[] { 0 }
);
writer.Write(".test.txt.sozip.idx", new MemoryStream(indexData.ToByteArray()));
}
memoryStream.Position = 0;
// Test with ZipArchive
using var archive = ZipArchive.Open(memoryStream);
var entries = archive.Entries.ToList();
Assert.Equal(2, entries.Count);
var regularEntry = entries.First(e => e.Key == "test.txt");
Assert.False(regularEntry.IsSozipIndexFile);
Assert.False(regularEntry.IsSozip); // No SOZip extra field
var indexEntry = entries.First(e => e.Key == ".test.txt.sozip.idx");
Assert.True(indexEntry.IsSozipIndexFile);
}
[Fact]
public async Task SOZip_Reader_DetectsIndexFileByName()
{
// Create a zip with a SOZip index file (by name pattern)
using var memoryStream = new MemoryStream();
using (
var writer = WriterFactory.Open(
memoryStream,
ArchiveType.Zip,
new ZipWriterOptions(CompressionType.Deflate) { LeaveStreamOpen = true }
)
)
{
// Write a regular file
writer.Write("test.txt", new MemoryStream(Encoding.UTF8.GetBytes("Hello World")));
// Write a file that looks like a SOZip index (by name pattern)
var indexData = new SOZipIndex(
chunkSize: 32768,
uncompressedSize: 100,
compressedSize: 50,
compressedOffsets: new ulong[] { 0 }
);
writer.Write(".test.txt.sozip.idx", new MemoryStream(indexData.ToByteArray()));
}
memoryStream.Position = 0;
// Test with ZipReader
using Stream stream = new ForwardOnlyStream(memoryStream);
using var reader = ZipReader.Open(stream);
var foundRegular = false;
var foundIndex = false;
while (await reader.MoveToNextEntryAsync())
{
if (reader.Entry.Key == "test.txt")
{
foundRegular = true;
Assert.False(reader.Entry.IsSozipIndexFile);
Assert.False(reader.Entry.IsSozip);
}
else if (reader.Entry.Key == ".test.txt.sozip.idx")
{
foundIndex = true;
Assert.True(reader.Entry.IsSozipIndexFile);
}
}
Assert.True(foundRegular, "Regular entry not found");
Assert.True(foundIndex, "Index entry not found");
}
}

View File

@@ -1,358 +0,0 @@
using System;
using System.IO;
using System.Linq;
using System.Text;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.Common.Zip.SOZip;
using SharpCompress.Readers;
using SharpCompress.Writers;
using SharpCompress.Writers.Zip;
using Xunit;
namespace SharpCompress.Test.Zip;
public class SoZipWriterTests : TestBase
{
[Fact]
public void SOZipIndex_RoundTrip()
{
// Create an index
var offsets = new ulong[] { 0, 1024, 2048, 3072 };
var originalIndex = new SOZipIndex(
chunkSize: 32768,
uncompressedSize: 100000,
compressedSize: 50000,
compressedOffsets: offsets
);
// Serialize to bytes
var bytes = originalIndex.ToByteArray();
// Deserialize back
var parsedIndex = SOZipIndex.Read(bytes);
// Verify all fields
Assert.Equal(SOZipIndex.SOZIP_VERSION, parsedIndex.Version);
Assert.Equal(32768u, parsedIndex.ChunkSize);
Assert.Equal(100000ul, parsedIndex.UncompressedSize);
Assert.Equal(50000ul, parsedIndex.CompressedSize);
Assert.Equal(4u, parsedIndex.OffsetCount);
Assert.Equal(offsets, parsedIndex.CompressedOffsets);
}
[Fact]
public void SOZipIndex_Read_InvalidMagic_ThrowsException()
{
var invalidData = new byte[] { 0x00, 0x00, 0x00, 0x00 };
var exception = Assert.Throws<InvalidDataException>(() => SOZipIndex.Read(invalidData));
Assert.Contains("magic number mismatch", exception.Message);
}
[Fact]
public void SOZipIndex_GetChunkIndex()
{
var offsets = new ulong[] { 0, 1000, 2000, 3000, 4000 };
var index = new SOZipIndex(
chunkSize: 32768,
uncompressedSize: 163840, // 5 * 32768
compressedSize: 5000,
compressedOffsets: offsets
);
Assert.Equal(0, index.GetChunkIndex(0));
Assert.Equal(0, index.GetChunkIndex(32767));
Assert.Equal(1, index.GetChunkIndex(32768));
Assert.Equal(2, index.GetChunkIndex(65536));
Assert.Equal(4, index.GetChunkIndex(163839));
}
[Fact]
public void SOZipIndex_GetCompressedOffset()
{
var offsets = new ulong[] { 0, 1000, 2000, 3000, 4000 };
var index = new SOZipIndex(
chunkSize: 32768,
uncompressedSize: 163840,
compressedSize: 5000,
compressedOffsets: offsets
);
Assert.Equal(0ul, index.GetCompressedOffset(0));
Assert.Equal(1000ul, index.GetCompressedOffset(1));
Assert.Equal(2000ul, index.GetCompressedOffset(2));
Assert.Equal(3000ul, index.GetCompressedOffset(3));
Assert.Equal(4000ul, index.GetCompressedOffset(4));
}
[Fact]
public void SOZipIndex_GetUncompressedOffset()
{
var offsets = new ulong[] { 0, 1000, 2000, 3000, 4000 };
var index = new SOZipIndex(
chunkSize: 32768,
uncompressedSize: 163840,
compressedSize: 5000,
compressedOffsets: offsets
);
Assert.Equal(0ul, index.GetUncompressedOffset(0));
Assert.Equal(32768ul, index.GetUncompressedOffset(1));
Assert.Equal(65536ul, index.GetUncompressedOffset(2));
Assert.Equal(98304ul, index.GetUncompressedOffset(3));
Assert.Equal(131072ul, index.GetUncompressedOffset(4));
}
[Fact]
public void SOZipIndex_GetIndexFileName()
{
Assert.Equal(".file.txt.sozip.idx", SOZipIndex.GetIndexFileName("file.txt"));
Assert.Equal("dir/.file.txt.sozip.idx", SOZipIndex.GetIndexFileName("dir/file.txt"));
Assert.Equal("a/b/.file.txt.sozip.idx", SOZipIndex.GetIndexFileName("a/b/file.txt"));
}
[Fact]
public void SOZipIndex_IsIndexFile()
{
Assert.True(SOZipIndex.IsIndexFile(".file.txt.sozip.idx"));
Assert.True(SOZipIndex.IsIndexFile("dir/.file.txt.sozip.idx"));
Assert.True(SOZipIndex.IsIndexFile(".test.sozip.idx"));
Assert.False(SOZipIndex.IsIndexFile("file.txt"));
Assert.False(SOZipIndex.IsIndexFile("file.sozip.idx")); // Missing leading dot
Assert.False(SOZipIndex.IsIndexFile(".file.txt")); // Missing .sozip.idx
Assert.False(SOZipIndex.IsIndexFile(""));
Assert.False(SOZipIndex.IsIndexFile(null!));
}
[Fact]
public void SOZipIndex_GetMainFileName()
{
Assert.Equal("file.txt", SOZipIndex.GetMainFileName(".file.txt.sozip.idx"));
Assert.Equal("dir/file.txt", SOZipIndex.GetMainFileName("dir/.file.txt.sozip.idx"));
Assert.Equal("test", SOZipIndex.GetMainFileName(".test.sozip.idx"));
Assert.Null(SOZipIndex.GetMainFileName("file.txt"));
Assert.Null(SOZipIndex.GetMainFileName(""));
}
[Fact]
public void ZipEntry_IsSozipIndexFile_Detection()
{
// Create a zip with a file that has a SOZip index file name pattern
using var memoryStream = new MemoryStream();
using (
var writer = WriterFactory.Open(
memoryStream,
ArchiveType.Zip,
new ZipWriterOptions(CompressionType.Deflate) { LeaveStreamOpen = true }
)
)
{
// Write a regular file
writer.Write("test.txt", new MemoryStream(Encoding.UTF8.GetBytes("Hello World")));
// Write a file with SOZip index name pattern
var indexData = new SOZipIndex(
chunkSize: 32768,
uncompressedSize: 100,
compressedSize: 50,
compressedOffsets: new ulong[] { 0 }
);
writer.Write(".test.txt.sozip.idx", new MemoryStream(indexData.ToByteArray()));
}
memoryStream.Position = 0;
using var archive = ZipArchive.Open(memoryStream);
var entries = archive.Entries.ToList();
Assert.Equal(2, entries.Count);
var regularEntry = entries.First(e => e.Key == "test.txt");
Assert.False(regularEntry.IsSozipIndexFile);
Assert.False(regularEntry.IsSozip); // No SOZip extra field
var indexEntry = entries.First(e => e.Key == ".test.txt.sozip.idx");
Assert.True(indexEntry.IsSozipIndexFile);
}
[Fact]
public void ZipWriterOptions_SOZipDefaults()
{
var options = new ZipWriterOptions(CompressionType.Deflate);
Assert.False(options.EnableSOZip);
Assert.Equal((int)SOZipIndex.DEFAULT_CHUNK_SIZE, options.SOZipChunkSize);
Assert.Equal(1048576L, options.SOZipMinFileSize); // 1MB
}
[Fact]
public void ZipWriterEntryOptions_SOZipDefaults()
{
var options = new ZipWriterEntryOptions();
Assert.Null(options.EnableSOZip);
}
[Fact]
public void SOZip_RoundTrip_CompressAndDecompress()
{
// Create a SOZip archive from Original files
var archivePath = Path.Combine(SCRATCH2_FILES_PATH, "test.sozip.zip");
using (var stream = File.Create(archivePath))
{
var options = new ZipWriterOptions(CompressionType.Deflate)
{
EnableSOZip = true,
SOZipMinFileSize = 1024, // 1KB to ensure test files qualify
LeaveStreamOpen = false,
};
using var writer = new ZipWriter(stream, options);
// Write all files from Original directory
var files = Directory.GetFiles(ORIGINAL_FILES_PATH, "*", SearchOption.AllDirectories);
foreach (var filePath in files)
{
var relativePath = filePath
.Substring(ORIGINAL_FILES_PATH.Length + 1)
.Replace('\\', '/');
using var fileStream = File.OpenRead(filePath);
writer.Write(relativePath, fileStream, new ZipWriterEntryOptions());
}
}
// Validate the archive was created and has files
Assert.True(File.Exists(archivePath));
// Validate the archive has SOZip entries
using (var stream = File.OpenRead(archivePath))
{
using var archive = ZipArchive.Open(stream);
var allEntries = archive.Entries.ToList();
// Archive should have files
Assert.NotEmpty(allEntries);
var sozipIndexEntries = allEntries.Where(e => e.IsSozipIndexFile).ToList();
// Should have at least one SOZip index file
Assert.NotEmpty(sozipIndexEntries);
// Verify index files have valid SOZip index data
foreach (var indexEntry in sozipIndexEntries)
{
// Check that the entry is stored (not compressed)
Assert.Equal(CompressionType.None, indexEntry.CompressionType);
using var indexStream = indexEntry.OpenEntryStream();
using var memStream = new MemoryStream();
indexStream.CopyTo(memStream);
var indexBytes = memStream.ToArray();
// Debug: Check first 4 bytes
Assert.True(
indexBytes.Length >= 4,
$"Index file too small: {indexBytes.Length} bytes"
);
// Should be able to parse the index without exception
var index = SOZipIndex.Read(indexBytes);
Assert.Equal(SOZipIndex.SOZIP_VERSION, index.Version);
Assert.True(index.ChunkSize > 0);
Assert.True(index.UncompressedSize > 0);
Assert.True(index.OffsetCount > 0);
// Verify there's a corresponding data file
var mainFileName = SOZipIndex.GetMainFileName(indexEntry.Key!);
Assert.NotNull(mainFileName);
Assert.Contains(allEntries, e => e.Key == mainFileName);
}
}
// Read and decompress the archive
using (var stream = File.OpenRead(archivePath))
{
using var reader = ReaderFactory.Open(stream);
reader.WriteAllToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true }
);
}
// Verify extracted files match originals
VerifyFiles(true);
}
[Fact]
public void CreateSOZipTestArchive()
{
// Create a SOZip test archive that can be committed to the repository
var archivePath = Path.Combine(TEST_ARCHIVES_PATH, "Zip.sozip.zip");
using (var stream = File.Create(archivePath))
{
var options = new ZipWriterOptions(CompressionType.Deflate)
{
EnableSOZip = true,
SOZipMinFileSize = 100, // Low threshold to ensure test content is optimized
SOZipChunkSize = 1024, // Small chunks for testing
LeaveStreamOpen = false,
};
using var writer = new ZipWriter(stream, options);
// Create test content that's large enough to create multiple chunks
var largeContent = new string('A', 5000); // 5KB of 'A's
// Write a file with enough data to be SOZip-optimized
writer.Write(
"data.txt",
new MemoryStream(Encoding.UTF8.GetBytes(largeContent)),
new ZipWriterEntryOptions()
);
// Write a smaller file that won't be SOZip-optimized
writer.Write(
"small.txt",
new MemoryStream(Encoding.UTF8.GetBytes("Small content")),
new ZipWriterEntryOptions()
);
}
// Validate the archive was created
Assert.True(File.Exists(archivePath));
// Validate it's a valid SOZip archive
using (var stream = File.OpenRead(archivePath))
{
using var archive = ZipArchive.Open(stream);
var entries = archive.Entries.ToList();
// Should have data file, small file, and index file
Assert.Equal(3, entries.Count);
// Verify we have one SOZip index file
var indexFiles = entries.Where(e => e.IsSozipIndexFile).ToList();
Assert.Single(indexFiles);
// Verify the index file
var indexEntry = indexFiles.First();
Assert.Equal(".data.txt.sozip.idx", indexEntry.Key);
// Verify the data file can be read
var dataEntry = entries.First(e => e.Key == "data.txt");
using var dataStream = dataEntry.OpenEntryStream();
using var reader = new StreamReader(dataStream);
var content = reader.ReadToEnd();
Assert.Equal(5000, content.Length);
Assert.True(content.All(c => c == 'A'));
}
}
}

View File

@@ -251,4 +251,106 @@ public class ZipReaderAsyncTests : ReaderTests
}
Assert.Equal(8, count);
}
[Fact]
public async ValueTask EntryStream_Dispose_DoesNotThrow_OnNonSeekableStream_Deflate_Async()
{
// Since version 0.41.0: EntryStream.DisposeAsync() should not throw NotSupportedException
// when FlushAsync() fails on non-seekable streams (Deflate compression)
var path = Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.dd.zip");
using Stream stream = new ForwardOnlyStream(File.OpenRead(path));
using var reader = ReaderFactory.Open(stream);
// This should not throw, even if internal FlushAsync() fails
while (await reader.MoveToNextEntryAsync())
{
if (!reader.Entry.IsDirectory)
{
#if LEGACY_DOTNET
using var entryStream = await reader.OpenEntryStreamAsync();
#else
await using var entryStream = await reader.OpenEntryStreamAsync();
#endif
// Read some data
var buffer = new byte[1024];
await entryStream.ReadAsync(buffer, 0, buffer.Length);
// DisposeAsync should not throw NotSupportedException
}
}
}
[Fact]
public async ValueTask EntryStream_Dispose_DoesNotThrow_OnNonSeekableStream_LZMA_Async()
{
// Since version 0.41.0: EntryStream.DisposeAsync() should not throw NotSupportedException
// when FlushAsync() fails on non-seekable streams (LZMA compression)
var path = Path.Combine(TEST_ARCHIVES_PATH, "Zip.lzma.dd.zip");
using Stream stream = new ForwardOnlyStream(File.OpenRead(path));
using var reader = ReaderFactory.Open(stream);
// This should not throw, even if internal FlushAsync() fails
while (await reader.MoveToNextEntryAsync())
{
if (!reader.Entry.IsDirectory)
{
#if LEGACY_DOTNET
using var entryStream = await reader.OpenEntryStreamAsync();
#else
await using var entryStream = await reader.OpenEntryStreamAsync();
#endif
// Read some data
var buffer = new byte[1024];
await entryStream.ReadAsync(buffer, 0, buffer.Length);
// DisposeAsync should not throw NotSupportedException
}
}
}
[Fact]
public async ValueTask Archive_Iteration_DoesNotBreak_WhenFlushThrows_Deflate_Async()
{
// Regression test: since 0.41.0, archive iteration would silently break
// when the input stream throws NotSupportedException in Flush().
// Only the first entry would be returned, then iteration would stop without exception.
var path = Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.dd.zip");
using var fileStream = File.OpenRead(path);
using Stream stream = new ThrowOnFlushStream(fileStream);
using var reader = ReaderFactory.Open(stream);
var count = 0;
while (await reader.MoveToNextEntryAsync())
{
if (!reader.Entry.IsDirectory)
{
count++;
}
}
// Should iterate through all entries, not just the first one
Assert.True(count > 1, $"Expected more than 1 entry, but got {count}");
}
[Fact]
public async ValueTask Archive_Iteration_DoesNotBreak_WhenFlushThrows_LZMA_Async()
{
// Regression test: since 0.41.0, archive iteration would silently break
// when the input stream throws NotSupportedException in Flush().
// Only the first entry would be returned, then iteration would stop without exception.
var path = Path.Combine(TEST_ARCHIVES_PATH, "Zip.lzma.dd.zip");
using var fileStream = File.OpenRead(path);
using Stream stream = new ThrowOnFlushStream(fileStream);
using var reader = ReaderFactory.Open(stream);
var count = 0;
while (await reader.MoveToNextEntryAsync())
{
if (!reader.Entry.IsDirectory)
{
count++;
}
}
// Should iterate through all entries, not just the first one
Assert.True(count > 1, $"Expected more than 1 entry, but got {count}");
}
}

View File

@@ -436,4 +436,98 @@ public class ZipReaderTests : ReaderTests
Assert.Equal(archiveKeys.OrderBy(k => k), readerKeys.OrderBy(k => k));
}
}
[Fact]
public void EntryStream_Dispose_DoesNotThrow_OnNonSeekableStream_Deflate()
{
// Since version 0.41.0: EntryStream.Dispose() should not throw NotSupportedException
// when Flush() fails on non-seekable streams (Deflate compression)
var path = Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.dd.zip");
using Stream stream = new ForwardOnlyStream(File.OpenRead(path));
using var reader = ReaderFactory.Open(stream);
// This should not throw, even if internal Flush() fails
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
using var entryStream = reader.OpenEntryStream();
// Read some data
var buffer = new byte[1024];
entryStream.Read(buffer, 0, buffer.Length);
// Dispose should not throw NotSupportedException
}
}
}
[Fact]
public void EntryStream_Dispose_DoesNotThrow_OnNonSeekableStream_LZMA()
{
// Since version 0.41.0: EntryStream.Dispose() should not throw NotSupportedException
// when Flush() fails on non-seekable streams (LZMA compression)
var path = Path.Combine(TEST_ARCHIVES_PATH, "Zip.lzma.dd.zip");
using Stream stream = new ForwardOnlyStream(File.OpenRead(path));
using var reader = ReaderFactory.Open(stream);
// This should not throw, even if internal Flush() fails
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
using var entryStream = reader.OpenEntryStream();
// Read some data
var buffer = new byte[1024];
entryStream.Read(buffer, 0, buffer.Length);
// Dispose should not throw NotSupportedException
}
}
}
[Fact]
public void Archive_Iteration_DoesNotBreak_WhenFlushThrows_Deflate()
{
// Regression test: since 0.41.0, archive iteration would silently break
// when the input stream throws NotSupportedException in Flush().
// Only the first entry would be returned, then iteration would stop without exception.
var path = Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.dd.zip");
using var fileStream = File.OpenRead(path);
using Stream stream = new ThrowOnFlushStream(fileStream);
using var reader = ReaderFactory.Open(stream);
var count = 0;
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
count++;
}
}
// Should iterate through all entries, not just the first one
Assert.True(count > 1, $"Expected more than 1 entry, but got {count}");
}
[Fact]
public void Archive_Iteration_DoesNotBreak_WhenFlushThrows_LZMA()
{
// Regression test: since 0.41.0, archive iteration would silently break
// when the input stream throws NotSupportedException in Flush().
// Only the first entry would be returned, then iteration would stop without exception.
var path = Path.Combine(TEST_ARCHIVES_PATH, "Zip.lzma.dd.zip");
using var fileStream = File.OpenRead(path);
using Stream stream = new ThrowOnFlushStream(fileStream);
using var reader = ReaderFactory.Open(stream);
var count = 0;
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
count++;
}
}
// Should iterate through all entries, not just the first one
Assert.True(count > 1, $"Expected more than 1 entry, but got {count}");
}
}

View File

@@ -0,0 +1,117 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Readers;
using Xunit;
namespace SharpCompress.Test.Zip;
/// <summary>
/// Tests for ZIP reading with streams that return short reads.
/// Reproduces the regression where ZIP parsing fails depending on Stream.Read chunking patterns.
/// </summary>
public class ZipShortReadTests : ReaderTests
{
/// <summary>
/// A non-seekable stream that returns controlled short reads.
/// Simulates real-world network/multipart streams that legally return fewer bytes than requested.
/// </summary>
private sealed class PatternReadStream : Stream
{
private readonly MemoryStream _inner;
private readonly int _firstReadSize;
private readonly int _chunkSize;
private bool _firstReadDone;
public PatternReadStream(byte[] bytes, int firstReadSize, int chunkSize)
{
_inner = new MemoryStream(bytes, writable: false);
_firstReadSize = firstReadSize;
_chunkSize = chunkSize;
}
public override int Read(byte[] buffer, int offset, int count)
{
int limit = !_firstReadDone ? _firstReadSize : _chunkSize;
_firstReadDone = true;
int toRead = Math.Min(count, limit);
return _inner.Read(buffer, offset, toRead);
}
public override bool CanRead => true;
public override bool CanSeek => false;
public override bool CanWrite => false;
public override long Length => throw new NotSupportedException();
public override long Position
{
get => throw new NotSupportedException();
set => throw new NotSupportedException();
}
public override void Flush() => throw new NotSupportedException();
public override long Seek(long offset, SeekOrigin origin) =>
throw new NotSupportedException();
public override void SetLength(long value) => throw new NotSupportedException();
public override void Write(byte[] buffer, int offset, int count) =>
throw new NotSupportedException();
}
/// <summary>
/// Test that ZIP reading works correctly with short reads on non-seekable streams.
/// Uses a test archive and different chunking patterns.
/// </summary>
[Theory]
[InlineData("Zip.deflate.zip", 1000, 4096)]
[InlineData("Zip.deflate.zip", 999, 4096)]
[InlineData("Zip.deflate.zip", 100, 4096)]
[InlineData("Zip.deflate.zip", 50, 512)]
[InlineData("Zip.deflate.zip", 1, 1)] // Extreme case: 1 byte at a time
[InlineData("Zip.deflate.dd.zip", 1000, 4096)]
[InlineData("Zip.deflate.dd.zip", 999, 4096)]
[InlineData("Zip.zip64.zip", 3816, 4096)]
[InlineData("Zip.zip64.zip", 3815, 4096)] // Similar to the issue pattern
public void Zip_Reader_Handles_Short_Reads(string zipFile, int firstReadSize, int chunkSize)
{
// Use an existing test ZIP file
var zipPath = Path.Combine(TEST_ARCHIVES_PATH, zipFile);
if (!File.Exists(zipPath))
{
return; // Skip if file doesn't exist
}
var bytes = File.ReadAllBytes(zipPath);
// Baseline with MemoryStream (seekable, no short reads)
var baseline = ReadEntriesFromStream(new MemoryStream(bytes, writable: false));
Assert.NotEmpty(baseline);
// Non-seekable stream with controlled short read pattern
var chunked = ReadEntriesFromStream(new PatternReadStream(bytes, firstReadSize, chunkSize));
Assert.Equal(baseline, chunked);
}
private List<string> ReadEntriesFromStream(Stream stream)
{
var names = new List<string>();
using var reader = ReaderFactory.Open(stream, new ReaderOptions { LeaveStreamOpen = true });
while (reader.MoveToNextEntry())
{
if (reader.Entry.IsDirectory)
{
continue;
}
names.Add(reader.Entry.Key!);
using var entryStream = reader.OpenEntryStream();
entryStream.CopyTo(Stream.Null);
}
return names;
}
}
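The underlying contract these cases exercise: Stream.Read may legally return fewer bytes than requested, so header parsing has to loop rather than assume a full buffer. A minimal sketch of the ReadFully pattern named in the commit history, under the assumption that it simply loops until the requested count or end of stream:

// Minimal ReadFully sketch: loop until 'count' bytes are read or EOF;
// a single Read call may return fewer bytes than requested.
static int ReadFully(Stream stream, byte[] buffer, int offset, int count)
{
    int total = 0;
    while (total < count)
    {
        int read = stream.Read(buffer, offset + total, count - total);
        if (read == 0)
        {
            break; // end of stream
        }
        total += read;
    }
    return total;
}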

View File

@@ -29,6 +29,16 @@
"Microsoft.NETFramework.ReferenceAssemblies.net48": "1.0.3"
}
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[10.0.102, )",
"resolved": "10.0.102",
"contentHash": "Oxq3RCIJSdtpIU4hLqO7XaDe/Ra3HS9Wi8rJl838SAg6Zu1iQjerA0+xXWBgUFYbgknUGCLOU0T+lzMLkvY9Qg==",
"dependencies": {
"Microsoft.Build.Tasks.Git": "10.0.102",
"Microsoft.SourceLink.Common": "10.0.102"
}
},
"Mono.Posix.NETStandard": {
"type": "Direct",
"requested": "[1.0.0, )",
@@ -55,6 +65,11 @@
"Microsoft.TestPlatform.ObjectModel": "17.13.0"
}
},
"Microsoft.Build.Tasks.Git": {
"type": "Transitive",
"resolved": "10.0.102",
"contentHash": "0i81LYX31U6UiXz4NOLbvc++u+/mVDmOt+PskrM/MygpDxkv9THKQyRUmavBpLK6iBV0abNWnn+CQgSRz//Pwg=="
},
"Microsoft.CodeCoverage": {
"type": "Transitive",
"resolved": "18.0.1",
@@ -65,6 +80,11 @@
"resolved": "1.0.3",
"contentHash": "zMk4D+9zyiEWByyQ7oPImPN/Jhpj166Ky0Nlla4eXlNL8hI/BtSJsgR8Inldd4NNpIAH3oh8yym0W2DrhXdSLQ=="
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "10.0.102",
"contentHash": "Mk1IMb9q5tahC2NltxYXFkLBtuBvfBoCQ3pIxYQWfzbCE9o1OB9SsHe0hnNGo7lWgTA/ePbFAJLWu6nLL9K17A=="
},
"Microsoft.TestPlatform.ObjectModel": {
"type": "Transitive",
"resolved": "17.13.0",
@@ -222,6 +242,16 @@
"Microsoft.NETFramework.ReferenceAssemblies.net461": "1.0.3"
}
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[10.0.102, )",
"resolved": "10.0.102",
"contentHash": "Oxq3RCIJSdtpIU4hLqO7XaDe/Ra3HS9Wi8rJl838SAg6Zu1iQjerA0+xXWBgUFYbgknUGCLOU0T+lzMLkvY9Qg==",
"dependencies": {
"Microsoft.Build.Tasks.Git": "10.0.102",
"Microsoft.SourceLink.Common": "10.0.102"
}
},
"Mono.Posix.NETStandard": {
"type": "Direct",
"requested": "[1.0.0, )",
@@ -245,6 +275,11 @@
"resolved": "3.1.5",
"contentHash": "tKi7dSTwP4m5m9eXPM2Ime4Kn7xNf4x4zT9sdLO/G4hZVnQCRiMTWoSZqI/pYTVeI27oPPqHBKYI/DjJ9GsYgA=="
},
"Microsoft.Build.Tasks.Git": {
"type": "Transitive",
"resolved": "10.0.102",
"contentHash": "0i81LYX31U6UiXz4NOLbvc++u+/mVDmOt+PskrM/MygpDxkv9THKQyRUmavBpLK6iBV0abNWnn+CQgSRz//Pwg=="
},
"Microsoft.CodeCoverage": {
"type": "Transitive",
"resolved": "18.0.1",
@@ -255,6 +290,11 @@
"resolved": "1.0.3",
"contentHash": "AmOJZwCqnOCNp6PPcf9joyogScWLtwy0M1WkqfEQ0M9nYwyDD7EX9ZjscKS5iYnyvteX7kzSKFCKt9I9dXA6mA=="
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "10.0.102",
"contentHash": "Mk1IMb9q5tahC2NltxYXFkLBtuBvfBoCQ3pIxYQWfzbCE9o1OB9SsHe0hnNGo7lWgTA/ePbFAJLWu6nLL9K17A=="
},
"Microsoft.TestPlatform.ObjectModel": {
"type": "Transitive",
"resolved": "18.0.1",

Binary file not shown.