Compare commits

...

128 Commits

Author SHA1 Message Date
Adam Hathcock
30e036f9ec Mark for 0.42.0 2025-11-28 13:24:03 +00:00
Adam Hathcock
095c871174 Merge pull request #1043 from adamhathcock/copilot/fix-divide-by-zero-exception
Fix DivideByZeroException when compressing empty files with BZip2
2025-11-27 16:47:39 +00:00
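A regression-style sketch of the fixed behavior (a minimal sketch assuming SharpCompress's public BZip2Stream API; this is not the actual patch):

using var output = new MemoryStream();
using (var bz = new BZip2Stream(output, CompressionMode.Compress, decompressConcatenated: false))
{
    // write nothing: compressing a zero-byte payload previously threw DivideByZeroException
}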
copilot-swe-agent[bot]
6d73c5b295 Fix DivideByZeroException when using BZip2 with empty files
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-27 15:59:39 +00:00
copilot-swe-agent[bot]
cc4d28193c Initial plan 2025-11-27 15:53:13 +00:00
Adam Hathcock
9433e06b93 Merge pull request #1023 from adamhathcock/copilot/fix-zip64-validation-issue
Fix version mismatch between Local File Header and Central Directory File Header in Zip archives
2025-11-25 15:30:48 +00:00
copilot-swe-agent[bot]
a92aaa51d5 Remove ZipCompressionMethod.None from version 63 check
None (stored) compression only requires version 10/20, not version 63.
Version 63 is specifically for advanced compression methods like LZMA,
PPMd, BZip2, and ZStandard.

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-25 14:54:13 +00:00
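A hypothetical sketch of the resulting mapping (illustrative only; member names assumed from SharpCompress's ZipCompressionMethod enum):

static ushort VersionNeededToExtract(ZipCompressionMethod method) => method switch
{
    // advanced methods genuinely require version 63
    ZipCompressionMethod.LZMA or ZipCompressionMethod.PPMd
    or ZipCompressionMethod.BZip2 or ZipCompressionMethod.ZStandard => 63,
    ZipCompressionMethod.Deflate => 20,
    ZipCompressionMethod.None => 10, // stored entries need only version 10/20
    _ => 20,
};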
Adam Hathcock
d41908adeb fixes for clarity 2025-11-25 14:25:58 +00:00
Adam Hathcock
81ca15b567 Update src/SharpCompress/Writers/Zip/ZipCentralDirectoryEntry.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-25 14:13:10 +00:00
Adam Hathcock
b81d0fd730 Merge pull request #1009 from adamhathcock/dependabot/nuget/AwesomeAssertions-9.3.0
Bump AwesomeAssertions from 9.2.1 to 9.3.0
2025-11-25 11:55:41 +00:00
Adam Hathcock
3a1bb187e8 Merge pull request #1031 from adamhathcock/dependabot/github_actions/actions/checkout-6
Bump actions/checkout from 5 to 6
2025-11-25 11:55:21 +00:00
Adam Hathcock
3fee14a070 Merge pull request #1035 from adamhathcock/adam/update-csharpier
Update csharpier and reformat
2025-11-25 11:54:56 +00:00
Adam Hathcock
5bf789ac65 Update csharpier and reformat 2025-11-25 11:50:21 +00:00
dependabot[bot]
be06049db3 Bump actions/checkout from 5 to 6
Bumps [actions/checkout](https://github.com/actions/checkout) from 5 to 6.
- [Release notes](https://github.com/actions/checkout/releases)
- [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md)
- [Commits](https://github.com/actions/checkout/compare/v5...v6)

---
updated-dependencies:
- dependency-name: actions/checkout
  dependency-version: '6'
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-11-24 09:20:23 +00:00
Adam Hathcock
a0435f6a60 Merge pull request #1030 from TwanVanDongen/master
Added buffer boundary tests.
2025-11-23 15:53:36 +00:00
Twan van Dongen
2321e2c90b Added buffer boundary tests. Changed large file to Alice29.txt as it's sufficient for the tests. 2025-11-22 12:32:25 +01:00
Adam Hathcock
97e98d8629 Merge pull request #1028 from TwanVanDongen/master
Buffer boundary tests
2025-11-21 08:35:54 +00:00
Twan van Dongen
d96e7362d2 Buffer boundary test for ARC's Squeezed method 2025-11-20 21:56:07 +01:00
Twan van Dongen
7dd46fe5ed More buffer boundary tests 2025-11-20 19:43:07 +01:00
Adam Hathcock
04c044cb2b Update tests/SharpCompress.Test/Zip/Zip64VersionConsistencyTests.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-19 15:49:55 +00:00
Adam Hathcock
cc10a12fbc Update tests/SharpCompress.Test/Zip/Zip64VersionConsistencyTests.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-19 15:49:27 +00:00
Adam Hathcock
8b0a1c699f Update tests/SharpCompress.Test/Zip/Zip64VersionConsistencyTests.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-19 15:49:17 +00:00
Adam Hathcock
15ca7c9807 Merge pull request #1025 from adamhathcock/copilot/fix-exception-disposing-rararchive
Fix ArgumentNullException when disposing RarArchive with damaged archives
2025-11-19 15:48:44 +00:00
Adam Hathcock
2b4da7e39b Merge pull request #1024 from adamhathcock/copilot/fix-memory-exhaustion-bug
Fix memory exhaustion in TAR header auto-detection
2025-11-19 15:33:52 +00:00
copilot-swe-agent[bot]
31f81f38af Fix null reference exception in UnpackV1.Unpack.Dispose()
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-19 15:16:18 +00:00
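A minimal sketch of the kind of guard such a fix needs (field name hypothetical; a damaged archive can leave the unpacker only partially initialized):

public void Dispose()
{
    // the window buffer may never have been allocated if header parsing failed
    _window?.Dispose();
}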
copilot-swe-agent[bot]
72cf77b7c7 Initial plan 2025-11-19 15:09:19 +00:00
copilot-swe-agent[bot]
0fe48c647e Enhance fix to handle LZMA/PPMd/BZip2/ZStandard compression methods
Also fixes pre-existing version mismatch for advanced compression methods that require version 63. Added tests for LZMA and PPMd to verify version consistency.

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-19 11:29:43 +00:00
copilot-swe-agent[bot]
7b06652bff Add validation to prevent memory exhaustion in TAR long name headers
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-19 11:20:59 +00:00
copilot-swe-agent[bot]
434ce05416 Fix Zip64 version mismatch between LFH and CDFH
When UseZip64=true but files are small, ensure Central Directory File Header uses version 45 to match Local File Header. This fixes validation failures in System.IO.Packaging and other strict readers.

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-19 11:14:06 +00:00
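A hypothetical sketch of the invariant being enforced (property names illustrative):

// With UseZip64 requested, both headers must advertise version 45,
// even when the entry's sizes end up fitting in 32 bits.
var version = useZip64 ? (ushort)45 : versionForCompressionMethod;
localFileHeader.VersionNeeded = version;
centralDirectoryEntry.VersionNeeded = version; // a mismatch fails strict readers such as System.IO.Packaging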
copilot-swe-agent[bot]
0698031ed4 Initial plan 2025-11-19 11:04:50 +00:00
copilot-swe-agent[bot]
51237a34eb Initial plan 2025-11-19 11:02:28 +00:00
Adam Hathcock
b8264a8131 Merge pull request #1004 from adamhathcock/adam/async-xz
Async XZ
2025-11-19 10:53:24 +00:00
Adam Hathcock
cad923018e Merge remote-tracking branch 'origin/master' into adam/async-xz 2025-11-19 09:56:01 +00:00
Adam Hathcock
db94b49941 fix misspellings 2025-11-19 09:55:43 +00:00
Adam Hathcock
72d15d9cbf Merge pull request #1019 from TwanVanDongen/master
ARJ's methods 1, 2 and 3 implemented for streaming
2025-11-18 13:41:49 +00:00
Twan van Dongen
e0186eadc0 Add buffer boundary tests for streaming decompression. Methods 1, 2 and 3 still need full streaming fixes; an exception is now thrown to prevent corrupt output. 2025-11-18 08:14:34 +01:00
Twan van Dongen
4cfa5b04af Included ARC and ARJ in readme 2025-11-14 15:57:22 +01:00
Twan van Dongen
f2c54b1f8b Merge branch 'master' of https://github.com/TwanVanDongen/sharpcompress 2025-11-14 15:43:51 +01:00
Twan van Dongen
d7d0bc6582 Missed formatting FilePart. 2025-11-14 15:43:47 +01:00
Twan
dd9dc2500b Merge branch 'adamhathcock:master' into master 2025-11-14 15:40:36 +01:00
Twan van Dongen
4efb109da8 Arj's methods CompressedMost (1), Compressed (2) and CompressedFaster (3) implemented. 2025-11-14 15:39:58 +01:00
Adam Hathcock
bcf9a6bdf1 Merge pull request #1017 from Morilli/fix-streamstack
Fix some IStreamStack and SharpCompressStream functions
2025-11-14 08:33:38 +00:00
Morilli
e3a25ecdc0 make formatting worse with csharpier 2025-11-14 03:54:26 +01:00
Morilli
783521928d fix SharpCompressStream BufferSize setter and Seek as well
the BufferSize setter was completely broken and trashed the `_internalPosition` value on set; for `Seek`, this is just an off-by-one fix that allows seeking to immediately past the last byte in the buffer
2025-11-14 03:37:08 +01:00
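In boundary terms, the off-by-one looks roughly like this (hypothetical condition, for illustration only):

// before: a position equal to the buffered end was rejected
if (newPosition < bufferStart || newPosition >= bufferEnd) { Refill(); }
// after: seeking to exactly one past the last buffered byte is valid
if (newPosition < bufferStart || newPosition > bufferEnd) { Refill(); }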
Morilli
9a876abd31 fix IStreamStack.StackSeek with buffering streams 2025-11-14 03:37:08 +01:00
Morilli
97f58b412e Add test for StackSeek behavior 2025-11-14 03:37:08 +01:00
dependabot[bot]
4c61628078 Bump AwesomeAssertions from 9.2.1 to 9.3.0
---
updated-dependencies:
- dependency-name: AwesomeAssertions
  dependency-version: 9.3.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-11-10 11:57:30 +00:00
Adam Hathcock
99a8b0f750 Merge pull request #1007 from TwanVanDongen/master
ArjReader throws exception for password protected archives.
2025-11-06 09:58:24 +00:00
Twan van Dongen
a9017d7c25 ArjReader throws exception for password protected archives. 2025-11-06 09:32:12 +01:00
Adam Hathcock
d9e4b26648 Merge pull request #1006 from TwanVanDongen/master
ARJ multi-part archive handling improved
2025-11-05 08:37:54 +00:00
Twan van Dongen
0d03bafe49 Merge conflicts resolved 2025-11-05 08:19:30 +01:00
Twan
fee15a31f9 Merge branch 'adamhathcock:master' into master 2025-11-05 08:12:53 +01:00
Twan van Dongen
997d3910d4 CSharpier applied. ArjReader throws exception for multi-part archives. Method4 (decode fastest) refactored to support Stream. 2025-11-05 08:05:30 +01:00
Twan van Dongen
a3918cc0d7 ArjReader throws exception for multi-part archives. Method4 (decode fastest) refactored to support Stream. 2025-11-05 08:02:20 +01:00
Adam Hathcock
f056986b07 Merge pull request #1005 from TwanVanDongen/master
Refactor SqueezeStream for CLS Compliance, Streaming, and Generic Test Coverage
2025-11-04 15:50:36 +00:00
Twan van Dongen
59c1f02f98 Still difficult to run CSharpier... 2025-11-02 19:23:30 +01:00
Twan van Dongen
3a71a2b1f8 Ran CSharpier. 2025-11-02 19:22:01 +01:00
Twan van Dongen
2ef1215b49 Merge branch 'master' of https://github.com/TwanVanDongen/sharpcompress 2025-11-02 19:20:47 +01:00
Twan van Dongen
130ac83076 Revert "CSharpier...Refactors the SqueezeStream class to ensure full CLS compliance and proper stream behavior. It replaces the previous one-shot decoding logic with a true streaming implementation by piping Huffman-decoded output into the existing RunLength90Stream, enabling real-time decompression."
This reverts commit dd606a0702.
2025-11-02 19:20:34 +01:00
Twan van Dongen
dd606a0702 CSharpier...Refactors the SqueezeStream class to ensure full CLS compliance and proper stream behavior. It replaces the previous one-shot decoding logic with a true streaming implementation by piping Huffman-decoded output into the existing RunLength90Stream, enabling real-time decompression. 2025-11-02 19:15:17 +01:00
Twan van Dongen
84cd772f50 Refactors the SqueezeStream class to ensure full CLS compliance and proper stream behavior. It replaces the previous one-shot decoding logic with a true streaming implementation by piping Huffman-decoded output into the existing RunLength90Stream, enabling real-time decompression. 2025-11-02 19:09:59 +01:00
Adam Hathcock
fa1d7af22f Merge remote-tracking branch 'origin/master' into adam/async-xz 2025-11-01 10:35:22 +00:00
Adam Hathcock
a771ba3bc0 async tests 2025-11-01 10:35:07 +00:00
Adam Hathcock
8b612c658d Merge pull request #1003 from adamhathcock/adam/async-lzma
async lzma
2025-11-01 10:34:54 +00:00
Adam Hathcock
7dd0da5fd7 Merge branch 'adam/async-lzma' into adam/async-xz 2025-11-01 10:28:27 +00:00
Adam Hathcock
f7b3525c4e fix tests and fmt 2025-11-01 10:25:14 +00:00
Adam Hathcock
de83bdae48 Merge remote-tracking branch 'origin/master' into adam/async-lzma 2025-11-01 10:21:25 +00:00
Adam Hathcock
d90b610767 Merge pull request #994 from TwanVanDongen/master
Adding the ARJ (Archived by Robert Jung) format
2025-11-01 10:20:54 +00:00
Adam Hathcock
2d41de6b72 add async tests 2025-11-01 09:57:26 +00:00
Adam Hathcock
f391c3caf3 make async code 2025-11-01 09:42:17 +00:00
Twan van Dongen
9bdf150676 Merge branch 'master' of https://github.com/TwanVanDongen/sharpcompress 2025-10-31 16:17:26 +01:00
Twan van Dongen
0c199609eb CSharpier...Merge branch 'master' of https://github.com/TwanVanDongen/sharpcompress 2025-10-31 16:17:23 +01:00
Twan van Dongen
6eff9d3753 Merge branch 'master' of https://github.com/TwanVanDongen/sharpcompress 2025-10-31 16:15:51 +01:00
Twan
7ab16457c7 Added ARJ's compression method 4 (compressed fastest).
Refactored TestBase to support archives with mixed compression algorithms. Merge branch 'adamhathcock:master' into master
2025-10-31 16:14:48 +01:00
Twan
e7ad8132b5 Merge branch 'adamhathcock:master' into master 2025-10-31 14:33:14 +01:00
Adam Hathcock
da87e45534 add async xzstream 2025-10-31 11:57:49 +00:00
Adam Hathcock
2ffaef5563 async xzblock 2025-10-31 11:47:27 +00:00
Adam Hathcock
55cb350d2c remove needless variable 2025-10-31 11:44:32 +00:00
Adam Hathcock
7fa271a1b4 fix ILLink 2025-10-31 11:43:35 +00:00
Adam Hathcock
c53ca372f2 don't use pools in tests 2025-10-31 11:39:57 +00:00
Adam Hathcock
75bc8501f4 Merge pull request #997 from adamhathcock/copilot/fix-ziparchive-linux-issue
Fix ArchiveFactory.Open double-wrapping causing "Cannot determine compressed stream type" on Linux
2025-10-31 11:24:59 +00:00
Adam Hathcock
1e22b47fe1 some fixes 2025-10-31 11:23:30 +00:00
Adam Hathcock
74e2dca207 fmt 2025-10-31 11:15:42 +00:00
Adam Hathcock
a669de24b7 Merge remote-tracking branch 'origin/master' into adam/async-lzma 2025-10-31 11:13:17 +00:00
Adam Hathcock
e1e9c449e9 use proper async versions 2025-10-31 11:12:46 +00:00
Adam Hathcock
60e1dc0239 review fixes 2025-10-31 11:10:55 +00:00
Adam Hathcock
10eb94fd82 Merge pull request #1002 from adamhathcock/adam/async-bzip2
async bzip2 and add
2025-10-31 11:09:39 +00:00
Adam Hathcock
ccc8587e5f review fixes 2025-10-31 10:55:33 +00:00
Adam Hathcock
53c96193c1 format 2025-10-30 16:40:59 +00:00
Adam Hathcock
d4f11e00b1 some more async changes 2025-10-30 16:31:45 +00:00
Adam Hathcock
321233b82c some async implementations 2025-10-30 15:08:57 +00:00
Adam Hathcock
eb188051d4 fmt 2025-10-30 14:53:11 +00:00
Adam Hathcock
a136084e11 add adc async 2025-10-30 14:52:51 +00:00
Adam Hathcock
bc06f3179d add basics for async bzip2 2025-10-30 14:42:46 +00:00
Adam Hathcock
ee84d971b2 Merge remote-tracking branch 'origin/master' into copilot/fix-ziparchive-linux-issue
# Conflicts:
#	src/SharpCompress/packages.lock.json
2025-10-30 14:30:31 +00:00
Twan
264d80ef4c Merge branch 'master' into master 2025-10-30 07:53:09 +01:00
Adam Hathcock
dba68187ac Merge pull request #996 from adamhathcock/adam/async-rar-ai 2025-10-29 14:26:15 +00:00
Adam Hathcock
ca4a1936b3 Merge remote-tracking branch 'origin/adam/async-rar-ai' into adam/async-rar-ai 2025-10-29 14:11:02 +00:00
Adam Hathcock
77c8d31a90 Merge pull request #1000 from adamhathcock/copilot/sub-pr-996
Fix Windows test failures due to ArrayPool buffer sizing
2025-10-29 14:10:39 +00:00
Adam Hathcock
ab7196f86c some review fixes 2025-10-29 14:07:12 +00:00
copilot-swe-agent[bot]
88b3a66bf9 Fix Windows test failures in SharpCompressStreamTests
ArrayPool.Rent() can return buffers larger than requested. The tests were using test.Length (the actual buffer size) instead of the requested size (0x1000), causing failures on Windows where ArrayPool returns larger buffers than on Linux.

Fixed by:
- Using explicit size (0x1000) instead of test.Length in Read() calls
- Using test.Take(0x1000) instead of test when comparing arrays

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-29 13:40:44 +00:00
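The fix in miniature, grounded in the message above (assertion style illustrative):

var test = ArrayPool<byte>.Shared.Rent(0x1000);  // may return a buffer larger than 0x1000
int read = stream.Read(test, 0, 0x1000);         // pass the requested size, not test.Length
Assert.Equal(expected, test.Take(0x1000).ToArray()); // compare only the requested bytes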
copilot-swe-agent[bot]
ea77666b4a Final verification: All tests pass, no security issues
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-29 13:38:38 +00:00
copilot-swe-agent[bot]
db98e5f39b Fix ArchiveFactory.Open to avoid double-wrapping SharpCompressStream
Use SharpCompressStream.Create instead of constructor to properly handle
streams that are already wrapped. This prevents potential buffering issues
when opening ZIP files, particularly on Linux systems.

Added tests to verify both raw FileStream and pre-wrapped stream scenarios.

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-29 13:34:53 +00:00
copilot-swe-agent[bot]
df59c5cb9d Plan: Fix potential ZipArchive.IsZipFile failure on Linux
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-29 13:26:00 +00:00
copilot-swe-agent[bot]
e786e95358 Initial plan 2025-10-29 13:25:48 +00:00
Adam Hathcock
75ada5623c add async tests for compress stream 2025-10-29 13:23:50 +00:00
copilot-swe-agent[bot]
ad5c655c45 Initial plan 2025-10-29 13:09:05 +00:00
Adam Hathcock
65e607454e add async for UnpWriteBufAsync and remove comments 2025-10-29 13:07:43 +00:00
Adam Hathcock
f238be6003 add arraypool usage in init 2025-10-29 10:35:03 +00:00
Adam Hathcock
dc31e4c5fa fmt 2025-10-29 10:27:30 +00:00
Adam Hathcock
665d8cd266 ran out of tokens, things work but need one more conversion 2025-10-29 10:23:57 +00:00
Adam Hathcock
8324114e84 remove unused code 2025-10-29 10:11:34 +00:00
Adam Hathcock
b83e6ee4ce fix async usage 2025-10-29 09:55:11 +00:00
Adam Hathcock
58bab0d310 async unpack2.0 2025-10-29 09:53:14 +00:00
Adam Hathcock
1af51aaaba async unpack1.5 2025-10-29 09:50:12 +00:00
Adam Hathcock
a09327b831 unpack50async 2025-10-29 09:46:10 +00:00
Adam Hathcock
16543bf74c async UnpReadBufAsync 2025-10-29 09:38:55 +00:00
Adam Hathcock
aa4cd373ac added more async methods 2025-10-29 09:28:34 +00:00
Adam Hathcock
351e294362 convert unpack15 and unpack20 2025-10-29 09:23:42 +00:00
Adam Hathcock
a0c5b1cd9d add archive tests 2025-10-29 09:08:08 +00:00
Adam Hathcock
df2ed1e584 fix RarStream 2025-10-29 09:00:23 +00:00
Adam Hathcock
b354f7a3a5 fix logic mistake 2025-10-29 08:47:18 +00:00
Adam Hathcock
bb53d1e1c6 entrystream fixes and fmt 2025-10-29 08:41:05 +00:00
Twan van Dongen
2aabd8d0e1 Initial implementation of the ARJ (Archived by Robert Jung) format, supporting 'store' for single file archives. Multi-file archives and all compression algorithms still require implementations. 2025-10-28 20:38:29 +01:00
Adam Hathcock
aca97c2c6c add rarcrc tests 2025-10-28 16:48:05 +00:00
Adam Hathcock
8e7d959cf4 add async creations 2025-10-28 16:07:16 +00:00
Adam Hathcock
b23f031db9 add async reads 2025-10-28 15:52:18 +00:00
Adam Hathcock
1ba529a9d5 first pass of async files 2025-10-28 15:29:41 +00:00
Adam Hathcock
3d29c183ef basic async usage 2025-10-28 15:00:11 +00:00
140 changed files with 24105 additions and 1273 deletions

View File

@@ -3,7 +3,7 @@
"isRoot": true,
"tools": {
"csharpier": {
"version": "1.1.2",
"version": "1.2.1",
"commands": [
"csharpier"
],

View File

@@ -14,7 +14,7 @@ jobs:
os: [windows-latest, ubuntu-latest]
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- uses: actions/setup-dotnet@v5
with:
dotnet-version: 8.0.x

.gitignore vendored
View File

@@ -11,6 +11,8 @@ TestResults/
packages/*/
project.lock.json
tests/TestArchives/Scratch
tests/TestArchives/*/Scratch
tests/TestArchives/*/Scratch2
.vs
tools
.vscode
@@ -18,4 +20,3 @@ tools
.DS_Store
*.snupkg
/tests/TestArchives/6d23a38c-f064-4ef1-ad89-b942396f53b9/Scratch

View File

@@ -1,7 +1,7 @@
<Project>
<ItemGroup>
<PackageVersion Include="Bullseye" Version="6.0.0" />
<PackageVersion Include="AwesomeAssertions" Version="9.2.1" />
<PackageVersion Include="AwesomeAssertions" Version="9.3.0" />
<PackageVersion Include="Glob" Version="1.1.9" />
<PackageVersion Include="JetBrains.Profiler.SelfApi" Version="2.5.14" />
<PackageVersion Include="Microsoft.Bcl.AsyncInterfaces" Version="8.0.0" />
@@ -14,6 +14,7 @@
<PackageVersion Include="xunit" Version="2.9.3" />
<PackageVersion Include="xunit.runner.visualstudio" Version="3.1.5" />
<PackageVersion Include="ZstdSharp.Port" Version="0.8.6" />
<PackageVersion Include="Microsoft.NET.ILLink.Tasks" Version="8.0.21" />
<PackageVersion Include="Microsoft.SourceLink.GitHub" Version="8.0.0" />
<PackageVersion Include="Microsoft.NETFramework.ReferenceAssemblies" Version="1.0.3" />
</ItemGroup>

View File

@@ -1,6 +1,6 @@
# SharpCompress
SharpCompress is a compression library in pure C# for .NET Framework 4.62, .NET Standard 2.1, .NET 6.0 and NET 8.0 that can unrar, un7zip, unzip, untar unbzip2, ungzip, unlzip, unzstd with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip are implemented.
SharpCompress is a compression library in pure C# for .NET Framework 4.62, .NET Standard 2.1, .NET 6.0 and NET 8.0 that can unrar, un7zip, unzip, untar unbzip2, ungzip, unlzip, unzstd, unarc and unarj with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip are implemented.
The major feature is support for non-seekable streams so large files can be processed on the fly (i.e. download stream).

View File

@@ -20,7 +20,7 @@ public static class ArchiveFactory
public static IArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
readerOptions ??= new ReaderOptions();
stream = new SharpCompressStream(stream, bufferSize: readerOptions.BufferSize);
stream = SharpCompressStream.Create(stream, bufferSize: readerOptions.BufferSize);
return FindFactory<IArchiveFactory>(stream).Open(stream, readerOptions);
}
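A minimal sketch of what an idempotent Create factory presumably does here (hypothetical; the real method may handle more cases):

public static Stream Create(Stream stream, int bufferSize) =>
    // avoid double-wrapping: reuse an existing SharpCompressStream as-is
    stream is SharpCompressStream already
        ? already
        : new SharpCompressStream(stream, bufferSize: bufferSize);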

View File

@@ -128,6 +128,7 @@ public static class IArchiveEntryExtensions
{
using var fs = File.Open(destinationFileName, fm);
await entry.WriteToAsync(fs, cancellationToken).ConfigureAwait(false);
}
},
cancellationToken
);
}

View File

@@ -70,24 +70,51 @@ public class RarArchiveEntry : RarEntry, IArchiveEntry
public Stream OpenEntryStream()
{
RarStream stream;
if (IsRarV3)
{
return new RarStream(
stream = new RarStream(
archive.UnpackV1.Value,
FileHeader,
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)
);
}
else
{
stream = new RarStream(
archive.UnpackV2017.Value,
FileHeader,
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)
);
}
return new RarStream(
archive.UnpackV2017.Value,
FileHeader,
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)
);
stream.Initialize();
return stream;
}
public Task<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default) =>
Task.FromResult(OpenEntryStream());
public async Task<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
{
RarStream stream;
if (IsRarV3)
{
stream = new RarStream(
archive.UnpackV1.Value,
FileHeader,
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)
);
}
else
{
stream = new RarStream(
archive.UnpackV2017.Value,
FileHeader,
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)
);
}
await stream.InitializeAsync(cancellationToken);
return stream;
}
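// Usage sketch (hypothetical; assumes the public RarArchive API):
//   using var archive = RarArchive.Open("archive.rar");
//   await using var stream = await entry.OpenEntryStreamAsync(token);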
public bool IsComplete
{

View File

@@ -57,7 +57,7 @@ namespace SharpCompress.Common.Arc
return value switch
{
1 or 2 => CompressionType.None,
3 => CompressionType.RLE90,
3 => CompressionType.Packed,
4 => CompressionType.Squeezed,
5 or 6 or 7 or 8 => CompressionType.Crunched,
9 => CompressionType.Squashed,

View File

@@ -44,7 +44,7 @@ namespace SharpCompress.Common.Arc
Header.CompressedSize
);
break;
case CompressionType.RLE90:
case CompressionType.Packed:
compressedStream = new RunLength90Stream(
_stream,
(int)Header.CompressedSize
@@ -54,6 +54,14 @@ namespace SharpCompress.Common.Arc
compressedStream = new SqueezeStream(_stream, (int)Header.CompressedSize);
break;
case CompressionType.Crunched:
if (Header.OriginalSize > 128 * 1024)
{
throw new NotSupportedException(
"CompressionMethod: "
+ Header.CompressionMethod
+ " with size > 128KB"
);
}
compressedStream = new ArcLzwStream(
_stream,
(int)Header.CompressedSize,

View File

@@ -8,4 +8,5 @@ public enum ArchiveType
SevenZip,
GZip,
Arc,
Arj,
}

View File

@@ -0,0 +1,58 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common.Arc;
using SharpCompress.Common.Arj.Headers;
namespace SharpCompress.Common.Arj
{
public class ArjEntry : Entry
{
private readonly ArjFilePart _filePart;
internal ArjEntry(ArjFilePart filePart)
{
_filePart = filePart;
}
public override long Crc => _filePart.Header.OriginalCrc32;
public override string? Key => _filePart?.Header.Name;
public override string? LinkTarget => null;
public override long CompressedSize => _filePart?.Header.CompressedSize ?? 0;
public override CompressionType CompressionType
{
get
{
if (_filePart.Header.CompressionMethod == CompressionMethod.Stored)
{
return CompressionType.None;
}
return CompressionType.ArjLZ77;
}
}
public override long Size => _filePart?.Header.OriginalSize ?? 0;
public override DateTime? LastModifiedTime => _filePart.Header.DateTimeModified.DateTime;
public override DateTime? CreatedTime => _filePart.Header.DateTimeCreated.DateTime;
public override DateTime? LastAccessedTime => _filePart.Header.DateTimeAccessed.DateTime;
public override DateTime? ArchivedTime => null;
public override bool IsEncrypted => false;
public override bool IsDirectory => _filePart.Header.FileType == FileType.Directory;
public override bool IsSplitAfter => false;
internal override IEnumerable<FilePart> Parts => _filePart.Empty();
}
}

View File

@@ -0,0 +1,72 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common.Arj.Headers;
using SharpCompress.Compressors.Arj;
using SharpCompress.IO;
namespace SharpCompress.Common.Arj
{
public class ArjFilePart : FilePart
{
private readonly Stream _stream;
internal ArjLocalHeader Header { get; set; }
internal ArjFilePart(ArjLocalHeader localArjHeader, Stream seekableStream)
: base(localArjHeader.ArchiveEncoding)
{
_stream = seekableStream;
Header = localArjHeader;
}
internal override string? FilePartName => Header.Name;
internal override Stream GetCompressedStream()
{
if (_stream != null)
{
Stream compressedStream;
switch (Header.CompressionMethod)
{
case CompressionMethod.Stored:
compressedStream = new ReadOnlySubStream(
_stream,
Header.DataStartPosition,
Header.CompressedSize
);
break;
case CompressionMethod.CompressedMost:
case CompressionMethod.Compressed:
case CompressionMethod.CompressedFaster:
if (Header.OriginalSize > 128 * 1024)
{
throw new NotSupportedException(
"CompressionMethod: "
+ Header.CompressionMethod
+ " with size > 128KB"
);
}
compressedStream = new LhaStream<Lh7DecoderCfg>(
_stream,
(int)Header.OriginalSize
);
break;
case CompressionMethod.CompressedFastest:
compressedStream = new LHDecoderStream(_stream, (int)Header.OriginalSize);
break;
default:
throw new NotSupportedException(
"CompressionMethod: " + Header.CompressionMethod
);
}
return compressedStream;
}
return _stream.NotNull();
}
internal override Stream GetRawStream() => _stream;
}
}

View File

@@ -0,0 +1,36 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Readers;
namespace SharpCompress.Common.Arj
{
public class ArjVolume : Volume
{
public ArjVolume(Stream stream, ReaderOptions readerOptions, int index = 0)
: base(stream, readerOptions, index) { }
public override bool IsFirstVolume
{
get { return true; }
}
/// <summary>
/// Whether this archive is part of a multi-volume set. Multi-volume ARJ is not supported yet, so this is always false.
/// </summary>
public override bool IsMultiVolume
{
get { return false; }
}
internal IEnumerable<ArjFilePart> GetVolumeFileParts()
{
return new List<ArjFilePart>();
}
}
}

View File

@@ -0,0 +1,142 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Crypto;
namespace SharpCompress.Common.Arj.Headers
{
public enum ArjHeaderType
{
MainHeader,
LocalHeader,
}
public abstract class ArjHeader
{
private const int FIRST_HDR_SIZE = 34;
private const ushort ARJ_MAGIC = 0xEA60;
public ArjHeader(ArjHeaderType type)
{
ArjHeaderType = type;
}
public ArjHeaderType ArjHeaderType { get; }
public byte Flags { get; set; }
public FileType FileType { get; set; }
public abstract ArjHeader? Read(Stream reader);
public byte[] ReadHeader(Stream stream)
{
// check for magic bytes
Span<byte> magic = stackalloc byte[2];
if (stream.Read(magic) != 2)
{
return Array.Empty<byte>();
}
var magicValue = (ushort)(magic[0] | magic[1] << 8);
if (magicValue != ARJ_MAGIC)
{
throw new InvalidDataException("Not an ARJ file (wrong magic bytes)");
}
// read header_size
byte[] headerBytes = new byte[2];
stream.Read(headerBytes, 0, 2);
var headerSize = (ushort)(headerBytes[0] | headerBytes[1] << 8);
if (headerSize < 1)
{
return Array.Empty<byte>();
}
var body = new byte[headerSize];
var read = stream.Read(body, 0, headerSize);
if (read < headerSize)
{
return Array.Empty<byte>();
}
byte[] crc = new byte[4];
read = stream.Read(crc, 0, 4);
var checksum = Crc32Stream.Compute(body);
// Compute the hash value
if (checksum != BitConverter.ToUInt32(crc, 0))
{
throw new InvalidDataException("Header checksum is invalid");
}
return body;
}
protected List<byte[]> ReadExtendedHeaders(Stream reader)
{
List<byte[]> extendedHeader = new List<byte[]>();
byte[] buffer = new byte[2];
while (true)
{
int bytesRead = reader.Read(buffer, 0, 2);
if (bytesRead < 2)
{
throw new EndOfStreamException(
"Unexpected end of stream while reading extended header size."
);
}
var extHeaderSize = (ushort)(buffer[0] | (buffer[1] << 8));
if (extHeaderSize == 0)
{
return extendedHeader;
}
byte[] header = new byte[extHeaderSize];
bytesRead = reader.Read(header, 0, extHeaderSize);
if (bytesRead < extHeaderSize)
{
throw new EndOfStreamException(
"Unexpected end of stream while reading extended header data."
);
}
byte[] crc = new byte[4];
bytesRead = reader.Read(crc, 0, 4);
if (bytesRead < 4)
{
throw new EndOfStreamException(
"Unexpected end of stream while reading extended header CRC."
);
}
var checksum = Crc32Stream.Compute(header);
if (checksum != BitConverter.ToUInt32(crc, 0))
{
throw new InvalidDataException("Extended header checksum is invalid");
}
extendedHeader.Add(header);
}
}
// Flag helpers
public bool IsGarbled => (Flags & 0x01) != 0; // ARJ "garbled" flag: password protected
public bool IsAnsiPage => (Flags & 0x02) != 0;
public bool IsVolume => (Flags & 0x04) != 0;
public bool IsArjProtected => (Flags & 0x08) != 0;
public bool IsPathSym => (Flags & 0x10) != 0;
public bool IsBackup => (Flags & 0x20) != 0;
public bool IsSecured => (Flags & 0x40) != 0;
public bool IsAltName => (Flags & 0x80) != 0;
public static FileType FileTypeFromByte(byte value)
{
return Enum.IsDefined(typeof(FileType), value)
? (FileType)value
: Headers.FileType.Unknown;
}
}
}

View File

@@ -0,0 +1,161 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Text;
using System.Threading.Tasks;
namespace SharpCompress.Common.Arj.Headers
{
public class ArjLocalHeader : ArjHeader
{
public ArchiveEncoding ArchiveEncoding { get; }
public long DataStartPosition { get; protected set; }
public byte ArchiverVersionNumber { get; set; }
public byte MinVersionToExtract { get; set; }
public HostOS HostOS { get; set; }
public CompressionMethod CompressionMethod { get; set; }
public DosDateTime DateTimeModified { get; set; } = new DosDateTime(0);
public long CompressedSize { get; set; }
public long OriginalSize { get; set; }
public long OriginalCrc32 { get; set; }
public int FileSpecPosition { get; set; }
public int FileAccessMode { get; set; }
public byte FirstChapter { get; set; }
public byte LastChapter { get; set; }
public long ExtendedFilePosition { get; set; }
public DosDateTime DateTimeAccessed { get; set; } = new DosDateTime(0);
public DosDateTime DateTimeCreated { get; set; } = new DosDateTime(0);
public long OriginalSizeEvenForVolumes { get; set; }
public string Name { get; set; } = string.Empty;
public string Comment { get; set; } = string.Empty;
private const byte StdHdrSize = 30;
private const byte R9HdrSize = 46;
public ArjLocalHeader(ArchiveEncoding archiveEncoding)
: base(ArjHeaderType.LocalHeader)
{
ArchiveEncoding =
archiveEncoding ?? throw new ArgumentNullException(nameof(archiveEncoding));
}
public override ArjHeader? Read(Stream stream)
{
var body = ReadHeader(stream);
if (body.Length > 0)
{
ReadExtendedHeaders(stream);
var header = LoadFrom(body);
header.DataStartPosition = stream.Position;
return header;
}
return null;
}
public ArjLocalHeader LoadFrom(byte[] headerBytes)
{
int offset = 0;
int ReadInt16()
{
if (offset + 1 >= headerBytes.Length)
{
throw new EndOfStreamException();
}
var v = headerBytes[offset] & 0xFF | (headerBytes[offset + 1] & 0xFF) << 8;
offset += 2;
return v;
}
long ReadInt32()
{
if (offset + 3 >= headerBytes.Length)
{
throw new EndOfStreamException();
}
long v =
headerBytes[offset] & 0xFF
| (headerBytes[offset + 1] & 0xFF) << 8
| (headerBytes[offset + 2] & 0xFF) << 16
| (headerBytes[offset + 3] & 0xFF) << 24;
offset += 4;
return v;
}
byte headerSize = headerBytes[offset++];
ArchiverVersionNumber = headerBytes[offset++];
MinVersionToExtract = headerBytes[offset++];
HostOS hostOS = (HostOS)headerBytes[offset++];
Flags = headerBytes[offset++];
CompressionMethod = CompressionMethodFromByte(headerBytes[offset++]);
FileType = FileTypeFromByte(headerBytes[offset++]);
offset++; // Skip 1 byte
var rawTimestamp = ReadInt32();
DateTimeModified =
rawTimestamp != 0 ? new DosDateTime(rawTimestamp) : new DosDateTime(0);
CompressedSize = ReadInt32();
OriginalSize = ReadInt32();
OriginalCrc32 = ReadInt32();
FileSpecPosition = ReadInt16();
FileAccessMode = ReadInt16();
FirstChapter = headerBytes[offset++];
LastChapter = headerBytes[offset++];
ExtendedFilePosition = 0;
OriginalSizeEvenForVolumes = 0;
if (headerSize > StdHdrSize)
{
ExtendedFilePosition = ReadInt32();
if (headerSize >= R9HdrSize)
{
rawTimestamp = ReadInt32();
DateTimeAccessed =
rawTimestamp != 0 ? new DosDateTime(rawTimestamp) : new DosDateTime(0);
rawTimestamp = ReadInt32();
DateTimeCreated =
rawTimestamp != 0 ? new DosDateTime(rawTimestamp) : new DosDateTime(0);
OriginalSizeEvenForVolumes = ReadInt32();
}
}
Name = Encoding.ASCII.GetString(
headerBytes,
offset,
Array.IndexOf(headerBytes, (byte)0, offset) - offset
);
offset += Name.Length + 1;
Comment = Encoding.ASCII.GetString(
headerBytes,
offset,
Array.IndexOf(headerBytes, (byte)0, offset) - offset
);
offset += Comment.Length + 1;
return this;
}
public static CompressionMethod CompressionMethodFromByte(byte value)
{
return value switch
{
0 => CompressionMethod.Stored,
1 => CompressionMethod.CompressedMost,
2 => CompressionMethod.Compressed,
3 => CompressionMethod.CompressedFaster,
4 => CompressionMethod.CompressedFastest,
8 => CompressionMethod.NoDataNoCrc,
9 => CompressionMethod.NoData,
_ => CompressionMethod.Unknown,
};
}
}
}

View File

@@ -0,0 +1,138 @@
using System;
using System.IO;
using System.Text;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Crypto;
namespace SharpCompress.Common.Arj.Headers
{
public class ArjMainHeader : ArjHeader
{
private const int FIRST_HDR_SIZE = 34;
private const ushort ARJ_MAGIC = 0xEA60;
public ArchiveEncoding ArchiveEncoding { get; }
public int ArchiverVersionNumber { get; private set; }
public int MinVersionToExtract { get; private set; }
public HostOS HostOs { get; private set; }
public int SecurityVersion { get; private set; }
public DosDateTime CreationDateTime { get; private set; } = new DosDateTime(0);
public long CompressedSize { get; private set; }
public long ArchiveSize { get; private set; }
public long SecurityEnvelope { get; private set; }
public int FileSpecPosition { get; private set; }
public int SecurityEnvelopeLength { get; private set; }
public int EncryptionVersion { get; private set; }
public int LastChapter { get; private set; }
public int ArjProtectionFactor { get; private set; }
public int Flags2 { get; private set; }
public string Name { get; private set; } = string.Empty;
public string Comment { get; private set; } = string.Empty;
public ArjMainHeader(ArchiveEncoding archiveEncoding)
: base(ArjHeaderType.MainHeader)
{
ArchiveEncoding =
archiveEncoding ?? throw new ArgumentNullException(nameof(archiveEncoding));
}
public override ArjHeader? Read(Stream stream)
{
var body = ReadHeader(stream);
ReadExtendedHeaders(stream);
return LoadFrom(body);
}
public ArjMainHeader LoadFrom(byte[] headerBytes)
{
var offset = 1;
byte ReadByte()
{
if (offset >= headerBytes.Length)
{
throw new EndOfStreamException();
}
return (byte)(headerBytes[offset++] & 0xFF);
}
int ReadInt16()
{
if (offset + 1 >= headerBytes.Length)
{
throw new EndOfStreamException();
}
var v = headerBytes[offset] & 0xFF | (headerBytes[offset + 1] & 0xFF) << 8;
offset += 2;
return v;
}
long ReadInt32()
{
if (offset + 3 >= headerBytes.Length)
{
throw new EndOfStreamException();
}
long v =
headerBytes[offset] & 0xFF
| (headerBytes[offset + 1] & 0xFF) << 8
| (headerBytes[offset + 2] & 0xFF) << 16
| (headerBytes[offset + 3] & 0xFF) << 24;
offset += 4;
return v;
}
string ReadNullTerminatedString(byte[] x, int startIndex)
{
var result = new StringBuilder();
int i = startIndex;
while (i < x.Length && x[i] != 0)
{
result.Append((char)x[i]);
i++;
}
// Skip the null terminator
i++;
if (i < x.Length)
{
byte[] remainder = new byte[x.Length - i];
Array.Copy(x, i, remainder, 0, remainder.Length);
x = remainder;
}
return result.ToString();
}
ArchiverVersionNumber = ReadByte();
MinVersionToExtract = ReadByte();
var hostOsByte = ReadByte();
HostOs = hostOsByte <= 11 ? (HostOS)hostOsByte : HostOS.Unknown;
Flags = ReadByte();
SecurityVersion = ReadByte();
FileType = FileTypeFromByte(ReadByte());
offset++; // skip reserved
CreationDateTime = new DosDateTime((int)ReadInt32());
CompressedSize = ReadInt32();
ArchiveSize = ReadInt32();
SecurityEnvelope = ReadInt32();
FileSpecPosition = ReadInt16();
SecurityEnvelopeLength = ReadInt16();
EncryptionVersion = ReadByte();
LastChapter = ReadByte();
Name = ReadNullTerminatedString(headerBytes, offset);
Comment = ReadNullTerminatedString(headerBytes, offset + 1 + Name.Length);
return this;
}
}
}

View File

@@ -0,0 +1,20 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace SharpCompress.Common.Arj.Headers
{
public enum CompressionMethod
{
Stored = 0,
CompressedMost = 1,
Compressed = 2,
CompressedFaster = 3,
CompressedFastest = 4,
NoDataNoCrc = 8,
NoData = 9,
Unknown,
}
}

View File

@@ -0,0 +1,37 @@
using System;
namespace SharpCompress.Common.Arj.Headers
{
public class DosDateTime
{
public DateTime DateTime { get; }
public DosDateTime(long dosValue)
{
// Ensure only the lower 32 bits are used
int value = unchecked((int)(dosValue & 0xFFFFFFFF));
var date = (value >> 16) & 0xFFFF;
var time = value & 0xFFFF;
var day = date & 0x1F;
var month = (date >> 5) & 0x0F;
var year = ((date >> 9) & 0x7F) + 1980;
var second = (time & 0x1F) * 2;
var minute = (time >> 5) & 0x3F;
var hour = (time >> 11) & 0x1F;
try
{
DateTime = new DateTime(year, month, day, hour, minute, second);
}
catch
{
DateTime = DateTime.MinValue;
}
}
public override string ToString() => DateTime.ToString("yyyy-MM-dd HH:mm:ss");
}
}
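A worked example of the packing above (date in the high 16 bits, time in the low 16; values traced by hand):

// 1995-03-17 14:30:10
// date = (15 << 9) | (3 << 5) | 17 = 0x1E71  (year-1980, month, day)
// time = (14 << 11) | (30 << 5) | 5 = 0x73C5 (hour, minute, second/2)
var dt = new DosDateTime(0x1E7173C5);
// dt.DateTime == new DateTime(1995, 3, 17, 14, 30, 10)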

View File

@@ -0,0 +1,13 @@
namespace SharpCompress.Common.Arj.Headers
{
public enum FileType : byte
{
Binary = 0,
Text7Bit = 1,
CommentHeader = 2,
Directory = 3,
VolumeLabel = 4,
ChapterLabel = 5,
Unknown = 255,
}
}

View File

@@ -0,0 +1,19 @@
namespace SharpCompress.Common.Arj.Headers
{
public enum HostOS
{
MsDos = 0,
PrimOS = 1,
Unix = 2,
Amiga = 3,
MacOs = 4,
OS2 = 5,
AppleGS = 6,
AtariST = 7,
NeXT = 8,
VaxVMS = 9,
Win95 = 10,
Win32 = 11,
Unknown = 255,
}
}

View File

@@ -23,10 +23,11 @@ public enum CompressionType
Reduce4,
Explode,
Squeezed,
RLE90,
Packed,
Crunched,
Squashed,
Crushed,
Distilled,
ZStandard,
ArjLZ77,
}

View File

@@ -64,6 +64,11 @@ public class EntryStream : Stream, IStreamStack
protected override void Dispose(bool disposing)
{
if (_isDisposed)
{
return;
}
_isDisposed = true;
if (!(_completed || _reader.Cancelled))
{
SkipEntry();
@@ -81,12 +86,6 @@ public class EntryStream : Stream, IStreamStack
lzmaStream.Flush(); //Lzma over reads. Knock it back
}
}
if (_isDisposed)
{
return;
}
_isDisposed = true;
#if DEBUG_STREAMS
this.DebugDispose(typeof(EntryStream));
#endif
@@ -97,6 +96,11 @@ public class EntryStream : Stream, IStreamStack
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override async ValueTask DisposeAsync()
{
if (_isDisposed)
{
return;
}
_isDisposed = true;
if (!(_completed || _reader.Cancelled))
{
await SkipEntryAsync().ConfigureAwait(false);
@@ -114,12 +118,6 @@ public class EntryStream : Stream, IStreamStack
await lzmaStream.FlushAsync().ConfigureAwait(false);
}
}
if (_isDisposed)
{
return;
}
_isDisposed = true;
#if DEBUG_STREAMS
this.DebugDispose(typeof(EntryStream));
#endif
@@ -204,4 +202,11 @@ public class EntryStream : Stream, IStreamStack
public override void Write(byte[] buffer, int offset, int count) =>
throw new NotSupportedException();
public override Task WriteAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
) => throw new NotSupportedException();
}

View File

@@ -25,6 +25,10 @@ internal sealed class TarHeader
internal const int BLOCK_SIZE = 512;
// Maximum size for long name/link headers to prevent memory exhaustion attacks
// This is generous enough for most real-world scenarios (32KB)
private const int MAX_LONG_NAME_SIZE = 32768;
internal void Write(Stream output)
{
var buffer = new byte[BLOCK_SIZE];
@@ -186,6 +190,15 @@ internal sealed class TarHeader
private string ReadLongName(BinaryReader reader, byte[] buffer)
{
var size = ReadSize(buffer);
// Validate size to prevent memory exhaustion from malformed headers
if (size < 0 || size > MAX_LONG_NAME_SIZE)
{
throw new InvalidFormatException(
$"Long name size {size} is invalid or exceeds maximum allowed size of {MAX_LONG_NAME_SIZE} bytes"
);
}
var nameLength = (int)size;
var nameBytes = reader.ReadBytes(nameLength);
var remainingBytesToRead = BLOCK_SIZE - (nameLength % BLOCK_SIZE);

View File

@@ -24,10 +24,29 @@
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
using System;
using System.Buffers;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Compressors.ADC;
/// <summary>
/// Result of an ADC decompression operation
/// </summary>
public class AdcDecompressResult
{
/// <summary>
/// Number of bytes read from input
/// </summary>
public int BytesRead { get; set; }
/// <summary>
/// Decompressed output buffer
/// </summary>
public byte[]? Output { get; set; }
}
/// <summary>
/// Provides static methods for decompressing Apple Data Compression data
/// </summary>
@@ -78,6 +97,173 @@ public static class ADCBase
public static int Decompress(byte[] input, out byte[]? output, int bufferSize = 262144) =>
Decompress(new MemoryStream(input), out output, bufferSize);
/// <summary>
/// Decompresses a byte buffer asynchronously that's compressed with ADC
/// </summary>
/// <param name="input">Compressed buffer</param>
/// <param name="bufferSize">Max size for decompressed data</param>
/// <param name="cancellationToken">Cancellation token</param>
/// <returns>Result containing bytes read and decompressed data</returns>
public static async Task<AdcDecompressResult> DecompressAsync(
byte[] input,
int bufferSize = 262144,
CancellationToken cancellationToken = default
) => await DecompressAsync(new MemoryStream(input), bufferSize, cancellationToken);
/// <summary>
/// Decompresses a stream asynchronously that's compressed with ADC
/// </summary>
/// <param name="input">Stream containing compressed data</param>
/// <param name="bufferSize">Max size for decompressed data</param>
/// <param name="cancellationToken">Cancellation token</param>
/// <returns>Result containing bytes read and decompressed data</returns>
public static async Task<AdcDecompressResult> DecompressAsync(
Stream input,
int bufferSize = 262144,
CancellationToken cancellationToken = default
)
{
var result = new AdcDecompressResult();
if (input is null || input.Length == 0)
{
result.BytesRead = 0;
result.Output = null;
return result;
}
var start = (int)input.Position;
var position = (int)input.Position;
int chunkSize;
int offset;
int chunkType;
var buffer = ArrayPool<byte>.Shared.Rent(bufferSize);
var outPosition = 0;
var full = false;
byte[] temp = ArrayPool<byte>.Shared.Rent(3);
try
{
while (position < input.Length)
{
cancellationToken.ThrowIfCancellationRequested();
var readByte = input.ReadByte();
if (readByte == -1)
{
break;
}
chunkType = GetChunkType((byte)readByte);
switch (chunkType)
{
case PLAIN:
chunkSize = GetChunkSize((byte)readByte);
if (outPosition + chunkSize > bufferSize)
{
full = true;
break;
}
var readCount = await input.ReadAsync(
buffer,
outPosition,
chunkSize,
cancellationToken
);
outPosition += readCount;
position += readCount + 1;
break;
case TWO_BYTE:
chunkSize = GetChunkSize((byte)readByte);
temp[0] = (byte)readByte;
temp[1] = (byte)input.ReadByte();
offset = GetOffset(temp.AsSpan(0, 2));
if (outPosition + chunkSize > bufferSize)
{
full = true;
break;
}
if (offset == 0)
{
var lastByte = buffer[outPosition - 1];
for (var i = 0; i < chunkSize; i++)
{
buffer[outPosition] = lastByte;
outPosition++;
}
position += 2;
}
else
{
for (var i = 0; i < chunkSize; i++)
{
buffer[outPosition] = buffer[outPosition - offset - 1];
outPosition++;
}
position += 2;
}
break;
case THREE_BYTE:
chunkSize = GetChunkSize((byte)readByte);
temp[0] = (byte)readByte;
temp[1] = (byte)input.ReadByte();
temp[2] = (byte)input.ReadByte();
offset = GetOffset(temp.AsSpan(0, 3));
if (outPosition + chunkSize > bufferSize)
{
full = true;
break;
}
if (offset == 0)
{
var lastByte = buffer[outPosition - 1];
for (var i = 0; i < chunkSize; i++)
{
buffer[outPosition] = lastByte;
outPosition++;
}
position += 3;
}
else
{
for (var i = 0; i < chunkSize; i++)
{
buffer[outPosition] = buffer[outPosition - offset - 1];
outPosition++;
}
position += 3;
}
break;
}
if (full)
{
break;
}
}
var output = new byte[outPosition];
Array.Copy(buffer, output, outPosition);
result.BytesRead = position - start;
result.Output = output;
return result;
}
finally
{
ArrayPool<byte>.Shared.Return(buffer);
ArrayPool<byte>.Shared.Return(temp);
}
}
/// <summary>
/// Decompresses a stream that's compressed with ADC
/// </summary>

View File

@@ -28,6 +28,8 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Compressors.ADC;
@@ -187,6 +189,76 @@ public sealed class ADCStream : Stream, IStreamStack
return copied;
}
public override async Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken = default
)
{
if (count == 0)
{
return 0;
}
if (buffer is null)
{
throw new ArgumentNullException(nameof(buffer));
}
if (count < 0)
{
throw new ArgumentOutOfRangeException(nameof(count));
}
if (offset < buffer.GetLowerBound(0))
{
throw new ArgumentOutOfRangeException(nameof(offset));
}
if ((offset + count) > buffer.GetLength(0))
{
throw new ArgumentOutOfRangeException(nameof(count));
}
if (_outBuffer is null)
{
var result = await ADCBase.DecompressAsync(
_stream,
cancellationToken: cancellationToken
);
_outBuffer = result.Output;
_outPosition = 0;
}
var inPosition = offset;
var toCopy = count;
var copied = 0;
while (_outPosition + toCopy >= _outBuffer.Length)
{
cancellationToken.ThrowIfCancellationRequested();
var piece = _outBuffer.Length - _outPosition;
Array.Copy(_outBuffer, _outPosition, buffer, inPosition, piece);
inPosition += piece;
copied += piece;
_position += piece;
toCopy -= piece;
var result = await ADCBase.DecompressAsync(
_stream,
cancellationToken: cancellationToken
);
_outBuffer = result.Output;
_outPosition = 0;
if (result.BytesRead == 0 || _outBuffer is null || _outBuffer.Length == 0)
{
return copied;
}
}
Array.Copy(_outBuffer, _outPosition, buffer, inPosition, toCopy);
_outPosition += toCopy;
_position += toCopy;
copied += toCopy;
return copied;
}
public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
public override void SetLength(long value) => throw new NotSupportedException();

View File

@@ -0,0 +1,72 @@
using System;
using System.IO;
namespace SharpCompress.Compressors.Arj
{
[CLSCompliant(true)]
public class BitReader
{
private readonly Stream _input;
private int _bitBuffer; // currently buffered bits
private int _bitCount; // number of bits in buffer
public BitReader(Stream input)
{
_input = input ?? throw new ArgumentNullException(nameof(input));
_bitBuffer = 0;
_bitCount = 0;
}
/// <summary>
/// Reads a single bit from the stream. Returns 0 or 1.
/// </summary>
public int ReadBit()
{
if (_bitCount == 0)
{
int nextByte = _input.ReadByte();
if (nextByte < 0)
{
throw new EndOfStreamException("No more data available in BitReader.");
}
_bitBuffer = nextByte;
_bitCount = 8;
}
int bit = (_bitBuffer >> (_bitCount - 1)) & 1;
_bitCount--;
return bit;
}
/// <summary>
/// Reads n bits (up to 32) from the stream.
/// </summary>
public int ReadBits(int count)
{
if (count < 0 || count > 32)
{
throw new ArgumentOutOfRangeException(
nameof(count),
"Count must be between 0 and 32."
);
}
int result = 0;
for (int i = 0; i < count; i++)
{
result = (result << 1) | ReadBit();
}
return result;
}
/// <summary>
/// Resets any buffered bits.
/// </summary>
public void AlignToByte()
{
_bitCount = 0;
_bitBuffer = 0;
}
}
}
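A usage sketch showing the MSB-first bit order (values traced against the code above):

var reader = new BitReader(new MemoryStream(new byte[] { 0b1011_0000 }));
int first = reader.ReadBit();   // 1 (top bit of 0xB0)
int next3 = reader.ReadBits(3); // 0b011 = 3 (the following three bits)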

View File

@@ -0,0 +1,43 @@
using System;
using System.Collections;
using System.Collections.Generic;
namespace SharpCompress.Compressors.Arj
{
/// <summary>
/// Iterator that reads & pushes values back into the ring buffer.
/// </summary>
public class HistoryIterator : IEnumerator<byte>
{
private int _index;
private readonly IRingBuffer _ring;
public HistoryIterator(IRingBuffer ring, int startIndex)
{
_ring = ring;
_index = startIndex;
}
public bool MoveNext()
{
Current = _ring[_index];
_index = unchecked(_index + 1);
// Push value back into the ring buffer
_ring.Push(Current);
return true; // iterator is infinite
}
public void Reset()
{
throw new NotSupportedException();
}
public byte Current { get; private set; }
object IEnumerator.Current => Current;
public void Dispose() { }
}
}

View File

@@ -0,0 +1,218 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
namespace SharpCompress.Compressors.Arj
{
[CLSCompliant(true)]
public enum NodeType
{
Leaf,
Branch,
}
[CLSCompliant(true)]
public sealed class TreeEntry
{
public readonly NodeType Type;
public readonly int LeafValue;
public readonly int BranchIndex;
public const int MAX_INDEX = 4096;
private TreeEntry(NodeType type, int leafValue, int branchIndex)
{
Type = type;
LeafValue = leafValue;
BranchIndex = branchIndex;
}
public static TreeEntry Leaf(int value)
{
return new TreeEntry(NodeType.Leaf, value, -1);
}
public static TreeEntry Branch(int index)
{
if (index >= MAX_INDEX)
{
throw new ArgumentOutOfRangeException(
nameof(index),
"Branch index exceeds MAX_INDEX"
);
}
return new TreeEntry(NodeType.Branch, 0, index);
}
}
[CLSCompliant(true)]
public sealed class HuffTree
{
private readonly List<TreeEntry> _tree;
public HuffTree(int capacity = 0)
{
_tree = new List<TreeEntry>(capacity);
}
public void SetSingle(int value)
{
_tree.Clear();
_tree.Add(TreeEntry.Leaf(value));
}
public void BuildTree(byte[] lengths, int count)
{
if (lengths == null)
{
throw new ArgumentNullException(nameof(lengths));
}
if (count < 0 || count > lengths.Length)
{
throw new ArgumentOutOfRangeException(nameof(count));
}
if (count > TreeEntry.MAX_INDEX / 2)
{
throw new ArgumentException(
$"Count exceeds maximum allowed: {TreeEntry.MAX_INDEX / 2}"
);
}
byte[] slice = new byte[count];
Array.Copy(lengths, slice, count);
BuildTree(slice);
}
public void BuildTree(byte[] valueLengths)
{
if (valueLengths == null)
{
throw new ArgumentNullException(nameof(valueLengths));
}
if (valueLengths.Length > TreeEntry.MAX_INDEX / 2)
{
throw new InvalidOperationException("Too many code lengths");
}
_tree.Clear();
int maxAllocated = 1; // start with a single (root) node
for (byte currentLen = 1; ; currentLen++)
{
// add missing branches up to current limit
int maxLimit = maxAllocated;
for (int i = _tree.Count; i < maxLimit; i++)
{
// TreeEntry.Branch may throw if index too large
try
{
_tree.Add(TreeEntry.Branch(maxAllocated));
}
catch (ArgumentOutOfRangeException e)
{
_tree.Clear();
throw new InvalidOperationException("Branch index exceeds limit", e);
}
// each branch node allocates two children
maxAllocated += 2;
}
// fill tree with leaves found in the lengths table at the current length
bool moreLeaves = false;
for (int value = 0; value < valueLengths.Length; value++)
{
byte len = valueLengths[value];
if (len == currentLen)
{
_tree.Add(TreeEntry.Leaf(value));
}
else if (len > currentLen)
{
moreLeaves = true; // there are more leaves to process
}
}
// sanity check (too many leaves)
if (_tree.Count > maxAllocated)
{
throw new InvalidOperationException("Too many leaves");
}
// stop when no longer finding longer codes
if (!moreLeaves)
{
break;
}
}
// ensure tree is complete
if (_tree.Count != maxAllocated)
{
throw new InvalidOperationException(
$"Missing some leaves: tree count = {_tree.Count}, expected = {maxAllocated}"
);
}
}
public int ReadEntry(BitReader reader)
{
if (_tree.Count == 0)
{
throw new InvalidOperationException("Tree not initialized");
}
TreeEntry node = _tree[0];
while (true)
{
if (node.Type == NodeType.Leaf)
{
return node.LeafValue;
}
int bit = reader.ReadBit();
int index = node.BranchIndex + bit;
if (index >= _tree.Count)
{
throw new InvalidOperationException("Invalid branch index during read");
}
node = _tree[index];
}
}
public override string ToString()
{
var result = new StringBuilder();
void FormatStep(int index, string prefix)
{
var node = _tree[index];
if (node.Type == NodeType.Leaf)
{
result.AppendLine($"{prefix} -> {node.LeafValue}");
}
else
{
FormatStep(node.BranchIndex, prefix + "0");
FormatStep(node.BranchIndex + 1, prefix + "1");
}
}
if (_tree.Count > 0)
{
FormatStep(0, "");
}
return result.ToString();
}
}
}
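A small usage sketch of the canonical-code construction (traced by hand against BuildTree above): lengths { 1, 2, 2 } for values 0..2 produce the codes 0 -> "0", 1 -> "10", 2 -> "11".

var tree = new HuffTree();
tree.BuildTree(new byte[] { 1, 2, 2 }); // code lengths for values 0, 1, 2
int value = tree.ReadEntry(bitReader);  // walks bits from a BitReader until a leaf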

View File

@@ -0,0 +1,9 @@
namespace SharpCompress.Compressors.Arj
{
public interface ILhaDecoderConfig
{
int HistoryBits { get; }
int OffsetBits { get; }
RingBuffer RingBuffer { get; }
}
}

View File

@@ -0,0 +1,17 @@
namespace SharpCompress.Compressors.Arj
{
public interface IRingBuffer
{
int BufferSize { get; }
int Cursor { get; }
void SetCursor(int pos);
void Push(byte value);
HistoryIterator IterFromOffset(int offset);
HistoryIterator IterFromPos(int pos);
byte this[int index] { get; }
}
}

View File

@@ -0,0 +1,191 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.IO;
namespace SharpCompress.Compressors.Arj
{
[CLSCompliant(true)]
public sealed class LHDecoderStream : Stream, IStreamStack
{
#if DEBUG_STREAMS
long IStreamStack.InstanceId { get; set; }
#endif
int IStreamStack.DefaultBufferSize { get; set; }
Stream IStreamStack.BaseStream() => _stream;
int IStreamStack.BufferSize
{
get => 0;
set { }
}
int IStreamStack.BufferPosition
{
get => 0;
set { }
}
void IStreamStack.SetPosition(long position) { }
private readonly BitReader _bitReader;
private readonly Stream _stream;
// Buffer containing *all* bytes decoded so far.
private readonly List<byte> _buffer = new();
private long _readPosition;
private readonly int _originalSize;
private bool _finishedDecoding;
private bool _disposed;
private const int THRESHOLD = 3;
public LHDecoderStream(Stream compressedStream, int originalSize)
{
_stream = compressedStream ?? throw new ArgumentNullException(nameof(compressedStream));
if (!compressedStream.CanRead)
throw new ArgumentException(
"compressedStream must be readable.",
nameof(compressedStream)
);
_bitReader = new BitReader(compressedStream);
_originalSize = originalSize;
_readPosition = 0;
_finishedDecoding = (originalSize == 0);
}
public Stream BaseStream => _stream;
public override bool CanRead => true;
public override bool CanSeek => false;
public override bool CanWrite => false;
public override long Length => _originalSize;
public override long Position
{
get => _readPosition;
set => throw new NotSupportedException();
}
/// <summary>
/// Decodes a single element (literal or back-reference) and appends it to _buffer.
/// Returns true if data was added, or false if all input has already been decoded.
/// </summary>
private bool DecodeNext()
{
if (_buffer.Count >= _originalSize)
{
_finishedDecoding = true;
return false;
}
int len = DecodeVal(0, 7);
if (len == 0)
{
byte nextChar = (byte)_bitReader.ReadBits(8);
_buffer.Add(nextChar);
}
else
{
int repCount = len + THRESHOLD - 1;
int backPtr = DecodeVal(9, 13);
if (backPtr >= _buffer.Count)
throw new InvalidDataException("Invalid back_ptr in LH stream");
int srcIndex = _buffer.Count - 1 - backPtr;
for (int j = 0; j < repCount && _buffer.Count < _originalSize; j++)
{
byte b = _buffer[srcIndex];
_buffer.Add(b);
srcIndex++;
// srcIndex may grow; it's allowed (source region can overlap destination)
}
}
if (_buffer.Count >= _originalSize)
{
_finishedDecoding = true;
}
return true;
}
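/// <summary>
/// Decodes a variable-width value: starting at a field width of <paramref name="from"/>
/// bits, each leading 1-bit (capped at <paramref name="to"/>) widens the field by one
/// and contributes a high bit via the accumulator; the widened field is then read
/// literally and added in.
/// </summary>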
private int DecodeVal(int from, int to)
{
int add = 0;
int bit = from;
while (bit < to && _bitReader.ReadBits(1) == 1)
{
add |= 1 << bit;
bit++;
}
int res = bit > 0 ? _bitReader.ReadBits(bit) : 0;
return res + add;
}
/// <summary>
/// Reads decompressed bytes into buffer[offset..offset+count].
/// The method decodes additional data on demand when needed.
/// </summary>
public override int Read(byte[] buffer, int offset, int count)
{
if (_disposed)
throw new ObjectDisposedException(nameof(LHDecoderStream));
if (buffer == null)
throw new ArgumentNullException(nameof(buffer));
if (offset < 0 || count < 0 || offset + count > buffer.Length)
throw new ArgumentOutOfRangeException("offset/count");
if (_readPosition >= _originalSize)
return 0; // EOF
int totalRead = 0;
while (totalRead < count && _readPosition < _originalSize)
{
if (_readPosition >= _buffer.Count)
{
bool had = DecodeNext();
if (!had)
{
break;
}
}
int available = _buffer.Count - (int)_readPosition;
if (available <= 0)
{
if (!_finishedDecoding)
{
continue;
}
break;
}
int toCopy = Math.Min(available, count - totalRead);
_buffer.CopyTo((int)_readPosition, buffer, offset + totalRead, toCopy);
_readPosition += toCopy;
totalRead += toCopy;
}
return totalRead;
}
public override void Flush() => throw new NotSupportedException();
public override long Seek(long offset, SeekOrigin origin) =>
throw new NotSupportedException();
public override void SetLength(long value) => throw new NotSupportedException();
public override void Write(byte[] buffer, int offset, int count) =>
throw new NotSupportedException();
}
}
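
Usage is construct-and-drain; a minimal hedged sketch (file name and sizes are illustrative):

// Decode an LH member whose uncompressed size is known from the header.
using var compressed = File.OpenRead("member.lh"); // hypothetical input
using var decoder = new LHDecoderStream(compressed, originalSize: 4096);
using var output = new MemoryStream();
decoder.CopyTo(output); // Read() decodes literals/back-references on demand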

View File

@@ -0,0 +1,9 @@
namespace SharpCompress.Compressors.Arj
{
public class Lh5DecoderCfg : ILhaDecoderConfig
{
public int HistoryBits => 14;
public int OffsetBits => 4;
public RingBuffer RingBuffer { get; } = new RingBuffer(1 << 14);
}
}

View File

@@ -0,0 +1,9 @@
namespace SharpCompress.Compressors.Arj
{
public class Lh7DecoderCfg : ILhaDecoderConfig
{
public int HistoryBits => 17;
public int OffsetBits => 5;
public RingBuffer RingBuffer { get; } = new RingBuffer(1 << 17);
}
}

View File

@@ -0,0 +1,363 @@
using System;
using System.Data;
using System.IO;
using System.Linq;
using SharpCompress.IO;
namespace SharpCompress.Compressors.Arj
{
[CLSCompliant(true)]
public sealed class LhaStream<C> : Stream, IStreamStack
where C : ILhaDecoderConfig, new()
{
private readonly BitReader _bitReader;
private readonly Stream _stream;
private readonly HuffTree _commandTree;
private readonly HuffTree _offsetTree;
private int _remainingCommands;
private (int offset, int count)? _copyProgress;
private readonly RingBuffer _ringBuffer;
private readonly C _config = new C();
private const int NUM_COMMANDS = 510;
private const int NUM_TEMP_CODELEN = 20;
private readonly int _originalSize;
private int _producedBytes = 0;
#if DEBUG_STREAMS
long IStreamStack.InstanceId { get; set; }
#endif
int IStreamStack.DefaultBufferSize { get; set; }
Stream IStreamStack.BaseStream() => _stream;
int IStreamStack.BufferSize
{
get => 0;
set { }
}
int IStreamStack.BufferPosition
{
get => 0;
set { }
}
void IStreamStack.SetPosition(long position) { }
public LhaStream(Stream compressedStream, int originalSize)
{
_stream = compressedStream ?? throw new ArgumentNullException(nameof(compressedStream));
_bitReader = new BitReader(compressedStream);
_ringBuffer = _config.RingBuffer;
_commandTree = new HuffTree(NUM_COMMANDS * 2);
_offsetTree = new HuffTree(NUM_TEMP_CODELEN * 2);
_remainingCommands = 0;
_copyProgress = null;
_originalSize = originalSize;
}
public override bool CanRead => true;
public override bool CanSeek => false;
public override bool CanWrite => false;
public override long Length => throw new NotSupportedException();
public override long Position
{
get => throw new NotSupportedException();
set => throw new NotSupportedException();
}
public override void Flush() { }
public override long Seek(long offset, SeekOrigin origin) =>
throw new NotSupportedException();
public override void SetLength(long value) => throw new NotSupportedException();
public override void Write(byte[] buffer, int offset, int count) =>
throw new NotSupportedException();
public override int Read(byte[] buffer, int offset, int count)
{
if (buffer == null)
{
throw new ArgumentNullException(nameof(buffer));
}
if (offset < 0 || count < 0 || (offset + count) > buffer.Length)
{
throw new ArgumentOutOfRangeException();
}
if (_producedBytes >= _originalSize)
{
return 0; // EOF
}
if (count == 0)
{
return 0;
}
int bytesRead = FillBuffer(buffer);
return bytesRead;
}
private byte ReadCodeLength()
{
int len = _bitReader.ReadBits(3);
if (len == 7)
{
while (_bitReader.ReadBit() != 0)
{
len++;
if (len > 255)
{
throw new InvalidOperationException("Code length overflow");
}
}
}
return (byte)len;
}
private int ReadCodeSkip(int skipRange)
{
int bits;
int increment;
switch (skipRange)
{
case 0:
return 1;
case 1:
bits = 4;
increment = 3; // 3..=18
break;
default:
bits = 9;
increment = 20; // 20..=531
break;
}
int skip = _bitReader.ReadBits(bits);
return skip + increment;
}
private void ReadTempTree()
{
byte[] codeLengths = new byte[NUM_TEMP_CODELEN];
// number of codes to read (5 bits)
int numCodes = _bitReader.ReadBits(5);
// single code only
if (numCodes == 0)
{
int code = _bitReader.ReadBits(5);
_offsetTree.SetSingle((byte)code);
return;
}
if (numCodes > NUM_TEMP_CODELEN)
{
throw new Exception("temporary codelen table has invalid size");
}
// read actual lengths
int count = Math.Min(3, numCodes);
for (int i = 0; i < count; i++)
{
codeLengths[i] = ReadCodeLength();
}
// 2-bit skip value follows
int skip = _bitReader.ReadBits(2);
if (3 + skip > numCodes)
{
throw new Exception("temporary codelen table has invalid size");
}
for (int i = 3 + skip; i < numCodes; i++)
{
codeLengths[i] = ReadCodeLength();
}
_offsetTree.BuildTree(codeLengths, numCodes);
}
private void ReadCommandTree()
{
byte[] codeLengths = new byte[NUM_COMMANDS];
// number of codes to read (9 bits)
int numCodes = _bitReader.ReadBits(9);
// single code only
if (numCodes == 0)
{
int code = _bitReader.ReadBits(9);
_commandTree.SetSingle((ushort)code);
return;
}
if (numCodes > NUM_COMMANDS)
{
throw new Exception("commands codelen table has invalid size");
}
int index = 0;
while (index < numCodes)
{
for (int n = 0; n < numCodes - index; n++)
{
int code = _offsetTree.ReadEntry(_bitReader);
if (code >= 0 && code <= 2) // skip range
{
int skipCount = ReadCodeSkip(code);
index += n + skipCount;
goto outerLoop;
}
else
{
codeLengths[index + n] = (byte)(code - 2);
}
}
break;
outerLoop:
;
}
_commandTree.BuildTree(codeLengths, numCodes);
}
private void ReadOffsetTree()
{
int numCodes = _bitReader.ReadBits(_config.OffsetBits);
if (numCodes == 0)
{
int code = _bitReader.ReadBits(_config.OffsetBits);
_offsetTree.SetSingle((byte)code);
return;
}
if (numCodes > _config.HistoryBits)
{
throw new InvalidDataException("Offset code table too large");
}
byte[] codeLengths = new byte[NUM_TEMP_CODELEN];
for (int i = 0; i < numCodes; i++)
{
codeLengths[i] = ReadCodeLength();
}
_offsetTree.BuildTree(codeLengths, numCodes);
}
private void BeginNewBlock()
{
ReadTempTree();
ReadCommandTree();
ReadOffsetTree();
}
private int ReadCommand() => _commandTree.ReadEntry(_bitReader);
private int ReadOffset()
{
int bits = _offsetTree.ReadEntry(_bitReader);
if (bits <= 1)
{
return bits;
}
int res = _bitReader.ReadBits(bits - 1);
return res | (1 << (bits - 1));
}
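// In ReadOffset, the Huffman entry is the distance's bit-length n: 0 and 1
// encode themselves, while n >= 2 reads n-1 raw bits b and restores the
// implied top bit, (1 << (n - 1)) | b. E.g. n=4 with bits 101 -> 0b1101 = 13.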
private int CopyFromHistory(byte[] target, int targetIndex, int offset, int count)
{
var historyIter = _ringBuffer.IterFromOffset(offset);
int copied = 0;
while (
copied < count && historyIter.MoveNext() && (targetIndex + copied) < target.Length
)
{
target[targetIndex + copied] = historyIter.Current;
copied++;
}
if (copied < count)
{
_copyProgress = (offset, count - copied);
}
return copied;
}
public int FillBuffer(byte[] buffer)
{
int bufLen = buffer.Length;
int bufIndex = 0;
// stop when we reached original size
if (_producedBytes >= _originalSize)
{
return 0;
}
// calculate limit, so that we don't go over the original size
int remaining = Math.Min(bufLen, _originalSize - _producedBytes);
while (bufIndex < remaining)
{
if (_copyProgress.HasValue)
{
var (offset, count) = _copyProgress.Value;
_copyProgress = null;
int copied = CopyFromHistory(
buffer,
bufIndex,
offset,
Math.Min(count, remaining - bufIndex)
);
bufIndex += copied;
if (count > copied)
{
// Preserve the unfinished part of the match; clearing it here
// unconditionally would drop data when the caller's buffer fills.
_copyProgress = (offset, count - copied);
}
}
if (_remainingCommands == 0)
{
_remainingCommands = _bitReader.ReadBits(16);
if (bufIndex + _remainingCommands > remaining)
{
break;
}
BeginNewBlock();
}
_remainingCommands--;
int command = ReadCommand();
if (command >= 0 && command <= 0xFF)
{
byte value = (byte)command;
buffer[bufIndex++] = value;
_ringBuffer.Push(value);
}
else
{
int count = command - 0x100 + 3;
int offset = ReadOffset();
int copyCount = Math.Min(count, remaining - bufIndex);
bufIndex += CopyFromHistory(buffer, bufIndex, offset, copyCount);
}
}
_producedBytes += bufIndex;
return bufIndex;
}
}
}
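
As with LHDecoderStream, callers construct against the compressed source and pull decoded bytes; a hedged sketch (identifiers illustrative):

// lh5: 14 history bits, i.e. a 16 KiB window (see Lh5DecoderCfg above).
using var compressed = File.OpenRead("member.lh5"); // hypothetical input
using var lha = new LhaStream<Lh5DecoderCfg>(compressed, originalSize: 65536);
var result = new byte[65536];
int total = 0, read;
while ((read = lha.Read(result, total, result.Length - total)) > 0)
{
    total += read;
}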

View File

@@ -0,0 +1,67 @@
using System;
using System.Collections;
using System.Collections.Generic;
namespace SharpCompress.Compressors.Arj
{
/// <summary>
/// A fixed-size ring buffer where N must be a power of two.
/// </summary>
public class RingBuffer : IRingBuffer
{
private readonly byte[] _buffer;
private int _cursor;
public int BufferSize { get; }
public int Cursor => _cursor;
private readonly int _mask;
public RingBuffer(int size)
{
if ((size & (size - 1)) != 0)
{
throw new ArgumentException("RingArrayBuffer size must be a power of two");
}
BufferSize = size;
_buffer = new byte[size];
_cursor = 0;
_mask = size - 1;
// Fill with spaces
for (int i = 0; i < size; i++)
{
_buffer[i] = (byte)' ';
}
}
public void SetCursor(int pos)
{
_cursor = pos & _mask;
}
public void Push(byte value)
{
int index = _cursor;
_buffer[index & _mask] = value;
_cursor = (index + 1) & _mask;
}
public byte this[int index] => _buffer[index & _mask];
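// IterFromOffset(0) starts at the byte most recently pushed (one behind
// the cursor); larger offsets reach correspondingly further back, while
// IterFromPos starts at an absolute (masked) window position.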
public HistoryIterator IterFromOffset(int offset)
{
int masked = (offset & _mask) + 1;
int startIndex = _cursor + BufferSize - masked;
return new HistoryIterator(this, startIndex);
}
public HistoryIterator IterFromPos(int pos)
{
int startIndex = pos & _mask;
return new HistoryIterator(this, startIndex);
}
}
}

View File

@@ -1,5 +1,7 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Compressors.BZip2;
@@ -96,13 +98,37 @@ public sealed class BZip2Stream : Stream, IStreamStack
public override void SetLength(long value) => stream.SetLength(value);
#if !NETFRAMEWORK&& !NETSTANDARD2_0
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override int Read(Span<byte> buffer) => stream.Read(buffer);
public override void Write(ReadOnlySpan<byte> buffer) => stream.Write(buffer);
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
) => await stream.ReadAsync(buffer, cancellationToken).ConfigureAwait(false);
public override async ValueTask WriteAsync(
ReadOnlyMemory<byte> buffer,
CancellationToken cancellationToken = default
) => await stream.WriteAsync(buffer, cancellationToken).ConfigureAwait(false);
#endif
public override async Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken = default
) => await stream.ReadAsync(buffer, offset, count, cancellationToken).ConfigureAwait(false);
public override async Task WriteAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken = default
) => await stream.WriteAsync(buffer, offset, count, cancellationToken).ConfigureAwait(false);
public override void Write(byte[] buffer, int offset, int count) =>
stream.Write(buffer, offset, count);

View File

@@ -2,6 +2,8 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
/*
@@ -1127,6 +1129,28 @@ internal class CBZip2InputStream : Stream, IStreamStack
return k;
}
public override Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken = default
)
{
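// BZip2 decoding is CPU-bound with no awaitable inner read at this level,
// so this override decodes byte-by-byte via ReadByte and completes
// synchronously, honoring cancellation between bytes.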
var c = -1;
int k;
for (k = 0; k < count; ++k)
{
cancellationToken.ThrowIfCancellationRequested();
c = ReadByte();
if (c == -1)
{
break;
}
buffer[k + offset] = (byte)c;
}
return Task.FromResult(k);
}
public override long Seek(long offset, SeekOrigin origin) => 0;
public override void SetLength(long value) { }

View File

@@ -1,5 +1,7 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
/*
@@ -542,6 +544,12 @@ internal sealed class CBZip2OutputStream : Stream, IStreamStack
private void EndBlock()
{
// Skip block processing for empty input (no data written)
if (last < 0)
{
return;
}
blockCRC = mCrc.GetFinalCRC();
combinedCRC = (combinedCRC << 1) | (int)(((uint)combinedCRC) >> 31);
combinedCRC ^= blockCRC;
@@ -2022,6 +2030,21 @@ internal sealed class CBZip2OutputStream : Stream, IStreamStack
}
}
public override Task WriteAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken = default
)
{
for (var k = 0; k < count; ++k)
{
cancellationToken.ThrowIfCancellationRequested();
WriteByte(buffer[k + offset]);
}
return Task.CompletedTask;
}
public override bool CanRead => false;
public override bool CanSeek => false;

View File

@@ -2,6 +2,8 @@ using System;
using System.IO;
using System.Security.Cryptography;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.LZMA.Utilites;
using SharpCompress.IO;
@@ -283,5 +285,70 @@ internal sealed class AesDecoderStream : DecoderStream2, IStreamStack
return count;
}
public override async Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken = default
)
{
if (count == 0 || mWritten == mLimit)
{
return 0;
}
if (mUnderflow > 0)
{
return HandleUnderflow(buffer, offset, count);
}
// Need at least 16 bytes to proceed.
if (mEnding - mOffset < 16)
{
Buffer.BlockCopy(mBuffer, mOffset, mBuffer, 0, mEnding - mOffset);
mEnding -= mOffset;
mOffset = 0;
do
{
cancellationToken.ThrowIfCancellationRequested();
var read = await mStream
.ReadAsync(mBuffer, mEnding, mBuffer.Length - mEnding, cancellationToken)
.ConfigureAwait(false);
if (read == 0)
{
// We are not done decoding and have less than 16 bytes.
throw new EndOfStreamException();
}
mEnding += read;
} while (mEnding - mOffset < 16);
}
// We shouldn't return more data than we are limited to.
if (count > mLimit - mWritten)
{
count = (int)(mLimit - mWritten);
}
// We cannot transform less than 16 bytes into the target buffer,
// but we also cannot return zero, so we need to handle this.
if (count < 16)
{
return HandleUnderflow(buffer, offset, count);
}
if (count > mEnding - mOffset)
{
count = mEnding - mOffset;
}
// Otherwise we transform directly into the target buffer.
var processed = mDecoder.TransformBlock(mBuffer, mOffset, count & ~15, buffer, offset);
mOffset += processed;
mWritten += processed;
return processed;
}
#endregion
}

View File

@@ -1,6 +1,8 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Compressors.LZMA;
@@ -191,6 +193,18 @@ internal class Bcj2DecoderStream : DecoderStream2, IStreamStack
return count;
}
public override Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
// Bcj2DecoderStream uses complex state machine with multiple streams
return Task.FromResult(Read(buffer, offset, count));
}
public override int ReadByte()
{
if (_mFinished)

View File

@@ -3,6 +3,8 @@
using System;
using System.Buffers;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Compressors.LZMA.LZ;
@@ -85,6 +87,12 @@ internal class OutWindow : IDisposable
_stream = null;
}
public async Task ReleaseStreamAsync(CancellationToken cancellationToken = default)
{
await FlushAsync(cancellationToken).ConfigureAwait(false);
_stream = null;
}
private void Flush()
{
if (_stream is null)
@@ -104,6 +112,27 @@ internal class OutWindow : IDisposable
_streamPos = _pos;
}
private async Task FlushAsync(CancellationToken cancellationToken = default)
{
if (_stream is null)
{
return;
}
var size = _pos - _streamPos;
if (size == 0)
{
return;
}
await _stream
.WriteAsync(_buffer, _streamPos, size, cancellationToken)
.ConfigureAwait(false);
if (_pos >= _windowSize)
{
_pos = 0;
}
_streamPos = _pos;
}
public void CopyPending()
{
if (_pendingLen < 1)
@@ -124,6 +153,26 @@ internal class OutWindow : IDisposable
_pendingLen = rem;
}
public async Task CopyPendingAsync(CancellationToken cancellationToken = default)
{
if (_pendingLen < 1)
{
return;
}
var rem = _pendingLen;
var pos = (_pendingDist < _pos ? _pos : _pos + _windowSize) - _pendingDist - 1;
while (rem > 0 && HasSpace)
{
if (pos >= _windowSize)
{
pos = 0;
}
await PutByteAsync(_buffer[pos++], cancellationToken).ConfigureAwait(false);
rem--;
}
_pendingLen = rem;
}
public void CopyBlock(int distance, int len)
{
var rem = len;
@@ -157,6 +206,43 @@ internal class OutWindow : IDisposable
_pendingDist = distance;
}
public async Task CopyBlockAsync(
int distance,
int len,
CancellationToken cancellationToken = default
)
{
var rem = len;
var pos = (distance < _pos ? _pos : _pos + _windowSize) - distance - 1;
var targetSize = HasSpace ? (int)Math.Min(rem, _limit - _total) : 0;
var sizeUntilWindowEnd = Math.Min(_windowSize - _pos, _windowSize - pos);
var sizeUntilOverlap = Math.Abs(pos - _pos);
var fastSize = Math.Min(Math.Min(sizeUntilWindowEnd, sizeUntilOverlap), targetSize);
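// fastSize is the longest run that one span copy can handle: it must not
// run past the window end for either cursor, must stop before source and
// destination overlap, and must respect the output limit.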
if (fastSize >= 2)
{
_buffer.AsSpan(pos, fastSize).CopyTo(_buffer.AsSpan(_pos, fastSize));
_pos += fastSize;
pos += fastSize;
_total += fastSize;
if (_pos >= _windowSize)
{
await FlushAsync(cancellationToken).ConfigureAwait(false);
}
rem -= fastSize;
}
while (rem > 0 && HasSpace)
{
if (pos >= _windowSize)
{
pos = 0;
}
await PutByteAsync(_buffer[pos++], cancellationToken).ConfigureAwait(false);
rem--;
}
_pendingLen = rem;
_pendingDist = distance;
}
public void PutByte(byte b)
{
_buffer[_pos++] = b;
@@ -167,6 +253,16 @@ internal class OutWindow : IDisposable
}
}
public async Task PutByteAsync(byte b, CancellationToken cancellationToken = default)
{
_buffer[_pos++] = b;
_total++;
if (_pos >= _windowSize)
{
await FlushAsync(cancellationToken).ConfigureAwait(false);
}
}
public byte GetByte(int distance)
{
var pos = _pos - distance - 1;
@@ -207,6 +303,44 @@ internal class OutWindow : IDisposable
return len - size;
}
public async Task<int> CopyStreamAsync(
Stream stream,
int len,
CancellationToken cancellationToken = default
)
{
var size = len;
while (size > 0 && _pos < _windowSize && _total < _limit)
{
cancellationToken.ThrowIfCancellationRequested();
var curSize = _windowSize - _pos;
if (curSize > _limit - _total)
{
curSize = (int)(_limit - _total);
}
if (curSize > size)
{
curSize = size;
}
var numReadBytes = await stream
.ReadAsync(_buffer, _pos, curSize, cancellationToken)
.ConfigureAwait(false);
if (numReadBytes == 0)
{
throw new DataErrorException();
}
size -= numReadBytes;
_pos += numReadBytes;
_total += numReadBytes;
if (_pos >= _windowSize)
{
await FlushAsync(cancellationToken).ConfigureAwait(false);
}
}
return len - size;
}
public void SetLimit(long size) => _limit = _total + size;
public bool HasSpace => _pos < _windowSize && _total < _limit;

View File

@@ -1,6 +1,8 @@
using System;
using System.Buffers.Binary;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Crypto;
using SharpCompress.IO;
@@ -157,6 +159,11 @@ public sealed class LZipStream : Stream, IStreamStack
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
) => _stream.ReadAsync(buffer, cancellationToken);
public override int Read(Span<byte> buffer) => _stream.Read(buffer);
public override void Write(ReadOnlySpan<byte> buffer)
@@ -179,6 +186,25 @@ public sealed class LZipStream : Stream, IStreamStack
++_writeCount;
}
public override Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken = default
) => _stream.ReadAsync(buffer, offset, count, cancellationToken);
public override async Task WriteAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
cancellationToken.ThrowIfCancellationRequested();
await _stream.WriteAsync(buffer, offset, count, cancellationToken).ConfigureAwait(false);
_writeCount += count;
}
#endregion
/// <summary>

View File

@@ -1,6 +1,7 @@
#nullable disable
using System;
using System.Diagnostics.CodeAnalysis;
using System.IO;
using SharpCompress.Compressors.LZMA.LZ;
using SharpCompress.Compressors.LZMA.RangeCoder;
@@ -199,6 +200,9 @@ public class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Stream
}
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
[MemberNotNull(nameof(_outWindow))]
#endif
private void CreateDictionary()
{
if (_dictionarySize < 0)
@@ -309,6 +313,42 @@ public class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Stream
_outWindow = null;
}
public async System.Threading.Tasks.Task CodeAsync(
Stream inStream,
Stream outStream,
long inSize,
long outSize,
ICodeProgress progress,
System.Threading.CancellationToken cancellationToken = default
)
{
if (_outWindow is null)
{
CreateDictionary();
}
_outWindow.Init(outStream);
if (outSize > 0)
{
_outWindow.SetLimit(outSize);
}
else
{
_outWindow.SetLimit(long.MaxValue - _outWindow.Total);
}
var rangeDecoder = new RangeCoder.Decoder();
rangeDecoder.Init(inStream);
await CodeAsync(_dictionarySize, _outWindow, rangeDecoder, cancellationToken)
.ConfigureAwait(false);
await _outWindow.ReleaseStreamAsync(cancellationToken).ConfigureAwait(false);
rangeDecoder.ReleaseStream();
_outWindow.Dispose();
_outWindow = null;
}
internal bool Code(int dictionarySize, OutWindow outWindow, RangeCoder.Decoder rangeDecoder)
{
var dictionarySizeCheck = Math.Max(dictionarySize, 1);
@@ -435,6 +475,143 @@ public class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Stream
return false;
}
internal async System.Threading.Tasks.Task<bool> CodeAsync(
int dictionarySize,
OutWindow outWindow,
RangeCoder.Decoder rangeDecoder,
System.Threading.CancellationToken cancellationToken = default
)
{
var dictionarySizeCheck = Math.Max(dictionarySize, 1);
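// Mirrors the synchronous Code(): each iteration decodes a literal
// (is-match bit 0) or a match; _rep0.._rep3 cache the most recent match
// distances, and a decoded distance of 0xFFFFFFFF marks end of stream.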
await outWindow.CopyPendingAsync(cancellationToken).ConfigureAwait(false);
while (outWindow.HasSpace)
{
cancellationToken.ThrowIfCancellationRequested();
var posState = (uint)outWindow.Total & _posStateMask;
if (
_isMatchDecoders[(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState]
.Decode(rangeDecoder) == 0
)
{
byte b;
var prevByte = outWindow.GetByte(0);
if (!_state.IsCharState())
{
b = _literalDecoder.DecodeWithMatchByte(
rangeDecoder,
(uint)outWindow.Total,
prevByte,
outWindow.GetByte((int)_rep0)
);
}
else
{
b = _literalDecoder.DecodeNormal(rangeDecoder, (uint)outWindow.Total, prevByte);
}
await outWindow.PutByteAsync(b, cancellationToken).ConfigureAwait(false);
_state.UpdateChar();
}
else
{
uint len;
if (_isRepDecoders[_state._index].Decode(rangeDecoder) == 1)
{
if (_isRepG0Decoders[_state._index].Decode(rangeDecoder) == 0)
{
if (
_isRep0LongDecoders[
(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState
]
.Decode(rangeDecoder) == 0
)
{
_state.UpdateShortRep();
await outWindow
.PutByteAsync(outWindow.GetByte((int)_rep0), cancellationToken)
.ConfigureAwait(false);
continue;
}
}
else
{
uint distance;
if (_isRepG1Decoders[_state._index].Decode(rangeDecoder) == 0)
{
distance = _rep1;
}
else
{
if (_isRepG2Decoders[_state._index].Decode(rangeDecoder) == 0)
{
distance = _rep2;
}
else
{
distance = _rep3;
_rep3 = _rep2;
}
_rep2 = _rep1;
}
_rep1 = _rep0;
_rep0 = distance;
}
len = _repLenDecoder.Decode(rangeDecoder, posState) + Base.K_MATCH_MIN_LEN;
_state.UpdateRep();
}
else
{
_rep3 = _rep2;
_rep2 = _rep1;
_rep1 = _rep0;
len = Base.K_MATCH_MIN_LEN + _lenDecoder.Decode(rangeDecoder, posState);
_state.UpdateMatch();
var posSlot = _posSlotDecoder[Base.GetLenToPosState(len)].Decode(rangeDecoder);
if (posSlot >= Base.K_START_POS_MODEL_INDEX)
{
var numDirectBits = (int)((posSlot >> 1) - 1);
_rep0 = ((2 | (posSlot & 1)) << numDirectBits);
if (posSlot < Base.K_END_POS_MODEL_INDEX)
{
_rep0 += BitTreeDecoder.ReverseDecode(
_posDecoders,
_rep0 - posSlot - 1,
rangeDecoder,
numDirectBits
);
}
else
{
_rep0 += (
rangeDecoder.DecodeDirectBits(numDirectBits - Base.K_NUM_ALIGN_BITS)
<< Base.K_NUM_ALIGN_BITS
);
_rep0 += _posAlignDecoder.ReverseDecode(rangeDecoder);
}
}
else
{
_rep0 = posSlot;
}
}
if (_rep0 >= outWindow.Total || _rep0 >= dictionarySizeCheck)
{
if (_rep0 == 0xFFFFFFFF)
{
return true;
}
throw new DataErrorException();
}
await outWindow
.CopyBlockAsync((int)_rep0, (int)len, cancellationToken)
.ConfigureAwait(false);
}
}
return false;
}
public void SetDecoderProperties(byte[] properties)
{
if (properties.Length < 1)
@@ -470,29 +647,4 @@ public class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Stream
}
_outWindow.Train(stream);
}
/*
public override bool CanRead { get { return true; }}
public override bool CanWrite { get { return true; }}
public override bool CanSeek { get { return true; }}
public override long Length { get { return 0; }}
public override long Position
{
get { return 0; }
set { }
}
public override void Flush() { }
public override int Read(byte[] buffer, int offset, int count)
{
return 0;
}
public override void Write(byte[] buffer, int offset, int count)
{
}
public override long Seek(long offset, System.IO.SeekOrigin origin)
{
return 0;
}
public override void SetLength(long value) {}
*/
}

View File

@@ -3,6 +3,8 @@
using System;
using System.Buffers.Binary;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.LZMA.LZ;
using SharpCompress.IO;
@@ -423,6 +425,82 @@ public class LzmaStream : Stream, IStreamStack
}
}
private async Task DecodeChunkHeaderAsync(CancellationToken cancellationToken = default)
{
var controlBuffer = new byte[1];
await _inputStream.ReadAsync(controlBuffer, 0, 1, cancellationToken).ConfigureAwait(false);
var control = controlBuffer[0];
_inputPosition++;
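// LZMA2 control byte: 0x00 ends the stream; 0x01/0x02 introduce an
// uncompressed chunk (0x01 also resets the dictionary); >= 0x80 starts an
// LZMA chunk whose low 5 bits are the top bits of the unpacked size, with
// >= 0xA0 resetting decoder state and >= 0xC0 carrying a fresh props byte.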
if (control == 0x00)
{
_endReached = true;
return;
}
if (control >= 0xE0 || control == 0x01)
{
_needProps = true;
_needDictReset = false;
_outWindow.Reset();
}
else if (_needDictReset)
{
throw new DataErrorException();
}
if (control >= 0x80)
{
_uncompressedChunk = false;
_availableBytes = (control & 0x1F) << 16;
var buffer = new byte[2];
await _inputStream.ReadAsync(buffer, 0, 2, cancellationToken).ConfigureAwait(false);
_availableBytes += (buffer[0] << 8) + buffer[1] + 1;
_inputPosition += 2;
await _inputStream.ReadAsync(buffer, 0, 2, cancellationToken).ConfigureAwait(false);
_rangeDecoderLimit = (buffer[0] << 8) + buffer[1] + 1;
_inputPosition += 2;
if (control >= 0xC0)
{
_needProps = false;
await _inputStream
.ReadAsync(controlBuffer, 0, 1, cancellationToken)
.ConfigureAwait(false);
Properties[0] = controlBuffer[0];
_inputPosition++;
_decoder = new Decoder();
_decoder.SetDecoderProperties(Properties);
}
else if (_needProps)
{
throw new DataErrorException();
}
else if (control >= 0xA0)
{
_decoder = new Decoder();
_decoder.SetDecoderProperties(Properties);
}
_rangeDecoder.Init(_inputStream);
}
else if (control > 0x02)
{
throw new DataErrorException();
}
else
{
_uncompressedChunk = true;
var buffer = new byte[2];
await _inputStream.ReadAsync(buffer, 0, 2, cancellationToken).ConfigureAwait(false);
_availableBytes = (buffer[0] << 8) + buffer[1] + 1;
_inputPosition += 2;
}
}
public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
public override void SetLength(long value) => throw new NotSupportedException();
@@ -435,5 +513,128 @@ public class LzmaStream : Stream, IStreamStack
}
}
public override async Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
if (_endReached)
{
return 0;
}
var total = 0;
while (total < count)
{
cancellationToken.ThrowIfCancellationRequested();
if (_availableBytes == 0)
{
if (_isLzma2)
{
await DecodeChunkHeaderAsync(cancellationToken).ConfigureAwait(false);
}
else
{
_endReached = true;
}
if (_endReached)
{
break;
}
}
var toProcess = count - total;
if (toProcess > _availableBytes)
{
toProcess = (int)_availableBytes;
}
_outWindow.SetLimit(toProcess);
if (_uncompressedChunk)
{
_inputPosition += await _outWindow
.CopyStreamAsync(_inputStream, toProcess, cancellationToken)
.ConfigureAwait(false);
}
else if (
await _decoder
.CodeAsync(_dictionarySize, _outWindow, _rangeDecoder, cancellationToken)
.ConfigureAwait(false)
&& _outputSize < 0
)
{
_availableBytes = _outWindow.AvailableBytes;
}
var read = _outWindow.Read(buffer, offset, toProcess);
total += read;
offset += read;
_position += read;
_availableBytes -= read;
if (_availableBytes == 0 && !_uncompressedChunk)
{
if (
!_rangeDecoder.IsFinished
|| (_rangeDecoderLimit >= 0 && _rangeDecoder._total != _rangeDecoderLimit)
)
{
_outWindow.SetLimit(toProcess + 1);
if (
!await _decoder
.CodeAsync(
_dictionarySize,
_outWindow,
_rangeDecoder,
cancellationToken
)
.ConfigureAwait(false)
)
{
_rangeDecoder.ReleaseStream();
throw new DataErrorException();
}
}
_rangeDecoder.ReleaseStream();
_inputPosition += _rangeDecoder._total;
if (_outWindow.HasPending)
{
throw new DataErrorException();
}
}
}
if (_endReached)
{
if (_inputSize >= 0 && _inputPosition != _inputSize)
{
throw new DataErrorException();
}
if (_outputSize >= 0 && _position != _outputSize)
{
throw new DataErrorException();
}
}
return total;
}
public override Task WriteAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
cancellationToken.ThrowIfCancellationRequested();
Write(buffer, offset, count);
return Task.CompletedTask;
}
public byte[] Properties { get; } = new byte[5];
}

View File

@@ -1,5 +1,7 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Compressors.LZMA.Utilites;
@@ -101,4 +103,22 @@ internal class CrcBuilderStream : Stream, IStreamStack
_mCrc = Crc.Update(_mCrc, buffer, offset, count);
_mTarget.Write(buffer, offset, count);
}
public override async Task WriteAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
if (_mFinished)
{
throw new InvalidOperationException("CRC calculation has been finished.");
}
Processed += count;
_mCrc = Crc.Update(_mCrc, buffer, offset, count);
await _mTarget.WriteAsync(buffer, offset, count, cancellationToken).ConfigureAwait(false);
}
}

View File

@@ -1,6 +1,9 @@
using System;
using System.Buffers;
using System.Diagnostics;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Compressors.LZMA.Utilites;
@@ -11,7 +14,7 @@ public class CrcCheckStream : Stream
private uint _mCurrentCrc;
private bool _mClosed;
private readonly long[] _mBytes = ArrayPool<long>.Shared.Rent(256);
private long _mLength;
public CrcCheckStream(uint crc)
@@ -65,6 +68,7 @@ public class CrcCheckStream : Stream
finally
{
base.Dispose(disposing);
ArrayPool<long>.Shared.Return(_mBytes);
}
}
@@ -101,4 +105,16 @@ public class CrcCheckStream : Stream
_mCurrentCrc = Crc.Update(_mCurrentCrc, buffer, offset, count);
}
public override Task WriteAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
cancellationToken.ThrowIfCancellationRequested();
Write(buffer, offset, count);
return Task.CompletedTask;
}
}

View File

@@ -1,13 +1,13 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Compressors.RLE90
{
/// <summary>
/// Real-time streaming RLE90 decompression stream.
/// Decompresses bytes on demand without buffering the entire file in memory.
/// </summary>
public class RunLength90Stream : Stream, IStreamStack
{
#if DEBUG_STREAMS
@@ -31,13 +31,19 @@ namespace SharpCompress.Compressors.RLE90
void IStreamStack.SetPosition(long position) { }
private readonly Stream _stream;
private readonly int _compressedSize;
private int _bytesReadFromSource;
private const byte DLE = 0x90;
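// RLE90 scheme: a run is the literal byte followed by DLE (0x90) and a
// count giving the total run length (the literal already emitted counts);
// DLE followed by 0x00 is an escaped literal 0x90.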
private bool _inDleMode;
private byte _lastByte;
private int _repeatCount;
private bool _endOfCompressedData;
public RunLength90Stream(Stream stream, int compressedSize)
{
_stream = stream ?? throw new ArgumentNullException(nameof(stream));
_compressedSize = compressedSize;
#if DEBUG_STREAMS
this.DebugConstruct(typeof(RunLength90Stream));
@@ -53,44 +59,93 @@ namespace SharpCompress.Compressors.RLE90
}
public override bool CanRead => true;
public override bool CanSeek => false;
public override bool CanWrite => false;
public override long Length => throw new NotSupportedException();
public override long Position
{
get => throw new NotSupportedException();
set => throw new NotSupportedException();
}
public override void Flush() => throw new NotSupportedException();
public override long Seek(long offset, SeekOrigin origin) =>
throw new NotSupportedException();
public override void SetLength(long value) => throw new NotSupportedException();
public override void Write(byte[] buffer, int offset, int count) =>
throw new NotSupportedException();
public override int Read(byte[] buffer, int offset, int count)
{
if (buffer == null)
throw new ArgumentNullException(nameof(buffer));
if (offset < 0 || count < 0 || offset + count > buffer.Length)
throw new ArgumentOutOfRangeException();
int bytesWritten = 0;
while (bytesWritten < count && !_endOfCompressedData)
{
// Handle pending repeat bytes first
if (_repeatCount > 0)
{
int toWrite = Math.Min(_repeatCount, count - bytesWritten);
for (int i = 0; i < toWrite; i++)
{
buffer[offset + bytesWritten++] = _lastByte;
}
_repeatCount -= toWrite;
continue;
}
// Try to read the next byte from compressed data
if (_bytesReadFromSource >= _compressedSize)
{
_endOfCompressedData = true;
break;
}
int next = _stream.ReadByte();
if (next == -1)
{
_endOfCompressedData = true;
break;
}
_bytesReadFromSource++;
byte c = (byte)next;
if (_inDleMode)
{
_inDleMode = false;
if (c == 0)
{
buffer[offset + bytesWritten++] = DLE;
_lastByte = DLE;
}
else
{
_repeatCount = c - 1;
// We'll handle these repeats in the next loop iteration.
}
}
else if (c == DLE)
{
_inDleMode = true;
}
else
{
buffer[offset + bytesWritten++] = c;
_lastByte = c;
}
}
return bytesWritten;
}
}
}
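
A hedged usage sketch (names illustrative): the stream only needs the raw source and the compressed byte count from the ARC header.

// Inflate an RLE90-packed entry of known compressed size.
using var source = File.OpenRead("entry.arc"); // hypothetical input
using var rle = new RunLength90Stream(source, compressedSize: 1024);
using var inflated = new MemoryStream();
rle.CopyTo(inflated); // runs are expanded lazily inside Read()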

View File

@@ -1,4 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Compressors.Rar;
@@ -8,6 +10,14 @@ internal interface IRarUnpack
void DoUnpack(FileHeader fileHeader, Stream readStream, Stream writeStream);
void DoUnpack();
Task DoUnpackAsync(
FileHeader fileHeader,
Stream readStream,
Stream writeStream,
CancellationToken cancellationToken
);
Task DoUnpackAsync(CancellationToken cancellationToken);
// e.g. a UI pause/resume button
bool Suspended { get; set; }

View File

@@ -150,6 +150,136 @@ internal sealed class MultiVolumeReadOnlyStream : Stream, IStreamStack
return totalRead;
}
public override async System.Threading.Tasks.Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
System.Threading.CancellationToken cancellationToken
)
{
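// Read within the current volume part; when the part is exhausted and the
// header marks the entry as continuing (IsSplitAfter), advance to the next
// volume and keep filling the caller's buffer.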
var totalRead = 0;
var currentOffset = offset;
var currentCount = count;
while (currentCount > 0)
{
var readSize = currentCount;
if (currentCount > maxPosition - currentPosition)
{
readSize = (int)(maxPosition - currentPosition);
}
var read = await currentStream
.ReadAsync(buffer, currentOffset, readSize, cancellationToken)
.ConfigureAwait(false);
if (read < 0)
{
throw new EndOfStreamException();
}
currentPosition += read;
currentOffset += read;
currentCount -= read;
totalRead += read;
if (
((maxPosition - currentPosition) == 0)
&& filePartEnumerator.Current.FileHeader.IsSplitAfter
)
{
if (filePartEnumerator.Current.FileHeader.R4Salt != null)
{
throw new InvalidFormatException(
"Sharpcompress currently does not support multi-volume decryption."
);
}
var fileName = filePartEnumerator.Current.FileHeader.FileName;
if (!filePartEnumerator.MoveNext())
{
throw new InvalidFormatException(
"Multi-part rar file is incomplete. Entry expects a new volume: "
+ fileName
);
}
InitializeNextFilePart();
}
else
{
break;
}
}
currentPartTotalReadBytes += totalRead;
currentEntryTotalReadBytes += totalRead;
streamListener.FireCompressedBytesRead(
currentPartTotalReadBytes,
currentEntryTotalReadBytes
);
return totalRead;
}
#if NETCOREAPP2_1_OR_GREATER || NETSTANDARD2_1_OR_GREATER
public override async System.Threading.Tasks.ValueTask<int> ReadAsync(
Memory<byte> buffer,
System.Threading.CancellationToken cancellationToken = default
)
{
var totalRead = 0;
var currentOffset = 0;
var currentCount = buffer.Length;
while (currentCount > 0)
{
var readSize = currentCount;
if (currentCount > maxPosition - currentPosition)
{
readSize = (int)(maxPosition - currentPosition);
}
var read = await currentStream
.ReadAsync(buffer.Slice(currentOffset, readSize), cancellationToken)
.ConfigureAwait(false);
if (read < 0)
{
throw new EndOfStreamException();
}
currentPosition += read;
currentOffset += read;
currentCount -= read;
totalRead += read;
if (
((maxPosition - currentPosition) == 0)
&& filePartEnumerator.Current.FileHeader.IsSplitAfter
)
{
if (filePartEnumerator.Current.FileHeader.R4Salt != null)
{
throw new InvalidFormatException(
"Sharpcompress currently does not support multi-volume decryption."
);
}
var fileName = filePartEnumerator.Current.FileHeader.FileName;
if (!filePartEnumerator.MoveNext())
{
throw new InvalidFormatException(
"Multi-part rar file is incomplete. Entry expects a new volume: "
+ fileName
);
}
InitializeNextFilePart();
}
else
{
break;
}
}
currentPartTotalReadBytes += totalRead;
currentEntryTotalReadBytes += totalRead;
streamListener.FireCompressedBytesRead(
currentPartTotalReadBytes,
currentEntryTotalReadBytes
);
return totalRead;
}
#endif
public override bool CanRead => true;
public override bool CanSeek => false;

View File

@@ -1,6 +1,8 @@
using System;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
@@ -103,7 +105,7 @@ internal class RarBLAKE2spStream : RarStream, IStreamStack
byte[] _hash = { };
private RarBLAKE2spStream(
IRarUnpack unpack,
FileHeader fileHeader,
MultiVolumeReadOnlyStream readStream
@@ -121,6 +123,29 @@ internal class RarBLAKE2spStream : RarStream, IStreamStack
ResetCrc();
}
public static RarBLAKE2spStream Create(
IRarUnpack unpack,
FileHeader fileHeader,
MultiVolumeReadOnlyStream readStream
)
{
var stream = new RarBLAKE2spStream(unpack, fileHeader, readStream);
stream.Initialize();
return stream;
}
public static async Task<RarBLAKE2spStream> CreateAsync(
IRarUnpack unpack,
FileHeader fileHeader,
MultiVolumeReadOnlyStream readStream,
CancellationToken cancellationToken = default
)
{
var stream = new RarBLAKE2spStream(unpack, fileHeader, readStream);
await stream.InitializeAsync(cancellationToken).ConfigureAwait(false);
return stream;
}
protected override void Dispose(bool disposing)
{
#if DEBUG_STREAMS
@@ -333,4 +358,59 @@ internal class RarBLAKE2spStream : RarStream, IStreamStack
return result;
}
public override async System.Threading.Tasks.Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
System.Threading.CancellationToken cancellationToken
)
{
var result = await base.ReadAsync(buffer, offset, count, cancellationToken)
.ConfigureAwait(false);
if (result != 0)
{
Update(_blake2sp, new ReadOnlySpan<byte>(buffer, offset, result), result);
}
else
{
_hash = Final(_blake2sp);
if (!disableCRCCheck && !(GetCrc().SequenceEqual(readStream.CurrentCrc)) && count != 0)
{
// NOTE: we use the last FileHeader in a multipart volume to check CRC
throw new InvalidFormatException("file crc mismatch");
}
}
return result;
}
#if NETCOREAPP2_1_OR_GREATER || NETSTANDARD2_1_OR_GREATER
public override async System.Threading.Tasks.ValueTask<int> ReadAsync(
Memory<byte> buffer,
System.Threading.CancellationToken cancellationToken = default
)
{
var result = await base.ReadAsync(buffer, cancellationToken).ConfigureAwait(false);
if (result != 0)
{
Update(_blake2sp, buffer.Span.Slice(0, result), result);
}
else
{
_hash = Final(_blake2sp);
if (
!disableCRCCheck
&& !(GetCrc().SequenceEqual(readStream.CurrentCrc))
&& buffer.Length != 0
)
{
// NOTE: we use the last FileHeader in a multipart volume to check CRC
throw new InvalidFormatException("file crc mismatch");
}
}
return result;
}
#endif
}

View File

@@ -1,5 +1,7 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
@@ -31,7 +33,7 @@ internal class RarCrcStream : RarStream, IStreamStack
private uint currentCrc;
private readonly bool disableCRC;
private RarCrcStream(
IRarUnpack unpack,
FileHeader fileHeader,
MultiVolumeReadOnlyStream readStream
@@ -46,6 +48,29 @@ internal class RarCrcStream : RarStream, IStreamStack
ResetCrc();
}
public static RarCrcStream Create(
IRarUnpack unpack,
FileHeader fileHeader,
MultiVolumeReadOnlyStream readStream
)
{
var stream = new RarCrcStream(unpack, fileHeader, readStream);
stream.Initialize();
return stream;
}
public static async Task<RarCrcStream> CreateAsync(
IRarUnpack unpack,
FileHeader fileHeader,
MultiVolumeReadOnlyStream readStream,
CancellationToken cancellationToken = default
)
{
var stream = new RarCrcStream(unpack, fileHeader, readStream);
await stream.InitializeAsync(cancellationToken).ConfigureAwait(false);
return stream;
}
protected override void Dispose(bool disposing)
{
#if DEBUG_STREAMS
@@ -77,4 +102,56 @@ internal class RarCrcStream : RarStream, IStreamStack
return result;
}
public override async System.Threading.Tasks.Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
System.Threading.CancellationToken cancellationToken
)
{
var result = await base.ReadAsync(buffer, offset, count, cancellationToken)
.ConfigureAwait(false);
if (result != 0)
{
currentCrc = RarCRC.CheckCrc(currentCrc, buffer, offset, result);
}
else if (
!disableCRC
&& GetCrc() != BitConverter.ToUInt32(readStream.CurrentCrc, 0)
&& count != 0
)
{
// NOTE: we use the last FileHeader in a multipart volume to check CRC
throw new InvalidFormatException("file crc mismatch");
}
return result;
}
#if NETCOREAPP2_1_OR_GREATER || NETSTANDARD2_1_OR_GREATER
public override async System.Threading.Tasks.ValueTask<int> ReadAsync(
Memory<byte> buffer,
System.Threading.CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
var result = await base.ReadAsync(buffer, cancellationToken).ConfigureAwait(false);
if (result != 0)
{
currentCrc = RarCRC.CheckCrc(currentCrc, buffer.Span, 0, result);
}
else if (
!disableCRC
&& GetCrc() != BitConverter.ToUInt32(readStream.CurrentCrc, 0)
&& buffer.Length != 0
)
{
// NOTE: we use the last FileHeader in a multipart volume to check CRC
throw new InvalidFormatException("file crc mismatch");
}
return result;
}
#endif
}

View File

@@ -3,6 +3,8 @@
using System;
using System.Buffers;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
@@ -56,13 +58,24 @@ internal class RarStream : Stream, IStreamStack
#if DEBUG_STREAMS
this.DebugConstruct(typeof(RarStream));
#endif
}
public void Initialize()
{
fetch = true;
unpack.DoUnpack(fileHeader, readStream, this);
fetch = false;
_position = 0;
}
public async Task InitializeAsync(CancellationToken cancellationToken = default)
{
fetch = true;
await unpack.DoUnpackAsync(fileHeader, readStream, this, cancellationToken).ConfigureAwait(false);
fetch = false;
_position = 0;
}
protected override void Dispose(bool disposing)
{
if (!isDisposed)
@@ -131,6 +144,73 @@ internal class RarStream : Stream, IStreamStack
return outTotal;
}
public override async System.Threading.Tasks.Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
System.Threading.CancellationToken cancellationToken
) => await ReadImplAsync(buffer, offset, count, cancellationToken).ConfigureAwait(false);
private async System.Threading.Tasks.Task<int> ReadImplAsync(
byte[] buffer,
int offset,
int count,
System.Threading.CancellationToken cancellationToken
)
{
outTotal = 0;
if (tmpCount > 0)
{
var toCopy = tmpCount < count ? tmpCount : count;
Buffer.BlockCopy(tmpBuffer, tmpOffset, buffer, offset, toCopy);
tmpOffset += toCopy;
tmpCount -= toCopy;
offset += toCopy;
count -= toCopy;
outTotal += toCopy;
}
if (count > 0 && unpack.DestSize > 0)
{
outBuffer = buffer;
outOffset = offset;
outCount = count;
fetch = true;
await unpack.DoUnpackAsync(cancellationToken).ConfigureAwait(false);
fetch = false;
}
_position += outTotal;
if (count > 0 && outTotal == 0 && _position != Length)
{
// sanity check, e.g. if we try to decompress a redir entry
throw new InvalidOperationException(
$"unpacked file size does not match header: expected {Length} found {_position}"
);
}
return outTotal;
}
#if NETCOREAPP2_1_OR_GREATER || NETSTANDARD2_1_OR_GREATER
public override async System.Threading.Tasks.ValueTask<int> ReadAsync(
Memory<byte> buffer,
System.Threading.CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
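// The unpacker writes into byte[] buffers, so rent a pooled array, decode
// into it, then copy the decoded prefix into the caller's Memory<byte>.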
var array = System.Buffers.ArrayPool<byte>.Shared.Rent(buffer.Length);
try
{
var bytesRead = await ReadImplAsync(array, 0, buffer.Length, cancellationToken)
.ConfigureAwait(false);
new ReadOnlySpan<byte>(array, 0, bytesRead).CopyTo(buffer.Span);
return bytesRead;
}
finally
{
System.Buffers.ArrayPool<byte>.Shared.Return(array);
}
}
#endif
public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
public override void SetLength(long value) => throw new NotSupportedException();
@@ -165,6 +245,18 @@ internal class RarStream : Stream, IStreamStack
}
}
public override System.Threading.Tasks.Task WriteAsync(
byte[] buffer,
int offset,
int count,
System.Threading.CancellationToken cancellationToken
)
{
cancellationToken.ThrowIfCancellationRequested();
Write(buffer, offset, count);
return System.Threading.Tasks.Task.CompletedTask;
}
private void EnsureBufferCapacity(int count)
{
if (this.tmpBuffer.Length < this.tmpCount + count)

View File

@@ -27,7 +27,7 @@ internal sealed partial class Unpack : BitInput, IRarUnpack
if (!disposed)
{
base.Dispose();
if (!externalWindow && window is not null)
{
ArrayPool<byte>.Shared.Return(window);
window = null;
@@ -155,6 +155,25 @@ internal sealed partial class Unpack : BitInput, IRarUnpack
DoUnpack();
}
public async System.Threading.Tasks.Task DoUnpackAsync(
FileHeader fileHeader,
Stream readStream,
Stream writeStream,
System.Threading.CancellationToken cancellationToken = default
)
{
destUnpSize = fileHeader.UncompressedSize;
this.fileHeader = fileHeader;
this.readStream = readStream;
this.writeStream = writeStream;
if (!fileHeader.IsSolid)
{
Init(null);
}
suspended = false;
await DoUnpackAsync(cancellationToken).ConfigureAwait(false);
}
public void DoUnpack()
{
if (fileHeader.CompressionMethod == 0)
@@ -189,6 +208,42 @@ internal sealed partial class Unpack : BitInput, IRarUnpack
}
}
public async System.Threading.Tasks.Task DoUnpackAsync(
System.Threading.CancellationToken cancellationToken = default
)
{
if (fileHeader.CompressionMethod == 0)
{
await UnstoreFileAsync(cancellationToken).ConfigureAwait(false);
return;
}
switch (fileHeader.CompressionAlgorithm)
{
case 15: // rar 1.5 compression
await unpack15Async(fileHeader.IsSolid, cancellationToken).ConfigureAwait(false);
break;
case 20: // rar 2.x compression
case 26: // files larger than 2GB
await unpack20Async(fileHeader.IsSolid, cancellationToken).ConfigureAwait(false);
break;
case 29: // rar 3.x compression
case 36: // alternative hash
await Unpack29Async(fileHeader.IsSolid, cancellationToken).ConfigureAwait(false);
break;
case 50: // rar 5.x compression
await Unpack5Async(fileHeader.IsSolid, cancellationToken).ConfigureAwait(false);
break;
default:
throw new InvalidFormatException(
"unknown rar compression version " + fileHeader.CompressionAlgorithm
);
}
}
private void UnstoreFile()
{
Span<byte> buffer = stackalloc byte[(int)Math.Min(0x10000, destUnpSize)];
@@ -205,6 +260,26 @@ internal sealed partial class Unpack : BitInput, IRarUnpack
} while (!suspended && destUnpSize > 0);
}
private async System.Threading.Tasks.Task UnstoreFileAsync(
System.Threading.CancellationToken cancellationToken = default
)
{
var buffer = new byte[(int)Math.Min(0x10000, destUnpSize)];
do
{
var code = await readStream
.ReadAsync(buffer, 0, buffer.Length, cancellationToken)
.ConfigureAwait(false);
if (code == 0 || code == -1)
{
break;
}
code = code < destUnpSize ? code : (int)destUnpSize;
await writeStream.WriteAsync(buffer, 0, code, cancellationToken).ConfigureAwait(false);
destUnpSize -= code;
} while (!suspended && destUnpSize > 0);
}
private void Unpack29(bool solid)
{
Span<int> DDecode = stackalloc int[PackDef.DC];
@@ -483,6 +558,281 @@ internal sealed partial class Unpack : BitInput, IRarUnpack
UnpWriteBuf();
}
private async System.Threading.Tasks.Task Unpack29Async(
bool solid,
System.Threading.CancellationToken cancellationToken = default
)
{
int[] DDecode = new int[PackDef.DC];
byte[] DBits = new byte[PackDef.DC];
int Bits;
if (DDecode[1] == 0)
{
int Dist = 0,
BitLength = 0,
Slot = 0;
for (var I = 0; I < DBitLengthCounts.Length; I++, BitLength++)
{
var count = DBitLengthCounts[I];
for (var J = 0; J < count; J++, Slot++, Dist += (1 << BitLength))
{
DDecode[Slot] = Dist;
DBits[Slot] = (byte)BitLength;
}
}
}
FileExtracted = true;
if (!suspended)
{
UnpInitData(solid);
if (!await unpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return;
}
if ((!solid || !tablesRead) && !ReadTables())
{
return;
}
}
if (ppmError)
{
return;
}
while (true)
{
unpPtr &= PackDef.MAXWINMASK;
if (inAddr > readBorder)
{
if (!await unpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
break;
}
}
if (((wrPtr - unpPtr) & PackDef.MAXWINMASK) < 260 && wrPtr != unpPtr)
{
await UnpWriteBufAsync(cancellationToken).ConfigureAwait(false);
if (destUnpSize < 0)
{
return;
}
if (suspended)
{
FileExtracted = false;
return;
}
}
if (unpBlockType == BlockTypes.BLOCK_PPM)
{
var ch = ppm.DecodeChar();
if (ch == -1)
{
ppmError = true;
break;
}
if (ch == PpmEscChar)
{
var nextCh = ppm.DecodeChar();
if (nextCh == 0)
{
if (!ReadTables())
{
break;
}
continue;
}
if (nextCh == 2 || nextCh == -1)
{
break;
}
if (nextCh == 3)
{
if (!ReadVMCode())
{
break;
}
continue;
}
if (nextCh == 4)
{
uint Distance = 0,
Length = 0;
var failed = false;
for (var I = 0; I < 4 && !failed; I++)
{
var ch2 = ppm.DecodeChar();
if (ch2 == -1)
{
failed = true;
}
else if (I == 3)
{
Length = (uint)ch2;
}
else
{
Distance = (Distance << 8) + (uint)ch2;
}
}
if (failed)
{
break;
}
CopyString(Length + 32, Distance + 2);
continue;
}
if (nextCh == 5)
{
var length = ppm.DecodeChar();
if (length == -1)
{
break;
}
CopyString((uint)(length + 4), 1);
continue;
}
}
window[unpPtr++] = (byte)ch;
continue;
}
var Number = this.decodeNumber(LD);
if (Number < 256)
{
window[unpPtr++] = (byte)Number;
continue;
}
if (Number >= 271)
{
var Length = LDecode[Number -= 271] + 3;
if ((Bits = LBits[Number]) > 0)
{
Length += GetBits() >> (16 - Bits);
AddBits(Bits);
}
var DistNumber = this.decodeNumber(DD);
var Distance = DDecode[DistNumber] + 1;
if ((Bits = DBits[DistNumber]) > 0)
{
if (DistNumber > 9)
{
if (Bits > 4)
{
Distance += (GetBits() >> (20 - Bits)) << 4;
AddBits(Bits - 4);
}
if (lowDistRepCount > 0)
{
lowDistRepCount--;
Distance += prevLowDist;
}
else
{
var LowDist = this.decodeNumber(LDD);
if (LowDist == 16)
{
lowDistRepCount = PackDef.LOW_DIST_REP_COUNT - 1;
Distance += prevLowDist;
}
else
{
Distance += LowDist;
prevLowDist = (int)LowDist;
}
}
}
else
{
Distance += GetBits() >> (16 - Bits);
AddBits(Bits);
}
}
if (Distance >= 0x2000)
{
Length++;
if (Distance >= 0x40000)
{
Length++;
}
}
InsertOldDist(Distance);
lastLength = Length;
CopyString(Length, Distance);
continue;
}
if (Number == 256)
{
if (!ReadEndOfBlock())
{
break;
}
continue;
}
if (Number == 257)
{
if (!ReadVMCode())
{
break;
}
continue;
}
if (Number == 258)
{
if (lastLength != 0)
{
CopyString(lastLength, oldDist[0]);
}
continue;
}
if (Number < 263)
{
var DistNum = Number - 259;
var Distance = (uint)oldDist[DistNum];
for (var I = DistNum; I > 0; I--)
{
oldDist[I] = oldDist[I - 1];
}
oldDist[0] = (int)Distance;
var LengthNumber = this.decodeNumber(RD);
var Length = LDecode[LengthNumber] + 2;
if ((Bits = LBits[LengthNumber]) > 0)
{
Length += GetBits() >> (16 - Bits);
AddBits(Bits);
}
lastLength = Length;
CopyString((uint)Length, Distance);
continue;
}
if (Number < 272)
{
var Distance = SDDecode[Number -= 263] + 1;
if ((Bits = SDBits[Number]) > 0)
{
Distance += GetBits() >> (16 - Bits);
AddBits(Bits);
}
InsertOldDist((uint)Distance);
lastLength = 2;
CopyString(2, (uint)Distance);
}
}
await UnpWriteBufAsync(cancellationToken).ConfigureAwait(false);
}
private void UnpWriteBuf()
{
var WrittenBorder = wrPtr;
@@ -1339,6 +1689,256 @@ internal sealed partial class Unpack : BitInput, IRarUnpack
}
}
private async System.Threading.Tasks.Task UnpWriteBufAsync(
System.Threading.CancellationToken cancellationToken = default
)
{
var WrittenBorder = wrPtr;
var WriteSize = (unpPtr - WrittenBorder) & PackDef.MAXWINMASK;
for (var I = 0; I < prgStack.Count; I++)
{
var flt = prgStack[I];
if (flt is null)
{
continue;
}
if (flt.NextWindow)
{
flt.NextWindow = false;
continue;
}
var BlockStart = flt.BlockStart;
var BlockLength = flt.BlockLength;
if (((BlockStart - WrittenBorder) & PackDef.MAXWINMASK) < WriteSize)
{
if (WrittenBorder != BlockStart)
{
await UnpWriteAreaAsync(WrittenBorder, BlockStart, cancellationToken)
.ConfigureAwait(false);
WrittenBorder = BlockStart;
WriteSize = (unpPtr - WrittenBorder) & PackDef.MAXWINMASK;
}
if (BlockLength <= WriteSize)
{
var BlockEnd = (BlockStart + BlockLength) & PackDef.MAXWINMASK;
if (BlockStart < BlockEnd || BlockEnd == 0)
{
rarVM.setMemory(0, window, BlockStart, BlockLength);
}
else
{
var FirstPartLength = PackDef.MAXWINSIZE - BlockStart;
rarVM.setMemory(0, window, BlockStart, FirstPartLength);
rarVM.setMemory(FirstPartLength, window, 0, BlockEnd);
}
var ParentPrg = filters[flt.ParentFilter].Program;
var Prg = flt.Program;
if (ParentPrg.GlobalData.Count > RarVM.VM_FIXEDGLOBALSIZE)
{
Prg.GlobalData.Clear();
for (
var i = 0;
i < ParentPrg.GlobalData.Count - RarVM.VM_FIXEDGLOBALSIZE;
i++
)
{
Prg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i] = ParentPrg.GlobalData[
RarVM.VM_FIXEDGLOBALSIZE + i
];
}
}
ExecuteCode(Prg);
if (Prg.GlobalData.Count > RarVM.VM_FIXEDGLOBALSIZE)
{
if (ParentPrg.GlobalData.Count < Prg.GlobalData.Count)
{
ParentPrg.GlobalData.SetSize(Prg.GlobalData.Count);
}
for (var i = 0; i < Prg.GlobalData.Count - RarVM.VM_FIXEDGLOBALSIZE; i++)
{
ParentPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i] = Prg.GlobalData[
RarVM.VM_FIXEDGLOBALSIZE + i
];
}
}
else
{
ParentPrg.GlobalData.Clear();
}
var FilteredDataOffset = Prg.FilteredDataOffset;
var FilteredDataSize = Prg.FilteredDataSize;
var FilteredData = ArrayPool<byte>.Shared.Rent(FilteredDataSize);
try
{
Array.Copy(
rarVM.Mem,
FilteredDataOffset,
FilteredData,
0,
FilteredDataSize
);
prgStack[I] = null;
while (I + 1 < prgStack.Count)
{
var NextFilter = prgStack[I + 1];
if (
NextFilter is null
|| NextFilter.BlockStart != BlockStart
|| NextFilter.BlockLength != FilteredDataSize
|| NextFilter.NextWindow
)
{
break;
}
rarVM.setMemory(0, FilteredData, 0, FilteredDataSize);
var pPrg = filters[NextFilter.ParentFilter].Program;
var NextPrg = NextFilter.Program;
if (pPrg.GlobalData.Count > RarVM.VM_FIXEDGLOBALSIZE)
{
NextPrg.GlobalData.SetSize(pPrg.GlobalData.Count);
for (
var i = 0;
i < pPrg.GlobalData.Count - RarVM.VM_FIXEDGLOBALSIZE;
i++
)
{
NextPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i] =
pPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i];
}
}
ExecuteCode(NextPrg);
if (NextPrg.GlobalData.Count > RarVM.VM_FIXEDGLOBALSIZE)
{
if (pPrg.GlobalData.Count < NextPrg.GlobalData.Count)
{
pPrg.GlobalData.SetSize(NextPrg.GlobalData.Count);
}
for (
var i = 0;
i < NextPrg.GlobalData.Count - RarVM.VM_FIXEDGLOBALSIZE;
i++
)
{
pPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i] =
NextPrg.GlobalData[RarVM.VM_FIXEDGLOBALSIZE + i];
}
}
else
{
pPrg.GlobalData.Clear();
}
FilteredDataOffset = NextPrg.FilteredDataOffset;
FilteredDataSize = NextPrg.FilteredDataSize;
if (FilteredData.Length < FilteredDataSize)
{
ArrayPool<byte>.Shared.Return(FilteredData);
FilteredData = ArrayPool<byte>.Shared.Rent(FilteredDataSize);
}
for (var i = 0; i < FilteredDataSize; i++)
{
FilteredData[i] = NextPrg.GlobalData[FilteredDataOffset + i];
}
I++;
prgStack[I] = null;
}
await writeStream
.WriteAsync(FilteredData, 0, FilteredDataSize, cancellationToken)
.ConfigureAwait(false);
writtenFileSize += FilteredDataSize;
destUnpSize -= FilteredDataSize;
WrittenBorder = BlockEnd;
WriteSize = (unpPtr - WrittenBorder) & PackDef.MAXWINMASK;
}
finally
{
ArrayPool<byte>.Shared.Return(FilteredData);
}
}
else
{
for (var J = I; J < prgStack.Count; J++)
{
var filt = prgStack[J];
if (filt != null && filt.NextWindow)
{
filt.NextWindow = false;
}
}
wrPtr = WrittenBorder;
return;
}
}
}
await UnpWriteAreaAsync(WrittenBorder, unpPtr, cancellationToken).ConfigureAwait(false);
wrPtr = unpPtr;
}
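// Informal note (cf. the comments kept in UnpWriteBuf30): filter input is
// always copied into the RarVM memory and the filtered result is written from
// there; it cannot be transformed in place, because the window bytes may still
// serve as the source of future string matches and must keep their original form.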
private async System.Threading.Tasks.Task UnpWriteAreaAsync(
int startPtr,
int endPtr,
System.Threading.CancellationToken cancellationToken = default
)
{
if (endPtr < startPtr)
{
await UnpWriteDataAsync(
window,
startPtr,
-startPtr & PackDef.MAXWINMASK,
cancellationToken
)
.ConfigureAwait(false);
await UnpWriteDataAsync(window, 0, endPtr, cancellationToken).ConfigureAwait(false);
}
else
{
await UnpWriteDataAsync(window, startPtr, endPtr - startPtr, cancellationToken)
.ConfigureAwait(false);
}
}
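// Illustrative example (hypothetical pointers, assuming the 4 MB window of
// PackDef.MAXWINSIZE = 0x400000): the window is circular, so a wrapped area
// such as startPtr = 0x3FFF00, endPtr = 0x0100 is flushed in two pieces:
// -startPtr & MAXWINMASK = 0x100 bytes from window[0x3FFF00], then
// 0x100 bytes from window[0].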
private async System.Threading.Tasks.Task UnpWriteDataAsync(
byte[] data,
int offset,
int size,
System.Threading.CancellationToken cancellationToken = default
)
{
if (destUnpSize < 0)
{
return;
}
var writeSize = size;
if (writeSize > destUnpSize)
{
writeSize = (int)destUnpSize;
}
await writeStream
.WriteAsync(data, offset, writeSize, cancellationToken)
.ConfigureAwait(false);
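// Note: the counters below intentionally advance by the full 'size', not the
// clamped 'writeSize'; letting destUnpSize go negative is what terminates the
// decode loops above.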
writtenFileSize += size;
destUnpSize -= size;
}
private void CleanUp()
{
if (ppm != null)

View File

@@ -316,6 +316,110 @@ internal partial class Unpack
oldUnpWriteBuf();
}
private async System.Threading.Tasks.Task unpack15Async(
bool solid,
System.Threading.CancellationToken cancellationToken = default
)
{
if (suspended)
{
unpPtr = wrPtr;
}
else
{
UnpInitData(solid);
oldUnpInitData(solid);
await unpReadBufAsync(cancellationToken).ConfigureAwait(false);
if (!solid)
{
initHuff();
unpPtr = 0;
}
else
{
unpPtr = wrPtr;
}
--destUnpSize;
}
if (destUnpSize >= 0)
{
getFlagsBuf();
FlagsCnt = 8;
}
while (destUnpSize >= 0)
{
unpPtr &= PackDef.MAXWINMASK;
if (
inAddr > readTop - 30
&& !await unpReadBufAsync(cancellationToken).ConfigureAwait(false)
)
{
break;
}
if (((wrPtr - unpPtr) & PackDef.MAXWINMASK) < 270 && wrPtr != unpPtr)
{
await oldUnpWriteBufAsync(cancellationToken).ConfigureAwait(false);
if (suspended)
{
return;
}
}
if (StMode != 0)
{
huffDecode();
continue;
}
if (--FlagsCnt < 0)
{
getFlagsBuf();
FlagsCnt = 7;
}
if ((FlagBuf & 0x80) != 0)
{
FlagBuf <<= 1;
if (Nlzb > Nhfb)
{
longLZ();
}
else
{
huffDecode();
}
}
else
{
FlagBuf <<= 1;
if (--FlagsCnt < 0)
{
getFlagsBuf();
FlagsCnt = 7;
}
if ((FlagBuf & 0x80) != 0)
{
FlagBuf <<= 1;
if (Nlzb > Nhfb)
{
huffDecode();
}
else
{
longLZ();
}
}
else
{
FlagBuf <<= 1;
shortLZ();
}
}
}
await oldUnpWriteBufAsync(cancellationToken).ConfigureAwait(false);
}
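// Informal reading of the flag scheme above: FlagBuf holds up to eight flag
// bits, consumed MSB first. A set bit picks whichever of longLZ()/huffDecode()
// is currently favored (presumably tracked adaptively by the Nlzb/Nhfb
// counters); a clear bit consumes a second flag bit selecting the other
// decoder, and two clear bits in a row select shortLZ().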
private bool unpReadBuf()
{
var dataSize = readTop - inAddr;
@@ -351,6 +455,40 @@ internal partial class Unpack
return (readCode != -1);
}
private async System.Threading.Tasks.Task<bool> unpReadBufAsync(
System.Threading.CancellationToken cancellationToken = default
)
{
var dataSize = readTop - inAddr;
if (dataSize < 0)
{
return (false);
}
if (inAddr > MAX_SIZE / 2)
{
if (dataSize > 0)
{
Array.Copy(InBuf, inAddr, InBuf, 0, dataSize);
}
inAddr = 0;
readTop = dataSize;
}
else
{
dataSize = readTop;
}
var readCode = await readStream
.ReadAsync(InBuf, dataSize, (MAX_SIZE - dataSize) & ~0xf, cancellationToken)
.ConfigureAwait(false);
if (readCode > 0)
{
readTop += readCode;
}
readBorder = readTop - 30;
return (readCode != -1);
}
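// Illustrative: '& ~0xf' truncates the requested read length to a multiple of
// 16 bytes, e.g. (MAX_SIZE - dataSize) = 0x7ffd -> 0x7ff0, presumably so that
// encrypted input can be consumed in whole cipher blocks (cf. the
// CRYPT_BLOCK_SIZE note in the RAR 3.0 reader).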
private int getShortLen1(int pos) => pos == 1 ? Buf60 + 3 : ShortLen1[pos];
private int getShortLen2(int pos) => pos == 3 ? Buf60 + 3 : ShortLen2[pos];
@@ -814,4 +952,26 @@ internal partial class Unpack
}
wrPtr = unpPtr;
}
private async System.Threading.Tasks.Task oldUnpWriteBufAsync(
System.Threading.CancellationToken cancellationToken = default
)
{
if (unpPtr < wrPtr)
{
await writeStream
.WriteAsync(window, wrPtr, -wrPtr & PackDef.MAXWINMASK, cancellationToken)
.ConfigureAwait(false);
await writeStream
.WriteAsync(window, 0, unpPtr, cancellationToken)
.ConfigureAwait(false);
}
else
{
await writeStream
.WriteAsync(window, wrPtr, unpPtr - wrPtr, cancellationToken)
.ConfigureAwait(false);
}
wrPtr = unpPtr;
}
}

View File

@@ -368,6 +368,163 @@ internal partial class Unpack
oldUnpWriteBuf();
}
private async System.Threading.Tasks.Task unpack20Async(
bool solid,
System.Threading.CancellationToken cancellationToken = default
)
{
int Bits;
if (suspended)
{
unpPtr = wrPtr;
}
else
{
UnpInitData(solid);
if (!await unpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return;
}
if (!solid)
{
if (!await ReadTables20Async(cancellationToken).ConfigureAwait(false))
{
return;
}
}
--destUnpSize;
}
while (destUnpSize >= 0)
{
unpPtr &= PackDef.MAXWINMASK;
if (inAddr > readTop - 30)
{
if (!await unpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
break;
}
}
if (((wrPtr - unpPtr) & PackDef.MAXWINMASK) < 270 && wrPtr != unpPtr)
{
await oldUnpWriteBufAsync(cancellationToken).ConfigureAwait(false);
if (suspended)
{
return;
}
}
if (UnpAudioBlock != 0)
{
var AudioNumber = this.decodeNumber(MD[UnpCurChannel]);
if (AudioNumber == 256)
{
if (!await ReadTables20Async(cancellationToken).ConfigureAwait(false))
{
break;
}
continue;
}
window[unpPtr++] = DecodeAudio(AudioNumber);
if (++UnpCurChannel == UnpChannels)
{
UnpCurChannel = 0;
}
--destUnpSize;
continue;
}
var Number = this.decodeNumber(LD);
if (Number < 256)
{
window[unpPtr++] = (byte)Number;
--destUnpSize;
continue;
}
if (Number > 269)
{
var Length = LDecode[Number -= 270] + 3;
if ((Bits = LBits[Number]) > 0)
{
Length += Utility.URShift(GetBits(), (16 - Bits));
AddBits(Bits);
}
var DistNumber = this.decodeNumber(DD);
var Distance = DDecode[DistNumber] + 1;
if ((Bits = DBits[DistNumber]) > 0)
{
Distance += Utility.URShift(GetBits(), (16 - Bits));
AddBits(Bits);
}
if (Distance >= 0x2000)
{
Length++;
if (Distance >= 0x40000L)
{
Length++;
}
}
CopyString20(Length, Distance);
continue;
}
if (Number == 269)
{
if (!await ReadTables20Async(cancellationToken).ConfigureAwait(false))
{
break;
}
continue;
}
if (Number == 256)
{
CopyString20(lastLength, lastDist);
continue;
}
if (Number < 261)
{
var Distance = oldDist[(oldDistPtr - (Number - 256)) & 3];
var LengthNumber = this.decodeNumber(RD);
var Length = LDecode[LengthNumber] + 2;
if ((Bits = LBits[LengthNumber]) > 0)
{
Length += Utility.URShift(GetBits(), (16 - Bits));
AddBits(Bits);
}
if (Distance >= 0x101)
{
Length++;
if (Distance >= 0x2000)
{
Length++;
if (Distance >= 0x40000)
{
Length++;
}
}
}
CopyString20(Length, Distance);
continue;
}
if (Number < 270)
{
var Distance = SDDecode[Number -= 261] + 1;
if ((Bits = SDBits[Number]) > 0)
{
Distance += Utility.URShift(GetBits(), (16 - Bits));
AddBits(Bits);
}
CopyString20(2, Distance);
}
}
ReadLastTables();
await oldUnpWriteBufAsync(cancellationToken).ConfigureAwait(false);
}
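// Informal summary of the RAR 2.0 symbol ranges handled above:
//   0..255   literal byte
//   256      repeat the last match (lastLength/lastDist)
//   257..260 match against one of the four stored distances (oldDist)
//   261..268 two-byte match, distance from SDDecode/SDBits
//   269      re-read the Huffman tables
//   270..    long match, length from LDecode/LBits, distance from DDecode/DBits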
private void CopyString20(int Length, int Distance)
{
lastDist = oldDist[oldDistPtr++ & 3] = Distance;
@@ -534,6 +691,120 @@ internal partial class Unpack
return (true);
}
private async System.Threading.Tasks.Task<bool> ReadTables20Async(
System.Threading.CancellationToken cancellationToken = default
)
{
byte[] BitLength = new byte[PackDef.BC20];
byte[] Table = new byte[PackDef.MC20 * 4];
int TableSize,
N,
I;
if (inAddr > readTop - 25)
{
if (!await unpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return (false);
}
}
var BitField = GetBits();
UnpAudioBlock = (BitField & 0x8000);
if (0 == (BitField & 0x4000))
{
new Span<byte>(UnpOldTable20).Clear();
}
AddBits(2);
if (UnpAudioBlock != 0)
{
UnpChannels = ((Utility.URShift(BitField, 12)) & 3) + 1;
if (UnpCurChannel >= UnpChannels)
{
UnpCurChannel = 0;
}
AddBits(2);
TableSize = PackDef.MC20 * UnpChannels;
}
else
{
TableSize = PackDef.NC20 + PackDef.DC20 + PackDef.RC20;
}
for (I = 0; I < PackDef.BC20; I++)
{
BitLength[I] = (byte)(Utility.URShift(GetBits(), 12));
AddBits(4);
}
UnpackUtility.makeDecodeTables(BitLength, 0, BD, PackDef.BC20);
I = 0;
while (I < TableSize)
{
if (inAddr > readTop - 5)
{
if (!await unpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return (false);
}
}
var Number = this.decodeNumber(BD);
if (Number < 16)
{
Table[I] = (byte)((Number + UnpOldTable20[I]) & 0xf);
I++;
}
else if (Number == 16)
{
N = (Utility.URShift(GetBits(), 14)) + 3;
AddBits(2);
if (I == 0)
{
// A "repeat previous" code cannot occur at the first position.
return (false);
}
while (N-- > 0 && I < TableSize)
{
Table[I] = Table[I - 1];
I++;
}
}
else
{
if (Number == 17)
{
N = (Utility.URShift(GetBits(), 13)) + 3;
AddBits(3);
}
else
{
N = (Utility.URShift(GetBits(), 9)) + 11;
AddBits(7);
}
while (N-- > 0 && I < TableSize)
{
Table[I++] = 0;
}
}
}
if (inAddr > readTop)
{
return (true);
}
if (UnpAudioBlock != 0)
{
for (I = 0; I < UnpChannels; I++)
{
UnpackUtility.makeDecodeTables(Table, I * PackDef.MC20, MD[I], PackDef.MC20);
}
}
else
{
UnpackUtility.makeDecodeTables(Table, 0, LD, PackDef.NC20);
UnpackUtility.makeDecodeTables(Table, PackDef.NC20, DD, PackDef.DC20);
UnpackUtility.makeDecodeTables(Table, PackDef.NC20 + PackDef.DC20, RD, PackDef.RC20);
}
for (var i = 0; i < UnpOldTable20.Length; i++)
{
UnpOldTable20[i] = Table[i];
}
return (true);
}
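// Worked example (illustrative): table entries are delta-coded against
// UnpOldTable20 modulo 16, with escape codes 16 (repeat the previous entry
// (GetBits() >> 14) + 3 times) and 17/18 (emit (GetBits() >> 13) + 3 or
// (GetBits() >> 9) + 11 zero entries). So an old entry of 9 plus a decoded
// symbol of 12 yields (9 + 12) & 0xf = 5.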
private void unpInitData20(bool Solid)
{
if (!Solid)

View File

@@ -479,6 +479,354 @@ internal partial class Unpack
return ReadCode != -1;
}
private async System.Threading.Tasks.Task<bool> UnpReadBufAsync(
System.Threading.CancellationToken cancellationToken = default
)
{
var DataSize = ReadTop - Inp.InAddr; // Data left to process.
if (DataSize < 0)
{
return false;
}
BlockHeader.BlockSize -= Inp.InAddr - BlockHeader.BlockStart;
if (Inp.InAddr > MAX_SIZE / 2)
{
if (DataSize > 0)
{
Array.Copy(InBuf, Inp.InAddr, InBuf, 0, DataSize);
}
Inp.InAddr = 0;
ReadTop = DataSize;
}
else
{
DataSize = ReadTop;
}
var ReadCode = 0;
if (MAX_SIZE != DataSize)
{
ReadCode = await readStream
.ReadAsync(InBuf, DataSize, MAX_SIZE - DataSize, cancellationToken)
.ConfigureAwait(false);
}
if (ReadCode > 0) // Can be also -1.
{
ReadTop += ReadCode;
}
ReadBorder = ReadTop - 30;
BlockHeader.BlockStart = Inp.InAddr;
if (BlockHeader.BlockSize != -1) // '-1' means not defined yet.
{
ReadBorder = Math.Min(ReadBorder, BlockHeader.BlockStart + BlockHeader.BlockSize - 1);
}
return ReadCode != -1;
}
public async System.Threading.Tasks.Task Unpack5Async(
bool Solid,
System.Threading.CancellationToken cancellationToken = default
)
{
FileExtracted = true;
if (!Suspended)
{
UnpInitData(Solid);
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return;
}
// Check TablesRead5 to be sure that we read the tables at least once,
// regardless of the current block header's TablePresent flag,
// so we can safely use these tables below.
if (
!await ReadBlockHeaderAsync(cancellationToken).ConfigureAwait(false)
|| !ReadTables()
|| !TablesRead5
)
{
return;
}
}
while (true)
{
UnpPtr &= MaxWinMask;
if (Inp.InAddr >= ReadBorder)
{
var FileDone = false;
// We use 'while', because for an empty block containing only a Huffman table,
// we'll be on the block border once again just after reading the table.
while (
Inp.InAddr > BlockHeader.BlockStart + BlockHeader.BlockSize - 1
|| Inp.InAddr == BlockHeader.BlockStart + BlockHeader.BlockSize - 1
&& Inp.InBit >= BlockHeader.BlockBitSize
)
{
if (BlockHeader.LastBlockInFile)
{
FileDone = true;
break;
}
if (
!await ReadBlockHeaderAsync(cancellationToken).ConfigureAwait(false)
|| !ReadTables()
)
{
return;
}
}
if (FileDone || !await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
break;
}
}
if (
((WriteBorder - UnpPtr) & MaxWinMask) < PackDef.MAX_LZ_MATCH + 3
&& WriteBorder != UnpPtr
)
{
UnpWriteBuf();
if (WrittenFileSize > DestUnpSize)
{
return;
}
if (Suspended)
{
FileExtracted = false;
return;
}
}
//uint MainSlot=DecodeNumber(Inp,LD);
var MainSlot = this.DecodeNumber(LD);
if (MainSlot < 256)
{
// if (Fragmented)
// FragWindow[UnpPtr++]=(byte)MainSlot;
// else
Window[UnpPtr++] = (byte)MainSlot;
continue;
}
if (MainSlot >= 262)
{
var Length = SlotToLength(MainSlot - 262);
//uint DBits,Distance=1,DistSlot=DecodeNumber(Inp,&BlockTables.DD);
int DBits;
uint Distance = 1,
DistSlot = this.DecodeNumber(DD);
if (DistSlot < 4)
{
DBits = 0;
Distance += DistSlot;
}
else
{
//DBits=DistSlot/2 - 1;
DBits = (int)((DistSlot / 2) - 1);
Distance += (2 | (DistSlot & 1)) << DBits;
}
if (DBits > 0)
{
if (DBits >= 4)
{
if (DBits > 4)
{
Distance += ((Inp.getbits() >> (36 - DBits)) << 4);
Inp.AddBits(DBits - 4);
}
//uint LowDist=DecodeNumber(Inp,&BlockTables.LDD);
var LowDist = this.DecodeNumber(LDD);
Distance += LowDist;
}
else
{
Distance += Inp.getbits() >> (32 - DBits);
Inp.AddBits(DBits);
}
}
if (Distance > 0x100)
{
Length++;
if (Distance > 0x2000)
{
Length++;
if (Distance > 0x40000)
{
Length++;
}
}
}
InsertOldDist(Distance);
LastLength = Length;
// if (Fragmented)
// FragWindow.CopyString(Length,Distance,UnpPtr,MaxWinMask);
// else
CopyString(Length, Distance);
continue;
}
if (MainSlot == 256)
{
var Filter = new UnpackFilter();
if (
!await ReadFilterAsync(Filter, cancellationToken).ConfigureAwait(false)
|| !AddFilter(Filter)
)
{
break;
}
continue;
}
if (MainSlot == 257)
{
if (LastLength != 0)
// if (Fragmented)
// FragWindow.CopyString(LastLength,OldDist[0],UnpPtr,MaxWinMask);
// else
//CopyString(LastLength,OldDist[0]);
{
CopyString(LastLength, OldDistN(0));
}
continue;
}
if (MainSlot < 262)
{
//uint DistNum=MainSlot-258;
var DistNum = (int)(MainSlot - 258);
//uint Distance=OldDist[DistNum];
var Distance = OldDistN(DistNum);
//for (uint I=DistNum;I>0;I--)
for (var I = DistNum; I > 0; I--)
//OldDistN[I]=OldDistN(I-1);
{
SetOldDistN(I, OldDistN(I - 1));
}
//OldDistN[0]=Distance;
SetOldDistN(0, Distance);
var LengthSlot = this.DecodeNumber(RD);
var Length = SlotToLength(LengthSlot);
LastLength = Length;
// if (Fragmented)
// FragWindow.CopyString(Length,Distance,UnpPtr,MaxWinMask);
// else
CopyString(Length, Distance);
continue;
}
}
UnpWriteBuf();
}
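// Informal map of the RAR 5.0 main-slot ranges decoded above:
//   0..255   literal byte
//   256      filter definition (ReadFilterAsync + AddFilter)
//   257      repeat the last match (LastLength with the most recent distance)
//   258..261 match reusing one of the four stored old distances
//   262..    new match: SlotToLength() for the length, DD/LDD for the distance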
private async System.Threading.Tasks.Task<bool> ReadBlockHeaderAsync(
System.Threading.CancellationToken cancellationToken = default
)
{
Header.HeaderSize = 0;
if (!Inp.ExternalBuffer && Inp.InAddr > ReadTop - 7)
{
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return false;
}
}
//Inp.faddbits((8-Inp.InBit)&7);
Inp.faddbits((uint)((8 - Inp.InBit) & 7));
var BlockFlags = (byte)(Inp.fgetbits() >> 8);
Inp.faddbits(8);
//uint ByteCount=((BlockFlags>>3)&3)+1; // Block size byte count.
var ByteCount = (uint)(((BlockFlags >> 3) & 3) + 1); // Block size byte count.
if (ByteCount == 4)
{
return false;
}
//Header.HeaderSize=2+ByteCount;
Header.HeaderSize = (int)(2 + ByteCount);
Header.BlockBitSize = (BlockFlags & 7) + 1;
var SavedCheckSum = (byte)(Inp.fgetbits() >> 8);
Inp.faddbits(8);
var BlockSize = 0;
//for (uint I=0;I<ByteCount;I++)
for (var I = 0; I < ByteCount; I++)
{
//BlockSize+=(Inp.fgetbits()>>8)<<(I*8);
BlockSize += (int)(Inp.fgetbits() >> 8) << (I * 8);
Inp.AddBits(8);
}
Header.BlockSize = BlockSize;
var CheckSum = (byte)(0x5a ^ BlockFlags ^ BlockSize ^ (BlockSize >> 8) ^ (BlockSize >> 16));
if (CheckSum != SavedCheckSum)
{
return false;
}
Header.BlockStart = Inp.InAddr;
ReadBorder = Math.Min(ReadBorder, Header.BlockStart + Header.BlockSize - 1);
Header.LastBlockInFile = (BlockFlags & 0x40) != 0;
Header.TablePresent = (BlockFlags & 0x80) != 0;
return true;
}
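// Worked example (illustrative, hypothetical values): the checksum XORs 0x5A
// with the flags byte and each byte of the block size, e.g.
// BlockFlags = 0x88, BlockSize = 0x1234 gives
// (byte)(0x5a ^ 0x88 ^ 0x34 ^ 0x12) = 0xF4, so a single corrupted header byte
// makes the comparison above fail and aborts the block.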
private async System.Threading.Tasks.Task<bool> ReadFilterAsync(
UnpackFilter Filter,
System.Threading.CancellationToken cancellationToken = default
)
{
if (!Inp.ExternalBuffer && Inp.InAddr > ReadTop - 16)
{
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return false;
}
}
Filter.uBlockStart = ReadFilterData();
Filter.uBlockLength = ReadFilterData();
if (Filter.BlockLength > MAX_FILTER_BLOCK_SIZE)
{
Filter.BlockLength = 0;
}
//Filter.Type=Inp.fgetbits()>>13;
Filter.Type = (byte)(Inp.fgetbits() >> 13);
Inp.faddbits(3);
if (Filter.Type == (byte)FilterType.FILTER_DELTA)
{
//Filter.Channels=(Inp.fgetbits()>>11)+1;
Filter.Channels = (byte)((Inp.fgetbits() >> 11) + 1);
Inp.faddbits(5);
}
return true;
}
//?
// void UnpWriteBuf()
// {
@@ -814,116 +1162,5 @@ internal partial class Unpack
Header.TablePresent = (BlockFlags & 0x80) != 0;
return true;
}
//?
// bool ReadTables(BitInput Inp, ref UnpackBlockHeader Header, ref UnpackBlockTables Tables)
// {
// if (!Header.TablePresent)
// return true;
//
// if (!Inp.ExternalBuffer && Inp.InAddr>ReadTop-25)
// if (!UnpReadBuf())
// return false;
//
// byte BitLength[BC];
// for (uint I=0;I<BC;I++)
// {
// uint Length=(byte)(Inp.fgetbits() >> 12);
// Inp.faddbits(4);
// if (Length==15)
// {
// uint ZeroCount=(byte)(Inp.fgetbits() >> 12);
// Inp.faddbits(4);
// if (ZeroCount==0)
// BitLength[I]=15;
// else
// {
// ZeroCount+=2;
// while (ZeroCount-- > 0 && I<ASIZE(BitLength))
// BitLength[I++]=0;
// I--;
// }
// }
// else
// BitLength[I]=Length;
// }
//
// MakeDecodeTables(BitLength,&Tables.BD,BC);
//
// byte Table[HUFF_TABLE_SIZE];
// const uint TableSize=HUFF_TABLE_SIZE;
// for (uint I=0;I<TableSize;)
// {
// if (!Inp.ExternalBuffer && Inp.InAddr>ReadTop-5)
// if (!UnpReadBuf())
// return false;
// uint Number=DecodeNumber(Inp,&Tables.BD);
// if (Number<16)
// {
// Table[I]=Number;
// I++;
// }
// else
// if (Number<18)
// {
// uint N;
// if (Number==16)
// {
// N=(Inp.fgetbits() >> 13)+3;
// Inp.faddbits(3);
// }
// else
// {
// N=(Inp.fgetbits() >> 9)+11;
// Inp.faddbits(7);
// }
// if (I==0)
// {
// // We cannot have "repeat previous" code at the first position.
// // Multiple such codes would shift Inp position without changing I,
// // which can lead to reading beyond the Inp boundary in multithreading
// // mode, where Inp.ExternalBuffer disables the bounds check and we just
// // reserve a lot of buffer space to avoid needing such a check normally.
// return false;
// }
// else
// while (N-- > 0 && I<TableSize)
// {
// Table[I]=Table[I-1];
// I++;
// }
// }
// else
// {
// uint N;
// if (Number==18)
// {
// N=(Inp.fgetbits() >> 13)+3;
// Inp.faddbits(3);
// }
// else
// {
// N=(Inp.fgetbits() >> 9)+11;
// Inp.faddbits(7);
// }
// while (N-- > 0 && I<TableSize)
// Table[I++]=0;
// }
// }
// TablesRead5=true;
// if (!Inp.ExternalBuffer && Inp.InAddr>ReadTop)
// return false;
// MakeDecodeTables(&Table[0],&Tables.LD,NC);
// MakeDecodeTables(&Table[NC],&Tables.DD,DC);
// MakeDecodeTables(&Table[NC+DC],&Tables.LDD,LDC);
// MakeDecodeTables(&Table[NC+DC+LDC],&Tables.RD,RC);
// return true;
// }
//?
// void InitFilters()
// {
// Filters.SoftReset();
// }
}
#endif

View File

@@ -29,9 +29,28 @@ internal partial class Unpack : IRarUnpack
// NOTE: callers check for -1 to detect errors; we throw instead.
readStream.Read(buf, offset, count);
private async System.Threading.Tasks.Task<int> UnpIO_UnpReadAsync(
byte[] buf,
int offset,
int count,
System.Threading.CancellationToken cancellationToken = default
) =>
// NOTE: callers check for -1 to detect errors; we throw instead.
await readStream.ReadAsync(buf, offset, count, cancellationToken).ConfigureAwait(false);
private void UnpIO_UnpWrite(byte[] buf, size_t offset, uint count) =>
writeStream.Write(buf, checked((int)offset), checked((int)count));
private async System.Threading.Tasks.Task UnpIO_UnpWriteAsync(
byte[] buf,
size_t offset,
uint count,
System.Threading.CancellationToken cancellationToken = default
) =>
await writeStream
.WriteAsync(buf, checked((int)offset), checked((int)count), cancellationToken)
.ConfigureAwait(false);
public void DoUnpack(FileHeader fileHeader, Stream readStream, Stream writeStream)
{
// as of 12/2017 .NET limits array indexing to using a signed integer
@@ -53,6 +72,25 @@ internal partial class Unpack : IRarUnpack
DoUnpack();
}
public async System.Threading.Tasks.Task DoUnpackAsync(
FileHeader fileHeader,
Stream readStream,
Stream writeStream,
System.Threading.CancellationToken cancellationToken = default
)
{
DestUnpSize = fileHeader.UncompressedSize;
this.fileHeader = fileHeader;
this.readStream = readStream;
this.writeStream = writeStream;
if (!fileHeader.IsStored)
{
Init(fileHeader.WindowSize, fileHeader.IsSolid);
}
Suspended = false;
await DoUnpackAsync(cancellationToken).ConfigureAwait(false);
}
public void DoUnpack()
{
if (fileHeader.IsStored)
@@ -65,6 +103,27 @@ internal partial class Unpack : IRarUnpack
}
}
public async System.Threading.Tasks.Task DoUnpackAsync(
System.Threading.CancellationToken cancellationToken = default
)
{
if (fileHeader.IsStored)
{
await UnstoreFileAsync(cancellationToken).ConfigureAwait(false);
}
else
{
// Dispatch on the compression algorithm; the per-method async
// implementations are selected in DoUnpackAsync(Method, Solid, ...).
await DoUnpackAsync(
fileHeader.CompressionAlgorithm,
fileHeader.IsSolid,
cancellationToken
)
.ConfigureAwait(false);
}
}
private void UnstoreFile()
{
Span<byte> b = stackalloc byte[(int)Math.Min(0x10000, DestUnpSize)];
@@ -80,6 +139,25 @@ internal partial class Unpack : IRarUnpack
} while (!Suspended);
}
private async System.Threading.Tasks.Task UnstoreFileAsync(
System.Threading.CancellationToken cancellationToken = default
)
{
var buffer = new byte[(int)Math.Min(0x10000, DestUnpSize)];
do
{
var n = await readStream
.ReadAsync(buffer, 0, buffer.Length, cancellationToken)
.ConfigureAwait(false);
if (n == 0)
{
break;
}
await writeStream.WriteAsync(buffer, 0, n, cancellationToken).ConfigureAwait(false);
DestUnpSize -= n;
} while (!Suspended);
}
public bool Suspended { get; set; }
public long DestSize => DestUnpSize;

View File

@@ -200,6 +200,102 @@ internal partial class Unpack
UnpWriteBuf20();
}
private async System.Threading.Tasks.Task Unpack15Async(
bool Solid,
System.Threading.CancellationToken cancellationToken = default
)
{
UnpInitData(Solid);
UnpInitData15(Solid);
await UnpReadBufAsync(cancellationToken).ConfigureAwait(false);
if (!Solid)
{
InitHuff();
UnpPtr = 0;
}
else
{
UnpPtr = WrPtr;
}
--DestUnpSize;
if (DestUnpSize >= 0)
{
GetFlagsBuf();
FlagsCnt = 8;
}
while (DestUnpSize >= 0)
{
UnpPtr &= MaxWinMask;
if (
Inp.InAddr > ReadTop - 30
&& !await UnpReadBufAsync(cancellationToken).ConfigureAwait(false)
)
{
break;
}
if (((WrPtr - UnpPtr) & MaxWinMask) < 270 && WrPtr != UnpPtr)
{
await UnpWriteBuf20Async(cancellationToken).ConfigureAwait(false);
}
if (StMode != 0)
{
HuffDecode();
continue;
}
if (--FlagsCnt < 0)
{
GetFlagsBuf();
FlagsCnt = 7;
}
if ((FlagBuf & 0x80) != 0)
{
FlagBuf <<= 1;
if (Nlzb > Nhfb)
{
LongLZ();
}
else
{
HuffDecode();
}
}
else
{
FlagBuf <<= 1;
if (--FlagsCnt < 0)
{
GetFlagsBuf();
FlagsCnt = 7;
}
if ((FlagBuf & 0x80) != 0)
{
FlagBuf <<= 1;
if (Nlzb > Nhfb)
{
HuffDecode();
}
else
{
LongLZ();
}
}
else
{
FlagBuf <<= 1;
ShortLZ();
}
}
}
await UnpWriteBuf20Async(cancellationToken).ConfigureAwait(false);
}
//#define GetShortLen1(pos) ((pos)==1 ? Buf60+3:ShortLen1[pos])
private uint GetShortLen1(uint pos) => ((pos) == 1 ? (uint)(Buf60 + 3) : ShortLen1[pos]);

View File

@@ -349,6 +349,170 @@ internal partial class Unpack
UnpWriteBuf20();
}
private async System.Threading.Tasks.Task Unpack20Async(
bool Solid,
System.Threading.CancellationToken cancellationToken = default
)
{
uint Bits;
if (Suspended)
{
UnpPtr = WrPtr;
}
else
{
UnpInitData(Solid);
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return;
}
if (
(!Solid || !TablesRead2)
&& !await ReadTables20Async(cancellationToken).ConfigureAwait(false)
)
{
return;
}
--DestUnpSize;
}
while (DestUnpSize >= 0)
{
UnpPtr &= MaxWinMask;
if (Inp.InAddr > ReadTop - 30)
{
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
break;
}
}
if (((WrPtr - UnpPtr) & MaxWinMask) < 270 && WrPtr != UnpPtr)
{
await UnpWriteBuf20Async(cancellationToken).ConfigureAwait(false);
if (Suspended)
{
return;
}
}
if (UnpAudioBlock)
{
var AudioNumber = DecodeNumber(Inp, MD[UnpCurChannel]);
if (AudioNumber == 256)
{
if (!await ReadTables20Async(cancellationToken).ConfigureAwait(false))
{
break;
}
continue;
}
Window[UnpPtr++] = DecodeAudio((int)AudioNumber);
if (++UnpCurChannel == UnpChannels)
{
UnpCurChannel = 0;
}
--DestUnpSize;
continue;
}
var Number = DecodeNumber(Inp, BlockTables.LD);
if (Number < 256)
{
Window[UnpPtr++] = (byte)Number;
--DestUnpSize;
continue;
}
if (Number > 269)
{
var Length = (uint)(LDecode[Number -= 270] + 3);
if ((Bits = LBits[Number]) > 0)
{
Length += Inp.getbits() >> (int)(16 - Bits);
Inp.addbits(Bits);
}
var DistNumber = DecodeNumber(Inp, BlockTables.DD);
var Distance = DDecode[DistNumber] + 1;
if ((Bits = DBits[DistNumber]) > 0)
{
Distance += Inp.getbits() >> (int)(16 - Bits);
Inp.addbits(Bits);
}
if (Distance >= 0x2000)
{
Length++;
if (Distance >= 0x40000L)
{
Length++;
}
}
CopyString20(Length, Distance);
continue;
}
if (Number == 269)
{
if (!await ReadTables20Async(cancellationToken).ConfigureAwait(false))
{
break;
}
continue;
}
if (Number == 256)
{
CopyString20(LastLength, LastDist);
continue;
}
if (Number < 261)
{
var Distance = OldDist[(OldDistPtr - (Number - 256)) & 3];
var LengthNumber = DecodeNumber(Inp, BlockTables.RD);
var Length = (uint)(LDecode[LengthNumber] + 2);
if ((Bits = LBits[LengthNumber]) > 0)
{
Length += Inp.getbits() >> (int)(16 - Bits);
Inp.addbits(Bits);
}
if (Distance >= 0x101)
{
Length++;
if (Distance >= 0x2000)
{
Length++;
if (Distance >= 0x40000)
{
Length++;
}
}
}
CopyString20(Length, Distance);
continue;
}
if (Number < 270)
{
var Distance = (uint)(SDDecode[Number -= 261] + 1);
if ((Bits = SDBits[Number]) > 0)
{
Distance += Inp.getbits() >> (int)(16 - Bits);
Inp.addbits(Bits);
}
CopyString20(2, Distance);
continue;
}
}
ReadLastTables();
await UnpWriteBuf20Async(cancellationToken).ConfigureAwait(false);
}
private void UnpWriteBuf20()
{
if (UnpPtr != WrPtr)
@@ -370,6 +534,36 @@ internal partial class Unpack
WrPtr = UnpPtr;
}
private async System.Threading.Tasks.Task UnpWriteBuf20Async(
System.Threading.CancellationToken cancellationToken = default
)
{
if (UnpPtr != WrPtr)
{
UnpSomeRead = true;
}
if (UnpPtr < WrPtr)
{
await UnpIO_UnpWriteAsync(
Window,
WrPtr,
(uint)(-(int)WrPtr & MaxWinMask),
cancellationToken
)
.ConfigureAwait(false);
await UnpIO_UnpWriteAsync(Window, 0, UnpPtr, cancellationToken).ConfigureAwait(false);
UnpAllBuf = true;
}
else
{
await UnpIO_UnpWriteAsync(Window, WrPtr, UnpPtr - WrPtr, cancellationToken)
.ConfigureAwait(false);
}
WrPtr = UnpPtr;
}
private bool ReadTables20()
{
Span<byte> BitLength = stackalloc byte[checked((int)BC20)];
@@ -490,6 +684,130 @@ internal partial class Unpack
return true;
}
private async System.Threading.Tasks.Task<bool> ReadTables20Async(
System.Threading.CancellationToken cancellationToken = default
)
{
byte[] BitLength = new byte[checked((int)BC20)];
byte[] Table = new byte[checked((int)MC20 * 4)];
if (Inp.InAddr > ReadTop - 25)
{
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return false;
}
}
var BitField = Inp.getbits();
UnpAudioBlock = (BitField & 0x8000) != 0;
if ((BitField & 0x4000) != 0)
{
Array.Clear(UnpOldTable20, 0, UnpOldTable20.Length);
}
Inp.addbits(2);
uint TableSize;
if (UnpAudioBlock)
{
UnpChannels = ((BitField >> 12) & 3) + 1;
if (UnpCurChannel >= UnpChannels)
{
UnpCurChannel = 0;
}
Inp.addbits(2);
TableSize = MC20 * UnpChannels;
}
else
{
TableSize = NC20 + DC20 + RC20;
}
for (int I = 0; I < checked((int)BC20); I++)
{
BitLength[I] = (byte)(Inp.getbits() >> 12);
Inp.addbits(4);
}
MakeDecodeTables(BitLength, 0, BlockTables.BD, BC20);
for (int I = 0; I < checked((int)TableSize); )
{
if (Inp.InAddr > ReadTop - 5)
{
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return false;
}
}
var Number = DecodeNumber(Inp, BlockTables.BD);
if (Number < 16)
{
Table[I] = (byte)((Number + UnpOldTable20[I]) & 0xF);
I++;
}
else if (Number == 16)
{
var N = (Inp.getbits() >> 14) + 3;
Inp.addbits(2);
if (I == 0)
{
// A "repeat previous" code cannot occur at the first position.
return false;
}
while (N-- > 0 && I < checked((int)TableSize))
{
Table[I] = Table[I - 1];
I++;
}
}
else
{
uint N;
if (Number == 17)
{
N = (Inp.getbits() >> 13) + 3;
Inp.addbits(3);
}
else
{
N = (Inp.getbits() >> 9) + 11;
Inp.addbits(7);
}
while (N-- > 0 && I < checked((int)TableSize))
{
Table[I++] = 0;
}
}
}
if (UnpAudioBlock)
{
for (int I = 0; I < UnpChannels; I++)
{
MakeDecodeTables(Table, (int)(I * MC20), MD[I], MC20);
}
}
else
{
MakeDecodeTables(Table, 0, BlockTables.LD, NC20);
MakeDecodeTables(Table, (int)NC20, BlockTables.DD, DC20);
MakeDecodeTables(Table, (int)(NC20 + DC20), BlockTables.RD, RC20);
}
Array.Copy(Table, 0, this.UnpOldTable20, 0, UnpOldTable20.Length);
return true;
}
private void ReadLastTables()
{
if (ReadTop >= Inp.InAddr + 5)

View File

@@ -1,793 +0,0 @@
#if !Rar2017_64bit
#else
using nint = System.Int64;
using nuint = System.UInt64;
using size_t = System.UInt64;
#endif
//using static SharpCompress.Compressors.Rar.UnpackV2017.Unpack.Unpack30Local;
/*
namespace SharpCompress.Compressors.Rar.UnpackV2017
{
internal partial class Unpack
{
#if !RarV2017_RAR5ONLY
// We use it instead of direct PPM.DecodeChar call to be sure that
// we reset PPM structures in case of corrupt data. It is important,
// because these structures can be invalid after PPM.DecodeChar returned -1.
int SafePPMDecodeChar()
{
int Ch=PPM.DecodeChar();
if (Ch==-1) // Corrupt PPM data found.
{
PPM.CleanUp(); // Reset possibly corrupt PPM data structures.
UnpBlockType=BLOCK_LZ; // Set faster and more fail proof LZ mode.
}
return(Ch);
}
internal static class Unpack30Local {
public static readonly byte[] LDecode={0,1,2,3,4,5,6,7,8,10,12,14,16,20,24,28,32,40,48,56,64,80,96,112,128,160,192,224};
public static readonly byte[] LBits= {0,0,0,0,0,0,0,0,1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5};
public static readonly int[] DDecode = new int[DC];
public static readonly byte[] DBits = new byte[DC];
public static readonly int[] DBitLengthCounts= {4,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,14,0,12};
public static readonly byte[] SDDecode={0,4,8,16,32,64,128,192};
public static readonly byte[] SDBits= {2,2,3, 4, 5, 6, 6, 6};
}
void Unpack29(bool Solid)
{
uint Bits;
if (DDecode[1]==0)
{
int Dist=0,BitLength=0,Slot=0;
for (int I=0;I<DBitLengthCounts.Length;I++,BitLength++)
for (int J=0;J<DBitLengthCounts[I];J++,Slot++,Dist+=(1<<BitLength))
{
DDecode[Slot]=Dist;
DBits[Slot]=(byte)BitLength;
}
}
FileExtracted=true;
if (!Suspended)
{
UnpInitData(Solid);
if (!UnpReadBuf30())
return;
if ((!Solid || !TablesRead3) && !ReadTables30())
return;
}
while (true)
{
UnpPtr&=MaxWinMask;
if (Inp.InAddr>ReadBorder)
{
if (!UnpReadBuf30())
break;
}
if (((WrPtr-UnpPtr) & MaxWinMask)<260 && WrPtr!=UnpPtr)
{
UnpWriteBuf30();
if (WrittenFileSize>DestUnpSize)
return;
if (Suspended)
{
FileExtracted=false;
return;
}
}
if (UnpBlockType==BLOCK_PPM)
{
// Here speed is critical, so we do not use SafePPMDecodeChar,
// because sometimes even the inline function can introduce
// some additional penalty.
int Ch=PPM.DecodeChar();
if (Ch==-1) // Corrupt PPM data found.
{
PPM.CleanUp(); // Reset possibly corrupt PPM data structures.
UnpBlockType=BLOCK_LZ; // Set faster and more fail proof LZ mode.
break;
}
if (Ch==PPMEscChar)
{
int NextCh=SafePPMDecodeChar();
if (NextCh==0) // End of PPM encoding.
{
if (!ReadTables30())
break;
continue;
}
if (NextCh==-1) // Corrupt PPM data found.
break;
if (NextCh==2) // End of file in PPM mode.
break;
if (NextCh==3) // Read VM code.
{
if (!ReadVMCodePPM())
break;
continue;
}
if (NextCh==4) // LZ inside of PPM.
{
uint Distance=0,Length;
bool Failed=false;
for (int I=0;I<4 && !Failed;I++)
{
int _Ch=SafePPMDecodeChar();
if (_Ch==-1)
Failed=true;
else
if (I==3)
Length=(byte)_Ch;
else
Distance=(Distance<<8)+(byte)_Ch;
}
if (Failed)
break;
CopyString(Length+32,Distance+2);
continue;
}
if (NextCh==5) // One byte distance match (RLE) inside of PPM.
{
int Length=SafePPMDecodeChar();
if (Length==-1)
break;
CopyString((uint)(Length+4),1);
continue;
}
// If we are here, NextCh must be 1, which means that the current byte
// is equal to our 'escape' byte, so we just store it to Window.
}
Window[UnpPtr++]=(byte)Ch;
continue;
}
uint Number=DecodeNumber(Inp,BlockTables.LD);
if (Number<256)
{
Window[UnpPtr++]=(byte)Number;
continue;
}
if (Number>=271)
{
uint Length=(uint)(LDecode[Number-=271]+3);
if ((Bits=LBits[Number])>0)
{
Length+=Inp.getbits()>>(int)(16-Bits);
Inp.addbits(Bits);
}
uint DistNumber=DecodeNumber(Inp,BlockTables.DD);
uint Distance=(uint)(DDecode[DistNumber]+1);
if ((Bits=DBits[DistNumber])>0)
{
if (DistNumber>9)
{
if (Bits>4)
{
Distance+=((Inp.getbits()>>(int)(20-Bits))<<4);
Inp.addbits(Bits-4);
}
if (LowDistRepCount>0)
{
LowDistRepCount--;
Distance+=(uint)PrevLowDist;
}
else
{
uint LowDist=DecodeNumber(Inp,BlockTables.LDD);
if (LowDist==16)
{
LowDistRepCount=(int)(LOW_DIST_REP_COUNT-1);
Distance+=(uint)PrevLowDist;
}
else
{
Distance+=LowDist;
PrevLowDist=(int)LowDist;
}
}
}
else
{
Distance+=Inp.getbits()>>(int)(16-Bits);
Inp.addbits(Bits);
}
}
if (Distance>=0x2000)
{
Length++;
if (Distance>=0x40000)
Length++;
}
InsertOldDist(Distance);
LastLength=Length;
CopyString(Length,Distance);
continue;
}
if (Number==256)
{
if (!ReadEndOfBlock())
break;
continue;
}
if (Number==257)
{
if (!ReadVMCode())
break;
continue;
}
if (Number==258)
{
if (LastLength!=0)
CopyString(LastLength,OldDist[0]);
continue;
}
if (Number<263)
{
uint DistNum=Number-259;
uint Distance=OldDist[DistNum];
for (uint I=DistNum;I>0;I--)
OldDist[I]=OldDist[I-1];
OldDist[0]=Distance;
uint LengthNumber=DecodeNumber(Inp,BlockTables.RD);
int Length=LDecode[LengthNumber]+2;
if ((Bits=LBits[LengthNumber])>0)
{
Length+=(int)(Inp.getbits()>>(int)(16-Bits));
Inp.addbits(Bits);
}
LastLength=(uint)Length;
CopyString((uint)Length,Distance);
continue;
}
if (Number<272)
{
uint Distance=(uint)(SDDecode[Number-=263]+1);
if ((Bits=SDBits[Number])>0)
{
Distance+=Inp.getbits()>>(int)(16-Bits);
Inp.addbits(Bits);
}
InsertOldDist(Distance);
LastLength=2;
CopyString(2,Distance);
continue;
}
}
UnpWriteBuf30();
}
// Return 'false' to quit unpacking the current file or 'true' to continue.
bool ReadEndOfBlock()
{
uint BitField=Inp.getbits();
bool NewTable,NewFile=false;
// "1" - no new file, new table just here.
// "00" - new file, no new table.
// "01" - new file, new table (in beginning of next file).
if ((BitField & 0x8000)!=0)
{
NewTable=true;
Inp.addbits(1);
}
else
{
NewFile=true;
NewTable=(BitField & 0x4000)!=0;
Inp.addbits(2);
}
TablesRead3=!NewTable;
// Quit immediately if the "new file" flag is set. If the "new table" flag
// is present, we'll read the table at the beginning of the next file,
// based on 'TablesRead3' being 'false'.
if (NewFile)
return false;
return ReadTables30(); // Quit only if we failed to read tables.
}
bool ReadVMCode()
{
// The entire VM code is guaranteed to be fully present in the block defined
// by the current Huffman table. The compressor checks that VM code does not
// cross Huffman block boundaries.
uint FirstByte=Inp.getbits()>>8;
Inp.addbits(8);
uint Length=(FirstByte & 7)+1;
if (Length==7)
{
Length=(Inp.getbits()>>8)+7;
Inp.addbits(8);
}
else
if (Length==8)
{
Length=Inp.getbits();
Inp.addbits(16);
}
if (Length==0)
return false;
Array<byte> VMCode(Length);
for (uint I=0;I<Length;I++)
{
// Try to read the new buffer if only one byte is left.
// But if we read all bytes except the last, one byte is enough.
if (Inp.InAddr>=ReadTop-1 && !UnpReadBuf30() && I<Length-1)
return false;
VMCode[I]=Inp.getbits()>>8;
Inp.addbits(8);
}
return AddVMCode(FirstByte,&VMCode[0],Length);
}
bool ReadVMCodePPM()
{
uint FirstByte=(uint)SafePPMDecodeChar();
if ((int)FirstByte==-1)
return false;
uint Length=(FirstByte & 7)+1;
if (Length==7)
{
int B1=SafePPMDecodeChar();
if (B1==-1)
return false;
Length=B1+7;
}
else
if (Length==8)
{
int B1=SafePPMDecodeChar();
if (B1==-1)
return false;
int B2=SafePPMDecodeChar();
if (B2==-1)
return false;
Length=B1*256+B2;
}
if (Length==0)
return false;
Array<byte> VMCode(Length);
for (uint I=0;I<Length;I++)
{
int Ch=SafePPMDecodeChar();
if (Ch==-1)
return false;
VMCode[I]=Ch;
}
return AddVMCode(FirstByte,&VMCode[0],Length);
}
bool AddVMCode(uint FirstByte,byte[] Code,int CodeSize)
{
VMCodeInp.InitBitInput();
//x memcpy(VMCodeInp.InBuf,Code,Min(BitInput.MAX_SIZE,CodeSize));
Array.Copy(Code, 0, VMCodeInp.InBuf, 0, Math.Min(BitInput.MAX_SIZE,CodeSize));
VM.Init();
uint FiltPos;
if ((FirstByte & 0x80)!=0)
{
FiltPos=RarVM.ReadData(VMCodeInp);
if (FiltPos==0)
InitFilters30(false);
else
FiltPos--;
}
else
FiltPos=(uint)this.LastFilter; // Use the same filter as last time.
if (FiltPos>Filters30.Count || FiltPos>OldFilterLengths.Count)
return false;
LastFilter=(int)FiltPos;
bool NewFilter=(FiltPos==Filters30.Count);
UnpackFilter30 StackFilter=new UnpackFilter30(); // New filter for PrgStack.
UnpackFilter30 Filter;
if (NewFilter) // New filter code, never used before since VM reset.
{
if (FiltPos>MAX3_UNPACK_FILTERS)
{
// Too many different filters, corrupt archive.
//delete StackFilter;
return false;
}
Filters30.Add(1);
Filters30[Filters30.Count-1]=Filter=new UnpackFilter30();
StackFilter.ParentFilter=(uint)(Filters30.Count-1);
// Reserve one item to store the data block length of our new filter
// entry. We'll set it to the real block length below, after reading it.
// But we need to initialize it now, because when processing corrupt
// data, we can access this item even before we set it to its real value.
OldFilterLengths.Add(0);
}
else // Filter was used in the past.
{
Filter=Filters30[(int)FiltPos];
StackFilter.ParentFilter=FiltPos;
}
int EmptyCount=0;
for (int I=0;I<PrgStack.Count;I++)
{
PrgStack[I-EmptyCount]=PrgStack[I];
if (PrgStack[I]==null)
EmptyCount++;
if (EmptyCount>0)
PrgStack[I]=null;
}
if (EmptyCount==0)
{
if (PrgStack.Count>MAX3_UNPACK_FILTERS)
{
//delete StackFilter;
return false;
}
PrgStack.Add(1);
EmptyCount=1;
}
size_t StackPos=(uint)(this.PrgStack.Count-EmptyCount);
PrgStack[(int)StackPos]=StackFilter;
uint BlockStart=RarVM.ReadData(VMCodeInp);
if ((FirstByte & 0x40)!=0)
BlockStart+=258;
StackFilter.BlockStart=(uint)((BlockStart+UnpPtr)&MaxWinMask);
if ((FirstByte & 0x20)!=0)
{
StackFilter.BlockLength=RarVM.ReadData(VMCodeInp);
// Store the last data block length for current filter.
OldFilterLengths[(int)FiltPos]=(int)StackFilter.BlockLength;
}
else
{
// Set the data block size to the same value as the previous block size
// for the same filter. It is possible for corrupt data to access a new
// and not yet filled item of the OldFilterLengths array here. This is why
// we set new OldFilterLengths items to zero above.
StackFilter.BlockLength=FiltPos<OldFilterLengths.Count ? OldFilterLengths[(int)FiltPos]:0;
}
StackFilter.NextWindow=WrPtr!=UnpPtr && ((WrPtr-UnpPtr)&MaxWinMask)<=BlockStart;
// DebugLog("\nNextWindow: UnpPtr=%08x WrPtr=%08x BlockStart=%08x",UnpPtr,WrPtr,BlockStart);
memset(StackFilter.Prg.InitR,0,sizeof(StackFilter.Prg.InitR));
StackFilter.Prg.InitR[4]=StackFilter.BlockLength;
if ((FirstByte & 0x10)!=0) // Set registers to optional parameters if any.
{
uint InitMask=VMCodeInp.fgetbits()>>9;
VMCodeInp.faddbits(7);
for (int I=0;I<7;I++)
if ((InitMask & (1<<I)) != 0)
StackFilter.Prg.InitR[I]=RarVM.ReadData(VMCodeInp);
}
if (NewFilter)
{
uint VMCodeSize=RarVM.ReadData(VMCodeInp);
if (VMCodeSize>=0x10000 || VMCodeSize==0)
return false;
Array<byte> VMCode(VMCodeSize);
for (uint I=0;I<VMCodeSize;I++)
{
if (VMCodeInp.Overflow(3))
return false;
VMCode[I]=VMCodeInp.fgetbits()>>8;
VMCodeInp.faddbits(8);
}
VM.Prepare(&VMCode[0],VMCodeSize,&Filter->Prg);
}
StackFilter.Prg.Type=Filter.Prg.Type;
return true;
}
bool UnpReadBuf30()
{
int DataSize=ReadTop-Inp.InAddr; // Data left to process.
if (DataSize<0)
return false;
if (Inp.InAddr>BitInput.MAX_SIZE/2)
{
// If we already processed more than half of the buffer, let's move the
// remaining data to the beginning to free more space for new data and
// ensure that the calling function does not cross the buffer border even
// if we did not read anything here. It also ensures that the read size is
// not less than CRYPT_BLOCK_SIZE, so we can align it without the risk of
// making it zero.
if (DataSize>0)
//x memmove(Inp.InBuf,Inp.InBuf+Inp.InAddr,DataSize);
Array.Copy(Inp.InBuf,Inp.InAddr,Inp.InBuf,0,DataSize);
Inp.InAddr=0;
ReadTop=DataSize;
}
else
DataSize=ReadTop;
int ReadCode=UnpIO_UnpRead(Inp.InBuf,DataSize,BitInput.MAX_SIZE-DataSize);
if (ReadCode>0)
ReadTop+=ReadCode;
ReadBorder=ReadTop-30;
return ReadCode!=-1;
}
void UnpWriteBuf30()
{
uint WrittenBorder=(uint)WrPtr;
uint WriteSize=(uint)((UnpPtr-WrittenBorder)&MaxWinMask);
for (int I=0;I<PrgStack.Count;I++)
{
// Here we apply filters to data which we need to write.
// We always copy data to virtual machine memory before processing.
// We cannot process them just in place in Window buffer, because
// these data can be used for future string matches, so we must
// preserve them in original form.
UnpackFilter30 flt=PrgStack[I];
if (flt==null)
continue;
if (flt.NextWindow)
{
flt.NextWindow=false;
continue;
}
uint BlockStart=flt.BlockStart;
uint BlockLength=flt.BlockLength;
if (((BlockStart-WrittenBorder)&MaxWinMask)<WriteSize)
{
if (WrittenBorder!=BlockStart)
{
UnpWriteArea(WrittenBorder,BlockStart);
WrittenBorder=BlockStart;
WriteSize=(uint)((UnpPtr-WrittenBorder)&MaxWinMask);
}
if (BlockLength<=WriteSize)
{
uint BlockEnd=(BlockStart+BlockLength)&MaxWinMask;
if (BlockStart<BlockEnd || BlockEnd==0)
VM.SetMemory(0,Window+BlockStart,BlockLength);
else
{
uint FirstPartLength=uint(MaxWinSize-BlockStart);
VM.SetMemory(0,Window+BlockStart,FirstPartLength);
VM.SetMemory(FirstPartLength,Window,BlockEnd);
}
VM_PreparedProgram *ParentPrg=&Filters30[flt->ParentFilter]->Prg;
VM_PreparedProgram *Prg=&flt->Prg;
ExecuteCode(Prg);
byte[] FilteredData=Prg.FilteredData;
uint FilteredDataSize=Prg.FilteredDataSize;
delete PrgStack[I];
PrgStack[I]=null;
while (I+1<PrgStack.Count)
{
UnpackFilter30 NextFilter=PrgStack[I+1];
// It is required to check NextWindow here.
if (NextFilter==null || NextFilter.BlockStart!=BlockStart ||
NextFilter.BlockLength!=FilteredDataSize || NextFilter.NextWindow)
break;
// Apply several filters to same data block.
VM.SetMemory(0,FilteredData,FilteredDataSize);
VM_PreparedProgram *ParentPrg=&Filters30[NextFilter.ParentFilter]->Prg;
VM_PreparedProgram *NextPrg=&NextFilter->Prg;
ExecuteCode(NextPrg);
FilteredData=NextPrg.FilteredData;
FilteredDataSize=NextPrg.FilteredDataSize;
I++;
delete PrgStack[I];
PrgStack[I]=null;
}
UnpIO_UnpWrite(FilteredData,0,FilteredDataSize);
UnpSomeRead=true;
WrittenFileSize+=FilteredDataSize;
WrittenBorder=BlockEnd;
WriteSize=(uint)((UnpPtr-WrittenBorder)&MaxWinMask);
}
else
{
// Current filter intersects the window write border, so we adjust
// the window border to process this filter next time, not now.
for (size_t J=I;J<PrgStack.Count;J++)
{
UnpackFilter30 flt=PrgStack[J];
if (flt!=null && flt.NextWindow)
flt.NextWindow=false;
}
WrPtr=WrittenBorder;
return;
}
}
}
UnpWriteArea(WrittenBorder,UnpPtr);
WrPtr=UnpPtr;
}
void ExecuteCode(VM_PreparedProgram *Prg)
{
Prg->InitR[6]=(uint)WrittenFileSize;
VM.Execute(Prg);
}
bool ReadTables30()
{
byte[] BitLength = new byte[BC];
byte[] Table = new byte[HUFF_TABLE_SIZE30];
if (Inp.InAddr>ReadTop-25)
if (!UnpReadBuf30())
return(false);
Inp.faddbits((uint)((8-Inp.InBit)&7));
uint BitField=Inp.fgetbits();
if ((BitField & 0x8000) != 0)
{
UnpBlockType=BLOCK_PPM;
return(PPM.DecodeInit(this,PPMEscChar));
}
UnpBlockType=BLOCK_LZ;
PrevLowDist=0;
LowDistRepCount=0;
if ((BitField & 0x4000) == 0)
Utility.Memset(UnpOldTable,0,UnpOldTable.Length);
Inp.faddbits(2);
for (uint I=0;I<BC;I++)
{
uint Length=(byte)(Inp.fgetbits() >> 12);
Inp.faddbits(4);
if (Length==15)
{
uint ZeroCount=(byte)(Inp.fgetbits() >> 12);
Inp.faddbits(4);
if (ZeroCount==0)
BitLength[I]=15;
else
{
ZeroCount+=2;
while (ZeroCount-- > 0 && I<BitLength.Length)
BitLength[I++]=0;
I--;
}
}
else
BitLength[I]=(byte)Length;
}
MakeDecodeTables(BitLength,0,BlockTables.BD,BC30);
const uint TableSize=HUFF_TABLE_SIZE30;
for (uint I=0;I<TableSize;)
{
if (Inp.InAddr>ReadTop-5)
if (!UnpReadBuf30())
return(false);
uint Number=DecodeNumber(Inp,BlockTables.BD);
if (Number<16)
{
Table[I]=(byte)((Number+this.UnpOldTable[I]) & 0xf);
I++;
}
else
if (Number<18)
{
uint N;
if (Number==16)
{
N=(Inp.fgetbits() >> 13)+3;
Inp.faddbits(3);
}
else
{
N=(Inp.fgetbits() >> 9)+11;
Inp.faddbits(7);
}
if (I==0)
return false; // We cannot have "repeat previous" code at the first position.
else
while (N-- > 0 && I<TableSize)
{
Table[I]=Table[I-1];
I++;
}
}
else
{
uint N;
if (Number==18)
{
N=(Inp.fgetbits() >> 13)+3;
Inp.faddbits(3);
}
else
{
N=(Inp.fgetbits() >> 9)+11;
Inp.faddbits(7);
}
while (N-- > 0 && I<TableSize)
Table[I++]=0;
}
}
TablesRead3=true;
if (Inp.InAddr>ReadTop)
return false;
MakeDecodeTables(Table,0,BlockTables.LD,NC30);
MakeDecodeTables(Table,(int)NC30,BlockTables.DD,DC30);
MakeDecodeTables(Table,(int)(NC30+DC30),BlockTables.LDD,LDC30);
MakeDecodeTables(Table,(int)(NC30+DC30+LDC30),BlockTables.RD,RC30);
//x memcpy(UnpOldTable,Table,sizeof(UnpOldTable));
Array.Copy(Table,0,UnpOldTable,0,UnpOldTable.Length);
return true;
}
#endif
void UnpInitData30(bool Solid)
{
if (!Solid)
{
TablesRead3=false;
Utility.Memset(UnpOldTable, 0, UnpOldTable.Length);
PPMEscChar=2;
UnpBlockType=BLOCK_LZ;
}
InitFilters30(Solid);
}
void InitFilters30(bool Solid)
{
if (!Solid)
{
//OldFilterLengths.SoftReset();
OldFilterLengths.Clear();
LastFilter=0;
//for (size_t I=0;I<Filters30.Count;I++)
// delete Filters30[I];
//Filters30.SoftReset();
Filters30.Clear();
}
//for (size_t I=0;I<PrgStack.Count;I++)
// delete PrgStack[I];
//PrgStack.SoftReset();
PrgStack.Clear();
}
}
}
*/

View File

@@ -222,6 +222,180 @@ internal partial class Unpack
UnpWriteBuf();
}
private async System.Threading.Tasks.Task Unpack5Async(
bool Solid,
System.Threading.CancellationToken cancellationToken = default
)
{
FileExtracted = true;
if (!Suspended)
{
UnpInitData(Solid);
if (!await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
return;
}
if (
!ReadBlockHeader(Inp, ref BlockHeader)
|| !ReadTables(Inp, ref BlockHeader, ref BlockTables)
|| !TablesRead5
)
{
return;
}
}
while (true)
{
UnpPtr &= MaxWinMask;
if (Inp.InAddr >= ReadBorder)
{
var FileDone = false;
while (
Inp.InAddr > BlockHeader.BlockStart + BlockHeader.BlockSize - 1
|| Inp.InAddr == BlockHeader.BlockStart + BlockHeader.BlockSize - 1
&& Inp.InBit >= BlockHeader.BlockBitSize
)
{
if (BlockHeader.LastBlockInFile)
{
FileDone = true;
break;
}
if (
!ReadBlockHeader(Inp, ref BlockHeader)
|| !ReadTables(Inp, ref BlockHeader, ref BlockTables)
)
{
return;
}
}
if (FileDone || !await UnpReadBufAsync(cancellationToken).ConfigureAwait(false))
{
break;
}
}
if (((WriteBorder - UnpPtr) & MaxWinMask) < MAX_LZ_MATCH + 3 && WriteBorder != UnpPtr)
{
await UnpWriteBufAsync(cancellationToken).ConfigureAwait(false);
if (WrittenFileSize > DestUnpSize)
{
return;
}
}
uint MainSlot = DecodeNumber(Inp, BlockTables.LD);
if (MainSlot < 256)
{
if (Fragmented)
{
FragWindow[UnpPtr++] = (byte)MainSlot;
}
else
{
Window[UnpPtr++] = (byte)MainSlot;
}
continue;
}
if (MainSlot >= 262)
{
uint Length = SlotToLength(Inp, MainSlot - 262);
uint DBits,
Distance = 1,
DistSlot = DecodeNumber(Inp, BlockTables.DD);
if (DistSlot < 4)
{
DBits = 0;
Distance += DistSlot;
}
else
{
DBits = (DistSlot / 2) - 1;
Distance += (2 | (DistSlot & 1)) << (int)DBits;
}
if (DBits > 0)
{
if (DBits >= 4)
{
if (DBits > 4)
{
Distance += ((Inp.getbits() >> (int)(20 - DBits)) << 4);
Inp.addbits(DBits - 4);
}
uint LowDist = DecodeNumber(Inp, BlockTables.LDD);
Distance += LowDist;
}
else
{
Distance += Inp.getbits() >> (int)(16 - DBits);
Inp.addbits(DBits);
}
}
if (Distance > 0x100)
{
Length++;
if (Distance > 0x2000)
{
Length++;
if (Distance > 0x40000)
{
Length++;
}
}
}
InsertOldDist(Distance);
LastLength = Length;
CopyString(Length, Distance);
continue;
}
if (MainSlot == 256)
{
var Filter = new UnpackFilter();
if (!ReadFilter(Inp, Filter) || !AddFilter(Filter))
{
break;
}
continue;
}
if (MainSlot == 257)
{
if (LastLength != 0)
{
CopyString((uint)LastLength, (uint)OldDist[0]);
}
continue;
}
if (MainSlot < 262)
{
uint DistNum = MainSlot - 258;
uint Distance = (uint)OldDist[(int)DistNum];
for (var I = (int)DistNum; I > 0; I--)
{
OldDist[I] = OldDist[I - 1];
}
OldDist[0] = Distance;
uint LengthSlot = DecodeNumber(Inp, BlockTables.RD);
uint Length = SlotToLength(Inp, LengthSlot);
LastLength = Length;
CopyString(Length, Distance);
continue;
}
}
await UnpWriteBufAsync(cancellationToken).ConfigureAwait(false);
}
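// Worked example (illustrative): for DistSlot = 6 the code above computes
// DBits = 6 / 2 - 1 = 2 and a base of (2 | (6 & 1)) << 2 = 8, so with the
// initial Distance of 1 the result lands in 9..12 depending on the two extra
// bits read from the input.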
private uint ReadFilterData(BitInput Inp)
{
var ByteCount = (Inp.fgetbits() >> 14) + 1;
@@ -339,6 +513,58 @@ internal partial class Unpack
return ReadCode != -1;
}
private async System.Threading.Tasks.Task<bool> UnpReadBufAsync(
System.Threading.CancellationToken cancellationToken = default
)
{
var DataSize = ReadTop - Inp.InAddr; // Data left to process.
if (DataSize < 0)
{
return false;
}
BlockHeader.BlockSize -= Inp.InAddr - BlockHeader.BlockStart;
if (Inp.InAddr > MAX_SIZE / 2)
{
if (DataSize > 0)
{
Buffer.BlockCopy(Inp.InBuf, Inp.InAddr, Inp.InBuf, 0, DataSize);
}
Inp.InAddr = 0;
ReadTop = DataSize;
}
else
{
DataSize = ReadTop;
}
var ReadCode = 0;
if (MAX_SIZE != DataSize)
{
ReadCode = await UnpIO_UnpReadAsync(
Inp.InBuf,
DataSize,
MAX_SIZE - DataSize,
cancellationToken
)
.ConfigureAwait(false);
}
if (ReadCode > 0) // Can be also -1.
{
ReadTop += ReadCode;
}
ReadBorder = ReadTop - 30;
BlockHeader.BlockStart = Inp.InAddr;
if (BlockHeader.BlockSize != -1) // '-1' means not defined yet.
{
ReadBorder = Math.Min(ReadBorder, BlockHeader.BlockStart + BlockHeader.BlockSize - 1);
}
return ReadCode != -1;
}
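// Informal note (paraphrasing the comment kept in the RAR 3.0 reader): once
// more than half of the buffer has been consumed, the unread tail is slid to
// the front so callers never run past the buffer border even when no new
// bytes are read here.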
private void UnpWriteBuf()
{
var WrittenBorder = WrPtr;
@@ -533,6 +759,163 @@ internal partial class Unpack
}
}
private async System.Threading.Tasks.Task UnpWriteBufAsync(
System.Threading.CancellationToken cancellationToken = default
)
{
var WrittenBorder = WrPtr;
var FullWriteSize = (UnpPtr - WrittenBorder) & MaxWinMask;
var WriteSizeLeft = FullWriteSize;
var NotAllFiltersProcessed = false;
for (var I = 0; I < Filters.Count; I++)
{
var flt = Filters[I];
if (flt.Type == FILTER_NONE)
{
continue;
}
if (flt.NextWindow)
{
if (((flt.BlockStart - WrPtr) & MaxWinMask) <= FullWriteSize)
{
flt.NextWindow = false;
}
continue;
}
var BlockStart = flt.BlockStart;
var BlockLength = flt.BlockLength;
if (((BlockStart - WrittenBorder) & MaxWinMask) < WriteSizeLeft)
{
if (WrittenBorder != BlockStart)
{
await UnpWriteAreaAsync(WrittenBorder, BlockStart, cancellationToken)
.ConfigureAwait(false);
WrittenBorder = BlockStart;
WriteSizeLeft = (UnpPtr - WrittenBorder) & MaxWinMask;
}
if (BlockLength <= WriteSizeLeft)
{
if (BlockLength > 0)
{
var BlockEnd = (BlockStart + BlockLength) & MaxWinMask;
FilterSrcMemory = EnsureCapacity(
FilterSrcMemory,
checked((int)BlockLength)
);
var Mem = FilterSrcMemory;
if (BlockStart < BlockEnd || BlockEnd == 0)
{
if (Fragmented)
{
FragWindow.CopyData(Mem, 0, BlockStart, BlockLength);
}
else
{
Buffer.BlockCopy(Window, (int)BlockStart, Mem, 0, (int)BlockLength);
}
}
else
{
var FirstPartLength = MaxWinSize - BlockStart;
if (Fragmented)
{
FragWindow.CopyData(Mem, 0, BlockStart, FirstPartLength);
FragWindow.CopyData(Mem, FirstPartLength, 0, BlockEnd);
}
else
{
Buffer.BlockCopy(
Window,
(int)BlockStart,
Mem,
0,
(int)FirstPartLength
);
Buffer.BlockCopy(
Window,
0,
Mem,
(int)FirstPartLength,
(int)BlockEnd
);
}
}
var OutMem = ApplyFilter(Mem, BlockLength, flt);
Filters[I].Type = FILTER_NONE;
if (OutMem != null)
{
await UnpIO_UnpWriteAsync(OutMem, 0, BlockLength, cancellationToken)
.ConfigureAwait(false);
WrittenFileSize += BlockLength;
}
WrittenBorder = BlockEnd;
WriteSizeLeft = (UnpPtr - WrittenBorder) & MaxWinMask;
}
}
else
{
NotAllFiltersProcessed = true;
for (var J = I; J < Filters.Count; J++)
{
var fltj = Filters[J];
if (
fltj.Type != FILTER_NONE
&& fltj.NextWindow == false
&& ((fltj.BlockStart - WrPtr) & MaxWinMask) < FullWriteSize
)
{
fltj.NextWindow = true;
}
}
break;
}
}
}
var EmptyCount = 0;
for (var I = 0; I < Filters.Count; I++)
{
if (EmptyCount > 0)
{
Filters[I - EmptyCount] = Filters[I];
}
if (Filters[I].Type == FILTER_NONE)
{
EmptyCount++;
}
}
if (EmptyCount > 0)
{
Filters.RemoveRange(Filters.Count - EmptyCount, EmptyCount);
}
if (!NotAllFiltersProcessed)
{
await UnpWriteAreaAsync(WrittenBorder, UnpPtr, cancellationToken).ConfigureAwait(false);
WrPtr = UnpPtr;
}
WriteBorder = (UnpPtr + Math.Min(MaxWinSize, UNPACK_MAX_WRITE)) & MaxWinMask;
if (
WriteBorder == UnpPtr
|| WrPtr != UnpPtr
&& ((WrPtr - UnpPtr) & MaxWinMask) < ((WriteBorder - UnpPtr) & MaxWinMask)
)
{
WriteBorder = WrPtr;
}
}
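// Informal note: the recomputed WriteBorder forces a flush at least every
// UNPACK_MAX_WRITE decoded bytes, and the check above pulls it back to WrPtr
// whenever the written border is already the nearer limit ahead of UnpPtr.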
private byte[] ApplyFilter(byte[] __d, uint DataSize, UnpackFilter Flt)
{
var Data = 0;
@@ -664,6 +1047,48 @@ internal partial class Unpack
}
}
private async System.Threading.Tasks.Task UnpWriteAreaAsync(
size_t StartPtr,
size_t EndPtr,
System.Threading.CancellationToken cancellationToken = default
)
{
if (EndPtr != StartPtr)
{
UnpSomeRead = true;
}
if (EndPtr < StartPtr)
{
UnpAllBuf = true;
}
if (Fragmented)
{
var SizeToWrite = (EndPtr - StartPtr) & MaxWinMask;
while (SizeToWrite > 0)
{
var BlockSize = FragWindow.GetBlockSize(StartPtr, SizeToWrite);
FragWindow.GetBuffer(StartPtr, out var __buffer, out var __offset);
await UnpWriteDataAsync(__buffer, __offset, BlockSize, cancellationToken)
.ConfigureAwait(false);
SizeToWrite -= BlockSize;
StartPtr = (StartPtr + BlockSize) & MaxWinMask;
}
}
else if (EndPtr < StartPtr)
{
await UnpWriteDataAsync(Window, StartPtr, MaxWinSize - StartPtr, cancellationToken)
.ConfigureAwait(false);
await UnpWriteDataAsync(Window, 0, EndPtr, cancellationToken).ConfigureAwait(false);
}
else
{
await UnpWriteDataAsync(Window, StartPtr, EndPtr - StartPtr, cancellationToken)
.ConfigureAwait(false);
}
}
private void UnpWriteData(byte[] Data, size_t offset, size_t Size)
{
if (WrittenFileSize >= DestUnpSize)
@@ -682,6 +1107,29 @@ internal partial class Unpack
WrittenFileSize += Size;
}
private async System.Threading.Tasks.Task UnpWriteDataAsync(
byte[] Data,
size_t offset,
size_t Size,
System.Threading.CancellationToken cancellationToken = default
)
{
if (WrittenFileSize >= DestUnpSize)
{
return;
}
var WriteSize = Size;
var LeftToWrite = DestUnpSize - WrittenFileSize;
if (WriteSize > LeftToWrite)
{
WriteSize = (size_t)LeftToWrite;
}
await UnpIO_UnpWriteAsync(Data, offset, WriteSize, cancellationToken).ConfigureAwait(false);
WrittenFileSize += Size;
}
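// As in the synchronous writer: WrittenFileSize advances by the full Size, so
// the 'WrittenFileSize > DestUnpSize' checks in the decode loops still fire
// even when the actual write was clamped to LeftToWrite.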
private void UnpInitData50(bool Solid)
{
if (!Solid)

View File

@@ -1,16 +1,11 @@
#nullable disable
using System;
using System.Buffers;
using SharpCompress.Common;
using static SharpCompress.Compressors.Rar.UnpackV2017.PackDef;
using static SharpCompress.Compressors.Rar.UnpackV2017.UnpackGlobal;
#if !Rar2017_64bit
using size_t = System.UInt32;
#else
using nint = System.Int64;
using nuint = System.UInt64;
using size_t = System.UInt64;
#endif
namespace SharpCompress.Compressors.Rar.UnpackV2017;
@@ -42,11 +37,9 @@ internal sealed partial class Unpack : BitInput
// It prevents a crash if the first DoUnpack call is later made with a wrong
// (true) 'Solid' value.
UnpInitData(false);
#if !RarV2017_SFX_MODULE
// RAR 1.5 decompression initialization
UnpInitData15(false);
InitHuff();
#endif
}
// later: may need Dispose() if we support thread pool
@@ -110,7 +103,7 @@ internal sealed partial class Unpack : BitInput
throw new InvalidFormatException("Grow && Fragmented");
}
var NewWindow = Fragmented ? null : new byte[WinSize];
var NewWindow = Fragmented ? null : ArrayPool<byte>.Shared.Rent((int)WinSize);
if (NewWindow == null)
{
@@ -126,6 +119,7 @@ internal sealed partial class Unpack : BitInput
if (Window != null) // If allocated by preceding files.
{
//free(Window);
ArrayPool<byte>.Shared.Return(Window);
Window = null;
}
FragWindow.Init(WinSize);
@@ -170,7 +164,6 @@ internal sealed partial class Unpack : BitInput
// just for extra safety.
switch (Method)
{
#if !RarV2017_SFX_MODULE
case 15: // rar 1.5 compression
if (!Fragmented)
{
@@ -186,33 +179,64 @@ internal sealed partial class Unpack : BitInput
}
break;
#endif
#if !RarV2017_RAR5ONLY
case 29: // rar 3.x compression
if (!Fragmented)
{
throw new NotImplementedException();
}
break;
case 50: // RAR 5.0 compression algorithm.
Unpack5(Solid);
break;
#if !Rar2017_NOSTRICT
default:
throw new InvalidFormatException("unknown compression method " + Method);
#endif
}
}
private async System.Threading.Tasks.Task DoUnpackAsync(
uint Method,
bool Solid,
System.Threading.CancellationToken cancellationToken = default
)
{
// Methods <50 will crash in Fragmented mode when accessing NULL Window.
// They cannot be called in such mode now, but we check it below anyway
// just for extra safety.
switch (Method)
{
#if !RarV2017_SFX_MODULE
case 15: // rar 1.5 compression
if (!Fragmented)
{
await Unpack15Async(Solid, cancellationToken).ConfigureAwait(false);
}
break;
case 20: // rar 2.x compression
case 26: // files larger than 2GB
if (!Fragmented)
{
await Unpack20Async(Solid, cancellationToken).ConfigureAwait(false);
}
break;
#endif
#if !RarV2017_RAR5ONLY
case 29: // rar 3.x compression
if (!Fragmented)
{
// TODO: Create Unpack29Async when ready
throw new NotImplementedException();
}
break;
#endif
case 50: // RAR 5.0 compression algorithm.
/*#if RarV2017_RAR_SMP
if (MaxUserThreads > 1)
{
// We do not use the multithreaded unpack routine to repack RAR archives
// in 'suspended' mode, because unlike the single threaded code it can
// write more than one dictionary for same loop pass. So we would need
// larger buffers of unknown size. Also we do not support multithreading
// in fragmented window mode.
if (!Fragmented)
{
Unpack5MT(Solid);
break;
}
}
#endif*/
Unpack5(Solid);
// RAR 5.0 has full async support via UnpReadBufAsync and UnpWriteBuf
await Unpack5Async(Solid, cancellationToken).ConfigureAwait(false);
break;
#if !Rar2017_NOSTRICT
default:

View File

@@ -1,14 +1,13 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Compressors.RLE90;
using SharpCompress.IO;
namespace SharpCompress.Compressors.Squeezed
{
[CLSCompliant(true)]
public class SqueezeStream : Stream, IStreamStack
{
#if DEBUG_STREAMS
@@ -35,12 +34,15 @@ namespace SharpCompress.Compressors.Squeezed
private readonly int _compressedSize;
private const int NUMVALS = 257;
private const int SPEOF = 256;
private bool _processed = false;
private Stream _decodedStream;
public SqueezeStream(Stream stream, int compressedSize)
{
_stream = stream;
_stream = stream ?? throw new ArgumentNullException(nameof(stream));
_compressedSize = compressedSize;
_decodedStream = BuildDecodedStream();
#if DEBUG_STREAMS
this.DebugConstruct(typeof(SqueezeStream));
#endif
@@ -51,52 +53,46 @@ namespace SharpCompress.Compressors.Squeezed
#if DEBUG_STREAMS
this.DebugDispose(typeof(SqueezeStream));
#endif
_decodedStream?.Dispose();
base.Dispose(disposing);
}
public override bool CanRead => true;
public override bool CanSeek => false;
public override bool CanWrite => false;
public override long Length => throw new NotImplementedException();
public override long Length => throw new NotSupportedException();
public override long Position
{
get => _stream.Position;
set => throw new NotImplementedException();
get => throw new NotSupportedException();
set => throw new NotSupportedException();
}
public override void Flush() => throw new NotImplementedException();
public override void Flush() => throw new NotSupportedException();
public override long Seek(long offset, SeekOrigin origin) =>
throw new NotSupportedException();
public override void SetLength(long value) => throw new NotSupportedException();
public override void Write(byte[] buffer, int offset, int count) =>
throw new NotSupportedException();
public override int Read(byte[] buffer, int offset, int count)
{
if (_processed)
{
return 0;
}
_processed = true;
using var binaryReader = new BinaryReader(_stream);
return _decodedStream.Read(buffer, offset, count);
}
// Read numnodes (equivalent to convert_u16!(numnodes, buf))
var numnodes = binaryReader.ReadUInt16();
private Stream BuildDecodedStream()
{
var binaryReader = new BinaryReader(_stream, Encoding.Default, leaveOpen: true);
int numnodes = binaryReader.ReadUInt16();
// Validation: numnodes should be within bounds
if (numnodes >= NUMVALS)
if (numnodes >= NUMVALS || numnodes == 0)
{
throw new InvalidDataException(
$"Invalid number of nodes {numnodes} (max {NUMVALS - 1})"
);
return new MemoryStream(Array.Empty<byte>());
}
// Handle the case where no nodes exist
if (numnodes == 0)
{
return 0;
}
// Build dnode (tree of nodes)
var dnode = new int[numnodes, 2];
for (int j = 0; j < numnodes; j++)
{
@@ -104,42 +100,27 @@ namespace SharpCompress.Compressors.Squeezed
dnode[j, 1] = binaryReader.ReadInt16();
}
// Initialize BitReader for reading bits
var bitReader = new BitReader(_stream);
var decoded = new List<byte>();
var huffmanDecoded = new MemoryStream();
int i = 0;
// Decode the buffer using the dnode tree
while (true)
{
i = dnode[i, bitReader.ReadBit() ? 1 : 0];
if (i < 0)
{
i = (short)-(i + 1);
i = -(i + 1);
if (i == SPEOF)
{
break;
}
else
{
decoded.Add((byte)i);
i = 0;
}
huffmanDecoded.WriteByte((byte)i);
i = 0;
}
}
// Unpack the decoded buffer using the RLE class
var unpacked = RLE.UnpackRLE(decoded.ToArray());
unpacked.CopyTo(buffer, 0);
return unpacked.Count();
huffmanDecoded.Position = 0;
return new RunLength90Stream(huffmanDecoded, (int)huffmanDecoded.Length);
}
public override long Seek(long offset, SeekOrigin origin) =>
throw new NotImplementedException();
public override void SetLength(long value) => throw new NotImplementedException();
public override void Write(byte[] buffer, int offset, int count) =>
throw new NotImplementedException();
}
}
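For context, the decode loop above walks a Huffman table in which each row holds two child indices; a negative value v encodes the leaf symbol -(v + 1), and SPEOF (256) terminates the stream. A minimal self-contained sketch of that convention, using a hypothetical two-row table and bit sequence (not data from any real archive):

using System.Collections.Generic;

// Row 0: bit 0 -> leaf 'A' (65), bit 1 -> row 1.
// Row 1: bit 0 -> leaf SPEOF (256), bit 1 -> leaf 'B' (66).
// Leaves are stored as -(value + 1), matching the dnode convention above.
var dnode = new int[,] { { -(65 + 1), 1 }, { -(256 + 1), -(66 + 1) } };
var bits = new[] { false, true, true, true, false }; // decodes 'A', 'B', then SPEOF
var output = new List<byte>();
var node = 0;
foreach (var bit in bits)
{
    node = dnode[node, bit ? 1 : 0];
    if (node < 0)
    {
        node = -(node + 1);
        if (node == 256) // SPEOF
        {
            break;
        }
        output.Add((byte)node);
        node = 0;
    }
}
// output is now { 65, 66 }, i.e. "AB"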

View File

@@ -1,6 +1,8 @@
using System;
using System.Buffers.Binary;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Compressors.Xz;
@@ -30,6 +32,28 @@ public static class BinaryUtils
internal static uint ReadLittleEndianUInt32(this Stream stream) =>
unchecked((uint)ReadLittleEndianInt32(stream));
public static async Task<int> ReadLittleEndianInt32Async(
this Stream stream,
CancellationToken cancellationToken = default
)
{
var bytes = new byte[4];
var read = await stream.ReadFullyAsync(bytes, cancellationToken).ConfigureAwait(false);
if (!read)
{
throw new EndOfStreamException();
}
return BinaryPrimitives.ReadInt32LittleEndian(bytes);
}
internal static async Task<uint> ReadLittleEndianUInt32Async(
this Stream stream,
CancellationToken cancellationToken = default
) =>
unchecked(
(uint)await ReadLittleEndianInt32Async(stream, cancellationToken).ConfigureAwait(false)
);
internal static byte[] ToBigEndianBytes(this uint uint32)
{
var result = BitConverter.GetBytes(uint32);

View File

@@ -1,5 +1,7 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Compressors.Xz;
@@ -39,4 +41,75 @@ internal static class MultiByteIntegers
}
return Output;
}
public static async Task<ulong> ReadXZIntegerAsync(
this BinaryReader reader,
CancellationToken cancellationToken = default,
int MaxBytes = 9
)
{
if (MaxBytes <= 0)
{
throw new ArgumentOutOfRangeException(nameof(MaxBytes));
}
if (MaxBytes > 9)
{
MaxBytes = 9;
}
var LastByte = await ReadByteAsync(reader, cancellationToken).ConfigureAwait(false);
var Output = (ulong)LastByte & 0x7F;
var i = 0;
while ((LastByte & 0x80) != 0)
{
if (++i >= MaxBytes)
{
throw new InvalidFormatException();
}
LastByte = await ReadByteAsync(reader, cancellationToken).ConfigureAwait(false);
if (LastByte == 0)
{
throw new InvalidFormatException();
}
Output |= ((ulong)(LastByte & 0x7F)) << (i * 7);
}
return Output;
}
public static async Task<byte> ReadByteAsync(
this BinaryReader reader,
CancellationToken cancellationToken = default
)
{
var buffer = new byte[1];
var bytesRead = await reader
.BaseStream.ReadAsync(buffer, 0, 1, cancellationToken)
.ConfigureAwait(false);
if (bytesRead != 1)
{
throw new EndOfStreamException();
}
return buffer[0];
}
public static async Task<byte[]> ReadBytesAsync(
this BinaryReader reader,
int count,
CancellationToken cancellationToken = default
)
{
var buffer = new byte[count];
var bytesRead = await reader
.BaseStream.ReadAsync(buffer, 0, count, cancellationToken)
.ConfigureAwait(false);
if (bytesRead != count)
{
throw new EndOfStreamException();
}
return buffer;
}
}
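ReadXZInteger and ReadXZIntegerAsync parse the XZ variable-length integer encoding: seven data bits per byte, least-significant group first, the high bit set on every byte except the last, and at most nine bytes. A sketch of the inverse operation, included only to illustrate the wire format:

using System.Collections.Generic;

static byte[] EncodeXZInteger(ulong value)
{
    var bytes = new List<byte>();
    do
    {
        var b = (byte)(value & 0x7F); // low seven bits first
        value >>= 7;
        if (value != 0)
        {
            b |= 0x80; // continuation bit: more bytes follow
        }
        bytes.Add(b);
    } while (value != 0);
    return bytes.ToArray();
}

// EncodeXZInteger(300) yields { 0xAC, 0x02 }: the low seven bits 0x2C plus
// the continuation bit give 0xAC, and the remaining bits give 0x02.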

View File

@@ -4,6 +4,8 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Compressors.Xz.Filters;
@@ -72,6 +74,49 @@ public sealed class XZBlock : XZReadOnlyStream
return bytesRead;
}
public override async Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken = default
)
{
var bytesRead = 0;
if (!HeaderIsLoaded)
{
await LoadHeaderAsync(cancellationToken).ConfigureAwait(false);
}
if (!_streamConnected)
{
ConnectStream();
}
if (!_endOfStream)
{
bytesRead = await _decomStream
.ReadAsync(buffer, offset, count, cancellationToken)
.ConfigureAwait(false);
}
if (bytesRead != count)
{
_endOfStream = true;
}
if (_endOfStream && !_paddingSkipped)
{
await SkipPaddingAsync(cancellationToken).ConfigureAwait(false);
}
if (_endOfStream && !_crcChecked)
{
await CheckCrcAsync(cancellationToken).ConfigureAwait(false);
}
return bytesRead;
}
private void SkipPadding()
{
var bytes = (BaseStream.Position - _startPosition) % 4;
@@ -87,6 +132,23 @@ public sealed class XZBlock : XZReadOnlyStream
_paddingSkipped = true;
}
private async Task SkipPaddingAsync(CancellationToken cancellationToken = default)
{
var bytes = (BaseStream.Position - _startPosition) % 4;
if (bytes > 0)
{
var paddingBytes = new byte[4 - bytes];
await BaseStream
.ReadAsync(paddingBytes, 0, paddingBytes.Length, cancellationToken)
.ConfigureAwait(false);
if (paddingBytes.Any(b => b != 0))
{
throw new InvalidFormatException("Padding bytes were non-null");
}
}
_paddingSkipped = true;
}
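The padding math above relies on XZ block content being aligned to four bytes. A worked example with hypothetical positions:

long start = 12, position = 53;                  // hypothetical stream positions
var overshoot = (position - start) % 4;          // 41 % 4 = 1 byte past alignment
var padding = overshoot > 0 ? 4 - overshoot : 0; // 3 padding bytes to skip
// The format requires every skipped padding byte to be zero, hence the
// InvalidFormatException above when any byte is non-zero.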
private void CheckCrc()
{
var crc = new byte[_checkSize];
@@ -96,6 +158,15 @@ public sealed class XZBlock : XZReadOnlyStream
_crcChecked = true;
}
private async Task CheckCrcAsync(CancellationToken cancellationToken = default)
{
var crc = new byte[_checkSize];
await BaseStream.ReadAsync(crc, 0, _checkSize, cancellationToken).ConfigureAwait(false);
// TODO: actually verify the check value; the bytes are still read here
// so the stream position advances past the check field.
_crcChecked = true;
}
private void ConnectStream()
{
_decomStream = BaseStream;
@@ -123,6 +194,21 @@ public sealed class XZBlock : XZReadOnlyStream
HeaderIsLoaded = true;
}
private async Task LoadHeaderAsync(CancellationToken cancellationToken = default)
{
await ReadHeaderSizeAsync(cancellationToken).ConfigureAwait(false);
var headerCache = await CacheHeaderAsync(cancellationToken).ConfigureAwait(false);
using (var cache = new MemoryStream(headerCache))
using (var cachedReader = new BinaryReader(cache))
{
cachedReader.BaseStream.Position = 1; // skip the header size byte
ReadBlockFlags(cachedReader);
ReadFilters(cachedReader);
}
HeaderIsLoaded = true;
}
private void ReadHeaderSize()
{
_blockHeaderSizeByte = (byte)BaseStream.ReadByte();
@@ -132,6 +218,17 @@ public sealed class XZBlock : XZReadOnlyStream
}
}
private async Task ReadHeaderSizeAsync(CancellationToken cancellationToken = default)
{
var buffer = new byte[1];
await BaseStream.ReadAsync(buffer, 0, 1, cancellationToken).ConfigureAwait(false);
_blockHeaderSizeByte = buffer[0];
if (_blockHeaderSizeByte == 0)
{
throw new XZIndexMarkerReachedException();
}
}
private byte[] CacheHeader()
{
var blockHeaderWithoutCrc = new byte[BlockHeaderSize - 4];
@@ -139,7 +236,7 @@ public sealed class XZBlock : XZReadOnlyStream
var read = BaseStream.Read(blockHeaderWithoutCrc, 1, BlockHeaderSize - 5);
if (read != BlockHeaderSize - 5)
{
throw new EndOfStreamException("Reached end of stream unexectedly");
throw new EndOfStreamException("Reached end of stream unexpectedly");
}
var crc = BaseStream.ReadLittleEndianUInt32();
@@ -152,6 +249,30 @@ public sealed class XZBlock : XZReadOnlyStream
return blockHeaderWithoutCrc;
}
private async Task<byte[]> CacheHeaderAsync(CancellationToken cancellationToken = default)
{
var blockHeaderWithoutCrc = new byte[BlockHeaderSize - 4];
blockHeaderWithoutCrc[0] = _blockHeaderSizeByte;
var read = await BaseStream
.ReadAsync(blockHeaderWithoutCrc, 1, BlockHeaderSize - 5, cancellationToken)
.ConfigureAwait(false);
if (read != BlockHeaderSize - 5)
{
throw new EndOfStreamException("Reached end of stream unexpectedly");
}
var crc = await BaseStream
.ReadLittleEndianUInt32Async(cancellationToken)
.ConfigureAwait(false);
var calcCrc = Crc32.Compute(blockHeaderWithoutCrc);
if (crc != calcCrc)
{
throw new InvalidFormatException("Block header corrupt");
}
return blockHeaderWithoutCrc;
}
private void ReadBlockFlags(BinaryReader reader)
{
var blockFlags = reader.ReadByte();

View File

@@ -1,6 +1,8 @@
using System;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
@@ -27,6 +29,16 @@ public class XZFooter
return footer;
}
public static async Task<XZFooter> FromStreamAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
var footer = new XZFooter(new BinaryReader(stream, Encoding.UTF8, true));
await footer.ProcessAsync(cancellationToken).ConfigureAwait(false);
return footer;
}
public void Process()
{
var crc = _reader.ReadLittleEndianUInt32();
@@ -49,4 +61,29 @@ public class XZFooter
throw new InvalidFormatException("Magic footer missing");
}
}
public async Task ProcessAsync(CancellationToken cancellationToken = default)
{
var crc = await _reader
.BaseStream.ReadLittleEndianUInt32Async(cancellationToken)
.ConfigureAwait(false);
var footerBytes = await _reader.ReadBytesAsync(6, cancellationToken).ConfigureAwait(false);
var myCrc = Crc32.Compute(footerBytes);
if (crc != myCrc)
{
throw new InvalidFormatException("Footer corrupt");
}
using (var stream = new MemoryStream(footerBytes))
using (var reader = new BinaryReader(stream))
{
BackwardSize = (reader.ReadLittleEndianUInt32() + 1) * 4;
StreamFlags = reader.ReadBytes(2);
}
var magBy = await _reader.ReadBytesAsync(2, cancellationToken).ConfigureAwait(false);
if (!magBy.AsSpan().SequenceEqual(_magicBytes))
{
throw new InvalidFormatException("Magic footer missing");
}
}
}

View File

@@ -1,6 +1,8 @@
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
@@ -23,12 +25,28 @@ public class XZHeader
return header;
}
public static async Task<XZHeader> FromStreamAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
var header = new XZHeader(new BinaryReader(stream, Encoding.UTF8, true));
await header.ProcessAsync(cancellationToken).ConfigureAwait(false);
return header;
}
public void Process()
{
CheckMagicBytes(_reader.ReadBytes(6));
ProcessStreamFlags();
}
public async Task ProcessAsync(CancellationToken cancellationToken = default)
{
CheckMagicBytes(await _reader.ReadBytesAsync(6, cancellationToken).ConfigureAwait(false));
await ProcessStreamFlagsAsync(cancellationToken).ConfigureAwait(false);
}
private void ProcessStreamFlags()
{
var streamFlags = _reader.ReadBytes(2);
@@ -47,6 +65,26 @@ public class XZHeader
}
}
private async Task ProcessStreamFlagsAsync(CancellationToken cancellationToken = default)
{
var streamFlags = await _reader.ReadBytesAsync(2, cancellationToken).ConfigureAwait(false);
var crc = await _reader
.BaseStream.ReadLittleEndianUInt32Async(cancellationToken)
.ConfigureAwait(false);
var calcCrc = Crc32.Compute(streamFlags);
if (crc != calcCrc)
{
throw new InvalidFormatException("Stream header corrupt");
}
BlockCheckType = (CheckType)(streamFlags[1] & 0x0F);
var futureUse = (byte)(streamFlags[1] & 0xF0);
if (futureUse != 0 || streamFlags[0] != 0)
{
throw new InvalidFormatException("Unknown XZ Stream Version");
}
}
private void CheckMagicBytes(byte[] header)
{
if (!header.SequenceEqual(MagicHeader))

View File

@@ -3,6 +3,8 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
@@ -39,6 +41,20 @@ public class XZIndex
return index;
}
public static async Task<XZIndex> FromStreamAsync(
Stream stream,
bool indexMarkerAlreadyVerified,
CancellationToken cancellationToken = default
)
{
var index = new XZIndex(
new BinaryReader(stream, Encoding.UTF8, true),
indexMarkerAlreadyVerified
);
await index.ProcessAsync(cancellationToken).ConfigureAwait(false);
return index;
}
public void Process()
{
if (!_indexMarkerAlreadyVerified)
@@ -55,6 +71,26 @@ public class XZIndex
VerifyCrc32();
}
public async Task ProcessAsync(CancellationToken cancellationToken = default)
{
if (!_indexMarkerAlreadyVerified)
{
await VerifyIndexMarkerAsync(cancellationToken).ConfigureAwait(false);
}
NumberOfRecords = await _reader.ReadXZIntegerAsync(cancellationToken).ConfigureAwait(false);
for (ulong i = 0; i < NumberOfRecords; i++)
{
Records.Add(
await XZIndexRecord
.FromBinaryReaderAsync(_reader, cancellationToken)
.ConfigureAwait(false)
);
}
await SkipPaddingAsync(cancellationToken).ConfigureAwait(false);
await VerifyCrc32Async(cancellationToken).ConfigureAwait(false);
}
private void VerifyIndexMarker()
{
var marker = _reader.ReadByte();
@@ -64,6 +100,15 @@ public class XZIndex
}
}
private async Task VerifyIndexMarkerAsync(CancellationToken cancellationToken = default)
{
var marker = await _reader.ReadByteAsync(cancellationToken).ConfigureAwait(false);
if (marker != 0)
{
throw new InvalidFormatException("Not an index block");
}
}
private void SkipPadding()
{
var bytes = (int)(_reader.BaseStream.Position - StreamStartPosition) % 4;
@@ -77,9 +122,32 @@ public class XZIndex
}
}
private async Task SkipPaddingAsync(CancellationToken cancellationToken = default)
{
var bytes = (int)(_reader.BaseStream.Position - StreamStartPosition) % 4;
if (bytes > 0)
{
var paddingBytes = await _reader
.ReadBytesAsync(4 - bytes, cancellationToken)
.ConfigureAwait(false);
if (paddingBytes.Any(b => b != 0))
{
throw new InvalidFormatException("Padding bytes were non-null");
}
}
}
private void VerifyCrc32()
{
var crc = _reader.ReadLittleEndianUInt32();
// TODO verify this matches
}
private async Task VerifyCrc32Async(CancellationToken cancellationToken = default)
{
var crc = await _reader
.BaseStream.ReadLittleEndianUInt32Async(cancellationToken)
.ConfigureAwait(false);
// TODO verify this matches
}
}

View File

@@ -1,5 +1,7 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Compressors.Xz;
@@ -18,4 +20,16 @@ public class XZIndexRecord
record.UncompressedSize = br.ReadXZInteger();
return record;
}
public static async Task<XZIndexRecord> FromBinaryReaderAsync(
BinaryReader br,
CancellationToken cancellationToken = default
)
{
var record = new XZIndexRecord();
record.UnpaddedSize = await br.ReadXZIntegerAsync(cancellationToken).ConfigureAwait(false);
record.UncompressedSize = await br.ReadXZIntegerAsync(cancellationToken)
.ConfigureAwait(false);
return record;
}
}

View File

@@ -2,6 +2,8 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
@@ -104,6 +106,35 @@ public sealed class XZStream : XZReadOnlyStream, IStreamStack
return bytesRead;
}
public override async Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken = default
)
{
var bytesRead = 0;
if (_endOfStream)
{
return bytesRead;
}
if (!HeaderIsRead)
{
await ReadHeaderAsync(cancellationToken).ConfigureAwait(false);
}
bytesRead = await ReadBlocksAsync(buffer, offset, count, cancellationToken)
.ConfigureAwait(false);
if (bytesRead < count)
{
_endOfStream = true;
await ReadIndexAsync(cancellationToken).ConfigureAwait(false);
await ReadFooterAsync(cancellationToken).ConfigureAwait(false);
}
return bytesRead;
}
private void ReadHeader()
{
Header = XZHeader.FromStream(BaseStream);
@@ -111,12 +142,31 @@ public sealed class XZStream : XZReadOnlyStream, IStreamStack
HeaderIsRead = true;
}
private async Task ReadHeaderAsync(CancellationToken cancellationToken = default)
{
Header = await XZHeader
.FromStreamAsync(BaseStream, cancellationToken)
.ConfigureAwait(false);
AssertBlockCheckTypeIsSupported();
HeaderIsRead = true;
}
private void ReadIndex() => Index = XZIndex.FromStream(BaseStream, true);
// TODO veryfy Index
private async Task ReadIndexAsync(CancellationToken cancellationToken = default) =>
Index = await XZIndex
.FromStreamAsync(BaseStream, true, cancellationToken)
.ConfigureAwait(false);
// TODO verify Index
private void ReadFooter() => Footer = XZFooter.FromStream(BaseStream);
// TODO verify footer
private async Task ReadFooterAsync(CancellationToken cancellationToken = default) =>
Footer = await XZFooter
.FromStreamAsync(BaseStream, cancellationToken)
.ConfigureAwait(false);
private int ReadBlocks(byte[] buffer, int offset, int count)
{
var bytesRead = 0;
@@ -152,6 +202,48 @@ public sealed class XZStream : XZReadOnlyStream, IStreamStack
return bytesRead;
}
private async Task<int> ReadBlocksAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken = default
)
{
var bytesRead = 0;
if (_currentBlock is null)
{
NextBlock();
}
for (; ; )
{
try
{
if (bytesRead >= count)
{
break;
}
var remaining = count - bytesRead;
var newOffset = offset + bytesRead;
var justRead = await _currentBlock
.ReadAsync(buffer, newOffset, remaining, cancellationToken)
.ConfigureAwait(false);
if (justRead < remaining)
{
NextBlock();
}
bytesRead += justRead;
}
catch (XZIndexMarkerReachedException)
{
break;
}
}
return bytesRead;
}
private void NextBlock() =>
_currentBlock = new XZBlock(BaseStream, Header.BlockCheckType, Header.BlockCheckSize);
}

View File

@@ -0,0 +1,42 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Arj.Headers;
using SharpCompress.Readers;
using SharpCompress.Readers.Arj;
namespace SharpCompress.Factories
{
public class ArjFactory : Factory, IReaderFactory
{
public override string Name => "Arj";
public override ArchiveType? KnownArchiveType => ArchiveType.Arj;
public override IEnumerable<string> GetSupportedExtensions()
{
yield return "arj";
}
public override bool IsArchive(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
var arjHeader = new ArjMainHeader(new ArchiveEncoding());
if (arjHeader.Read(stream) == null)
{
return false;
}
return true;
}
public IReader OpenReader(Stream stream, ReaderOptions? options) =>
ArjReader.Open(stream, options);
}
}
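Once the factory is registered (see the Factory change below), ARJ streams flow through the generic reader entry points. A usage sketch, where "test.arj" and "output" are hypothetical paths:

using System.IO;
using SharpCompress.Common;
using SharpCompress.Readers;

using var stream = File.OpenRead("test.arj");
using var reader = ReaderFactory.Open(stream);
while (reader.MoveToNextEntry())
{
    if (!reader.Entry.IsDirectory)
    {
        reader.WriteEntryToDirectory(
            "output",
            new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
        );
    }
}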

View File

@@ -18,6 +18,7 @@ public abstract class Factory : IFactory
RegisterFactory(new GZipFactory());
RegisterFactory(new TarFactory());
RegisterFactory(new ArcFactory());
RegisterFactory(new ArjFactory());
}
private static readonly HashSet<Factory> _factories = new();

View File

@@ -143,7 +143,6 @@ namespace SharpCompress.IO
var stack = new List<IStreamStack>();
Stream? current = stream as Stream;
int lastBufferingIndex = -1;
int firstSeekableIndex = -1;
Stream? firstSeekableStream = null;
// Traverse the stack, collecting info
@@ -161,7 +160,6 @@ namespace SharpCompress.IO
// Find the first seekable stream (closest to the root)
if (current != null && current.CanSeek)
{
firstSeekableIndex = stack.Count;
firstSeekableStream = current;
}
@@ -169,16 +167,17 @@ namespace SharpCompress.IO
if (lastBufferingIndex != -1)
{
var bufferingStream = stack[lastBufferingIndex];
if (position >= 0 && position < bufferingStream.BufferSize)
var targetBufferPosition =
position - bufferingStream.GetPosition() + bufferingStream.BufferPosition;
if (targetBufferPosition >= 0 && targetBufferPosition <= bufferingStream.BufferSize)
{
bufferingStream.BufferPosition = (int)position;
bufferingStream.BufferPosition = (int)targetBufferPosition;
return position;
}
else
{
// If position is not in buffer, reset buffer and proceed as non-buffering
bufferingStream.BufferPosition = 0;
}
// If position is not in buffer, reset buffer and proceed as non-buffering
bufferingStream.BufferPosition = 0;
// Continue to seek as if no buffer is present
}

View File

@@ -79,7 +79,7 @@ public class SharpCompressStream : Stream, IStreamStack
{
if (value < 0 || value > _bufferedLength)
throw new ArgumentOutOfRangeException(nameof(value));
_internalPosition = value;
_internalPosition = _internalPosition - _bufferPosition + value;
_bufferPosition = value;
ValidateBufferState(); // Add here
}
@@ -90,8 +90,6 @@ public class SharpCompressStream : Stream, IStreamStack
public Stream Stream { get; }
//private MemoryStream _bufferStream = new();
private bool _readOnly; // some archive detection requires seek to be disabled so that it throws, letting the next archive type be tried
//private bool _isRewound;
@@ -293,7 +291,7 @@ public class SharpCompressStream : Stream, IStreamStack
long bufferPos = _internalPosition - _bufferPosition;
if (targetPos >= bufferPos && targetPos < bufferPos + _bufferedLength)
if (targetPos >= bufferPos && targetPos <= bufferPos + _bufferedLength)
{
_bufferPosition = (int)(targetPos - bufferPos); //repoint within the buffer
_internalPosition = targetPos;

View File

@@ -294,7 +294,9 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
return stream;
}
public Task<EntryStream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
public async Task<EntryStream> OpenEntryStreamAsync(
CancellationToken cancellationToken = default
)
{
if (_wroteCurrentEntry)
{
@@ -302,9 +304,9 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
"WriteEntryToAsync or OpenEntryStreamAsync can only be called once."
);
}
var stream = GetEntryStream();
var stream = await GetEntryStreamAsync(cancellationToken).ConfigureAwait(false);
_wroteCurrentEntry = true;
return Task.FromResult(stream);
return stream;
}
/// <summary>
@@ -316,6 +318,10 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
protected virtual EntryStream GetEntryStream() =>
CreateEntryStream(Entry.Parts.First().GetCompressedStream());
protected virtual Task<EntryStream> GetEntryStreamAsync(
CancellationToken cancellationToken = default
) => Task.FromResult(GetEntryStream());
#endregion
IEntry IReader.Entry => Entry;

View File

@@ -30,7 +30,7 @@ namespace SharpCompress.Readers.Arc
protected override IEnumerable<ArcEntry> GetEntries(Stream stream)
{
ArcEntryHeader headerReader = new ArcEntryHeader(new ArchiveEncoding());
ArcEntryHeader headerReader = new ArcEntryHeader(Options.ArchiveEncoding);
ArcEntryHeader? header;
while ((header = headerReader.ReadHeader(stream)) != null)
{

View File

@@ -0,0 +1,89 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Common.Arj;
using SharpCompress.Common.Arj.Headers;
using SharpCompress.Readers.Rar;
namespace SharpCompress.Readers.Arj
{
public abstract class ArjReader : AbstractReader<ArjEntry, ArjVolume>
{
internal ArjReader(ReaderOptions options)
: base(options, ArchiveType.Arj) { }
/// <summary>
/// Derived class must create or manage the Volume itself.
/// AbstractReader.Volume is get-only, so it cannot be set here.
/// </summary>
public override ArjVolume? Volume => _volume;
private ArjVolume? _volume;
/// <summary>
/// Opens an ArjReader for Non-seeking usage with a single volume
/// </summary>
/// <param name="stream"></param>
/// <param name="options"></param>
/// <returns></returns>
public static ArjReader Open(Stream stream, ReaderOptions? options = null)
{
stream.NotNull(nameof(stream));
return new SingleVolumeArjReader(stream, options ?? new ReaderOptions());
}
/// <summary>
/// Opens an ArjReader for Non-seeking usage with multiple volumes
/// </summary>
/// <param name="streams"></param>
/// <param name="options"></param>
/// <returns></returns>
public static ArjReader Open(IEnumerable<Stream> streams, ReaderOptions? options = null)
{
streams.NotNull(nameof(streams));
return new MultiVolumeArjReader(streams, options ?? new ReaderOptions());
}
protected abstract void ValidateArchive(ArjVolume archive);
protected override IEnumerable<ArjEntry> GetEntries(Stream stream)
{
var encoding = new ArchiveEncoding();
var mainHeaderReader = new ArjMainHeader(encoding);
var localHeaderReader = new ArjLocalHeader(encoding);
var mainHeader = mainHeaderReader.Read(stream);
if (mainHeader?.IsVolume == true)
{
throw new MultiVolumeExtractionException(
"Multi volumes are currently not supported"
);
}
if (mainHeader?.IsGabled == true)
{
throw new CryptographicException(
"Password protected archives are currently not supported"
);
}
if (_volume == null)
{
_volume = new ArjVolume(stream, Options, 0);
ValidateArchive(_volume);
}
while (true)
{
var localHeader = localHeaderReader.Read(stream);
if (localHeader == null)
break;
yield return new ArjEntry(new ArjFilePart((ArjLocalHeader)localHeader, stream));
}
}
protected virtual IEnumerable<FilePart> CreateFilePartEnumerableForCurrentEntry() =>
Entry.Parts;
}
}

View File

@@ -0,0 +1,117 @@
#nullable disable
using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Arj;
using SharpCompress.Readers.Rar;
namespace SharpCompress.Readers.Arj;
internal class MultiVolumeArjReader : ArjReader
{
private readonly IEnumerator<Stream> streams;
private Stream tempStream;
internal MultiVolumeArjReader(IEnumerable<Stream> streams, ReaderOptions options)
: base(options) => this.streams = streams.GetEnumerator();
protected override void ValidateArchive(ArjVolume archive) { }
protected override Stream RequestInitialStream()
{
if (streams.MoveNext())
{
return streams.Current;
}
throw new MultiVolumeExtractionException(
"No stream provided when requested by MultiVolumeArjReader"
);
}
internal override bool NextEntryForCurrentStream()
{
if (!base.NextEntryForCurrentStream())
{
// if we've got another stream to process then try it
return streams.MoveNext() && LoadStreamForReading(streams.Current);
}
return true;
}
protected override IEnumerable<FilePart> CreateFilePartEnumerableForCurrentEntry()
{
var enumerator = new MultiVolumeStreamEnumerator(this, streams, tempStream);
tempStream = null;
return enumerator;
}
private class MultiVolumeStreamEnumerator : IEnumerable<FilePart>, IEnumerator<FilePart>
{
private readonly MultiVolumeArjReader reader;
private readonly IEnumerator<Stream> nextReadableStreams;
private Stream tempStream;
private bool isFirst = true;
internal MultiVolumeStreamEnumerator(
MultiVolumeArjReader r,
IEnumerator<Stream> nextReadableStreams,
Stream tempStream
)
{
reader = r;
this.nextReadableStreams = nextReadableStreams;
this.tempStream = tempStream;
}
public IEnumerator<FilePart> GetEnumerator() => this;
IEnumerator IEnumerable.GetEnumerator() => this;
public FilePart Current { get; private set; }
public void Dispose() { }
object IEnumerator.Current => Current;
public bool MoveNext()
{
if (isFirst)
{
Current = reader.Entry.Parts.First();
isFirst = false; // the first stream is already loaded and ready to go
return true;
}
if (!reader.Entry.IsSplitAfter)
{
return false;
}
if (tempStream != null)
{
reader.LoadStreamForReading(tempStream);
tempStream = null;
}
else if (!nextReadableStreams.MoveNext())
{
throw new MultiVolumeExtractionException(
"No stream provided when requested by MultiVolumeArjReader"
);
}
else
{
reader.LoadStreamForReading(nextReadableStreams.Current);
}
Current = reader.Entry.Parts.First();
return true;
}
public void Reset() { }
}
}

View File

@@ -0,0 +1,31 @@
using System;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Common.Arj;
namespace SharpCompress.Readers.Arj
{
internal class SingleVolumeArjReader : ArjReader
{
private readonly Stream _stream;
internal SingleVolumeArjReader(Stream stream, ReaderOptions options)
: base(options)
{
stream.NotNull(nameof(stream));
_stream = stream;
}
protected override Stream RequestInitialStream() => _stream;
protected override void ValidateArchive(ArjVolume archive)
{
if (archive.IsMultiVolume)
{
throw new MultiVolumeExtractionException(
"Streamed archive is a Multi-volume archive. Use a different ArjReader method to extract."
);
}
}
}
}

View File

@@ -113,16 +113,54 @@ public abstract class RarReader : AbstractReader<RarReaderEntry, RarVolume>
);
if (Entry.IsRarV3)
{
return CreateEntryStream(new RarCrcStream(UnpackV1.Value, Entry.FileHeader, stream));
return CreateEntryStream(RarCrcStream.Create(UnpackV1.Value, Entry.FileHeader, stream));
}
if (Entry.FileHeader.FileCrc?.Length > 5)
{
return CreateEntryStream(
new RarBLAKE2spStream(UnpackV2017.Value, Entry.FileHeader, stream)
RarBLAKE2spStream.Create(UnpackV2017.Value, Entry.FileHeader, stream)
);
}
return CreateEntryStream(new RarCrcStream(UnpackV2017.Value, Entry.FileHeader, stream));
return CreateEntryStream(RarCrcStream.Create(UnpackV2017.Value, Entry.FileHeader, stream));
}
protected override async System.Threading.Tasks.Task<EntryStream> GetEntryStreamAsync(
System.Threading.CancellationToken cancellationToken = default
)
{
if (Entry.IsRedir)
{
throw new InvalidOperationException("no stream for redirect entry");
}
var stream = new MultiVolumeReadOnlyStream(
CreateFilePartEnumerableForCurrentEntry().Cast<RarFilePart>(),
this
);
if (Entry.IsRarV3)
{
return CreateEntryStream(
await RarCrcStream
.CreateAsync(UnpackV1.Value, Entry.FileHeader, stream, cancellationToken)
.ConfigureAwait(false)
);
}
if (Entry.FileHeader.FileCrc?.Length > 5)
{
return CreateEntryStream(
await RarBLAKE2spStream
.CreateAsync(UnpackV2017.Value, Entry.FileHeader, stream, cancellationToken)
.ConfigureAwait(false)
);
}
return CreateEntryStream(
await RarCrcStream
.CreateAsync(UnpackV2017.Value, Entry.FileHeader, stream, cancellationToken)
.ConfigureAwait(false)
);
}
}

View File

@@ -70,7 +70,7 @@ public static class ReaderFactory
}
throw new InvalidFormatException(
"Cannot determine compressed stream type. Supported Reader Formats: Arc, Zip, GZip, BZip2, Tar, Rar, LZip, XZ, ZStandard"
"Cannot determine compressed stream type. Supported Reader Formats: Arc, Arj, Zip, GZip, BZip2, Tar, Rar, LZip, XZ, ZStandard"
);
}
}

View File

@@ -2,9 +2,9 @@
<PropertyGroup>
<AssemblyTitle>SharpCompress - Pure C# Decompression/Compression</AssemblyTitle>
<NeutralLanguage>en-US</NeutralLanguage>
<VersionPrefix>0.41.0</VersionPrefix>
<AssemblyVersion>0.41.0</AssemblyVersion>
<FileVersion>0.41.0</FileVersion>
<VersionPrefix>0.42.0</VersionPrefix>
<AssemblyVersion>0.42.0</AssemblyVersion>
<FileVersion>0.42.0</FileVersion>
<Authors>Adam Hathcock</Authors>
<TargetFrameworks>net48;net481;netstandard2.0;net6.0;net8.0</TargetFrameworks>
<AssemblyName>SharpCompress</AssemblyName>
@@ -38,6 +38,9 @@
<PackageReference Include="ZstdSharp.Port" />
<PackageReference Include="Microsoft.SourceLink.GitHub" PrivateAssets="All" />
</ItemGroup>
<ItemGroup Condition=" '$(TargetFramework)' == 'net8.0'">
<PackageReference Include="Microsoft.NET.ILLink.Tasks" PrivateAssets="All" />
</ItemGroup>
<ItemGroup Condition=" '$(TargetFramework)' == 'net48' Or '$(TargetFramework)' == 'net481' ">
<PackageReference Include="Microsoft.Bcl.AsyncInterfaces" />
<PackageReference Include="System.Text.Encoding.CodePages" />

View File

@@ -447,6 +447,31 @@ internal static class Utility
}
#endif
public static async Task<bool> ReadFullyAsync(
this Stream stream,
byte[] buffer,
CancellationToken cancellationToken = default
)
{
var total = 0;
int read;
while (
(
read = await stream
.ReadAsync(buffer, total, buffer.Length - total, cancellationToken)
.ConfigureAwait(false)
) > 0
)
{
total += read;
if (total >= buffer.Length)
{
return true;
}
}
return (total >= buffer.Length);
}
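ReadFullyAsync loops until the buffer is full or the stream ends, returning false on a short read. A usage sketch, assuming a stream and cancellationToken are in scope:

var header = new byte[4];
if (!await stream.ReadFullyAsync(header, cancellationToken).ConfigureAwait(false))
{
    throw new EndOfStreamException(); // fewer than 4 bytes were available
}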
public static string TrimNulls(this string source) => source.Replace('\0', ' ').Trim();
/// <summary>

View File

@@ -48,7 +48,29 @@ internal class ZipCentralDirectoryEntry
var decompressedvalue = zip64 ? uint.MaxValue : (uint)Decompressed;
var headeroffsetvalue = zip64 ? uint.MaxValue : (uint)HeaderOffset;
var extralength = zip64 ? (2 + 2 + 8 + 8 + 8 + 4) : 0;
var version = (byte)(zip64 ? 45 : 20); // Version 20 required for deflate/encryption
// Determine version needed to extract:
// - Version 63 for LZMA, PPMd, BZip2, ZStandard (advanced compression methods)
// - Version 45 for Zip64 extensions (when Zip64HeaderOffset != 0 or actual sizes require it)
// - Version 20 for standard Deflate/None compression
byte version;
if (
compression == ZipCompressionMethod.LZMA
|| compression == ZipCompressionMethod.PPMd
|| compression == ZipCompressionMethod.BZip2
|| compression == ZipCompressionMethod.ZStandard
)
{
version = 63;
}
else if (zip64 || Zip64HeaderOffset != 0)
{
version = 45;
}
else
{
version = 20;
}
var flags = Equals(archiveEncoding.GetEncoding(), Encoding.UTF8)
? HeaderFlags.Efs

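For reference, a summary of the mapping the version selection above implements (no new behavior, just the comment's rules in tabular form):

// compression method            zip64 required   version needed to extract
// Deflate / None (stored)       no               20
// Deflate / None (stored)       yes              45
// LZMA / PPMd / BZip2 / ZStd    either           63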
View File

@@ -160,7 +160,7 @@ public class ZipWriter : AbstractWriter
WriteDirectoryEntry(normalizedName, options);
}
public override async Task WriteDirectoryAsync(
public override Task WriteDirectoryAsync(
string directoryName,
DateTime? modificationTime,
CancellationToken cancellationToken = default
@@ -168,7 +168,7 @@ public class ZipWriter : AbstractWriter
{
// Synchronous implementation is sufficient for directory entries
WriteDirectory(directoryName, modificationTime);
await Task.CompletedTask.ConfigureAwait(false);
return Task.CompletedTask;
}
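The change above drops the async state machine for a body that completes synchronously. The general shape of the idiom, as a sketch (Foo and FooAsync are placeholder names):

public Task FooAsync()
{
    Foo();                     // synchronous work
    return Task.CompletedTask; // no async state machine is allocated
}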
private void WriteDirectoryEntry(string directoryPath, ZipWriterEntryOptions options)

View File

@@ -335,9 +335,9 @@
"net8.0": {
"Microsoft.NET.ILLink.Tasks": {
"type": "Direct",
"requested": "[8.0.17, )",
"resolved": "8.0.17",
"contentHash": "x5/y4l8AtshpBOrCZdlE4txw8K3e3s9meBFeZeR3l8hbbku2V7kK6ojhXvrbjg1rk3G+JqL1BI26gtgc1ZrdUw=="
"requested": "[8.0.21, )",
"resolved": "8.0.21",
"contentHash": "s8H5PZQs50OcNkaB6Si54+v3GWM7vzs6vxFRMlD3aXsbM+aPCtod62gmK0BYWou9diGzmo56j8cIf/PziijDqQ=="
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",

View File

@@ -0,0 +1,61 @@
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Compressors.ADC;
using Xunit;
namespace SharpCompress.Test;
public class AdcAsyncTest : TestBase
{
[Fact]
public async Task TestAdcStreamAsyncWholeChunk()
{
using var decFs = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "adc_decompressed.bin"));
var decompressed = new byte[decFs.Length];
decFs.Read(decompressed, 0, decompressed.Length);
using var cmpFs = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "adc_compressed.bin"));
using var decStream = new ADCStream(cmpFs);
var test = new byte[262144];
await decStream.ReadAsync(test, 0, test.Length);
Assert.Equal(decompressed, test);
}
[Fact]
public async Task TestAdcStreamAsync()
{
using var decFs = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "adc_decompressed.bin"));
var decompressed = new byte[decFs.Length];
decFs.Read(decompressed, 0, decompressed.Length);
using var cmpFs = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "adc_compressed.bin"));
using var decStream = new ADCStream(cmpFs);
using var decMs = new MemoryStream();
var test = new byte[512];
var count = 0;
do
{
count = await decStream.ReadAsync(test, 0, test.Length);
decMs.Write(test, 0, count);
} while (count > 0);
Assert.Equal(decompressed, decMs.ToArray());
}
[Fact]
public async Task TestAdcStreamAsyncWithCancellation()
{
using var cmpFs = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "adc_compressed.bin"));
using var decStream = new ADCStream(cmpFs);
var test = new byte[512];
using var cts = new System.Threading.CancellationTokenSource();
// Read should complete without cancellation
var bytesRead = await decStream.ReadAsync(test, 0, test.Length, cts.Token);
Assert.True(bytesRead > 0);
}
}

View File

@@ -23,36 +23,26 @@ namespace SharpCompress.Test.Arc
public void Arc_Uncompressed_Read() => Read("Arc.uncompressed.arc", CompressionType.None);
[Fact]
public void Arc_Squeezed_Read()
{
ProcessArchive("Arc.squeezed.arc");
}
public void Arc_Squeezed_Read() => Read("Arc.squeezed.arc");
[Fact]
public void Arc_Crunched_Read()
public void Arc_Crunched_Read() => Read("Arc.crunched.arc");
[Theory]
[InlineData("Arc.crunched.largefile.arc", CompressionType.Crunched)]
public void Arc_LargeFile_ShouldThrow(string fileName, CompressionType compressionType)
{
ProcessArchive("Arc.crunched.arc");
var exception = Assert.Throws<NotSupportedException>(() =>
ReadForBufferBoundaryCheck(fileName, compressionType)
);
}
private void ProcessArchive(string archiveName)
[Theory]
[InlineData("Arc.uncompressed.largefile.arc", CompressionType.None)]
[InlineData("Arc.squeezed.largefile.arc", CompressionType.Squeezed)]
public void Arc_LargeFileTest_Read(string fileName, CompressionType compressionType)
{
// Process a given archive by its name
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, archiveName)))
using (IReader reader = ArcReader.Open(stream))
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
}
}
VerifyFilesByExtension();
ReadForBufferBoundaryCheck(fileName, compressionType);
}
}
}

View File

@@ -622,4 +622,36 @@ public class ArchiveTests : ReaderTests
VerifyFiles();
}
}
[Fact]
public void ArchiveFactory_Open_WithPreWrappedStream()
{
// Test that ArchiveFactory.Open works correctly with a stream that's already wrapped
// This addresses the issue where ZIP files fail to open on Linux
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "Zip.bzip2.noEmptyDirs.zip");
// Open with a pre-wrapped stream
using (var fileStream = File.OpenRead(testArchive))
using (var wrappedStream = SharpCompressStream.Create(fileStream, bufferSize: 32768))
using (var archive = ArchiveFactory.Open(wrappedStream))
{
Assert.Equal(ArchiveType.Zip, archive.Type);
Assert.Equal(3, archive.Entries.Count());
}
}
[Fact]
public void ArchiveFactory_Open_WithRawFileStream()
{
// Test that ArchiveFactory.Open works correctly with a raw FileStream
// This is the common use case reported in the issue
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "Zip.bzip2.noEmptyDirs.zip");
using (var stream = File.OpenRead(testArchive))
using (var archive = ArchiveFactory.Open(stream))
{
Assert.Equal(ArchiveType.Zip, archive.Type);
Assert.Equal(3, archive.Entries.Count());
}
}
}

View File

@@ -0,0 +1,99 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
using SharpCompress.Readers.Arj;
using Xunit;
using Xunit.Sdk;
namespace SharpCompress.Test.Arj
{
public class ArjReaderTests : ReaderTests
{
public ArjReaderTests()
{
UseExtensionInsteadOfNameToVerify = true;
UseCaseInsensitiveToVerify = true;
}
[Fact]
public void Arj_Uncompressed_Read() => Read("Arj.store.arj", CompressionType.None);
[Fact]
public void Arj_Method1_Read() => Read("Arj.method1.arj");
[Fact]
public void Arj_Method2_Read() => Read("Arj.method2.arj");
[Fact]
public void Arj_Method3_Read() => Read("Arj.method3.arj");
[Fact]
public void Arj_Method4_Read() => Read("Arj.method4.arj");
[Fact]
public void Arj_Encrypted_Read()
{
var exception = Assert.Throws<CryptographicException>(() => Read("Arj.encrypted.arj"));
}
[Fact]
public void Arj_Multi_Reader()
{
var exception = Assert.Throws<MultiVolumeExtractionException>(() =>
DoArj_Multi_Reader([
"Arj.store.split.arj",
"Arj.store.split.a01",
"Arj.store.split.a02",
"Arj.store.split.a03",
"Arj.store.split.a04",
"Arj.store.split.a05",
])
);
}
[Theory]
[InlineData("Arj.method1.largefile.arj", CompressionType.ArjLZ77)]
[InlineData("Arj.method2.largefile.arj", CompressionType.ArjLZ77)]
[InlineData("Arj.method3.largefile.arj", CompressionType.ArjLZ77)]
public void Arj_LargeFile_ShouldThrow(string fileName, CompressionType compressionType)
{
var exception = Assert.Throws<NotSupportedException>(() =>
ReadForBufferBoundaryCheck(fileName, compressionType)
);
}
[Theory]
[InlineData("Arj.store.largefile.arj", CompressionType.None)]
[InlineData("Arj.method4.largefile.arj", CompressionType.ArjLZ77)]
public void Arj_LargeFileTest_Read(string fileName, CompressionType compressionType)
{
ReadForBufferBoundaryCheck(fileName, compressionType);
}
private void DoArj_Multi_Reader(string[] archives)
{
using (
var reader = ArjReader.Open(
archives
.Select(s => Path.Combine(TEST_ARCHIVES_PATH, s))
.Select(p => File.OpenRead(p))
)
)
{
while (reader.MoveToNextEntry())
{
reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
}
VerifyFiles();
}
}
}

View File

@@ -0,0 +1,231 @@
using System;
using System.Buffers;
using System.IO;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Compressors.BZip2;
using Xunit;
namespace SharpCompress.Test.BZip2;
public class BZip2StreamAsyncTests
{
private byte[] CreateTestData(int size)
{
var data = new byte[size];
// Create compressible data with repetitive pattern
for (int i = 0; i < size; i++)
{
data[i] = (byte)('A' + (i % 26));
}
return data;
}
[Fact]
public async Task BZip2CompressDecompressAsyncTest()
{
var testData = CreateTestData(10000);
byte[] compressed;
// Compress
using (var memoryStream = new MemoryStream())
{
using (
var bzip2Stream = new BZip2Stream(
memoryStream,
SharpCompress.Compressors.CompressionMode.Compress,
false
)
)
{
await bzip2Stream.WriteAsync(testData, 0, testData.Length);
(bzip2Stream as BZip2Stream)?.Finish();
}
compressed = memoryStream.ToArray();
}
// Verify compression occurred
Assert.True(compressed.Length > 0);
Assert.True(compressed.Length < testData.Length);
// Decompress
byte[] decompressed;
using (var memoryStream = new MemoryStream(compressed))
{
using (
var bzip2Stream = new BZip2Stream(
memoryStream,
SharpCompress.Compressors.CompressionMode.Decompress,
false
)
)
{
decompressed = new byte[testData.Length];
var totalRead = 0;
int bytesRead;
while (
(
bytesRead = await bzip2Stream.ReadAsync(
decompressed,
totalRead,
testData.Length - totalRead
)
) > 0
)
{
totalRead += bytesRead;
}
}
}
// Verify decompression
Assert.Equal(testData, decompressed);
}
[Fact]
public async Task BZip2ReadAsyncWithCancellationTest()
{
var testData = Encoding.ASCII.GetBytes(new string('A', 5000)); // Repetitive data compresses well
byte[] compressed;
// Compress
using (var memoryStream = new MemoryStream())
{
using (
var bzip2Stream = new BZip2Stream(
memoryStream,
SharpCompress.Compressors.CompressionMode.Compress,
false
)
)
{
await bzip2Stream.WriteAsync(testData, 0, testData.Length);
(bzip2Stream as BZip2Stream)?.Finish();
}
compressed = memoryStream.ToArray();
}
// Decompress with cancellation support
using (var memoryStream = new MemoryStream(compressed))
{
using (
var bzip2Stream = new BZip2Stream(
memoryStream,
SharpCompress.Compressors.CompressionMode.Decompress,
false
)
)
{
var buffer = new byte[1024];
using var cts = new System.Threading.CancellationTokenSource();
// Read should complete without cancellation
var bytesRead = await bzip2Stream.ReadAsync(buffer, 0, buffer.Length, cts.Token);
Assert.True(bytesRead > 0);
}
}
}
[Fact]
public async Task BZip2MultipleAsyncWritesTest()
{
using (var memoryStream = new MemoryStream())
{
using (
var bzip2Stream = new BZip2Stream(
memoryStream,
SharpCompress.Compressors.CompressionMode.Compress,
false
)
)
{
var data1 = Encoding.ASCII.GetBytes("Hello ");
var data2 = Encoding.ASCII.GetBytes("World");
var data3 = Encoding.ASCII.GetBytes("!");
await bzip2Stream.WriteAsync(data1, 0, data1.Length);
await bzip2Stream.WriteAsync(data2, 0, data2.Length);
await bzip2Stream.WriteAsync(data3, 0, data3.Length);
(bzip2Stream as BZip2Stream)?.Finish();
}
var compressed = memoryStream.ToArray();
Assert.True(compressed.Length > 0);
// Decompress and verify
using (var readStream = new MemoryStream(compressed))
{
using (
var bzip2Stream = new BZip2Stream(
readStream,
SharpCompress.Compressors.CompressionMode.Decompress,
false
)
)
{
var result = new StringBuilder();
var buffer = new byte[256];
int bytesRead;
while ((bytesRead = await bzip2Stream.ReadAsync(buffer, 0, buffer.Length)) > 0)
{
result.Append(Encoding.ASCII.GetString(buffer, 0, bytesRead));
}
Assert.Equal("Hello World!", result.ToString());
}
}
}
}
[Fact]
public async Task BZip2LargeDataAsyncTest()
{
var largeData = CreateTestData(100000);
// Compress
byte[] compressed;
using (var memoryStream = new MemoryStream())
{
using (
var bzip2Stream = new BZip2Stream(
memoryStream,
SharpCompress.Compressors.CompressionMode.Compress,
false
)
)
{
await bzip2Stream.WriteAsync(largeData, 0, largeData.Length);
(bzip2Stream as BZip2Stream)?.Finish();
}
compressed = memoryStream.ToArray();
}
// Decompress
byte[] decompressed;
using (var memoryStream = new MemoryStream(compressed))
{
using (
var bzip2Stream = new BZip2Stream(
memoryStream,
SharpCompress.Compressors.CompressionMode.Decompress,
false
)
)
{
decompressed = new byte[largeData.Length];
var totalRead = 0;
int bytesRead;
var buffer = new byte[4096];
while ((bytesRead = await bzip2Stream.ReadAsync(buffer, 0, buffer.Length)) > 0)
{
Array.Copy(buffer, 0, decompressed, totalRead, bytesRead);
totalRead += bytesRead;
}
}
}
// Verify
Assert.Equal(largeData, decompressed);
}
}

View File

@@ -6,6 +6,8 @@ using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Readers;
using SharpCompress.Writers;
using Xunit;
@@ -191,12 +193,7 @@ public class AsyncTests : TestBase
// Test async write with GZipStream
using (var fileStream = File.Create(compressedPath))
using (
var gzipStream = new Compressors.Deflate.GZipStream(
fileStream,
Compressors.CompressionMode.Compress
)
)
using (var gzipStream = new GZipStream(fileStream, CompressionMode.Compress))
{
await gzipStream.WriteAsync(testData, 0, testData.Length);
await gzipStream.FlushAsync();
@@ -207,12 +204,7 @@ public class AsyncTests : TestBase
// Test async read with GZipStream
using (var fileStream = File.OpenRead(compressedPath))
using (
var gzipStream = new Compressors.Deflate.GZipStream(
fileStream,
Compressors.CompressionMode.Decompress
)
)
using (var gzipStream = new GZipStream(fileStream, CompressionMode.Decompress))
{
var decompressed = new byte[testData.Length];
var totalRead = 0;

View File

@@ -0,0 +1,725 @@
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Rar;
using SharpCompress.Common;
using SharpCompress.Compressors.LZMA.Utilites;
using SharpCompress.Readers;
using Xunit;
namespace SharpCompress.Test.Rar;
public class RarArchiveAsyncTests : ArchiveTests
{
[Fact]
public async Task Rar_EncryptedFileAndHeader_Archive_Async() =>
await ReadRarPasswordAsync("Rar.encrypted_filesAndHeader.rar", "test");
[Fact]
public async Task Rar_EncryptedFileAndHeader_NoPasswordExceptionTest_Async() =>
await Assert.ThrowsAsync(
typeof(CryptographicException),
async () => await ReadRarPasswordAsync("Rar.encrypted_filesAndHeader.rar", null)
);
[Fact]
public async Task Rar5_EncryptedFileAndHeader_Archive_Async() =>
await ReadRarPasswordAsync("Rar5.encrypted_filesAndHeader.rar", "test");
[Fact]
public async Task Rar5_EncryptedFileAndHeader_Archive_Err_Async() =>
await Assert.ThrowsAsync(
typeof(CryptographicException),
async () => await ReadRarPasswordAsync("Rar5.encrypted_filesAndHeader.rar", "failed")
);
[Fact]
public async Task Rar5_EncryptedFileAndHeader_NoPasswordExceptionTest_Async() =>
await Assert.ThrowsAsync(
typeof(CryptographicException),
async () => await ReadRarPasswordAsync("Rar5.encrypted_filesAndHeader.rar", null)
);
[Fact]
public async Task Rar_EncryptedFileOnly_Archive_Async() =>
await ReadRarPasswordAsync("Rar.encrypted_filesOnly.rar", "test");
[Fact]
public async Task Rar_EncryptedFileOnly_Archive_Err_Async() =>
await Assert.ThrowsAsync(
typeof(CryptographicException),
async () => await ReadRarPasswordAsync("Rar5.encrypted_filesOnly.rar", "failed")
);
[Fact]
public async Task Rar5_EncryptedFileOnly_Archive_Async() =>
await ReadRarPasswordAsync("Rar5.encrypted_filesOnly.rar", "test");
[Fact]
public async Task Rar_Encrypted_Archive_Async() =>
await ReadRarPasswordAsync("Rar.Encrypted.rar", "test");
[Fact]
public async Task Rar5_Encrypted_Archive_Async() =>
await ReadRarPasswordAsync("Rar5.encrypted_filesAndHeader.rar", "test");
private async Task ReadRarPasswordAsync(string testArchive, string? password)
{
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, testArchive)))
using (
var archive = RarArchive.Open(
stream,
new ReaderOptions { Password = password, LeaveStreamOpen = true }
)
)
{
foreach (var entry in archive.Entries)
{
if (!entry.IsDirectory)
{
Assert.Equal(CompressionType.Rar, entry.CompressionType);
await entry.WriteToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
}
}
VerifyFiles();
}
[Fact]
public async Task Rar_Multi_Archive_Encrypted_Async() =>
await Assert.ThrowsAsync<InvalidFormatException>(async () =>
await ArchiveFileReadPasswordAsync("Rar.EncryptedParts.part01.rar", "test")
);
protected async Task ArchiveFileReadPasswordAsync(string archiveName, string password)
{
using (
var archive = RarArchive.Open(
Path.Combine(TEST_ARCHIVES_PATH, archiveName),
new ReaderOptions { Password = password, LeaveStreamOpen = true }
)
)
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
await entry.WriteToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
}
VerifyFiles();
}
[Fact]
public async Task Rar_None_ArchiveStreamRead_Async() =>
await ArchiveStreamReadAsync("Rar.none.rar");
[Fact]
public async Task Rar5_None_ArchiveStreamRead_Async() =>
await ArchiveStreamReadAsync("Rar5.none.rar");
[Fact]
public async Task Rar_ArchiveStreamRead_Async() => await ArchiveStreamReadAsync("Rar.rar");
[Fact]
public async Task Rar5_ArchiveStreamRead_Async() => await ArchiveStreamReadAsync("Rar5.rar");
[Fact]
public async Task Rar_test_invalid_exttime_ArchiveStreamRead_Async() =>
await DoRar_test_invalid_exttime_ArchiveStreamReadAsync("Rar.test_invalid_exttime.rar");
private async Task DoRar_test_invalid_exttime_ArchiveStreamReadAsync(string filename)
{
using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, filename));
using var archive = ArchiveFactory.Open(stream);
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
await entry.WriteToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
}
[Fact]
public async Task Rar_Jpg_ArchiveStreamRead_Async()
{
using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Rar.jpeg.jpg"));
using (var archive = RarArchive.Open(stream, new ReaderOptions { LookForHeader = true }))
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
await entry.WriteToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
}
VerifyFiles();
}
[Fact]
public async Task Rar_IsSolidArchiveCheck_Async() =>
await DoRar_IsSolidArchiveCheckAsync("Rar.rar");
[Fact]
public async Task Rar5_IsSolidArchiveCheck_Async() =>
await DoRar_IsSolidArchiveCheckAsync("Rar5.rar");
private async Task DoRar_IsSolidArchiveCheckAsync(string filename)
{
using (var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, filename)))
{
using var archive = RarArchive.Open(stream);
Assert.False(archive.IsSolid);
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
await entry.WriteToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
}
VerifyFiles();
}
[Fact]
public async Task Rar_IsSolidEntryStreamCheck_Async() =>
await DoRar_IsSolidEntryStreamCheckAsync("Rar.solid.rar");
private async Task DoRar_IsSolidEntryStreamCheckAsync(string filename)
{
using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, filename));
using var archive = RarArchive.Open(stream);
Assert.True(archive.IsSolid);
IArchiveEntry[] entries = archive.Entries.Where(a => !a.IsDirectory).ToArray();
Assert.NotInRange(entries.Length, 0, 1);
Assert.False(entries[0].IsSolid);
var testEntry = entries[1];
Assert.True(testEntry.IsSolid);
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
using (var crcStream = new CrcCheckStream((uint)entry.Crc))
{
using var eStream = await entry.OpenEntryStreamAsync();
await eStream.CopyToAsync(crcStream);
}
if (entry == testEntry)
{
break;
}
}
}
[Fact]
public async Task Rar_Solid_ArchiveStreamRead_Async() =>
await ArchiveStreamReadAsync("Rar.solid.rar");
[Fact]
public async Task Rar5_Solid_ArchiveStreamRead_Async() =>
await ArchiveStreamReadAsync("Rar5.solid.rar");
[Fact]
public async Task Rar_Solid_StreamRead_Extract_All_Async() =>
await ArchiveStreamReadExtractAllAsync("Rar.solid.rar", CompressionType.Rar);
[Fact]
public async Task Rar5_Solid_StreamRead_Extract_All_Async() =>
await ArchiveStreamReadExtractAllAsync("Rar5.solid.rar", CompressionType.Rar);
[Fact]
public async Task Rar_Multi_ArchiveStreamRead_Async() =>
await DoRar_Multi_ArchiveStreamReadAsync(
[
"Rar.multi.part01.rar",
"Rar.multi.part02.rar",
"Rar.multi.part03.rar",
"Rar.multi.part04.rar",
"Rar.multi.part05.rar",
"Rar.multi.part06.rar",
],
false
);
[Fact]
public async Task Rar5_Multi_ArchiveStreamRead_Async() =>
await DoRar_Multi_ArchiveStreamReadAsync(
[
"Rar5.multi.part01.rar",
"Rar5.multi.part02.rar",
"Rar5.multi.part03.rar",
"Rar5.multi.part04.rar",
"Rar5.multi.part05.rar",
"Rar5.multi.part06.rar",
],
false
);
private async Task DoRar_Multi_ArchiveStreamReadAsync(string[] archives, bool isSolid)
{
using var archive = RarArchive.Open(
archives.Select(s => Path.Combine(TEST_ARCHIVES_PATH, s)).Select(File.OpenRead)
);
Assert.Equal(isSolid, archive.IsSolid);
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
await entry.WriteToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
}
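// Note: the volume streams above are supplied in part order; a multi-part set
// is read sequentially, so reordering the file list would break extraction.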
[Fact]
public async Task Rar5_MultiSolid_ArchiveStreamRead_Async() =>
await DoRar_Multi_ArchiveStreamReadAsync(
[
"Rar.multi.solid.part01.rar",
"Rar.multi.solid.part02.rar",
"Rar.multi.solid.part03.rar",
"Rar.multi.solid.part04.rar",
"Rar.multi.solid.part05.rar",
"Rar.multi.solid.part06.rar",
],
true
);
[Fact]
public async Task RarNoneArchiveFileRead_Async() => await ArchiveFileReadAsync("Rar.none.rar");
[Fact]
public async Task Rar5NoneArchiveFileRead_Async() =>
await ArchiveFileReadAsync("Rar5.none.rar");
[Fact]
public async Task Rar_ArchiveFileRead_Async() => await ArchiveFileReadAsync("Rar.rar");
[Fact]
public async Task Rar5_ArchiveFileRead_Async() => await ArchiveFileReadAsync("Rar5.rar");
[Fact]
public async Task Rar_ArchiveFileRead_HasDirectories_Async() =>
await DoRar_ArchiveFileRead_HasDirectoriesAsync("Rar.rar");
[Fact]
public async Task Rar5_ArchiveFileRead_HasDirectories_Async() =>
await DoRar_ArchiveFileRead_HasDirectoriesAsync("Rar5.rar");
private Task DoRar_ArchiveFileRead_HasDirectoriesAsync(string filename)
{
using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, filename));
using var archive = RarArchive.Open(stream);
Assert.False(archive.IsSolid);
Assert.Contains(true, archive.Entries.Select(entry => entry.IsDirectory));
return Task.CompletedTask;
}
[Fact]
public async Task Rar_Jpg_ArchiveFileRead_Async()
{
using (
var archive = RarArchive.Open(
Path.Combine(TEST_ARCHIVES_PATH, "Rar.jpeg.jpg"),
new ReaderOptions { LookForHeader = true }
)
)
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
await entry.WriteToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
}
VerifyFiles();
}
[Fact]
public async Task Rar_Solid_ArchiveFileRead_Async() =>
await ArchiveFileReadAsync("Rar.solid.rar");
[Fact]
public async Task Rar5_Solid_ArchiveFileRead_Async() =>
await ArchiveFileReadAsync("Rar5.solid.rar");
[Fact]
public async Task Rar2_Multi_ArchiveStreamRead_Async() =>
await DoRar_Multi_ArchiveStreamReadAsync(
[
"Rar2.multi.rar",
"Rar2.multi.r00",
"Rar2.multi.r01",
"Rar2.multi.r02",
"Rar2.multi.r03",
"Rar2.multi.r04",
"Rar2.multi.r05",
],
false
);
[Fact]
public async Task Rar2_Multi_ArchiveFileRead_Async() =>
await ArchiveFileReadAsync("Rar2.multi.rar");
[Fact]
public async Task Rar2_ArchiveFileRead_Async() => await ArchiveFileReadAsync("Rar2.rar");
[Fact]
public async Task Rar15_ArchiveFileRead_Async()
{
UseExtensionInsteadOfNameToVerify = true;
UseCaseInsensitiveToVerify = true;
await ArchiveFileReadAsync("Rar15.rar");
}
[Fact]
public void Rar15_ArchiveVersionTest_Async()
{
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "Rar15.rar");
using var archive = RarArchive.Open(testArchive);
Assert.Equal(1, archive.MinVersion);
Assert.Equal(1, archive.MaxVersion);
}
[Fact]
public void Rar2_ArchiveVersionTest_Async()
{
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "Rar2.rar");
using var archive = RarArchive.Open(testArchive);
Assert.Equal(2, archive.MinVersion);
Assert.Equal(2, archive.MaxVersion);
}
[Fact]
public void Rar4_ArchiveVersionTest_Async()
{
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "Rar4.multi.part01.rar");
using var archive = RarArchive.Open(testArchive);
Assert.Equal(3, archive.MinVersion);
Assert.Equal(4, archive.MaxVersion);
}
[Fact]
public void Rar5_ArchiveVersionTest_Async()
{
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "Rar5.solid.rar");
using var archive = RarArchive.Open(testArchive);
Assert.Equal(5, archive.MinVersion);
Assert.Equal(6, archive.MaxVersion);
}
[Fact]
public async Task Rar4_Multi_ArchiveFileRead_Async() =>
await ArchiveFileReadAsync("Rar4.multi.part01.rar");
[Fact]
public async Task Rar4_ArchiveFileRead_Async() => await ArchiveFileReadAsync("Rar4.rar");
[Fact]
public void Rar_GetPartsSplit_Async() =>
ArchiveGetParts(
new[]
{
"Rar4.split.001",
"Rar4.split.002",
"Rar4.split.003",
"Rar4.split.004",
"Rar4.split.005",
"Rar4.split.006",
}
);
[Fact]
public void Rar_GetPartsOld_Async() =>
ArchiveGetParts(
new[]
{
"Rar2.multi.rar",
"Rar2.multi.r00",
"Rar2.multi.r01",
"Rar2.multi.r02",
"Rar2.multi.r03",
"Rar2.multi.r04",
"Rar2.multi.r05",
}
);
[Fact]
public void Rar_GetPartsNew_Async() =>
ArchiveGetParts(
new[]
{
"Rar4.multi.part01.rar",
"Rar4.multi.part02.rar",
"Rar4.multi.part03.rar",
"Rar4.multi.part04.rar",
"Rar4.multi.part05.rar",
"Rar4.multi.part06.rar",
"Rar4.multi.part07.rar",
}
);
[Fact]
public async Task Rar4_Multi_ArchiveStreamRead_Async() =>
await DoRar_Multi_ArchiveStreamReadAsync(
[
"Rar4.multi.part01.rar",
"Rar4.multi.part02.rar",
"Rar4.multi.part03.rar",
"Rar4.multi.part04.rar",
"Rar4.multi.part05.rar",
"Rar4.multi.part06.rar",
"Rar4.multi.part07.rar",
],
false
);
[Fact]
public async Task Rar4_Split_ArchiveStreamRead_Async() =>
await ArchiveStreamMultiReadAsync(
null,
[
"Rar4.split.001",
"Rar4.split.002",
"Rar4.split.003",
"Rar4.split.004",
"Rar4.split.005",
"Rar4.split.006",
]
);
[Fact]
public async Task Rar4_Multi_ArchiveFirstFileRead_Async() =>
await ArchiveFileReadAsync("Rar4.multi.part01.rar");
[Fact]
public async Task Rar4_Split_ArchiveFirstFileRead_Async() =>
await ArchiveFileReadAsync("Rar4.split.001");
[Fact]
public async Task Rar4_Split_ArchiveStreamFirstFileRead_Async() =>
await ArchiveStreamMultiReadAsync(null, ["Rar4.split.001"]);
[Fact]
public async Task Rar4_Split_ArchiveOpen_Async() =>
await ArchiveOpenStreamReadAsync(
null,
"Rar4.split.001",
"Rar4.split.002",
"Rar4.split.003",
"Rar4.split.004",
"Rar4.split.005",
"Rar4.split.006"
);
[Fact]
public async Task Rar4_Multi_ArchiveOpen_Async() =>
await ArchiveOpenStreamReadAsync(
null,
"Rar4.multi.part01.rar",
"Rar4.multi.part02.rar",
"Rar4.multi.part03.rar",
"Rar4.multi.part04.rar",
"Rar4.multi.part05.rar",
"Rar4.multi.part06.rar",
"Rar4.multi.part07.rar"
);
[Fact]
public void Rar4_Multi_ArchiveOpenEntryVolumeIndexTest_Async() =>
ArchiveOpenEntryVolumeIndexTest(
[
[0, 1],
[1, 5],
[5, 6],
],
null,
"Rar4.multi.part01.rar",
"Rar4.multi.part02.rar",
"Rar4.multi.part03.rar",
"Rar4.multi.part04.rar",
"Rar4.multi.part05.rar",
"Rar4.multi.part06.rar",
"Rar4.multi.part07.rar"
);
[Fact]
public async Task Rar_Multi_ArchiveFileRead_Async() =>
await ArchiveFileReadAsync("Rar.multi.part01.rar");
[Fact]
public async Task Rar5_Multi_ArchiveFileRead_Async() =>
await ArchiveFileReadAsync("Rar5.multi.part01.rar");
[Fact]
public void Rar_IsFirstVolume_True_Async() => DoRar_IsFirstVolume_True("Rar.multi.part01.rar");
[Fact]
public void Rar5_IsFirstVolume_True_Async() =>
DoRar_IsFirstVolume_True("Rar5.multi.part01.rar");
private void DoRar_IsFirstVolume_True(string firstFilename)
{
using var archive = RarArchive.Open(Path.Combine(TEST_ARCHIVES_PATH, firstFilename));
Assert.True(archive.IsMultipartVolume());
Assert.True(archive.IsFirstVolume());
}
[Fact]
public void Rar_IsFirstVolume_False_Async() =>
DoRar_IsFirstVolume_False("Rar.multi.part03.rar");
[Fact]
public void Rar5_IsFirstVolume_False_Async() =>
DoRar_IsFirstVolume_False("Rar5.multi.part03.rar");
private void DoRar_IsFirstVolume_False(string notFirstFilename)
{
using var archive = RarArchive.Open(Path.Combine(TEST_ARCHIVES_PATH, notFirstFilename));
Assert.True(archive.IsMultipartVolume());
Assert.False(archive.IsFirstVolume());
}
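// Illustrative sketch combining the two checks above: before extracting from an
// arbitrary .rar file, confirm it is either a standalone archive or the first
// part of a multi-part set. The helper name is hypothetical.
private static bool IsUsableStartingVolume(string path)
{
using var archive = RarArchive.Open(path);
return !archive.IsMultipartVolume() || archive.IsFirstVolume();
}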
[Fact]
public async Task Rar5_CRC_Blake2_Archive_Async() =>
await ArchiveFileReadAsync("Rar5.crc_blake2.rar");
[Fact]
public void Rar_Iterate_Archive_Async() =>
ArchiveFileSkip("Rar.rar", "Failure jpg exe Empty jpg\\test.jpg exe\\test.exe тест.txt");
[Fact]
public void Rar2_Iterate_Archive_Async() =>
ArchiveFileSkip("Rar2.rar", "Failure Empty тест.txt jpg\\test.jpg exe\\test.exe jpg exe");
[Fact]
public void Rar4_Iterate_Archive_Async() =>
ArchiveFileSkip("Rar4.rar", "Failure Empty jpg exe тест.txt jpg\\test.jpg exe\\test.exe");
[Fact]
public void Rar5_Iterate_Archive_Async() =>
ArchiveFileSkip("Rar5.rar", "Failure jpg exe Empty тест.txt jpg\\test.jpg exe\\test.exe");
[Fact]
public void Rar_Encrypted_Iterate_Archive_Async() =>
ArchiveFileSkip(
"Rar.encrypted_filesOnly.rar",
"Failure jpg exe Empty тест.txt jpg\\test.jpg exe\\test.exe"
);
[Fact]
public void Rar5_Encrypted_Iterate_Archive_Async() =>
ArchiveFileSkip(
"Rar5.encrypted_filesOnly.rar",
"Failure jpg exe Empty тест.txt jpg\\test.jpg exe\\test.exe"
);
private async Task ArchiveStreamReadAsync(string testArchive)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
using var stream = File.OpenRead(testArchive);
using var archive = ArchiveFactory.Open(stream);
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
await entry.WriteToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
VerifyFiles();
}
private async Task ArchiveStreamReadExtractAllAsync(
string testArchive,
CompressionType compression
)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
using var stream = File.OpenRead(testArchive);
using var archive = ArchiveFactory.Open(stream);
Assert.True(archive.IsSolid);
using (var reader = archive.ExtractAllEntries())
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
Assert.Equal(compression, reader.Entry.CompressionType);
await reader.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
}
}
VerifyFiles();
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
await entry.WriteToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
VerifyFiles();
}
private async Task ArchiveFileReadAsync(string testArchive)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
using var archive = ArchiveFactory.Open(testArchive);
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
await entry.WriteToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
VerifyFiles();
}
private async Task ArchiveStreamMultiReadAsync(
ReaderOptions? readerOptions,
params string[] testArchives
)
{
var paths = testArchives.Select(x => Path.Combine(TEST_ARCHIVES_PATH, x));
using var archive = ArchiveFactory.Open(paths.Select(a => new FileInfo(a)), readerOptions);
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
await entry.WriteToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
VerifyFiles();
}
private async Task ArchiveOpenStreamReadAsync(
ReaderOptions? readerOptions,
params string[] testArchives
)
{
var paths = testArchives.Select(x => Path.Combine(TEST_ARCHIVES_PATH, x));
using var archive = ArchiveFactory.Open(paths.Select(f => new FileInfo(f)), readerOptions);
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
await entry.WriteToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
VerifyFiles();
}
}

@@ -0,0 +1,361 @@
using System;
using SharpCompress.Compressors.Rar;
using Xunit;
namespace SharpCompress.Test.Compressors.Rar;
public class RarCRCTest
{
[Fact]
public void CheckCrc_SingleByte_ReturnsCorrectCrc()
{
// Arrange
uint startCrc = 0;
byte testByte = 0x42;
// Act
var result = RarCRC.CheckCrc(startCrc, testByte);
// Assert
Assert.NotEqual(0u, result);
}
[Fact]
public void CheckCrc_SingleByte_WithNonZeroStartCrc()
{
// Arrange
uint startCrc = 0x12345678;
byte testByte = 0xAB;
// Act
var result = RarCRC.CheckCrc(startCrc, testByte);
// Assert
Assert.NotEqual(startCrc, result);
}
[Fact]
public void CheckCrc_SingleByte_DifferentBytesProduceDifferentCrcs()
{
// Arrange
uint startCrc = 0;
byte byte1 = 0x01;
byte byte2 = 0x02;
// Act
var result1 = RarCRC.CheckCrc(startCrc, byte1);
var result2 = RarCRC.CheckCrc(startCrc, byte2);
// Assert
Assert.NotEqual(result1, result2);
}
[Fact]
public void CheckCrc_EmptySpan_ReturnsStartCrc()
{
// Arrange
uint startCrc = 0x12345678;
ReadOnlySpan<byte> data = ReadOnlySpan<byte>.Empty;
// Act
var result = RarCRC.CheckCrc(startCrc, data, 0, 0);
// Assert
Assert.Equal(startCrc, result);
}
[Fact]
public void CheckCrc_SingleByteSpan_MatchesSingleByteMethod()
{
// Arrange
uint startCrc = 0;
byte testByte = 0x42;
ReadOnlySpan<byte> data = stackalloc byte[] { testByte };
// Act
var resultSingleByte = RarCRC.CheckCrc(startCrc, testByte);
var resultSpan = RarCRC.CheckCrc(startCrc, data, 0, 1);
// Assert
Assert.Equal(resultSingleByte, resultSpan);
}
[Fact]
public void CheckCrc_MultipleBytes_ProducesConsistentResult()
{
// Arrange
uint startCrc = 0;
ReadOnlySpan<byte> data = stackalloc byte[] { 0x01, 0x02, 0x03, 0x04 };
// Act
var result1 = RarCRC.CheckCrc(startCrc, data, 0, 4);
var result2 = RarCRC.CheckCrc(startCrc, data, 0, 4);
// Assert
Assert.Equal(result1, result2);
}
[Fact]
public void CheckCrc_MultipleBytes_IncrementalMatchesComplete()
{
// Arrange
uint startCrc = 0;
ReadOnlySpan<byte> data = stackalloc byte[] { 0x01, 0x02, 0x03, 0x04 };
// Act - calculate incrementally
var crc1 = RarCRC.CheckCrc(startCrc, data[0]);
var crc2 = RarCRC.CheckCrc(crc1, data[1]);
var crc3 = RarCRC.CheckCrc(crc2, data[2]);
var crc4 = RarCRC.CheckCrc(crc3, data[3]);
// Act - calculate all at once
var crcComplete = RarCRC.CheckCrc(startCrc, data, 0, 4);
// Assert
Assert.Equal(crc4, crcComplete);
}
[Fact]
public void CheckCrc_WithOffset_ProcessesCorrectBytes()
{
// Arrange
uint startCrc = 0;
ReadOnlySpan<byte> data = stackalloc byte[] { 0xFF, 0xFF, 0x01, 0x02, 0x03, 0xFF, 0xFF };
// Act
var result = RarCRC.CheckCrc(startCrc, data, 2, 3);
var expected = RarCRC.CheckCrc(startCrc, stackalloc byte[] { 0x01, 0x02, 0x03 }, 0, 3);
// Assert
Assert.Equal(expected, result);
}
[Fact]
public void CheckCrc_WithCountSmallerThanData_ProcessesOnlyCount()
{
// Arrange
uint startCrc = 0;
ReadOnlySpan<byte> data = stackalloc byte[] { 0x01, 0x02, 0x03, 0x04, 0x05 };
// Act
var result = RarCRC.CheckCrc(startCrc, data, 0, 3);
var expected = RarCRC.CheckCrc(startCrc, stackalloc byte[] { 0x01, 0x02, 0x03 }, 0, 3);
// Assert
Assert.Equal(expected, result);
}
[Fact]
public void CheckCrc_CountLargerThanRemainingData_ProcessesOnlyAvailableData()
{
// Arrange
uint startCrc = 0;
ReadOnlySpan<byte> data = stackalloc byte[] { 0x01, 0x02, 0x03 };
// Act
var result = RarCRC.CheckCrc(startCrc, data, 0, 100);
var expected = RarCRC.CheckCrc(startCrc, data, 0, 3);
// Assert
Assert.Equal(expected, result);
}
[Fact]
public void CheckCrc_KnownTestVector_HelloWorld()
{
// Arrange - "Hello, World!" in ASCII
uint startCrc = 0xFFFFFFFF; // CRC32 typically starts with inverted bits
var data = System.Text.Encoding.ASCII.GetBytes("Hello, World!");
// Act
var result = RarCRC.CheckCrc(startCrc, data, 0, data.Length);
// Assert - verify it produces a result (exact value depends on CRC32 variant)
Assert.NotEqual(startCrc, result);
Assert.NotEqual(0u, result);
}
[Fact]
public void CheckCrc_AllZeros_ProducesConsistentResult()
{
// Arrange
uint startCrc = 0;
ReadOnlySpan<byte> data = stackalloc byte[10]; // all zeros
// Act
var result = RarCRC.CheckCrc(startCrc, data, 0, 10);
// Assert - verify it's deterministic
var result2 = RarCRC.CheckCrc(startCrc, data, 0, 10);
Assert.Equal(result, result2);
// CRC of all zeros from startCrc=0 can be 0, that's valid
}
[Fact]
public void CheckCrc_AllOnes_ProducesConsistentResult()
{
// Arrange
uint startCrc = 0;
Span<byte> data = stackalloc byte[10];
data.Fill(0xFF);
// Act
var result = RarCRC.CheckCrc(startCrc, data, 0, 10);
// Assert
var result2 = RarCRC.CheckCrc(startCrc, data, 0, 10);
Assert.Equal(result, result2);
Assert.NotEqual(0u, result);
}
[Fact]
public void CheckCrc_OrderMatters()
{
// Arrange
uint startCrc = 0;
ReadOnlySpan<byte> data1 = stackalloc byte[] { 0x01, 0x02 };
ReadOnlySpan<byte> data2 = stackalloc byte[] { 0x02, 0x01 };
// Act
var result1 = RarCRC.CheckCrc(startCrc, data1, 0, 2);
var result2 = RarCRC.CheckCrc(startCrc, data2, 0, 2);
// Assert - different order should produce different CRC
Assert.NotEqual(result1, result2);
}
[Fact]
public void CheckCrc_LargeData_ProcessesCorrectly()
{
// Arrange
uint startCrc = 0;
var data = new byte[1024];
for (int i = 0; i < data.Length; i++)
{
data[i] = (byte)(i % 256);
}
// Act
var result = RarCRC.CheckCrc(startCrc, data, 0, data.Length);
// Assert
Assert.NotEqual(0u, result);
// Verify it's deterministic
var result2 = RarCRC.CheckCrc(startCrc, data, 0, data.Length);
Assert.Equal(result, result2);
}
[Fact]
public void CheckCrc_PartialSpan_WithOffsetAndCount()
{
// Arrange
uint startCrc = 0;
var data = new byte[100];
for (int i = 0; i < data.Length; i++)
{
data[i] = (byte)(i % 256);
}
// Act - process middle section
var result = RarCRC.CheckCrc(startCrc, data, 25, 50);
// Assert - verify it processes exactly 50 bytes starting at offset 25
var middleSection = new byte[50];
Array.Copy(data, 25, middleSection, 0, 50);
var expected = RarCRC.CheckCrc(startCrc, middleSection, 0, 50);
Assert.Equal(expected, result);
}
[Fact]
public void CheckCrc_ZeroCount_ReturnsStartCrc()
{
// Arrange
uint startCrc = 0x12345678;
ReadOnlySpan<byte> data = stackalloc byte[] { 0x01, 0x02, 0x03 };
// Act
var result = RarCRC.CheckCrc(startCrc, data, 0, 0);
// Assert
Assert.Equal(startCrc, result);
}
[Fact]
public void CheckCrc_MaxByteValue_HandlesCorrectly()
{
// Arrange
uint startCrc = 0;
byte maxByte = 0xFF;
// Act
var result = RarCRC.CheckCrc(startCrc, maxByte);
// Assert
Assert.NotEqual(0u, result);
}
[Fact]
public void CheckCrc_MinByteValue_HandlesCorrectly()
{
// Arrange
uint startCrc = 0;
byte minByte = 0x00;
// Act
var result = RarCRC.CheckCrc(startCrc, minByte);
// Assert - CRC of 0x00 from startCrc=0 can be 0, that's mathematically valid
// What matters is that it's deterministic and doesn't crash
var result2 = RarCRC.CheckCrc(startCrc, minByte);
Assert.Equal(result, result2);
}
[Fact]
public void CheckCrc_ChainedCalls_ProduceCorrectResult()
{
// Arrange
uint startCrc = 0;
ReadOnlySpan<byte> part1 = stackalloc byte[] { 0x01, 0x02 };
ReadOnlySpan<byte> part2 = stackalloc byte[] { 0x03, 0x04 };
ReadOnlySpan<byte> combined = stackalloc byte[] { 0x01, 0x02, 0x03, 0x04 };
// Act
var crc1 = RarCRC.CheckCrc(startCrc, part1, 0, 2);
var crc2 = RarCRC.CheckCrc(crc1, part2, 0, 2);
var crcCombined = RarCRC.CheckCrc(startCrc, combined, 0, 4);
// Assert - chained calculation should equal combined calculation
Assert.Equal(crc2, crcCombined);
}
[Theory]
[InlineData(0x00000000)]
[InlineData(0xFFFFFFFF)]
[InlineData(0x12345678)]
[InlineData(0xABCDEF01)]
public void CheckCrc_VariousStartCrcs_ProduceDifferentResults(uint startCrc)
{
// Arrange
ReadOnlySpan<byte> data = stackalloc byte[] { 0x01, 0x02, 0x03 };
// Act
var result = RarCRC.CheckCrc(startCrc, data, 0, 3);
// Assert - the result should differ from the start CRC (barring extreme coincidence)
Assert.NotEqual(startCrc, result);
}
[Fact]
public void CheckCrc_OffsetAtEndOfData_ReturnsStartCrc()
{
// Arrange
uint startCrc = 0x12345678;
ReadOnlySpan<byte> data = stackalloc byte[] { 0x01, 0x02, 0x03 };
// Act - offset is at the end, so no bytes to process
var result = RarCRC.CheckCrc(startCrc, data, 3, 5);
// Assert
Assert.Equal(startCrc, result);
}
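// A minimal sketch built on the chaining property verified in
// CheckCrc_ChainedCalls_ProduceCorrectResult: a CRC can be accumulated over a
// stream one buffer at a time. The seed follows the tests above, which start
// from 0; the buffer size is arbitrary.
private static uint ComputeCrc(System.IO.Stream stream)
{
uint crc = 0;
var buffer = new byte[4096];
int read;
while ((read = stream.Read(buffer, 0, buffer.Length)) > 0)
{
crc = RarCRC.CheckCrc(crc, buffer, 0, read);
}
return crc;
}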
}

@@ -0,0 +1,372 @@
using System;
using System.Collections;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Archives.Rar;
using SharpCompress.Common;
using SharpCompress.Readers;
using SharpCompress.Readers.Rar;
using Xunit;
namespace SharpCompress.Test.Rar;
public class RarReaderAsyncTests : ReaderTests
{
[Fact]
public async Task Rar_Multi_Reader_Async() =>
await DoRar_Multi_Reader_Async([
"Rar.multi.part01.rar",
"Rar.multi.part02.rar",
"Rar.multi.part03.rar",
"Rar.multi.part04.rar",
"Rar.multi.part05.rar",
"Rar.multi.part06.rar",
]);
[Fact]
public async Task Rar5_Multi_Reader_Async() =>
await DoRar_Multi_Reader_Async([
"Rar5.multi.part01.rar",
"Rar5.multi.part02.rar",
"Rar5.multi.part03.rar",
"Rar5.multi.part04.rar",
"Rar5.multi.part05.rar",
"Rar5.multi.part06.rar",
]);
private async Task DoRar_Multi_Reader_Async(string[] archives)
{
using (
var reader = RarReader.Open(
archives
.Select(s => Path.Combine(TEST_ARCHIVES_PATH, s))
.Select(p => File.OpenRead(p))
)
)
{
while (reader.MoveToNextEntry())
{
await reader.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
}
VerifyFiles();
}
[Fact]
public async Task Rar_Multi_Reader_Encrypted_Async() =>
await Assert.ThrowsAsync<InvalidFormatException>(async () =>
{
string[] archives =
[
"Rar.EncryptedParts.part01.rar",
"Rar.EncryptedParts.part02.rar",
"Rar.EncryptedParts.part03.rar",
"Rar.EncryptedParts.part04.rar",
"Rar.EncryptedParts.part05.rar",
"Rar.EncryptedParts.part06.rar",
];
using (
var reader = RarReader.Open(
archives
.Select(s => Path.Combine(TEST_ARCHIVES_PATH, s))
.Select(p => File.OpenRead(p)),
new ReaderOptions { Password = "test" }
)
)
{
while (reader.MoveToNextEntry())
{
await reader.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
}
VerifyFiles();
});
[Fact]
public async Task Rar_Multi_Reader_Delete_Files_Async() =>
await DoRar_Multi_Reader_Delete_Files_Async([
"Rar.multi.part01.rar",
"Rar.multi.part02.rar",
"Rar.multi.part03.rar",
"Rar.multi.part04.rar",
"Rar.multi.part05.rar",
"Rar.multi.part06.rar",
]);
[Fact]
public async Task Rar5_Multi_Reader_Delete_Files_Async() =>
await DoRar_Multi_Reader_Delete_Files_Async([
"Rar5.multi.part01.rar",
"Rar5.multi.part02.rar",
"Rar5.multi.part03.rar",
"Rar5.multi.part04.rar",
"Rar5.multi.part05.rar",
"Rar5.multi.part06.rar",
]);
private async Task DoRar_Multi_Reader_Delete_Files_Async(string[] archives)
{
foreach (var file in archives)
{
File.Copy(
Path.Combine(TEST_ARCHIVES_PATH, file),
Path.Combine(SCRATCH2_FILES_PATH, file)
);
}
var streams = archives
.Select(s => Path.Combine(SCRATCH2_FILES_PATH, s))
.Select(File.OpenRead)
.ToList();
using (var reader = RarReader.Open(streams))
{
while (reader.MoveToNextEntry())
{
await reader.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
}
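// RarReader.Open did not take ownership of the volume streams here, so they are
// kept in 'streams' and disposed explicitly once the reader is done with them.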
foreach (var stream in streams)
{
stream.Dispose();
}
VerifyFiles();
foreach (var file in archives.Select(s => Path.Combine(SCRATCH2_FILES_PATH, s)))
{
File.Delete(file);
}
}
[Fact]
public async Task Rar_None_Reader_Async() =>
await ReadAsync("Rar.none.rar", CompressionType.Rar);
[Fact]
public async Task Rar5_None_Reader_Async() =>
await ReadAsync("Rar5.none.rar", CompressionType.Rar);
[Fact]
public async Task Rar_Reader_Async() => await ReadAsync("Rar.rar", CompressionType.Rar);
[Fact]
public async Task Rar5_Reader_Async() => await ReadAsync("Rar5.rar", CompressionType.Rar);
[Fact]
public async Task Rar5_CRC_Blake2_Reader_Async() =>
await ReadAsync("Rar5.crc_blake2.rar", CompressionType.Rar);
[Fact]
public async Task Rar_EncryptedFileAndHeader_Reader_Async() =>
await ReadRar_Async("Rar.encrypted_filesAndHeader.rar", "test");
[Fact]
public async Task Rar5_EncryptedFileAndHeader_Reader_Async() =>
await ReadRar_Async("Rar5.encrypted_filesAndHeader.rar", "test");
[Fact]
public async Task Rar_EncryptedFileOnly_Reader_Async() =>
await ReadRar_Async("Rar.encrypted_filesOnly.rar", "test");
[Fact]
public async Task Rar5_EncryptedFileOnly_Reader_Async() =>
await ReadRar_Async("Rar5.encrypted_filesOnly.rar", "test");
[Fact]
public async Task Rar_Encrypted_Reader_Async() =>
await ReadRar_Async("Rar.Encrypted.rar", "test");
[Fact]
public async Task Rar5_Encrypted_Reader_Async() =>
await ReadRar_Async("Rar5.encrypted_filesOnly.rar", "test");
private async Task ReadRar_Async(string testArchive, string password) =>
await ReadAsync(
testArchive,
CompressionType.Rar,
new ReaderOptions { Password = password }
);
[Fact]
public async Task Rar_Entry_Stream_Async() => await DoRar_Entry_Stream_Async("Rar.rar");
[Fact]
public async Task Rar5_Entry_Stream_Async() => await DoRar_Entry_Stream_Async("Rar5.rar");
private async Task DoRar_Entry_Stream_Async(string filename)
{
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, filename)))
using (var reader = ReaderFactory.Open(stream))
{
while (await reader.MoveToNextEntryAsync())
{
if (!reader.Entry.IsDirectory)
{
Assert.Equal(CompressionType.Rar, reader.Entry.CompressionType);
var entryStream = await reader.OpenEntryStreamAsync();
try
{
var file = Path.GetFileName(reader.Entry.Key).NotNull();
var folder =
Path.GetDirectoryName(reader.Entry.Key)
?? throw new ArgumentNullException();
var destdir = Path.Combine(SCRATCH_FILES_PATH, folder);
if (!Directory.Exists(destdir))
{
Directory.CreateDirectory(destdir);
}
var destinationFileName = Path.Combine(destdir, file);
using var fs = File.OpenWrite(destinationFileName);
await entryStream.CopyToAsync(fs);
}
finally
{
#if NETCOREAPP2_1_OR_GREATER || NETSTANDARD2_1_OR_GREATER
await entryStream.DisposeAsync();
#else
entryStream.Dispose();
#endif
}
}
}
}
VerifyFiles();
}
[Fact]
public async Task Rar_Reader_Audio_program_Async()
{
using (
var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Rar.Audio_program.rar"))
)
using (var reader = ReaderFactory.Open(stream, new ReaderOptions { LookForHeader = true }))
{
while (reader.MoveToNextEntry())
{
Assert.Equal(CompressionType.Rar, reader.Entry.CompressionType);
await reader.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
}
CompareFilesByPath(
Path.Combine(SCRATCH_FILES_PATH, "test.dat"),
Path.Combine(MISC_TEST_FILES_PATH, "test.dat")
);
}
[Fact]
public async Task Rar_Jpg_Reader_Async()
{
using (var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Rar.jpeg.jpg")))
using (var reader = RarReader.Open(stream, new ReaderOptions { LookForHeader = true }))
{
while (reader.MoveToNextEntry())
{
Assert.Equal(CompressionType.Rar, reader.Entry.CompressionType);
await reader.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
}
VerifyFiles();
}
[Fact]
public async Task Rar_Solid_Reader_Async() =>
await ReadAsync("Rar.solid.rar", CompressionType.Rar);
[Fact]
public async Task Rar_Comment_Reader_Async() =>
await ReadAsync("Rar.comment.rar", CompressionType.Rar);
[Fact]
public async Task Rar5_Comment_Reader_Async() =>
await ReadAsync("Rar5.comment.rar", CompressionType.Rar);
[Fact]
public async Task Rar5_Solid_Reader_Async() =>
await ReadAsync("Rar5.solid.rar", CompressionType.Rar);
[Fact]
public async Task Rar_Solid_Skip_Reader_Async() =>
await DoRar_Solid_Skip_Reader_Async("Rar.solid.rar");
[Fact]
public async Task Rar5_Solid_Skip_Reader_Async() =>
await DoRar_Solid_Skip_Reader_Async("Rar5.solid.rar");
private async Task DoRar_Solid_Skip_Reader_Async(string filename)
{
using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, filename));
using var reader = ReaderFactory.Open(stream, new ReaderOptions { LookForHeader = true });
while (reader.MoveToNextEntry())
{
if (reader.Entry.Key.NotNull().Contains("jpg"))
{
Assert.Equal(CompressionType.Rar, reader.Entry.CompressionType);
await reader.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
}
}
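// Note: in a solid archive, skipped entries are not free. Later entries depend
// on the dictionary state built by earlier ones, so the reader still decodes
// the skipped data internally even though nothing is written out.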
[Fact]
public async Task Rar_Reader_Skip_Async() => await DoRar_Reader_Skip_Async("Rar.rar");
[Fact]
public async Task Rar5_Reader_Skip_Async() => await DoRar_Reader_Skip_Async("Rar5.rar");
private async Task DoRar_Reader_Skip_Async(string filename)
{
using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, filename));
using var reader = ReaderFactory.Open(stream, new ReaderOptions { LookForHeader = true });
while (reader.MoveToNextEntry())
{
if (reader.Entry.Key.NotNull().Contains("jpg"))
{
Assert.Equal(CompressionType.Rar, reader.Entry.CompressionType);
await reader.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
}
}
private async Task ReadAsync(
string testArchive,
CompressionType expectedCompression,
ReaderOptions? readerOptions = null
)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
using Stream stream = File.OpenRead(testArchive);
using var reader = ReaderFactory.Open(stream, readerOptions ?? new ReaderOptions());
while (await reader.MoveToNextEntryAsync())
{
if (!reader.Entry.IsDirectory)
{
Assert.Equal(expectedCompression, reader.Entry.CompressionType);
await reader.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
}
VerifyFiles();
}
}

@@ -14,29 +14,25 @@ public class RarReaderTests : ReaderTests
{
[Fact]
public void Rar_Multi_Reader() =>
DoRar_Multi_Reader([
"Rar.multi.part01.rar",
"Rar.multi.part02.rar",
"Rar.multi.part03.rar",
"Rar.multi.part04.rar",
"Rar.multi.part05.rar",
"Rar.multi.part06.rar",
]);
[Fact]
public void Rar5_Multi_Reader() =>
DoRar_Multi_Reader(
[
"Rar5.multi.part01.rar",
"Rar5.multi.part02.rar",
"Rar5.multi.part03.rar",
"Rar5.multi.part04.rar",
"Rar5.multi.part05.rar",
"Rar5.multi.part06.rar",
]
);
DoRar_Multi_Reader([
"Rar5.multi.part01.rar",
"Rar5.multi.part02.rar",
"Rar5.multi.part03.rar",
"Rar5.multi.part04.rar",
"Rar5.multi.part05.rar",
"Rar5.multi.part06.rar",
]);
private void DoRar_Multi_Reader(string[] archives)
{
@@ -61,16 +57,14 @@ public class RarReaderTests : ReaderTests
[Fact]
public void Rar_Multi_Reader_Encrypted() =>
DoRar_Multi_Reader_Encrypted([
"Rar.EncryptedParts.part01.rar",
"Rar.EncryptedParts.part02.rar",
"Rar.EncryptedParts.part03.rar",
"Rar.EncryptedParts.part04.rar",
"Rar.EncryptedParts.part05.rar",
"Rar.EncryptedParts.part06.rar",
]);
private void DoRar_Multi_Reader_Encrypted(string[] archives) =>
Assert.Throws<InvalidFormatException>(() =>
@@ -97,29 +91,25 @@ public class RarReaderTests : ReaderTests
[Fact]
public void Rar_Multi_Reader_Delete_Files() =>
DoRar_Multi_Reader_Delete_Files([
"Rar.multi.part01.rar",
"Rar.multi.part02.rar",
"Rar.multi.part03.rar",
"Rar.multi.part04.rar",
"Rar.multi.part05.rar",
"Rar.multi.part06.rar",
]);
[Fact]
public void Rar5_Multi_Reader_Delete_Files() =>
DoRar_Multi_Reader_Delete_Files([
"Rar5.multi.part01.rar",
"Rar5.multi.part02.rar",
"Rar5.multi.part03.rar",
"Rar5.multi.part04.rar",
"Rar5.multi.part05.rar",
"Rar5.multi.part06.rar",
]);
private void DoRar_Multi_Reader_Delete_Files(string[] archives)
{
@@ -407,16 +397,14 @@ public class RarReaderTests : ReaderTests
Path.Combine("exe", "test.exe"),
}
);
using var reader = RarReader.Open([
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part01.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part02.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part03.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part04.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part05.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part06.rar"),
]);
while (reader.MoveToNextEntry())
{
Assert.Equal(expectedOrder.Pop(), reader.Entry.Key);

@@ -13,29 +13,53 @@ namespace SharpCompress.Test;
public abstract class ReaderTests : TestBase
{
protected void Read(string testArchive, ReaderOptions? options = null)
{
ReadCore(testArchive, options, ReadImpl);
}
protected void Read(
string testArchive,
CompressionType expectedCompression,
ReaderOptions? options = null
)
{
ReadCore(testArchive, options, (path, opts) => ReadImpl(path, expectedCompression, opts));
}
private void ReadCore(
string testArchive,
ReaderOptions? options,
Action<string, ReaderOptions> readImpl
)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
options ??= new ReaderOptions { BufferSize = 0x20000 };
options.LeaveStreamOpen = true;
readImpl(testArchive, options);
options.LeaveStreamOpen = false;
readImpl(testArchive, options);
VerifyFiles();
}
private void ReadImpl(string testArchive, ReaderOptions options)
{
ReadImplCore(testArchive, options, UseReader);
}
private void ReadImpl(
string testArchive,
CompressionType expectedCompression,
ReaderOptions options
)
{
ReadImplCore(testArchive, options, r => UseReader(r, expectedCompression));
}
private void ReadImplCore(string testArchive, ReaderOptions options, Action<IReader> useReader)
{
using var file = File.OpenRead(testArchive);
using var protectedStream = SharpCompressStream.Create(
@@ -47,19 +71,17 @@ public abstract class ReaderTests : TestBase
using var testStream = new TestStream(protectedStream);
using (var reader = ReaderFactory.Open(testStream, options))
{
useReader(reader);
protectedStream.ThrowOnDispose = false;
Assert.False(testStream.IsDisposed, $"{nameof(testStream)} prematurely closed");
}
// Boolean XOR -- If the stream should be left open (true), then the stream should not be disposed (false)
// and if the stream should be closed (false), then the stream should be disposed (true)
var message =
$"{nameof(options.LeaveStreamOpen)} is set to '{options.LeaveStreamOpen}', so {nameof(testStream.IsDisposed)} should be set to '{!testStream.IsDisposed}', but is set to {testStream.IsDisposed}";
Assert.True(options.LeaveStreamOpen != testStream.IsDisposed, message);
}
protected void UseReader(IReader reader, CompressionType expectedCompression)
{
while (reader.MoveToNextEntry())
{
@@ -74,6 +96,20 @@ public abstract class ReaderTests : TestBase
}
}
private void UseReader(IReader reader)
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
}
}
protected async Task ReadAsync(
string testArchive,
CompressionType expectedCompression,
@@ -140,6 +176,27 @@ public abstract class ReaderTests : TestBase
}
}
protected void ReadForBufferBoundaryCheck(string fileName, CompressionType compressionType)
{
using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, fileName));
using var reader = ReaderFactory.Open(stream, new ReaderOptions { LookForHeader = true });
while (reader.MoveToNextEntry())
{
Assert.Equal(compressionType, reader.Entry.CompressionType);
reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}
CompareFilesByPath(
Path.Combine(SCRATCH_FILES_PATH, "alice29.txt"),
Path.Combine(MISC_TEST_FILES_PATH, "alice29.txt")
);
}
protected void Iterate(
string testArchive,
string fileOrder,

@@ -0,0 +1,606 @@
using System;
using System.Buffers;
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Compressors.LZMA;
using Xunit;
namespace SharpCompress.Test.Streams;
public class LzmaStreamAsyncTests
{
[Fact]
public async Task TestLzma2Decompress1ByteAsync()
{
var properties = new byte[] { 0x01 };
var compressedData = new byte[] { 0x01, 0x00, 0x00, 0x58, 0x00 };
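// For reference, this fixture follows LZMA2 framing: the single properties byte
// (0x01) encodes the dictionary size, and in compressedData the control byte
// 0x01 starts an uncompressed chunk with a dictionary reset, the next two bytes
// hold (size - 1) big-endian (0x0000, so one data byte), 0x58 is that byte
// ('X'), and the trailing 0x00 marks end of stream.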
var lzma2Stream = new MemoryStream(compressedData);
var decompressor = new LzmaStream(properties, lzma2Stream, 5, 1);
var buffer = new byte[1];
var bytesRead = await decompressor.ReadAsync(buffer, 0, 1).ConfigureAwait(false);
Assert.Equal(1, bytesRead);
Assert.Equal((byte)'X', buffer[0]);
}
private static byte[] LzmaData { get; } =
[
0x5D,
0x00,
0x20,
0x00,
0x00,
0x48,
0x01,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x80,
0x24,
0x18,
0x2F,
0xEB,
0x20,
0x78,
0xBA,
0x78,
0x70,
0xDC,
0x43,
0x2C,
0x32,
0xC9,
0xC3,
0x97,
0x4D,
0x10,
0x74,
0xE2,
0x20,
0xBF,
0x5A,
0xB4,
0xB3,
0xC4,
0x31,
0x80,
0x26,
0x3E,
0x6A,
0xEA,
0x51,
0xFC,
0xE4,
0x8D,
0x54,
0x96,
0x05,
0xCC,
0x78,
0x59,
0xAC,
0xD4,
0x21,
0x65,
0x8F,
0xA9,
0xC8,
0x0D,
0x9B,
0xE2,
0xC2,
0xF9,
0x7C,
0x3C,
0xDD,
0x4D,
0x38,
0x04,
0x0B,
0xF8,
0x0B,
0x68,
0xA5,
0x93,
0x6C,
0x64,
0xAC,
0xCF,
0x71,
0x68,
0xE8,
0x69,
0x25,
0xC6,
0x17,
0x28,
0xF1,
0x7C,
0xF1,
0xDC,
0x47,
0x51,
0x4D,
0x1E,
0x0E,
0x0B,
0x80,
0x37,
0x24,
0x58,
0x80,
0xF7,
0xB4,
0xAC,
0x54,
0xF1,
0x0F,
0x7F,
0x0F,
0x0F,
0xF5,
0x9C,
0xDE,
0x54,
0x4F,
0xA3,
0x7B,
0x20,
0xC5,
0xA8,
0x18,
0x3B,
0xED,
0xDC,
0x04,
0xF6,
0xFB,
0x86,
0xE0,
0xAB,
0xB6,
0x87,
0x99,
0x92,
0x43,
0x7B,
0x2C,
0xCC,
0x31,
0x83,
0x90,
0xFF,
0xF1,
0x76,
0x03,
0x90,
];
/// <summary>
/// The decoded data for <see cref="LzmaData"/>.
/// </summary>
private static byte[] LzmaResultData { get; } =
[
0x01,
0x00,
0xFD,
0x01,
0x00,
0x00,
0x00,
0x00,
0xFA,
0x61,
0x18,
0x5F,
0x02,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x02,
0x00,
0x00,
0x00,
0x03,
0x00,
0x00,
0x00,
0x01,
0x00,
0x00,
0x00,
0x02,
0x00,
0xB4,
0x01,
0x00,
0x00,
0x00,
0x00,
0x3D,
0x61,
0xE5,
0x5E,
0x03,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x12,
0x00,
0x00,
0x00,
0x02,
0x00,
0xB4,
0x01,
0x00,
0x00,
0x00,
0x00,
0xE2,
0x61,
0x18,
0x5F,
0x04,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x12,
0x00,
0x00,
0x00,
0x29,
0x00,
0x00,
0x00,
0x01,
0x00,
0xFD,
0x01,
0x00,
0x00,
0x00,
0x00,
0x14,
0x62,
0x18,
0x5F,
0x01,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x03,
0x00,
0x00,
0x00,
0x40,
0x00,
0x00,
0x00,
0x09,
0x00,
0x00,
0x00,
0x02,
0x00,
0xB4,
0x01,
0x00,
0x00,
0x00,
0x00,
0x7F,
0x61,
0xE5,
0x5E,
0x05,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x3B,
0x00,
0x00,
0x00,
0xCB,
0x15,
0x00,
0x00,
0x02,
0x00,
0xB4,
0x01,
0x00,
0x00,
0x00,
0x00,
0x7F,
0x61,
0xE5,
0x5E,
0x06,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x3B,
0x00,
0x00,
0x00,
0xCB,
0x15,
0x00,
0x00,
0x02,
0x00,
0xB4,
0x01,
0x00,
0x00,
0x00,
0x00,
0x3D,
0x61,
0xE5,
0x5E,
0x07,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x12,
0x00,
0x00,
0x00,
0x02,
0x00,
0xB4,
0x01,
0x00,
0x00,
0x00,
0x00,
0xFC,
0x96,
0x40,
0x5C,
0x08,
0x00,
0x00,
0x00,
0x60,
0x00,
0x00,
0x00,
0xFF,
0xFF,
0xFF,
0xFF,
0x00,
0x00,
0x00,
0x00,
0xF8,
0x83,
0x12,
0x00,
0xD4,
0x99,
0x00,
0x00,
0x43,
0x95,
0x00,
0x00,
0xEB,
0x7A,
0x00,
0x00,
0x40,
0x6F,
0x00,
0x00,
0xD2,
0x6F,
0x00,
0x00,
0x67,
0x74,
0x00,
0x00,
0x02,
0x69,
0x00,
0x00,
0x76,
0x79,
0x00,
0x00,
0x98,
0x66,
0x00,
0x00,
0x23,
0x25,
0x00,
0x00,
0x01,
0x00,
0xFD,
0x01,
0x00,
0x00,
0x00,
0x00,
0x3B,
0x2F,
0xC0,
0x5F,
0x09,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x00,
0x03,
0x00,
0x00,
0x00,
0x69,
0x00,
0x3D,
0x00,
0x0A,
0x00,
0x00,
0x00,
];
[Fact]
public async Task TestLzmaBufferAsync()
{
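// LzmaData uses the raw .lzma container layout: a 5-byte properties block (one
// lc/lp/pb byte plus a 32-bit little-endian dictionary size) followed by a
// 64-bit little-endian uncompressed length, which is exactly what the two reads
// below pull off the stream before decoding starts.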
var input = new MemoryStream(LzmaData);
using var output = new MemoryStream();
var properties = new byte[5];
await input.ReadAsync(properties, 0, 5).ConfigureAwait(false);
var fileLengthBytes = new byte[8];
await input.ReadAsync(fileLengthBytes, 0, 8).ConfigureAwait(false);
var fileLength = BitConverter.ToInt64(fileLengthBytes, 0);
var coder = new Decoder();
coder.SetDecoderProperties(properties);
coder.Code(input, output, input.Length, fileLength, null);
Assert.Equal(output.ToArray(), LzmaResultData);
}
[Fact]
public async Task TestLzmaStreamEncodingWritesDataAsync()
{
using var inputStream = new MemoryStream(LzmaResultData);
using MemoryStream outputStream = new();
using var lzmaStream = new LzmaStream(LzmaEncoderProperties.Default, false, outputStream);
await inputStream.CopyToAsync(lzmaStream).ConfigureAwait(false);
lzmaStream.Close();
Assert.NotEqual(0, outputStream.Length);
}
[Fact]
public async Task TestLzmaEncodingAccuracyAsync()
{
var input = new MemoryStream(LzmaResultData);
var compressed = new MemoryStream();
var lzmaEncodingStream = new LzmaStream(LzmaEncoderProperties.Default, false, compressed);
await input.CopyToAsync(lzmaEncodingStream).ConfigureAwait(false);
lzmaEncodingStream.Close();
compressed.Position = 0;
var output = new MemoryStream();
await DecompressLzmaStreamAsync(
lzmaEncodingStream.Properties,
compressed,
compressed.Length,
output,
LzmaResultData.LongLength
)
.ConfigureAwait(false);
Assert.Equal(output.ToArray(), LzmaResultData);
}
private static async Task DecompressLzmaStreamAsync(
byte[] properties,
Stream compressedStream,
long compressedSize,
Stream decompressedStream,
long decompressedSize
)
{
var lzmaStream = new LzmaStream(
properties,
compressedStream,
compressedSize,
-1,
null,
false
);
var buffer = new byte[1024];
long totalRead = 0;
while (totalRead < decompressedSize)
{
var toRead = (int)Math.Min(buffer.Length, decompressedSize - totalRead);
var read = await lzmaStream.ReadAsync(buffer, 0, toRead).ConfigureAwait(false);
if (read > 0)
{
await decompressedStream.WriteAsync(buffer, 0, read).ConfigureAwait(false);
totalRead += read;
}
else
{
break;
}
}
}
}
