Compare commits

...

208 Commits

Author SHA1 Message Date
Adam Hathcock
895dd02830 another fix 2026-01-31 14:20:01 +00:00
Adam Hathcock
7112dba345 some shrink fixes 2026-01-31 13:56:58 +00:00
Adam Hathcock
0767292bb0 ReduceStream is async 2026-01-31 13:19:10 +00:00
Adam Hathcock
b40e1a002a Merge remote-tracking branch 'origin/adam/data-descriptor-fix' into adam/more-explode-async 2026-01-31 11:18:42 +00:00
Adam Hathcock
c096164486 add shrink stream async 2026-01-31 11:18:16 +00:00
Adam Hathcock
d92def91b0 Opus 4.5 did this fix, need to understand it 2026-01-31 10:59:30 +00:00
Adam Hathcock
b48e938c98 finish PPMD? 2026-01-30 13:46:30 +00:00
Adam Hathcock
4ed1f89866 more ppmd async 2026-01-30 13:19:24 +00:00
Adam Hathcock
525bcea989 ppmd create 2026-01-30 12:37:21 +00:00
Adam Hathcock
6c3f7c86da lzma works with zip 2026-01-30 12:28:01 +00:00
Adam Hathcock
595a97bd62 more explode async 2026-01-30 07:25:49 +00:00
Adam Hathcock
c9db03335b Fixed AsyncMarkingBinaryReader 2026-01-29 16:23:37 +00:00
Adam Hathcock
659f5d7834 fix some more tests 2026-01-29 15:47:22 +00:00
Adam Hathcock
42f6c77419 rewindable with memory stream 2026-01-29 15:23:53 +00:00
Adam Hathcock
bcaec86514 save this 2026-01-29 14:43:05 +00:00
Adam Hathcock
1ca914823f more rework 2026-01-29 14:42:29 +00:00
Adam Hathcock
be8841075a fixes 2026-01-29 11:08:38 +00:00
Adam Hathcock
a94e319935 clean up rewindable stream 2026-01-29 11:01:59 +00:00
Adam Hathcock
d60abc3f45 fmt 2026-01-29 10:16:37 +00:00
Adam Hathcock
b994f0ab55 more 7z async 2026-01-29 10:13:55 +00:00
Adam Hathcock
e2cb9f39ab fix up rewindable stream and use it more, add NonDisposingStream 2026-01-29 09:08:40 +00:00
Adam Hathcock
58459bda12 using a byte array instead of memory streams 2026-01-28 19:15:34 +00:00
Adam Hathcock
8dfd5349f0 making RewindableStream more proper 2026-01-28 16:50:35 +00:00
Adam Hathcock
c770bc4788 reintroduce RewindableStream stream. SharpCompressStream does too much 2026-01-28 16:33:19 +00:00
Adam Hathcock
24b4ef8780 fix test 2026-01-28 11:48:11 +00:00
Adam Hathcock
6ddcbf2bc9 fix some tests 2026-01-28 11:37:24 +00:00
Adam Hathcock
8d5d686b79 more fixes 2026-01-28 11:23:41 +00:00
Adam Hathcock
f4369e540a fmt 2026-01-28 11:13:29 +00:00
Adam Hathcock
c219eb4abb Merge branch 'release'
# Conflicts:
#	src/SharpCompress/Archives/ArchiveFactory.cs
#	src/SharpCompress/Archives/AutoArchiveFactory.cs
#	src/SharpCompress/Archives/SevenZip/SevenZipArchive.cs
#	src/SharpCompress/Archives/Zip/ZipArchive.cs
#	src/SharpCompress/Factories/AceFactory.cs
#	src/SharpCompress/Factories/ArcFactory.cs
#	src/SharpCompress/Factories/ArjFactory.cs
#	src/SharpCompress/Factories/Factory.cs
#	src/SharpCompress/Factories/GZipFactory.cs
#	src/SharpCompress/Factories/IFactory.cs
#	src/SharpCompress/Factories/RarFactory.cs
#	src/SharpCompress/Factories/SevenZipFactory.cs
#	src/SharpCompress/Factories/TarFactory.cs
#	src/SharpCompress/Factories/ZStandardFactory.cs
#	src/SharpCompress/Factories/ZipFactory.cs
#	src/SharpCompress/IO/SharpCompressStream.cs
#	src/SharpCompress/Readers/AbstractReader.cs
#	src/SharpCompress/Utility.cs
2026-01-28 11:12:49 +00:00
Adam Hathcock
9a7bdd39e8 Merge pull request #1172 from adamhathcock/copilot/fix-sevenzip-contiguous-streams
Fix SevenZipReader to maintain contiguous stream state for solid archives
2026-01-28 08:35:28 +00:00
Adam Hathcock
484bc740d7 Update src/SharpCompress/Archives/SevenZip/SevenZipArchive.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-01-28 08:26:28 +00:00
Adam Hathcock
8a67d501a8 Don't use reflection in tests 2026-01-28 08:10:06 +00:00
copilot-swe-agent[bot]
3c87242bd0 Add test to verify folder stream reuse in solid archives
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-27 17:29:44 +00:00
copilot-swe-agent[bot]
999124e68e Remove unused _currentFolderIndex field
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-27 17:03:20 +00:00
copilot-swe-agent[bot]
db2f5c9cb9 Fix SevenZipReader to iterate entries as contiguous streams
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-27 17:01:18 +00:00
Adam Hathcock
af08a7cd54 Merge pull request #1169 from adamhathcock/copilot/fix-zip-parsing-regression
Fix ZIP parsing failure on non-seekable streams with short reads
2026-01-27 16:54:12 +00:00
copilot-swe-agent[bot]
72eaf66f05 Initial plan 2026-01-27 16:53:53 +00:00
Adam Hathcock
8a3be35d67 Update tests/SharpCompress.Test/Zip/ZipShortReadTests.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-01-27 16:43:13 +00:00
copilot-swe-agent[bot]
d59e4c2a0d Refactor FillBuffer to use ReadFully pattern
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-27 16:25:24 +00:00
copilot-swe-agent[bot]
71655e04c4 Apply code formatting with CSharpier
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-27 16:02:26 +00:00
copilot-swe-agent[bot]
a706a9d725 Fix ZIP parsing regression with short reads on non-seekable streams
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-27 16:00:44 +00:00
copilot-swe-agent[bot]
970934a40b Initial plan 2026-01-27 15:51:50 +00:00
Adam Hathcock
a9c28a7b62 Merge pull request #1165 from adamhathcock/adam/buffer-size-consolidation
(Release) Buffer size consolidation
2026-01-27 14:41:14 +00:00
Adam Hathcock
4d31436740 constant should be a static property 2026-01-27 12:39:01 +00:00
Adam Hathcock
c82744c51c fmt 2026-01-27 12:15:31 +00:00
Adam Hathcock
f0eaddc6a6 Merge remote-tracking branch 'origin/adam/buffer-size-consolidation' into adam/buffer-size-consolidation 2026-01-27 12:14:17 +00:00
Adam Hathcock
d6156f0f1e release branch builds increment patch versions and master builds increment minor versions 2026-01-27 12:14:03 +00:00
Adam Hathcock
3c88c7fdd5 Merge pull request #1167 from adamhathcock/copilot/sub-pr-1165-again
Fix grammatical errors in ArcFactory comment documentation
2026-01-27 11:58:25 +00:00
Adam Hathcock
d11f6aefb0 Merge pull request #1166 from adamhathcock/copilot/sub-pr-1165
Add [Obsolete] attribute to ReaderOptions.DefaultBufferSize for backward compatibility
2026-01-27 11:57:54 +00:00
copilot-swe-agent[bot]
010a38bb73 Add clarifying comment about buffer size value difference
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-27 11:54:50 +00:00
copilot-swe-agent[bot]
53f12d75db Add [Obsolete] attribute to ReaderOptions.DefaultBufferSize
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-27 11:53:37 +00:00
copilot-swe-agent[bot]
6c866324b2 Fix grammatical errors in ArcFactory comments
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-27 11:49:29 +00:00
copilot-swe-agent[bot]
a114155189 Initial plan 2026-01-27 11:48:05 +00:00
copilot-swe-agent[bot]
014bbc3ea4 Initial plan 2026-01-27 11:47:52 +00:00
Adam Hathcock
d52facd4ab Remove change 2026-01-27 10:48:32 +00:00
Adam Hathcock
0a50386ada Using Constants class differently 2026-01-27 10:46:54 +00:00
Adam Hathcock
f64fa53ed1 Merge pull request #1132 from adamhathcock/adam/async-creation
Clean up for async creation
2026-01-27 07:29:45 +00:00
Adam Hathcock
335db1eb9e fix ValueTask struct copying 2026-01-26 18:10:59 +00:00
Adam Hathcock
27fe2d807e more lzma porting 2026-01-26 18:09:44 +00:00
Adam Hathcock
27cf2795ef More LZMA fixes? 2026-01-26 15:50:50 +00:00
Adam Hathcock
979c8d9234 Merge fixes 2026-01-26 14:20:42 +00:00
Adam Hathcock
04eabb7866 Merge remote-tracking branch 'origin/master' into adam/async-creation
# Conflicts:
#	src/SharpCompress/Common/EntryStream.cs
#	src/SharpCompress/IO/BufferedSubStream.cs
#	src/SharpCompress/packages.lock.json
2026-01-26 14:16:14 +00:00
Adam Hathcock
f4eccea20c Merge pull request #1162 from adamhathcock/adam/release-to-master
release to master
2026-01-26 13:40:22 +00:00
Adam Hathcock
fc63217dd0 Merge remote-tracking branch 'origin/release' into adam/release-to-master
# Conflicts:
#	src/SharpCompress/IO/BufferedSubStream.cs
#	tests/SharpCompress.Test/Zip/ZipReaderAsyncTests.cs
#	tests/SharpCompress.Test/Zip/ZipReaderTests.cs
2026-01-26 13:24:25 +00:00
Adam Hathcock
b9fc680548 Merge pull request #1160 from adamhathcock/adam/check-if-seek
add check to see if we need to seek before hand
2026-01-26 12:24:39 +00:00
Adam Hathcock
7dcc13c1f0 Merge pull request #1161 from adamhathcock/copilot/sub-pr-1160
Fix ArrayPool corruption from double-disposal in BufferedSubStream
2026-01-26 12:15:55 +00:00
copilot-swe-agent[bot]
56d3091688 Fix condition order to check CanSeek before Position
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-26 12:12:08 +00:00
copilot-swe-agent[bot]
a0af0604d1 Add disposal checks to RefillCache methods
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-26 12:11:16 +00:00
copilot-swe-agent[bot]
875c2d7694 Fix BufferedSubStream double-dispose issue with ArrayPool
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-26 12:10:19 +00:00
Adam Hathcock
8c95f863cb do CanSeek first 2026-01-26 12:06:57 +00:00
copilot-swe-agent[bot]
ddf37e82c2 Initial plan 2026-01-26 12:06:38 +00:00
Adam Hathcock
a82fda98d7 more testing and add pooling to cache 2026-01-26 11:45:25 +00:00
Adam Hathcock
44e4b1804e add check to see if we need to seek before hand 2026-01-26 09:41:13 +00:00
Adam Hathcock
984ea8f46f remove posix 2026-01-25 16:38:28 +00:00
Adam Hathcock
4d84394417 LZMA Lencoder uses async 2026-01-25 16:38:17 +00:00
Adam Hathcock
507074cf72 Merge branch 'opencode/glowing-wolf' into adam/async-creation 2026-01-25 15:24:17 +00:00
Adam Hathcock
f364b68e09 remove more buffer 2026-01-25 15:23:10 +00:00
Adam Hathcock
244acc0c9e implemented async rangecoder 2026-01-25 15:17:44 +00:00
Adam Hathcock
def0bce221 remove mono dep as it's annoying 2026-01-25 15:12:17 +00:00
Adam Hathcock
d0823db595 fmt 2026-01-25 15:04:28 +00:00
Adam Hathcock
73704bcd7e Merge branch 'opencode/clever-knight' into adam/async-creation 2026-01-25 15:04:07 +00:00
Adam Hathcock
86c3b93fa5 Merge branch 'opencode/glowing-wolf' into adam/async-creation 2026-01-25 15:04:01 +00:00
Adam Hathcock
e89fb211ce gzipwriter async 2026-01-25 15:03:51 +00:00
Adam Hathcock
55100cb37a ExplodeStream is async 2026-01-25 15:03:06 +00:00
Adam Hathcock
14fd880dac add tar writing async 2026-01-25 14:57:44 +00:00
Adam Hathcock
4ca1a7713e Merge pull request #1157 from adamhathcock/adam/1154-release
Merge pull request #1156 from adamhathcock/copilot/fix-sharpcompress-…
2026-01-25 11:36:59 +00:00
Adam Hathcock
9caf7be928 Revert testing 2026-01-24 10:23:02 +00:00
Adam Hathcock
bf4217fde6 Merge pull request #1156 from adamhathcock/copilot/fix-sharpcompress-archive-iteration
Fix silent iteration failure when input stream throws on Flush()
# Conflicts:
#	src/SharpCompress/packages.lock.json
2026-01-24 10:18:02 +00:00
Adam Hathcock
de3cda9034 Merge pull request #1156 from adamhathcock/copilot/fix-sharpcompress-archive-iteration
Fix silent iteration failure when input stream throws on Flush()
2026-01-24 10:11:16 +00:00
Adam Hathcock
f1102dc980 Undoing https://github.com/adamhathcock/sharpcompress/pull/1151 2026-01-24 10:01:49 +00:00
copilot-swe-agent[bot]
f2bb81d611 Add async versions of archive iteration regression tests
- Added Archive_Iteration_DoesNotBreak_WhenFlushThrows_Deflate_Async
- Added Archive_Iteration_DoesNotBreak_WhenFlushThrows_LZMA_Async
- Both async tests mirror the sync versions and pass successfully

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-23 16:42:44 +00:00
copilot-swe-agent[bot]
41e0c151de Fix regression: archive iteration breaking when input stream throws in Flush()
- Modified ZlibBaseStream.Flush() and FlushAsync() to only flush the underlying stream when in Writer mode
- Added ThrowOnFlushStream mock for testing
- Added regression tests for Deflate and LZMA compressed archives
- All tests pass successfully

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-23 16:04:09 +00:00
copilot-swe-agent[bot]
d0f44839ff Initial plan 2026-01-23 15:58:14 +00:00
Adam Hathcock
414cad1241 add braces 2026-01-23 10:55:51 +00:00
Adam Hathcock
abe0087cfd fmt 2026-01-23 10:32:11 +00:00
Adam Hathcock
060b1ed5dd fix disposal and add tests 2026-01-23 10:25:41 +00:00
Adam Hathcock
fbc168fafe Merge remote-tracking branch 'origin/adam/async-creation' into adam/async-creation 2026-01-23 09:46:53 +00:00
Adam Hathcock
d5a8c37113 Merge pull request #1154 from adamhathcock/adam/1151-release
Adam/1151 release cherry pick
2026-01-23 09:31:03 +00:00
Adam Hathcock
21ce9a38e6 fix up tests 2026-01-23 09:04:55 +00:00
Adam Hathcock
7732fbb698 Merge pull request #1151 from adamhathcock/copilot/fix-entrystream-flush-issue
Fix EntryStream.Dispose() throwing NotSupportedException on non-seekable streams
2026-01-23 08:59:56 +00:00
Adam Hathcock
44402414a6 LZMA create 2026-01-22 17:01:48 +00:00
Adam Hathcock
11b92d102a Create for explodestream 2026-01-22 16:48:53 +00:00
Adam Hathcock
16831e1e6e Merge pull request #1152 from adamhathcock/copilot/sub-pr-1132
Fix dispose methods to always set _isDisposed and call base.Dispose() when LeaveOpen is true
2026-01-22 16:39:47 +00:00
Adam Hathcock
3b83d08e2a fmt 2026-01-22 16:38:44 +00:00
Adam Hathcock
b622a2ce73 fix disposal and other simple issues 2026-01-22 16:38:35 +00:00
Adam Hathcock
c5814502f6 clean up and fixing tests....need to revisit disposal 2026-01-22 16:24:07 +00:00
copilot-swe-agent[bot]
d9be6389ca Address code review feedback - remove extra blank lines and use consistent property access
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-22 15:51:21 +00:00
copilot-swe-agent[bot]
336a8f2876 Fix SharpCompressStream Dispose methods to set _isDisposed and call base.Dispose even when LeaveOpen is true
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-22 15:50:05 +00:00
copilot-swe-agent[bot]
b4f949ba9b Initial plan 2026-01-22 15:44:08 +00:00
Adam Hathcock
9403c12793 Add await 2026-01-22 15:42:54 +00:00
Adam Hathcock
77c1cebefc Merge remote-tracking branch 'origin/master' into adam/async-creation
# Conflicts:
#	src/SharpCompress/Common/EntryStream.cs
#	tests/SharpCompress.Test/SharpCompress.Test.csproj
2026-01-22 15:29:38 +00:00
Adam Hathcock
caa7acdbc5 Merge pull request #1151 from adamhathcock/copilot/fix-entrystream-flush-issue
Fix EntryStream.Dispose() throwing NotSupportedException on non-seekable streams
2026-01-22 15:23:13 +00:00
Adam Hathcock
1522e64797 fix async tests 2026-01-22 15:15:57 +00:00
Adam Hathcock
5152e3197e fix build flags 2026-01-22 15:12:18 +00:00
Adam Hathcock
ae4f2c08fd check if second stream is zip header without changing position - fix 2026-01-22 15:06:58 +00:00
copilot-swe-agent[bot]
9628f2dda1 Add async tests for EntryStream.Dispose on non-seekable streams
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-22 14:35:16 +00:00
Adam Hathcock
65208a30c1 fix more tests 2026-01-22 14:15:47 +00:00
Adam Hathcock
4c838db876 everything compiles and passes (minus 3 tests) 2026-01-22 14:08:20 +00:00
Adam Hathcock
d1f6fd9af1 move more and fmt 2026-01-22 14:05:23 +00:00
Adam Hathcock
61c6f8403a some manual moving 2026-01-22 13:52:24 +00:00
Adam Hathcock
a8f47237d7 divide async and sync into new files 2026-01-22 13:38:20 +00:00
copilot-swe-agent[bot]
7cbdc5b46c Format code with CSharpier
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-22 13:21:36 +00:00
copilot-swe-agent[bot]
8b74243e79 Update test comments to include version context
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-22 13:07:32 +00:00
copilot-swe-agent[bot]
f77a2aabab Fix EntryStream.Dispose() to not throw NotSupportedException on non-seekable streams
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-22 13:04:58 +00:00
copilot-swe-agent[bot]
e6fb704780 Initial plan 2026-01-22 12:59:30 +00:00
Adam Hathcock
c5d7407919 Update from Task to ValueTask where I can 2026-01-22 09:18:07 +00:00
Adam Hathcock
b9ed2b09c1 fmt 2026-01-22 09:05:26 +00:00
Adam Hathcock
db0bb8a30d fix some 7z tests 2026-01-22 08:52:00 +00:00
Adam Hathcock
85d82e5c86 fix tar issue 2026-01-22 08:22:57 +00:00
Adam Hathcock
1a87075f33 GZip fix 2026-01-22 08:13:34 +00:00
Adam Hathcock
8df9232171 use extension where appropriate with more fixes 2026-01-21 16:57:25 +00:00
Adam Hathcock
7b7eba8cd9 more fixes 2026-01-21 16:11:40 +00:00
Adam Hathcock
169364f6ae fix disposal 2026-01-21 15:37:56 +00:00
Adam Hathcock
c38f74d34c Merge remote-tracking branch 'origin/master' into adam/async-creation
# Conflicts:
#	src/SharpCompress/Compressors/BZip2/BZip2Stream.cs
#	src/SharpCompress/Compressors/BZip2/CBZip2InputStream.cs
#	src/SharpCompress/Compressors/Deflate/DeflateStream.cs
2026-01-21 15:31:44 +00:00
Adam Hathcock
895699d22e fmt 2026-01-20 16:53:08 +00:00
Adam Hathcock
cf901c2784 fix test 2026-01-20 16:44:34 +00:00
Adam Hathcock
e1bbc65f5b more bzip tests pass 2026-01-20 16:39:15 +00:00
Adam Hathcock
f6faaa83ec better async bzip input stream 2026-01-20 16:32:30 +00:00
Adam Hathcock
4d3ae3a97f Merge branch 'opencode/curious-river' into adam/bzip2-async 2026-01-20 16:03:11 +00:00
Adam Hathcock
cc47fde57f works? 2026-01-20 15:37:15 +00:00
Adam Hathcock
a8d5b8e86b intermediate commit 2026-01-20 15:19:46 +00:00
Adam Hathcock
0a9c5bfe15 format changes 2026-01-20 13:40:51 +00:00
Adam Hathcock
ff0769e988 Create factory for CBZip2InputStream 2026-01-20 13:21:11 +00:00
Adam Hathcock
3987733079 LZW async 2026-01-20 12:56:13 +00:00
Adam Hathcock
b26d38b7e4 another tar test fix 2026-01-20 12:34:49 +00:00
Adam Hathcock
2175cb299d tar fixes 2026-01-20 12:22:38 +00:00
Adam Hathcock
8abb972f87 Fix test 2026-01-20 11:01:17 +00:00
Adam Hathcock
05bf22f518 rar works now 2026-01-20 10:41:37 +00:00
Adam Hathcock
3b5ee481c5 fix for another async typo 2026-01-20 10:17:28 +00:00
Adam Hathcock
b54617238b more async fixes? 2026-01-20 10:09:13 +00:00
Adam Hathcock
44174e7b03 some fixes 2026-01-20 09:07:57 +00:00
Adam Hathcock
ecd9317ab3 more basic LLM async and fixed CRC async 2026-01-19 16:08:46 +00:00
Adam Hathcock
884f0b702e some grunt rar header async 2026-01-19 16:08:20 +00:00
Adam Hathcock
2e95832bea factory the headers instead of creating 2026-01-19 14:19:15 +00:00
Adam Hathcock
97879f18b6 Merge pull request #1146 from adamhathcock/adam/pr-1145-release
Merge pull request #1145 from adamhathcock/copilot/add-leaveopen-para…
2026-01-19 10:35:33 +00:00
Adam Hathcock
d74454f7e9 Merge pull request #1145 from adamhathcock/copilot/add-leaveopen-parameter-lzipstream
Add leaveOpen parameter to LZipStream and BZip2Stream
2026-01-19 09:58:10 +00:00
Adam Hathcock
ce01cc7ce1 Merge pull request #1145 from adamhathcock/copilot/add-leaveopen-parameter-lzipstream
Add leaveOpen parameter to LZipStream and BZip2Stream
2026-01-19 09:57:39 +00:00
copilot-swe-agent[bot]
9454466be7 Add comprehensive tests for leaveOpen behavior and fix BZip2 stream disposal
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-19 07:33:22 +00:00
copilot-swe-agent[bot]
0e4a159998 Add leaveOpen parameter to LZipStream and BZip2Stream
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-19 07:31:20 +00:00
copilot-swe-agent[bot]
4998676476 Initial plan 2026-01-19 07:22:01 +00:00
Adam Hathcock
f359f553b3 some minor fixes 2026-01-18 15:31:10 +00:00
Adam Hathcock
08118f7286 add more async writing 2026-01-18 15:07:02 +00:00
Adam Hathcock
408d2e6663 Async add entry 2026-01-18 14:57:01 +00:00
Adam Hathcock
4c4b727bd7 Tar detection works 2026-01-17 13:39:57 +00:00
Adam Hathcock
8e54b10b7f tar tests are better? 2026-01-16 15:10:08 +00:00
Adam Hathcock
f99e421115 fix factory 2026-01-16 15:04:01 +00:00
Adam Hathcock
82d56b9678 multi-file rars done manually 2026-01-16 13:43:26 +00:00
Adam Hathcock
447d35267f some fixes 2026-01-16 13:19:41 +00:00
Adam Hathcock
763805e03a async IsRarFile 2026-01-16 12:12:51 +00:00
Adam Hathcock
cd70a7760e remvoe AutoFactory 2026-01-16 11:44:12 +00:00
Adam Hathcock
ec7c359341 Arj works 2026-01-16 11:12:26 +00:00
Adam Hathcock
cc59c1960a fix ace tests 2026-01-16 10:49:18 +00:00
Adam Hathcock
1cc80e7675 Merge pull request #1141 from adamhathcock/copilot/sub-pr-1132
[WIP] Address feedback on async creation cleanup changes
2026-01-16 10:12:08 +00:00
Adam Hathcock
cfe59fc515 Merge branch 'adam/async-creation' into copilot/sub-pr-1132 2026-01-16 10:11:45 +00:00
copilot-swe-agent[bot]
2180df3318 Pass CancellationToken.None explicitly to OpenAsyncArchive methods
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-16 10:09:54 +00:00
Adam Hathcock
29f4c7fe2e Merge pull request #1142 from adamhathcock/copilot/sub-pr-1132-another-one
Fix ReadFullyAsync with ArrayPool buffer in SevenZipArchive signature check
2026-01-16 10:09:07 +00:00
Adam Hathcock
d5f9815561 Merge pull request #1136 from adamhathcock/adam/upgrade-xunit
Upgrade xunit to v3
2026-01-16 10:08:23 +00:00
copilot-swe-agent[bot]
6e5e47f041 Update SevenZipFactory to consistently call OpenAsyncArchive methods
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-16 10:07:37 +00:00
copilot-swe-agent[bot]
b0fde2b8c7 Fix ReadFullyAsync call to specify offset and count for ArrayPool buffer
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-16 10:03:48 +00:00
copilot-swe-agent[bot]
4b9b20de42 Initial plan 2026-01-16 09:59:14 +00:00
Adam Hathcock
f7c91bb26f Update src/SharpCompress/Factories/SevenZipFactory.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-01-16 09:58:09 +00:00
copilot-swe-agent[bot]
4b34dd61d3 Initial plan 2026-01-16 09:58:06 +00:00
Adam Hathcock
c958d184d0 Merge pull request #1137 from adamhathcock/copilot/sub-pr-1136
Fix async test failures after xunit v3 upgrade
2026-01-16 09:54:04 +00:00
copilot-swe-agent[bot]
0de5c59a77 Restore AsyncOnlyStream in archive async tests as requested
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-16 09:50:36 +00:00
Adam Hathcock
3b10be53b5 Merge pull request #1140 from adamhathcock/copilot/sub-pr-1132-another-one
Replace empty catch blocks with explicit exception handling in TarArchive validation methods
2026-01-16 09:39:45 +00:00
Adam Hathcock
5336eb6fe6 Merge pull request #1138 from adamhathcock/copilot/sub-pr-1132
Remove redundant stream field in AsyncOnlyStream
2026-01-16 09:38:42 +00:00
copilot-swe-agent[bot]
9fa686b8f9 Fix empty catch blocks in TarArchive.Factory.cs with explicit exception handling
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-16 09:35:24 +00:00
copilot-swe-agent[bot]
2012077fb0 Remove redundant _stream field from AsyncOnlyStream and use base Stream property
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-16 09:34:29 +00:00
copilot-swe-agent[bot]
302cf2e14f Initial plan 2026-01-16 09:30:05 +00:00
Adam Hathcock
b9fccbd691 Update src/SharpCompress/Factories/ZStandardFactory.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-01-16 09:29:13 +00:00
copilot-swe-agent[bot]
bbbbc8810a Initial plan 2026-01-16 09:29:09 +00:00
copilot-swe-agent[bot]
c7da19f3a5 Format code with CSharpier
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-16 09:26:04 +00:00
copilot-swe-agent[bot]
e919930cf6 Fix Archive async tests to not use AsyncOnlyStream (archives need seekable streams)
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-16 09:23:32 +00:00
copilot-swe-agent[bot]
2906529080 Fix ReaderFactory.OpenAsyncReader to use async IsArchiveAsync methods
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-16 09:12:54 +00:00
copilot-swe-agent[bot]
75cc36849b Initial plan 2026-01-16 09:00:13 +00:00
Adam Hathcock
63e124e72f Upgrade xunit to v3 2026-01-16 08:58:26 +00:00
Adam Hathcock
394d982168 Merge pull request #1133 from adamhathcock/copilot/sub-pr-1132
Add async I/O support for SevenZip archive initialization
2026-01-16 08:44:04 +00:00
Adam Hathcock
f4ce4cbad8 fix tests for both frameworks 2026-01-16 08:43:13 +00:00
Adam Hathcock
491beabe03 uncomment tests 2026-01-16 08:35:49 +00:00
copilot-swe-agent[bot]
9bb670ad19 Fix SevenZipArchive async stream handling by adding async Open and ReadDatabase methods
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-15 17:28:05 +00:00
copilot-swe-agent[bot]
bbba2e6c7a Initial plan for fixing SevenZipArchive_LZMA_AsyncStreamExtraction test
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-15 16:53:18 +00:00
copilot-swe-agent[bot]
0b2158f74c Initial plan 2026-01-15 16:44:57 +00:00
Adam Hathcock
5c06b8c48f enable single test 2026-01-15 16:41:58 +00:00
Adam Hathcock
810df8a18b revert lazy archive 2026-01-15 16:40:08 +00:00
Adam Hathcock
63736efcac Merge remote-tracking branch 'origin/master' into adam/async-creation
# Conflicts:
#	tests/SharpCompress.Test/SevenZip/SevenZipArchiveAsyncTests.cs
2026-01-15 16:21:30 +00:00
Adam Hathcock
33b6447c18 Merge remote-tracking branch 'origin/master' into adam/async-creation 2026-01-15 16:16:41 +00:00
Adam Hathcock
2d597e6e43 be more lazy with loading of sync stuff 2026-01-15 15:09:23 +00:00
Adam Hathcock
a410f73bf3 archive asyncs are more right 2026-01-15 14:52:10 +00:00
381 changed files with 23942 additions and 7165 deletions

View File

@@ -179,3 +179,58 @@ SharpCompress supports multiple archive and compression formats:
3. **Stream disposal** - Always set `LeaveStreamOpen` explicitly when needed (default is to close)
4. **Tar + non-seekable stream** - Must provide file size or it will throw
5. **Format detection** - Use `ReaderFactory.Open()` for auto-detection, test with actual archive files (see the sketch below)
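A minimal usage sketch for tips 3 and 5 (assuming the stable `ReaderFactory.Open` entry point; `archive.bin` is a placeholder path):
```csharp
using System.IO;
using SharpCompress.Readers;

using Stream stream = File.OpenRead("archive.bin");
// LeaveStreamOpen defaults to false, so the reader closes the stream on dispose.
using var reader = ReaderFactory.Open(stream, new ReaderOptions { LeaveStreamOpen = false });
while (reader.MoveToNextEntry())
{
    if (!reader.Entry.IsDirectory)
    {
        // Entries are exposed as forward-only streams during iteration.
        using var entryStream = reader.OpenEntryStream();
        // ... consume entryStream ...
    }
}
```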
### Async Struct-Copy Bug in LZMA RangeCoder
When implementing async methods on mutable `struct` types (like `BitEncoder` and `BitDecoder` in the LZMA RangeCoder), be aware that the compiler copies the struct (`this`) into the async state machine. Mutations to struct fields made inside the async method are applied to that copy, so they may not persist back to the original struct stored in arrays or fields.
**The Bug:**
```csharp
// BAD: async method on mutable struct
public async ValueTask<uint> DecodeAsync(Decoder decoder, CancellationToken cancellationToken = default)
{
    var newBound = (decoder._range >> K_NUM_BIT_MODEL_TOTAL_BITS) * _prob;
    if (decoder._code < newBound)
    {
        decoder._range = newBound;
        _prob += (K_BIT_MODEL_TOTAL - _prob) >> K_NUM_MOVE_BITS; // Mutates _prob
        await decoder.Normalize2Async(cancellationToken).ConfigureAwait(false); // Struct gets copied here
        return 0; // Original _prob update may be lost
    }
    // ...
}
```
**The Fix:**
Refactor async methods on mutable structs to perform all struct mutations synchronously before any `await`, or use a helper method to separate the await from the struct mutation:
```csharp
// GOOD: struct mutations happen synchronously, await is conditional
public ValueTask<uint> DecodeAsync(Decoder decoder, CancellationToken cancellationToken = default)
{
    var newBound = (decoder._range >> K_NUM_BIT_MODEL_TOTAL_BITS) * _prob;
    if (decoder._code < newBound)
    {
        decoder._range = newBound;
        _prob += (K_BIT_MODEL_TOTAL - _prob) >> K_NUM_MOVE_BITS; // All mutations complete
        return DecodeAsyncHelper(decoder.Normalize2Async(cancellationToken), 0); // Await in helper
    }
    decoder._range -= newBound;
    decoder._code -= newBound;
    _prob -= _prob >> K_NUM_MOVE_BITS; // All mutations complete
    return DecodeAsyncHelper(decoder.Normalize2Async(cancellationToken), 1); // Await in helper
}

private static async ValueTask<uint> DecodeAsyncHelper(ValueTask normalizeTask, uint result)
{
    await normalizeTask.ConfigureAwait(false);
    return result;
}
```
**Why This Matters:**
In LZMA, the `BitEncoder` and `BitDecoder` structs maintain adaptive probability models in their `_prob` field. When these structs are stored in arrays (e.g., `_models[m]`), the async state machine copy breaks the adaptive model, causing incorrect bit decoding and, eventually, a `DataErrorException`.
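The hazard is easy to reproduce outside LZMA. A standalone sketch (hypothetical `MutableCounter` type, not from this codebase):
```csharp
using System;
using System.Threading.Tasks;

internal struct MutableCounter
{
    public int Value;

    public async ValueTask IncrementAsync()
    {
        // The compiler copies 'this' into the async state machine, so this
        // increment mutates the copy, not the struct stored in the caller's array.
        Value++;
        await Task.Yield();
    }
}

internal static class StructCopyDemo
{
    private static async Task Main()
    {
        var models = new MutableCounter[1];
        await models[0].IncrementAsync();
        Console.WriteLine(models[0].Value); // prints 0: the increment was lost
    }
}
```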
**Related Files:**
- `src/SharpCompress/Compressors/LZMA/RangeCoder/RangeCoderBit.Async.cs` - Fixed
- `src/SharpCompress/Compressors/LZMA/RangeCoder/RangeCoderBitTree.Async.cs` - Uses readonly structs, so this pattern doesn't apply

View File

@@ -12,7 +12,7 @@
<PackageVersion Include="System.Text.Encoding.CodePages" Version="10.0.0" />
<PackageVersion Include="System.Buffers" Version="4.6.1" />
<PackageVersion Include="System.Memory" Version="4.6.3" />
<PackageVersion Include="xunit" Version="2.9.3" />
<PackageVersion Include="xunit.v3" Version="3.2.1" />
<PackageVersion Include="xunit.runner.visualstudio" Version="3.1.5" />
<GlobalPackageReference Include="Microsoft.SourceLink.GitHub" Version="8.0.0" />
<GlobalPackageReference Include="Microsoft.NETFramework.ReferenceAssemblies" Version="1.0.3" />

View File

@@ -230,7 +230,7 @@ static async Task<(string version, bool isPrerelease)> GetVersion()
}
else
{
// Not tagged - create prerelease version based on next minor version
// Not tagged - create prerelease version
var allTags = (await GetGitOutput("tag", "--list"))
.Split('\n', StringSplitOptions.RemoveEmptyEntries)
.Where(tag => Regex.IsMatch(tag.Trim(), @"^\d+\.\d+\.\d+$"))
@@ -240,8 +240,22 @@ static async Task<(string version, bool isPrerelease)> GetVersion()
var lastTag = allTags.OrderBy(tag => Version.Parse(tag)).LastOrDefault() ?? "0.0.0";
var lastVersion = Version.Parse(lastTag);
// Increment minor version for next release
var nextVersion = new Version(lastVersion.Major, lastVersion.Minor + 1, 0);
// Determine version increment based on branch
var currentBranch = await GetCurrentBranch();
Version nextVersion;
if (currentBranch == "release")
{
// Release branch: increment patch version
nextVersion = new Version(lastVersion.Major, lastVersion.Minor, lastVersion.Build + 1);
Console.WriteLine($"Building prerelease for release branch (patch increment)");
}
else
{
// Master or other branches: increment minor version
nextVersion = new Version(lastVersion.Major, lastVersion.Minor + 1, 0);
Console.WriteLine($"Building prerelease for {currentBranch} branch (minor increment)");
}
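// Example (illustrative): if the last tag is 0.39.0, a release-branch build
// computes 0.39.1 as the next version, while a master build computes 0.40.0.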
// Use commit count since the last version tag if available; otherwise, fall back to total count
var revListArgs = allTags.Any() ? $"--count {lastTag}..HEAD" : "--count HEAD";
@@ -253,6 +267,28 @@ static async Task<(string version, bool isPrerelease)> GetVersion()
}
}
static async Task<string> GetCurrentBranch()
{
// In GitHub Actions, GITHUB_REF_NAME contains the branch name
var githubRefName = Environment.GetEnvironmentVariable("GITHUB_REF_NAME");
if (!string.IsNullOrEmpty(githubRefName))
{
return githubRefName;
}
// Fallback to git command for local builds
try
{
var (output, _) = await ReadAsync("git", "branch --show-current");
return output.Trim();
}
catch (Exception ex)
{
Console.WriteLine($"Warning: Could not determine current branch: {ex.Message}");
return "unknown";
}
}
static async Task<string> GetGitOutput(string command, string args)
{
try

View File

@@ -0,0 +1,129 @@
# DataDescriptorStream and RewindableStream Fix
## Summary
Fixed the `Zip_Uncompressed_Read_All` test failure caused by an incompatibility between `DataDescriptorStream`'s seeking requirements and the new `RewindableStream` wrapper used in `StreamingZipHeaderFactory`.
## Problem Description
### Symptom
The test `Zip_Uncompressed_Read_All` was failing with:
```
System.NotSupportedException : Cannot seek outside buffered region.
```
### Root Cause
The issue had two related aspects:
#### 1. Double-Wrapping of RewindableStream
`StreamingZipHeaderFactory.ReadStreamHeader()` was creating a new `RewindableStream` wrapper:
```csharp
var rewindableStream = new RewindableStream(stream);
```
Because `ReaderFactory.OpenReader()` already wraps the input stream in `SeekableRewindableStream` (for seekable streams), this resulted in double-wrapping:
```
DataDescriptorStream
  -> NonDisposingStream
    -> RewindableStream (new, plain)  <-- created by ReadStreamHeader
      -> SeekableRewindableStream     <-- created by ReaderFactory
        -> FileStream
```
The inner plain `RewindableStream` lost the seeking capability of `SeekableRewindableStream`.
#### 2. Recording State Interference
Even after fixing the double-wrapping using `RewindableStream.EnsureSeekable()`, there was another issue:
`StreamingZipHeaderFactory.ReadStreamHeader()` contains code to peek ahead when checking for zero-length files with `UsePostDataDescriptor`:
```csharp
rewindableStream.StartRecording();
var nextHeaderBytes = reader.ReadUInt32();
rewindableStream.Rewind(true);
```
This code was interfering with the recording state that `ReaderFactory.OpenReader()` had set up:
1. `ReaderFactory.OpenReader()` calls `bStream.StartRecording()` at position 0
2. Factory detection calls `StreamingZipHeaderFactory.ReadStreamHeader()` via `IsZipFile()`
3. Inside `ReadStreamHeader`, the above code overwrites the recorded position
4. `Rewind(true)` stops recording and seeks to the wrong position
5. When control returns to `Factory.TryOpenReader()`, it calls `stream.Rewind(true)`, but recording is already stopped, so nothing happens
6. The stream position is not at the beginning, causing subsequent reads to fail
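The interference can be modeled in isolation (hypothetical `MockRewindable` type; the real `RewindableStream` has richer semantics):
```csharp
using System;

internal sealed class MockRewindable
{
    private long _position;
    private long _recordStart = -1;

    public long Position => _position;

    public void Read(int count) => _position += count; // pretend to consume bytes

    public void StartRecording() => _recordStart = _position; // clobbers any earlier mark

    public void Rewind(bool stopRecording)
    {
        if (_recordStart >= 0)
        {
            _position = _recordStart; // seek back to the recorded mark, if any
        }
        if (stopRecording)
        {
            _recordStart = -1; // later Rewind(true) calls become no-ops
        }
    }
}

internal static class InterferenceDemo
{
    private static void Main()
    {
        var stream = new MockRewindable();

        stream.StartRecording(); // 1. ReaderFactory.OpenReader records at position 0
        stream.Read(4);          // 2. format detection consumes header bytes

        stream.StartRecording(); // 3. peek-ahead overwrites the recorded position
        stream.Read(4);
        stream.Rewind(true);     // 4. rewinds to position 4 and stops recording

        stream.Rewind(true);     // 5. the caller's rewind is now a no-op

        Console.WriteLine(stream.Position); // 6. prints 4, not 0: later reads fail
    }
}
```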
## Solution
### Fix 1: Use EnsureSeekable instead of new RewindableStream
Changed `StreamingZipHeaderFactory.ReadStreamHeader()` to use:
```csharp
var rewindableStream = RewindableStream.EnsureSeekable(stream);
```
This method (see the sketch below):
- Returns the existing `RewindableStream` if the stream is already one (avoids double-wrapping)
- Creates a `SeekableRewindableStream` if the underlying stream is seekable
- Creates a plain `RewindableStream` only for non-seekable streams
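A sketch of that selection logic (assumed shape, not a verbatim excerpt; also assumes `SeekableRewindableStream` derives from `RewindableStream`):
```csharp
// Hypothetical sketch of RewindableStream.EnsureSeekable, per the three
// behaviors listed above; the actual implementation may differ.
public static RewindableStream EnsureSeekable(Stream stream) =>
    stream switch
    {
        RewindableStream existing => existing,                     // avoid double-wrapping
        { CanSeek: true } => new SeekableRewindableStream(stream), // full seeking
        _ => new RewindableStream(stream),                         // buffered rewind only
    };
```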
### Fix 2: Use direct position save/restore for SeekableRewindableStream
For the peek-ahead logic, changed the code to check for `SeekableRewindableStream` specifically and use direct position manipulation:
```csharp
if (rewindableStream is SeekableRewindableStream)
{
    // Direct position save/restore avoids interfering with caller's recording state
    var savedPosition = rewindableStream.Position;
    var nextHeaderBytes = reader.ReadUInt32();
    rewindableStream.Position = savedPosition;
    header.HasData = !IsHeader(nextHeaderBytes);
}
else
{
    // Plain RewindableStream was created fresh by EnsureSeekable, safe to use recording
    rewindableStream.StartRecording();
    var nextHeaderBytes = reader.ReadUInt32();
    rewindableStream.Rewind(true);
    header.HasData = !IsHeader(nextHeaderBytes);
}
```
This approach:
- For `SeekableRewindableStream` (reused from caller): Uses direct position save/restore to avoid clobbering the caller's recording state
- For plain `RewindableStream` (freshly created): Uses the recording mechanism which is safe since the stream isn't shared
## Files Changed
- `src/SharpCompress/Common/Zip/StreamingZipHeaderFactory.cs`
- `src/SharpCompress/Common/Zip/StreamingZipHeaderFactory.Async.cs`
## Design Notes
### Why not fix RewindableStream.CanSeek?
`RewindableStream.CanSeek` returns `true` even though it can only seek within its buffered region. We considered changing this to `false`, but:
1. It would be a breaking change for existing code that relies on `CanSeek`
2. The `RewindableStream` does provide limited seeking capability (within buffer)
3. Checking for `SeekableRewindableStream` specifically is more precise
### Stream Wrapper Hierarchy
Understanding the stream wrapper hierarchy is crucial:
**For seekable source streams (e.g., FileStream):**
```
SeekableRewindableStream (full seeking via underlying stream)
-> FileStream
```
**For non-seekable source streams (e.g., decompression streams):**
```
RewindableStream (limited seeking via buffer)
-> DecompressionStream
```
`DataDescriptorStream` needs backward seeking to position the stream correctly after finding the data descriptor marker. This is why proper stream wrapper selection matters.

View File

@@ -0,0 +1,103 @@
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
public abstract partial class AbstractArchive<TEntry, TVolume>
where TEntry : IArchiveEntry
where TVolume : IVolume
{
#region Async Support
// Async properties
public virtual IAsyncEnumerable<TEntry> EntriesAsync => _lazyEntriesAsync;
public IAsyncEnumerable<TVolume> VolumesAsync => _lazyVolumesAsync;
protected virtual async IAsyncEnumerable<TEntry> LoadEntriesAsync(
IAsyncEnumerable<TVolume> volumes
)
{
foreach (var item in LoadEntries(await volumes.ToListAsync()))
{
yield return item;
}
}
public virtual async ValueTask DisposeAsync()
{
if (!_disposed)
{
await foreach (var v in _lazyVolumesAsync)
{
v.Dispose();
}
foreach (var v in _lazyEntriesAsync.GetLoaded().Cast<Entry>())
{
v.Close();
}
_sourceStream?.Dispose();
_disposed = true;
}
}
private async ValueTask EnsureEntriesLoadedAsync()
{
await _lazyEntriesAsync.EnsureFullyLoaded();
await _lazyVolumesAsync.EnsureFullyLoaded();
}
private async IAsyncEnumerable<IArchiveEntry> EntriesAsyncCast()
{
await foreach (var entry in EntriesAsync)
{
yield return entry;
}
}
IAsyncEnumerable<IArchiveEntry> IAsyncArchive.EntriesAsync => EntriesAsyncCast();
IAsyncEnumerable<IVolume> IAsyncArchive.VolumesAsync => VolumesAsyncCast();
private async IAsyncEnumerable<IVolume> VolumesAsyncCast()
{
await foreach (var volume in _lazyVolumesAsync)
{
yield return volume;
}
}
public async ValueTask<IAsyncReader> ExtractAllEntriesAsync()
{
if (!await IsSolidAsync() && Type != ArchiveType.SevenZip)
{
throw new SharpCompressException(
"ExtractAllEntries can only be used on solid archives or 7Zip archives (which require random access)."
);
}
await EnsureEntriesLoadedAsync();
return await CreateReaderForSolidExtractionAsync();
}
public virtual ValueTask<bool> IsSolidAsync() => new(false);
public async ValueTask<bool> IsCompleteAsync()
{
await EnsureEntriesLoadedAsync();
return await EntriesAsync.AllAsync(x => x.IsComplete);
}
public async ValueTask<long> TotalSizeAsync() =>
await EntriesAsync.AggregateAsync(0L, (total, cf) => total + cf.CompressedSize);
public async ValueTask<long> TotalUncompressedSizeAsync() =>
await EntriesAsync.AggregateAsync(0L, (total, cf) => total + cf.Size);
public ValueTask<bool> IsEncryptedAsync() => new(IsEncrypted);
#endregion
}
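Taken together, this partial class gives archives a fully asynchronous surface. A consumption sketch (assuming `IAsyncArchive` is `IAsyncDisposable`, per `DisposeAsync` above, and using the `ArchiveFactory.OpenAsyncArchive` overloads added in this change):
```csharp
using System;
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Archives;

// Sketch: enumerate an archive's entries without blocking on I/O.
await using var archive = await ArchiveFactory.OpenAsyncArchive(File.OpenRead("test.zip"));
await foreach (var entry in archive.EntriesAsync)
{
    Console.WriteLine($"{entry.Key} ({entry.Size} bytes)");
}
```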

View File

@@ -7,7 +7,7 @@ using SharpCompress.Readers;
namespace SharpCompress.Archives;
public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IAsyncArchive
public abstract partial class AbstractArchive<TEntry, TVolume> : IArchive, IAsyncArchive
where TEntry : IArchiveEntry
where TVolume : IVolume
{
@@ -16,6 +16,10 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IAsyncArchive
private bool _disposed;
private readonly SourceStream? _sourceStream;
// Async fields - kept in original file per refactoring rules
private readonly LazyAsyncReadOnlyCollection<TVolume> _lazyVolumesAsync;
private readonly LazyAsyncReadOnlyCollection<TEntry> _lazyEntriesAsync;
protected ReaderOptions ReaderOptions { get; }
internal AbstractArchive(ArchiveType type, SourceStream sourceStream)
@@ -77,16 +81,6 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IAsyncArchive
protected virtual IAsyncEnumerable<TVolume> LoadVolumesAsync(SourceStream sourceStream) =>
LoadVolumes(sourceStream).ToAsyncEnumerable();
protected virtual async IAsyncEnumerable<TEntry> LoadEntriesAsync(
IAsyncEnumerable<TVolume> volumes
)
{
foreach (var item in LoadEntries(await volumes.ToListAsync()))
{
yield return item;
}
}
IEnumerable<IArchiveEntry> IArchive.Entries => Entries.Cast<IArchiveEntry>();
IEnumerable<IVolume> IArchive.Volumes => _lazyVolumes.Cast<IVolume>();
@@ -156,85 +150,4 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IAsyncArchive
return Entries.All(x => x.IsComplete);
}
}
#region Async Support
private readonly LazyAsyncReadOnlyCollection<TVolume> _lazyVolumesAsync;
private readonly LazyAsyncReadOnlyCollection<TEntry> _lazyEntriesAsync;
public virtual async ValueTask DisposeAsync()
{
if (!_disposed)
{
await foreach (var v in _lazyVolumesAsync)
{
v.Dispose();
}
foreach (var v in _lazyEntriesAsync.GetLoaded().Cast<Entry>())
{
v.Close();
}
_sourceStream?.Dispose();
_disposed = true;
}
}
private async ValueTask EnsureEntriesLoadedAsync()
{
await _lazyEntriesAsync.EnsureFullyLoaded();
await _lazyVolumesAsync.EnsureFullyLoaded();
}
public virtual IAsyncEnumerable<TEntry> EntriesAsync => _lazyEntriesAsync;
private async IAsyncEnumerable<IArchiveEntry> EntriesAsyncCast()
{
await foreach (var entry in EntriesAsync)
{
yield return entry;
}
}
IAsyncEnumerable<IArchiveEntry> IAsyncArchive.EntriesAsync => EntriesAsyncCast();
private async IAsyncEnumerable<IVolume> VolumesAsyncCast()
{
await foreach (var volume in VolumesAsync)
{
yield return volume;
}
}
public IAsyncEnumerable<IVolume> VolumesAsync => VolumesAsyncCast();
public async ValueTask<IAsyncReader> ExtractAllEntriesAsync()
{
if (!IsSolid && Type != ArchiveType.SevenZip)
{
throw new SharpCompressException(
"ExtractAllEntries can only be used on solid archives or 7Zip archives (which require random access)."
);
}
await EnsureEntriesLoadedAsync();
return await CreateReaderForSolidExtractionAsync();
}
public virtual ValueTask<bool> IsSolidAsync() => new(false);
public async ValueTask<bool> IsCompleteAsync()
{
await EnsureEntriesLoadedAsync();
return await EntriesAsync.AllAsync(x => x.IsComplete);
}
public async ValueTask<long> TotalSizeAsync() =>
await EntriesAsync.AggregateAsync(0L, (total, cf) => total + cf.CompressedSize);
public async ValueTask<long> TotalUncompressedSizeAsync() =>
await EntriesAsync.AggregateAsync(0L, (total, cf) => total + cf.Size);
public ValueTask<bool> IsEncryptedAsync() => new(IsEncrypted);
#endregion
}

View File

@@ -0,0 +1,123 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Writers;
namespace SharpCompress.Archives;
public abstract partial class AbstractWritableArchive<TEntry, TVolume>
where TEntry : IArchiveEntry
where TVolume : IVolume
{
// Async property moved from main file
private IAsyncEnumerable<TEntry> OldEntriesAsync =>
base.EntriesAsync.Where(x => !removedEntries.Contains(x));
private async ValueTask RebuildModifiedCollectionAsync()
{
if (pauseRebuilding)
{
return;
}
hasModifications = true;
newEntries.RemoveAll(v => removedEntries.Contains(v));
modifiedEntries.Clear();
await foreach (var entry in OldEntriesAsync)
{
modifiedEntries.Add(entry);
}
modifiedEntries.AddRange(newEntries);
}
public async ValueTask RemoveEntryAsync(TEntry entry)
{
if (!removedEntries.Contains(entry))
{
removedEntries.Add(entry);
await RebuildModifiedCollectionAsync();
}
}
private async ValueTask<bool> DoesKeyMatchExistingAsync(
string key,
CancellationToken cancellationToken
)
{
await foreach (
var entry in EntriesAsync.WithCancellation(cancellationToken).ConfigureAwait(false)
)
{
var path = entry.Key;
if (path is null)
{
continue;
}
var p = path.Replace('/', '\\');
if (p.Length > 0 && p[0] == '\\')
{
p = p.Substring(1);
}
if (string.Equals(p, key, StringComparison.OrdinalIgnoreCase))
{
return true;
}
}
return false;
}
public async ValueTask<TEntry> AddEntryAsync(
string key,
Stream source,
bool closeStream,
long size = 0,
DateTime? modified = null,
CancellationToken cancellationToken = default
)
{
if (key.Length > 0 && key[0] is '/' or '\\')
{
key = key.Substring(1);
}
if (await DoesKeyMatchExistingAsync(key, cancellationToken).ConfigureAwait(false))
{
throw new ArchiveException("Cannot add entry with duplicate key: " + key);
}
var entry = CreateEntry(key, source, size, modified, closeStream);
newEntries.Add(entry);
await RebuildModifiedCollectionAsync();
return entry;
}
public async ValueTask<TEntry> AddDirectoryEntryAsync(
string key,
DateTime? modified = null,
CancellationToken cancellationToken = default
)
{
if (key.Length > 0 && key[0] is '/' or '\\')
{
key = key.Substring(1);
}
if (await DoesKeyMatchExistingAsync(key, cancellationToken).ConfigureAwait(false))
{
throw new ArchiveException("Cannot add entry with duplicate key: " + key);
}
var entry = CreateDirectoryEntry(key, modified);
newEntries.Add(entry);
await RebuildModifiedCollectionAsync();
return entry;
}
public async ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
CancellationToken cancellationToken = default
)
{
//reset streams of new entries
newEntries.Cast<IWritableArchiveEntry>().ForEach(x => x.Stream.Seek(0, SeekOrigin.Begin));
await SaveToAsync(stream, options, OldEntriesAsync, newEntries, cancellationToken)
.ConfigureAwait(false);
}
}

View File

@@ -10,7 +10,7 @@ using SharpCompress.Writers;
namespace SharpCompress.Archives;
public abstract class AbstractWritableArchive<TEntry, TVolume>
public abstract partial class AbstractWritableArchive<TEntry, TVolume>
: AbstractArchive<TEntry, TVolume>,
IWritableArchive,
IWritableAsyncArchive
@@ -84,12 +84,12 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
}
}
void IWritableArchiveCommon.RemoveEntry(IArchiveEntry entry) => RemoveEntry((TEntry)entry);
void IWritableArchive.RemoveEntry(IArchiveEntry entry) => RemoveEntry((TEntry)entry);
public TEntry AddEntry(string key, Stream source, long size = 0, DateTime? modified = null) =>
AddEntry(key, source, false, size, modified);
IArchiveEntry IWritableArchiveCommon.AddEntry(
IArchiveEntry IWritableArchive.AddEntry(
string key,
Stream source,
bool closeStream,
@@ -97,7 +97,7 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
DateTime? modified
) => AddEntry(key, source, closeStream, size, modified);
IArchiveEntry IWritableArchiveCommon.AddDirectoryEntry(string key, DateTime? modified) =>
IArchiveEntry IWritableArchive.AddDirectoryEntry(string key, DateTime? modified) =>
AddDirectoryEntry(key, modified);
public TEntry AddEntry(
@@ -140,6 +140,24 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
return false;
}
ValueTask IWritableAsyncArchive.RemoveEntryAsync(IArchiveEntry entry) =>
RemoveEntryAsync((TEntry)entry);
async ValueTask<IArchiveEntry> IWritableAsyncArchive.AddEntryAsync(
string key,
Stream source,
bool closeStream,
long size,
DateTime? modified,
CancellationToken cancellationToken
) => await AddEntryAsync(key, source, closeStream, size, modified, cancellationToken);
async ValueTask<IArchiveEntry> IWritableAsyncArchive.AddDirectoryEntryAsync(
string key,
DateTime? modified,
CancellationToken cancellationToken
) => await AddDirectoryEntryAsync(key, modified, cancellationToken);
public TEntry AddDirectoryEntry(string key, DateTime? modified = null)
{
if (key.Length > 0 && key[0] is '/' or '\\')
@@ -163,18 +181,6 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
SaveTo(stream, options, OldEntries, newEntries);
}
public async ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
CancellationToken cancellationToken = default
)
{
//reset streams of new entries
newEntries.Cast<IWritableArchiveEntry>().ForEach(x => x.Stream.Seek(0, SeekOrigin.Begin));
await SaveToAsync(stream, options, OldEntries, newEntries, cancellationToken)
.ConfigureAwait(false);
}
protected TEntry CreateEntry(
string key,
Stream source,
@@ -212,7 +218,7 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
protected abstract ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
IEnumerable<TEntry> oldEntries,
IAsyncEnumerable<TEntry> oldEntries,
IEnumerable<TEntry> newEntries,
CancellationToken cancellationToken = default
);

View File

@@ -0,0 +1,158 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Factories;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
public static partial class ArchiveFactory
{
public static async ValueTask<IAsyncArchive> OpenAsyncArchive(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
readerOptions ??= new ReaderOptions();
stream = SharpCompressStream.Create(stream, bufferSize: readerOptions.BufferSize);
var factory = await FindFactoryAsync<IArchiveFactory>(stream, cancellationToken);
return factory.OpenAsyncArchive(stream, readerOptions);
}
public static ValueTask<IAsyncArchive> OpenAsyncArchive(
string filePath,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
filePath.NotNullOrEmpty(nameof(filePath));
return OpenAsyncArchive(new FileInfo(filePath), options, cancellationToken);
}
public static async ValueTask<IAsyncArchive> OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
options ??= new ReaderOptions { LeaveStreamOpen = false };
var factory = await FindFactoryAsync<IArchiveFactory>(fileInfo, cancellationToken);
return factory.OpenAsyncArchive(fileInfo, options);
}
public static async ValueTask<IAsyncArchive> OpenAsyncArchive(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
fileInfos.NotNull(nameof(fileInfos));
var filesArray = fileInfos.ToArray();
if (filesArray.Length == 0)
{
throw new InvalidOperationException("No files to open");
}
var fileInfo = filesArray[0];
if (filesArray.Length == 1)
{
return await OpenAsyncArchive(fileInfo, options, cancellationToken);
}
fileInfo.NotNull(nameof(fileInfo));
options ??= new ReaderOptions { LeaveStreamOpen = false };
var factory = await FindFactoryAsync<IMultiArchiveFactory>(fileInfo, cancellationToken);
return factory.OpenAsyncArchive(filesArray, options, cancellationToken);
}
public static async ValueTask<IAsyncArchive> OpenAsyncArchive(
IEnumerable<Stream> streams,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
streams.NotNull(nameof(streams));
var streamsArray = streams.ToArray();
if (streamsArray.Length == 0)
{
throw new InvalidOperationException("No streams");
}
var firstStream = streamsArray[0];
if (streamsArray.Length == 1)
{
return await OpenAsyncArchive(firstStream, options, cancellationToken);
}
firstStream.NotNull(nameof(firstStream));
options ??= new ReaderOptions();
var factory = await FindFactoryAsync<IMultiArchiveFactory>(firstStream, cancellationToken);
return factory.OpenAsyncArchive(streamsArray, options);
}
public static ValueTask<T> FindFactoryAsync<T>(
string path,
CancellationToken cancellationToken = default
)
where T : IFactory
{
path.NotNullOrEmpty(nameof(path));
return FindFactoryAsync<T>(new FileInfo(path), cancellationToken);
}
private static async ValueTask<T> FindFactoryAsync<T>(
FileInfo finfo,
CancellationToken cancellationToken
)
where T : IFactory
{
finfo.NotNull(nameof(finfo));
using Stream stream = finfo.OpenRead();
return await FindFactoryAsync<T>(stream, cancellationToken);
}
private static async ValueTask<T> FindFactoryAsync<T>(
Stream stream,
CancellationToken cancellationToken
)
where T : IFactory
{
stream.NotNull(nameof(stream));
if (!stream.CanRead || !stream.CanSeek)
{
throw new ArgumentException("Stream should be readable and seekable");
}
var factories = Factory.Factories.OfType<T>();
var startPosition = stream.Position;
foreach (var factory in factories)
{
stream.Seek(startPosition, SeekOrigin.Begin);
if (await factory.IsArchiveAsync(stream, cancellationToken: cancellationToken))
{
stream.Seek(startPosition, SeekOrigin.Begin);
return factory;
}
}
var extensions = string.Join(", ", factories.Select(item => item.Name));
throw new InvalidOperationException(
$"Cannot determine compressed stream type. Supported Archive Formats: {extensions}"
);
}
}

View File

@@ -11,7 +11,7 @@ using SharpCompress.Readers;
namespace SharpCompress.Archives;
public static class ArchiveFactory
public static partial class ArchiveFactory
{
public static IArchive OpenArchive(Stream stream, ReaderOptions? readerOptions = null)
{
@@ -20,18 +20,6 @@ public static class ArchiveFactory
return FindFactory<IArchiveFactory>(stream).OpenArchive(stream, readerOptions);
}
public static async ValueTask<IAsyncArchive> OpenAsyncArchive(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
readerOptions ??= new ReaderOptions();
stream = SharpCompressStream.Create(stream, bufferSize: readerOptions.BufferSize);
var factory = await FindFactoryAsync<IArchiveFactory>(stream, cancellationToken);
return factory.OpenAsyncArchive(stream, readerOptions);
}
public static IWritableArchive CreateArchive(ArchiveType type)
{
var factory = Factory
@@ -52,16 +40,6 @@ public static class ArchiveFactory
return OpenArchive(new FileInfo(filePath), options);
}
public static ValueTask<IAsyncArchive> OpenAsyncArchive(
string filePath,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
filePath.NotNullOrEmpty(nameof(filePath));
return OpenAsyncArchive(new FileInfo(filePath), options, cancellationToken);
}
public static IArchive OpenArchive(FileInfo fileInfo, ReaderOptions? options = null)
{
options ??= new ReaderOptions { LeaveStreamOpen = false };
@@ -69,18 +47,6 @@ public static class ArchiveFactory
return FindFactory<IArchiveFactory>(fileInfo).OpenArchive(fileInfo, options);
}
public static async ValueTask<IAsyncArchive> OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
options ??= new ReaderOptions { LeaveStreamOpen = false };
var factory = await FindFactoryAsync<IArchiveFactory>(fileInfo, cancellationToken);
return factory.OpenAsyncArchive(fileInfo, options, cancellationToken);
}
public static IArchive OpenArchive(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? options = null
@@ -105,32 +71,6 @@ public static class ArchiveFactory
return FindFactory<IMultiArchiveFactory>(fileInfo).OpenArchive(filesArray, options);
}
public static async ValueTask<IAsyncArchive> OpenAsyncArchive(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
fileInfos.NotNull(nameof(fileInfos));
var filesArray = fileInfos.ToArray();
if (filesArray.Length == 0)
{
throw new InvalidOperationException("No files to open");
}
var fileInfo = filesArray[0];
if (filesArray.Length == 1)
{
return await OpenAsyncArchive(fileInfo, options, cancellationToken);
}
fileInfo.NotNull(nameof(fileInfo));
options ??= new ReaderOptions { LeaveStreamOpen = false };
var factory = await FindFactoryAsync<IMultiArchiveFactory>(fileInfo, cancellationToken);
return factory.OpenAsyncArchive(filesArray, options, cancellationToken);
}
public static IArchive OpenArchive(IEnumerable<Stream> streams, ReaderOptions? options = null)
{
streams.NotNull(nameof(streams));
@@ -152,33 +92,6 @@ public static class ArchiveFactory
return FindFactory<IMultiArchiveFactory>(firstStream).OpenArchive(streamsArray, options);
}
public static async ValueTask<IAsyncArchive> OpenAsyncArchive(
IEnumerable<Stream> streams,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
streams.NotNull(nameof(streams));
var streamsArray = streams.ToArray();
if (streamsArray.Length == 0)
{
throw new InvalidOperationException("No streams");
}
var firstStream = streamsArray[0];
if (streamsArray.Length == 1)
{
return await OpenAsyncArchive(firstStream, options, cancellationToken);
}
firstStream.NotNull(nameof(firstStream));
options ??= new ReaderOptions();
var factory = FindFactory<IMultiArchiveFactory>(firstStream);
return factory.OpenAsyncArchive(streamsArray, options);
}
public static void WriteToDirectory(
string sourceArchive,
string destinationDirectory,
@@ -189,7 +102,15 @@ public static class ArchiveFactory
archive.WriteToDirectory(destinationDirectory, options);
}
private static T FindFactory<T>(FileInfo finfo)
public static T FindFactory<T>(string path)
where T : IFactory
{
path.NotNullOrEmpty(nameof(path));
using Stream stream = File.OpenRead(path);
return FindFactory<T>(stream);
}
public static T FindFactory<T>(FileInfo finfo)
where T : IFactory
{
finfo.NotNull(nameof(finfo));
@@ -197,7 +118,7 @@ public static class ArchiveFactory
return FindFactory<T>(stream);
}
private static T FindFactory<T>(Stream stream)
public static T FindFactory<T>(Stream stream)
where T : IFactory
{
stream.NotNull(nameof(stream));
@@ -229,68 +150,14 @@ public static class ArchiveFactory
);
}
private static async ValueTask<T> FindFactoryAsync<T>(
FileInfo finfo,
CancellationToken cancellationToken
)
where T : IFactory
{
finfo.NotNull(nameof(finfo));
using Stream stream = finfo.OpenRead();
return await FindFactoryAsync<T>(stream, cancellationToken);
}
private static async ValueTask<T> FindFactoryAsync<T>(
Stream stream,
CancellationToken cancellationToken
)
where T : IFactory
{
stream.NotNull(nameof(stream));
if (!stream.CanRead || !stream.CanSeek)
{
throw new ArgumentException("Stream should be readable and seekable");
}
var factories = Factory.Factories.OfType<T>();
var startPosition = stream.Position;
foreach (var factory in factories)
{
stream.Seek(startPosition, SeekOrigin.Begin);
if (await factory.IsArchiveAsync(stream, cancellationToken: cancellationToken))
{
stream.Seek(startPosition, SeekOrigin.Begin);
return factory;
}
}
var extensions = string.Join(", ", factories.Select(item => item.Name));
throw new InvalidOperationException(
$"Cannot determine compressed stream type. Supported Archive Formats: {extensions}"
);
}
public static bool IsArchive(
string filePath,
out ArchiveType? type,
int bufferSize = ReaderOptions.DefaultBufferSize
)
public static bool IsArchive(string filePath, out ArchiveType? type)
{
filePath.NotNullOrEmpty(nameof(filePath));
using Stream s = File.OpenRead(filePath);
return IsArchive(s, out type, bufferSize);
return IsArchive(s, out type);
}
public static bool IsArchive(
Stream stream,
out ArchiveType? type,
int bufferSize = ReaderOptions.DefaultBufferSize
)
public static bool IsArchive(Stream stream, out ArchiveType? type)
{
type = null;
stream.NotNull(nameof(stream));
@@ -345,6 +212,4 @@ public static class ArchiveFactory
}
}
}
public static IArchiveFactory AutoFactory { get; } = new AutoArchiveFactory();
}
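For quick format detection without opening an archive, the trimmed-down IsArchive overloads can be used like this (path illustrative):

if (ArchiveFactory.IsArchive("backup.bin", out var type))
{
    Console.WriteLine($"Detected archive type: {type}");
}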

View File

@@ -13,6 +13,7 @@ internal abstract class ArchiveVolumeFactory
//split 001, 002 ...
var m = Regex.Match(part1.Name, @"^(.*\.)([0-9]+)$", RegexOptions.IgnoreCase);
if (m.Success)
{
item = new FileInfo(
Path.Combine(
part1.DirectoryName!,
@@ -22,9 +23,13 @@ internal abstract class ArchiveVolumeFactory
)
)
);
}
if (item != null && item.Exists)
{
return item;
}
return null;
}
}
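As a sketch of the suffix arithmetic this naming logic implies (a hypothetical reconstruction, since the Path.Combine body is elided above), a split set is probed by rewriting the numeric extension while keeping its padding width:

// "archive.001" -> "archive.002" for the next part
var m = Regex.Match("archive.001", @"^(.*\.)([0-9]+)$", RegexOptions.IgnoreCase);
var digits = m.Groups[2].Value;
var next = m.Groups[1].Value + (int.Parse(digits) + 1).ToString().PadLeft(digits.Length, '0');
// next == "archive.002"; the factory then checks Exists before returning the part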

View File

@@ -1,52 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
internal class AutoArchiveFactory : IArchiveFactory
{
public string Name => nameof(AutoArchiveFactory);
public ArchiveType? KnownArchiveType => null;
public IEnumerable<string> GetSupportedExtensions() => throw new NotSupportedException();
public bool IsArchive(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => throw new NotSupportedException();
public ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
) => throw new NotSupportedException();
public FileInfo? GetFilePart(int index, FileInfo part1) => throw new NotSupportedException();
public IArchive OpenArchive(Stream stream, ReaderOptions? readerOptions = null) =>
ArchiveFactory.OpenArchive(stream, readerOptions);
public IAsyncArchive OpenAsyncArchive(Stream stream, ReaderOptions? readerOptions = null) =>
(IAsyncArchive)OpenArchive(stream, readerOptions);
public IArchive OpenArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
ArchiveFactory.OpenArchive(fileInfo, readerOptions);
public IAsyncArchive OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)OpenArchive(fileInfo, readerOptions);
}
}

View File

@@ -0,0 +1,86 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.GZip;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.GZip;
using SharpCompress.Writers;
using SharpCompress.Writers.GZip;
namespace SharpCompress.Archives.GZip;
public partial class GZipArchive
{
public ValueTask SaveToAsync(string filePath, CancellationToken cancellationToken = default) =>
SaveToAsync(new FileInfo(filePath), cancellationToken);
public async ValueTask SaveToAsync(
FileInfo fileInfo,
CancellationToken cancellationToken = default
)
{
using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
await SaveToAsync(stream, new WriterOptions(CompressionType.GZip), cancellationToken)
.ConfigureAwait(false);
}
protected override async ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
IAsyncEnumerable<GZipArchiveEntry> oldEntries,
IEnumerable<GZipArchiveEntry> newEntries,
CancellationToken cancellationToken = default
)
{
if (Entries.Count > 1)
{
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
}
using var writer = new GZipWriter(stream, new GZipWriterOptions(options));
await foreach (
var entry in oldEntries.WithCancellation(cancellationToken).ConfigureAwait(false)
)
{
if (!entry.IsDirectory)
{
using var entryStream = entry.OpenEntryStream();
await writer
.WriteAsync(
entry.Key.NotNull("Entry Key is null"),
entryStream,
cancellationToken
)
.ConfigureAwait(false);
}
}
foreach (var entry in newEntries.Where(x => !x.IsDirectory))
{
using var entryStream = entry.OpenEntryStream();
await writer
.WriteAsync(entry.Key.NotNull("Entry Key is null"), entryStream, cancellationToken)
.ConfigureAwait(false);
}
}
protected override ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync()
{
var stream = Volumes.Single().Stream;
stream.Position = 0;
return new((IAsyncReader)GZipReader.OpenReader(stream));
}
protected override async IAsyncEnumerable<GZipArchiveEntry> LoadEntriesAsync(
IAsyncEnumerable<GZipVolume> volumes
)
{
var stream = (await volumes.SingleAsync()).Stream;
yield return new GZipArchiveEntry(
this,
await GZipFilePart.CreateAsync(stream, ReaderOptions.ArchiveEncoding)
);
}
}
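A usage sketch for the async save path above (CreateArchive is assumed here by analogy with TarArchive.CreateArchive further down; file names are illustrative):

using var archive = GZipArchive.CreateArchive();
archive.AddEntry("data.txt", File.OpenRead("data.txt"), closeStream: true);
// GZip allows exactly one entry; adding more makes SaveToAsync throw InvalidFormatException.
await archive.SaveToAsync("data.txt.gz");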

View File

@@ -1,4 +1,5 @@
using System;
using System.Buffers;
using System.Collections.Generic;
using System.IO;
using System.Linq;
@@ -180,18 +181,21 @@ public partial class GZipArchive
CancellationToken cancellationToken = default
)
{
byte[] header = new byte[10];
if (!await stream.ReadFullyAsync(header, cancellationToken).ConfigureAwait(false))
{
return false;
}
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
{
return false;
}
return true;
var header = ArrayPool<byte>.Shared.Rent(10);
try
{
await stream.ReadFullyAsync(header, 0, 10, cancellationToken).ConfigureAwait(false);
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
{
return false;
}
return true;
}
finally
{
ArrayPool<byte>.Shared.Return(header);
}
}
}
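The check above reduces to the fixed GZip magic bytes (0x1F, 0x8B) plus the compression-method byte, where 8 means deflate; a standalone sketch:

static bool LooksLikeGZip(ReadOnlySpan<byte> header) =>
    header.Length >= 3
    && header[0] == 0x1F // ID1
    && header[1] == 0x8B // ID2
    && header[2] == 8;   // CM: deflate, the only method in common use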

View File

@@ -36,19 +36,6 @@ public partial class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZi
SaveTo(stream, new WriterOptions(CompressionType.GZip));
}
public ValueTask SaveToAsync(string filePath, CancellationToken cancellationToken = default) =>
SaveToAsync(new FileInfo(filePath), cancellationToken);
public async ValueTask SaveToAsync(
FileInfo fileInfo,
CancellationToken cancellationToken = default
)
{
using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
await SaveToAsync(stream, new WriterOptions(CompressionType.GZip), cancellationToken)
.ConfigureAwait(false);
}
protected override GZipArchiveEntry CreateEntryInternal(
string filePath,
Stream source,
@@ -92,28 +79,6 @@ public partial class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZi
}
}
protected override async ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
IEnumerable<GZipArchiveEntry> oldEntries,
IEnumerable<GZipArchiveEntry> newEntries,
CancellationToken cancellationToken = default
)
{
if (Entries.Count > 1)
{
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
}
using var writer = new GZipWriter(stream, new GZipWriterOptions(options));
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
{
using var entryStream = entry.OpenEntryStream();
await writer
.WriteAsync(entry.Key.NotNull("Entry Key is null"), entryStream, cancellationToken)
.ConfigureAwait(false);
}
}
protected override IEnumerable<GZipArchiveEntry> LoadEntries(IEnumerable<GZipVolume> volumes)
{
var stream = volumes.Single().Stream;
@@ -123,28 +88,10 @@ public partial class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZi
);
}
protected override async IAsyncEnumerable<GZipArchiveEntry> LoadEntriesAsync(
IAsyncEnumerable<GZipVolume> volumes
)
{
var stream = (await volumes.SingleAsync()).Stream;
yield return new GZipArchiveEntry(
this,
await GZipFilePart.CreateAsync(stream, ReaderOptions.ArchiveEncoding)
);
}
protected override IReader CreateReaderForSolidExtraction()
{
var stream = Volumes.Single().Stream;
stream.Position = 0;
return GZipReader.OpenReader(stream);
}
protected override ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync()
{
var stream = Volumes.Single().Stream;
stream.Position = 0;
return new((IAsyncReader)GZipReader.OpenReader(stream));
}
}

View File

@@ -9,8 +9,6 @@ namespace SharpCompress.Archives;
public static class IArchiveEntryExtensions
{
private const int BufferSize = 81920;
/// <param name="archiveEntry">The archive entry to extract.</param>
extension(IArchiveEntry archiveEntry)
{
@@ -28,7 +26,7 @@ public static class IArchiveEntryExtensions
using var entryStream = archiveEntry.OpenEntryStream();
var sourceStream = WrapWithProgress(entryStream, archiveEntry, progress);
sourceStream.CopyTo(streamToWriteTo, BufferSize);
sourceStream.CopyTo(streamToWriteTo, Constants.BufferSize);
}
/// <summary>
@@ -51,7 +49,7 @@ public static class IArchiveEntryExtensions
using var entryStream = await archiveEntry.OpenEntryStreamAsync(cancellationToken);
var sourceStream = WrapWithProgress(entryStream, archiveEntry, progress);
await sourceStream
.CopyToAsync(streamToWriteTo, BufferSize, cancellationToken)
.CopyToAsync(streamToWriteTo, Constants.BufferSize, cancellationToken)
.ConfigureAwait(false);
}
}

View File

@@ -47,9 +47,5 @@ public interface IArchiveFactory : IFactory
/// <param name="fileInfo">the file to open.</param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
IAsyncArchive OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
IAsyncArchive OpenAsyncArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null);
}

View File

@@ -20,7 +20,7 @@ public static class IAsyncArchiveExtensions
/// <param name="options">Extraction options.</param>
/// <param name="progress">Optional progress reporter for tracking extraction progress.</param>
/// <param name="cancellationToken">Optional cancellation token.</param>
public async Task WriteToDirectoryAsync(
public async ValueTask WriteToDirectoryAsync(
string destinationDirectory,
ExtractionOptions? options = null,
IProgress<ProgressReport>? progress = null,
@@ -47,7 +47,7 @@ public static class IAsyncArchiveExtensions
}
}
private async Task WriteToDirectoryAsyncInternal(
private async ValueTask WriteToDirectoryAsyncInternal(
string destinationDirectory,
ExtractionOptions? options,
IProgress<ProgressReport>? progress,

View File

@@ -13,12 +13,10 @@ public interface IWritableArchiveCommon
/// </summary>
/// <returns>IDisposeable to resume entry rebuilding</returns>
IDisposable PauseEntryRebuilding();
}
/// <summary>
/// Removes the specified entry from the archive.
/// </summary>
void RemoveEntry(IArchiveEntry entry);
public interface IWritableArchive : IArchive, IWritableArchiveCommon
{
IArchiveEntry AddEntry(
string key,
Stream source,
@@ -28,14 +26,16 @@ public interface IWritableArchiveCommon
);
IArchiveEntry AddDirectoryEntry(string key, DateTime? modified = null);
}
public interface IWritableArchive : IArchive, IWritableArchiveCommon
{
/// <summary>
/// Saves the archive to the specified stream using the given writer options.
/// </summary>
void SaveTo(Stream stream, WriterOptions options);
/// <summary>
/// Removes the specified entry from the archive.
/// </summary>
void RemoveEntry(IArchiveEntry entry);
}
public interface IWritableAsyncArchive : IAsyncArchive, IWritableArchiveCommon
@@ -48,4 +48,30 @@ public interface IWritableAsyncArchive : IAsyncArchive, IWritableArchiveCommon
WriterOptions options,
CancellationToken cancellationToken = default
);
/// <summary>
/// Asynchronously adds an entry to the archive with the specified key, source stream, and options.
/// </summary>
ValueTask<IArchiveEntry> AddEntryAsync(
string key,
Stream source,
bool closeStream,
long size = 0,
DateTime? modified = null,
CancellationToken cancellationToken = default
);
/// <summary>
/// Asynchronously adds a directory entry to the archive with the specified key and modification time.
/// </summary>
ValueTask<IArchiveEntry> AddDirectoryEntryAsync(
string key,
DateTime? modified = null,
CancellationToken cancellationToken = default
);
/// <summary>
/// Removes the specified entry from the archive.
/// </summary>
ValueTask RemoveEntryAsync(IArchiveEntry entry);
}

View File

@@ -1,59 +0,0 @@
using System;
using System.IO;
namespace SharpCompress.Archives;
public static class IWritableArchiveCommonExtensions
{
extension(IWritableArchiveCommon writableArchive)
{
public void AddAllFromDirectory(
string filePath,
string searchPattern = "*.*",
SearchOption searchOption = SearchOption.AllDirectories
)
{
using (writableArchive.PauseEntryRebuilding())
{
foreach (
var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption)
)
{
var fileInfo = new FileInfo(path);
writableArchive.AddEntry(
path.Substring(filePath.Length),
fileInfo.OpenRead(),
true,
fileInfo.Length,
fileInfo.LastWriteTime
);
}
}
}
public IArchiveEntry AddEntry(string key, string file) =>
writableArchive.AddEntry(key, new FileInfo(file));
public IArchiveEntry AddEntry(
string key,
Stream source,
long size = 0,
DateTime? modified = null
) => writableArchive.AddEntry(key, source, false, size, modified);
public IArchiveEntry AddEntry(string key, FileInfo fileInfo)
{
if (!fileInfo.Exists)
{
throw new ArgumentException("FileInfo does not exist.");
}
return writableArchive.AddEntry(
key,
fileInfo.OpenRead(),
true,
fileInfo.Length,
fileInfo.LastWriteTime
);
}
}
}

View File

@@ -1,3 +1,4 @@
using System;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Writers;
@@ -8,6 +9,55 @@ public static class IWritableArchiveExtensions
{
extension(IWritableArchive writableArchive)
{
public void AddAllFromDirectory(
string filePath,
string searchPattern = "*.*",
SearchOption searchOption = SearchOption.AllDirectories
)
{
using (writableArchive.PauseEntryRebuilding())
{
foreach (
var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption)
)
{
var fileInfo = new FileInfo(path);
writableArchive.AddEntry(
path.Substring(filePath.Length),
fileInfo.OpenRead(),
true,
fileInfo.Length,
fileInfo.LastWriteTime
);
}
}
}
public IArchiveEntry AddEntry(string key, string file) =>
writableArchive.AddEntry(key, new FileInfo(file));
public IArchiveEntry AddEntry(
string key,
Stream source,
long size = 0,
DateTime? modified = null
) => writableArchive.AddEntry(key, source, false, size, modified);
public IArchiveEntry AddEntry(string key, FileInfo fileInfo)
{
if (!fileInfo.Exists)
{
throw new ArgumentException("FileInfo does not exist.");
}
return writableArchive.AddEntry(
key,
fileInfo.OpenRead(),
true,
fileInfo.Length,
fileInfo.LastWriteTime
);
}
public void SaveTo(string filePath, WriterOptions? options = null) =>
writableArchive.SaveTo(new FileInfo(filePath), options ?? new(CompressionType.Deflate));
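These members use the C# extension-block syntax, so they are called like instance methods on any IWritableArchive. A usage sketch (CreateArchive is assumed by analogy with TarArchive.CreateArchive shown later; paths are illustrative):

using var archive = ZipArchive.CreateArchive();
archive.AddAllFromDirectory(@"C:\input");           // wraps the loop in PauseEntryRebuilding
archive.AddEntry("extra/readme.txt", "readme.txt");
archive.SaveTo("output.zip");                       // defaults to Deflate per the overload above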

View File

@@ -1,3 +1,4 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
@@ -10,6 +11,55 @@ public static class IWritableAsyncArchiveExtensions
{
extension(IWritableAsyncArchive writableArchive)
{
public async ValueTask AddAllFromDirectoryAsync(
string filePath,
string searchPattern = "*.*",
SearchOption searchOption = SearchOption.AllDirectories
)
{
using (writableArchive.PauseEntryRebuilding())
{
foreach (
var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption)
)
{
var fileInfo = new FileInfo(path);
await writableArchive.AddEntryAsync(
path.Substring(filePath.Length),
fileInfo.OpenRead(),
true,
fileInfo.Length,
fileInfo.LastWriteTime
);
}
}
}
public ValueTask<IArchiveEntry> AddEntryAsync(string key, string file) =>
writableArchive.AddEntryAsync(key, new FileInfo(file));
public ValueTask<IArchiveEntry> AddEntryAsync(
string key,
Stream source,
long size = 0,
DateTime? modified = null
) => writableArchive.AddEntryAsync(key, source, false, size, modified);
public ValueTask<IArchiveEntry> AddEntryAsync(string key, FileInfo fileInfo)
{
if (!fileInfo.Exists)
{
throw new ArgumentException("FileInfo does not exist.");
}
return writableArchive.AddEntryAsync(
key,
fileInfo.OpenRead(),
true,
fileInfo.Length,
fileInfo.LastWriteTime
);
}
public ValueTask SaveToAsync(
string filePath,
WriterOptions? options = null,

View File

@@ -36,4 +36,7 @@ internal class FileInfoRarArchiveVolume : RarVolume
new FileInfoRarFilePart(this, ReaderOptions.Password, markHeader, fileHeader, FileInfo);
internal override IEnumerable<RarFilePart> ReadFileParts() => FileParts;
internal override IAsyncEnumerable<RarFilePart> ReadFilePartsAsync() =>
FileParts.ToAsyncEnumerable();
}

View File

@@ -0,0 +1,53 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives.Rar;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.Rar;
namespace SharpCompress.Archives.Rar;
public partial class RarArchive
{
public override async ValueTask DisposeAsync()
{
if (!_disposed)
{
if (UnpackV1.IsValueCreated && UnpackV1.Value is IDisposable unpackV1)
{
unpackV1.Dispose();
}
_disposed = true;
await base.DisposeAsync();
}
}
protected override async ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync()
{
if (await this.IsMultipartVolumeAsync())
{
var streams = await VolumesAsync
.Select(volume =>
{
volume.Stream.Position = 0;
return volume.Stream;
})
.ToListAsync();
return (RarReader)RarReader.OpenReader(streams, ReaderOptions);
}
var stream = (await VolumesAsync.FirstAsync()).Stream;
stream.Position = 0;
return (RarReader)RarReader.OpenReader(stream, ReaderOptions);
}
public override async ValueTask<bool> IsSolidAsync() =>
await (await VolumesAsync.CastAsync<RarVolume>().FirstAsync()).IsSolidArchiveAsync();
}

View File

@@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
@@ -163,4 +164,24 @@ public partial class RarArchive
return false;
}
}
public static async ValueTask<bool> IsRarFileAsync(
Stream stream,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
await MarkHeader
.ReadAsync(stream, true, false, cancellationToken)
.ConfigureAwait(false);
return true;
}
catch
{
return false;
}
}
}

View File

@@ -24,7 +24,10 @@ public interface IRarArchive : IArchive, IRarArchiveCommon { }
public interface IRarAsyncArchive : IAsyncArchive, IRarArchiveCommon { }
public partial class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>, IRarArchive
public partial class RarArchive
: AbstractArchive<RarArchiveEntry, RarVolume>,
IRarArchive,
IRarAsyncArchive
{
private bool _disposed;
internal Lazy<IRarUnpack> UnpackV2017 { get; } =
@@ -48,23 +51,14 @@ public partial class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>, I
}
}
public override async ValueTask DisposeAsync()
{
if (!_disposed)
{
if (UnpackV1.IsValueCreated && UnpackV1.Value is IDisposable unpackV1)
{
unpackV1.Dispose();
}
_disposed = true;
await base.DisposeAsync();
}
}
protected override IEnumerable<RarArchiveEntry> LoadEntries(IEnumerable<RarVolume> volumes) =>
RarArchiveEntryFactory.GetEntries(this, volumes, ReaderOptions);
// Simple async override - kept in original file
protected override IAsyncEnumerable<RarArchiveEntry> LoadEntriesAsync(
IAsyncEnumerable<RarVolume> volumes
) => RarArchiveEntryFactory.GetEntriesAsync(this, volumes, ReaderOptions);
protected override IEnumerable<RarVolume> LoadVolumes(SourceStream sourceStream)
{
sourceStream.LoadAllParts();
@@ -86,13 +80,7 @@ public partial class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>, I
return new StreamRarArchiveVolume(sourceStream, ReaderOptions, i++).AsEnumerable();
}
protected override IReader CreateReaderForSolidExtraction() =>
CreateReaderForSolidExtractionInternal();
protected override ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync() =>
new(CreateReaderForSolidExtractionInternal());
private RarReader CreateReaderForSolidExtractionInternal()
protected override IReader CreateReaderForSolidExtraction()
{
if (this.IsMultipartVolume())
{
@@ -114,5 +102,6 @@ public partial class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>, I
public override bool IsEncrypted => Entries.First(x => !x.IsDirectory).IsEncrypted;
public virtual int MinVersion => Volumes.First().MinVersion;
public virtual int MaxVersion => Volumes.First().MaxVersion;
}

View File

@@ -0,0 +1,43 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Compressors.Rar;
using SharpCompress.Readers;
namespace SharpCompress.Archives.Rar;
public partial class RarArchiveEntry
{
public async ValueTask<Stream> OpenEntryStreamAsync(
CancellationToken cancellationToken = default
)
{
RarStream stream;
if (IsRarV3)
{
stream = new RarStream(
archive.UnpackV1.Value,
FileHeader,
await MultiVolumeReadOnlyAsyncStream.Create(
Parts.ToAsyncEnumerable().CastAsync<RarFilePart>()
)
);
}
else
{
stream = new RarStream(
archive.UnpackV2017.Value,
FileHeader,
await MultiVolumeReadOnlyAsyncStream.Create(
Parts.ToAsyncEnumerable().CastAsync<RarFilePart>()
)
);
}
await stream.InitializeAsync(cancellationToken);
return stream;
}
}

View File

@@ -12,7 +12,7 @@ using SharpCompress.Readers;
namespace SharpCompress.Archives.Rar;
public class RarArchiveEntry : RarEntry, IArchiveEntry
public partial class RarArchiveEntry : RarEntry, IArchiveEntry
{
private readonly ICollection<RarFilePart> parts;
private readonly RarArchive archive;
@@ -92,32 +92,6 @@ public class RarArchiveEntry : RarEntry, IArchiveEntry
return stream;
}
public async ValueTask<Stream> OpenEntryStreamAsync(
CancellationToken cancellationToken = default
)
{
RarStream stream;
if (IsRarV3)
{
stream = new RarStream(
archive.UnpackV1.Value,
FileHeader,
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>())
);
}
else
{
stream = new RarStream(
archive.UnpackV2017.Value,
FileHeader,
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>())
);
}
await stream.InitializeAsync(cancellationToken);
return stream;
}
public bool IsComplete
{
get

View File

@@ -17,6 +17,19 @@ internal static class RarArchiveEntryFactory
}
}
private static async IAsyncEnumerable<RarFilePart> GetFilePartsAsync(
IAsyncEnumerable<RarVolume> parts
)
{
await foreach (var rarPart in parts)
{
await foreach (var fp in rarPart.ReadFilePartsAsync())
{
yield return fp;
}
}
}
private static IEnumerable<IEnumerable<RarFilePart>> GetMatchedFileParts(
IEnumerable<RarVolume> parts
)
@@ -38,6 +51,27 @@ internal static class RarArchiveEntryFactory
}
}
private static async IAsyncEnumerable<IEnumerable<RarFilePart>> GetMatchedFilePartsAsync(
IAsyncEnumerable<RarVolume> parts
)
{
var groupedParts = new List<RarFilePart>();
await foreach (var fp in GetFilePartsAsync(parts))
{
groupedParts.Add(fp);
if (!fp.FileHeader.IsSplitAfter)
{
yield return groupedParts;
groupedParts = new List<RarFilePart>();
}
}
if (groupedParts.Count > 0)
{
yield return groupedParts;
}
}
internal static IEnumerable<RarArchiveEntry> GetEntries(
RarArchive archive,
IEnumerable<RarVolume> rarParts,
@@ -49,4 +83,16 @@ internal static class RarArchiveEntryFactory
yield return new RarArchiveEntry(archive, groupedParts, readerOptions);
}
}
internal static async IAsyncEnumerable<RarArchiveEntry> GetEntriesAsync(
RarArchive archive,
IAsyncEnumerable<RarVolume> rarParts,
ReaderOptions readerOptions
)
{
await foreach (var groupedParts in GetMatchedFilePartsAsync(rarParts))
{
yield return new RarArchiveEntry(archive, groupedParts, readerOptions);
}
}
}

View File

@@ -13,6 +13,7 @@ internal static class RarArchiveVolumeFactory
//new style rar - ..part1 | /part01 | part001 ....
var m = Regex.Match(part1.Name, @"^(.*\.part)([0-9]+)(\.rar)$", RegexOptions.IgnoreCase);
if (m.Success)
{
item = new FileInfo(
Path.Combine(
part1.DirectoryName!,
@@ -23,11 +24,13 @@ internal static class RarArchiveVolumeFactory
)
)
);
}
else
{
//old style - ...rar, .r00, .r01 ...
m = Regex.Match(part1.Name, @"^(.*\.)([r-z{])(ar|[0-9]+)$", RegexOptions.IgnoreCase);
if (m.Success)
{
item = new FileInfo(
Path.Combine(
part1.DirectoryName!,
@@ -40,12 +43,17 @@ internal static class RarArchiveVolumeFactory
)
)
);
}
else //split .001, .002 ....
{
return ArchiveVolumeFactory.GetFilePart(index, part1);
}
}
if (item != null && item.Exists)
{
return item;
}
return null; //no more items
}
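The three volume-naming schemes this method distinguishes, as concrete sequences (names illustrative):

// new style:  archive.part01.rar, archive.part02.rar, ...
// old style:  archive.rar, archive.r00, archive.r01, ...
// raw split:  archive.001, archive.002, ...  (delegated to ArchiveVolumeFactory.GetFilePart)
var isNewStyle = Regex.IsMatch(
    "archive.part02.rar",
    @"^(.*\.part)([0-9]+)(\.rar)$",
    RegexOptions.IgnoreCase
); // true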

View File

@@ -14,6 +14,9 @@ internal class StreamRarArchiveVolume : RarVolume
internal override IEnumerable<RarFilePart> ReadFileParts() => GetVolumeFileParts();
internal override IAsyncEnumerable<RarFilePart> ReadFilePartsAsync() =>
GetVolumeFilePartsAsync();
internal override RarFilePart CreateFilePart(MarkHeader markHeader, FileHeader fileHeader) =>
new SeekableFilePart(markHeader, fileHeader, Index, Stream, ReaderOptions.Password);
}

View File

@@ -0,0 +1,73 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.SevenZip;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives.SevenZip;
public partial class SevenZipArchive
{
private async ValueTask LoadFactoryAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
if (_database is null)
{
stream.Position = 0;
var reader = new ArchiveReader();
await reader.OpenAsync(
stream,
lookForHeader: ReaderOptions.LookForHeader,
cancellationToken
);
_database = await reader.ReadDatabaseAsync(
new PasswordProvider(ReaderOptions.Password),
cancellationToken
);
}
}
protected override async IAsyncEnumerable<SevenZipArchiveEntry> LoadEntriesAsync(
IAsyncEnumerable<SevenZipVolume> volumes
)
{
var stream = (await volumes.SingleAsync()).Stream;
await LoadFactoryAsync(stream);
if (_database is null)
{
yield break;
}
var entries = new SevenZipArchiveEntry[_database._files.Count];
for (var i = 0; i < _database._files.Count; i++)
{
var file = _database._files[i];
entries[i] = new SevenZipArchiveEntry(
this,
new SevenZipFilePart(stream, _database, i, file, ReaderOptions.ArchiveEncoding)
);
}
foreach (var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder))
{
var isSolid = false;
foreach (var entry in group)
{
entry.IsSolid = isSolid;
isSolid = true;
}
}
foreach (var entry in entries)
{
yield return entry;
}
}
protected override ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync() =>
new(new SevenZipReader(ReaderOptions, this));
}

View File

@@ -1,12 +1,10 @@
using System;
using System.Buffers;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.SevenZip;
using SharpCompress.Compressors.LZMA.Utilites;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -157,13 +155,56 @@ public partial class SevenZipArchive
}
}
private static ReadOnlySpan<byte> Signature =>
new byte[] { (byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C };
private static ReadOnlySpan<byte> Signature => [(byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C];
public static async ValueTask<bool> IsSevenZipFileAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
return await SignatureMatchAsync(stream, cancellationToken);
}
catch
{
return false;
}
}
private static bool SignatureMatch(Stream stream)
{
var reader = new BinaryReader(stream);
ReadOnlySpan<byte> signatureBytes = reader.ReadBytes(6);
return signatureBytes.SequenceEqual(Signature);
var buffer = ArrayPool<byte>.Shared.Rent(6);
try
{
stream.ReadExact(buffer, 0, 6);
return buffer.AsSpan().Slice(0, 6).SequenceEqual(Signature);
}
finally
{
ArrayPool<byte>.Shared.Return(buffer);
}
}
private static async ValueTask<bool> SignatureMatchAsync(
Stream stream,
CancellationToken cancellationToken
)
{
var buffer = ArrayPool<byte>.Shared.Rent(6);
try
{
if (!await stream.ReadFullyAsync(buffer, 0, 6, cancellationToken).ConfigureAwait(false))
{
return false;
}
return buffer.AsSpan().Slice(0, 6).SequenceEqual(Signature);
}
finally
{
ArrayPool<byte>.Shared.Return(buffer);
}
}
}
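Both signature probes follow the same pooled-buffer pattern: rent, read exactly N bytes, compare, and return the buffer in a finally block. A generic sketch of that pattern (not part of this change):

static bool MatchesSignature(Stream stream, ReadOnlySpan<byte> signature)
{
    var buffer = ArrayPool<byte>.Shared.Rent(signature.Length);
    try
    {
        var read = 0;
        while (read < signature.Length)
        {
            var n = stream.Read(buffer, read, signature.Length - read);
            if (n == 0)
            {
                return false; // stream ended before the full signature
            }
            read += n;
        }
        return buffer.AsSpan(0, signature.Length).SequenceEqual(signature);
    }
    finally
    {
        ArrayPool<byte>.Shared.Return(buffer);
    }
}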

View File

@@ -16,48 +16,65 @@ public partial class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, Sev
{
private ArchiveDatabase? _database;
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="sourceStream"></param>
private SevenZipArchive(SourceStream sourceStream)
: base(ArchiveType.SevenZip, sourceStream) { }
internal SevenZipArchive()
: base(ArchiveType.SevenZip) { }
protected override IEnumerable<SevenZipVolume> LoadVolumes(SourceStream sourceStream)
{
sourceStream.NotNull("SourceStream is null").LoadAllParts();
return new SevenZipVolume(sourceStream, ReaderOptions, 0).AsEnumerable();
sourceStream.NotNull("SourceStream is null").LoadAllParts(); //request all streams
return new SevenZipVolume(sourceStream, ReaderOptions, 0).AsEnumerable(); //single volume or split parts; true multi-volume is not supported
}
internal SevenZipArchive()
: base(ArchiveType.SevenZip) { }
protected override IEnumerable<SevenZipArchiveEntry> LoadEntries(
IEnumerable<SevenZipVolume> volumes
)
{
var stream = volumes.Single().Stream;
LoadFactory(stream);
if (_database is null)
{
return Enumerable.Empty<SevenZipArchiveEntry>();
}
var entries = new SevenZipArchiveEntry[_database._files.Count];
for (var i = 0; i < _database._files.Count; i++)
{
var file = _database._files[i];
entries[i] = new SevenZipArchiveEntry(
this,
new SevenZipFilePart(stream, _database, i, file, ReaderOptions.ArchiveEncoding)
);
}
foreach (var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder))
{
var isSolid = false;
foreach (var entry in group)
{
entry.IsSolid = isSolid;
isSolid = true;
}
}
return entries;
foreach (var volume in volumes)
{
LoadFactory(volume.Stream);
if (_database is null)
{
yield break;
}
var entries = new SevenZipArchiveEntry[_database._files.Count];
for (var i = 0; i < _database._files.Count; i++)
{
var file = _database._files[i];
entries[i] = new SevenZipArchiveEntry(
this,
new SevenZipFilePart(
volume.Stream,
_database,
i,
file,
ReaderOptions.ArchiveEncoding
)
);
}
foreach (
var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder)
)
{
var isSolid = false;
foreach (var entry in group)
{
entry.IsSolid = isSolid;
isSolid = true;
}
}
foreach (var entry in entries)
{
yield return entry;
}
}
}
private void LoadFactory(Stream stream)
@@ -74,9 +91,6 @@ public partial class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, Sev
protected override IReader CreateReaderForSolidExtraction() =>
new SevenZipReader(ReaderOptions, this);
protected override ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync() =>
new(new SevenZipReader(ReaderOptions, this));
public override bool IsSolid =>
Entries
.Where(x => !x.IsDirectory)
@@ -88,13 +102,34 @@ public partial class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, Sev
public override long TotalSize =>
_database?._packSizes.Aggregate(0L, (total, packSize) => total + packSize) ?? 0;
private sealed class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
internal sealed class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
{
private readonly SevenZipArchive _archive;
private SevenZipEntry? _currentEntry;
private Stream? _currentFolderStream;
private CFolder? _currentFolder;
/// <summary>
/// Enables internal diagnostics for tests.
/// When disabled (default), diagnostics properties return null to avoid exposing internal state.
/// </summary>
internal bool DiagnosticsEnabled { get; set; }
/// <summary>
/// Current folder instance used to decide whether the solid folder stream should be reused.
/// Only available when <see cref="DiagnosticsEnabled"/> is true.
/// </summary>
internal object? DiagnosticsCurrentFolder => DiagnosticsEnabled ? _currentFolder : null;
/// <summary>
/// Current shared folder stream instance.
/// Only available when <see cref="DiagnosticsEnabled"/> is true.
/// </summary>
internal Stream? DiagnosticsCurrentFolderStream =>
DiagnosticsEnabled ? _currentFolderStream : null;
internal SevenZipReader(ReaderOptions readerOptions, SevenZipArchive archive)
: base(readerOptions, ArchiveType.SevenZip) => this._archive = archive;
: base(readerOptions, ArchiveType.SevenZip, false) => this._archive = archive;
public override SevenZipVolume Volume => _archive.Volumes.Single();
@@ -107,6 +142,10 @@ public partial class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, Sev
_currentEntry = dir;
yield return dir;
}
// For solid archives (entries in the same folder share a compressed stream),
// we must iterate entries sequentially and maintain the folder stream state
// across entries in the same folder to avoid recreating the decompression
// stream for each file, which breaks contiguous streaming.
foreach (var entry in entries.Where(x => !x.IsDirectory))
{
_currentEntry = entry;
@@ -121,10 +160,53 @@ public partial class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, Sev
{
return CreateEntryStream(Stream.Null);
}
return CreateEntryStream(new SyncOnlyStream(entry.FilePart.GetCompressedStream()));
var folder = entry.FilePart.Folder;
// Check if we're starting a new folder - dispose old folder stream if needed
if (folder != _currentFolder)
{
_currentFolderStream?.Dispose();
_currentFolderStream = null;
_currentFolder = folder;
}
// Create the folder stream once per folder
if (_currentFolderStream is null)
{
_currentFolderStream = _archive._database!.GetFolderStream(
_archive.Volumes.Single().Stream,
folder!,
_archive._database.PasswordProvider
);
}
// Wrap with SyncOnlyStream to work around LZMA async bugs
// Return a ReadOnlySubStream that reads from the shared folder stream
return CreateEntryStream(
new SyncOnlyStream(
new ReadOnlySubStream(_currentFolderStream, entry.Size, leaveOpen: true)
)
);
}
public override void Dispose()
{
_currentFolderStream?.Dispose();
_currentFolderStream = null;
base.Dispose();
}
}
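The folder-stream reuse above can be pictured as one continuous decode per solid folder, with each entry read as a fixed-length window (OpenFolderDecoder and Consume are placeholders, not real APIs):

Stream folderStream = OpenFolderDecoder(); // created once per CFolder
foreach (var entry in entriesInFolder)
{
    // each sub-stream consumes exactly entry.Size bytes, leaving the shared
    // decoder positioned at the start of the next entry in the folder
    using var entryStream = new ReadOnlySubStream(folderStream, entry.Size, leaveOpen: true);
    Consume(entryStream);
}
folderStream.Dispose(); // or when the next folder starts, as in the reader above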
/// <summary>
/// WORKAROUND: Forces async operations to use synchronous equivalents.
/// This is necessary because the LZMA decoder has bugs in its async implementation
/// that cause state corruption (IndexOutOfRangeException, DataErrorException).
///
/// The proper fix would be to repair the LZMA decoder's async methods
/// (LzmaStream.ReadAsync, Decoder.CodeAsync, OutWindow async operations),
/// but that requires deep changes to the decoder state machine.
/// </summary>
private sealed class SyncOnlyStream : Stream
{
private readonly Stream _baseStream;
@@ -154,6 +236,7 @@ public partial class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, Sev
public override void Write(byte[] buffer, int offset, int count) =>
_baseStream.Write(buffer, offset, count);
// Force async operations to use sync equivalents to avoid LZMA decoder bugs
public override Task<int> ReadAsync(
byte[] buffer,
int offset,

View File

@@ -12,8 +12,9 @@ public class SevenZipArchiveEntry : SevenZipEntry, IArchiveEntry
public Stream OpenEntryStream() => FilePart.GetCompressedStream();
public ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default) =>
new(OpenEntryStream());
public async ValueTask<Stream> OpenEntryStreamAsync(
CancellationToken cancellationToken = default
) => (await FilePart.GetCompressedStreamAsync(cancellationToken)).NotNull();
public IArchive Archive { get; }

View File

@@ -0,0 +1,161 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Tar;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.Tar;
using SharpCompress.Writers;
using SharpCompress.Writers.Tar;
namespace SharpCompress.Archives.Tar;
public partial class TarArchive
{
protected override async ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
IAsyncEnumerable<TarArchiveEntry> oldEntries,
IEnumerable<TarArchiveEntry> newEntries,
CancellationToken cancellationToken = default
)
{
using var writer = new TarWriter(stream, new TarWriterOptions(options));
await foreach (
var entry in oldEntries.WithCancellation(cancellationToken).ConfigureAwait(false)
)
{
if (entry.IsDirectory)
{
await writer
.WriteDirectoryAsync(
entry.Key.NotNull("Entry Key is null"),
entry.LastModifiedTime,
cancellationToken
)
.ConfigureAwait(false);
}
else
{
using var entryStream = entry.OpenEntryStream();
await writer
.WriteAsync(
entry.Key.NotNull("Entry Key is null"),
entryStream,
entry.LastModifiedTime,
entry.Size,
cancellationToken
)
.ConfigureAwait(false);
}
}
foreach (var entry in newEntries)
{
if (entry.IsDirectory)
{
await writer
.WriteDirectoryAsync(
entry.Key.NotNull("Entry Key is null"),
entry.LastModifiedTime,
cancellationToken
)
.ConfigureAwait(false);
}
else
{
using var entryStream = entry.OpenEntryStream();
await writer
.WriteAsync(
entry.Key.NotNull("Entry Key is null"),
entryStream,
entry.LastModifiedTime,
entry.Size,
cancellationToken
)
.ConfigureAwait(false);
}
}
}
protected override ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync()
{
var stream = Volumes.Single().Stream;
stream.Position = 0;
return new((IAsyncReader)TarReader.OpenReader(stream));
}
protected override async IAsyncEnumerable<TarArchiveEntry> LoadEntriesAsync(
IAsyncEnumerable<TarVolume> volumes
)
{
var stream = (await volumes.SingleAsync()).Stream;
if (stream.CanSeek)
{
stream.Position = 0;
}
// Always use async header reading in LoadEntriesAsync so async-only streams work
TarHeader? previousHeader = null;
await foreach (
var header in TarHeaderFactory.ReadHeaderAsync(
StreamingMode.Seekable,
stream,
ReaderOptions.ArchiveEncoding
)
)
{
if (header != null)
{
if (header.EntryType == EntryType.LongName)
{
previousHeader = header;
}
else
{
if (previousHeader != null)
{
var entry = new TarArchiveEntry(
this,
new TarFilePart(previousHeader, stream),
CompressionType.None
);
var oldStreamPos = stream.Position;
using (var entryStream = entry.OpenEntryStream())
{
using var memoryStream = new MemoryStream();
await entryStream.CopyToAsync(memoryStream);
memoryStream.Position = 0;
var bytes = memoryStream.ToArray();
header.Name = ReaderOptions
.ArchiveEncoding.Decode(bytes)
.TrimNulls();
}
stream.Position = oldStreamPos;
previousHeader = null;
}
yield return new TarArchiveEntry(
this,
new TarFilePart(header, stream),
CompressionType.None
);
}
}
else
{
throw new IncompleteArchiveException("Failed to read TAR header");
}
}
}
}
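For reference, the LongName handling above follows the GNU tar convention, where an oversized path is carried by a pseudo-entry immediately before the real one (a sketch of the on-disk layout, not code from this change):

// header { EntryType = LongName ('L'), Size = length of the full path }
// data   { the full path bytes, NUL-terminated, padded to a 512-byte block }
// header { the real entry, whose fixed 100-byte Name field is truncated }
// The loop buffers the pseudo-entry body, decodes and trims it, and patches
// it into the next header's Name before yielding the real entry.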

View File

@@ -2,15 +2,13 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Tar;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Writers;
using SharpCompress.Writers.Tar;
namespace SharpCompress.Archives.Tar;
@@ -23,7 +21,7 @@ public partial class TarArchive
public static IWritableArchive OpenArchive(string filePath, ReaderOptions? readerOptions = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
return OpenArchive(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
return OpenArchive(new FileInfo(filePath), readerOptions);
}
public static IWritableArchive OpenArchive(
@@ -36,7 +34,7 @@ public partial class TarArchive
new SourceStream(
fileInfo,
i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
readerOptions ?? new ReaderOptions()
readerOptions ?? new ReaderOptions() { LeaveStreamOpen = false }
)
);
}
@@ -52,7 +50,7 @@ public partial class TarArchive
new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions()
readerOptions ?? new ReaderOptions() { LeaveStreamOpen = false }
)
);
}
@@ -154,15 +152,44 @@ public partial class TarArchive
try
{
var tarHeader = new TarHeader(new ArchiveEncoding());
var readSucceeded = tarHeader.Read(new BinaryReader(stream));
var reader = new BinaryReader(stream, Encoding.UTF8, false);
var readSucceeded = tarHeader.Read(reader);
var isEmptyArchive =
tarHeader.Name?.Length == 0
&& tarHeader.Size == 0
&& Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
return readSucceeded || isEmptyArchive;
}
catch { }
return false;
catch (Exception)
{
// Catch all exceptions during tar header reading to determine if this is a valid tar file
// Invalid tar files or corrupted streams will throw various exceptions
return false;
}
}
public static async ValueTask<bool> IsTarFileAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
var tarHeader = new TarHeader(new ArchiveEncoding());
var reader = new AsyncBinaryReader(stream, false);
var readSucceeded = await tarHeader.ReadAsync(reader);
var isEmptyArchive =
tarHeader.Name?.Length == 0
&& tarHeader.Size == 0
&& Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
return readSucceeded || isEmptyArchive;
}
catch (Exception)
{
// Catch all exceptions during tar header reading to determine if this is a valid tar file
// Invalid tar files or corrupted streams will throw various exceptions
return false;
}
}
public static IWritableArchive CreateArchive() => new TarArchive();

View File

@@ -32,6 +32,10 @@ public partial class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVo
protected override IEnumerable<TarArchiveEntry> LoadEntries(IEnumerable<TarVolume> volumes)
{
var stream = volumes.Single().Stream;
if (stream.CanSeek)
{
stream.Position = 0;
}
TarHeader? previousHeader = null;
foreach (
var header in TarHeaderFactory.ReadHeader(
@@ -62,7 +66,7 @@ public partial class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVo
using (var entryStream = entry.OpenEntryStream())
{
using var memoryStream = new MemoryStream();
entryStream.CopyTo(memoryStream);
entryStream.CopyTo(memoryStream, Constants.BufferSize);
memoryStream.Position = 0;
var bytes = memoryStream.ToArray();
@@ -139,54 +143,10 @@ public partial class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVo
}
}
protected override async ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
IEnumerable<TarArchiveEntry> oldEntries,
IEnumerable<TarArchiveEntry> newEntries,
CancellationToken cancellationToken = default
)
{
using var writer = new TarWriter(stream, new TarWriterOptions(options));
foreach (var entry in oldEntries.Concat(newEntries))
{
if (entry.IsDirectory)
{
await writer
.WriteDirectoryAsync(
entry.Key.NotNull("Entry Key is null"),
entry.LastModifiedTime,
cancellationToken
)
.ConfigureAwait(false);
}
else
{
using var entryStream = entry.OpenEntryStream();
await writer
.WriteAsync(
entry.Key.NotNull("Entry Key is null"),
entryStream,
entry.LastModifiedTime,
entry.Size,
cancellationToken
)
.ConfigureAwait(false);
}
}
}
protected override IReader CreateReaderForSolidExtraction()
{
var stream = Volumes.Single().Stream;
stream.Position = 0;
return TarReader.OpenReader(stream);
}
protected override ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync()
{
var stream = Volumes.Single().Stream;
stream.Position = 0;
return new((IAsyncReader)TarReader.OpenReader(stream));
}
}

View File

@@ -0,0 +1,132 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Zip;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Writers;
using SharpCompress.Writers.Zip;
namespace SharpCompress.Archives.Zip;
public partial class ZipArchive
{
protected override async IAsyncEnumerable<ZipArchiveEntry> LoadEntriesAsync(
IAsyncEnumerable<ZipVolume> volumes
)
{
var vols = await volumes.ToListAsync();
var volsArray = vols.ToArray();
await foreach (
var h in headerFactory.NotNull().ReadSeekableHeaderAsync(volsArray.Last().Stream)
)
{
if (h != null)
{
switch (h.ZipHeaderType)
{
case ZipHeaderType.DirectoryEntry:
{
var deh = (DirectoryEntryHeader)h;
Stream s;
if (
deh.RelativeOffsetOfEntryHeader + deh.CompressedSize
> volsArray[deh.DiskNumberStart].Stream.Length
)
{
var v = volsArray.Skip(deh.DiskNumberStart).ToArray();
s = new SourceStream(
v[0].Stream,
i => i < v.Length ? v[i].Stream : null,
new ReaderOptions() { LeaveStreamOpen = true }
);
}
else
{
s = volsArray[deh.DiskNumberStart].Stream;
}
yield return new ZipArchiveEntry(
this,
new SeekableZipFilePart(headerFactory.NotNull(), deh, s)
);
}
break;
case ZipHeaderType.DirectoryEnd:
{
var bytes = ((DirectoryEndHeader)h).Comment ?? Array.Empty<byte>();
volsArray.Last().Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
yield break;
}
}
}
}
}
protected override async ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
IAsyncEnumerable<ZipArchiveEntry> oldEntries,
IEnumerable<ZipArchiveEntry> newEntries,
CancellationToken cancellationToken = default
)
{
using var writer = new ZipWriter(stream, new ZipWriterOptions(options));
await foreach (
var entry in oldEntries.WithCancellation(cancellationToken).ConfigureAwait(false)
)
{
if (entry.IsDirectory)
{
await writer
.WriteDirectoryAsync(
entry.Key.NotNull("Entry Key is null"),
entry.LastModifiedTime,
cancellationToken
)
.ConfigureAwait(false);
}
else
{
using var entryStream = entry.OpenEntryStream();
await writer
.WriteAsync(
entry.Key.NotNull("Entry Key is null"),
entryStream,
cancellationToken
)
.ConfigureAwait(false);
}
}
foreach (var entry in newEntries)
{
if (entry.IsDirectory)
{
await writer
.WriteDirectoryAsync(
entry.Key.NotNull("Entry Key is null"),
entry.LastModifiedTime,
cancellationToken
)
.ConfigureAwait(false);
}
else
{
using var entryStream = entry.OpenEntryStream();
await writer
.WriteAsync(
entry.Key.NotNull("Entry Key is null"),
entryStream,
cancellationToken
)
.ConfigureAwait(false);
}
}
}
}
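A round-trip sketch using the async writable interface together with the factory methods earlier in this diff (paths illustrative):

await using var archive = await ArchiveFactory.OpenAsyncArchive(new FileInfo("input.zip"));
if (archive is IWritableAsyncArchive writable)
{
    await writable.AddEntryAsync("notes.txt", File.OpenRead("notes.txt"), closeStream: true);
    using var output = File.Create("output.zip");
    await writable.SaveToAsync(output, new WriterOptions(CompressionType.Deflate));
}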

View File

@@ -21,7 +21,7 @@ public partial class ZipArchive
public static IWritableArchive OpenArchive(string filePath, ReaderOptions? readerOptions = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
return OpenArchive(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
return OpenArchive(new FileInfo(filePath), readerOptions);
}
public static IWritableArchive OpenArchive(
@@ -34,7 +34,7 @@ public partial class ZipArchive
new SourceStream(
fileInfo,
i => ZipArchiveVolumeFactory.GetFilePart(i, fileInfo),
readerOptions ?? new ReaderOptions()
readerOptions ?? new ReaderOptions() { LeaveStreamOpen = false }
)
);
}
@@ -50,7 +50,7 @@ public partial class ZipArchive
new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions()
readerOptions ?? new ReaderOptions() { LeaveStreamOpen = false }
)
);
}
@@ -135,40 +135,24 @@ public partial class ZipArchive
return (IWritableAsyncArchive)OpenArchive(fileInfos, readerOptions);
}
public static bool IsZipFile(
string filePath,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => IsZipFile(new FileInfo(filePath), password, bufferSize);
public static bool IsZipFile(string filePath, string? password = null) =>
IsZipFile(new FileInfo(filePath), password);
public static bool IsZipFile(
FileInfo fileInfo,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
)
public static bool IsZipFile(FileInfo fileInfo, string? password = null)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return IsZipFile(stream, password, bufferSize);
return IsZipFile(stream, password);
}
public static bool IsZipFile(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
)
public static bool IsZipFile(Stream stream, string? password = null)
{
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
}
var header = headerFactory
.ReadStreamHeader(stream)
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
@@ -188,18 +172,14 @@ public partial class ZipArchive
}
}
public static bool IsZipMulti(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
)
public static bool IsZipMulti(Stream stream, string? password = null)
{
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
stream = new SharpCompressStream(stream, bufferSize: Constants.BufferSize);
}
var header = headerFactory
@@ -210,7 +190,7 @@ public partial class ZipArchive
if (stream.CanSeek)
{
var z = new SeekableZipHeaderFactory(password, new ArchiveEncoding());
var x = z.ReadSeekableHeader(stream, useSync: true).FirstOrDefault();
var x = z.ReadSeekableHeader(stream).FirstOrDefault();
return x?.ZipHeaderType == ZipHeaderType.DirectoryEntry;
}
else
@@ -233,7 +213,6 @@ public partial class ZipArchive
public static async ValueTask<bool> IsZipFileAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
)
{
@@ -243,7 +222,7 @@ public partial class ZipArchive
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
stream = new SharpCompressStream(stream, bufferSize: Constants.BufferSize);
}
var header = await headerFactory
@@ -273,7 +252,6 @@ public partial class ZipArchive
public static async ValueTask<bool> IsZipMultiAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
)
{
@@ -283,7 +261,7 @@ public partial class ZipArchive
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
stream = new SharpCompressStream(stream, bufferSize: Constants.BufferSize);
}
var header = headerFactory

View File

@@ -41,9 +41,12 @@ public partial class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVo
var idx = 0;
if (streams.Count() > 1)
{
streams[1].Position += 4;
var isZip = IsZipFile(streams[1], ReaderOptions.Password, ReaderOptions.BufferSize);
streams[1].Position -= 4;
//check if second stream is zip header without changing position
var headerProbeStream = streams[1];
var startPosition = headerProbeStream.Position;
headerProbeStream.Position = startPosition + 4;
var isZip = IsZipFile(headerProbeStream, ReaderOptions.Password);
headerProbeStream.Position = startPosition;
if (isZip)
{
stream.IsVolumes = true;
@@ -62,9 +65,7 @@ public partial class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVo
protected override IEnumerable<ZipArchiveEntry> LoadEntries(IEnumerable<ZipVolume> volumes)
{
var vols = volumes.ToArray();
foreach (
var h in headerFactory.NotNull().ReadSeekableHeader(vols.Last().Stream, useSync: true)
)
foreach (var h in headerFactory.NotNull().ReadSeekableHeader(vols.Last().Stream))
{
if (h != null)
{
@@ -108,59 +109,6 @@ public partial class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVo
}
}
protected override async IAsyncEnumerable<ZipArchiveEntry> LoadEntriesAsync(
IAsyncEnumerable<ZipVolume> volumes
)
{
var vols = await volumes.ToListAsync();
var volsArray = vols.ToArray();
await foreach (
var h in headerFactory.NotNull().ReadSeekableHeaderAsync(volsArray.Last().Stream)
)
{
if (h != null)
{
switch (h.ZipHeaderType)
{
case ZipHeaderType.DirectoryEntry:
{
var deh = (DirectoryEntryHeader)h;
Stream s;
if (
deh.RelativeOffsetOfEntryHeader + deh.CompressedSize
> volsArray[deh.DiskNumberStart].Stream.Length
)
{
var v = volsArray.Skip(deh.DiskNumberStart).ToArray();
s = new SourceStream(
v[0].Stream,
i => i < v.Length ? v[i].Stream : null,
new ReaderOptions() { LeaveStreamOpen = true }
);
}
else
{
s = volsArray[deh.DiskNumberStart].Stream;
}
yield return new ZipArchiveEntry(
this,
new SeekableZipFilePart(headerFactory.NotNull(), deh, s)
);
}
break;
case ZipHeaderType.DirectoryEnd:
{
var bytes = ((DirectoryEndHeader)h).Comment ?? Array.Empty<byte>();
volsArray.Last().Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
yield break;
}
}
}
}
}
public void SaveTo(Stream stream) => SaveTo(stream, new WriterOptions(CompressionType.Deflate));
protected override void SaveTo(
@@ -192,41 +140,6 @@ public partial class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVo
}
}
protected override async ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
IEnumerable<ZipArchiveEntry> oldEntries,
IEnumerable<ZipArchiveEntry> newEntries,
CancellationToken cancellationToken = default
)
{
using var writer = new ZipWriter(stream, new ZipWriterOptions(options));
foreach (var entry in oldEntries.Concat(newEntries))
{
if (entry.IsDirectory)
{
await writer
.WriteDirectoryAsync(
entry.Key.NotNull("Entry Key is null"),
entry.LastModifiedTime,
cancellationToken
)
.ConfigureAwait(false);
}
else
{
using var entryStream = entry.OpenEntryStream();
await writer
.WriteAsync(
entry.Key.NotNull("Entry Key is null"),
entryStream,
cancellationToken
)
.ConfigureAwait(false);
}
}
}
protected override ZipArchiveEntry CreateEntryInternal(
string filePath,
Stream source,

View File

@@ -0,0 +1,22 @@
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip;
namespace SharpCompress.Archives.Zip;
public partial class ZipArchiveEntry
{
public async ValueTask<Stream> OpenEntryStreamAsync(
CancellationToken cancellationToken = default
)
{
var part = Parts.Single();
if (part is SeekableZipFilePart seekablePart)
{
return (await seekablePart.GetCompressedStreamAsync(cancellationToken)).NotNull();
}
return OpenEntryStream();
}
}

View File

@@ -6,25 +6,13 @@ using SharpCompress.Common.Zip;
namespace SharpCompress.Archives.Zip;
public class ZipArchiveEntry : ZipEntry, IArchiveEntry
public partial class ZipArchiveEntry : ZipEntry, IArchiveEntry
{
internal ZipArchiveEntry(ZipArchive archive, SeekableZipFilePart? part)
: base(part) => Archive = archive;
public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream().NotNull();
public async ValueTask<Stream> OpenEntryStreamAsync(
CancellationToken cancellationToken = default
)
{
var part = Parts.Single();
if (part is SeekableZipFilePart seekablePart)
{
return (await seekablePart.GetCompressedStreamAsync(cancellationToken)).NotNull();
}
return OpenEntryStream();
}
#region IArchiveEntry Members
public IArchive Archive { get; }

View File

@@ -14,6 +14,7 @@ internal static class ZipArchiveVolumeFactory
//new style .zip, z01.. | .zipx, zx01 - if the numbers go beyond 99 then they use 100 ...1000 etc
var m = Regex.Match(part1.Name, @"^(.*\.)(zipx?|zx?[0-9]+)$", RegexOptions.IgnoreCase);
if (m.Success)
{
item = new FileInfo(
Path.Combine(
part1.DirectoryName!,
@@ -24,11 +25,16 @@ internal static class ZipArchiveVolumeFactory
)
)
);
}
else //split - 001, 002 ...
{
return ArchiveVolumeFactory.GetFilePart(index, part1);
}
if (item != null && item.Exists)
{
return item;
}
return null; //no more items
}

View File

@@ -22,9 +22,13 @@ namespace SharpCompress.Common.Ace
for (int j = 0; j < 8; j++)
{
if ((crc & 1) != 0)
{
crc = (crc >> 1) ^ 0xEDB88320u;
}
else
{
crc >>= 1;
}
}
table[i] = crc;
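The table feeds the standard reflected CRC-32 (polynomial 0xEDB88320); a generic sketch of the lookup loop that consumes such a table (not copied from the ACE code):

static uint Crc32(ReadOnlySpan<byte> data, uint[] table)
{
    var crc = 0xFFFFFFFFu;
    foreach (var b in data)
    {
        crc = (crc >> 8) ^ table[(crc ^ b) & 0xFF];
    }
    return crc ^ 0xFFFFFFFFu;
}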

View File

@@ -0,0 +1,111 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Arc;
namespace SharpCompress.Common.Ace.Headers;
public sealed partial class AceFileHeader
{
/// <summary>
/// Asynchronously reads the next file entry header from the stream.
/// Returns null if no more entries or end of archive.
/// Supports both ACE 1.0 and ACE 2.0 formats.
/// </summary>
public override async ValueTask<AceHeader?> ReadAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
var headerData = await ReadHeaderAsync(stream, cancellationToken);
if (headerData.Length == 0)
{
return null;
}
int offset = 0;
// Header type (1 byte)
HeaderType = headerData[offset++];
// Recovery record headers (ACE 2.0 feature) are not file entries
if (HeaderType == (byte)SharpCompress.Common.Ace.Headers.AceHeaderType.RECOVERY32)
{
// Returning null ends enumeration rather than skipping to the next header
return null;
}
if (HeaderType != (byte)SharpCompress.Common.Ace.Headers.AceHeaderType.FILE)
{
// Unknown header type - stop reading
return null;
}
// Header flags (2 bytes)
HeaderFlags = BitConverter.ToUInt16(headerData, offset);
offset += 2;
// Packed size (4 bytes)
PackedSize = BitConverter.ToUInt32(headerData, offset);
offset += 4;
// Original size (4 bytes)
OriginalSize = BitConverter.ToUInt32(headerData, offset);
offset += 4;
// File date/time in DOS format (4 bytes)
var dosDateTime = BitConverter.ToUInt32(headerData, offset);
DateTime = ConvertDosDateTime(dosDateTime);
offset += 4;
// File attributes (4 bytes)
Attributes = (int)BitConverter.ToUInt32(headerData, offset);
offset += 4;
// CRC32 (4 bytes)
Crc32 = BitConverter.ToUInt32(headerData, offset);
offset += 4;
// Compression type (1 byte)
byte compressionType = headerData[offset++];
CompressionType = GetCompressionType(compressionType);
// Compression quality/parameter (1 byte)
byte compressionQuality = headerData[offset++];
CompressionQuality = GetCompressionQuality(compressionQuality);
// Parameters (2 bytes)
Parameters = BitConverter.ToUInt16(headerData, offset);
offset += 2;
// Reserved (2 bytes) - skip
offset += 2;
// Filename length (2 bytes)
var filenameLength = BitConverter.ToUInt16(headerData, offset);
offset += 2;
// Filename
if (offset + filenameLength <= headerData.Length)
{
Filename = ArchiveEncoding.Decode(headerData, offset, filenameLength);
offset += filenameLength;
}
// Handle comment if present
if ((HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.COMMENT) != 0)
{
// Comment length (2 bytes)
if (offset + 2 <= headerData.Length)
{
ushort commentLength = BitConverter.ToUInt16(headerData, offset);
offset += 2 + commentLength; // Skip comment
}
}
// Store the data start position
DataStartPosition = stream.Position;
return this;
}
}
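ConvertDosDateTime is not part of this diff; the packed DOS format it presumably decodes is the standard one, sketched here under that assumption:
// Assumed layout (low to high bits): seconds/2 (5), minutes (6), hours (5), day (5), month (4), year-1980 (7)
static DateTime FromDosDateTime(uint v)
{
    var second = (int)(v & 0x1F) * 2;          // bits 0-4, stored halved
    var minute = (int)((v >> 5) & 0x3F);       // bits 5-10
    var hour = (int)((v >> 11) & 0x1F);        // bits 11-15
    var day = (int)((v >> 16) & 0x1F);         // bits 16-20
    var month = (int)((v >> 21) & 0x0F);       // bits 21-24
    var year = 1980 + (int)((v >> 25) & 0x7F); // bits 25-31
    return new DateTime(year, month, day, hour, minute, second);
}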

View File

@@ -2,6 +2,8 @@ using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using System.Xml.Linq;
using SharpCompress.Common.Arc;
@@ -10,7 +12,7 @@ namespace SharpCompress.Common.Ace.Headers
/// <summary>
/// ACE file entry header
/// </summary>
public sealed class AceFileHeader : AceHeader
public sealed partial class AceFileHeader : AceHeader
{
public long DataStartPosition { get; private set; }
public long PackedSize { get; set; }
@@ -147,6 +149,8 @@ namespace SharpCompress.Common.Ace.Headers
return this;
}
// ReadAsync moved to AceFileHeader.Async.cs
public CompressionType GetCompressionType(byte value) =>
value switch
{

View File

@@ -0,0 +1,69 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Ace.Headers;
public abstract partial class AceHeader
{
public abstract ValueTask<AceHeader?> ReadAsync(
Stream reader,
CancellationToken cancellationToken = default
);
public async ValueTask<byte[]> ReadHeaderAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
// Read header CRC (2 bytes) and header size (2 bytes)
var headerBytes = new byte[4];
if (await stream.ReadAsync(headerBytes, 0, 4, cancellationToken) != 4)
{
return Array.Empty<byte>();
}
HeaderCrc = BitConverter.ToUInt16(headerBytes, 0); // CRC for validation
HeaderSize = BitConverter.ToUInt16(headerBytes, 2);
if (HeaderSize == 0)
{
return Array.Empty<byte>();
}
// Read the header data
var body = new byte[HeaderSize];
if (await stream.ReadAsync(body, 0, HeaderSize, cancellationToken) != HeaderSize)
{
return Array.Empty<byte>();
}
// Verify crc
var checksum = AceCrc.AceCrc16(body);
if (checksum != HeaderCrc)
{
throw new InvalidDataException("Header checksum is invalid");
}
return body;
}
/// <summary>
/// Asynchronously checks if the stream is an ACE archive
/// </summary>
/// <param name="stream">The stream to read from</param>
/// <param name="cancellationToken">Cancellation token</param>
/// <returns>True if the stream is an ACE archive, false otherwise</returns>
public static async ValueTask<bool> IsArchiveAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
var bytes = new byte[14];
if (await stream.ReadAsync(bytes, 0, 14, cancellationToken) != 14)
{
return false;
}
return CheckMagicBytes(bytes, 7);
}
}
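One thing to note when calling the probe: it consumes the first 14 bytes, so seekable callers should rewind afterwards. A usage sketch (path illustrative):
await using var fs = File.OpenRead("sample.ace");
var isAce = await AceHeader.IsArchiveAsync(fs, cancellationToken);
fs.Position = 0; // the probe read 14 bytes; rewind before parsing headers for real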

View File

@@ -1,5 +1,7 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Arj.Headers;
using SharpCompress.Crypto;
@@ -17,7 +19,7 @@ namespace SharpCompress.Common.Ace.Headers
RECOVERY64B = 4,
}
public abstract class AceHeader
public abstract partial class AceHeader
{
// ACE signature: bytes at offset 7 should be "**ACE**"
private static readonly byte[] AceSignature =
@@ -58,6 +60,8 @@ namespace SharpCompress.Common.Ace.Headers
public abstract AceHeader? Read(Stream reader);
// Async methods moved to AceHeader.Async.cs
public byte[] ReadHeader(Stream stream)
{
// Read header CRC (2 bytes) and header size (2 bytes)

View File

@@ -0,0 +1,83 @@
using System;
using System.Buffers.Binary;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Crypto;
namespace SharpCompress.Common.Ace.Headers;
public sealed partial class AceMainHeader
{
/// <summary>
/// Asynchronously reads the main archive header from the stream.
/// Returns header if this is a valid ACE archive.
/// Supports both ACE 1.0 and ACE 2.0 formats.
/// </summary>
public override async ValueTask<AceHeader?> ReadAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
var headerData = await ReadHeaderAsync(stream, cancellationToken);
if (headerData.Length == 0)
{
return null;
}
int offset = 0;
// Header type should be 0 for main header
if (headerData[offset++] != HeaderType)
{
return null;
}
// Header flags (2 bytes)
HeaderFlags = BitConverter.ToUInt16(headerData, offset);
offset += 2;
// Skip signature "**ACE**" (7 bytes)
if (!CheckMagicBytes(headerData, offset))
{
throw new InvalidDataException("Invalid ACE archive signature.");
}
offset += 7;
// ACE version (1 byte) - 10 for ACE 1.0, 20 for ACE 2.0
AceVersion = headerData[offset++];
ExtractVersion = headerData[offset++];
// Host OS (1 byte)
if (offset < headerData.Length)
{
var hostOsByte = headerData[offset++];
HostOS = hostOsByte <= 11 ? (HostOS)hostOsByte : HostOS.Unknown;
}
// Volume number (1 byte)
VolumeNumber = headerData[offset++];
// Creation date/time (4 bytes)
var dosDateTime = BitConverter.ToUInt32(headerData, offset);
DateTime = ConvertDosDateTime(dosDateTime);
offset += 4;
// Reserved fields (8 bytes)
if (offset + 8 <= headerData.Length)
{
offset += 8;
}
// Skip additional fields based on flags
// Handle comment if present
if ((HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.COMMENT) != 0)
{
if (offset + 2 <= headerData.Length)
{
ushort commentLength = BitConverter.ToUInt16(headerData, offset);
offset += 2 + commentLength;
}
}
return this;
}
}

View File

@@ -2,6 +2,8 @@ using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Ace.Headers;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Crypto;
@@ -11,7 +13,7 @@ namespace SharpCompress.Common.Ace.Headers
/// <summary>
/// ACE main archive header
/// </summary>
public sealed class AceMainHeader : AceHeader
public sealed partial class AceMainHeader : AceHeader
{
public byte ExtractVersion { get; set; }
public byte CreatorVersion { get; set; }
@@ -93,5 +95,7 @@ namespace SharpCompress.Common.Ace.Headers
return this;
}
// ReadAsync moved to AceMainHeader.Async.cs
}
}

View File

@@ -0,0 +1,132 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Crypto;
namespace SharpCompress.Common.Arj.Headers;
public abstract partial class ArjHeader
{
public abstract ValueTask<ArjHeader?> ReadAsync(
Stream reader,
CancellationToken cancellationToken = default
);
public async ValueTask<byte[]> ReadHeaderAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
// check for magic bytes
var magic = new byte[2];
if (await stream.ReadAsync(magic, 0, 2, cancellationToken) != 2)
{
return Array.Empty<byte>();
}
if (!CheckMagicBytes(magic))
{
throw new InvalidDataException("Not an ARJ file (wrong magic bytes)");
}
// read header_size
byte[] headerBytes = new byte[2];
if (await stream.ReadAsync(headerBytes, 0, 2, cancellationToken) != 2)
{
return Array.Empty<byte>();
}
var headerSize = (ushort)(headerBytes[0] | (headerBytes[1] << 8));
if (headerSize < 1)
{
return Array.Empty<byte>();
}
var body = new byte[headerSize];
var read = await stream.ReadAsync(body, 0, headerSize, cancellationToken);
if (read < headerSize)
{
return Array.Empty<byte>();
}
byte[] crc = new byte[4];
read = await stream.ReadAsync(crc, 0, 4, cancellationToken);
if (read < 4)
{
return Array.Empty<byte>();
}
// Validate the stored CRC32 against the computed value
var checksum = Crc32Stream.Compute(body);
if (checksum != BitConverter.ToUInt32(crc, 0))
{
throw new InvalidDataException("Header checksum is invalid");
}
return body;
}
protected async ValueTask<List<byte[]>> ReadExtendedHeadersAsync(
Stream reader,
CancellationToken cancellationToken = default
)
{
List<byte[]> extendedHeader = new List<byte[]>();
byte[] buffer = new byte[2];
while (true)
{
int bytesRead = await reader.ReadAsync(buffer, 0, 2, cancellationToken);
if (bytesRead < 2)
{
throw new EndOfStreamException(
"Unexpected end of stream while reading extended header size."
);
}
var extHeaderSize = (ushort)(buffer[0] | (buffer[1] << 8));
if (extHeaderSize == 0)
{
return extendedHeader;
}
byte[] header = new byte[extHeaderSize];
bytesRead = await reader.ReadAsync(header, 0, extHeaderSize, cancellationToken);
if (bytesRead < extHeaderSize)
{
throw new EndOfStreamException(
"Unexpected end of stream while reading extended header data."
);
}
byte[] crcextended = new byte[4];
bytesRead = await reader.ReadAsync(crcextended, 0, 4, cancellationToken);
if (bytesRead < 4)
{
throw new EndOfStreamException(
"Unexpected end of stream while reading extended header CRC."
);
}
var checksum = Crc32Stream.Compute(header);
if (checksum != BitConverter.ToUInt32(crcextended, 0))
{
throw new InvalidDataException("Extended header checksum is invalid");
}
extendedHeader.Add(header);
}
}
/// <summary>
/// Asynchronously checks if the stream is an ARJ archive
/// </summary>
/// <param name="stream">The stream to read from</param>
/// <param name="cancellationToken">Cancellation token</param>
/// <returns>True if the stream is an ARJ archive, false otherwise</returns>
public static async ValueTask<bool> IsArchiveAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
var bytes = new byte[2];
if (await stream.ReadAsync(bytes, 0, 2, cancellationToken) != 2)
{
return false;
}
return CheckMagicBytes(bytes);
}
}
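CheckMagicBytes itself is outside this diff; given the ARJ_MAGIC = 0xEA60 constant declared on the sync side, the check it presumably performs looks like this (an assumption, not the library's code):
static bool LooksLikeArj(ReadOnlySpan<byte> firstTwo) =>
    firstTwo.Length >= 2
    && (ushort)(firstTwo[0] | (firstTwo[1] << 8)) == 0xEA60; // little-endian on disk: 0x60 0xEA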

View File

@@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Crypto;
@@ -15,7 +16,7 @@ namespace SharpCompress.Common.Arj.Headers
LocalHeader,
}
public abstract class ArjHeader
public abstract partial class ArjHeader
{
private const int FIRST_HDR_SIZE = 34;
private const ushort ARJ_MAGIC = 0xEA60;
@@ -31,6 +32,8 @@ namespace SharpCompress.Common.Arj.Headers
public abstract ArjHeader? Read(Stream reader);
// Async methods moved to ArjHeader.Async.cs
public byte[] ReadHeader(Stream stream)
{
// check for magic bytes
@@ -72,6 +75,8 @@ namespace SharpCompress.Common.Arj.Headers
return body;
}
// ReadHeaderAsync moved to ArjHeader.Async.cs
protected List<byte[]> ReadExtendedHeaders(Stream reader)
{
List<byte[]> extendedHeader = new List<byte[]>();

View File

@@ -0,0 +1,24 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Arj.Headers;
public partial class ArjLocalHeader
{
public override async ValueTask<ArjHeader?> ReadAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
var body = await ReadHeaderAsync(stream, cancellationToken);
if (body.Length > 0)
{
await ReadExtendedHeadersAsync(stream, cancellationToken);
var header = LoadFrom(body);
header.DataStartPosition = stream.Position;
return header;
}
return null;
}
}

View File

@@ -4,11 +4,12 @@ using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Arj.Headers
{
public class ArjLocalHeader : ArjHeader
public partial class ArjLocalHeader : ArjHeader
{
public ArchiveEncoding ArchiveEncoding { get; }
public long DataStartPosition { get; protected set; }
@@ -55,6 +56,8 @@ namespace SharpCompress.Common.Arj.Headers
return null;
}
// ReadAsync moved to ArjLocalHeader.Async.cs
public ArjLocalHeader LoadFrom(byte[] headerBytes)
{
int offset = 0;

View File

@@ -0,0 +1,18 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Arj.Headers;
public partial class ArjMainHeader
{
public override async ValueTask<ArjHeader?> ReadAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
var body = await ReadHeaderAsync(stream, cancellationToken);
await ReadExtendedHeadersAsync(stream, cancellationToken);
return LoadFrom(body);
}
}

View File

@@ -1,12 +1,14 @@
using System;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Crypto;
namespace SharpCompress.Common.Arj.Headers
{
public class ArjMainHeader : ArjHeader
public partial class ArjMainHeader : ArjHeader
{
private const int FIRST_HDR_SIZE = 34;
private const ushort ARJ_MAGIC = 0xEA60;
@@ -45,6 +47,8 @@ namespace SharpCompress.Common.Arj.Headers
return LoadFrom(body);
}
// ReadAsync moved to ArjMainHeader.Async.cs
public ArjMainHeader LoadFrom(byte[] headerBytes)
{
var offset = 1;

View File

@@ -16,6 +16,11 @@ namespace SharpCompress.Common
public AsyncBinaryReader(Stream stream, bool leaveOpen = false, int bufferSize = 4096)
{
if (!stream.CanRead)
{
throw new ArgumentException("Stream must be readable.");
}
_originalStream = stream ?? throw new ArgumentNullException(nameof(stream));
_leaveOpen = leaveOpen;

View File

@@ -0,0 +1,12 @@
namespace SharpCompress.Common;
public static class Constants
{
/// <summary>
/// The default buffer size for stream operations, matching .NET's Stream.CopyTo default of 81920 bytes.
/// This can be modified globally at runtime.
/// </summary>
public static int BufferSize { get; set; } = 81920;
public static int RewindableBufferSize { get; set; } = 81920;
}
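Since both knobs are mutable statics, tuning them is a one-time, process-wide assignment; the values below are illustrative:
// e.g. widen buffers before bulk extraction on fast storage
SharpCompress.Common.Constants.BufferSize = 1 << 20;           // 1 MiB copy buffer
SharpCompress.Common.Constants.RewindableBufferSize = 1 << 20; // 1 MiB rewind buffer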

View File

@@ -0,0 +1,84 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Common;
public partial class EntryStream
{
/// <summary>
/// Asynchronously skip the rest of the entry stream.
/// </summary>
public async ValueTask SkipEntryAsync(CancellationToken cancellationToken = default)
{
await this.SkipAsync(cancellationToken).ConfigureAwait(false);
_completed = true;
}
#if !LEGACY_DOTNET
public override async ValueTask DisposeAsync()
{
if (_isDisposed)
{
return;
}
_isDisposed = true;
if (!(_completed || _reader.Cancelled))
{
await SkipEntryAsync().ConfigureAwait(false);
}
// Need a safe, standard approach here - it's okay for compression streams to over-read; handling should be standardised
if (_stream is IStreamStack ss)
{
if (ss.BaseStream() is SharpCompress.Compressors.Deflate.DeflateStream deflateStream)
{
await deflateStream.FlushAsync().ConfigureAwait(false);
}
else if (ss.BaseStream() is SharpCompress.Compressors.LZMA.LzmaStream lzmaStream)
{
await lzmaStream.FlushAsync().ConfigureAwait(false);
}
}
#if DEBUG_STREAMS
this.DebugDispose(typeof(EntryStream));
#endif
await base.DisposeAsync().ConfigureAwait(false);
await _stream.DisposeAsync().ConfigureAwait(false);
}
#endif
public override async Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
var read = await _stream
.ReadAsync(buffer, offset, count, cancellationToken)
.ConfigureAwait(false);
if (read <= 0)
{
_completed = true;
}
return read;
}
#if !LEGACY_DOTNET
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
)
{
var read = await _stream.ReadAsync(buffer, cancellationToken).ConfigureAwait(false);
if (read <= 0)
{
_completed = true;
}
return read;
}
#endif
}
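Because DisposeAsync now drains unread bytes, an async consumer can bail out of an entry early without corrupting the reader's position. A sketch, with the reader and output stream assumed to exist:
await using (var entryStream = reader.OpenEntryStream())
{
    var buffer = new byte[81920];
    int n;
    while ((n = await entryStream.ReadAsync(buffer, 0, buffer.Length, cancellationToken)) > 0)
    {
        await output.WriteAsync(buffer, 0, n, cancellationToken);
    }
} // DisposeAsync skips any remainder so the next entry starts clean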

View File

@@ -8,7 +8,7 @@ using SharpCompress.Readers;
namespace SharpCompress.Common;
public class EntryStream : Stream, IStreamStack
public partial class EntryStream : Stream, IStreamStack
{
#if DEBUG_STREAMS
long IStreamStack.InstanceId { get; set; }
@@ -53,15 +53,6 @@ public class EntryStream : Stream, IStreamStack
_completed = true;
}
/// <summary>
/// Asynchronously skip the rest of the entry stream.
/// </summary>
public async ValueTask SkipEntryAsync(CancellationToken cancellationToken = default)
{
await this.SkipAsync(cancellationToken).ConfigureAwait(false);
_completed = true;
}
protected override void Dispose(bool disposing)
{
if (_isDisposed)
@@ -93,39 +84,6 @@ public class EntryStream : Stream, IStreamStack
_stream.Dispose();
}
#if !LEGACY_DOTNET
public override async ValueTask DisposeAsync()
{
if (_isDisposed)
{
return;
}
_isDisposed = true;
if (!(_completed || _reader.Cancelled))
{
await SkipEntryAsync().ConfigureAwait(false);
}
//Need a safe standard approach to this - it's okay for compression to overreads. Handling needs to be standardised
if (_stream is IStreamStack ss)
{
if (ss.BaseStream() is SharpCompress.Compressors.Deflate.DeflateStream deflateStream)
{
await deflateStream.FlushAsync().ConfigureAwait(false);
}
else if (ss.BaseStream() is SharpCompress.Compressors.LZMA.LzmaStream lzmaStream)
{
await lzmaStream.FlushAsync().ConfigureAwait(false);
}
}
#if DEBUG_STREAMS
this.DebugDispose(typeof(EntryStream));
#endif
await base.DisposeAsync().ConfigureAwait(false);
await _stream.DisposeAsync().ConfigureAwait(false);
}
#endif
public override bool CanRead => true;
public override bool CanSeek => false;
@@ -154,38 +112,6 @@ public class EntryStream : Stream, IStreamStack
return read;
}
public override async Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
var read = await _stream
.ReadAsync(buffer, offset, count, cancellationToken)
.ConfigureAwait(false);
if (read <= 0)
{
_completed = true;
}
return read;
}
#if !LEGACY_DOTNET
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
)
{
var read = await _stream.ReadAsync(buffer, cancellationToken).ConfigureAwait(false);
if (read <= 0)
{
_completed = true;
}
return read;
}
#endif
public override int ReadByte()
{
var value = _stream.ReadByte();

View File

@@ -0,0 +1,116 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common;
internal static partial class ExtractionMethods
{
public static async ValueTask WriteEntryToDirectoryAsync(
IEntry entry,
string destinationDirectory,
ExtractionOptions? options,
Func<string, ExtractionOptions?, CancellationToken, ValueTask> writeAsync,
CancellationToken cancellationToken = default
)
{
string destinationFileName;
var fullDestinationDirectoryPath = Path.GetFullPath(destinationDirectory);
//check for trailing slash.
if (
fullDestinationDirectoryPath[fullDestinationDirectoryPath.Length - 1]
!= Path.DirectorySeparatorChar
)
{
fullDestinationDirectoryPath += Path.DirectorySeparatorChar;
}
if (!Directory.Exists(fullDestinationDirectoryPath))
{
throw new ExtractionException(
$"Directory does not exist to extract to: {fullDestinationDirectoryPath}"
);
}
options ??= new ExtractionOptions() { Overwrite = true };
var file = Path.GetFileName(entry.Key.NotNull("Entry Key is null")).NotNull("File is null");
file = Utility.ReplaceInvalidFileNameChars(file);
if (options.ExtractFullPath)
{
var folder = Path.GetDirectoryName(entry.Key.NotNull("Entry Key is null"))
.NotNull("Directory is null");
var destdir = Path.GetFullPath(Path.Combine(fullDestinationDirectoryPath, folder));
if (!Directory.Exists(destdir))
{
if (!destdir.StartsWith(fullDestinationDirectoryPath, PathComparison))
{
throw new ExtractionException(
"Entry is trying to create a directory outside of the destination directory."
);
}
Directory.CreateDirectory(destdir);
}
destinationFileName = Path.Combine(destdir, file);
}
else
{
destinationFileName = Path.Combine(fullDestinationDirectoryPath, file);
}
if (!entry.IsDirectory)
{
destinationFileName = Path.GetFullPath(destinationFileName);
if (!destinationFileName.StartsWith(fullDestinationDirectoryPath, PathComparison))
{
throw new ExtractionException(
"Entry is trying to write a file outside of the destination directory."
);
}
await writeAsync(destinationFileName, options, cancellationToken).ConfigureAwait(false);
}
else if (options.ExtractFullPath && !Directory.Exists(destinationFileName))
{
Directory.CreateDirectory(destinationFileName);
}
}
public static async ValueTask WriteEntryToFileAsync(
IEntry entry,
string destinationFileName,
ExtractionOptions? options,
Func<string, FileMode, CancellationToken, ValueTask> openAndWriteAsync,
CancellationToken cancellationToken = default
)
{
if (entry.LinkTarget != null)
{
if (options?.WriteSymbolicLink is null)
{
throw new ExtractionException(
"Entry is a symbolic link but ExtractionOptions.WriteSymbolicLink delegate is null"
);
}
options.WriteSymbolicLink(destinationFileName, entry.LinkTarget);
}
else
{
var fm = FileMode.Create;
options ??= new ExtractionOptions() { Overwrite = true };
if (!options.Overwrite)
{
fm = FileMode.CreateNew;
}
await openAndWriteAsync(destinationFileName, fm, cancellationToken)
.ConfigureAwait(false);
entry.PreserveExtractionOptions(destinationFileName, options);
}
}
}
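The two StartsWith checks above are the zip-slip defence: every destination path is normalised and must stay under the extraction root. The same guard in isolation (a sketch; the real code uses the platform-dependent PathComparison):
static string ResolveSafely(string destinationRoot, string entryKey)
{
    var root = Path.GetFullPath(destinationRoot);
    if (!root.EndsWith(Path.DirectorySeparatorChar.ToString()))
    {
        root += Path.DirectorySeparatorChar;
    }
    var candidate = Path.GetFullPath(Path.Combine(root, entryKey));
    if (!candidate.StartsWith(root, StringComparison.Ordinal))
    {
        throw new InvalidOperationException("Entry escapes the destination directory.");
    }
    return candidate;
}
// ResolveSafely(@"C:\out", @"..\..\Windows\evil.dll") throws instead of writing outside C:\out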

View File

@@ -6,7 +6,7 @@ using System.Threading.Tasks;
namespace SharpCompress.Common;
internal static class ExtractionMethods
internal static partial class ExtractionMethods
{
/// <summary>
/// Gets the appropriate StringComparison for path checks based on the file system.
@@ -123,111 +123,4 @@ internal static class ExtractionMethods
entry.PreserveExtractionOptions(destinationFileName, options);
}
}
public static async ValueTask WriteEntryToDirectoryAsync(
IEntry entry,
string destinationDirectory,
ExtractionOptions? options,
Func<string, ExtractionOptions?, CancellationToken, ValueTask> writeAsync,
CancellationToken cancellationToken = default
)
{
string destinationFileName;
var fullDestinationDirectoryPath = Path.GetFullPath(destinationDirectory);
//check for trailing slash.
if (
fullDestinationDirectoryPath[fullDestinationDirectoryPath.Length - 1]
!= Path.DirectorySeparatorChar
)
{
fullDestinationDirectoryPath += Path.DirectorySeparatorChar;
}
if (!Directory.Exists(fullDestinationDirectoryPath))
{
throw new ExtractionException(
$"Directory does not exist to extract to: {fullDestinationDirectoryPath}"
);
}
options ??= new ExtractionOptions() { Overwrite = true };
var file = Path.GetFileName(entry.Key.NotNull("Entry Key is null")).NotNull("File is null");
file = Utility.ReplaceInvalidFileNameChars(file);
if (options.ExtractFullPath)
{
var folder = Path.GetDirectoryName(entry.Key.NotNull("Entry Key is null"))
.NotNull("Directory is null");
var destdir = Path.GetFullPath(Path.Combine(fullDestinationDirectoryPath, folder));
if (!Directory.Exists(destdir))
{
if (!destdir.StartsWith(fullDestinationDirectoryPath, PathComparison))
{
throw new ExtractionException(
"Entry is trying to create a directory outside of the destination directory."
);
}
Directory.CreateDirectory(destdir);
}
destinationFileName = Path.Combine(destdir, file);
}
else
{
destinationFileName = Path.Combine(fullDestinationDirectoryPath, file);
}
if (!entry.IsDirectory)
{
destinationFileName = Path.GetFullPath(destinationFileName);
if (!destinationFileName.StartsWith(fullDestinationDirectoryPath, PathComparison))
{
throw new ExtractionException(
"Entry is trying to write a file outside of the destination directory."
);
}
await writeAsync(destinationFileName, options, cancellationToken).ConfigureAwait(false);
}
else if (options.ExtractFullPath && !Directory.Exists(destinationFileName))
{
Directory.CreateDirectory(destinationFileName);
}
}
public static async ValueTask WriteEntryToFileAsync(
IEntry entry,
string destinationFileName,
ExtractionOptions? options,
Func<string, FileMode, CancellationToken, ValueTask> openAndWriteAsync,
CancellationToken cancellationToken = default
)
{
if (entry.LinkTarget != null)
{
if (options?.WriteSymbolicLink is null)
{
throw new ExtractionException(
"Entry is a symbolic link but ExtractionOptions.WriteSymbolicLink delegate is null"
);
}
options.WriteSymbolicLink(destinationFileName, entry.LinkTarget);
}
else
{
var fm = FileMode.Create;
options ??= new ExtractionOptions() { Overwrite = true };
if (!options.Overwrite)
{
fm = FileMode.CreateNew;
}
await openAndWriteAsync(destinationFileName, fm, cancellationToken)
.ConfigureAwait(false);
entry.PreserveExtractionOptions(destinationFileName, options);
}
}
}

View File

@@ -0,0 +1,15 @@
using System.Collections.Generic;
using System.IO;
namespace SharpCompress.Common.GZip;
public partial class GZipEntry
{
internal static async IAsyncEnumerable<GZipEntry> GetEntriesAsync(
Stream stream,
OptionsBase options
)
{
yield return new GZipEntry(await GZipFilePart.CreateAsync(stream, options.ArchiveEncoding));
}
}
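A gzip stream holds exactly one member, so the async enumeration yields a single entry; consumption is the usual await foreach (a sketch - the method is internal and shown only for shape):
await foreach (var entry in GZipEntry.GetEntriesAsync(stream, options))
{
    Console.WriteLine($"{entry.Key}: {entry.Size} bytes");
}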

View File

@@ -4,7 +4,7 @@ using System.IO;
namespace SharpCompress.Common.GZip;
public class GZipEntry : Entry
public partial class GZipEntry : Entry
{
private readonly GZipFilePart? _filePart;
@@ -42,4 +42,6 @@ public class GZipEntry : Entry
{
yield return new GZipEntry(GZipFilePart.Create(stream, options.ArchiveEncoding));
}
// Async methods moved to GZipEntry.Async.cs
}

View File

@@ -0,0 +1,133 @@
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Compressors.Deflate;
namespace SharpCompress.Common.GZip;
internal sealed partial class GZipFilePart
{
internal static async ValueTask<GZipFilePart> CreateAsync(
Stream stream,
IArchiveEncoding archiveEncoding,
CancellationToken cancellationToken = default
)
{
var part = new GZipFilePart(stream, archiveEncoding);
await part.ReadAndValidateGzipHeaderAsync(cancellationToken);
if (stream.CanSeek)
{
var position = stream.Position;
stream.Position = stream.Length - 8;
await part.ReadTrailerAsync(cancellationToken);
stream.Position = position;
part.EntryStartPosition = position;
}
else
{
// For non-seekable streams, we can't read the trailer or track position.
// Set to 0 since the stream will be read sequentially from its current position.
part.EntryStartPosition = 0;
}
return part;
}
private async ValueTask ReadTrailerAsync(CancellationToken cancellationToken = default)
{
// Read and potentially verify the GZIP trailer: CRC32 and size mod 2^32
var trailer = new byte[8];
_ = await _stream.ReadFullyAsync(trailer, 0, 8, cancellationToken);
Crc = BinaryPrimitives.ReadUInt32LittleEndian(trailer);
UncompressedSize = BinaryPrimitives.ReadUInt32LittleEndian(trailer.AsSpan().Slice(4));
}
private async ValueTask ReadAndValidateGzipHeaderAsync(
CancellationToken cancellationToken = default
)
{
// read the header on the first read
var header = new byte[10];
var n = await _stream.ReadAsync(header, 0, 10, cancellationToken);
// workitem 8501: handle edge case (decompress empty stream)
if (n == 0)
{
return;
}
if (n != 10)
{
throw new ZlibException("Not a valid GZIP stream.");
}
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
{
throw new ZlibException("Bad GZIP header.");
}
var timet = BinaryPrimitives.ReadInt32LittleEndian(header.AsSpan().Slice(4));
DateModified = TarHeader.EPOCH.AddSeconds(timet);
if ((header[3] & 0x04) == 0x04)
{
// read and discard extra field
var lengthField = new byte[2];
_ = await _stream.ReadAsync(lengthField, 0, 2, cancellationToken);
var extraLength = (short)(lengthField[0] + (lengthField[1] * 256));
var extra = new byte[extraLength];
if (!await _stream.ReadFullyAsync(extra, cancellationToken))
{
throw new ZlibException("Unexpected end-of-file reading GZIP header.");
}
}
if ((header[3] & 0x08) == 0x08)
{
_name = await ReadZeroTerminatedStringAsync(_stream, cancellationToken);
}
if ((header[3] & 0x10) == 0x10)
{
await ReadZeroTerminatedStringAsync(_stream, cancellationToken);
}
if ((header[3] & 0x02) == 0x02)
{
var buf = new byte[1];
_ = await _stream.ReadAsync(buf, 0, 1, cancellationToken); // CRC16, ignore
}
}
private async ValueTask<string> ReadZeroTerminatedStringAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
var buf1 = new byte[1];
var list = new List<byte>();
var done = false;
do
{
// workitem 7740
var n = await stream.ReadAsync(buf1, 0, 1, cancellationToken);
if (n != 1)
{
throw new ZlibException("Unexpected EOF reading GZIP header.");
}
if (buf1[0] == 0)
{
done = true;
}
else
{
list.Add(buf1[0]);
}
} while (!done);
var buffer = list.ToArray();
return ArchiveEncoding.Decode(buffer);
}
}
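The header walk above follows RFC 1952 (magic 0x1F 0x8B, method 8, then optional extra/name/comment/CRC16 fields gated by FLG bits). For the trailer that ReadTrailerAsync consumes, a standalone sketch of the same layout on a seekable stream:
static (uint Crc32, uint SizeMod4G) ReadGzipTrailer(Stream gz)
{
    var trailer = new byte[8];
    gz.Position = gz.Length - 8; // last 8 bytes: CRC32 then ISIZE, both little-endian
    var read = 0;
    while (read < 8)
    {
        var n = gz.Read(trailer, read, 8 - read);
        if (n == 0) throw new EndOfStreamException();
        read += n;
    }
    return (
        BinaryPrimitives.ReadUInt32LittleEndian(trailer),
        BinaryPrimitives.ReadUInt32LittleEndian(trailer.AsSpan(4))
    );
}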

View File

@@ -2,15 +2,13 @@ using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;
namespace SharpCompress.Common.GZip;
internal sealed class GZipFilePart : FilePart
internal sealed partial class GZipFilePart : FilePart
{
private string? _name;
private readonly Stream _stream;
@@ -37,32 +35,6 @@ internal sealed class GZipFilePart : FilePart
return part;
}
internal static async ValueTask<GZipFilePart> CreateAsync(
Stream stream,
IArchiveEncoding archiveEncoding,
CancellationToken cancellationToken = default
)
{
var part = new GZipFilePart(stream, archiveEncoding);
await part.ReadAndValidateGzipHeaderAsync(cancellationToken);
if (stream.CanSeek)
{
var position = stream.Position;
stream.Position = stream.Length - 8;
await part.ReadTrailerAsync(cancellationToken);
stream.Position = position;
part.EntryStartPosition = position;
}
else
{
// For non-seekable streams, we can't read the trailer or track position.
// Set to 0 since the stream will be read sequentially from its current position.
part.EntryStartPosition = 0;
}
return part;
}
private GZipFilePart(Stream stream, IArchiveEncoding archiveEncoding)
: base(archiveEncoding) => _stream = stream;
@@ -75,7 +47,12 @@ internal sealed class GZipFilePart : FilePart
internal override string? FilePartName => _name;
internal override Stream GetCompressedStream() =>
new DeflateStream(_stream, CompressionMode.Decompress, CompressionLevel.Default);
new DeflateStream(
_stream,
CompressionMode.Decompress,
CompressionLevel.Default,
leaveOpen: true
);
internal override Stream GetRawStream() => _stream;
@@ -89,16 +66,6 @@ internal sealed class GZipFilePart : FilePart
UncompressedSize = BinaryPrimitives.ReadUInt32LittleEndian(trailer.Slice(4));
}
private async ValueTask ReadTrailerAsync(CancellationToken cancellationToken = default)
{
// Read and potentially verify the GZIP trailer: CRC32 and size mod 2^32
var trailer = new byte[8];
_ = await _stream.ReadFullyAsync(trailer, 0, 8, cancellationToken);
Crc = BinaryPrimitives.ReadUInt32LittleEndian(trailer);
UncompressedSize = BinaryPrimitives.ReadUInt32LittleEndian(trailer.AsSpan().Slice(4));
}
private void ReadAndValidateGzipHeader()
{
// read the header on the first read
@@ -151,61 +118,6 @@ internal sealed class GZipFilePart : FilePart
}
}
private async ValueTask ReadAndValidateGzipHeaderAsync(
CancellationToken cancellationToken = default
)
{
// read the header on the first read
var header = new byte[10];
var n = await _stream.ReadAsync(header, 0, 10, cancellationToken);
// workitem 8501: handle edge case (decompress empty stream)
if (n == 0)
{
return;
}
if (n != 10)
{
throw new ZlibException("Not a valid GZIP stream.");
}
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
{
throw new ZlibException("Bad GZIP header.");
}
var timet = BinaryPrimitives.ReadInt32LittleEndian(header.AsSpan().Slice(4));
DateModified = TarHeader.EPOCH.AddSeconds(timet);
if ((header[3] & 0x04) == 0x04)
{
// read and discard extra field
var lengthField = new byte[2];
_ = await _stream.ReadAsync(lengthField, 0, 2, cancellationToken);
var extraLength = (short)(lengthField[0] + (lengthField[1] * 256));
var extra = new byte[extraLength];
if (!await _stream.ReadFullyAsync(extra, cancellationToken))
{
throw new ZlibException("Unexpected end-of-file reading GZIP header.");
}
}
if ((header[3] & 0x08) == 0x08)
{
_name = await ReadZeroTerminatedStringAsync(_stream, cancellationToken);
}
if ((header[3] & 0x10) == 0x010)
{
await ReadZeroTerminatedStringAsync(_stream, cancellationToken);
}
if ((header[3] & 0x02) == 0x02)
{
var buf = new byte[1];
_ = await _stream.ReadAsync(buf, 0, 1, cancellationToken); // CRC16, ignore
}
}
private string ReadZeroTerminatedString(Stream stream)
{
Span<byte> buf1 = stackalloc byte[1];
@@ -231,33 +143,4 @@ internal sealed class GZipFilePart : FilePart
var buffer = list.ToArray();
return ArchiveEncoding.Decode(buffer);
}
private async ValueTask<string> ReadZeroTerminatedStringAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
var buf1 = new byte[1];
var list = new List<byte>();
var done = false;
do
{
// workitem 7740
var n = await stream.ReadAsync(buf1, 0, 1, cancellationToken);
if (n != 1)
{
throw new ZlibException("Unexpected EOF reading GZIP header.");
}
if (buf1[0] == 0)
{
done = true;
}
else
{
list.Add(buf1[0]);
}
} while (!done);
var buffer = list.ToArray();
return ArchiveEncoding.Decode(buffer);
}
}

View File

@@ -2,7 +2,7 @@ using System;
namespace SharpCompress.Common;
public interface IVolume : IDisposable
public interface IVolume : IDisposable, IAsyncDisposable
{
int Index { get; }

View File

@@ -0,0 +1,189 @@
using System;
using System.Buffers.Binary;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Common.Rar;
internal class AsyncMarkingBinaryReader
{
private readonly AsyncBinaryReader _reader;
public AsyncMarkingBinaryReader(Stream stream)
{
_reader = new AsyncBinaryReader(stream, leaveOpen: true);
}
public Stream BaseStream => _reader.BaseStream;
public virtual long CurrentReadByteCount { get; protected set; }
public virtual void Mark() => CurrentReadByteCount = 0;
public virtual async ValueTask<bool> ReadBooleanAsync(
CancellationToken cancellationToken = default
) => await ReadByteAsync(cancellationToken).ConfigureAwait(false) != 0;
public virtual async ValueTask<byte> ReadByteAsync(
CancellationToken cancellationToken = default
)
{
CurrentReadByteCount++;
return await _reader.ReadByteAsync(cancellationToken).ConfigureAwait(false);
}
public virtual async ValueTask<byte[]> ReadBytesAsync(
int count,
CancellationToken cancellationToken = default
)
{
CurrentReadByteCount += count;
var bytes = new byte[count];
await _reader.ReadBytesAsync(bytes, 0, count, cancellationToken).ConfigureAwait(false);
return bytes;
}
public async ValueTask<ushort> ReadUInt16Async(CancellationToken cancellationToken = default)
{
var bytes = await ReadBytesAsync(2, cancellationToken).ConfigureAwait(false);
return BinaryPrimitives.ReadUInt16LittleEndian(bytes);
}
public async ValueTask<uint> ReadUInt32Async(CancellationToken cancellationToken = default)
{
var bytes = await ReadBytesAsync(4, cancellationToken).ConfigureAwait(false);
return BinaryPrimitives.ReadUInt32LittleEndian(bytes);
}
public virtual async ValueTask<ulong> ReadUInt64Async(
CancellationToken cancellationToken = default
)
{
var bytes = await ReadBytesAsync(8, cancellationToken).ConfigureAwait(false);
return BinaryPrimitives.ReadUInt64LittleEndian(bytes);
}
public virtual async ValueTask<short> ReadInt16Async(
CancellationToken cancellationToken = default
)
{
var bytes = await ReadBytesAsync(2, cancellationToken).ConfigureAwait(false);
return BinaryPrimitives.ReadInt16LittleEndian(bytes);
}
public virtual async ValueTask<int> ReadInt32Async(
CancellationToken cancellationToken = default
)
{
var bytes = await ReadBytesAsync(4, cancellationToken).ConfigureAwait(false);
return BinaryPrimitives.ReadInt32LittleEndian(bytes);
}
public virtual async ValueTask<long> ReadInt64Async(
CancellationToken cancellationToken = default
)
{
var bytes = await ReadBytesAsync(8, cancellationToken).ConfigureAwait(false);
return BinaryPrimitives.ReadInt64LittleEndian(bytes);
}
public async ValueTask<ulong> ReadRarVIntAsync(
CancellationToken cancellationToken = default,
int maxBytes = 10
) => await DoReadRarVIntAsync((maxBytes - 1) * 7, cancellationToken).ConfigureAwait(false);
private async ValueTask<ulong> DoReadRarVIntAsync(
int maxShift,
CancellationToken cancellationToken
)
{
var shift = 0;
ulong result = 0;
do
{
var b0 = await ReadByteAsync(cancellationToken).ConfigureAwait(false);
var b1 = ((uint)b0) & 0x7f;
ulong n = b1;
var shifted = n << shift;
if (n != shifted >> shift)
{
// overflow
break;
}
result |= shifted;
if (b0 == b1)
{
return result;
}
shift += 7;
} while (shift <= maxShift);
throw new FormatException("malformed vint");
}
public async ValueTask<uint> ReadRarVIntUInt32Async(
int maxBytes = 5,
CancellationToken cancellationToken = default
) =>
// hopefully this gets inlined
await DoReadRarVIntUInt32Async((maxBytes - 1) * 7, cancellationToken).ConfigureAwait(false);
public async ValueTask<ushort> ReadRarVIntUInt16Async(
int maxBytes = 3,
CancellationToken cancellationToken = default
) =>
// hopefully this gets inlined
checked(
(ushort)
await DoReadRarVIntUInt32Async((maxBytes - 1) * 7, cancellationToken)
.ConfigureAwait(false)
);
public async ValueTask<byte> ReadRarVIntByteAsync(
int maxBytes = 2,
CancellationToken cancellationToken = default
) =>
// hopefully this gets inlined
checked(
(byte)
await DoReadRarVIntUInt32Async((maxBytes - 1) * 7, cancellationToken)
.ConfigureAwait(false)
);
public async ValueTask SkipAsync(int count, CancellationToken cancellationToken = default)
{
CurrentReadByteCount += count;
await _reader.SkipAsync(count, cancellationToken).ConfigureAwait(false);
}
private async ValueTask<uint> DoReadRarVIntUInt32Async(
int maxShift,
CancellationToken cancellationToken = default
)
{
var shift = 0;
uint result = 0;
do
{
var b0 = await ReadByteAsync(cancellationToken).ConfigureAwait(false);
var b1 = ((uint)b0) & 0x7f;
var n = b1;
var shifted = n << shift;
if (n != shifted >> shift)
{
// overflow
break;
}
result |= shifted;
if (b0 == b1)
{
return result;
}
shift += 7;
} while (shift <= maxShift);
throw new FormatException("malformed vint");
}
}
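The vint scheme above stores 7 payload bits per byte, with the high bit set on every byte except the last; a standalone decoder for the same format:
static ulong DecodeRarVInt(ReadOnlySpan<byte> bytes)
{
    ulong result = 0;
    var shift = 0;
    foreach (var b in bytes)
    {
        result |= (ulong)(b & 0x7f) << shift; // low 7 bits are payload
        if ((b & 0x80) == 0)                  // clear high bit = last byte
        {
            return result;
        }
        shift += 7;
    }
    throw new FormatException("malformed vint");
}
// e.g. DecodeRarVInt(stackalloc byte[] { 0x96, 0x01 }) == 150  (0x16 + (1 << 7))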

View File

@@ -0,0 +1,45 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.Rar;
namespace SharpCompress.Common.Rar;
internal class AsyncRarCrcBinaryReader(Stream stream) : AsyncMarkingBinaryReader(stream)
{
private uint _currentCrc;
public uint GetCrc32() => ~_currentCrc;
public void ResetCrc() => _currentCrc = 0xffffffff;
protected void UpdateCrc(byte b) => _currentCrc = RarCRC.CheckCrc(_currentCrc, b);
protected async ValueTask<byte[]> ReadBytesNoCrcAsync(
int count,
CancellationToken cancellationToken = default
)
{
return await base.ReadBytesAsync(count, cancellationToken).ConfigureAwait(false);
}
public override async ValueTask<byte> ReadByteAsync(
CancellationToken cancellationToken = default
)
{
var b = await base.ReadByteAsync(cancellationToken).ConfigureAwait(false);
_currentCrc = RarCRC.CheckCrc(_currentCrc, b);
return b;
}
public override async ValueTask<byte[]> ReadBytesAsync(
int count,
CancellationToken cancellationToken = default
)
{
var result = await base.ReadBytesAsync(count, cancellationToken).ConfigureAwait(false);
_currentCrc = RarCRC.CheckCrc(_currentCrc, result, 0, result.Length);
return result;
}
}

View File

@@ -0,0 +1,109 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Crypto;
namespace SharpCompress.Common.Rar;
internal sealed class AsyncRarCryptoBinaryReader : AsyncRarCrcBinaryReader
{
private BlockTransformer _rijndael = default!;
private readonly Queue<byte> _data = new();
private long _readCount;
private AsyncRarCryptoBinaryReader(Stream stream)
: base(stream) { }
public static async ValueTask<AsyncRarCryptoBinaryReader> Create(
Stream stream,
ICryptKey cryptKey,
byte[]? salt = null
)
{
var binary = new AsyncRarCryptoBinaryReader(stream);
if (salt == null)
{
salt = await binary
.ReadBytesAsyncBase(EncryptionConstV5.SIZE_SALT30)
.ConfigureAwait(false);
binary._readCount += EncryptionConstV5.SIZE_SALT30;
}
binary._rijndael = new BlockTransformer(cryptKey.Transformer(salt));
return binary;
}
public override long CurrentReadByteCount
{
get => _readCount;
protected set
{
// ignore
}
}
public override void Mark() => _readCount = 0;
public override async ValueTask<byte> ReadByteAsync(
CancellationToken cancellationToken = default
)
{
var bytes = await ReadAndDecryptBytesAsync(1, cancellationToken).ConfigureAwait(false);
return bytes[0];
}
private ValueTask<byte[]> ReadBytesAsyncBase(int count) => base.ReadBytesAsync(count);
public override async ValueTask<byte[]> ReadBytesAsync(
int count,
CancellationToken cancellationToken = default
)
{
return await ReadAndDecryptBytesAsync(count, cancellationToken).ConfigureAwait(false);
}
private async ValueTask<byte[]> ReadAndDecryptBytesAsync(
int count,
CancellationToken cancellationToken
)
{
var queueSize = _data.Count;
var sizeToRead = count - queueSize;
if (sizeToRead > 0)
{
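// Round sizeToRead up to the AES block size: ((~n + 1) & 0xf) equals (16 - n % 16) % 16, the gap to the next multiple of 16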
var alignedSize = sizeToRead + ((~sizeToRead + 1) & 0xf);
for (var i = 0; i < alignedSize / 16; i++)
{
var cipherText = await ReadBytesNoCrcAsync(16, cancellationToken)
.ConfigureAwait(false);
var readBytes = _rijndael.ProcessBlock(cipherText);
foreach (var readByte in readBytes)
{
_data.Enqueue(readByte);
}
}
}
var decryptedBytes = new byte[count];
for (var i = 0; i < count; i++)
{
var b = _data.Dequeue();
decryptedBytes[i] = b;
UpdateCrc(b);
}
_readCount += count;
return decryptedBytes;
}
public void ClearQueue() => _data.Clear();
public void SkipQueue()
{
var position = BaseStream.Position;
BaseStream.Position = position + _data.Count;
ClearQueue();
}
}

View File

@@ -4,13 +4,14 @@ namespace SharpCompress.Common.Rar.Headers;
internal class AvHeader : RarHeader
{
public AvHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Av)
public static AvHeader Create(RarHeader header, RarCrcBinaryReader reader)
{
if (IsRar5)
var c = CreateChild<AvHeader>(header, reader, HeaderType.Av);
if (c.IsRar5)
{
throw new InvalidFormatException("unexpected rar5 record");
}
return c;
}
protected override void ReadFinish(MarkingBinaryReader reader)

View File

@@ -0,0 +1,32 @@
#nullable disable
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Rar;
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers;
internal sealed partial class ArchiveCryptHeader
{
public static async ValueTask<ArchiveCryptHeader> CreateAsync(
RarHeader header,
AsyncRarCrcBinaryReader reader,
CancellationToken cancellationToken = default
) =>
await CreateChildAsync<ArchiveCryptHeader>(
header,
reader,
HeaderType.Crypt,
cancellationToken
)
.ConfigureAwait(false);
protected sealed override async ValueTask ReadFinishAsync(
AsyncMarkingBinaryReader reader,
CancellationToken cancellationToken = default
)
{
CryptInfo = await Rar5CryptoInfo.CreateAsync(reader, false).ConfigureAwait(false);
}
}

View File

@@ -1,16 +1,17 @@
#nullable disable
using SharpCompress.Common.Rar;
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers;
internal class ArchiveCryptHeader : RarHeader
internal sealed partial class ArchiveCryptHeader : RarHeader
{
public ArchiveCryptHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Crypt) { }
public static ArchiveCryptHeader Create(RarHeader header, RarCrcBinaryReader reader) =>
CreateChild<ArchiveCryptHeader>(header, reader, HeaderType.Crypt);
public Rar5CryptoInfo CryptInfo = new();
public Rar5CryptoInfo CryptInfo = default!;
protected override void ReadFinish(MarkingBinaryReader reader) =>
CryptInfo = new Rar5CryptoInfo(reader, false);
protected sealed override void ReadFinish(MarkingBinaryReader reader) =>
CryptInfo = Rar5CryptoInfo.Create(reader, false);
}

View File

@@ -0,0 +1,53 @@
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Rar;
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers;
internal sealed partial class ArchiveHeader
{
public static async ValueTask<ArchiveHeader> CreateAsync(
RarHeader header,
AsyncRarCrcBinaryReader reader,
CancellationToken cancellationToken = default
) =>
await CreateChildAsync<ArchiveHeader>(header, reader, HeaderType.Archive, cancellationToken)
.ConfigureAwait(false);
protected sealed override async ValueTask ReadFinishAsync(
AsyncMarkingBinaryReader reader,
CancellationToken cancellationToken = default
)
{
if (IsRar5)
{
Flags = await reader
.ReadRarVIntUInt16Async(cancellationToken: cancellationToken)
.ConfigureAwait(false);
if (HasFlag(ArchiveFlagsV5.HAS_VOLUME_NUMBER))
{
VolumeNumber = (int)
await reader
.ReadRarVIntUInt32Async(cancellationToken: cancellationToken)
.ConfigureAwait(false);
}
// later: we may have a locator record if we need it
//if (ExtraSize != 0) {
// ReadLocator(reader);
//}
}
else
{
Flags = HeaderFlags;
HighPosAv = await reader.ReadInt16Async(cancellationToken).ConfigureAwait(false);
PosAv = await reader.ReadInt32Async(cancellationToken).ConfigureAwait(false);
if (HasFlag(ArchiveFlagsV4.ENCRYPT_VER))
{
EncryptionVersion = await reader
.ReadByteAsync(cancellationToken)
.ConfigureAwait(false);
}
}
}
}

View File

@@ -1,13 +1,14 @@
using SharpCompress.Common.Rar;
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers;
internal sealed class ArchiveHeader : RarHeader
internal sealed partial class ArchiveHeader : RarHeader
{
public ArchiveHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Archive) { }
public static ArchiveHeader Create(RarHeader header, RarCrcBinaryReader reader) =>
CreateChild<ArchiveHeader>(header, reader, HeaderType.Archive);
protected override void ReadFinish(MarkingBinaryReader reader)
protected sealed override void ReadFinish(MarkingBinaryReader reader)
{
if (IsRar5)
{

View File

@@ -4,13 +4,14 @@ namespace SharpCompress.Common.Rar.Headers;
internal class CommentHeader : RarHeader
{
protected CommentHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Comment)
public static CommentHeader Create(RarHeader header, RarCrcBinaryReader reader)
{
if (IsRar5)
var c = CreateChild<CommentHeader>(header, reader, HeaderType.Comment);
if (c.IsRar5)
{
throw new InvalidFormatException("unexpected rar5 record");
}
return c;
}
protected override void ReadFinish(MarkingBinaryReader reader)

View File

@@ -0,0 +1,47 @@
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Rar;
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers;
internal sealed partial class EndArchiveHeader
{
public static async ValueTask<EndArchiveHeader> CreateAsync(
RarHeader header,
AsyncRarCrcBinaryReader reader,
CancellationToken cancellationToken = default
) =>
await CreateChildAsync<EndArchiveHeader>(
header,
reader,
HeaderType.EndArchive,
cancellationToken
)
.ConfigureAwait(false);
protected sealed override async ValueTask ReadFinishAsync(
AsyncMarkingBinaryReader reader,
CancellationToken cancellationToken = default
)
{
if (IsRar5)
{
Flags = await reader
.ReadRarVIntUInt16Async(cancellationToken: cancellationToken)
.ConfigureAwait(false);
}
else
{
Flags = HeaderFlags;
if (HasFlag(EndArchiveFlagsV4.DATA_CRC))
{
ArchiveCrc = await reader.ReadInt32Async(cancellationToken).ConfigureAwait(false);
}
if (HasFlag(EndArchiveFlagsV4.VOLUME_NUMBER))
{
VolumeNumber = await reader.ReadInt16Async(cancellationToken).ConfigureAwait(false);
}
}
}
}

View File

@@ -1,13 +1,14 @@
using SharpCompress.IO;
using SharpCompress.Common.Rar;
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers;
internal class EndArchiveHeader : RarHeader
internal sealed partial class EndArchiveHeader : RarHeader
{
public EndArchiveHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.EndArchive) { }
public static EndArchiveHeader Create(RarHeader header, RarCrcBinaryReader reader) =>
CreateChild<EndArchiveHeader>(header, reader, HeaderType.EndArchive);
protected override void ReadFinish(MarkingBinaryReader reader)
protected sealed override void ReadFinish(MarkingBinaryReader reader)
{
if (IsRar5)
{

View File

@@ -0,0 +1,443 @@
using System;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Rar;
using SharpCompress.IO;
#if !Rar2017_64bit
using size_t = System.UInt32;
#else
using nint = System.Int64;
using nuint = System.UInt64;
using size_t = System.UInt64;
#endif
namespace SharpCompress.Common.Rar.Headers;
internal partial class FileHeader
{
public static async ValueTask<FileHeader> CreateAsync(
RarHeader header,
AsyncRarCrcBinaryReader reader,
HeaderType headerType,
CancellationToken cancellationToken = default
) =>
await CreateChildAsync<FileHeader>(header, reader, headerType, cancellationToken)
.ConfigureAwait(false);
protected override async ValueTask ReadFinishAsync(
AsyncMarkingBinaryReader reader,
CancellationToken cancellationToken
)
{
if (IsRar5)
{
await ReadFromReaderV5Async(reader, cancellationToken).ConfigureAwait(false);
}
else
{
await ReadFromReaderV4Async(reader, cancellationToken).ConfigureAwait(false);
}
}
private async ValueTask ReadFromReaderV5Async(
AsyncMarkingBinaryReader reader,
CancellationToken cancellationToken
)
{
Flags = await reader
.ReadRarVIntUInt16Async(cancellationToken: cancellationToken)
.ConfigureAwait(false);
var lvalue = checked(
(long)
await reader
.ReadRarVIntAsync(cancellationToken: cancellationToken)
.ConfigureAwait(false)
);
UncompressedSize = HasFlag(FileFlagsV5.UNPACKED_SIZE_UNKNOWN) ? long.MaxValue : lvalue;
FileAttributes = await reader
.ReadRarVIntUInt32Async(cancellationToken: cancellationToken)
.ConfigureAwait(false);
if (HasFlag(FileFlagsV5.HAS_MOD_TIME))
{
FileLastModifiedTime = Utility.UnixTimeToDateTime(
await reader.ReadUInt32Async(cancellationToken).ConfigureAwait(false)
);
}
if (HasFlag(FileFlagsV5.HAS_CRC32))
{
FileCrc = await reader.ReadBytesAsync(4, cancellationToken).ConfigureAwait(false);
}
var compressionInfo = await reader
.ReadRarVIntUInt16Async(cancellationToken: cancellationToken)
.ConfigureAwait(false);
CompressionAlgorithm = (byte)((compressionInfo & 0x3f) + 50);
IsSolid = (compressionInfo & 0x40) == 0x40;
CompressionMethod = (byte)((compressionInfo >> 7) & 0x7);
WindowSize = IsDirectory ? 0 : ((size_t)0x20000) << ((compressionInfo >> 10) & 0xf);
HostOs = await reader
.ReadRarVIntByteAsync(cancellationToken: cancellationToken)
.ConfigureAwait(false);
var nameSize = await reader
.ReadRarVIntUInt16Async(cancellationToken: cancellationToken)
.ConfigureAwait(false);
var b = await reader.ReadBytesAsync(nameSize, cancellationToken).ConfigureAwait(false);
FileName = ConvertPathV5(Encoding.UTF8.GetString(b, 0, b.Length));
if (ExtraSize != (uint)RemainingHeaderBytesAsync(reader))
{
throw new InvalidFormatException("rar5 header size / extra size inconsistency");
}
const ushort FHEXTRA_CRYPT = 0x01;
const ushort FHEXTRA_HASH = 0x02;
const ushort FHEXTRA_HTIME = 0x03;
const ushort FHEXTRA_REDIR = 0x05;
while (reader.CurrentReadByteCount < HeaderSize)
{
var size = await reader
.ReadRarVIntUInt16Async(cancellationToken: cancellationToken)
.ConfigureAwait(false);
var n = HeaderSize - reader.CurrentReadByteCount;
var type = await reader
.ReadRarVIntUInt16Async(cancellationToken: cancellationToken)
.ConfigureAwait(false);
switch (type)
{
case FHEXTRA_CRYPT:
{
Rar5CryptoInfo = await Rar5CryptoInfo
.CreateAsync(reader, true)
.ConfigureAwait(false);
if (Rar5CryptoInfo.PswCheck.All(singleByte => singleByte == 0))
{
Rar5CryptoInfo = null;
}
}
break;
case FHEXTRA_HASH:
{
const uint FHEXTRA_HASH_BLAKE2 = 0x0;
const int BLAKE2_DIGEST_SIZE = 0x20;
if (
await reader
.ReadRarVIntUInt32Async(cancellationToken: cancellationToken)
.ConfigureAwait(false) == FHEXTRA_HASH_BLAKE2
)
{
_hash = await reader
.ReadBytesAsync(BLAKE2_DIGEST_SIZE, cancellationToken)
.ConfigureAwait(false);
}
}
break;
case FHEXTRA_HTIME:
{
var flags = await reader
.ReadRarVIntUInt16Async(cancellationToken: cancellationToken)
.ConfigureAwait(false);
var isWindowsTime = (flags & 1) == 0;
if ((flags & 0x2) == 0x2)
{
FileLastModifiedTime = await ReadExtendedTimeV5Async(
reader,
isWindowsTime,
cancellationToken
)
.ConfigureAwait(false);
}
if ((flags & 0x4) == 0x4)
{
FileCreatedTime = await ReadExtendedTimeV5Async(
reader,
isWindowsTime,
cancellationToken
)
.ConfigureAwait(false);
}
if ((flags & 0x8) == 0x8)
{
FileLastAccessedTime = await ReadExtendedTimeV5Async(
reader,
isWindowsTime,
cancellationToken
)
.ConfigureAwait(false);
}
}
break;
case FHEXTRA_REDIR:
{
RedirType = await reader
.ReadRarVIntByteAsync(cancellationToken: cancellationToken)
.ConfigureAwait(false);
RedirFlags = await reader
.ReadRarVIntByteAsync(cancellationToken: cancellationToken)
.ConfigureAwait(false);
var nn = await reader
.ReadRarVIntUInt16Async(cancellationToken: cancellationToken)
.ConfigureAwait(false);
var bb = await reader
.ReadBytesAsync(nn, cancellationToken)
.ConfigureAwait(false);
RedirTargetName = ConvertPathV5(Encoding.UTF8.GetString(bb, 0, bb.Length));
}
break;
default:
break;
}
var did = (int)(n - (HeaderSize - reader.CurrentReadByteCount));
var drain = size - did;
if (drain > 0)
{
await reader.ReadBytesAsync(drain, cancellationToken).ConfigureAwait(false);
}
}
if (AdditionalDataSize != 0)
{
CompressedSize = AdditionalDataSize;
}
}
private async ValueTask ReadFromReaderV4Async(
AsyncMarkingBinaryReader reader,
CancellationToken cancellationToken
)
{
Flags = HeaderFlags;
IsSolid = HasFlag(FileFlagsV4.SOLID);
WindowSize = IsDirectory
? 0U
: ((size_t)0x10000) << ((Flags & FileFlagsV4.WINDOW_MASK) >> 5);
var lowUncompressedSize = await reader
.ReadUInt32Async(cancellationToken)
.ConfigureAwait(false);
HostOs = await reader.ReadByteAsync(cancellationToken).ConfigureAwait(false);
FileCrc = await reader.ReadBytesAsync(4, cancellationToken).ConfigureAwait(false);
FileLastModifiedTime = Utility.DosDateToDateTime(
await reader.ReadUInt32Async(cancellationToken).ConfigureAwait(false)
);
CompressionAlgorithm = await reader.ReadByteAsync(cancellationToken).ConfigureAwait(false);
CompressionMethod = (byte)(
(await reader.ReadByteAsync(cancellationToken).ConfigureAwait(false)) - 0x30
);
var nameSize = await reader.ReadInt16Async(cancellationToken).ConfigureAwait(false);
FileAttributes = await reader.ReadUInt32Async(cancellationToken).ConfigureAwait(false);
uint highCompressedSize = 0;
uint highUncompressedkSize = 0;
if (HasFlag(FileFlagsV4.LARGE))
{
highCompressedSize = await reader
.ReadUInt32Async(cancellationToken)
.ConfigureAwait(false);
highUncompressedkSize = await reader
.ReadUInt32Async(cancellationToken)
.ConfigureAwait(false);
}
else
{
if (lowUncompressedSize == 0xffffffff)
{
lowUncompressedSize = 0xffffffff;
highUncompressedkSize = int.MaxValue;
}
}
CompressedSize = UInt32To64(highCompressedSize, checked((uint)AdditionalDataSize));
UncompressedSize = UInt32To64(highUncompressedkSize, lowUncompressedSize);
nameSize = nameSize > 4 * 1024 ? (short)(4 * 1024) : nameSize;
var fileNameBytes = await reader
.ReadBytesAsync(nameSize, cancellationToken)
.ConfigureAwait(false);
const int newLhdSize = 32;
switch (HeaderCode)
{
case HeaderCodeV.RAR4_FILE_HEADER:
{
if (HasFlag(FileFlagsV4.UNICODE))
{
var length = 0;
while (length < fileNameBytes.Length && fileNameBytes[length] != 0)
{
length++;
}
if (length != nameSize)
{
length++;
FileName = FileNameDecoder.Decode(fileNameBytes, length);
}
else
{
FileName = ArchiveEncoding.Decode(fileNameBytes);
}
}
else
{
FileName = ArchiveEncoding.Decode(fileNameBytes);
}
FileName = ConvertPathV4(FileName);
}
break;
case HeaderCodeV.RAR4_NEW_SUB_HEADER:
{
var datasize = HeaderSize - newLhdSize - nameSize;
if (HasFlag(FileFlagsV4.SALT))
{
datasize -= EncryptionConstV5.SIZE_SALT30;
}
if (datasize > 0)
{
SubData = await reader
.ReadBytesAsync(datasize, cancellationToken)
.ConfigureAwait(false);
}
if (NewSubHeaderType.SUBHEAD_TYPE_RR.Equals(fileNameBytes.Take(4).ToArray()))
{
if (SubData is null)
{
throw new InvalidFormatException();
}
RecoverySectors =
SubData[8]
+ (SubData[9] << 8)
+ (SubData[10] << 16)
+ (SubData[11] << 24);
}
}
break;
}
if (HasFlag(FileFlagsV4.SALT))
{
R4Salt = await reader
.ReadBytesAsync(EncryptionConstV5.SIZE_SALT30, cancellationToken)
.ConfigureAwait(false);
}
if (HasFlag(FileFlagsV4.EXT_TIME))
{
// verify that the end of the header hasn't been reached before reading the Extended Time.
// some tools incorrectly omit Extended Time despite specifying FileFlags.EXTTIME, which most parsers tolerate.
if (RemainingHeaderBytesAsync(reader) >= 2)
{
var extendedFlags = await reader
.ReadUInt16Async(cancellationToken)
.ConfigureAwait(false);
if (FileLastModifiedTime is not null)
{
FileLastModifiedTime = await ProcessExtendedTimeV4Async(
extendedFlags,
FileLastModifiedTime,
reader,
0,
cancellationToken
)
.ConfigureAwait(false);
}
FileCreatedTime = await ProcessExtendedTimeV4Async(
extendedFlags,
null,
reader,
1,
cancellationToken
)
.ConfigureAwait(false);
FileLastAccessedTime = await ProcessExtendedTimeV4Async(
extendedFlags,
null,
reader,
2,
cancellationToken
)
.ConfigureAwait(false);
FileArchivedTime = await ProcessExtendedTimeV4Async(
extendedFlags,
null,
reader,
3,
cancellationToken
)
.ConfigureAwait(false);
}
}
}
private static async ValueTask<DateTime> ReadExtendedTimeV5Async(
AsyncMarkingBinaryReader reader,
bool isWindowsTime,
CancellationToken cancellationToken
)
{
if (isWindowsTime)
{
return DateTime.FromFileTime(
await reader.ReadInt64Async(cancellationToken).ConfigureAwait(false)
);
}
else
{
return Utility.UnixTimeToDateTime(
await reader.ReadUInt32Async(cancellationToken).ConfigureAwait(false)
);
}
}
private static async ValueTask<DateTime?> ProcessExtendedTimeV4Async(
ushort extendedFlags,
DateTime? time,
AsyncMarkingBinaryReader reader,
int i,
CancellationToken cancellationToken
)
{
var rmode = (uint)extendedFlags >> ((3 - i) * 4);
if ((rmode & 8) == 0)
{
return null;
}
if (i != 0)
{
var dosTime = await reader.ReadUInt32Async(cancellationToken).ConfigureAwait(false);
time = Utility.DosDateToDateTime(dosTime);
}
if ((rmode & 4) == 0 && time is not null)
{
time = time.Value.AddSeconds(1);
}
uint nanosecondHundreds = 0;
var count = (int)rmode & 3;
for (var j = 0; j < count; j++)
{
var b = await reader.ReadByteAsync(cancellationToken).ConfigureAwait(false);
nanosecondHundreds |= (((uint)b) << ((j + 3 - count) * 8));
}
if (time is not null)
{
return time.Value.AddMilliseconds(nanosecondHundreds * Math.Pow(10, -4));
}
return null;
}
}
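For reference, the RAR4 EXT_TIME word decoded by ProcessExtendedTimeV4Async packs one nibble per time field, mtime first in the high nibble. A minimal sketch of the same nibble decoding as a pure helper; the tuple shape and the DecodeExtTimeNibble name are illustrative, not library API:

static (bool Present, bool AddSecond, int PrecisionBytes) DecodeExtTimeNibble(
    ushort extendedFlags,
    int fieldIndex // 0 = mtime, 1 = ctime, 2 = atime, 3 = archive time
)
{
    var nibble = (extendedFlags >> ((3 - fieldIndex) * 4)) & 0xF;
    return (
        (nibble & 8) != 0, // bit 3: this time field is present
        (nibble & 4) == 0, // bit 2 clear: add one second (as the method above does)
        nibble & 3 // bits 0-1: number of extra precision bytes, in 100 ns units
    );
}

The precision bytes fill the high-order positions of a three-byte value, which is why the loop above shifts by (j + 3 - count) * 8 before converting hundred-nanoseconds to milliseconds.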


@@ -2,6 +2,9 @@ using System;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Rar;
using SharpCompress.IO;
#if !Rar2017_64bit
using size_t = System.UInt32;
@@ -13,12 +16,15 @@ using size_t = System.UInt64;
namespace SharpCompress.Common.Rar.Headers;
internal class FileHeader : RarHeader
internal partial class FileHeader : RarHeader
{
private byte[]? _hash;
public FileHeader(RarHeader header, RarCrcBinaryReader reader, HeaderType headerType)
: base(header, reader, headerType) { }
public static FileHeader Create(
RarHeader header,
RarCrcBinaryReader reader,
HeaderType headerType
) => CreateChild<FileHeader>(header, reader, headerType);
protected override void ReadFinish(MarkingBinaryReader reader)
{
@@ -76,23 +82,6 @@ internal class FileHeader : RarHeader
var nameSize = reader.ReadRarVIntUInt16();
// Variable length field containing Name length bytes in UTF-8 format without trailing zero.
// For file header this is a name of archived file. Forward slash character is used as the path separator both for Unix and Windows names.
// Backslashes are treated as a part of name for Unix names and as invalid character for Windows file names. Type of name is defined by Host OS field.
//
// TODO: not sure if anything needs to be done to handle the following:
// If Unix file name contains any high ASCII characters which cannot be correctly converted to Unicode and UTF-8
// we map such characters to to 0xE080 - 0xE0FF private use Unicode area and insert 0xFFFE Unicode non-character
// to resulting string to indicate that it contains mapped characters, which need to be converted back when extracting.
// Concrete position of 0xFFFE is not defined, we need to search the entire string for it. Such mapped names are not
// portable and can be correctly unpacked only on the same system where they were created.
//
// For service header this field contains a name of service header. Now the following names are used:
// CMT Archive comment
// QO Archive quick open data
// ACL NTFS file permissions
// STM NTFS alternate data stream
// RR Recovery record
var b = reader.ReadBytes(nameSize);
FileName = ConvertPathV5(Encoding.UTF8.GetString(b, 0, b.Length));
@@ -119,7 +108,7 @@ internal class FileHeader : RarHeader
{
case FHEXTRA_CRYPT: // file encryption
{
Rar5CryptoInfo = new Rar5CryptoInfo(reader, true);
Rar5CryptoInfo = Rar5CryptoInfo.Create(reader, true);
if (Rar5CryptoInfo.PswCheck.All(singleByte => singleByte == 0))
{
@@ -130,14 +119,11 @@ internal class FileHeader : RarHeader
case FHEXTRA_HASH:
{
const uint FHEXTRA_HASH_BLAKE2 = 0x0;
// const uint HASH_BLAKE2 = 0x03;
const int BLAKE2_DIGEST_SIZE = 0x20;
if ((uint)reader.ReadRarVInt() == FHEXTRA_HASH_BLAKE2)
{
// var hash = HASH_BLAKE2;
_hash = reader.ReadBytes(BLAKE2_DIGEST_SIZE);
}
// enum HASH_TYPE {HASH_NONE,HASH_RAR14,HASH_CRC32,HASH_BLAKE2};
}
break;
case FHEXTRA_HTIME: // file time
@@ -158,12 +144,6 @@ internal class FileHeader : RarHeader
}
}
break;
//TODO
// case FHEXTRA_VERSION: // file version
// {
//
// }
// break;
case FHEXTRA_REDIR: // file system redirection
{
RedirType = reader.ReadRarVIntByte();
@@ -173,21 +153,7 @@ internal class FileHeader : RarHeader
RedirTargetName = ConvertPathV5(Encoding.UTF8.GetString(bb, 0, bb.Length));
}
break;
//TODO
// case FHEXTRA_UOWNER: // unix owner
// {
//
// }
// break;
// case FHEXTRA_SUBDATA: // service data
// {
//
// }
// break;
default:
// skip unknown record types to allow new record types to be added in the future
//Console.WriteLine($"unhandled rar header field type {type}");
break;
}
// drain any trailing bytes of extra record
@@ -336,8 +302,6 @@ internal class FileHeader : RarHeader
}
if (HasFlag(FileFlagsV4.EXT_TIME))
{
// verify that the end of the header hasn't been reached before reading the Extended Time.
// some tools incorrectly omit Extended Time despite specifying FileFlags.EXTTIME, which most parsers tolerate.
if (RemainingHeaderBytes(reader) >= 2)
{
var extendedFlags = reader.ReadUInt16();


@@ -0,0 +1,132 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Rar.Headers;
internal partial class MarkHeader
{
private static async ValueTask<byte> GetByteAsync(
Stream stream,
CancellationToken cancellationToken
)
{
var buffer = new byte[1];
var bytesRead = await stream
.ReadAsync(buffer, 0, 1, cancellationToken)
.ConfigureAwait(false);
if (bytesRead == 1)
{
return buffer[0];
}
throw new EndOfStreamException();
}
public static async ValueTask<MarkHeader> ReadAsync(
Stream stream,
bool leaveStreamOpen,
bool lookForHeader,
CancellationToken cancellationToken = default
)
{
var maxScanIndex = lookForHeader ? MAX_SFX_SIZE : 0;
try
{
var start = -1;
var b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
while (start <= maxScanIndex)
{
if (b == 0x52)
{
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b == 0x61)
{
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b != 0x72)
{
continue;
}
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b != 0x21)
{
continue;
}
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b != 0x1a)
{
continue;
}
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b != 0x07)
{
continue;
}
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b == 1)
{
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b != 0)
{
continue;
}
return new MarkHeader(true); // Rar5
}
else if (b == 0)
{
return new MarkHeader(false); // Rar4
}
}
else if (b == 0x45)
{
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b != 0x7e)
{
continue;
}
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
if (b != 0x5e)
{
continue;
}
throw new InvalidFormatException(
"Rar format version pre-4 is unsupported."
);
}
}
else
{
b = await GetByteAsync(stream, cancellationToken).ConfigureAwait(false);
start++;
}
}
}
catch (Exception e)
{
if (!leaveStreamOpen)
{
stream.Dispose();
}
throw new InvalidFormatException("Error trying to read rar signature.", e);
}
throw new InvalidFormatException("Rar signature not found");
}
}
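The byte-by-byte scan above is searching for one of two fixed signatures: RAR 4.x is 52 61 72 21 1A 07 00 and RAR 5.x is 52 61 72 21 1A 07 01 00. A buffered sketch of the same detection, assuming the candidate bytes are already in memory (DetectRar is a hypothetical helper; the streaming version above avoids buffering so it can scan ahead of SFX stubs):

static bool? DetectRar(ReadOnlySpan<byte> data)
{
    ReadOnlySpan<byte> prefix = [0x52, 0x61, 0x72, 0x21, 0x1A, 0x07]; // "Rar!\x1a\x07"
    if (data.Length < 7 || !data.StartsWith(prefix))
    {
        return null; // no RAR signature at this offset
    }
    if (data[6] == 0x00)
    {
        return false; // RAR 4.x
    }
    if (data[6] == 0x01 && data.Length > 7 && data[7] == 0x00)
    {
        return true; // RAR 5.x
    }
    return null;
}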


@@ -1,9 +1,11 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Rar.Headers;
internal class MarkHeader : IRarHeader
internal partial class MarkHeader : IRarHeader
{
private const int MAX_SFX_SIZE = 0x80000 - 16; //archive.cpp line 136


@@ -0,0 +1,40 @@
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Rar;
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers;
internal sealed partial class ProtectHeader
{
public static async ValueTask<ProtectHeader> CreateAsync(
RarHeader header,
AsyncRarCrcBinaryReader reader,
CancellationToken cancellationToken = default
)
{
var c = await CreateChildAsync<ProtectHeader>(
header,
reader,
HeaderType.Protect,
cancellationToken
)
.ConfigureAwait(false);
if (c.IsRar5)
{
throw new InvalidFormatException("unexpected rar5 record");
}
return c;
}
protected sealed override async ValueTask ReadFinishAsync(
AsyncMarkingBinaryReader reader,
CancellationToken cancellationToken = default
)
{
Version = await reader.ReadByteAsync(cancellationToken).ConfigureAwait(false);
RecSectors = await reader.ReadUInt16Async(cancellationToken).ConfigureAwait(false);
TotalBlocks = await reader.ReadUInt32Async(cancellationToken).ConfigureAwait(false);
Mark = await reader.ReadBytesAsync(8, cancellationToken).ConfigureAwait(false);
}
}


@@ -1,20 +1,21 @@
using SharpCompress.Common.Rar;
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers;
// ProtectHeader is part of the Recovery Record feature
internal sealed class ProtectHeader : RarHeader
internal sealed partial class ProtectHeader : RarHeader
{
public ProtectHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Protect)
public static ProtectHeader Create(RarHeader header, RarCrcBinaryReader reader)
{
if (IsRar5)
var c = CreateChild<ProtectHeader>(header, reader, HeaderType.Protect);
if (c.IsRar5)
{
throw new InvalidFormatException("unexpected rar5 record");
}
return c;
}
protected override void ReadFinish(MarkingBinaryReader reader)
protected sealed override void ReadFinish(MarkingBinaryReader reader)
{
Version = reader.ReadByte();
RecSectors = reader.ReadUInt16();


@@ -0,0 +1,115 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Rar;
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers;
internal partial class RarHeader
{
internal static async ValueTask<RarHeader?> TryReadBaseAsync(
AsyncRarCrcBinaryReader reader,
bool isRar5,
IArchiveEncoding archiveEncoding,
CancellationToken cancellationToken = default
)
{
try
{
var header = new RarHeader();
await header
.InitializeAsync(reader, isRar5, archiveEncoding, cancellationToken)
.ConfigureAwait(false);
return header;
}
catch (InvalidFormatException)
{
return null;
}
}
private async ValueTask InitializeAsync(
AsyncRarCrcBinaryReader reader,
bool isRar5,
IArchiveEncoding archiveEncoding,
CancellationToken cancellationToken
)
{
_headerType = HeaderType.Null;
_isRar5 = isRar5;
ArchiveEncoding = archiveEncoding;
if (IsRar5)
{
HeaderCrc = await reader.ReadUInt32Async(cancellationToken).ConfigureAwait(false);
reader.ResetCrc();
HeaderSize = (int)
await reader.ReadRarVIntUInt32Async(3, cancellationToken).ConfigureAwait(false);
reader.Mark();
HeaderCode = await reader
.ReadRarVIntByteAsync(cancellationToken: cancellationToken)
.ConfigureAwait(false);
HeaderFlags = await reader
.ReadRarVIntUInt16Async(2, cancellationToken)
.ConfigureAwait(false);
if (HasHeaderFlag(HeaderFlagsV5.HAS_EXTRA))
{
ExtraSize = await reader
.ReadRarVIntUInt32Async(cancellationToken: cancellationToken)
.ConfigureAwait(false);
}
if (HasHeaderFlag(HeaderFlagsV5.HAS_DATA))
{
AdditionalDataSize = (long)
await reader
.ReadRarVIntAsync(cancellationToken: cancellationToken)
.ConfigureAwait(false);
}
}
else
{
reader.Mark();
HeaderCrc = await reader.ReadUInt16Async(cancellationToken).ConfigureAwait(false);
reader.ResetCrc();
HeaderCode = await reader.ReadByteAsync(cancellationToken).ConfigureAwait(false);
HeaderFlags = await reader.ReadUInt16Async(cancellationToken).ConfigureAwait(false);
HeaderSize = await reader.ReadInt16Async(cancellationToken).ConfigureAwait(false);
if (HasHeaderFlag(HeaderFlagsV4.HAS_DATA))
{
AdditionalDataSize = await reader
.ReadUInt32Async(cancellationToken)
.ConfigureAwait(false);
}
}
}
internal static async ValueTask<T> CreateChildAsync<T>(
RarHeader header,
AsyncRarCrcBinaryReader reader,
HeaderType headerType,
CancellationToken cancellationToken = default
)
where T : RarHeader, new()
{
var child = new T() { ArchiveEncoding = header.ArchiveEncoding };
child._headerType = headerType;
child._isRar5 = header.IsRar5;
child.HeaderCrc = header.HeaderCrc;
child.HeaderCode = header.HeaderCode;
child.HeaderFlags = header.HeaderFlags;
child.HeaderSize = header.HeaderSize;
child.ExtraSize = header.ExtraSize;
child.AdditionalDataSize = header.AdditionalDataSize;
await child.ReadFinishAsync(reader, cancellationToken).ConfigureAwait(false);
var n = child.RemainingHeaderBytesAsync(reader);
if (n > 0)
{
await reader.ReadBytesAsync(n, cancellationToken).ConfigureAwait(false);
}
child.VerifyHeaderCrc(reader.GetCrc32());
return child;
}
}
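Most of the variable-size fields read above (header size, flags, extra size, additional data size) are RAR5 vints: the low seven bits of each byte carry data, and a set high bit means another byte follows. A minimal sketch of the decoding over a buffered span; the library's ReadRarVInt*Async methods read from the stream and cap the byte count instead:

static ulong ReadVInt(ReadOnlySpan<byte> bytes, ref int offset)
{
    ulong value = 0;
    for (var shift = 0; ; shift += 7)
    {
        var b = bytes[offset++];
        value |= (ulong)(b & 0x7F) << shift; // low 7 bits carry data
        if ((b & 0x80) == 0) // high bit clear: last byte of the vint
        {
            return value;
        }
    }
}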


@@ -1,14 +1,22 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Rar;
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers;
// http://www.forensicswiki.org/w/images/5/5b/RARFileStructure.txt
// https://www.rarlab.com/technote.htm
internal class RarHeader : IRarHeader
internal partial class RarHeader : IRarHeader
{
private readonly HeaderType _headerType;
private readonly bool _isRar5;
private HeaderType _headerType;
private bool _isRar5;
protected RarHeader()
{
ArchiveEncoding = new ArchiveEncoding();
}
internal static RarHeader? TryReadBase(
RarCrcBinaryReader reader,
@@ -18,7 +26,9 @@ internal class RarHeader : IRarHeader
{
try
{
return new RarHeader(reader, isRar5, archiveEncoding);
var header = new RarHeader();
header.Initialize(reader, isRar5, archiveEncoding);
return header;
}
catch (InvalidFormatException)
{
@@ -26,7 +36,11 @@ internal class RarHeader : IRarHeader
}
}
private RarHeader(RarCrcBinaryReader reader, bool isRar5, IArchiveEncoding archiveEncoding)
private void Initialize(
RarCrcBinaryReader reader,
bool isRar5,
IArchiveEncoding archiveEncoding
)
{
_headerType = HeaderType.Null;
_isRar5 = isRar5;
@@ -64,34 +78,48 @@ internal class RarHeader : IRarHeader
}
}
protected RarHeader(RarHeader header, RarCrcBinaryReader reader, HeaderType headerType)
internal static T CreateChild<T>(
RarHeader header,
RarCrcBinaryReader reader,
HeaderType headerType
)
where T : RarHeader, new()
{
_headerType = headerType;
_isRar5 = header.IsRar5;
HeaderCrc = header.HeaderCrc;
HeaderCode = header.HeaderCode;
HeaderFlags = header.HeaderFlags;
HeaderSize = header.HeaderSize;
ExtraSize = header.ExtraSize;
AdditionalDataSize = header.AdditionalDataSize;
ArchiveEncoding = header.ArchiveEncoding;
ReadFinish(reader);
var child = new T() { ArchiveEncoding = header.ArchiveEncoding };
child._headerType = headerType;
child._isRar5 = header.IsRar5;
child.HeaderCrc = header.HeaderCrc;
child.HeaderCode = header.HeaderCode;
child.HeaderFlags = header.HeaderFlags;
child.HeaderSize = header.HeaderSize;
child.ExtraSize = header.ExtraSize;
child.AdditionalDataSize = header.AdditionalDataSize;
child.ReadFinish(reader);
var n = RemainingHeaderBytes(reader);
var n = child.RemainingHeaderBytes(reader);
if (n > 0)
{
reader.ReadBytes(n);
}
VerifyHeaderCrc(reader.GetCrc32());
child.VerifyHeaderCrc(reader.GetCrc32());
return child;
}
protected int RemainingHeaderBytes(MarkingBinaryReader reader) =>
checked(HeaderSize - (int)reader.CurrentReadByteCount);
protected int RemainingHeaderBytesAsync(AsyncMarkingBinaryReader reader) =>
checked(HeaderSize - (int)reader.CurrentReadByteCount);
protected virtual void ReadFinish(MarkingBinaryReader reader) =>
throw new NotImplementedException();
protected virtual ValueTask ReadFinishAsync(
AsyncMarkingBinaryReader reader,
CancellationToken cancellationToken = default
) => throw new NotImplementedException();
private void VerifyHeaderCrc(uint crc32)
{
var b = (IsRar5 ? crc32 : (ushort)crc32) == HeaderCrc;
@@ -103,27 +131,27 @@ internal class RarHeader : IRarHeader
public HeaderType HeaderType => _headerType;
protected bool IsRar5 => _isRar5;
internal bool IsRar5 => _isRar5;
protected uint HeaderCrc { get; }
protected uint HeaderCrc { get; private set; }
internal byte HeaderCode { get; }
internal byte HeaderCode { get; private set; }
protected ushort HeaderFlags { get; }
protected ushort HeaderFlags { get; private set; }
protected bool HasHeaderFlag(ushort flag) => (HeaderFlags & flag) == flag;
protected int HeaderSize { get; }
protected int HeaderSize { get; private set; }
internal IArchiveEncoding ArchiveEncoding { get; }
internal IArchiveEncoding ArchiveEncoding { get; private set; }
/// <summary>
/// Extra header size.
/// </summary>
protected uint ExtraSize { get; }
protected uint ExtraSize { get; private set; }
/// <summary>
/// Size of additional data (eg file contents)
/// </summary>
protected long AdditionalDataSize { get; }
protected long AdditionalDataSize { get; private set; }
}


@@ -0,0 +1,256 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Rar;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common.Rar.Headers;
public partial class RarHeaderFactory
{
public async IAsyncEnumerable<IRarHeader> ReadHeadersAsync(Stream stream)
{
var markHeader = await MarkHeader
.ReadAsync(
stream,
Options.LeaveStreamOpen,
Options.LookForHeader,
CancellationToken.None
)
.ConfigureAwait(false);
_isRar5 = markHeader.IsRar5;
yield return markHeader;
RarHeader? header;
while (
(
header = await TryReadNextHeaderAsync(stream, CancellationToken.None)
.ConfigureAwait(false)
) != null
)
{
yield return header;
if (header.HeaderType == HeaderType.EndArchive)
{
// End of archive marker. RAR reads nothing after this header, which lets third-party
// tools append extra information such as a digital signature to the archive.
yield break;
}
}
}
private async ValueTask<RarHeader?> TryReadNextHeaderAsync(
Stream stream,
CancellationToken cancellationToken
)
{
AsyncRarCrcBinaryReader reader;
if (!IsEncrypted)
{
reader = new AsyncRarCrcBinaryReader(stream);
}
else
{
if (Options.Password is null)
{
throw new CryptographicException(
"Encrypted Rar archive has no password specified."
);
}
if (_isRar5 && _cryptInfo != null)
{
await _cryptInfo
.ReadInitVAsync(new AsyncMarkingBinaryReader(stream))
.ConfigureAwait(false);
var headerKey = new CryptKey5(Options.Password!, _cryptInfo);
reader = await AsyncRarCryptoBinaryReader
.Create(stream, headerKey, _cryptInfo.Salt)
.ConfigureAwait(false);
}
else
{
var key = new CryptKey3(Options.Password);
reader = await AsyncRarCryptoBinaryReader
.Create(stream, key)
.ConfigureAwait(false);
}
}
var header = await RarHeader
.TryReadBaseAsync(reader, _isRar5, Options.ArchiveEncoding, cancellationToken)
.ConfigureAwait(false);
if (header is null)
{
return null;
}
switch (header.HeaderCode)
{
case HeaderCodeV.RAR5_ARCHIVE_HEADER:
case HeaderCodeV.RAR4_ARCHIVE_HEADER:
{
var ah = await ArchiveHeader
.CreateAsync(header, reader, cancellationToken)
.ConfigureAwait(false);
if (ah.IsEncrypted == true)
{
//!!! rar5 we don't know yet
IsEncrypted = true;
}
return ah;
}
case HeaderCodeV.RAR4_PROTECT_HEADER:
{
var ph = await ProtectHeader
.CreateAsync(header, reader, cancellationToken)
.ConfigureAwait(false);
// skip the recovery record data, we do not use it.
switch (StreamingMode)
{
case StreamingMode.Seekable:
{
reader.BaseStream.Position += ph.DataSize;
}
break;
case StreamingMode.Streaming:
{
await reader
.BaseStream.SkipAsync(ph.DataSize, cancellationToken)
.ConfigureAwait(false);
}
break;
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
}
return ph;
}
case HeaderCodeV.RAR5_SERVICE_HEADER:
{
var fh = await FileHeader
.CreateAsync(header, reader, HeaderType.Service, cancellationToken)
.ConfigureAwait(false);
if (fh.FileName == "CMT")
{
fh.PackedStream = new ReadOnlySubStream(reader.BaseStream, fh.CompressedSize);
}
else
{
await SkipDataAsync(fh, reader, cancellationToken).ConfigureAwait(false);
}
return fh;
}
case HeaderCodeV.RAR4_NEW_SUB_HEADER:
{
var fh = await FileHeader
.CreateAsync(header, reader, HeaderType.NewSub, cancellationToken)
.ConfigureAwait(false);
await SkipDataAsync(fh, reader, cancellationToken).ConfigureAwait(false);
return fh;
}
case HeaderCodeV.RAR5_FILE_HEADER:
case HeaderCodeV.RAR4_FILE_HEADER:
{
var fh = await FileHeader
.CreateAsync(header, reader, HeaderType.File, cancellationToken)
.ConfigureAwait(false);
switch (StreamingMode)
{
case StreamingMode.Seekable:
{
fh.DataStartPosition = reader.BaseStream.Position;
reader.BaseStream.Position += fh.CompressedSize;
}
break;
case StreamingMode.Streaming:
{
var ms = new ReadOnlySubStream(reader.BaseStream, fh.CompressedSize);
if (fh.R4Salt is null && fh.Rar5CryptoInfo is null)
{
fh.PackedStream = ms;
}
else
{
fh.PackedStream = new RarCryptoWrapper(
ms,
fh.R4Salt is null
? fh.Rar5CryptoInfo.NotNull().Salt
: fh.R4Salt,
fh.R4Salt is null
? new CryptKey5(
Options.Password,
fh.Rar5CryptoInfo.NotNull()
)
: new CryptKey3(Options.Password)
);
}
}
break;
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
}
return fh;
}
case HeaderCodeV.RAR5_END_ARCHIVE_HEADER:
case HeaderCodeV.RAR4_END_ARCHIVE_HEADER:
{
return await EndArchiveHeader
.CreateAsync(header, reader, cancellationToken)
.ConfigureAwait(false);
}
case HeaderCodeV.RAR5_ARCHIVE_ENCRYPTION_HEADER:
{
var cryptoHeader = await ArchiveCryptHeader
.CreateAsync(header, reader, cancellationToken)
.ConfigureAwait(false);
IsEncrypted = true;
_cryptInfo = cryptoHeader.CryptInfo;
return cryptoHeader;
}
default:
{
throw new InvalidFormatException("Unknown Rar Header: " + header.HeaderCode);
}
}
}
private async ValueTask SkipDataAsync(
FileHeader fh,
AsyncRarCrcBinaryReader reader,
CancellationToken cancellationToken
)
{
switch (StreamingMode)
{
case StreamingMode.Seekable:
{
fh.DataStartPosition = reader.BaseStream.Position;
reader.BaseStream.Position += fh.CompressedSize;
}
break;
case StreamingMode.Streaming:
{
// skip the data; it is not consumed here
await reader
.BaseStream.SkipAsync(fh.CompressedSize, cancellationToken)
.ConfigureAwait(false);
}
break;
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
}
}
}
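A hypothetical consumption sketch for ReadHeadersAsync; it assumes access to SharpCompress internals, since FileHeader and HeaderType are internal types:

static async Task ListFileNamesAsync(RarHeaderFactory factory, Stream stream)
{
    // The mark header arrives first, then archive/file/service headers,
    // until the end-of-archive header terminates the sequence.
    await foreach (var header in factory.ReadHeadersAsync(stream))
    {
        if (header.HeaderType == HeaderType.File)
        {
            Console.WriteLine(((FileHeader)header).FileName);
        }
    }
}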


@@ -1,11 +1,14 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common.Rar;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common.Rar.Headers;
public class RarHeaderFactory
public partial class RarHeaderFactory
{
private bool _isRar5;
@@ -61,12 +64,12 @@ public class RarHeaderFactory
_cryptInfo.ReadInitV(new MarkingBinaryReader(stream));
var _headerKey = new CryptKey5(Options.Password!, _cryptInfo);
reader = new RarCryptoBinaryReader(stream, _headerKey, _cryptInfo.Salt);
reader = RarCryptoBinaryReader.Create(stream, _headerKey, _cryptInfo.Salt);
}
else
{
var key = new CryptKey3(Options.Password);
reader = new RarCryptoBinaryReader(stream, key);
reader = RarCryptoBinaryReader.Create(stream, key);
}
}
@@ -80,7 +83,7 @@ public class RarHeaderFactory
case HeaderCodeV.RAR5_ARCHIVE_HEADER:
case HeaderCodeV.RAR4_ARCHIVE_HEADER:
{
var ah = new ArchiveHeader(header, reader);
var ah = ArchiveHeader.Create(header, reader);
if (ah.IsEncrypted == true)
{
//!!! rar5 we don't know yet
@@ -91,7 +94,7 @@ public class RarHeaderFactory
case HeaderCodeV.RAR4_PROTECT_HEADER:
{
var ph = new ProtectHeader(header, reader);
var ph = ProtectHeader.Create(header, reader);
// skip the recovery record data, we do not use it.
switch (StreamingMode)
{
@@ -116,7 +119,7 @@ public class RarHeaderFactory
case HeaderCodeV.RAR5_SERVICE_HEADER:
{
var fh = new FileHeader(header, reader, HeaderType.Service);
var fh = FileHeader.Create(header, reader, HeaderType.Service);
if (fh.FileName == "CMT")
{
fh.PackedStream = new ReadOnlySubStream(reader.BaseStream, fh.CompressedSize);
@@ -130,7 +133,7 @@ public class RarHeaderFactory
case HeaderCodeV.RAR4_NEW_SUB_HEADER:
{
var fh = new FileHeader(header, reader, HeaderType.NewSub);
var fh = FileHeader.Create(header, reader, HeaderType.NewSub);
SkipData(fh, reader);
return fh;
}
@@ -138,7 +141,7 @@ public class RarHeaderFactory
case HeaderCodeV.RAR5_FILE_HEADER:
case HeaderCodeV.RAR4_FILE_HEADER:
{
var fh = new FileHeader(header, reader, HeaderType.File);
var fh = FileHeader.Create(header, reader, HeaderType.File);
switch (StreamingMode)
{
case StreamingMode.Seekable:
@@ -181,11 +184,11 @@ public class RarHeaderFactory
case HeaderCodeV.RAR5_END_ARCHIVE_HEADER:
case HeaderCodeV.RAR4_END_ARCHIVE_HEADER:
{
return new EndArchiveHeader(header, reader);
return EndArchiveHeader.Create(header, reader);
}
case HeaderCodeV.RAR5_ARCHIVE_ENCRYPTION_HEADER:
{
var cryptoHeader = new ArchiveCryptHeader(header, reader);
var cryptoHeader = ArchiveCryptHeader.Create(header, reader);
IsEncrypted = true;
_cryptInfo = cryptoHeader.CryptInfo;


@@ -4,13 +4,14 @@ namespace SharpCompress.Common.Rar.Headers;
internal class SignHeader : RarHeader
{
protected SignHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Sign)
public static SignHeader Create(RarHeader header, RarCrcBinaryReader reader)
{
if (IsRar5)
var c = CreateChild<SignHeader>(header, reader, HeaderType.Sign);
if (c.IsRar5)
{
throw new InvalidFormatException("unexpected rar5 record");
}
return c;
}
protected override void ReadFinish(MarkingBinaryReader reader)


@@ -1,5 +1,7 @@
using System;
using System.Security.Cryptography;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
@@ -7,44 +9,110 @@ namespace SharpCompress.Common.Rar;
internal class Rar5CryptoInfo
{
public Rar5CryptoInfo() { }
private Rar5CryptoInfo() { }
public Rar5CryptoInfo(MarkingBinaryReader reader, bool readInitV)
public static Rar5CryptoInfo Create(MarkingBinaryReader reader, bool readInitV)
{
var cryptoInfo = new Rar5CryptoInfo();
var cryptVersion = reader.ReadRarVIntUInt32();
if (cryptVersion > EncryptionConstV5.VERSION)
{
throw new CryptographicException($"Unsupported crypto version of {cryptVersion}");
}
var encryptionFlags = reader.ReadRarVIntUInt32();
UsePswCheck = FlagUtility.HasFlag(encryptionFlags, EncryptionFlagsV5.CHFL_CRYPT_PSWCHECK);
LG2Count = reader.ReadRarVIntByte(1);
cryptoInfo.UsePswCheck = FlagUtility.HasFlag(
encryptionFlags,
EncryptionFlagsV5.CHFL_CRYPT_PSWCHECK
);
cryptoInfo.LG2Count = reader.ReadRarVIntByte(1);
if (LG2Count > EncryptionConstV5.CRYPT5_KDF_LG2_COUNT_MAX)
if (cryptoInfo.LG2Count > EncryptionConstV5.CRYPT5_KDF_LG2_COUNT_MAX)
{
throw new CryptographicException($"Unsupported LG2 count of {LG2Count}.");
throw new CryptographicException($"Unsupported LG2 count of {cryptoInfo.LG2Count}.");
}
Salt = reader.ReadBytes(EncryptionConstV5.SIZE_SALT50);
cryptoInfo.Salt = reader.ReadBytes(EncryptionConstV5.SIZE_SALT50);
if (readInitV) // File header needs to read IV here
{
ReadInitV(reader);
cryptoInfo.ReadInitV(reader);
}
if (UsePswCheck)
if (cryptoInfo.UsePswCheck)
{
PswCheck = reader.ReadBytes(EncryptionConstV5.SIZE_PSWCHECK);
cryptoInfo.PswCheck = reader.ReadBytes(EncryptionConstV5.SIZE_PSWCHECK);
var _pswCheckCsm = reader.ReadBytes(EncryptionConstV5.SIZE_PSWCHECK_CSUM);
var sha = SHA256.Create();
UsePswCheck = sha.ComputeHash(PswCheck).AsSpan().StartsWith(_pswCheckCsm.AsSpan());
cryptoInfo.UsePswCheck = sha.ComputeHash(cryptoInfo.PswCheck)
.AsSpan()
.StartsWith(_pswCheckCsm.AsSpan());
}
return cryptoInfo;
}
public static async ValueTask<Rar5CryptoInfo> CreateAsync(
AsyncMarkingBinaryReader reader,
bool readInitV
)
{
var cryptoInfo = new Rar5CryptoInfo();
var cryptVersion = await reader.ReadRarVIntUInt32Async(
cancellationToken: CancellationToken.None
);
if (cryptVersion > EncryptionConstV5.VERSION)
{
throw new CryptographicException($"Unsupported crypto version of {cryptVersion}");
}
var encryptionFlags = await reader.ReadRarVIntUInt32Async(
cancellationToken: CancellationToken.None
);
cryptoInfo.UsePswCheck = FlagUtility.HasFlag(
encryptionFlags,
EncryptionFlagsV5.CHFL_CRYPT_PSWCHECK
);
cryptoInfo.LG2Count = (int)
await reader.ReadRarVIntUInt32Async(cancellationToken: CancellationToken.None);
if (cryptoInfo.LG2Count > EncryptionConstV5.CRYPT5_KDF_LG2_COUNT_MAX)
{
throw new CryptographicException($"Unsupported LG2 count of {cryptoInfo.LG2Count}.");
}
cryptoInfo.Salt = await reader.ReadBytesAsync(
EncryptionConstV5.SIZE_SALT50,
CancellationToken.None
);
if (readInitV)
{
await cryptoInfo.ReadInitVAsync(reader);
}
if (cryptoInfo.UsePswCheck)
{
cryptoInfo.PswCheck = await reader.ReadBytesAsync(
EncryptionConstV5.SIZE_PSWCHECK,
CancellationToken.None
);
var _pswCheckCsm = await reader.ReadBytesAsync(
EncryptionConstV5.SIZE_PSWCHECK_CSUM,
CancellationToken.None
);
var sha = SHA256.Create();
cryptoInfo.UsePswCheck = sha.ComputeHash(cryptoInfo.PswCheck)
.AsSpan()
.StartsWith(_pswCheckCsm.AsSpan());
}
return cryptoInfo;
}
public void ReadInitV(MarkingBinaryReader reader) =>
InitV = reader.ReadBytes(EncryptionConstV5.SIZE_INITV);
public async ValueTask ReadInitVAsync(AsyncMarkingBinaryReader reader) =>
InitV = await reader.ReadBytesAsync(EncryptionConstV5.SIZE_INITV, CancellationToken.None);
public bool UsePswCheck = false;
public int LG2Count = 0;
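The password-check logic in Create/CreateAsync is compact: the record stores a check value (SIZE_PSWCHECK bytes) followed by a short checksum (SIZE_PSWCHECK_CSUM bytes), and the checksum must equal the leading bytes of SHA-256 over the check value; per the published RAR5 format notes, the key itself is derived with PBKDF2-HMAC-SHA256 using 2^LG2Count iterations. A standalone sketch of just the validation step (hypothetical helper):

static bool PswCheckLooksValid(byte[] pswCheck, byte[] storedChecksum)
{
    using var sha = System.Security.Cryptography.SHA256.Create();
    var hash = sha.ComputeHash(pswCheck);
    return hash.AsSpan(0, storedChecksum.Length).SequenceEqual(storedChecksum);
}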


@@ -1,5 +1,3 @@
#nullable disable
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Rar.Headers;
@@ -9,20 +7,28 @@ namespace SharpCompress.Common.Rar;
internal sealed class RarCryptoBinaryReader : RarCrcBinaryReader
{
private BlockTransformer _rijndael;
private BlockTransformer _rijndael = default!;
private readonly Queue<byte> _data = new();
private long _readCount;
public RarCryptoBinaryReader(Stream stream, ICryptKey cryptKey)
: base(stream)
{
var salt = base.ReadBytes(EncryptionConstV5.SIZE_SALT30);
_readCount += EncryptionConstV5.SIZE_SALT30;
_rijndael = new BlockTransformer(cryptKey.Transformer(salt));
}
private RarCryptoBinaryReader(Stream stream)
: base(stream) { }
public RarCryptoBinaryReader(Stream stream, ICryptKey cryptKey, byte[] salt)
: base(stream) => _rijndael = new BlockTransformer(cryptKey.Transformer(salt));
public static RarCryptoBinaryReader Create(
Stream stream,
ICryptKey cryptKey,
byte[]? salt = null
)
{
var binary = new RarCryptoBinaryReader(stream);
if (salt == null)
{
salt = binary.ReadBytesBase(EncryptionConstV5.SIZE_SALT30);
binary._readCount += EncryptionConstV5.SIZE_SALT30;
}
binary._rijndael = new BlockTransformer(cryptKey.Transformer(salt));
return binary;
}
// track read count ourselves rather than using the underlying stream since we buffer
public override long CurrentReadByteCount
@@ -40,6 +46,8 @@ internal sealed class RarCryptoBinaryReader : RarCrcBinaryReader
public override byte[] ReadBytes(int count) => ReadAndDecryptBytes(count);
private byte[] ReadBytesBase(int count) => base.ReadBytes(count);
private byte[] ReadAndDecryptBytes(int count)
{
var queueSize = _data.Count;


@@ -2,7 +2,10 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -26,6 +29,8 @@ public abstract class RarVolume : Volume
internal abstract IEnumerable<RarFilePart> ReadFileParts();
internal abstract IAsyncEnumerable<RarFilePart> ReadFilePartsAsync();
internal abstract RarFilePart CreateFilePart(MarkHeader markHeader, FileHeader fileHeader);
internal IEnumerable<RarFilePart> GetVolumeFileParts()
@@ -71,6 +76,55 @@ public abstract class RarVolume : Volume
}
}
internal async IAsyncEnumerable<RarFilePart> GetVolumeFilePartsAsync(
[EnumeratorCancellation] CancellationToken cancellationToken = default
)
{
MarkHeader? lastMarkHeader = null;
await foreach (
var header in _headerFactory
.ReadHeadersAsync(Stream)
.WithCancellation(cancellationToken)
)
{
switch (header.HeaderType)
{
case HeaderType.Mark:
{
lastMarkHeader = (MarkHeader)header;
}
break;
case HeaderType.Archive:
{
ArchiveHeader = (ArchiveHeader)header;
}
break;
case HeaderType.File:
{
var fh = (FileHeader)header;
if (_maxCompressionAlgorithm < fh.CompressionAlgorithm)
{
_maxCompressionAlgorithm = fh.CompressionAlgorithm;
}
yield return CreateFilePart(lastMarkHeader!, fh);
}
break;
case HeaderType.Service:
{
var fh = (FileHeader)header;
if (fh.FileName == "CMT")
{
var buffer = new byte[fh.CompressedSize];
fh.PackedStream.NotNull().ReadFully(buffer);
Comment = Encoding.UTF8.GetString(buffer, 0, buffer.Length - 1);
}
}
break;
}
}
}
private void EnsureArchiveHeaderLoaded()
{
if (ArchiveHeader is null)
@@ -126,6 +180,12 @@ public abstract class RarVolume : Volume
}
}
public async ValueTask<bool> IsSolidArchiveAsync(CancellationToken cancellationToken = default)
{
await EnsureArchiveHeaderLoadedAsync(cancellationToken).ConfigureAwait(false);
return ArchiveHeader?.IsSolid ?? false;
}
public int MinVersion
{
get
@@ -174,5 +234,68 @@ public abstract class RarVolume : Volume
}
}
private async ValueTask EnsureArchiveHeaderLoadedAsync(CancellationToken cancellationToken)
{
if (ArchiveHeader is null)
{
if (Mode == StreamingMode.Streaming)
{
throw new InvalidOperationException(
"ArchiveHeader should never been null in a streaming read."
);
}
// we only want to load the archive header to avoid overhead, but that means resetting the stream afterwards
await GetVolumeFilePartsAsync(cancellationToken).FirstAsync().ConfigureAwait(false);
Stream.Position = 0;
}
}
public virtual async ValueTask<int> MinVersionAsync(
CancellationToken cancellationToken = default
)
{
await EnsureArchiveHeaderLoadedAsync(cancellationToken).ConfigureAwait(false);
if (_maxCompressionAlgorithm >= 50)
{
return 5; //5-6
}
else if (_maxCompressionAlgorithm >= 29)
{
return 3; //3-4
}
else if (_maxCompressionAlgorithm >= 20)
{
return 2; //2
}
else
{
return 1;
}
}
public virtual async ValueTask<int> MaxVersionAsync(
CancellationToken cancellationToken = default
)
{
await EnsureArchiveHeaderLoadedAsync(cancellationToken).ConfigureAwait(false);
if (_maxCompressionAlgorithm >= 50)
{
return 6; //5-6
}
else if (_maxCompressionAlgorithm >= 29)
{
return 4; //3-4
}
else if (_maxCompressionAlgorithm >= 20)
{
return 2; //2
}
else
{
return 1;
}
}
public string? Comment { get; internal set; }
}
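The async Min/Max version pair above encodes a single threshold table keyed on the highest compression algorithm id seen in the volume; the same mapping as a pure function (hypothetical helper, same thresholds as the code):

static (int Min, int Max) RarVersionRange(int maxCompressionAlgorithm) =>
    maxCompressionAlgorithm switch
    {
        >= 50 => (5, 6), // RAR 5-6 archives
        >= 29 => (3, 4), // RAR 3-4
        >= 20 => (2, 2),
        _ => (1, 1),
    };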


@@ -0,0 +1,40 @@
#nullable disable
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.LZMA.Utilites;
namespace SharpCompress.Common.SevenZip;
internal sealed partial class ArchiveDatabase
{
internal async ValueTask<Stream> GetFolderStreamAsync(
Stream stream,
CFolder folder,
IPasswordProvider pw,
CancellationToken cancellationToken
)
{
var packStreamIndex = folder._firstPackStreamId;
var folderStartPackPos = GetFolderStreamPos(folder, 0);
var count = folder._packStreams.Count;
var packSizes = new long[count];
for (var j = 0; j < count; j++)
{
packSizes[j] = _packSizes[packStreamIndex + j];
}
return await DecoderStreamHelper
.CreateDecoderStreamAsync(
stream,
folderStartPackPos,
packSizes,
folder,
pw,
cancellationToken
)
.ConfigureAwait(false);
}
}
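GetFolderStreamAsync relies on each 7z folder owning a consecutive run of pack streams, so its sizes are a slice of the archive-wide pack-size list starting at the folder's first stream id. The slicing in isolation, as a sketch (hypothetical helper):

static long[] FolderPackSizes(
    IReadOnlyList<long> allPackSizes,
    int firstPackStreamId,
    int packStreamCount
)
{
    var sizes = new long[packStreamCount];
    for (var j = 0; j < packStreamCount; j++)
    {
        sizes[j] = allPackSizes[firstPackStreamId + j];
    }
    return sizes;
}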


@@ -1,4 +1,4 @@
#nullable disable
#nullable disable
using System;
using System.Collections.Generic;
@@ -8,7 +8,7 @@ using SharpCompress.Compressors.LZMA.Utilites;
namespace SharpCompress.Common.SevenZip;
internal class ArchiveDatabase
internal partial class ArchiveDatabase
{
internal byte _majorVersion;
internal byte _minorVersion;


@@ -0,0 +1,580 @@
#nullable disable
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.LZMA.Utilites;
using SharpCompress.IO;
namespace SharpCompress.Common.SevenZip;
internal sealed partial class ArchiveReader
{
public async ValueTask OpenAsync(
Stream stream,
bool lookForHeader,
CancellationToken cancellationToken = default
)
{
Close();
_streamOrigin = stream.Position;
_streamEnding = stream.Length;
var canScan = lookForHeader ? 0x80000 - 20 : 0;
while (true)
{
// TODO: Check Signature!
_header = new byte[0x20];
await stream.ReadExactAsync(_header, 0, 0x20, cancellationToken);
if (
!lookForHeader
|| _header
.AsSpan(0, length: 6)
.SequenceEqual<byte>([0x37, 0x7A, 0xBC, 0xAF, 0x27, 0x1C])
)
{
break;
}
if (canScan == 0)
{
throw new InvalidFormatException("Unable to find 7z signature");
}
canScan--;
stream.Position = ++_streamOrigin;
}
_stream = stream;
}
public async ValueTask<ArchiveDatabase> ReadDatabaseAsync(
IPasswordProvider pass,
CancellationToken cancellationToken = default
)
{
var db = new ArchiveDatabase(pass);
db.Clear();
db._majorVersion = _header[6];
db._minorVersion = _header[7];
if (db._majorVersion != 0)
{
throw new InvalidOperationException();
}
var crcFromArchive = DataReader.Get32(_header, 8);
var nextHeaderOffset = (long)DataReader.Get64(_header, 0xC);
var nextHeaderSize = (long)DataReader.Get64(_header, 0x14);
var nextHeaderCrc = DataReader.Get32(_header, 0x1C);
var crc = Crc.INIT_CRC;
crc = Crc.Update(crc, nextHeaderOffset);
crc = Crc.Update(crc, nextHeaderSize);
crc = Crc.Update(crc, nextHeaderCrc);
crc = Crc.Finish(crc);
if (crc != crcFromArchive)
{
throw new InvalidOperationException();
}
db._startPositionAfterHeader = _streamOrigin + 0x20;
// empty header is ok
if (nextHeaderSize == 0)
{
db.Fill();
return db;
}
if (nextHeaderOffset < 0 || nextHeaderSize < 0 || nextHeaderSize > int.MaxValue)
{
throw new InvalidOperationException();
}
if (nextHeaderOffset > _streamEnding - db._startPositionAfterHeader)
{
throw new InvalidOperationException("nextHeaderOffset is invalid");
}
_stream.Seek(nextHeaderOffset, SeekOrigin.Current);
var header = new byte[nextHeaderSize];
await _stream.ReadExactAsync(header, 0, header.Length, cancellationToken);
if (Crc.Finish(Crc.Update(Crc.INIT_CRC, header, 0, header.Length)) != nextHeaderCrc)
{
throw new InvalidOperationException();
}
using (var streamSwitch = new CStreamSwitch())
{
streamSwitch.Set(this, header);
var type = ReadId();
if (type != BlockType.Header)
{
if (type != BlockType.EncodedHeader)
{
throw new InvalidOperationException();
}
var dataVector = await ReadAndDecodePackedStreamsAsync(
db._startPositionAfterHeader,
db.PasswordProvider,
cancellationToken
)
.ConfigureAwait(false);
// compressed header without content is odd but ok
if (dataVector.Count == 0)
{
db.Fill();
return db;
}
if (dataVector.Count != 1)
{
throw new InvalidOperationException();
}
streamSwitch.Set(this, dataVector[0]);
if (ReadId() != BlockType.Header)
{
throw new InvalidOperationException();
}
}
await ReadHeaderAsync(db, db.PasswordProvider, cancellationToken).ConfigureAwait(false);
}
db.Fill();
return db;
}
private async ValueTask<List<byte[]>> ReadAndDecodePackedStreamsAsync(
long baseOffset,
IPasswordProvider pass,
CancellationToken cancellationToken
)
{
#if DEBUG
Log.WriteLine("-- ReadAndDecodePackedStreamsAsync --");
Log.PushIndent();
#endif
try
{
ReadStreamsInfo(
null,
out var dataStartPos,
out var packSizes,
out var packCrCs,
out var folders,
out var numUnpackStreamsInFolders,
out var unpackSizes,
out var digests
);
dataStartPos += baseOffset;
var dataVector = new List<byte[]>(folders.Count);
var packIndex = 0;
foreach (var folder in folders)
{
var oldDataStartPos = dataStartPos;
var myPackSizes = new long[folder._packStreams.Count];
for (var i = 0; i < myPackSizes.Length; i++)
{
var packSize = packSizes[packIndex + i];
myPackSizes[i] = packSize;
dataStartPos += packSize;
}
var outStream = await DecoderStreamHelper
.CreateDecoderStreamAsync(
_stream,
oldDataStartPos,
myPackSizes,
folder,
pass,
cancellationToken
)
.ConfigureAwait(false);
var unpackSize = checked((int)folder.GetUnpackSize());
var data = new byte[unpackSize];
await outStream
.ReadExactAsync(data, 0, data.Length, cancellationToken)
.ConfigureAwait(false);
if (outStream.ReadByte() >= 0)
{
throw new InvalidFormatException("Decoded stream is longer than expected.");
}
dataVector.Add(data);
if (folder.UnpackCrcDefined)
{
if (
Crc.Finish(Crc.Update(Crc.INIT_CRC, data, 0, unpackSize))
!= folder._unpackCrc
)
{
throw new InvalidFormatException(
"Decoded stream does not match expected CRC."
);
}
}
}
return dataVector;
}
finally
{
#if DEBUG
Log.PopIndent();
#endif
}
}
private async ValueTask ReadHeaderAsync(
ArchiveDatabase db,
IPasswordProvider getTextPassword,
CancellationToken cancellationToken
)
{
#if DEBUG
Log.WriteLine("-- ReadHeaderAsync --");
Log.PushIndent();
#endif
try
{
var type = ReadId();
if (type == BlockType.ArchiveProperties)
{
ReadArchiveProperties();
type = ReadId();
}
List<byte[]> dataVector = null;
if (type == BlockType.AdditionalStreamsInfo)
{
dataVector = await ReadAndDecodePackedStreamsAsync(
db._startPositionAfterHeader,
getTextPassword,
cancellationToken
)
.ConfigureAwait(false);
type = ReadId();
}
List<long> unpackSizes;
List<uint?> digests;
if (type == BlockType.MainStreamsInfo)
{
ReadStreamsInfo(
dataVector,
out db._dataStartPosition,
out db._packSizes,
out db._packCrCs,
out db._folders,
out db._numUnpackStreamsVector,
out unpackSizes,
out digests
);
db._dataStartPosition += db._startPositionAfterHeader;
type = ReadId();
}
else
{
unpackSizes = new List<long>(db._folders.Count);
digests = new List<uint?>(db._folders.Count);
db._numUnpackStreamsVector = new List<int>(db._folders.Count);
for (var i = 0; i < db._folders.Count; i++)
{
var folder = db._folders[i];
unpackSizes.Add(folder.GetUnpackSize());
digests.Add(folder._unpackCrc);
db._numUnpackStreamsVector.Add(1);
}
}
db._files.Clear();
if (type == BlockType.End)
{
return;
}
if (type != BlockType.FilesInfo)
{
throw new InvalidOperationException();
}
var numFiles = ReadNum();
#if DEBUG
Log.WriteLine("NumFiles: " + numFiles);
#endif
db._files = new List<CFileItem>(numFiles);
for (var i = 0; i < numFiles; i++)
{
db._files.Add(new CFileItem());
}
var emptyStreamVector = new BitVector(numFiles);
BitVector emptyFileVector = null;
BitVector antiFileVector = null;
var numEmptyStreams = 0;
for (; ; )
{
type = ReadId();
if (type == BlockType.End)
{
break;
}
var size = checked((long)ReadNumber());
var oldPos = _currentReader.Offset;
switch (type)
{
case BlockType.Name:
using (var streamSwitch = new CStreamSwitch())
{
streamSwitch.Set(this, dataVector);
#if DEBUG
Log.Write("FileNames:");
#endif
for (var i = 0; i < db._files.Count; i++)
{
db._files[i].Name = _currentReader.ReadString();
#if DEBUG
Log.Write(" " + db._files[i].Name);
#endif
}
#if DEBUG
Log.WriteLine();
#endif
}
break;
case BlockType.WinAttributes:
#if DEBUG
Log.Write("WinAttributes:");
#endif
ReadAttributeVector(
dataVector,
numFiles,
delegate(int i, uint? attr)
{
db._files[i].ExtendedAttrib = attr;
if (attr.HasValue && (attr.Value >> 16) != 0)
{
attr = attr.Value & 0x7FFFu;
}
db._files[i].Attrib = attr;
#if DEBUG
Log.Write(
" " + (attr.HasValue ? attr.Value.ToString("x8") : "n/a")
);
#endif
}
);
#if DEBUG
Log.WriteLine();
#endif
break;
case BlockType.EmptyStream:
emptyStreamVector = ReadBitVector(numFiles);
#if DEBUG
Log.Write("EmptyStream: ");
#endif
for (var i = 0; i < emptyStreamVector.Length; i++)
{
if (emptyStreamVector[i])
{
#if DEBUG
Log.Write("x");
#endif
numEmptyStreams++;
}
else
{
#if DEBUG
Log.Write(".");
#endif
}
}
#if DEBUG
Log.WriteLine();
#endif
emptyFileVector = new BitVector(numEmptyStreams);
antiFileVector = new BitVector(numEmptyStreams);
break;
case BlockType.EmptyFile:
emptyFileVector = ReadBitVector(numEmptyStreams);
#if DEBUG
Log.Write("EmptyFile: ");
for (var i = 0; i < numEmptyStreams; i++)
{
Log.Write(emptyFileVector[i] ? "x" : ".");
}
Log.WriteLine();
#endif
break;
case BlockType.Anti:
antiFileVector = ReadBitVector(numEmptyStreams);
#if DEBUG
Log.Write("Anti: ");
for (var i = 0; i < numEmptyStreams; i++)
{
Log.Write(antiFileVector[i] ? "x" : ".");
}
Log.WriteLine();
#endif
break;
case BlockType.StartPos:
#if DEBUG
Log.Write("StartPos:");
#endif
ReadNumberVector(
dataVector,
numFiles,
delegate(int i, long? startPos)
{
db._files[i].StartPos = startPos;
#if DEBUG
Log.Write(
" " + (startPos.HasValue ? startPos.Value.ToString() : "n/a")
);
#endif
}
);
#if DEBUG
Log.WriteLine();
#endif
break;
case BlockType.CTime:
#if DEBUG
Log.Write("CTime:");
#endif
ReadDateTimeVector(
dataVector,
numFiles,
delegate(int i, DateTime? time)
{
db._files[i].CTime = time;
#if DEBUG
Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a"));
#endif
}
);
#if DEBUG
Log.WriteLine();
#endif
break;
case BlockType.ATime:
#if DEBUG
Log.Write("ATime:");
#endif
ReadDateTimeVector(
dataVector,
numFiles,
delegate(int i, DateTime? time)
{
db._files[i].ATime = time;
#if DEBUG
Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a"));
#endif
}
);
#if DEBUG
Log.WriteLine();
#endif
break;
case BlockType.MTime:
#if DEBUG
Log.Write("MTime:");
#endif
ReadDateTimeVector(
dataVector,
numFiles,
delegate(int i, DateTime? time)
{
db._files[i].MTime = time;
#if DEBUG
Log.Write(" " + (time.HasValue ? time.Value.ToString() : "n/a"));
#endif
}
);
#if DEBUG
Log.WriteLine();
#endif
break;
case BlockType.Dummy:
#if DEBUG
Log.Write("Dummy: " + size);
#endif
for (long j = 0; j < size; j++)
{
if (ReadByte() != 0)
{
throw new InvalidOperationException();
}
}
break;
default:
SkipData(size);
break;
}
var checkRecordsSize = (db._majorVersion > 0 || db._minorVersion > 2);
if (checkRecordsSize && _currentReader.Offset - oldPos != size)
{
throw new InvalidOperationException();
}
}
var emptyFileIndex = 0;
var sizeIndex = 0;
for (var i = 0; i < numFiles; i++)
{
var file = db._files[i];
file.HasStream = !emptyStreamVector[i];
if (file.HasStream)
{
file.IsDir = false;
file.IsAnti = false;
file.Size = unpackSizes[sizeIndex];
file.Crc = digests[sizeIndex];
sizeIndex++;
}
else
{
file.IsDir = !emptyFileVector[emptyFileIndex];
file.IsAnti = antiFileVector[emptyFileIndex];
emptyFileIndex++;
file.Size = 0;
file.Crc = null;
}
}
}
finally
{
#if DEBUG
Log.PopIndent();
#endif
}
}
}
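OpenAsync and ReadDatabaseAsync parse the fixed 32-byte 7z start header; the field offsets implied by the reads above are laid out below. ParseStartHeader is an illustrative helper (little-endian, matching DataReader.Get32/Get64; BinaryPrimitives is System.Buffers.Binary):

// [0..6)   signature: 37 7A BC AF 27 1C
// [6..8)   major/minor format version (major must be 0)
// [8..12)  CRC32 over bytes [12..32)
// [12..20) NextHeaderOffset, relative to the end of this 32-byte header
// [20..28) NextHeaderSize
// [28..32) NextHeaderCRC
static (long Offset, long Size, uint Crc) ParseStartHeader(ReadOnlySpan<byte> h)
{
    var offset = BinaryPrimitives.ReadInt64LittleEndian(h.Slice(12));
    var size = BinaryPrimitives.ReadInt64LittleEndian(h.Slice(20));
    var crc = BinaryPrimitives.ReadUInt32LittleEndian(h.Slice(28));
    return (offset, size, crc);
}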


@@ -5,13 +5,15 @@ using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.LZMA.Utilites;
using SharpCompress.IO;
namespace SharpCompress.Common.SevenZip;
internal class ArchiveReader
internal partial class ArchiveReader
{
internal Stream _stream;
internal Stack<DataReader> _readerStack = new();
@@ -1270,6 +1272,8 @@ internal class ArchiveReader
_stream = stream;
}
// OpenAsync moved to ArchiveReader.Async.cs
public void Close()
{
_stream?.Dispose();
@@ -1383,6 +1387,8 @@ internal class ArchiveReader
return db;
}
// ReadDatabaseAsync moved to ArchiveReader.Async.cs
internal class CExtractFolderInfo
{
internal int _fileIndex;


@@ -1,5 +1,7 @@
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Common.SevenZip;
@@ -58,6 +60,35 @@ internal class SevenZipFilePart : FilePart
return new ReadOnlySubStream(folderStream, Header.Size, leaveOpen: false);
}
internal override async ValueTask<Stream?> GetCompressedStreamAsync(
CancellationToken cancellationToken = default
)
{
if (!Header.HasStream)
{
return Stream.Null;
}
var folderStream = await _database.GetFolderStreamAsync(
_stream,
Folder!,
_database.PasswordProvider,
cancellationToken
);
var firstFileIndex = _database._folderStartFileIndex[_database._folders.IndexOf(Folder!)];
var skipCount = Index - firstFileIndex;
long skipSize = 0;
for (var i = 0; i < skipCount; i++)
{
skipSize += _database._files[firstFileIndex + i].Size;
}
if (skipSize > 0)
{
await folderStream.SkipAsync(skipSize, cancellationToken);
}
return new ReadOnlySubStream(folderStream, Header.Size, leaveOpen: false);
}
public CompressionType CompressionType
{
get


@@ -0,0 +1,339 @@
using System;
using System.Buffers;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Tar.Headers;
internal sealed partial class TarHeader
{
internal async ValueTask WriteAsync(
Stream output,
CancellationToken cancellationToken = default
)
{
switch (WriteFormat)
{
case TarHeaderWriteFormat.GNU_TAR_LONG_LINK:
await WriteGnuTarLongLinkAsync(output, cancellationToken);
break;
case TarHeaderWriteFormat.USTAR:
await WriteUstarAsync(output, cancellationToken);
break;
default:
throw new Exception("This should be impossible...");
}
}
private async ValueTask WriteUstarAsync(Stream output, CancellationToken cancellationToken)
{
var buffer = new byte[BLOCK_SIZE];
WriteOctalBytes(511, buffer, 100, 8);
WriteOctalBytes(0, buffer, 108, 8);
WriteOctalBytes(0, buffer, 116, 8);
var nameByteCount = ArchiveEncoding
.GetEncoding()
.GetByteCount(Name.NotNull("Name is null"));
if (nameByteCount > 100)
{
string fullName = Name.NotNull("Name is null");
List<int> dirSeps = new List<int>();
for (int i = 0; i < fullName.Length; i++)
{
if (fullName[i] == Path.DirectorySeparatorChar)
{
dirSeps.Add(i);
}
}
int splitIndex = -1;
for (int i = 0; i < dirSeps.Count; i++)
{
int count = ArchiveEncoding
.GetEncoding()
.GetByteCount(fullName.Substring(0, dirSeps[i]));
if (count < 155)
{
splitIndex = dirSeps[i];
}
else
{
break;
}
}
if (splitIndex == -1)
{
throw new Exception(
$"Tar header USTAR format can not fit file name \"{fullName}\" of length {nameByteCount}! Directory separator not found! Try using GNU Tar format instead!"
);
}
string namePrefix = fullName.Substring(0, splitIndex);
string name = fullName.Substring(splitIndex + 1);
if (this.ArchiveEncoding.GetEncoding().GetByteCount(namePrefix) >= 155)
{
throw new Exception(
$"Tar header USTAR format can not fit file name \"{fullName}\" of length {nameByteCount}! Try using GNU Tar format instead!"
);
}
if (this.ArchiveEncoding.GetEncoding().GetByteCount(name) >= 100)
{
throw new Exception(
$"Tar header USTAR format can not fit file name \"{fullName}\" of length {nameByteCount}! Try using GNU Tar format instead!"
);
}
WriteStringBytes(ArchiveEncoding.Encode(namePrefix), buffer, 345, 100);
WriteStringBytes(ArchiveEncoding.Encode(name), buffer, 100);
}
else
{
WriteStringBytes(ArchiveEncoding.Encode(Name.NotNull("Name is null")), buffer, 100);
}
WriteOctalBytes(Size, buffer, 124, 12);
var time = (long)(LastModifiedTime.ToUniversalTime() - EPOCH).TotalSeconds;
WriteOctalBytes(time, buffer, 136, 12);
buffer[156] = (byte)EntryType;
WriteStringBytes(Encoding.ASCII.GetBytes("ustar"), buffer, 257, 6);
buffer[263] = 0x30;
buffer[264] = 0x30;
var crc = RecalculateChecksum(buffer);
WriteOctalBytes(crc, buffer, 148, 8);
await output.WriteAsync(buffer, 0, buffer.Length, cancellationToken).ConfigureAwait(false);
}
private async ValueTask WriteGnuTarLongLinkAsync(
Stream output,
CancellationToken cancellationToken
)
{
var buffer = new byte[BLOCK_SIZE];
WriteOctalBytes(511, buffer, 100, 8);
WriteOctalBytes(0, buffer, 108, 8);
WriteOctalBytes(0, buffer, 116, 8);
var nameByteCount = ArchiveEncoding
.GetEncoding()
.GetByteCount(Name.NotNull("Name is null"));
if (nameByteCount > 100)
{
WriteStringBytes("././@LongLink", buffer, 0, 100);
buffer[156] = (byte)EntryType.LongName;
WriteOctalBytes(nameByteCount + 1, buffer, 124, 12);
}
else
{
WriteStringBytes(ArchiveEncoding.Encode(Name.NotNull("Name is null")), buffer, 100);
WriteOctalBytes(Size, buffer, 124, 12);
var time = (long)(LastModifiedTime.ToUniversalTime() - EPOCH).TotalSeconds;
WriteOctalBytes(time, buffer, 136, 12);
buffer[156] = (byte)EntryType;
if (Size >= 0x1FFFFFFFF)
{
Span<byte> bytes12 = stackalloc byte[12];
BinaryPrimitives.WriteInt64BigEndian(bytes12.Slice(4), Size);
bytes12[0] |= 0x80;
bytes12.CopyTo(buffer.AsSpan(124));
}
}
var crc = RecalculateChecksum(buffer);
WriteOctalBytes(crc, buffer, 148, 8);
await output.WriteAsync(buffer, 0, buffer.Length, cancellationToken).ConfigureAwait(false);
if (nameByteCount > 100)
{
await WriteLongFilenameHeaderAsync(output, cancellationToken);
Name = ArchiveEncoding.Decode(
ArchiveEncoding.Encode(Name.NotNull("Name is null")),
0,
100 - ArchiveEncoding.GetEncoding().GetMaxByteCount(1)
);
await WriteGnuTarLongLinkAsync(output, cancellationToken);
}
}
private async ValueTask WriteLongFilenameHeaderAsync(
Stream output,
CancellationToken cancellationToken
)
{
var nameBytes = ArchiveEncoding.Encode(Name.NotNull("Name is null"));
await output
.WriteAsync(nameBytes, 0, nameBytes.Length, cancellationToken)
.ConfigureAwait(false);
var numPaddingBytes = BLOCK_SIZE - (nameBytes.Length % BLOCK_SIZE);
if (numPaddingBytes == 0)
{
numPaddingBytes = BLOCK_SIZE;
}
await output
.WriteAsync(new byte[numPaddingBytes], 0, numPaddingBytes, cancellationToken)
.ConfigureAwait(false);
}
internal async ValueTask<bool> ReadAsync(AsyncBinaryReader reader)
{
string? longName = null;
string? longLinkName = null;
var hasLongValue = true;
byte[] buffer;
EntryType entryType;
do
{
buffer = await ReadBlockAsync(reader);
if (buffer.Length == 0)
{
return false;
}
entryType = ReadEntryType(buffer);
// LongName and LongLink headers can follow each other and need
// to apply to the header that follows them.
if (entryType == EntryType.LongName)
{
longName = await ReadLongNameAsync(reader, buffer);
continue;
}
else if (entryType == EntryType.LongLink)
{
longLinkName = await ReadLongNameAsync(reader, buffer);
continue;
}
hasLongValue = false;
} while (hasLongValue);
// Check header checksum
if (!checkChecksum(buffer))
{
return false;
}
Name = longName ?? ArchiveEncoding.Decode(buffer, 0, 100).TrimNulls();
EntryType = entryType;
Size = ReadSize(buffer);
// for symlinks, additionally read the linkname
if (entryType == EntryType.SymLink || entryType == EntryType.HardLink)
{
LinkName = longLinkName ?? ArchiveEncoding.Decode(buffer, 157, 100).TrimNulls();
}
Mode = ReadAsciiInt64Base8(buffer, 100, 7);
if (entryType == EntryType.Directory)
{
Mode |= 0b1_000_000_000;
}
UserId = ReadAsciiInt64Base8oldGnu(buffer, 108, 7);
GroupId = ReadAsciiInt64Base8oldGnu(buffer, 116, 7);
var unixTimeStamp = ReadAsciiInt64Base8(buffer, 136, 11);
LastModifiedTime = EPOCH.AddSeconds(unixTimeStamp).ToLocalTime();
Magic = ArchiveEncoding.Decode(buffer, 257, 6).TrimNulls();
if (!string.IsNullOrEmpty(Magic) && "ustar".Equals(Magic))
{
var namePrefix = ArchiveEncoding.Decode(buffer, 345, 157).TrimNulls();
if (!string.IsNullOrEmpty(namePrefix))
{
Name = namePrefix + "/" + Name;
}
}
if (entryType != EntryType.LongName && Name.Length == 0)
{
return false;
}
return true;
}
private static async ValueTask<byte[]> ReadBlockAsync(AsyncBinaryReader reader)
{
// Allocate the block directly: it is handed back to the caller, so it must
// not come from ArrayPool (a rented array cannot be used once it has been
// returned to the pool).
var buffer = new byte[BLOCK_SIZE];
await reader.ReadBytesAsync(buffer, 0, BLOCK_SIZE);
return buffer;
}
private async ValueTask<string> ReadLongNameAsync(AsyncBinaryReader reader, byte[] buffer)
{
var size = ReadSize(buffer);
// Validate size to prevent memory exhaustion from malformed headers
if (size < 0 || size > MAX_LONG_NAME_SIZE)
{
throw new InvalidFormatException(
$"Long name size {size} is invalid or exceeds maximum allowed size of {MAX_LONG_NAME_SIZE} bytes"
);
}
var nameLength = (int)size;
var nameBytes = ArrayPool<byte>.Shared.Rent(nameLength);
try
{
await reader.ReadBytesAsync(nameBytes, 0, nameLength);
var remainingBytesToRead = BLOCK_SIZE - (nameLength % BLOCK_SIZE);
// Read the rest of the block and discard the data
if (remainingBytesToRead < BLOCK_SIZE)
{
var remainingBytes = ArrayPool<byte>.Shared.Rent(remainingBytesToRead);
try
{
await reader.ReadBytesAsync(remainingBytes, 0, remainingBytesToRead);
}
finally
{
ArrayPool<byte>.Shared.Return(remainingBytes);
}
}
return ArchiveEncoding.Decode(nameBytes, 0, nameLength).TrimNulls();
}
finally
{
ArrayPool<byte>.Shared.Return(nameBytes);
}
}
}
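The offsets in WriteUstarAsync come from the classic 512-byte USTAR block: name[100] at 0, mode at 100, uid at 108, gid at 116, size[12] at 124, mtime[12] at 136, checksum at 148, typeflag at 156, magic "ustar" at 257, prefix[155] at 345. Sizes too large for the 11-octal-digit size field are written in the GNU base-256 form used above; the encoding in isolation (hypothetical helper, BinaryPrimitives from System.Buffers.Binary):

static void WriteBase256Size(Span<byte> sizeField, long size)
{
    // sizeField is the 12-byte size field; setting the top bit of the first
    // byte marks the value as big-endian binary rather than octal text.
    BinaryPrimitives.WriteInt64BigEndian(sizeField.Slice(4), size);
    sizeField[0] |= 0x80;
}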


@@ -1,12 +1,14 @@
using System;
using System.Buffers;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Threading.Tasks;
namespace SharpCompress.Common.Tar.Headers;
internal sealed class TarHeader
internal sealed partial class TarHeader
{
internal static readonly DateTime EPOCH = new(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
@@ -110,14 +112,18 @@ internal sealed class TarHeader
string name = fullName.Substring(splitIndex + 1);
if (this.ArchiveEncoding.GetEncoding().GetByteCount(namePrefix) >= 155)
{
throw new Exception(
$"Tar header USTAR format can not fit file name \"{fullName}\" of length {nameByteCount}! Try using GNU Tar format instead!"
);
}
if (this.ArchiveEncoding.GetEncoding().GetByteCount(name) >= 100)
{
throw new Exception(
$"Tar header USTAR format can not fit file name \"{fullName}\" of length {nameByteCount}! Try using GNU Tar format instead!"
);
}
// write name prefix
WriteStringBytes(ArchiveEncoding.Encode(namePrefix), buffer, 345, 100);

Some files were not shown because too many files have changed in this diff.