Compare commits


323 Commits

Author SHA1 Message Date
Adam Hathcock
73fd2d70ba Merge pull request #1184 from adamhathcock/adam/cleanup
Some clean up post-async merging
2026-02-04 16:47:59 +00:00
Adam Hathcock
c026e02390 remove some redundant casts 2026-02-04 16:47:27 +00:00
Adam Hathcock
9676d6ccc9 fix readonly 2026-02-04 16:14:48 +00:00
Adam Hathcock
fad7cf3b1d fix usages of AsyncBinaryReader 2026-02-04 16:07:06 +00:00
Adam Hathcock
17cb952772 use Dispose on BinaryReader 2026-02-04 15:59:22 +00:00
Adam Hathcock
923217ef0e remove memorystream 2026-02-04 15:45:38 +00:00
Adam Hathcock
0d81d7f243 Merge remote-tracking branch 'origin/master' into adam/cleanup 2026-02-04 15:05:12 +00:00
Adam Hathcock
2070ab282f Merge pull request #1183 from adamhathcock/adam/release-merge
release to master merge
2026-02-04 15:01:18 +00:00
Adam Hathcock
faacff414d changed checks to be less than or equal to zero 2026-02-04 14:57:17 +00:00
Adam Hathcock
642b8bddb8 fixed merge 2026-02-04 14:41:55 +00:00
Adam Hathcock
27f7221902 Merge remote-tracking branch 'origin/release' into adam/release-merge
# Conflicts:
#	Directory.Packages.props
#	build/packages.lock.json
#	src/SharpCompress/Common/Zip/WinzipAesEncryptionData.cs
#	src/SharpCompress/SharpCompress.csproj
#	src/SharpCompress/packages.lock.json
#	tests/SharpCompress.Performance/packages.lock.json
#	tests/SharpCompress.Test/SharpCompress.Test.csproj
#	tests/SharpCompress.Test/packages.lock.json
2026-02-04 14:35:40 +00:00
Adam Hathcock
eb738b44a8 clean up naming 2026-02-04 14:32:50 +00:00
Adam Hathcock
57c0d00b37 rename Rewindable to SharpCompressStream 2026-02-04 14:12:04 +00:00
Adam Hathcock
43276b32b7 better sync over async 2026-02-04 13:25:05 +00:00
Adam Hathcock
94b275c41b Merge branch 'master' into adam/cleanup
# Conflicts:
#	src/SharpCompress/Common/EntryStream.cs
2026-02-04 12:55:46 +00:00
Adam Hathcock
ae4ae799b9 merge NonDisposingStream into RewindableStream 2026-02-04 12:40:39 +00:00
Adam Hathcock
9def35f78a Merge pull request #1174 from adamhathcock/adam/merge-release-to-master
merge release to master
2026-02-04 12:37:59 +00:00
Adam Hathcock
7e8005a9d8 fmt 2026-02-04 11:27:03 +00:00
Adam Hathcock
b93ed79ef3 another sync over async 2026-02-04 11:26:47 +00:00
Adam Hathcock
c7b8021c2e remove extra debug 2026-02-04 10:08:50 +00:00
Adam Hathcock
3af0e1091c remove the rar flag 2026-02-04 09:46:19 +00:00
Adam Hathcock
1323c96bc8 remove more scoped namespaces 2026-02-04 09:25:48 +00:00
Adam Hathcock
94716a5ba9 add sync over async dispose 2026-02-04 09:20:43 +00:00
Adam Hathcock
f67168f479 try to fix test 2026-02-04 08:40:43 +00:00
Adam Hathcock
7e54f91bfa fmt 2026-02-04 08:31:58 +00:00
Adam Hathcock
3ab4478275 use ringbuffer 2026-02-04 08:30:02 +00:00
Adam Hathcock
8759cf08ff tests all pass 2026-02-04 08:20:05 +00:00
Adam Hathcock
8cff7cb551 rewinding works more? 2026-02-03 17:21:29 +00:00
Adam Hathcock
1b0ec2410d fix deflate rewinding? 2026-02-03 17:07:39 +00:00
Adam Hathcock
22d15f73f0 Merge pull request #1181 from adamhathcock/adam/add-aot
Add AOT to props and clean up in release
2026-02-03 16:55:59 +00:00
Adam Hathcock
4e0d78d6c8 update desc 2026-02-03 16:41:19 +00:00
Adam Hathcock
63a1927838 Merge pull request #1182 from adamhathcock/copilot/sub-pr-1181
[WIP] WIP address feedback on AOT props and cleanup
2026-02-03 16:32:02 +00:00
copilot-swe-agent[bot]
3d745bfa05 Fix invalid TFM: change netstandard20 to netstandard2.0
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-02-03 16:30:47 +00:00
copilot-swe-agent[bot]
ce26d50792 Initial plan 2026-02-03 16:28:28 +00:00
Adam Hathcock
01e162fcc4 properly add netstandard 2 support 2026-02-03 16:17:58 +00:00
Adam Hathcock
443f7b8b0c Add AOT to props and clean up in release 2026-02-03 16:13:15 +00:00
Adam Hathcock
08d64ee8a1 format 2026-02-03 14:13:57 +00:00
Adam Hathcock
eedc7c7a0f Arj async passes with header fix 2026-02-03 09:29:12 +00:00
Adam Hathcock
3198b32008 Arc is now async 2026-02-03 09:03:33 +00:00
Adam Hathcock
dff17a95e8 new fix for RewindableStream with tests 2026-02-03 08:56:35 +00:00
Adam Hathcock
236ee215b9 fix async parts of arc 2026-02-03 08:14:25 +00:00
Adam Hathcock
6af612fd54 fix ace async 2026-02-02 17:26:37 +00:00
Adam Hathcock
361e695380 non-async ace works 2026-02-02 14:38:16 +00:00
Adam Hathcock
8a8784a974 async xz and tar completed 2026-02-02 13:28:13 +00:00
Adam Hathcock
ddc01527bd async filtering for lzma and others 2026-02-02 12:58:13 +00:00
Adam Hathcock
e6ad44def8 more async streams 2026-02-02 12:39:36 +00:00
Adam Hathcock
df63e152c1 Merge pull request #1178 from adamhathcock/copilot/fix-infinite-loop-rar-archive-again
Fix infinite loop in SourceStream.Seek for malformed archives
2026-02-02 10:57:10 +00:00
copilot-swe-agent[bot]
ad7e64ba43 Fix test to use correct RarArchive API - all RAR tests passing
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-02-02 09:33:37 +00:00
copilot-swe-agent[bot]
8737b7a38e Apply infinite loop fix to SourceStream.cs and add test case
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-02-02 09:31:38 +00:00
copilot-swe-agent[bot]
13199fcfd1 Initial plan 2026-02-02 09:28:50 +00:00
Adam Hathcock
9b8e3d8530 remove some tests 2026-02-02 08:52:21 +00:00
Adam Hathcock
0b15b60506 remove some extra markdown 2026-02-01 13:19:21 +00:00
Adam Hathcock
46e2ea8507 more async fixes 2026-02-01 09:38:46 +00:00
Adam Hathcock
62b8fc92d1 not sure I like this fix 2026-02-01 08:47:17 +00:00
Adam Hathcock
cb27b117b4 remove IStreamStack from non specialized streams 2026-01-31 19:03:49 +00:00
Adam Hathcock
6112b2d1d9 recording isn't great here but it works better 2026-01-31 17:42:32 +00:00
Adam Hathcock
0e2f8068a6 async crypto 2026-01-31 15:56:44 +00:00
Adam Hathcock
227e70926b fmt 2026-01-31 15:53:52 +00:00
Adam Hathcock
86e412cf77 more fixes? 2026-01-31 15:44:09 +00:00
Adam Hathcock
037b6842bf remove SharpCompressStream 2026-01-31 15:29:34 +00:00
Adam Hathcock
895dd02830 another fix 2026-01-31 14:20:01 +00:00
Adam Hathcock
7112dba345 some shrink fixes 2026-01-31 13:56:58 +00:00
Adam Hathcock
0767292bb0 ReduceStream is async 2026-01-31 13:19:10 +00:00
Adam Hathcock
b40e1a002a Merge remote-tracking branch 'origin/adam/data-descriptor-fix' into adam/more-explode-async 2026-01-31 11:18:42 +00:00
Adam Hathcock
c096164486 add shrink stream async 2026-01-31 11:18:16 +00:00
Adam Hathcock
d92def91b0 Opus 4.5 did this fix, need to understand it 2026-01-31 10:59:30 +00:00
Adam Hathcock
b48e938c98 finish PPMD? 2026-01-30 13:46:30 +00:00
Adam Hathcock
4ed1f89866 more ppmd async 2026-01-30 13:19:24 +00:00
Adam Hathcock
525bcea989 ppmd create 2026-01-30 12:37:21 +00:00
Adam Hathcock
6c3f7c86da lzma works with zip 2026-01-30 12:28:01 +00:00
Adam Hathcock
595a97bd62 more explode async 2026-01-30 07:25:49 +00:00
Adam Hathcock
c9db03335b Fixed AsyncMarkingBinaryReader 2026-01-29 16:23:37 +00:00
Adam Hathcock
659f5d7834 fix some more tests 2026-01-29 15:47:22 +00:00
Adam Hathcock
42f6c77419 rewindable with memory stream 2026-01-29 15:23:53 +00:00
Adam Hathcock
bcaec86514 save this 2026-01-29 14:43:05 +00:00
Adam Hathcock
1ca914823f more rework 2026-01-29 14:42:29 +00:00
Adam Hathcock
be8841075a fixes 2026-01-29 11:08:38 +00:00
Adam Hathcock
a94e319935 clean up rewindable stream 2026-01-29 11:01:59 +00:00
Adam Hathcock
d60abc3f45 fmt 2026-01-29 10:16:37 +00:00
Adam Hathcock
b994f0ab55 more 7z async 2026-01-29 10:13:55 +00:00
Adam Hathcock
e2cb9f39ab fix up rewindable stream and use it more, add NonDisposingStream 2026-01-29 09:08:40 +00:00
Adam Hathcock
58459bda12 using a byte array instead of memory streams 2026-01-28 19:15:34 +00:00
Adam Hathcock
8dfd5349f0 making RewindableStream more proper 2026-01-28 16:50:35 +00:00
Adam Hathcock
c770bc4788 reintroduce RewindableStream stream. SharpCompressStream does too much 2026-01-28 16:33:19 +00:00
Adam Hathcock
24b4ef8780 fix test 2026-01-28 11:48:11 +00:00
Adam Hathcock
6ddcbf2bc9 fix some tests 2026-01-28 11:37:24 +00:00
Adam Hathcock
8d5d686b79 more fixes 2026-01-28 11:23:41 +00:00
Adam Hathcock
f4369e540a fmt 2026-01-28 11:13:29 +00:00
Adam Hathcock
c219eb4abb Merge branch 'release'
# Conflicts:
#	src/SharpCompress/Archives/ArchiveFactory.cs
#	src/SharpCompress/Archives/AutoArchiveFactory.cs
#	src/SharpCompress/Archives/SevenZip/SevenZipArchive.cs
#	src/SharpCompress/Archives/Zip/ZipArchive.cs
#	src/SharpCompress/Factories/AceFactory.cs
#	src/SharpCompress/Factories/ArcFactory.cs
#	src/SharpCompress/Factories/ArjFactory.cs
#	src/SharpCompress/Factories/Factory.cs
#	src/SharpCompress/Factories/GZipFactory.cs
#	src/SharpCompress/Factories/IFactory.cs
#	src/SharpCompress/Factories/RarFactory.cs
#	src/SharpCompress/Factories/SevenZipFactory.cs
#	src/SharpCompress/Factories/TarFactory.cs
#	src/SharpCompress/Factories/ZStandardFactory.cs
#	src/SharpCompress/Factories/ZipFactory.cs
#	src/SharpCompress/IO/SharpCompressStream.cs
#	src/SharpCompress/Readers/AbstractReader.cs
#	src/SharpCompress/Utility.cs
2026-01-28 11:12:49 +00:00
Adam Hathcock
9a7bdd39e8 Merge pull request #1172 from adamhathcock/copilot/fix-sevenzip-contiguous-streams
Fix SevenZipReader to maintain contiguous stream state for solid archives
2026-01-28 08:35:28 +00:00
Adam Hathcock
484bc740d7 Update src/SharpCompress/Archives/SevenZip/SevenZipArchive.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-01-28 08:26:28 +00:00
Adam Hathcock
8a67d501a8 Don't use reflection in tests 2026-01-28 08:10:06 +00:00
copilot-swe-agent[bot]
3c87242bd0 Add test to verify folder stream reuse in solid archives
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-27 17:29:44 +00:00
copilot-swe-agent[bot]
999124e68e Remove unused _currentFolderIndex field
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-27 17:03:20 +00:00
copilot-swe-agent[bot]
db2f5c9cb9 Fix SevenZipReader to iterate entries as contiguous streams
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-27 17:01:18 +00:00
Adam Hathcock
af08a7cd54 Merge pull request #1169 from adamhathcock/copilot/fix-zip-parsing-regression
Fix ZIP parsing failure on non-seekable streams with short reads
2026-01-27 16:54:12 +00:00
copilot-swe-agent[bot]
72eaf66f05 Initial plan 2026-01-27 16:53:53 +00:00
Adam Hathcock
8a3be35d67 Update tests/SharpCompress.Test/Zip/ZipShortReadTests.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-01-27 16:43:13 +00:00
copilot-swe-agent[bot]
d59e4c2a0d Refactor FillBuffer to use ReadFully pattern
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-27 16:25:24 +00:00
copilot-swe-agent[bot]
71655e04c4 Apply code formatting with CSharpier
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-27 16:02:26 +00:00
copilot-swe-agent[bot]
a706a9d725 Fix ZIP parsing regression with short reads on non-seekable streams
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-27 16:00:44 +00:00
copilot-swe-agent[bot]
970934a40b Initial plan 2026-01-27 15:51:50 +00:00
Adam Hathcock
a9c28a7b62 Merge pull request #1165 from adamhathcock/adam/buffer-size-consolidation
(Release) Buffer size consolidation
2026-01-27 14:41:14 +00:00
Adam Hathcock
4d31436740 constant should be a static property 2026-01-27 12:39:01 +00:00
Adam Hathcock
c82744c51c fmt 2026-01-27 12:15:31 +00:00
Adam Hathcock
f0eaddc6a6 Merge remote-tracking branch 'origin/adam/buffer-size-consolidation' into adam/buffer-size-consolidation 2026-01-27 12:14:17 +00:00
Adam Hathcock
d6156f0f1e release branch builds increment patch versions and master builds increment minor versions 2026-01-27 12:14:03 +00:00
Adam Hathcock
3c88c7fdd5 Merge pull request #1167 from adamhathcock/copilot/sub-pr-1165-again
Fix grammatical errors in ArcFactory comment documentation
2026-01-27 11:58:25 +00:00
Adam Hathcock
d11f6aefb0 Merge pull request #1166 from adamhathcock/copilot/sub-pr-1165
Add [Obsolete] attribute to ReaderOptions.DefaultBufferSize for backward compatibility
2026-01-27 11:57:54 +00:00
copilot-swe-agent[bot]
010a38bb73 Add clarifying comment about buffer size value difference
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-27 11:54:50 +00:00
copilot-swe-agent[bot]
53f12d75db Add [Obsolete] attribute to ReaderOptions.DefaultBufferSize
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-27 11:53:37 +00:00
copilot-swe-agent[bot]
6c866324b2 Fix grammatical errors in ArcFactory comments
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-27 11:49:29 +00:00
copilot-swe-agent[bot]
a114155189 Initial plan 2026-01-27 11:48:05 +00:00
copilot-swe-agent[bot]
014bbc3ea4 Initial plan 2026-01-27 11:47:52 +00:00
Adam Hathcock
d52facd4ab Remove change 2026-01-27 10:48:32 +00:00
Adam Hathcock
0a50386ada Using Constants class differently 2026-01-27 10:46:54 +00:00
Adam Hathcock
f64fa53ed1 Merge pull request #1132 from adamhathcock/adam/async-creation
Clean up for async creation
2026-01-27 07:29:45 +00:00
Adam Hathcock
335db1eb9e fix ValueTask struct copying 2026-01-26 18:10:59 +00:00
Adam Hathcock
27fe2d807e more lzma porting 2026-01-26 18:09:44 +00:00
Adam Hathcock
27cf2795ef More LZMA fixes? 2026-01-26 15:50:50 +00:00
Adam Hathcock
979c8d9234 Merge fixes 2026-01-26 14:20:42 +00:00
Adam Hathcock
04eabb7866 Merge remote-tracking branch 'origin/master' into adam/async-creation
# Conflicts:
#	src/SharpCompress/Common/EntryStream.cs
#	src/SharpCompress/IO/BufferedSubStream.cs
#	src/SharpCompress/packages.lock.json
2026-01-26 14:16:14 +00:00
Adam Hathcock
f4eccea20c Merge pull request #1162 from adamhathcock/adam/release-to-master
release to master
2026-01-26 13:40:22 +00:00
Adam Hathcock
fc63217dd0 Merge remote-tracking branch 'origin/release' into adam/release-to-master
# Conflicts:
#	src/SharpCompress/IO/BufferedSubStream.cs
#	tests/SharpCompress.Test/Zip/ZipReaderAsyncTests.cs
#	tests/SharpCompress.Test/Zip/ZipReaderTests.cs
2026-01-26 13:24:25 +00:00
Adam Hathcock
b9fc680548 Merge pull request #1160 from adamhathcock/adam/check-if-seek
add check to see if we need to seek before hand
2026-01-26 12:24:39 +00:00
Adam Hathcock
7dcc13c1f0 Merge pull request #1161 from adamhathcock/copilot/sub-pr-1160
Fix ArrayPool corruption from double-disposal in BufferedSubStream
2026-01-26 12:15:55 +00:00
copilot-swe-agent[bot]
56d3091688 Fix condition order to check CanSeek before Position
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-26 12:12:08 +00:00
copilot-swe-agent[bot]
a0af0604d1 Add disposal checks to RefillCache methods
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-26 12:11:16 +00:00
copilot-swe-agent[bot]
875c2d7694 Fix BufferedSubStream double-dispose issue with ArrayPool
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-26 12:10:19 +00:00
Adam Hathcock
8c95f863cb do CanSeek first 2026-01-26 12:06:57 +00:00
copilot-swe-agent[bot]
ddf37e82c2 Initial plan 2026-01-26 12:06:38 +00:00
Adam Hathcock
a82fda98d7 more testing and add pooling to cache 2026-01-26 11:45:25 +00:00
Adam Hathcock
44e4b1804e add check to see if we need to seek before hand 2026-01-26 09:41:13 +00:00
Adam Hathcock
984ea8f46f remove posix 2026-01-25 16:38:28 +00:00
Adam Hathcock
4d84394417 LZMA Lencoder uses async 2026-01-25 16:38:17 +00:00
Adam Hathcock
507074cf72 Merge branch 'opencode/glowing-wolf' into adam/async-creation 2026-01-25 15:24:17 +00:00
Adam Hathcock
f364b68e09 remove more buffer 2026-01-25 15:23:10 +00:00
Adam Hathcock
244acc0c9e implemented async rangecoder 2026-01-25 15:17:44 +00:00
Adam Hathcock
def0bce221 remove mono dep as it's annoying 2026-01-25 15:12:17 +00:00
Adam Hathcock
d0823db595 fmt 2026-01-25 15:04:28 +00:00
Adam Hathcock
73704bcd7e Merge branch 'opencode/clever-knight' into adam/async-creation 2026-01-25 15:04:07 +00:00
Adam Hathcock
86c3b93fa5 Merge branch 'opencode/glowing-wolf' into adam/async-creation 2026-01-25 15:04:01 +00:00
Adam Hathcock
e89fb211ce gzipwriter async 2026-01-25 15:03:51 +00:00
Adam Hathcock
55100cb37a ExplodeStream is async 2026-01-25 15:03:06 +00:00
Adam Hathcock
14fd880dac add tar writing async 2026-01-25 14:57:44 +00:00
Adam Hathcock
4ca1a7713e Merge pull request #1157 from adamhathcock/adam/1154-release
Merge pull request #1156 from adamhathcock/copilot/fix-sharpcompress-…
2026-01-25 11:36:59 +00:00
Adam Hathcock
9caf7be928 Revert testing 2026-01-24 10:23:02 +00:00
Adam Hathcock
bf4217fde6 Merge pull request #1156 from adamhathcock/copilot/fix-sharpcompress-archive-iteration
Fix silent iteration failure when input stream throws on Flush()
# Conflicts:
#	src/SharpCompress/packages.lock.json
2026-01-24 10:18:02 +00:00
Adam Hathcock
de3cda9034 Merge pull request #1156 from adamhathcock/copilot/fix-sharpcompress-archive-iteration
Fix silent iteration failure when input stream throws on Flush()
2026-01-24 10:11:16 +00:00
Adam Hathcock
f1102dc980 Undoing https://github.com/adamhathcock/sharpcompress/pull/1151 2026-01-24 10:01:49 +00:00
copilot-swe-agent[bot]
f2bb81d611 Add async versions of archive iteration regression tests
- Added Archive_Iteration_DoesNotBreak_WhenFlushThrows_Deflate_Async
- Added Archive_Iteration_DoesNotBreak_WhenFlushThrows_LZMA_Async
- Both async tests mirror the sync versions and pass successfully

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-23 16:42:44 +00:00
copilot-swe-agent[bot]
41e0c151de Fix regression: archive iteration breaking when input stream throws in Flush()
- Modified ZlibBaseStream.Flush() and FlushAsync() to only flush the underlying stream when in Writer mode
- Added ThrowOnFlushStream mock for testing
- Added regression tests for Deflate and LZMA compressed archives
- All tests pass successfully

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-23 16:04:09 +00:00
copilot-swe-agent[bot]
d0f44839ff Initial plan 2026-01-23 15:58:14 +00:00
Adam Hathcock
414cad1241 add braces 2026-01-23 10:55:51 +00:00
Adam Hathcock
abe0087cfd fmt 2026-01-23 10:32:11 +00:00
Adam Hathcock
060b1ed5dd fix disposal and add tests 2026-01-23 10:25:41 +00:00
Adam Hathcock
fbc168fafe Merge remote-tracking branch 'origin/adam/async-creation' into adam/async-creation 2026-01-23 09:46:53 +00:00
Adam Hathcock
d5a8c37113 Merge pull request #1154 from adamhathcock/adam/1151-release
Adam/1151 release cherry pick
2026-01-23 09:31:03 +00:00
Adam Hathcock
21ce9a38e6 fix up tests 2026-01-23 09:04:55 +00:00
Adam Hathcock
7732fbb698 Merge pull request #1151 from adamhathcock/copilot/fix-entrystream-flush-issue
Fix EntryStream.Dispose() throwing NotSupportedException on non-seekable streams
2026-01-23 08:59:56 +00:00
Adam Hathcock
44402414a6 LZMA create 2026-01-22 17:01:48 +00:00
Adam Hathcock
11b92d102a Create for explodestream 2026-01-22 16:48:53 +00:00
Adam Hathcock
16831e1e6e Merge pull request #1152 from adamhathcock/copilot/sub-pr-1132
Fix dispose methods to always set _isDisposed and call base.Dispose() when LeaveOpen is true
2026-01-22 16:39:47 +00:00
Adam Hathcock
3b83d08e2a fmt 2026-01-22 16:38:44 +00:00
Adam Hathcock
b622a2ce73 fix disposal and other simple issues 2026-01-22 16:38:35 +00:00
Adam Hathcock
c5814502f6 clean up and fixing tests....need to revisit disposal 2026-01-22 16:24:07 +00:00
copilot-swe-agent[bot]
d9be6389ca Address code review feedback - remove extra blank lines and use consistent property access
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-22 15:51:21 +00:00
copilot-swe-agent[bot]
336a8f2876 Fix SharpCompressStream Dispose methods to set _isDisposed and call base.Dispose even when LeaveOpen is true
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-22 15:50:05 +00:00
copilot-swe-agent[bot]
b4f949ba9b Initial plan 2026-01-22 15:44:08 +00:00
Adam Hathcock
9403c12793 Add await 2026-01-22 15:42:54 +00:00
Adam Hathcock
77c1cebefc Merge remote-tracking branch 'origin/master' into adam/async-creation
# Conflicts:
#	src/SharpCompress/Common/EntryStream.cs
#	tests/SharpCompress.Test/SharpCompress.Test.csproj
2026-01-22 15:29:38 +00:00
Adam Hathcock
caa7acdbc5 Merge pull request #1151 from adamhathcock/copilot/fix-entrystream-flush-issue
Fix EntryStream.Dispose() throwing NotSupportedException on non-seekable streams
2026-01-22 15:23:13 +00:00
Adam Hathcock
1522e64797 fix async tests 2026-01-22 15:15:57 +00:00
Adam Hathcock
5152e3197e fix build flags 2026-01-22 15:12:18 +00:00
Adam Hathcock
ae4f2c08fd check if second stream is zip header without changing position - fix 2026-01-22 15:06:58 +00:00
copilot-swe-agent[bot]
9628f2dda1 Add async tests for EntryStream.Dispose on non-seekable streams
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-22 14:35:16 +00:00
Adam Hathcock
65208a30c1 fix more tests 2026-01-22 14:15:47 +00:00
Adam Hathcock
4c838db876 everything compiles and passes (minus 3 tests) 2026-01-22 14:08:20 +00:00
Adam Hathcock
d1f6fd9af1 move more and fmt 2026-01-22 14:05:23 +00:00
Adam Hathcock
61c6f8403a some manual moving 2026-01-22 13:52:24 +00:00
Adam Hathcock
a8f47237d7 divide async and sync into new files 2026-01-22 13:38:20 +00:00
copilot-swe-agent[bot]
7cbdc5b46c Format code with CSharpier
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-22 13:21:36 +00:00
copilot-swe-agent[bot]
8b74243e79 Update test comments to include version context
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-22 13:07:32 +00:00
copilot-swe-agent[bot]
f77a2aabab Fix EntryStream.Dispose() to not throw NotSupportedException on non-seekable streams
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-22 13:04:58 +00:00
copilot-swe-agent[bot]
e6fb704780 Initial plan 2026-01-22 12:59:30 +00:00
Adam Hathcock
c5d7407919 Update from Task to ValueTask where I can 2026-01-22 09:18:07 +00:00
Adam Hathcock
b9ed2b09c1 fmt 2026-01-22 09:05:26 +00:00
Adam Hathcock
db0bb8a30d fix some 7z tests 2026-01-22 08:52:00 +00:00
Adam Hathcock
85d82e5c86 fix tar issue 2026-01-22 08:22:57 +00:00
Adam Hathcock
1a87075f33 GZip fix 2026-01-22 08:13:34 +00:00
Adam Hathcock
8df9232171 use extension where appropriate with more fixes 2026-01-21 16:57:25 +00:00
Adam Hathcock
7b7eba8cd9 more fixes 2026-01-21 16:11:40 +00:00
Adam Hathcock
169364f6ae fix disposal 2026-01-21 15:37:56 +00:00
Adam Hathcock
c38f74d34c Merge remote-tracking branch 'origin/master' into adam/async-creation
# Conflicts:
#	src/SharpCompress/Compressors/BZip2/BZip2Stream.cs
#	src/SharpCompress/Compressors/BZip2/CBZip2InputStream.cs
#	src/SharpCompress/Compressors/Deflate/DeflateStream.cs
2026-01-21 15:31:44 +00:00
Adam Hathcock
895699d22e fmt 2026-01-20 16:53:08 +00:00
Adam Hathcock
cf901c2784 fix test 2026-01-20 16:44:34 +00:00
Adam Hathcock
e1bbc65f5b more bzip tests pass 2026-01-20 16:39:15 +00:00
Adam Hathcock
f6faaa83ec better async bzip input stream 2026-01-20 16:32:30 +00:00
Adam Hathcock
4d3ae3a97f Merge branch 'opencode/curious-river' into adam/bzip2-async 2026-01-20 16:03:11 +00:00
Adam Hathcock
cc47fde57f works? 2026-01-20 15:37:15 +00:00
Adam Hathcock
a8d5b8e86b intermediate commit 2026-01-20 15:19:46 +00:00
Adam Hathcock
0a9c5bfe15 format changes 2026-01-20 13:40:51 +00:00
Adam Hathcock
ff0769e988 Create factory for CBZip2InputStream 2026-01-20 13:21:11 +00:00
Adam Hathcock
3987733079 LZW async 2026-01-20 12:56:13 +00:00
Adam Hathcock
b26d38b7e4 another tar test fix 2026-01-20 12:34:49 +00:00
Adam Hathcock
2175cb299d tar fixes 2026-01-20 12:22:38 +00:00
Adam Hathcock
8abb972f87 Fix test 2026-01-20 11:01:17 +00:00
Adam Hathcock
05bf22f518 rar works now 2026-01-20 10:41:37 +00:00
Adam Hathcock
3b5ee481c5 fix for another async typo 2026-01-20 10:17:28 +00:00
Adam Hathcock
b54617238b more async fixes? 2026-01-20 10:09:13 +00:00
Adam Hathcock
44174e7b03 some fixes 2026-01-20 09:07:57 +00:00
Adam Hathcock
ecd9317ab3 more basic LLM async and fixed CRC async 2026-01-19 16:08:46 +00:00
Adam Hathcock
884f0b702e some grunt rar header async 2026-01-19 16:08:20 +00:00
Adam Hathcock
2e95832bea factory the headers instead of creating 2026-01-19 14:19:15 +00:00
Adam Hathcock
97879f18b6 Merge pull request #1146 from adamhathcock/adam/pr-1145-release
Merge pull request #1145 from adamhathcock/copilot/add-leaveopen-para…
2026-01-19 10:35:33 +00:00
Adam Hathcock
d74454f7e9 Merge pull request #1145 from adamhathcock/copilot/add-leaveopen-parameter-lzipstream
Add leaveOpen parameter to LZipStream and BZip2Stream
2026-01-19 09:58:10 +00:00
Adam Hathcock
ce01cc7ce1 Merge pull request #1145 from adamhathcock/copilot/add-leaveopen-parameter-lzipstream
Add leaveOpen parameter to LZipStream and BZip2Stream
2026-01-19 09:57:39 +00:00
copilot-swe-agent[bot]
9454466be7 Add comprehensive tests for leaveOpen behavior and fix BZip2 stream disposal
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-19 07:33:22 +00:00
copilot-swe-agent[bot]
0e4a159998 Add leaveOpen parameter to LZipStream and BZip2Stream
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-19 07:31:20 +00:00
copilot-swe-agent[bot]
4998676476 Initial plan 2026-01-19 07:22:01 +00:00
Adam Hathcock
f359f553b3 some minor fixes 2026-01-18 15:31:10 +00:00
Adam Hathcock
08118f7286 add more async writing 2026-01-18 15:07:02 +00:00
Adam Hathcock
408d2e6663 Async add entry 2026-01-18 14:57:01 +00:00
Adam Hathcock
4c4b727bd7 Tar detection works 2026-01-17 13:39:57 +00:00
Adam Hathcock
8e54b10b7f tar tests are better? 2026-01-16 15:10:08 +00:00
Adam Hathcock
f99e421115 fix factory 2026-01-16 15:04:01 +00:00
Adam Hathcock
82d56b9678 multi-file rars done manually 2026-01-16 13:43:26 +00:00
Adam Hathcock
447d35267f some fixes 2026-01-16 13:19:41 +00:00
Adam Hathcock
763805e03a async IsRarFile 2026-01-16 12:12:51 +00:00
Adam Hathcock
cd70a7760e remove AutoFactory 2026-01-16 11:44:12 +00:00
Adam Hathcock
ec7c359341 Arj works 2026-01-16 11:12:26 +00:00
Adam Hathcock
cc59c1960a fix ace tests 2026-01-16 10:49:18 +00:00
Adam Hathcock
1cc80e7675 Merge pull request #1141 from adamhathcock/copilot/sub-pr-1132
[WIP] Address feedback on async creation cleanup changes
2026-01-16 10:12:08 +00:00
Adam Hathcock
cfe59fc515 Merge branch 'adam/async-creation' into copilot/sub-pr-1132 2026-01-16 10:11:45 +00:00
copilot-swe-agent[bot]
2180df3318 Pass CancellationToken.None explicitly to OpenAsyncArchive methods
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-16 10:09:54 +00:00
Adam Hathcock
29f4c7fe2e Merge pull request #1142 from adamhathcock/copilot/sub-pr-1132-another-one
Fix ReadFullyAsync with ArrayPool buffer in SevenZipArchive signature check
2026-01-16 10:09:07 +00:00
Adam Hathcock
d5f9815561 Merge pull request #1136 from adamhathcock/adam/upgrade-xunit
Upgrade xunit to v3
2026-01-16 10:08:23 +00:00
copilot-swe-agent[bot]
6e5e47f041 Update SevenZipFactory to consistently call OpenAsyncArchive methods
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-16 10:07:37 +00:00
copilot-swe-agent[bot]
b0fde2b8c7 Fix ReadFullyAsync call to specify offset and count for ArrayPool buffer
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-16 10:03:48 +00:00
copilot-swe-agent[bot]
4b9b20de42 Initial plan 2026-01-16 09:59:14 +00:00
Adam Hathcock
f7c91bb26f Update src/SharpCompress/Factories/SevenZipFactory.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-01-16 09:58:09 +00:00
copilot-swe-agent[bot]
4b34dd61d3 Initial plan 2026-01-16 09:58:06 +00:00
Adam Hathcock
c958d184d0 Merge pull request #1137 from adamhathcock/copilot/sub-pr-1136
Fix async test failures after xunit v3 upgrade
2026-01-16 09:54:04 +00:00
copilot-swe-agent[bot]
0de5c59a77 Restore AsyncOnlyStream in archive async tests as requested
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-16 09:50:36 +00:00
Adam Hathcock
3b10be53b5 Merge pull request #1140 from adamhathcock/copilot/sub-pr-1132-another-one
Replace empty catch blocks with explicit exception handling in TarArchive validation methods
2026-01-16 09:39:45 +00:00
Adam Hathcock
5336eb6fe6 Merge pull request #1138 from adamhathcock/copilot/sub-pr-1132
Remove redundant stream field in AsyncOnlyStream
2026-01-16 09:38:42 +00:00
copilot-swe-agent[bot]
9fa686b8f9 Fix empty catch blocks in TarArchive.Factory.cs with explicit exception handling
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-16 09:35:24 +00:00
copilot-swe-agent[bot]
2012077fb0 Remove redundant _stream field from AsyncOnlyStream and use base Stream property
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-16 09:34:29 +00:00
copilot-swe-agent[bot]
302cf2e14f Initial plan 2026-01-16 09:30:05 +00:00
Adam Hathcock
b9fccbd691 Update src/SharpCompress/Factories/ZStandardFactory.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-01-16 09:29:13 +00:00
copilot-swe-agent[bot]
bbbbc8810a Initial plan 2026-01-16 09:29:09 +00:00
copilot-swe-agent[bot]
c7da19f3a5 Format code with CSharpier
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-16 09:26:04 +00:00
copilot-swe-agent[bot]
e919930cf6 Fix Archive async tests to not use AsyncOnlyStream (archives need seekable streams)
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-16 09:23:32 +00:00
copilot-swe-agent[bot]
2906529080 Fix ReaderFactory.OpenAsyncReader to use async IsArchiveAsync methods
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-16 09:12:54 +00:00
copilot-swe-agent[bot]
75cc36849b Initial plan 2026-01-16 09:00:13 +00:00
Adam Hathcock
63e124e72f Upgrade xunit to v3 2026-01-16 08:58:26 +00:00
Adam Hathcock
394d982168 Merge pull request #1133 from adamhathcock/copilot/sub-pr-1132
Add async I/O support for SevenZip archive initialization
2026-01-16 08:44:04 +00:00
Adam Hathcock
f4ce4cbad8 fix tests for both frameworks 2026-01-16 08:43:13 +00:00
Adam Hathcock
491beabe03 uncomment tests 2026-01-16 08:35:49 +00:00
Adam Hathcock
f5d83c0e33 Merge pull request #1135 from adamhathcock/copilot/consolidate-compile-flags 2026-01-15 18:47:37 +00:00
copilot-swe-agent[bot]
d2cb792d91 Change NET6_0_OR_GREATER to NET8_0_OR_GREATER
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-15 18:31:06 +00:00
copilot-swe-agent[bot]
52fef492a5 Additional simplifications: Remove NETCF, fix NET60 typo, consolidate NETCOREAPP2_1 pattern
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-15 18:09:06 +00:00
copilot-swe-agent[bot]
a5300f3383 Replace NETFRAMEWORK and NETSTANDARD2_0 with LEGACY_DOTNET compile flag
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-15 18:05:14 +00:00
copilot-swe-agent[bot]
cab3e7d498 Initial analysis: Planning compile flags consolidation
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-15 17:55:37 +00:00
copilot-swe-agent[bot]
405dbb30cd Initial plan 2026-01-15 17:50:54 +00:00
copilot-swe-agent[bot]
9bb670ad19 Fix SevenZipArchive async stream handling by adding async Open and ReadDatabase methods
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-15 17:28:05 +00:00
copilot-swe-agent[bot]
bbba2e6c7a Initial plan for fixing SevenZipArchive_LZMA_AsyncStreamExtraction test
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-15 16:53:18 +00:00
copilot-swe-agent[bot]
0b2158f74c Initial plan 2026-01-15 16:44:57 +00:00
Adam Hathcock
5c06b8c48f enable single test 2026-01-15 16:41:58 +00:00
Adam Hathcock
810df8a18b revert lazy archive 2026-01-15 16:40:08 +00:00
Adam Hathcock
63736efcac Merge remote-tracking branch 'origin/master' into adam/async-creation
# Conflicts:
#	tests/SharpCompress.Test/SevenZip/SevenZipArchiveAsyncTests.cs
2026-01-15 16:21:30 +00:00
Adam Hathcock
3e219fa9ec Merge pull request #1131 from adamhathcock/adam/async-again
More test fixes and some perf changes
2026-01-15 16:20:25 +00:00
Adam Hathcock
33b6447c18 Merge remote-tracking branch 'origin/master' into adam/async-creation 2026-01-15 16:16:41 +00:00
Adam Hathcock
ec310c87de merge fixes and fmt 2026-01-15 15:20:52 +00:00
Adam Hathcock
c55a383112 Merge remote-tracking branch 'origin/master' into adam/async-again
# Conflicts:
#	tests/SharpCompress.Test/GZip/GZipReaderAsyncTests.cs
#	tests/SharpCompress.Test/Rar/RarArchiveAsyncTests.cs
#	tests/SharpCompress.Test/SevenZip/SevenZipArchiveAsyncTests.cs
#	tests/SharpCompress.Test/Tar/TarArchiveAsyncTests.cs
#	tests/SharpCompress.Test/Tar/TarReaderAsyncTests.cs
#	tests/SharpCompress.Test/Zip/Zip64AsyncTests.cs
#	tests/SharpCompress.Test/Zip/ZipMemoryArchiveWithCrcAsyncTests.cs
2026-01-15 15:18:05 +00:00
Adam Hathcock
227fec66ad more pooling 2026-01-15 15:16:11 +00:00
Adam Hathcock
38eec23e07 rar byte[] better 2026-01-15 15:16:04 +00:00
Adam Hathcock
437271c6a2 change byte[] to memory using pool 2026-01-15 15:15:57 +00:00
Adam Hathcock
81a2060c75 reduce memory usage on headers 2026-01-15 15:15:50 +00:00
Adam Hathcock
5e90cfd6c5 Merge pull request #1128 from adamhathcock/adam/async-interface
Change interfaces to be consistent for new Async paths (definitely breaks things)
2026-01-15 15:13:33 +00:00
Adam Hathcock
2d597e6e43 be more lazy with loading of sync stuff 2026-01-15 15:09:23 +00:00
Adam Hathcock
a410f73bf3 archive asyncs are more right 2026-01-15 14:52:10 +00:00
Adam Hathcock
b41296194f more updates to docs 2026-01-15 13:29:57 +00:00
Adam Hathcock
bf7416753a update docs 2026-01-15 13:04:09 +00:00
Adam Hathcock
7fbd751d27 change tests to work 2026-01-15 13:00:05 +00:00
Adam Hathcock
85b28dfe68 more refactoring 2026-01-15 12:20:35 +00:00
Adam Hathcock
779fba5deb finish the open refactor? 2026-01-15 12:06:54 +00:00
Adam Hathcock
2756b1f6f8 more refactor 2026-01-15 11:55:56 +00:00
Adam Hathcock
7b76858ae1 refactoring naming again 2026-01-15 11:41:30 +00:00
Adam Hathcock
84b5b5a717 add more tests 2026-01-14 14:33:20 +00:00
Adam Hathcock
ebfa16f09f more test fixes 2026-01-14 14:12:53 +00:00
Adam Hathcock
c1d240b516 Fix more tests 2026-01-14 14:06:39 +00:00
Adam Hathcock
5c4719f4a9 missing extensions 2026-01-14 13:39:11 +00:00
Adam Hathcock
95d2278d8b fmt 2026-01-14 12:13:29 +00:00
Adam Hathcock
e63ee57ef0 same for writers 2026-01-14 09:29:44 +00:00
Adam Hathcock
775efa1b26 Reader open factories 2026-01-14 09:23:18 +00:00
Adam Hathcock
3677b4b193 add default interfaces to enforce consistency 2026-01-14 08:57:12 +00:00
Adam Hathcock
c32f4b4f2a fix test reference 2026-01-14 08:33:49 +00:00
Adam Hathcock
8d34f88ca6 fix up gitignore 2026-01-13 16:42:54 +00:00
Adam Hathcock
ca4cf25a1f clean up lazy readonly collections and add tests 2026-01-13 16:39:55 +00:00
Adam Hathcock
4fa976b478 remove unused ref 2026-01-13 15:29:02 +00:00
Adam Hathcock
767f3a4985 fix up extensions to more like polyfills 2026-01-13 15:26:45 +00:00
Adam Hathcock
ddc08e068e fix async error 2026-01-13 15:16:38 +00:00
Adam Hathcock
a1a86cdde8 fmt 2026-01-13 14:29:10 +00:00
Adam Hathcock
fc85f1fa2c more tar async fixes 2026-01-13 14:28:45 +00:00
Adam Hathcock
0b8081f320 gzip fixes 2026-01-13 14:24:36 +00:00
Adam Hathcock
0b5371d986 more async fixing 2026-01-13 14:06:14 +00:00
Adam Hathcock
cdca909d84 fmt 2026-01-13 13:58:31 +00:00
Adam Hathcock
ec7d2e357d fix lock? 2026-01-13 13:58:03 +00:00
Adam Hathcock
1c0183ef11 force async tests 2026-01-13 13:56:56 +00:00
Adam Hathcock
9cf2b3129c fixed up async writer 2026-01-13 13:54:15 +00:00
Adam Hathcock
9a4e864f5e fix usage of ArchiveFactory 2026-01-13 13:49:35 +00:00
Adam Hathcock
4df952db1b split out factories for archive 2026-01-12 16:32:26 +00:00
Adam Hathcock
1b4cedfa13 misc fixes 2026-01-12 16:21:20 +00:00
Adam Hathcock
6d6103afd6 update docs 2026-01-12 16:08:16 +00:00
Adam Hathcock
d727d76299 Merge remote-tracking branch 'origin/master' into adam/async-interface 2026-01-12 15:02:19 +00:00
Adam Hathcock
0502ff545e test fixes and fmt 2026-01-12 15:01:29 +00:00
Adam Hathcock
fce4a96718 make Writable interfaces for archive 2026-01-12 14:57:13 +00:00
Adam Hathcock
38203fb950 Fix async reader variable types - Remove double await on ReaderFactory.OpenAsync and use IAsyncReader
- Removed 'await' keyword before ReaderFactory.OpenAsync() calls since the method returns IAsyncReader directly (not Task)
- Changed ZipReader.Open() to ReaderFactory.OpenAsync() in Zip64AsyncTests.ReadForwardOnlyAsync()
- Changed TarReader.Open() to ReaderFactory.OpenAsync() in TarReaderAsyncTests.Tar_BZip2_Entry_Stream_Async()
- Fixed EntryStream disposal from 'await using' to 'using' since EntryStream doesn't implement IAsyncDisposable
- These changes fix compilation errors where async methods were being called on IReader (synchronous) instead of IAsyncReader (asynchronous)
2026-01-12 14:14:46 +00:00
Adam Hathcock
0615d17b8b fix async interfacing for open 2026-01-12 13:45:21 +00:00
Adam Hathcock
c1f8580d89 Remove unnecessary ValueTask wrappers from async factory methods
Change return types from ValueTask<T> to direct interface types (IAsyncArchive, IAsyncReader, IWriter) for wrapper methods that don't perform async work. This eliminates unnecessary async state machine allocations while maintaining the same public API behavior.

Changes:
- Interface definitions: Updated IArchiveFactory, IMultiArchiveFactory, IReaderFactory, IWriterFactory
- Concrete factories: Updated archive factories (Zip, Tar, Rar, GZip, SevenZip) and reader-only factories (Ace, Arc, Arj)
- Static factory methods: Updated ReaderFactory, ArchiveFactory, WriterFactory to use new signatures
- Archive classes: Updated static OpenAsync methods in ZipArchive, TarArchive, RarArchive, SevenZipArchive, GZipArchive
- Supporting changes: Updated Factory.cs and async polyfills

Performance benefit: Reduced GC pressure by eliminating unnecessary state machine overhead for non-async wrapper methods.
2026-01-12 13:16:44 +00:00
Adam Hathcock
c5a6f900df Merge branch 'adam/async' into adam/async-interface 2026-01-12 13:02:51 +00:00
Adam Hathcock
05ebf22009 Start using the interface to draw distinction between async and sync 2026-01-12 12:08:25 +00:00
552 changed files with 35045 additions and 16410 deletions

@@ -1,7 +0,0 @@
enabled: true
agent:
  name: copilot-coding-agent
  allow:
    - paths: ["src/**/*", "tests/**/*", "README.md", "AGENTS.md"]
      actions: ["create", "modify"]
require_review_before_merge: true

@@ -307,7 +307,6 @@ dotnet_diagnostic.CS8602.severity = error
dotnet_diagnostic.CS8604.severity = error
dotnet_diagnostic.CS8618.severity = error
dotnet_diagnostic.CS0618.severity = suggestion
dotnet_diagnostic.CS1998.severity = error
dotnet_diagnostic.CS4014.severity = error
dotnet_diagnostic.CS8600.severity = error
dotnet_diagnostic.CS8603.severity = error

@@ -1,17 +0,0 @@
enabled: true
agent:
  name: copilot-coding-agent
  allow:
    - paths: ["src/**/*", "tests/**/*", "README.md", "AGENTS.md"]
      actions: ["create", "modify", "delete"]
require_review_before_merge: true
required_approvals: 1
allowed_merge_strategies:
  - squash
  - merge
auto_merge_on_green: false
run_workflows: true
notes: |
  - This manifest expresses the policy for the Copilot coding agent in this repository.
  - It does NOT install or authorize the agent; a repository admin must install the Copilot coding agent app and grant the repository the necessary permissions (contents: write, pull_requests: write, checks: write, actions: write/read, issues: write) to allow the agent to act.
  - Keep allow paths narrow and prefer require_review_before_merge during initial rollout.

@@ -1,25 +0,0 @@
# Plan: Implement Missing Async Functionality in SharpCompress
SharpCompress has async support for low-level stream operations and Reader/Writer APIs, but critical entry points (Archive.Open, factory methods, initialization) remain synchronous. This plan adds async overloads for all user-facing I/O operations and fixes existing async bugs, enabling full end-to-end async workflows.
## Steps
1. **Add async factory methods** to [ArchiveFactory.cs](src/SharpCompress/Factories/ArchiveFactory.cs), [ReaderFactory.cs](src/SharpCompress/Factories/ReaderFactory.cs), and [WriterFactory.cs](src/SharpCompress/Factories/WriterFactory.cs) with `OpenAsync` and `CreateAsync` overloads accepting `CancellationToken`
2. **Implement async Open methods** on concrete archive types ([ZipArchive.cs](src/SharpCompress/Archives/Zip/ZipArchive.cs), [TarArchive.cs](src/SharpCompress/Archives/Tar/TarArchive.cs), [RarArchive.cs](src/SharpCompress/Archives/Rar/RarArchive.cs), [GZipArchive.cs](src/SharpCompress/Archives/GZip/GZipArchive.cs), [SevenZipArchive.cs](src/SharpCompress/Archives/SevenZip/SevenZipArchive.cs)) and reader types ([ZipReader.cs](src/SharpCompress/Readers/Zip/ZipReader.cs), [TarReader.cs](src/SharpCompress/Readers/Tar/TarReader.cs), etc.)
3. **Convert archive initialization logic to async** including header reading, volume loading, and format signature detection across archive constructors and internal initialization methods
4. **Fix LZMA decoder async bugs** in [LzmaStream.cs](src/SharpCompress/Compressors/LZMA/LzmaStream.cs), [Decoder.cs](src/SharpCompress/Compressors/LZMA/Decoder.cs), and [OutWindow.cs](src/SharpCompress/Compressors/LZMA/OutWindow.cs) to enable true async 7Zip support and remove `NonDisposingStream` workaround
5. **Complete Rar async implementation** by converting `UnpackV2017` methods to async in [UnpackV2017.cs](src/SharpCompress/Compressors/Rar/UnpackV2017.cs) and updating Rar20 decompression
6. **Add comprehensive async tests** covering all new async entry points, cancellation scenarios, and concurrent operations across all archive formats in test files
## Further Considerations
1. **Breaking changes** - Should new async methods be added alongside existing sync methods (non-breaking), or should sync methods eventually be deprecated? Recommend additive approach for backward compatibility.
2. **Performance impact** - Header parsing for formats like Zip/Tar is often small; consider whether truly async parsing adds value vs sync parsing wrapped in Task, or make it conditional based on stream type (network vs file).
3. **7Zip complexity** - The LZMA async bug fix (Step 4) may be challenging due to state management in the decoder; consider whether to scope it separately or implement a simpler workaround that maintains correctness.
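For orientation, a minimal sketch of the entry-point shape step 1 describes. The `OpenAsyncArchive` name matches the updated USAGE.md further down; the return shape and the synchronous `Entries` access shown here are assumptions, not the shipped API:
```csharp
using System.Threading.Tasks;
using SharpCompress.Archives.Zip;

// Hedged sketch: open an archive through an async entry point and list entries.
// OpenAsyncArchive appears in the USAGE.md diff below; whether entries are
// exposed synchronously on the async archive is an assumption here.
static async Task ListEntriesAsync(string path)
{
    using var asyncArchive = await ZipArchive.OpenAsyncArchive(path);
    foreach (var entry in asyncArchive.Entries)
    {
        System.Console.WriteLine(entry.Key);
    }
}
```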

@@ -1,123 +0,0 @@
# Plan: Modernize SharpCompress Public API
Based on comprehensive analysis, the API has several inconsistencies around factory patterns, async support, format capabilities, and options classes. Most improvements can be done incrementally without breaking changes.
## Steps
1. **Standardize factory patterns** by deprecating format-specific static `Open` methods in [Archives/Zip/ZipArchive.cs](src/SharpCompress/Archives/Zip/ZipArchive.cs), [Archives/Tar/TarArchive.cs](src/SharpCompress/Archives/Tar/TarArchive.cs), etc. in favor of centralized [Factories/ArchiveFactory.cs](src/SharpCompress/Factories/ArchiveFactory.cs)
2. **Complete async implementation** in [Writers/Zip/ZipWriter.cs](src/SharpCompress/Writers/Zip/ZipWriter.cs) and other writers that currently use sync-over-async, implementing true async I/O throughout the writer hierarchy
3. **Unify options classes** by making [Common/ExtractionOptions.cs](src/SharpCompress/Common/ExtractionOptions.cs) inherit from `OptionsBase` and adding progress reporting to extraction methods consistently
4. **Clarify GZip semantics** in [Archives/GZip/GZipArchive.cs](src/SharpCompress/Archives/GZip/GZipArchive.cs) by adding XML documentation explaining single-entry limitation and relationship to GZip compression used in Tar.gz
## Further Considerations
1. **Breaking changes roadmap** - Should we plan a major version (2.0) to remove deprecated factory methods, clean up `ArchiveType` enum (remove Arc/Arj or add full support), and consolidate naming patterns?
2. **Progress reporting consistency** - Should `IProgress<ArchiveExtractionProgress<IEntry>>` be added to all extraction extension methods or consolidated into options classes?
## Detailed Analysis
### Factory Pattern Issues
Three different factory patterns exist with overlapping functionality:
1. **Static Factories**: ArchiveFactory, ReaderFactory, WriterFactory
2. **Instance Factories**: IArchiveFactory, IReaderFactory, IWriterFactory
3. **Format-specific static methods**: Each archive class has static `Open` methods
**Example confusion:**
```csharp
// Three ways to open a Zip archive - which is recommended?
var archive1 = ArchiveFactory.Open("file.zip");
var archive2 = ZipArchive.Open("file.zip");
var archive3 = ArchiveFactory.AutoFactory.Open(fileInfo, options);
```
### Async Support Gaps
Base `IWriter` interface has async methods, but writer implementations provide minimal async support. Most writers just call synchronous methods:
```csharp
public virtual async Task WriteAsync(...)
{
    // Default implementation calls synchronous version
    Write(filename, source, modificationTime);
    await Task.CompletedTask.ConfigureAwait(false);
}
```
Real async implementations only in:
- `TarWriter` - Proper async implementation
- Most other writers use sync-over-async
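To make the contrast concrete, here is a neutral illustration using only standard `Stream` APIs (not SharpCompress writer internals): the first method blocks on the copy despite its async signature, the second actually awaits the I/O:
```csharp
using System.IO;
using System.Threading;
using System.Threading.Tasks;

// Illustrative only - not a SharpCompress type.
class WriterSketch
{
    private readonly Stream _output;

    public WriterSketch(Stream output) => _output = output;

    // Sync-over-async: the copy runs synchronously on the calling thread;
    // the returned Task is just a wrapper around already-completed work.
    public Task WriteSyncOverAsync(Stream source)
    {
        source.CopyTo(_output);
        return Task.CompletedTask;
    }

    // True async: reads and writes are awaited, freeing the thread while
    // I/O is pending and honoring cancellation.
    public Task WriteAsync(Stream source, CancellationToken ct = default) =>
        source.CopyToAsync(_output, 81920, ct);
}
```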
### GZip Archive Special Case
GZip is treated as both a compression format and an archive format, but only supports single-entry archives:
```csharp
protected override GZipArchiveEntry CreateEntryInternal(...)
{
    if (Entries.Any())
    {
        throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
    }
    // ...
}
```
### Options Class Hierarchy
```
OptionsBase (LeaveStreamOpen, ArchiveEncoding)
├─ ReaderOptions (LookForHeader, Password, DisableCheckIncomplete, BufferSize, ExtensionHint, Progress)
├─ WriterOptions (CompressionType, CompressionLevel, Progress)
│ ├─ ZipWriterOptions (ArchiveComment, UseZip64)
│ ├─ TarWriterOptions (FinalizeArchiveOnClose, HeaderFormat)
│ └─ GZipWriterOptions (no additional properties)
└─ ExtractionOptions (standalone - Overwrite, ExtractFullPath, PreserveFileTime, PreserveAttributes)
```
**Issues:**
- `ExtractionOptions` doesn't inherit from `OptionsBase` - no encoding support during extraction
- Progress reporting inconsistency between readers and extraction
- Obsolete properties (`ChecksumIsValid`, `Version`) with unclear migration path
### Implementation Priorities
**High Priority (Non-Breaking):**
1. Add API usage guide (Archive vs Reader, factory recommendations, async best practices)
2. Fix progress reporting consistency
3. Complete async implementation in writers
**Medium Priority (Next Major Version):**
1. Unify factory pattern - deprecate format-specific static `Open` methods
2. Clean up options classes - make `ExtractionOptions` inherit from `OptionsBase`
3. Clarify archive types - remove Arc/Arj from `ArchiveType` enum or add full support
4. Standardize naming across archive types
**Low Priority:**
1. Add BZip2 archive support similar to GZipArchive
2. Complete obsolete property cleanup with migration guide
### Backward Compatibility Strategy
**Safe (Non-Breaking) Changes:**
- Add new methods to interfaces (use default implementations)
- Add new options properties (with defaults)
- Add new factory methods
- Improve async implementations
- Add progress reporting support
**Breaking Changes to Avoid:**
- ❌ Removing format-specific `Open` methods (deprecate instead)
- ❌ Changing `LeaveStreamOpen` default (currently `true`)
- ❌ Removing obsolete properties before major version bump
- ❌ Changing return types or signatures of existing methods
**Deprecation Pattern:**
- Use `[Obsolete]` for one major version
- Use `[EditorBrowsable(EditorBrowsableState.Never)]` in next major version
- Remove in following major version
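As a sketch of that ladder on a hypothetical method (all names here are illustrative, not the real API):
```csharp
using System;
using System.ComponentModel;

public static class ExampleFactory
{
    // Major version N: mark obsolete but keep it working.
    [Obsolete("Use ArchiveFactory.Open instead.")]
    public static object Open(string path) => throw new NotImplementedException();

    // Major version N+1: additionally hide from IntelliSense completion lists.
    [Obsolete("Use ArchiveFactory.Open instead.")]
    [EditorBrowsable(EditorBrowsableState.Never)]
    public static object OpenHidden(string path) => throw new NotImplementedException();

    // Major version N+2: the member is removed outright.
}
```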

.gitignore
@@ -4,8 +4,8 @@ _ReSharper.SharpCompress/
bin/
*.suo
*.user
TestArchives/Scratch/
TestArchives/Scratch2/
tests/TestArchives/Scratch/
tests/TestArchives/Scratch2/
TestResults/
*.nupkg
packages/*/
@@ -20,7 +20,3 @@ artifacts/
.DS_Store
*.snupkg
# BenchmarkDotNet artifacts
BenchmarkDotNet.Artifacts/
**/BenchmarkDotNet.Artifacts/

@@ -178,5 +178,59 @@ SharpCompress supports multiple archive and compression formats:
2. **Solid archives (Rar, 7Zip)** - Use `ExtractAllEntries()` for best performance, not individual entry extraction
3. **Stream disposal** - Always set `LeaveStreamOpen` explicitly when needed (default is to close)
4. **Tar + non-seekable stream** - Must provide file size or it will throw
5. **Multi-framework differences** - Some features differ between .NET Framework and modern .NET (e.g., Mono.Posix)
6. **Format detection** - Use `ReaderFactory.Open()` for auto-detection, test with actual archive files
### Async Struct-Copy Bug in LZMA RangeCoder
When implementing async methods on mutable `struct` types (like `BitEncoder` and `BitDecoder` in the LZMA RangeCoder), be aware that the async state machine copies the struct when `await` is encountered. This means mutations to struct fields after the `await` point may not persist back to the original struct stored in arrays or fields.
**The Bug:**
```csharp
// BAD: async method on mutable struct
public async ValueTask<uint> DecodeAsync(Decoder decoder, CancellationToken cancellationToken = default)
{
    var newBound = (decoder._range >> K_NUM_BIT_MODEL_TOTAL_BITS) * _prob;
    if (decoder._code < newBound)
    {
        decoder._range = newBound;
        _prob += (K_BIT_MODEL_TOTAL - _prob) >> K_NUM_MOVE_BITS; // Mutates _prob
        await decoder.Normalize2Async(cancellationToken).ConfigureAwait(false); // Struct gets copied here
        return 0; // Original _prob update may be lost
    }
    // ...
}
```
**The Fix:**
Refactor async methods on mutable structs to perform all struct mutations synchronously before any `await`, or use a helper method to separate the await from the struct mutation:
```csharp
// GOOD: struct mutations happen synchronously, await is conditional
public ValueTask<uint> DecodeAsync(Decoder decoder, CancellationToken cancellationToken = default)
{
    var newBound = (decoder._range >> K_NUM_BIT_MODEL_TOTAL_BITS) * _prob;
    if (decoder._code < newBound)
    {
        decoder._range = newBound;
        _prob += (K_BIT_MODEL_TOTAL - _prob) >> K_NUM_MOVE_BITS; // All mutations complete
        return DecodeAsyncHelper(decoder.Normalize2Async(cancellationToken), 0); // Await in helper
    }
    decoder._range -= newBound;
    decoder._code -= newBound;
    _prob -= (_prob) >> K_NUM_MOVE_BITS; // All mutations complete
    return DecodeAsyncHelper(decoder.Normalize2Async(cancellationToken), 1); // Await in helper
}

private static async ValueTask<uint> DecodeAsyncHelper(ValueTask normalizeTask, uint result)
{
    await normalizeTask.ConfigureAwait(false);
    return result;
}
```
**Why This Matters:**
In LZMA, the `BitEncoder` and `BitDecoder` structs maintain adaptive probability models in their `_prob` field. When these structs are stored in arrays (e.g., `_models[m]`), the async state machine copy breaks the adaptive model, causing incorrect bit decoding and eventually `DataErrorException` exceptions.
**Related Files:**
- `src/SharpCompress/Compressors/LZMA/RangeCoder/RangeCoderBit.Async.cs` - Fixed
- `src/SharpCompress/Compressors/LZMA/RangeCoder/RangeCoderBitTree.Async.cs` - Uses readonly structs, so this pattern doesn't apply
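A minimal standalone repro of the underlying mechanism (not SharpCompress code): the compiler copies a mutable struct's `this` into the async state machine, so field mutations made inside the async method never reach the struct the caller stored:
```csharp
using System;
using System.Threading.Tasks;

struct Counter
{
    public int Value;

    // 'this' is copied into the async state machine at the call, so this
    // increment mutates the copy, not the struct in the caller's array.
    public async Task BumpAsync()
    {
        await Task.Yield();
        Value++;
    }
}

class Program
{
    static async Task Main()
    {
        var counters = new Counter[1];
        await counters[0].BumpAsync();
        Console.WriteLine(counters[0].Value); // prints 0 - the increment was lost
    }
}
```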

@@ -12,5 +12,6 @@
    <RunAnalyzersDuringBuild>False</RunAnalyzersDuringBuild>
    <ManagePackageVersionsCentrally>true</ManagePackageVersionsCentrally>
    <RestorePackagesWithLockFile>true</RestorePackagesWithLockFile>
    <CentralPackageTransitivePinningEnabled>true</CentralPackageTransitivePinningEnabled>
  </PropertyGroup>
</Project>

@@ -1,10 +1,9 @@
<Project>
  <ItemGroup>
    <PackageVersion Include="BenchmarkDotNet" Version="0.14.0" />
    <PackageVersion Include="Bullseye" Version="6.1.0" />
    <PackageVersion Include="AwesomeAssertions" Version="9.3.0" />
    <PackageVersion Include="Glob" Version="1.1.9" />
    <PackageVersion Include="JetBrains.Profiler.SelfApi" Version="2.5.15" />
    <PackageVersion Include="JetBrains.Profiler.SelfApi" Version="2.5.16" />
    <PackageVersion Include="Microsoft.Bcl.AsyncInterfaces" Version="10.0.0" />
    <PackageVersion Include="Microsoft.NET.Test.Sdk" Version="18.0.1" />
    <PackageVersion Include="Mono.Posix.NETStandard" Version="1.0.0" />
@@ -12,9 +11,9 @@
<PackageVersion Include="System.Text.Encoding.CodePages" Version="10.0.0" />
<PackageVersion Include="System.Buffers" Version="4.6.1" />
<PackageVersion Include="System.Memory" Version="4.6.3" />
<PackageVersion Include="xunit" Version="2.9.3" />
<PackageVersion Include="xunit.v3" Version="3.2.2" />
<PackageVersion Include="xunit.runner.visualstudio" Version="3.1.5" />
<GlobalPackageReference Include="Microsoft.SourceLink.GitHub" Version="8.0.0" />
<GlobalPackageReference Include="Microsoft.SourceLink.GitHub" Version="10.0.102" />
<GlobalPackageReference Include="Microsoft.NETFramework.ReferenceAssemblies" Version="1.0.3" />
<GlobalPackageReference
Include="Microsoft.VisualStudio.Threading.Analyzers"

@@ -18,12 +18,11 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Config", "Config", "{CDB425
		Directory.Build.props = Directory.Build.props
		global.json = global.json
		.editorconfig = .editorconfig
		.gitignore = .gitignore
		Directory.Packages.props = Directory.Packages.props
		NuGet.config = NuGet.config
		.github\workflows\nuget-release.yml = .github\workflows\nuget-release.yml
		USAGE.md = USAGE.md
		README.md = README.md
		FORMATS.md = FORMATS.md
		AGENTS.md = AGENTS.md
	EndProjectSection
EndProject

@@ -230,7 +230,7 @@ static async Task<(string version, bool isPrerelease)> GetVersion()
    }
    else
    {
        // Not tagged - create prerelease version based on next minor version
        // Not tagged - create prerelease version
        var allTags = (await GetGitOutput("tag", "--list"))
            .Split('\n', StringSplitOptions.RemoveEmptyEntries)
            .Where(tag => Regex.IsMatch(tag.Trim(), @"^\d+\.\d+\.\d+$"))
@@ -240,8 +240,22 @@ static async Task<(string version, bool isPrerelease)> GetVersion()
        var lastTag = allTags.OrderBy(tag => Version.Parse(tag)).LastOrDefault() ?? "0.0.0";
        var lastVersion = Version.Parse(lastTag);
        // Increment minor version for next release
        var nextVersion = new Version(lastVersion.Major, lastVersion.Minor + 1, 0);
        // Determine version increment based on branch
        var currentBranch = await GetCurrentBranch();
        Version nextVersion;
        if (currentBranch == "release")
        {
            // Release branch: increment patch version
            nextVersion = new Version(lastVersion.Major, lastVersion.Minor, lastVersion.Build + 1);
            Console.WriteLine($"Building prerelease for release branch (patch increment)");
        }
        else
        {
            // Master or other branches: increment minor version
            nextVersion = new Version(lastVersion.Major, lastVersion.Minor + 1, 0);
            Console.WriteLine($"Building prerelease for {currentBranch} branch (minor increment)");
        }
        // Use commit count since the last version tag if available; otherwise, fall back to total count
        var revListArgs = allTags.Any() ? $"--count {lastTag}..HEAD" : "--count HEAD";
@@ -253,6 +267,28 @@ static async Task<(string version, bool isPrerelease)> GetVersion()
    }
}
static async Task<string> GetCurrentBranch()
{
    // In GitHub Actions, GITHUB_REF_NAME contains the branch name
    var githubRefName = Environment.GetEnvironmentVariable("GITHUB_REF_NAME");
    if (!string.IsNullOrEmpty(githubRefName))
    {
        return githubRefName;
    }
    // Fallback to git command for local builds
    try
    {
        var (output, _) = await ReadAsync("git", "branch --show-current");
        return output.Trim();
    }
    catch (Exception ex)
    {
        Console.WriteLine($"Warning: Could not determine current branch: {ex.Message}");
        return "unknown";
    }
}
static async Task<string> GetGitOutput(string command, string args)
{
try

@@ -25,12 +25,12 @@
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==",
"requested": "[10.0.102, )",
"resolved": "10.0.102",
"contentHash": "Oxq3RCIJSdtpIU4hLqO7XaDe/Ra3HS9Wi8rJl838SAg6Zu1iQjerA0+xXWBgUFYbgknUGCLOU0T+lzMLkvY9Qg==",
"dependencies": {
"Microsoft.Build.Tasks.Git": "8.0.0",
"Microsoft.SourceLink.Common": "8.0.0"
"Microsoft.Build.Tasks.Git": "10.0.102",
"Microsoft.SourceLink.Common": "10.0.102"
}
},
"Microsoft.VisualStudio.Threading.Analyzers": {
@@ -47,8 +47,8 @@
},
"Microsoft.Build.Tasks.Git": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
"resolved": "10.0.102",
"contentHash": "0i81LYX31U6UiXz4NOLbvc++u+/mVDmOt+PskrM/MygpDxkv9THKQyRUmavBpLK6iBV0abNWnn+CQgSRz//Pwg=="
},
"Microsoft.NETFramework.ReferenceAssemblies.net461": {
"type": "Transitive",
@@ -57,8 +57,8 @@
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
"resolved": "10.0.102",
"contentHash": "Mk1IMb9q5tahC2NltxYXFkLBtuBvfBoCQ3pIxYQWfzbCE9o1OB9SsHe0hnNGo7lWgTA/ePbFAJLWu6nLL9K17A=="
}
}
}

View File

@@ -8,49 +8,49 @@ Quick reference for commonly used SharpCompress APIs.
```csharp
// Auto-detect format
using (var reader = ReaderFactory.Open(stream))
using (var reader = ReaderFactory.OpenReader(stream))
{
// Works with Zip, Tar, GZip, Rar, 7Zip, etc.
}
// Specific format - Archive API
using (var archive = ZipArchive.Open("file.zip"))
using (var archive = TarArchive.Open("file.tar"))
using (var archive = RarArchive.Open("file.rar"))
using (var archive = SevenZipArchive.Open("file.7z"))
using (var archive = GZipArchive.Open("file.gz"))
using (var archive = ZipArchive.OpenArchive("file.zip"))
using (var archive = TarArchive.OpenArchive("file.tar"))
using (var archive = RarArchive.OpenArchive("file.rar"))
using (var archive = SevenZipArchive.OpenArchive("file.7z"))
using (var archive = GZipArchive.OpenArchive("file.gz"))
// With options
var options = new ReaderOptions
{
var options = new ReaderOptions
{
Password = "password",
LeaveStreamOpen = true,
ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(932) }
};
using (var archive = ZipArchive.Open("encrypted.zip", options))
using (var archive = ZipArchive.OpenArchive("encrypted.zip", options))
```
### Creating Archives
```csharp
// Writer Factory
using (var writer = WriterFactory.Open(stream, ArchiveType.Zip, CompressionType.Deflate))
using (var writer = WriterFactory.OpenWriter(stream, ArchiveType.Zip, CompressionType.Deflate))
{
// Write entries
}
// Specific writer
using (var archive = ZipArchive.Create())
using (var archive = TarArchive.Create())
using (var archive = GZipArchive.Create())
using (var archive = ZipArchive.CreateArchive())
using (var archive = TarArchive.CreateArchive())
using (var archive = GZipArchive.CreateArchive())
// With options
var options = new WriterOptions(CompressionType.Deflate)
{
var options = new WriterOptions(CompressionType.Deflate)
{
CompressionLevel = 9,
LeaveStreamOpen = false
};
using (var archive = ZipArchive.Create())
using (var archive = ZipArchive.CreateArchive())
{
archive.SaveTo("output.zip", options);
}
@@ -63,26 +63,26 @@ using (var archive = ZipArchive.Create())
### Reading/Extracting
```csharp
using (var archive = ZipArchive.Open("file.zip"))
using (var archive = ZipArchive.OpenArchive("file.zip"))
{
// Get all entries
IEnumerable<IEntry> entries = archive.Entries;
IEnumerable<IArchiveEntry> entries = archive.Entries;
// Find specific entry
var entry = archive.Entries.FirstOrDefault(e => e.Key == "file.txt");
// Extract all
archive.WriteToDirectory(@"C:\output", new ExtractionOptions
{
ExtractFullPath = true,
Overwrite = true
});
// Extract single entry
var entry = archive.Entries.First();
entry.WriteToFile(@"C:\output\file.txt");
entry.WriteToFile(@"C:\output\file.txt", new ExtractionOptions { Overwrite = true });
// Get entry stream
using (var stream = entry.OpenEntryStream())
{
@@ -90,8 +90,15 @@ using (var archive = ZipArchive.Open("file.zip"))
}
}
// Async variants
await archive.WriteToDirectoryAsync(@"C:\output", options, cancellationToken);
// Async extraction (requires IAsyncArchive)
using (var asyncArchive = await ZipArchive.OpenAsyncArchive("file.zip"))
{
await asyncArchive.WriteToDirectoryAsync(
@"C:\output",
new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
cancellationToken: cancellationToken
);
}
using (var stream = await entry.OpenEntryStreamAsync(cancellationToken))
{
// ...
@@ -115,18 +122,18 @@ foreach (var entry in archive.Entries)
### Creating Archives
```csharp
using (var archive = ZipArchive.Create())
using (var archive = ZipArchive.CreateArchive())
{
// Add file
archive.AddEntry("file.txt", "C:\\source\\file.txt");
archive.AddEntry("file.txt", @"C:\source\file.txt");
// Add multiple files
archive.AddAllFromDirectory("C:\\source");
archive.AddAllFromDirectory("C:\\source", "*.txt"); // Pattern
archive.AddAllFromDirectory(@"C:\source");
archive.AddAllFromDirectory(@"C:\source", "*.txt"); // Pattern
// Save to file
archive.SaveTo("output.zip", CompressionType.Deflate);
// Save to stream
archive.SaveTo(outputStream, new WriterOptions(CompressionType.Deflate)
{
@@ -144,18 +151,18 @@ using (var archive = ZipArchive.Create())
```csharp
using (var stream = File.OpenRead("file.zip"))
using (var reader = ReaderFactory.Open(stream))
using (var reader = ReaderFactory.OpenReader(stream))
{
while (reader.MoveToNextEntry())
{
IEntry entry = reader.Entry;
IArchiveEntry entry = reader.Entry;
if (!entry.IsDirectory)
{
// Extract entry
reader.WriteEntryToDirectory(@"C:\output");
reader.WriteEntryToFile(@"C:\output\file.txt");
// Or get stream
using (var entryStream = reader.OpenEntryStream())
{
@@ -165,16 +172,25 @@ using (var reader = ReaderFactory.Open(stream))
}
}
// Async variants
while (await reader.MoveToNextEntryAsync())
// Async variants (use OpenAsyncReader to get IAsyncReader)
using (var stream = File.OpenRead("file.zip"))
using (var reader = await ReaderFactory.OpenAsyncReader(stream))
{
await reader.WriteEntryToFileAsync(@"C:\output\" + reader.Entry.Key, cancellationToken);
}
while (await reader.MoveToNextEntryAsync())
{
await reader.WriteEntryToFileAsync(
@"C:\output\" + reader.Entry.Key,
cancellationToken: cancellationToken
);
}
// Async extraction
await reader.WriteAllToDirectoryAsync(@"C:\output",
new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
cancellationToken);
// Async extraction of all entries
await reader.WriteAllToDirectoryAsync(
@"C:\output",
new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
cancellationToken
);
}
```
---
@@ -185,7 +201,7 @@ await reader.WriteAllToDirectoryAsync(@"C:\output",
```csharp
using (var stream = File.Create("output.zip"))
using (var writer = WriterFactory.Open(stream, ArchiveType.Zip, CompressionType.Deflate))
using (var writer = WriterFactory.OpenWriter(stream, ArchiveType.Zip, CompressionType.Deflate))
{
// Write single file
using (var fileStream = File.OpenRead("source.txt"))
@@ -223,7 +239,7 @@ var options = new ReaderOptions
Default = Encoding.GetEncoding(932)
}
};
using (var archive = ZipArchive.Open("file.zip", options))
using (var archive = ZipArchive.OpenArchive("file.zip", options))
{
// ...
}
@@ -262,15 +278,20 @@ archive.WriteToDirectory(@"C:\output", options);
// For creating archives
CompressionType.None // No compression (store)
CompressionType.Deflate // DEFLATE (default for ZIP/GZip)
CompressionType.Deflate64 // Deflate64
CompressionType.BZip2 // BZip2
CompressionType.LZMA // LZMA (for 7Zip, LZip, XZ)
CompressionType.PPMd // PPMd (for ZIP)
CompressionType.Rar // RAR compression (read-only)
CompressionType.ZStandard // ZStandard
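// Also supported as read-only archive formats (decompression only):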
ArchiveType.Arc
ArchiveType.Arj
ArchiveType.Ace
// For Tar archives
// Use CompressionType in TarWriter constructor
using (var writer = TarWriter(stream, CompressionType.GZip)) // Tar.GZip
using (var writer = TarWriter(stream, CompressionType.BZip2)) // Tar.BZip2
// For Tar archives with compression
// Use WriterFactory to create compressed tar archives
using (var writer = WriterFactory.OpenWriter(stream, ArchiveType.Tar, CompressionType.GZip)) // Tar.GZip
using (var writer = WriterFactory.OpenWriter(stream, ArchiveType.Tar, CompressionType.BZip2)) // Tar.BZip2
```
### Archive Types
@@ -328,7 +349,7 @@ var progress = new Progress<ProgressReport>(report =>
});
var options = new ReaderOptions { Progress = progress };
using (var archive = ZipArchive.Open("archive.zip", options))
using (var archive = ZipArchive.OpenArchive("archive.zip", options))
{
archive.WriteToDirectory(@"C:\output");
}
@@ -342,11 +363,13 @@ cts.CancelAfter(TimeSpan.FromMinutes(5));
try
{
using (var archive = ZipArchive.Open("archive.zip"))
using (var archive = await ZipArchive.OpenAsyncArchive("archive.zip"))
{
await archive.WriteToDirectoryAsync(@"C:\output",
await archive.WriteToDirectoryAsync(
@"C:\output",
new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
cts.Token);
cancellationToken: cts.Token
);
}
}
catch (OperationCanceledException)
@@ -358,23 +381,23 @@ catch (OperationCanceledException)
### Create with Custom Compression
```csharp
using (var archive = ZipArchive.Create())
using (var archive = ZipArchive.CreateArchive())
{
archive.AddAllFromDirectory(@"D:\source");
// Fastest
archive.SaveTo("fast.zip", new WriterOptions(CompressionType.Deflate)
{
CompressionLevel = 1
archive.SaveTo("fast.zip", new WriterOptions(CompressionType.Deflate)
{
CompressionLevel = 1
});
// Balanced (default)
archive.SaveTo("normal.zip", CompressionType.Deflate);
// Best compression
archive.SaveTo("best.zip", new WriterOptions(CompressionType.Deflate)
{
CompressionLevel = 9
archive.SaveTo("best.zip", new WriterOptions(CompressionType.Deflate)
{
CompressionLevel = 9
});
}
```
@@ -383,7 +406,7 @@ using (var archive = ZipArchive.Create())
```csharp
using (var outputStream = new MemoryStream())
using (var archive = ZipArchive.Create())
using (var archive = ZipArchive.CreateArchive())
{
// Add content from memory
using (var contentStream = new MemoryStream(Encoding.UTF8.GetBytes("Hello")))
@@ -402,7 +425,7 @@ using (var archive = ZipArchive.Create())
### Extract Specific Files
```csharp
using (var archive = ZipArchive.Open("archive.zip"))
using (var archive = ZipArchive.OpenArchive("archive.zip"))
{
var filesToExtract = new[] { "file1.txt", "file2.txt" };
@@ -416,7 +439,7 @@ using (var archive = ZipArchive.Open("archive.zip"))
### List Archive Contents
```csharp
using (var archive = ZipArchive.Open("archive.zip"))
using (var archive = ZipArchive.OpenArchive("archive.zip"))
{
foreach (var entry in archive.Entries)
{
@@ -436,7 +459,7 @@ using (var archive = ZipArchive.Open("archive.zip"))
```csharp
var stream = File.OpenRead("archive.zip");
var archive = ZipArchive.Open(stream);
var archive = ZipArchive.OpenArchive(stream);
archive.WriteToDirectory(@"C:\output");
// stream not disposed - leaked resource
```
@@ -445,7 +468,7 @@ archive.WriteToDirectory(@"C:\output");
```csharp
using (var stream = File.OpenRead("archive.zip"))
using (var archive = ZipArchive.Open(stream))
using (var archive = ZipArchive.OpenArchive(stream))
{
archive.WriteToDirectory(@"C:\output");
}
@@ -456,7 +479,7 @@ using (var archive = ZipArchive.Open(stream))
```csharp
// Loading entire archive then iterating
using (var archive = ZipArchive.Open("large.zip"))
using (var archive = ZipArchive.OpenArchive("large.zip"))
{
var entries = archive.Entries.ToList(); // Loads all in memory
foreach (var e in entries)
@@ -471,7 +494,7 @@ using (var archive = ZipArchive.Open("large.zip"))
```csharp
// Streaming iteration
using (var stream = File.OpenRead("large.zip"))
using (var reader = ReaderFactory.Open(stream))
using (var reader = ReaderFactory.OpenReader(stream))
{
while (reader.MoveToNextEntry())
{

View File

@@ -76,7 +76,7 @@ Factory classes for auto-detecting archive format and creating appropriate reade
- Format-specific: `ZipFactory.cs`, `TarFactory.cs`, `RarFactory.cs`, etc.
**How It Works:**
1. `ReaderFactory.Open(stream)` probes stream signatures
1. `ReaderFactory.OpenReader(stream)` probes stream signatures
2. Identifies format by magic bytes
3. Creates appropriate reader instance
4. Returns generic `IReader` interface
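As a rough illustration of steps 1–2, probing amounts to reading a few header bytes and rewinding before handing the stream to the chosen reader (a minimal sketch using the well-known ZIP signature; `LooksLikeZip` is not a SharpCompress API):
```csharp
static bool LooksLikeZip(Stream stream)
{
    var start = stream.Position;
    Span<byte> header = stackalloc byte[4];
    var read = stream.Read(header);
    stream.Position = start; // rewind so the real reader sees the full stream
    // 0x50 0x4B 0x03 0x04 ("PK\x03\x04") is the ZIP local file header signature
    return read == 4
        && header[0] == 0x50
        && header[1] == 0x4B
        && header[2] == 0x03
        && header[3] == 0x04;
}
```
Each format factory performs an equivalent check against its own magic bytes before a reader is created.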
@@ -142,7 +142,7 @@ Stream wrappers and utilities.
**Example:**
```csharp
// User calls factory
using (var reader = ReaderFactory.Open(stream)) // Returns IReader
using (var reader = ReaderFactory.OpenReader(stream)) // Returns IReader
{
while (reader.MoveToNextEntry())
{
@@ -175,7 +175,7 @@ CompressionType.LZMA // LZMA
CompressionType.PPMd // PPMd
// Writer uses strategy pattern
var archive = ZipArchive.Create();
var archive = ZipArchive.CreateArchive();
archive.SaveTo("output.zip", CompressionType.Deflate); // Use Deflate
archive.SaveTo("output.bz2", CompressionType.BZip2); // Use BZip2
```
@@ -248,7 +248,7 @@ foreach (var entry in entries)
}
// Reader API - provides iterator
IReader reader = ReaderFactory.Open(stream);
IReader reader = ReaderFactory.OpenReader(stream);
while (reader.MoveToNextEntry())
{
// Forward-only iteration - one entry at a time
@@ -381,7 +381,7 @@ public class NewFormatArchive : AbstractArchive
private NewFormatHeader _header;
private List<NewFormatEntry> _entries;
public static NewFormatArchive Open(Stream stream)
public static NewFormatArchive OpenArchive(Stream stream)
{
var archive = new NewFormatArchive();
archive._header = NewFormatHeader.Read(stream);
@@ -442,8 +442,8 @@ public class NewFormatFactory : Factory, IArchiveFactory, IReaderFactory
public static NewFormatFactory Instance { get; } = new();
public IArchive CreateArchive(Stream stream)
=> NewFormatArchive.Open(stream);
public IArchive CreateArchive(Stream stream)
=> NewFormatArchive.OpenArchive(stream);
public IReader CreateReader(Stream stream, ReaderOptions options)
=> new NewFormatReader(stream) { Options = options };
@@ -481,7 +481,7 @@ public class NewFormatTests : TestBase
public void NewFormat_Extracts_Successfully()
{
var archivePath = Path.Combine(TEST_ARCHIVES_PATH, "archive.newformat");
using (var archive = NewFormatArchive.Open(archivePath))
using (var archive = NewFormatArchive.OpenArchive(archivePath))
{
archive.WriteToDirectory(SCRATCH_FILES_PATH);
// Assert extraction
@@ -561,7 +561,7 @@ public class CustomStream : Stream
```csharp
// Correct: Nested using blocks
using (var fileStream = File.OpenRead("archive.zip"))
using (var archive = ZipArchive.Open(fileStream))
using (var archive = ZipArchive.OpenArchive(fileStream))
{
archive.WriteToDirectory(@"C:\output");
}
@@ -570,7 +570,7 @@ using (var archive = ZipArchive.Open(fileStream))
// Correct: Using with options
var options = new ReaderOptions { LeaveStreamOpen = true };
var stream = File.OpenRead("archive.zip");
using (var archive = ZipArchive.Open(stream, options))
using (var archive = ZipArchive.OpenArchive(stream, options))
{
archive.WriteToDirectory(@"C:\output");
}
@@ -641,7 +641,7 @@ public void Archive_Extraction_Works()
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "test.zip");
// Act
using (var archive = ZipArchive.Open(testArchive))
using (var archive = ZipArchive.OpenArchive(testArchive))
{
archive.WriteToDirectory(SCRATCH_FILES_PATH);
}

View File

@@ -27,7 +27,7 @@ var options = new ReaderOptions
}
};
using (var archive = ZipArchive.Open("japanese.zip", options))
using (var archive = ZipArchive.OpenArchive("japanese.zip", options))
{
foreach (var entry in archive.Entries)
{
@@ -51,7 +51,7 @@ var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(932) }
};
using (var archive = ZipArchive.Open("file.zip", options))
using (var archive = ZipArchive.OpenArchive("file.zip", options))
{
// Use archive with correct encoding
}
@@ -64,7 +64,7 @@ var options = new ReaderOptions
ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(932) }
};
using (var stream = File.OpenRead("file.zip"))
using (var reader = ReaderFactory.Open(stream, options))
using (var reader = ReaderFactory.OpenReader(stream, options))
{
while (reader.MoveToNextEntry())
{
@@ -89,7 +89,7 @@ var options = new ReaderOptions
Default = Encoding.GetEncoding(932)
}
};
using (var archive = ZipArchive.Open("japanese.zip", options))
using (var archive = ZipArchive.OpenArchive("japanese.zip", options))
{
// Correctly decodes Japanese filenames
}
@@ -266,7 +266,7 @@ SharpCompress attempts to auto-detect encoding, but this isn't always reliable:
```csharp
// Auto-detection (default)
using (var archive = ZipArchive.Open("file.zip")) // Uses UTF8 by default
using (var archive = ZipArchive.OpenArchive("file.zip")) // Uses UTF8 by default
{
// May show corrupted characters if archive uses different encoding
}
@@ -276,7 +276,7 @@ var options = new ReaderOptions
{
ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(932) }
};
using (var archive = ZipArchive.Open("file.zip", options))
using (var archive = ZipArchive.OpenArchive("file.zip", options))
{
// Correct characters displayed
}
@@ -324,7 +324,7 @@ var options = new ReaderOptions
}
};
using (var archive = ZipArchive.Open("mixed.zip", options))
using (var archive = ZipArchive.OpenArchive("mixed.zip", options))
{
foreach (var entry in archive.Entries)
{
@@ -388,7 +388,7 @@ var options = new ReaderOptions
}
};
using (var archive = ZipArchive.Open("japanese_files.zip", options))
using (var archive = ZipArchive.OpenArchive("japanese_files.zip", options))
{
archive.WriteToDirectory(@"C:\output", new ExtractionOptions
{
@@ -410,7 +410,7 @@ var options = new ReaderOptions
}
};
using (var archive = ZipArchive.Open("french_files.zip", options))
using (var archive = ZipArchive.OpenArchive("french_files.zip", options))
{
archive.WriteToDirectory(@"C:\output");
}
@@ -428,7 +428,7 @@ var options = new ReaderOptions
}
};
using (var archive = ZipArchive.Open("chinese_files.zip", options))
using (var archive = ZipArchive.OpenArchive("chinese_files.zip", options))
{
archive.WriteToDirectory(@"C:\output");
}
@@ -445,7 +445,7 @@ var options = new ReaderOptions
}
};
using (var archive = ZipArchive.Open("russian_files.zip", options))
using (var archive = ZipArchive.OpenArchive("russian_files.zip", options))
{
archive.WriteToDirectory(@"C:\output");
}
@@ -463,7 +463,7 @@ var options = new ReaderOptions
};
using (var stream = File.OpenRead("japanese.zip"))
using (var reader = ReaderFactory.Open(stream, options))
using (var reader = ReaderFactory.OpenReader(stream, options))
{
while (reader.MoveToNextEntry())
{
@@ -484,7 +484,7 @@ When creating archives, SharpCompress uses UTF8 by default (recommended):
```csharp
// Create with UTF8 (default, recommended)
using (var archive = ZipArchive.Create())
using (var archive = ZipArchive.CreateArchive())
{
archive.AddAllFromDirectory(@"D:\my_files");
archive.SaveTo("output.zip", CompressionType.Deflate);

View File

@@ -24,7 +24,7 @@ Choose the right API based on your use case:
// - You need random access to entries
// - Stream is seekable (file, MemoryStream)
using (var archive = ZipArchive.Open("archive.zip"))
using (var archive = ZipArchive.OpenArchive("archive.zip"))
{
// Random access - all entries available
var specific = archive.Entries.FirstOrDefault(e => e.Key == "file.txt");
@@ -51,7 +51,7 @@ using (var archive = ZipArchive.Open("archive.zip"))
// - Forward-only processing is acceptable
using (var stream = File.OpenRead("large.zip"))
using (var reader = ReaderFactory.Open(stream))
using (var reader = ReaderFactory.OpenReader(stream))
{
while (reader.MoveToNextEntry())
{
@@ -129,7 +129,7 @@ For processing archives from downloads or pipes:
```csharp
// Download stream (non-seekable)
using (var httpStream = await httpClient.GetStreamAsync(url))
using (var reader = ReaderFactory.Open(httpStream))
using (var reader = ReaderFactory.OpenReader(httpStream))
{
// Process entries as they arrive
while (reader.MoveToNextEntry())
@@ -159,14 +159,14 @@ Choose based on your constraints:
```csharp
// Download then extract (requires disk space)
var archivePath = await DownloadFile(url, @"C:\temp\archive.zip");
using (var archive = ZipArchive.Open(archivePath))
using (var archive = ZipArchive.OpenArchive(archivePath))
{
archive.WriteToDirectory(@"C:\output");
}
// Stream during download (on-the-fly extraction)
using (var httpStream = await httpClient.GetStreamAsync(url))
using (var reader = ReaderFactory.Open(httpStream))
using (var reader = ReaderFactory.OpenReader(httpStream))
{
while (reader.MoveToNextEntry())
{
@@ -198,7 +198,7 @@ Extracting File3 requires decompressing File1 and File2 first.
**Random Extraction (Slow):**
```csharp
using (var archive = RarArchive.Open("solid.rar"))
using (var archive = RarArchive.OpenArchive("solid.rar"))
{
foreach (var entry in archive.Entries)
{
@@ -210,7 +210,7 @@ using (var archive = RarArchive.Open("solid.rar"))
**Sequential Extraction (Fast):**
```csharp
using (var archive = RarArchive.Open("solid.rar"))
using (var archive = RarArchive.OpenArchive("solid.rar"))
{
// Method 1: Use WriteToDirectory (recommended)
archive.WriteToDirectory(@"C:\output", new ExtractionOptions
@@ -256,7 +256,7 @@ using (var archive = RarArchive.Open("solid.rar"))
// Level 9 = Slowest, best compression
// Write with different compression levels
using (var archive = ZipArchive.Create())
using (var archive = ZipArchive.CreateArchive())
{
archive.AddAllFromDirectory(@"D:\data");
@@ -293,7 +293,7 @@ using (var archive = ZipArchive.Create())
// Smaller block size = lower memory, faster
// Larger block size = better compression, slower
using (var archive = TarArchive.Create())
using (var archive = TarArchive.CreateArchive())
{
archive.AddAllFromDirectory(@"D:\data");
@@ -313,7 +313,7 @@ LZMA compression is very powerful but memory-intensive:
// - Better compression: larger dictionary
// Preset via CompressionType
using (var archive = TarArchive.Create())
using (var archive = TarArchive.CreateArchive())
{
archive.AddAllFromDirectory(@"D:\data");
archive.SaveTo("archive.tar.xz", CompressionType.LZMA); // Default settings
@@ -333,7 +333,7 @@ Async is beneficial when:
```csharp
// Async extraction (non-blocking)
using (var archive = ZipArchive.Open("archive.zip"))
using (var archive = ZipArchive.OpenArchive("archive.zip"))
{
await archive.WriteToDirectoryAsync(
@"C:\output",
@@ -353,7 +353,7 @@ Async doesn't improve performance for:
```csharp
// Sync extraction (simpler, same performance on fast I/O)
using (var archive = ZipArchive.Open("archive.zip"))
using (var archive = ZipArchive.OpenArchive("archive.zip"))
{
archive.WriteToDirectory(
@"C:\output",
@@ -373,7 +373,7 @@ cts.CancelAfter(TimeSpan.FromMinutes(5));
try
{
using (var archive = ZipArchive.Open("archive.zip"))
using (var archive = ZipArchive.OpenArchive("archive.zip"))
{
await archive.WriteToDirectoryAsync(
@"C:\output",
@@ -408,14 +408,14 @@ catch (OperationCanceledException)
// ✗ Slow - opens each archive separately
foreach (var file in files)
{
using (var archive = ZipArchive.Open("archive.zip"))
using (var archive = ZipArchive.OpenArchive(file))
{
archive.WriteToDirectory(@"C:\output");
}
}
// ✓ Better - process multiple entries at once
using (var archive = ZipArchive.Open("archive.zip"))
using (var archive = ZipArchive.OpenArchive("archive.zip"))
{
archive.WriteToDirectory(@"C:\output");
}
@@ -425,7 +425,7 @@ using (var archive = ZipArchive.Open("archive.zip"))
```csharp
var sw = Stopwatch.StartNew();
using (var archive = ZipArchive.Open("large.zip"))
using (var archive = ZipArchive.OpenArchive("large.zip"))
{
archive.WriteToDirectory(@"C:\output");
}

View File

@@ -48,7 +48,7 @@ Also, look over the tests for more thorough [examples](https://github.com/adamha
### Create Zip Archive from multiple files
```C#
using(var archive = ZipArchive.Create())
using(var archive = ZipArchive.CreateArchive())
{
archive.AddEntry("file01.txt", "C:\\file01.txt");
archive.AddEntry("file02.txt", "C:\\file02.txt");
@@ -61,7 +61,7 @@ using(var archive = ZipArchive.Create())
### Create Zip Archive from all files in a directory to a file
```C#
using (var archive = ZipArchive.Create())
using (var archive = ZipArchive.CreateArchive())
{
archive.AddAllFromDirectory("D:\\temp");
archive.SaveTo("C:\\temp.zip", CompressionType.Deflate);
@@ -72,7 +72,7 @@ using (var archive = ZipArchive.Create())
```C#
var memoryStream = new MemoryStream();
using (var archive = ZipArchive.Create())
using (var archive = ZipArchive.CreateArchive())
{
archive.AddAllFromDirectory("D:\\temp");
archive.SaveTo(memoryStream, new WriterOptions(CompressionType.Deflate)
@@ -90,7 +90,7 @@ Note: Extracting a solid rar or 7z file needs to be done in sequential order to
`ExtractAllEntries` is primarily intended for solid archives (like solid Rar) or 7Zip archives, where sequential extraction provides the best performance. For general/simple extraction with any supported archive type, use `archive.WriteToDirectory()` instead.
```C#
using (var archive = RarArchive.Open("Test.rar"))
using (var archive = RarArchive.OpenArchive("Test.rar"))
{
// Simple extraction with RarArchive; this WriteToDirectory pattern works for all archive types
archive.WriteToDirectory(@"D:\temp", new ExtractionOptions()
@@ -104,7 +104,7 @@ using (var archive = RarArchive.Open("Test.rar"))
### Iterate over all files from a Rar file using RarArchive
```C#
using (var archive = RarArchive.Open("Test.rar"))
using (var archive = RarArchive.OpenArchive("Test.rar"))
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
@@ -126,7 +126,7 @@ var progress = new Progress<ProgressReport>(report =>
Console.WriteLine($"Extracting {report.EntryPath}: {report.PercentComplete}%");
});
using (var archive = RarArchive.Open("archive.rar", new ReaderOptions { Progress = progress })) // Must be solid Rar or 7Zip
using (var archive = RarArchive.OpenArchive("archive.rar", new ReaderOptions { Progress = progress })) // Must be solid Rar or 7Zip
{
archive.WriteToDirectory(@"D:\output", new ExtractionOptions()
{
@@ -140,7 +140,7 @@ using (var archive = RarArchive.Open("archive.rar", new ReaderOptions { Progress
```C#
using (Stream stream = File.OpenRead("Tar.tar.bz2"))
using (var reader = ReaderFactory.Open(stream))
using (var reader = ReaderFactory.OpenReader(stream))
{
while (reader.MoveToNextEntry())
{
@@ -161,7 +161,7 @@ using (var reader = ReaderFactory.Open(stream))
```C#
using (Stream stream = File.OpenRead("Tar.tar.bz2"))
using (var reader = ReaderFactory.Open(stream))
using (var reader = ReaderFactory.OpenReader(stream))
{
while (reader.MoveToNextEntry())
{
@@ -180,7 +180,7 @@ using (var reader = ReaderFactory.Open(stream))
```C#
using (Stream stream = File.OpenWrite("C:\\temp.tgz"))
using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, new WriterOptions(CompressionType.GZip)
using (var writer = WriterFactory.OpenWriter(stream, ArchiveType.Tar, new WriterOptions(CompressionType.GZip)
{
LeaveStreamOpen = true
}))
@@ -199,7 +199,7 @@ opts.ArchiveEncoding.CustomDecoder = (data, x, y) =>
{
return encoding.GetString(data);
};
var tr = SharpCompress.Archives.Zip.ZipArchive.Open("test.zip", opts);
var tr = SharpCompress.Archives.Zip.ZipArchive.OpenArchive("test.zip", opts);
foreach(var entry in tr.Entries)
{
Console.WriteLine($"{entry.Key}");
@@ -213,7 +213,7 @@ foreach(var entry in tr.Entries)
**Extract single entry asynchronously:**
```C#
using (Stream stream = File.OpenRead("archive.zip"))
using (var reader = ReaderFactory.Open(stream))
using (var reader = ReaderFactory.OpenReader(stream))
{
while (reader.MoveToNextEntry())
{
@@ -234,7 +234,7 @@ using (var reader = ReaderFactory.Open(stream))
**Extract all entries asynchronously:**
```C#
using (Stream stream = File.OpenRead("archive.tar.gz"))
using (var reader = ReaderFactory.Open(stream))
using (var reader = ReaderFactory.OpenReader(stream))
{
await reader.WriteAllToDirectoryAsync(
@"D:\temp",
@@ -250,7 +250,7 @@ using (var reader = ReaderFactory.Open(stream))
**Open and process entry stream asynchronously:**
```C#
using (var archive = ZipArchive.Open("archive.zip"))
using (var archive = ZipArchive.OpenArchive("archive.zip"))
{
foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
{
@@ -268,7 +268,7 @@ using (var archive = ZipArchive.Open("archive.zip"))
**Write single file asynchronously:**
```C#
using (Stream archiveStream = File.OpenWrite("output.zip"))
using (var writer = WriterFactory.Open(archiveStream, ArchiveType.Zip, CompressionType.Deflate))
using (var writer = WriterFactory.OpenWriter(archiveStream, ArchiveType.Zip, CompressionType.Deflate))
{
using (Stream fileStream = File.OpenRead("input.txt"))
{
@@ -280,7 +280,7 @@ using (var writer = WriterFactory.Open(archiveStream, ArchiveType.Zip, Compressi
**Write entire directory asynchronously:**
```C#
using (Stream stream = File.OpenWrite("backup.tar.gz"))
using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, new WriterOptions(CompressionType.GZip)))
using (var writer = WriterFactory.OpenWriter(stream, ArchiveType.Tar, new WriterOptions(CompressionType.GZip)))
{
await writer.WriteAllAsync(
@"D:\files",
@@ -299,7 +299,7 @@ var cts = new CancellationTokenSource();
cts.CancelAfter(TimeSpan.FromMinutes(5));
using (Stream stream = File.OpenWrite("archive.zip"))
using (var writer = WriterFactory.Open(stream, ArchiveType.Zip, CompressionType.Deflate))
using (var writer = WriterFactory.OpenWriter(stream, ArchiveType.Zip, CompressionType.Deflate))
{
try
{
@@ -316,7 +316,7 @@ using (var writer = WriterFactory.Open(stream, ArchiveType.Zip, CompressionType.
**Extract from archive asynchronously:**
```C#
using (var archive = ZipArchive.Open("archive.zip"))
using (var archive = ZipArchive.OpenArchive("archive.zip"))
{
// Simple async extraction - works for all archive types
await archive.WriteToDirectoryAsync(

View File

@@ -1,7 +1,7 @@
// Copyright (c) Six Labors.
// Licensed under the Apache License, Version 2.0.
#if !NETSTANDARD2_0 && !NETSTANDARD2_1 && !NETFRAMEWORK
#if !LEGACY_DOTNET
#define SUPPORTS_RUNTIME_INTRINSICS
#define SUPPORTS_HOTPATH
#endif

View File

@@ -0,0 +1,103 @@
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
public abstract partial class AbstractArchive<TEntry, TVolume>
where TEntry : IArchiveEntry
where TVolume : IVolume
{
#region Async Support
// Async properties
public virtual IAsyncEnumerable<TEntry> EntriesAsync => _lazyEntriesAsync;
public IAsyncEnumerable<TVolume> VolumesAsync => _lazyVolumesAsync;
protected virtual async IAsyncEnumerable<TEntry> LoadEntriesAsync(
IAsyncEnumerable<TVolume> volumes
)
{
foreach (var item in LoadEntries(await volumes.ToListAsync()))
{
yield return item;
}
}
public virtual async ValueTask DisposeAsync()
{
if (!_disposed)
{
await foreach (var v in _lazyVolumesAsync)
{
v.Dispose();
}
foreach (var v in _lazyEntriesAsync.GetLoaded().Cast<Entry>())
{
v.Close();
}
_sourceStream?.Dispose();
_disposed = true;
}
}
private async ValueTask EnsureEntriesLoadedAsync()
{
await _lazyEntriesAsync.EnsureFullyLoaded();
await _lazyVolumesAsync.EnsureFullyLoaded();
}
private async IAsyncEnumerable<IArchiveEntry> EntriesAsyncCast()
{
await foreach (var entry in EntriesAsync)
{
yield return entry;
}
}
IAsyncEnumerable<IArchiveEntry> IAsyncArchive.EntriesAsync => EntriesAsyncCast();
IAsyncEnumerable<IVolume> IAsyncArchive.VolumesAsync => VolumesAsyncCast();
private async IAsyncEnumerable<IVolume> VolumesAsyncCast()
{
await foreach (var volume in _lazyVolumesAsync)
{
yield return volume;
}
}
public async ValueTask<IAsyncReader> ExtractAllEntriesAsync()
{
if (!await IsSolidAsync() && Type != ArchiveType.SevenZip)
{
throw new SharpCompressException(
"ExtractAllEntries can only be used on solid archives or 7Zip archives (which require random access)."
);
}
await EnsureEntriesLoadedAsync();
return await CreateReaderForSolidExtractionAsync();
}
public virtual ValueTask<bool> IsSolidAsync() => new(false);
public async ValueTask<bool> IsCompleteAsync()
{
await EnsureEntriesLoadedAsync();
return await EntriesAsync.AllAsync(x => x.IsComplete);
}
public async ValueTask<long> TotalSizeAsync() =>
await EntriesAsync.AggregateAsync(0L, (total, cf) => total + cf.CompressedSize);
public async ValueTask<long> TotalUncompressedSizeAsync() =>
await EntriesAsync.AggregateAsync(0L, (total, cf) => total + cf.Size);
public ValueTask<bool> IsEncryptedAsync() => new(IsEncrypted);
#endregion
}
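// Usage sketch for the async surface above (illustrative; assumes a solid Rar
// archive so that ExtractAllEntriesAsync is permitted):
//
//   await using var archive = await ArchiveFactory.OpenAsyncArchive("solid.rar");
//   var reader = await archive.ExtractAllEntriesAsync();
//   while (await reader.MoveToNextEntryAsync())
//   {
//       // forward-only iteration: each entry is decompressed exactly once
//   }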

View File

@@ -7,7 +7,7 @@ using SharpCompress.Readers;
namespace SharpCompress.Archives;
public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IAsyncArchive
public abstract partial class AbstractArchive<TEntry, TVolume> : IArchive, IAsyncArchive
where TEntry : IArchiveEntry
where TVolume : IVolume
{
@@ -16,6 +16,10 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IAsyncArchive
private bool _disposed;
private readonly SourceStream? _sourceStream;
// Async fields - kept in original file per refactoring rules
private readonly LazyAsyncReadOnlyCollection<TVolume> _lazyVolumesAsync;
private readonly LazyAsyncReadOnlyCollection<TEntry> _lazyEntriesAsync;
protected ReaderOptions ReaderOptions { get; }
internal AbstractArchive(ArchiveType type, SourceStream sourceStream)
@@ -68,7 +72,7 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IAsyncArchive
/// <summary>
/// The total size of the files as uncompressed in the archive.
/// </summary>
public virtual long TotalUncompressSize =>
public virtual long TotalUncompressedSize =>
Entries.Aggregate(0L, (total, cf) => total + cf.Size);
protected abstract IEnumerable<TVolume> LoadVolumes(SourceStream sourceStream);
@@ -77,16 +81,6 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IAsyncArchive
protected virtual IAsyncEnumerable<TVolume> LoadVolumesAsync(SourceStream sourceStream) =>
LoadVolumes(sourceStream).ToAsyncEnumerable();
protected virtual async IAsyncEnumerable<TEntry> LoadEntriesAsync(
IAsyncEnumerable<TVolume> volumes
)
{
foreach (var item in LoadEntries(await volumes.ToListAsync()))
{
yield return item;
}
}
IEnumerable<IArchiveEntry> IArchive.Entries => Entries.Cast<IArchiveEntry>();
IEnumerable<IVolume> IArchive.Volumes => _lazyVolumes.Cast<IVolume>();
@@ -156,67 +150,4 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IAsyncArchive
return Entries.All(x => x.IsComplete);
}
}
#region Async Support
private readonly LazyAsyncReadOnlyCollection<TVolume> _lazyVolumesAsync;
private readonly LazyAsyncReadOnlyCollection<TEntry> _lazyEntriesAsync;
public virtual async ValueTask DisposeAsync()
{
if (!_disposed)
{
await foreach (var v in _lazyVolumesAsync)
{
v.Dispose();
}
foreach (var v in _lazyEntriesAsync.GetLoaded().Cast<Entry>())
{
v.Close();
}
_sourceStream?.Dispose();
_disposed = true;
}
}
private async ValueTask EnsureEntriesLoadedAsync()
{
await _lazyEntriesAsync.EnsureFullyLoaded();
await _lazyVolumesAsync.EnsureFullyLoaded();
}
public virtual IAsyncEnumerable<TEntry> EntriesAsync => _lazyEntriesAsync;
IAsyncEnumerable<IArchiveEntry> IAsyncArchive.EntriesAsync =>
EntriesAsync.Cast<TEntry, IArchiveEntry>();
public IAsyncEnumerable<IVolume> VolumesAsync => _lazyVolumesAsync.Cast<TVolume, IVolume>();
public async ValueTask<IAsyncReader> ExtractAllEntriesAsync()
{
if (!IsSolid && Type != ArchiveType.SevenZip)
{
throw new SharpCompressException(
"ExtractAllEntries can only be used on solid archives or 7Zip archives (which require random access)."
);
}
await EnsureEntriesLoadedAsync();
return await CreateReaderForSolidExtractionAsync();
}
public virtual ValueTask<bool> IsSolidAsync() => new(false);
public async ValueTask<bool> IsCompleteAsync()
{
await EnsureEntriesLoadedAsync();
return await EntriesAsync.All(x => x.IsComplete);
}
public async ValueTask<long> TotalSizeAsync() =>
await EntriesAsync.Aggregate(0L, (total, cf) => total + cf.CompressedSize);
public async ValueTask<long> TotalUncompressSizeAsync() =>
await EntriesAsync.Aggregate(0L, (total, cf) => total + cf.Size);
#endregion
}

View File

@@ -0,0 +1,123 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Writers;
namespace SharpCompress.Archives;
public abstract partial class AbstractWritableArchive<TEntry, TVolume>
where TEntry : IArchiveEntry
where TVolume : IVolume
{
// Async property moved from main file
private IAsyncEnumerable<TEntry> OldEntriesAsync =>
base.EntriesAsync.Where(x => !removedEntries.Contains(x));
private async ValueTask RebuildModifiedCollectionAsync()
{
if (pauseRebuilding)
{
return;
}
hasModifications = true;
newEntries.RemoveAll(v => removedEntries.Contains(v));
modifiedEntries.Clear();
await foreach (var entry in OldEntriesAsync)
{
modifiedEntries.Add(entry);
}
modifiedEntries.AddRange(newEntries);
}
public async ValueTask RemoveEntryAsync(TEntry entry)
{
if (!removedEntries.Contains(entry))
{
removedEntries.Add(entry);
await RebuildModifiedCollectionAsync();
}
}
private async ValueTask<bool> DoesKeyMatchExistingAsync(
string key,
CancellationToken cancellationToken
)
{
await foreach (
var entry in EntriesAsync.WithCancellation(cancellationToken).ConfigureAwait(false)
)
{
var path = entry.Key;
if (path is null)
{
continue;
}
var p = path.Replace('/', '\\');
if (p.Length > 0 && p[0] == '\\')
{
p = p.Substring(1);
}
if (string.Equals(p, key, StringComparison.OrdinalIgnoreCase))
{
return true;
}
}
return false;
}
public async ValueTask<TEntry> AddEntryAsync(
string key,
Stream source,
bool closeStream,
long size = 0,
DateTime? modified = null,
CancellationToken cancellationToken = default
)
{
if (key.Length > 0 && key[0] is '/' or '\\')
{
key = key.Substring(1);
}
if (await DoesKeyMatchExistingAsync(key, cancellationToken).ConfigureAwait(false))
{
throw new ArchiveException("Cannot add entry with duplicate key: " + key);
}
var entry = CreateEntry(key, source, size, modified, closeStream);
newEntries.Add(entry);
await RebuildModifiedCollectionAsync();
return entry;
}
public async ValueTask<TEntry> AddDirectoryEntryAsync(
string key,
DateTime? modified = null,
CancellationToken cancellationToken = default
)
{
if (key.Length > 0 && key[0] is '/' or '\\')
{
key = key.Substring(1);
}
if (await DoesKeyMatchExistingAsync(key, cancellationToken).ConfigureAwait(false))
{
throw new ArchiveException("Cannot add entry with duplicate key: " + key);
}
var entry = CreateDirectoryEntry(key, modified);
newEntries.Add(entry);
await RebuildModifiedCollectionAsync();
return entry;
}
public async ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
CancellationToken cancellationToken = default
)
{
//reset streams of new entries
newEntries.Cast<IWritableArchiveEntry>().ForEach(x => x.Stream.Seek(0, SeekOrigin.Begin));
await SaveToAsync(stream, options, OldEntriesAsync, newEntries, cancellationToken)
.ConfigureAwait(false);
}
}
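// Usage sketch for the async write surface above (illustrative only):
//
//   var archive = ZipArchive.CreateArchive();
//   using var source = File.OpenRead(@"C:\source\file.txt");
//   await archive.AddEntryAsync("file.txt", source, closeStream: false);
//   using var output = File.Create("output.zip");
//   await archive.SaveToAsync(output, new WriterOptions(CompressionType.Deflate));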

View File

@@ -10,9 +10,10 @@ using SharpCompress.Writers;
namespace SharpCompress.Archives;
public abstract class AbstractWritableArchive<TEntry, TVolume>
public abstract partial class AbstractWritableArchive<TEntry, TVolume>
: AbstractArchive<TEntry, TVolume>,
IWritableArchive
IWritableArchive,
IWritableAsyncArchive
where TEntry : IArchiveEntry
where TVolume : IVolume
{
@@ -139,6 +140,24 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
return false;
}
ValueTask IWritableAsyncArchive.RemoveEntryAsync(IArchiveEntry entry) =>
RemoveEntryAsync((TEntry)entry);
async ValueTask<IArchiveEntry> IWritableAsyncArchive.AddEntryAsync(
string key,
Stream source,
bool closeStream,
long size,
DateTime? modified,
CancellationToken cancellationToken
) => await AddEntryAsync(key, source, closeStream, size, modified, cancellationToken);
async ValueTask<IArchiveEntry> IWritableAsyncArchive.AddDirectoryEntryAsync(
string key,
DateTime? modified,
CancellationToken cancellationToken
) => await AddDirectoryEntryAsync(key, modified, cancellationToken);
public TEntry AddDirectoryEntry(string key, DateTime? modified = null)
{
if (key.Length > 0 && key[0] is '/' or '\\')
@@ -162,18 +181,6 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
SaveTo(stream, options, OldEntries, newEntries);
}
public async ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
CancellationToken cancellationToken = default
)
{
//reset streams of new entries
newEntries.Cast<IWritableArchiveEntry>().ForEach(x => x.Stream.Seek(0, SeekOrigin.Begin));
await SaveToAsync(stream, options, OldEntries, newEntries, cancellationToken)
.ConfigureAwait(false);
}
protected TEntry CreateEntry(
string key,
Stream source,
@@ -211,7 +218,7 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
protected abstract ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
IEnumerable<TEntry> oldEntries,
IAsyncEnumerable<TEntry> oldEntries,
IEnumerable<TEntry> newEntries,
CancellationToken cancellationToken = default
);

View File

@@ -0,0 +1,157 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Factories;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
public static partial class ArchiveFactory
{
public static async ValueTask<IAsyncArchive> OpenAsyncArchive(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
readerOptions ??= new ReaderOptions();
var factory = await FindFactoryAsync<IArchiveFactory>(stream, cancellationToken);
return factory.OpenAsyncArchive(stream, readerOptions);
}
public static ValueTask<IAsyncArchive> OpenAsyncArchive(
string filePath,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
filePath.NotNullOrEmpty(nameof(filePath));
return OpenAsyncArchive(new FileInfo(filePath), options, cancellationToken);
}
public static async ValueTask<IAsyncArchive> OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
options ??= new ReaderOptions { LeaveStreamOpen = false };
var factory = await FindFactoryAsync<IArchiveFactory>(fileInfo, cancellationToken);
return factory.OpenAsyncArchive(fileInfo, options);
}
public static async ValueTask<IAsyncArchive> OpenAsyncArchive(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
fileInfos.NotNull(nameof(fileInfos));
var filesArray = fileInfos.ToArray();
if (filesArray.Length == 0)
{
throw new InvalidOperationException("No files to open");
}
var fileInfo = filesArray[0];
if (filesArray.Length == 1)
{
return await OpenAsyncArchive(fileInfo, options, cancellationToken);
}
fileInfo.NotNull(nameof(fileInfo));
options ??= new ReaderOptions { LeaveStreamOpen = false };
var factory = await FindFactoryAsync<IMultiArchiveFactory>(fileInfo, cancellationToken);
return factory.OpenAsyncArchive(filesArray, options, cancellationToken);
}
public static async ValueTask<IAsyncArchive> OpenAsyncArchive(
IEnumerable<Stream> streams,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
streams.NotNull(nameof(streams));
var streamsArray = streams.ToArray();
if (streamsArray.Length == 0)
{
throw new InvalidOperationException("No streams");
}
var firstStream = streamsArray[0];
if (streamsArray.Length == 1)
{
return await OpenAsyncArchive(firstStream, options, cancellationToken);
}
firstStream.NotNull(nameof(firstStream));
options ??= new ReaderOptions();
var factory = await FindFactoryAsync<IMultiArchiveFactory>(firstStream, cancellationToken);
return factory.OpenAsyncArchive(streamsArray, options);
}
public static ValueTask<T> FindFactoryAsync<T>(
string path,
CancellationToken cancellationToken = default
)
where T : IFactory
{
path.NotNullOrEmpty(nameof(path));
return FindFactoryAsync<T>(new FileInfo(path), cancellationToken);
}
private static async ValueTask<T> FindFactoryAsync<T>(
FileInfo finfo,
CancellationToken cancellationToken
)
where T : IFactory
{
finfo.NotNull(nameof(finfo));
using Stream stream = finfo.OpenRead();
return await FindFactoryAsync<T>(stream, cancellationToken);
}
private static async ValueTask<T> FindFactoryAsync<T>(
Stream stream,
CancellationToken cancellationToken
)
where T : IFactory
{
stream.NotNull(nameof(stream));
if (!stream.CanRead || !stream.CanSeek)
{
throw new ArgumentException("Stream should be readable and seekable");
}
var factories = Factory.Factories.OfType<T>();
var startPosition = stream.Position;
foreach (var factory in factories)
{
stream.Seek(startPosition, SeekOrigin.Begin);
if (await factory.IsArchiveAsync(stream, cancellationToken: cancellationToken))
{
stream.Seek(startPosition, SeekOrigin.Begin);
return factory;
}
}
var extensions = string.Join(", ", factories.Select(item => item.Name));
throw new InvalidOperationException(
$"Cannot determine compressed stream type. Supported Archive Formats: {extensions}"
);
}
}
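// Usage sketch (illustrative): auto-detect the format, then extract asynchronously.
//
//   await using var archive = await ArchiveFactory.OpenAsyncArchive("archive.zip");
//   await archive.WriteToDirectoryAsync(
//       @"C:\output",
//       new ExtractionOptions { ExtractFullPath = true, Overwrite = true });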

View File

@@ -11,44 +11,15 @@ using SharpCompress.Readers;
namespace SharpCompress.Archives;
public static class ArchiveFactory
public static partial class ArchiveFactory
{
/// <summary>
/// Opens an Archive for random access
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <returns></returns>
public static IArchive Open(Stream stream, ReaderOptions? readerOptions = null)
public static IArchive OpenArchive(Stream stream, ReaderOptions? readerOptions = null)
{
readerOptions ??= new ReaderOptions();
stream = SharpCompressStream.Create(stream, bufferSize: readerOptions.BufferSize);
return FindFactory<IArchiveFactory>(stream).Open(stream, readerOptions);
return FindFactory<IArchiveFactory>(stream).OpenArchive(stream, readerOptions);
}
/// <summary>
/// Opens an Archive for random access asynchronously
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
/// <returns></returns>
public static async ValueTask<IAsyncArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
readerOptions ??= new ReaderOptions();
stream = SharpCompressStream.Create(stream, bufferSize: readerOptions.BufferSize);
var factory = await FindFactoryAsync<IArchiveFactory>(stream, cancellationToken)
.ConfigureAwait(false);
return await factory
.OpenAsync(stream, readerOptions, cancellationToken)
.ConfigureAwait(false);
}
public static IWritableArchive Create(ArchiveType type)
public static IWritableArchive CreateArchive(ArchiveType type)
{
var factory = Factory
.Factories.OfType<IWriteableArchiveFactory>()
@@ -56,106 +27,28 @@ public static class ArchiveFactory
if (factory != null)
{
return factory.CreateWriteableArchive();
return factory.CreateArchive();
}
throw new NotSupportedException("Cannot create Archives of type: " + type);
}
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="options"></param>
public static IArchive Open(string filePath, ReaderOptions? options = null)
public static IArchive OpenArchive(string filePath, ReaderOptions? options = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), options);
return OpenArchive(new FileInfo(filePath), options);
}
/// <summary>
/// Opens an Archive from a filepath asynchronously.
/// </summary>
/// <param name="filePath"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
string filePath,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
filePath.NotNullOrEmpty(nameof(filePath));
return OpenAsync(new FileInfo(filePath), options, cancellationToken);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
public static IArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
public static IArchive OpenArchive(FileInfo fileInfo, ReaderOptions? options = null)
{
options ??= new ReaderOptions { LeaveStreamOpen = false };
return FindFactory<IArchiveFactory>(fileInfo).Open(fileInfo, options);
return FindFactory<IArchiveFactory>(fileInfo).OpenArchive(fileInfo, options);
}
/// <summary>
/// Opens an Archive from a FileInfo object asynchronously.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
public static async ValueTask<IAsyncArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
options ??= new ReaderOptions { LeaveStreamOpen = false };
var factory = await FindFactoryAsync<IArchiveFactory>(fileInfo, cancellationToken)
.ConfigureAwait(false);
return await factory.OpenAsync(fileInfo, options, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Constructor with IEnumerable FileInfo objects, multi and split support.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="options"></param>
public static IArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? options = null)
{
fileInfos.NotNull(nameof(fileInfos));
var filesArray = fileInfos.ToArray();
if (filesArray.Length == 0)
{
throw new InvalidOperationException("No files to open");
}
var fileInfo = filesArray[0];
if (filesArray.Length == 1)
{
return Open(fileInfo, options);
}
fileInfo.NotNull(nameof(fileInfo));
options ??= new ReaderOptions { LeaveStreamOpen = false };
return FindFactory<IMultiArchiveFactory>(fileInfo).Open(filesArray, options);
}
/// <summary>
/// Opens a multi-part archive from files asynchronously.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
public static async ValueTask<IAsyncArchive> OpenAsync(
public static IArchive OpenArchive(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
ReaderOptions? options = null
)
{
fileInfos.NotNull(nameof(fileInfos));
@@ -168,24 +61,16 @@ public static class ArchiveFactory
var fileInfo = filesArray[0];
if (filesArray.Length == 1)
{
return await OpenAsync(fileInfo, options, cancellationToken).ConfigureAwait(false);
return OpenArchive(fileInfo, options);
}
fileInfo.NotNull(nameof(fileInfo));
options ??= new ReaderOptions { LeaveStreamOpen = false };
var factory = FindFactory<IMultiArchiveFactory>(fileInfo);
return await factory
.OpenAsync(filesArray, options, cancellationToken)
.ConfigureAwait(false);
return FindFactory<IMultiArchiveFactory>(fileInfo).OpenArchive(filesArray, options);
}
/// <summary>
/// Constructor with IEnumerable Stream objects, multi and split support.
/// </summary>
/// <param name="streams"></param>
/// <param name="options"></param>
public static IArchive Open(IEnumerable<Stream> streams, ReaderOptions? options = null)
public static IArchive OpenArchive(IEnumerable<Stream> streams, ReaderOptions? options = null)
{
streams.NotNull(nameof(streams));
var streamsArray = streams.ToArray();
@@ -197,64 +82,34 @@ public static class ArchiveFactory
var firstStream = streamsArray[0];
if (streamsArray.Length == 1)
{
return Open(firstStream, options);
return OpenArchive(firstStream, options);
}
firstStream.NotNull(nameof(firstStream));
options ??= new ReaderOptions();
return FindFactory<IMultiArchiveFactory>(firstStream).Open(streamsArray, options);
return FindFactory<IMultiArchiveFactory>(firstStream).OpenArchive(streamsArray, options);
}
/// <summary>
/// Opens a multi-part archive from streams asynchronously.
/// </summary>
/// <param name="streams"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
public static async ValueTask<IAsyncArchive> OpenAsync(
IEnumerable<Stream> streams,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
streams.NotNull(nameof(streams));
var streamsArray = streams.ToArray();
if (streamsArray.Length == 0)
{
throw new InvalidOperationException("No streams");
}
var firstStream = streamsArray[0];
if (streamsArray.Length == 1)
{
return await OpenAsync(firstStream, options, cancellationToken).ConfigureAwait(false);
}
firstStream.NotNull(nameof(firstStream));
options ??= new ReaderOptions();
var factory = FindFactory<IMultiArchiveFactory>(firstStream);
return await factory
.OpenAsync(streamsArray, options, cancellationToken)
.ConfigureAwait(false);
}
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteToDirectory(
string sourceArchive,
string destinationDirectory,
ExtractionOptions? options = null
)
{
using var archive = Open(sourceArchive);
using var archive = OpenArchive(sourceArchive);
archive.WriteToDirectory(destinationDirectory, options);
}
private static T FindFactory<T>(FileInfo finfo)
public static T FindFactory<T>(string path)
where T : IFactory
{
path.NotNullOrEmpty(nameof(path));
using Stream stream = File.OpenRead(path);
return FindFactory<T>(stream);
}
public static T FindFactory<T>(FileInfo finfo)
where T : IFactory
{
finfo.NotNull(nameof(finfo));
@@ -262,7 +117,7 @@ public static class ArchiveFactory
return FindFactory<T>(stream);
}
private static T FindFactory<T>(Stream stream)
public static T FindFactory<T>(Stream stream)
where T : IFactory
{
stream.NotNull(nameof(stream));
@@ -294,68 +149,14 @@ public static class ArchiveFactory
);
}
private static async ValueTask<T> FindFactoryAsync<T>(
FileInfo finfo,
CancellationToken cancellationToken
)
where T : IFactory
{
finfo.NotNull(nameof(finfo));
using Stream stream = finfo.OpenRead();
return await FindFactoryAsync<T>(stream, cancellationToken);
}
private static async ValueTask<T> FindFactoryAsync<T>(
Stream stream,
CancellationToken cancellationToken
)
where T : IFactory
{
stream.NotNull(nameof(stream));
if (!stream.CanRead || !stream.CanSeek)
{
throw new ArgumentException("Stream should be readable and seekable");
}
var factories = Factory.Factories.OfType<T>();
var startPosition = stream.Position;
foreach (var factory in factories)
{
stream.Seek(startPosition, SeekOrigin.Begin);
if (await factory.IsArchiveAsync(stream, cancellationToken: cancellationToken))
{
stream.Seek(startPosition, SeekOrigin.Begin);
return factory;
}
}
var extensions = string.Join(", ", factories.Select(item => item.Name));
throw new InvalidOperationException(
$"Cannot determine compressed stream type. Supported Archive Formats: {extensions}"
);
}
public static bool IsArchive(
string filePath,
out ArchiveType? type,
int bufferSize = ReaderOptions.DefaultBufferSize
)
public static bool IsArchive(string filePath, out ArchiveType? type)
{
filePath.NotNullOrEmpty(nameof(filePath));
using Stream s = File.OpenRead(filePath);
return IsArchive(s, out type, bufferSize);
return IsArchive(s, out type);
}
public static bool IsArchive(
Stream stream,
out ArchiveType? type,
int bufferSize = ReaderOptions.DefaultBufferSize
)
public static bool IsArchive(Stream stream, out ArchiveType? type)
{
type = null;
stream.NotNull(nameof(stream));
@@ -382,22 +183,12 @@ public static class ArchiveFactory
return false;
}
/// <summary>
/// From a passed in archive (zip, rar, 7z, 001), return all parts.
/// </summary>
/// <param name="part1"></param>
/// <returns></returns>
public static IEnumerable<string> GetFileParts(string part1)
{
part1.NotNullOrEmpty(nameof(part1));
return GetFileParts(new FileInfo(part1)).Select(a => a.FullName);
}
/// <summary>
/// From a passed in archive (zip, rar, 7z, 001), return all parts.
/// </summary>
/// <param name="part1"></param>
/// <returns></returns>
public static IEnumerable<FileInfo> GetFileParts(FileInfo part1)
{
part1.NotNull(nameof(part1));
@@ -411,7 +202,7 @@ public static class ArchiveFactory
if (part != null)
{
yield return part;
while ((part = factory.GetFilePart(i++, part1)) != null) //tests split too
while ((part = factory.GetFilePart(i++, part1)) != null)
{
yield return part;
}
@@ -420,6 +211,4 @@ public static class ArchiveFactory
}
}
}
public static IArchiveFactory AutoFactory { get; } = new AutoArchiveFactory();
}
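// Usage sketch (illustrative): probe a file, then enumerate its volume parts.
//
//   if (ArchiveFactory.IsArchive("backup.001", out var type))
//   {
//       foreach (var part in ArchiveFactory.GetFileParts("backup.001"))
//       {
//           Console.WriteLine(part); // backup.001, backup.002, ... while parts exist
//       }
//   }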

View File

@@ -13,6 +13,7 @@ internal abstract class ArchiveVolumeFactory
//split 001, 002 ...
var m = Regex.Match(part1.Name, @"^(.*\.)([0-9]+)$", RegexOptions.IgnoreCase);
if (m.Success)
{
item = new FileInfo(
Path.Combine(
part1.DirectoryName!,
@@ -22,9 +23,13 @@ internal abstract class ArchiveVolumeFactory
)
)
);
}
if (item != null && item.Exists)
{
return item;
}
return null;
}
}
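// Illustrative walk-through: for part1 named "archive.001" the regex captures
// prefix "archive." and number "001"; successive probes then look for
// "archive.002", "archive.003", ... and stop at the first missing file.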

View File

@@ -1,51 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
internal class AutoArchiveFactory : IArchiveFactory
{
public string Name => nameof(AutoArchiveFactory);
public ArchiveType? KnownArchiveType => null;
public IEnumerable<string> GetSupportedExtensions() => throw new NotSupportedException();
public bool IsArchive(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => throw new NotSupportedException();
public ValueTask<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
) => throw new NotSupportedException();
public FileInfo? GetFilePart(int index, FileInfo part1) => throw new NotSupportedException();
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
ArchiveFactory.Open(stream, readerOptions);
public async ValueTask<IAsyncArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => await ArchiveFactory.OpenAsync(stream, readerOptions, cancellationToken);
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
ArchiveFactory.Open(fileInfo, readerOptions);
public async ValueTask<IAsyncArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => await ArchiveFactory.OpenAsync(fileInfo, readerOptions, cancellationToken);
}


@@ -0,0 +1,86 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.GZip;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.GZip;
using SharpCompress.Writers;
using SharpCompress.Writers.GZip;
namespace SharpCompress.Archives.GZip;
public partial class GZipArchive
{
public ValueTask SaveToAsync(string filePath, CancellationToken cancellationToken = default) =>
SaveToAsync(new FileInfo(filePath), cancellationToken);
public async ValueTask SaveToAsync(
FileInfo fileInfo,
CancellationToken cancellationToken = default
)
{
using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
await SaveToAsync(stream, new WriterOptions(CompressionType.GZip), cancellationToken)
.ConfigureAwait(false);
}
protected override async ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
IAsyncEnumerable<GZipArchiveEntry> oldEntries,
IEnumerable<GZipArchiveEntry> newEntries,
CancellationToken cancellationToken = default
)
{
if (Entries.Count > 1)
{
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
}
using var writer = new GZipWriter(stream, new GZipWriterOptions(options));
await foreach (
var entry in oldEntries.WithCancellation(cancellationToken).ConfigureAwait(false)
)
{
if (!entry.IsDirectory)
{
using var entryStream = entry.OpenEntryStream();
await writer
.WriteAsync(
entry.Key.NotNull("Entry Key is null"),
entryStream,
cancellationToken
)
.ConfigureAwait(false);
}
}
foreach (var entry in newEntries.Where(x => !x.IsDirectory))
{
using var entryStream = entry.OpenEntryStream();
await writer
.WriteAsync(entry.Key.NotNull("Entry Key is null"), entryStream, cancellationToken)
.ConfigureAwait(false);
}
}
protected override ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync()
{
var stream = Volumes.Single().Stream;
stream.Position = 0;
return new((IAsyncReader)GZipReader.OpenReader(stream));
}
protected override async IAsyncEnumerable<GZipArchiveEntry> LoadEntriesAsync(
IAsyncEnumerable<GZipVolume> volumes
)
{
var stream = (await volumes.SingleAsync()).Stream;
yield return new GZipArchiveEntry(
this,
await GZipFilePart.CreateAsync(stream, ReaderOptions.ArchiveEncoding)
);
}
}
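
A rough sketch of the async save path above, using only members visible in this changeset (file names are hypothetical; GZip permits a single entry):

using System.IO;
using SharpCompress.Archives;
using SharpCompress.Archives.GZip;
using SharpCompress.Common;
using SharpCompress.Writers;

await using var archive = GZipArchive.CreateAsyncArchive();
await archive.AddEntryAsync("data.txt", new FileInfo("data.txt"));
using (var output = File.Create("data.txt.gz"))
{
    // SaveToAsync drives the GZipWriter shown in the partial above.
    await archive.SaveToAsync(output, new WriterOptions(CompressionType.GZip));
}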


@@ -0,0 +1,201 @@
using System;
using System.Buffers;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.GZip;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.GZip;
using SharpCompress.Writers;
using SharpCompress.Writers.GZip;
namespace SharpCompress.Archives.GZip;
public partial class GZipArchive
#if NET8_0_OR_GREATER
: IWritableArchiveOpenable,
IMultiArchiveOpenable<IWritableArchive, IWritableAsyncArchive>
#endif
{
public static IWritableAsyncArchive OpenAsyncArchive(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty(nameof(path));
return (IWritableAsyncArchive)OpenArchive(
new FileInfo(path),
readerOptions ?? new ReaderOptions()
);
}
public static IWritableArchive OpenArchive(string filePath, ReaderOptions? readerOptions = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
return OpenArchive(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
public static IWritableArchive OpenArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null
)
{
fileInfo.NotNull(nameof(fileInfo));
return new GZipArchive(
new SourceStream(
fileInfo,
i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
readerOptions ?? new ReaderOptions()
)
);
}
public static IWritableArchive OpenArchive(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
)
{
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
return new GZipArchive(
new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
public static IWritableArchive OpenArchive(
IEnumerable<Stream> streams,
ReaderOptions? readerOptions = null
)
{
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
return new GZipArchive(
new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
public static IWritableArchive OpenArchive(Stream stream, ReaderOptions? readerOptions = null)
{
stream.NotNull(nameof(stream));
if (stream is not { CanSeek: true })
{
throw new ArgumentException("Stream must be seekable", nameof(stream));
}
return new GZipArchive(
new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
);
}
public static IWritableAsyncArchive OpenAsyncArchive(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)OpenArchive(stream, readerOptions);
}
public static IWritableAsyncArchive OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)OpenArchive(fileInfo, readerOptions);
}
public static IWritableAsyncArchive OpenAsyncArchive(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)OpenArchive(streams, readerOptions);
}
public static IWritableAsyncArchive OpenAsyncArchive(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)OpenArchive(fileInfos, readerOptions);
}
public static IWritableArchive CreateArchive() => new GZipArchive();
public static IWritableAsyncArchive CreateAsyncArchive() => new GZipArchive();
public static bool IsGZipFile(string filePath) => IsGZipFile(new FileInfo(filePath));
public static bool IsGZipFile(FileInfo fileInfo)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return IsGZipFile(stream);
}
public static bool IsGZipFile(Stream stream)
{
Span<byte> header = stackalloc byte[10];
if (!stream.ReadFully(header))
{
return false;
}
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
{
return false;
}
return true;
}
public static async ValueTask<bool> IsGZipFileAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
var header = ArrayPool<byte>.Shared.Rent(10);
try
{
await stream.ReadFullyAsync(header, 0, 10, cancellationToken).ConfigureAwait(false);
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
{
return false;
}
return true;
}
finally
{
ArrayPool<byte>.Shared.Return(header);
}
}
}
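
The header probe above accepts exactly the gzip magic bytes (0x1F, 0x8B) followed by compression method 8 (deflate), read from a 10-byte member header. A quick self-contained check:

using System.IO;
using SharpCompress.Archives.GZip;

// A valid 10-byte gzip header: magic, method 8, then flags/mtime/XFL/OS zeroed.
var header = new byte[] { 0x1F, 0x8B, 0x08, 0, 0, 0, 0, 0, 0, 0 };
using var ms = new MemoryStream(header);
var ok = GZipArchive.IsGZipFile(ms); // true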


@@ -14,186 +14,20 @@ using SharpCompress.Writers.GZip;
namespace SharpCompress.Archives.GZip;
-public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
+public partial class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
{
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
public static GZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static GZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.NotNull(nameof(fileInfo));
return new GZipArchive(
new SourceStream(
fileInfo,
i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
readerOptions ?? new ReaderOptions()
)
);
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
public static GZipArchive Open(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
)
{
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
return new GZipArchive(
new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
public static GZipArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
{
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
return new GZipArchive(
new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static GZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.NotNull(nameof(stream));
if (stream is not { CanSeek: true })
{
throw new ArgumentException("Stream must be seekable", nameof(stream));
}
return new GZipArchive(
new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
);
}
/// <summary>
/// Opens a GZipArchive asynchronously from a stream.
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(stream, readerOptions));
}
/// <summary>
/// Opens a GZipArchive asynchronously from a FileInfo.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(fileInfo, readerOptions));
}
/// <summary>
/// Opens a GZipArchive asynchronously from multiple streams.
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(streams, readerOptions));
}
/// <summary>
/// Opens a GZipArchive asynchronously from multiple FileInfo objects.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(fileInfos, readerOptions));
}
public static GZipArchive Create() => new();
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="sourceStream"></param>
private GZipArchive(SourceStream sourceStream)
: base(ArchiveType.GZip, sourceStream) { }
internal GZipArchive()
: base(ArchiveType.GZip) { }
protected override IEnumerable<GZipVolume> LoadVolumes(SourceStream sourceStream)
{
sourceStream.LoadAllParts();
return sourceStream.Streams.Select(a => new GZipVolume(a, ReaderOptions, 0));
}
public static bool IsGZipFile(string filePath) => IsGZipFile(new FileInfo(filePath));
public static bool IsGZipFile(FileInfo fileInfo)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return IsGZipFile(stream);
}
public void SaveTo(string filePath) => SaveTo(new FileInfo(filePath));
public void SaveTo(FileInfo fileInfo)
@@ -202,63 +36,6 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
SaveTo(stream, new WriterOptions(CompressionType.GZip));
}
public ValueTask SaveToAsync(string filePath, CancellationToken cancellationToken = default) =>
SaveToAsync(new FileInfo(filePath), cancellationToken);
public async ValueTask SaveToAsync(
FileInfo fileInfo,
CancellationToken cancellationToken = default
)
{
using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
await SaveToAsync(stream, new WriterOptions(CompressionType.GZip), cancellationToken)
.ConfigureAwait(false);
}
public static bool IsGZipFile(Stream stream)
{
// read the header on the first read
Span<byte> header = stackalloc byte[10];
// workitem 8501: handle edge case (decompress empty stream)
if (!stream.ReadFully(header))
{
return false;
}
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
{
return false;
}
return true;
}
public static async ValueTask<bool> IsGZipFileAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
// read the header on the first read
byte[] header = new byte[10];
// workitem 8501: handle edge case (decompress empty stream)
if (!await stream.ReadFullyAsync(header, cancellationToken).ConfigureAwait(false))
{
return false;
}
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
{
return false;
}
return true;
}
internal GZipArchive()
: base(ArchiveType.GZip) { }
protected override GZipArchiveEntry CreateEntryInternal(
string filePath,
Stream source,
@@ -302,34 +79,12 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
}
}
protected override async ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
IEnumerable<GZipArchiveEntry> oldEntries,
IEnumerable<GZipArchiveEntry> newEntries,
CancellationToken cancellationToken = default
)
{
if (Entries.Count > 1)
{
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
}
using var writer = new GZipWriter(stream, new GZipWriterOptions(options));
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
{
using var entryStream = entry.OpenEntryStream();
await writer
.WriteAsync(entry.Key.NotNull("Entry Key is null"), entryStream, cancellationToken)
.ConfigureAwait(false);
}
}
protected override IEnumerable<GZipArchiveEntry> LoadEntries(IEnumerable<GZipVolume> volumes)
{
var stream = volumes.Single().Stream;
yield return new GZipArchiveEntry(
this,
-new GZipFilePart(stream, ReaderOptions.ArchiveEncoding)
+GZipFilePart.Create(stream, ReaderOptions.ArchiveEncoding)
);
}
@@ -337,13 +92,6 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
{
var stream = Volumes.Single().Stream;
stream.Position = 0;
-return GZipReader.Open(stream);
-}
-protected override ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync()
-{
-var stream = Volumes.Single().Stream;
-stream.Position = 0;
-return new(GZipReader.Open(stream));
+return GZipReader.OpenReader(stream);
}
}


@@ -23,12 +23,10 @@ public class GZipArchiveEntry : GZipEntry, IArchiveEntry
return Parts.Single().GetCompressedStream().NotNull();
}
-public async ValueTask<Stream> OpenEntryStreamAsync(
-CancellationToken cancellationToken = default
-)
+public ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
{
// GZip synchronous implementation is fast enough, just wrap it
-return OpenEntryStream();
+return new(OpenEntryStream());
}
#region IArchiveEntry Members


@@ -58,7 +58,7 @@ internal sealed class GZipWritableArchiveEntry : GZipArchiveEntry, IWritableArch
{
//ensure new stream is at the start, this could be reset
stream.Seek(0, SeekOrigin.Begin);
-return SharpCompressStream.Create(stream, leaveOpen: true);
+return SharpCompressStream.CreateNonDisposing(stream);
}
internal override void Close()


@@ -38,5 +38,10 @@ public interface IArchive : IDisposable
/// <summary>
/// The total size of the files as uncompressed in the archive.
/// </summary>
-long TotalUncompressSize { get; }
+long TotalUncompressedSize { get; }
/// <summary>
/// Returns whether the archive is encrypted.
/// </summary>
bool IsEncrypted { get; }
}


@@ -9,8 +9,6 @@ namespace SharpCompress.Archives;
public static class IArchiveEntryExtensions
{
private const int BufferSize = 81920;
/// <param name="archiveEntry">The archive entry to extract.</param>
extension(IArchiveEntry archiveEntry)
{
@@ -28,7 +26,7 @@ public static class IArchiveEntryExtensions
using var entryStream = archiveEntry.OpenEntryStream();
var sourceStream = WrapWithProgress(entryStream, archiveEntry, progress);
-sourceStream.CopyTo(streamToWriteTo, BufferSize);
+sourceStream.CopyTo(streamToWriteTo, Constants.BufferSize);
}
/// <summary>
@@ -48,10 +46,16 @@ public static class IArchiveEntryExtensions
throw new ExtractionException("Entry is a file directory and cannot be extracted.");
}
#if LEGACY_DOTNET
using var entryStream = await archiveEntry.OpenEntryStreamAsync(cancellationToken);
#else
await using var entryStream = await archiveEntry.OpenEntryStreamAsync(
cancellationToken
);
#endif
var sourceStream = WrapWithProgress(entryStream, archiveEntry, progress);
await sourceStream
-.CopyToAsync(streamToWriteTo, BufferSize, cancellationToken)
+.CopyToAsync(streamToWriteTo, Constants.BufferSize, cancellationToken)
.ConfigureAwait(false);
}
}
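
For orientation, a hypothetical sketch of per-entry extraction with progress via the extension members above (the file name is invented, and a WriteTo overload taking a progress reporter is assumed from the visible method body):

using System;
using System.IO;
using SharpCompress.Archives;
using SharpCompress.Archives.GZip;
using SharpCompress.Common;

using var archive = GZipArchive.OpenArchive("example.gz");
var progress = new Progress<ProgressReport>(r => Console.WriteLine(r));
foreach (var entry in archive.Entries)
{
    if (entry.IsDirectory) continue;
    using var output = File.Create(Path.GetFileName(entry.Key ?? "entry.bin"));
    entry.WriteTo(output, progress);
}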


@@ -8,7 +8,6 @@ namespace SharpCompress.Archives;
public static class IArchiveExtensions
{
/// <param name="archive">The archive to extract.</param>
extension(IArchive archive)
{
/// <summary>
@@ -23,7 +22,6 @@ public static class IArchiveExtensions
IProgress<ProgressReport>? progress = null
)
{
// For solid archives (Rar, 7Zip), use the optimized reader-based approach
if (archive.IsSolid || archive.Type == ArchiveType.SevenZip)
{
using var reader = archive.ExtractAllEntries();
@@ -31,7 +29,6 @@ public static class IArchiveExtensions
}
else
{
// For non-solid archives, extract entries directly
archive.WriteToDirectoryInternal(destinationDirectory, options, progress);
}
}
@@ -42,14 +39,10 @@ public static class IArchiveExtensions
IProgress<ProgressReport>? progress
)
{
// Prepare for progress reporting
-var totalBytes = archive.TotalUncompressSize;
+var totalBytes = archive.TotalUncompressedSize;
var bytesRead = 0L;
// Tracking for created directories.
var seenDirectories = new HashSet<string>();
// Extract
foreach (var entry in archive.Entries)
{
if (entry.IsDirectory)
@@ -68,10 +61,8 @@ public static class IArchiveExtensions
continue;
}
// Use the entry's WriteToDirectory method which respects ExtractionOptions
entry.WriteToDirectory(destinationDirectory, options);
// Update progress
bytesRead += entry.Size;
progress?.Report(
new ProgressReport(entry.Key ?? string.Empty, bytesRead, totalBytes)


@@ -1,6 +1,5 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Factories;
using SharpCompress.Readers;
@@ -26,26 +25,21 @@ public interface IArchiveFactory : IFactory
/// </summary>
/// <param name="stream">An open, readable and seekable stream.</param>
/// <param name="readerOptions">reading options.</param>
-IArchive Open(Stream stream, ReaderOptions? readerOptions = null);
+IArchive OpenArchive(Stream stream, ReaderOptions? readerOptions = null);
/// <summary>
/// Opens an Archive for random access asynchronously.
/// </summary>
/// <param name="stream">An open, readable and seekable stream.</param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
-ValueTask<IAsyncArchive> OpenAsync(
-Stream stream,
-ReaderOptions? readerOptions = null,
-CancellationToken cancellationToken = default
-);
+IAsyncArchive OpenAsyncArchive(Stream stream, ReaderOptions? readerOptions = null);
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo">the file to open.</param>
/// <param name="readerOptions">reading options.</param>
-IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null);
+IArchive OpenArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null);
/// <summary>
/// Opens an Archive from a FileInfo object asynchronously.
@@ -53,9 +47,5 @@ public interface IArchiveFactory : IFactory
/// <param name="fileInfo">the file to open.</param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
-ValueTask<IAsyncArchive> OpenAsync(
-FileInfo fileInfo,
-ReaderOptions? readerOptions = null,
-CancellationToken cancellationToken = default
-);
+IAsyncArchive OpenAsyncArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null);
}


@@ -0,0 +1,40 @@
#if NET8_0_OR_GREATER
using System.IO;
using System.Threading;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
public interface IArchiveOpenable<TSync, TASync>
where TSync : IArchive
where TASync : IAsyncArchive
{
public static abstract TSync OpenArchive(string filePath, ReaderOptions? readerOptions = null);
public static abstract TSync OpenArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null
);
public static abstract TSync OpenArchive(Stream stream, ReaderOptions? readerOptions = null);
public static abstract TASync OpenAsyncArchive(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
public static abstract TASync OpenAsyncArchive(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
public static abstract TASync OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
}
#endif
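
The static-abstract members make generic, reflection-free open helpers possible on net8.0+. A hypothetical example (GenericOpener is not part of the library):

#if NET8_0_OR_GREATER
using System.IO;
using SharpCompress.Readers;

namespace SharpCompress.Archives;

internal static class GenericOpener
{
    // Opens any archive type implementing IArchiveOpenable without naming the
    // concrete class at the call site, e.g.
    // GenericOpener.OpenAny<RarArchive, IRarArchive, IRarAsyncArchive>("a.rar").
    public static TSync OpenAny<TArchive, TSync, TASync>(
        string filePath,
        ReaderOptions? readerOptions = null
    )
        where TArchive : IArchiveOpenable<TSync, TASync>
        where TSync : IArchive
        where TASync : IAsyncArchive
        => TArchive.OpenArchive(filePath, readerOptions);
}
#endif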


@@ -39,5 +39,10 @@ public interface IAsyncArchive : IAsyncDisposable
/// <summary>
/// The total size of the files as uncompressed in the archive.
/// </summary>
ValueTask<long> TotalUncompressSizeAsync();
ValueTask<long> TotalUncompressedSizeAsync();
/// <summary>
/// Returns whether the archive is encrypted.
/// </summary>
ValueTask<bool> IsEncryptedAsync();
}


@@ -10,84 +10,83 @@ namespace SharpCompress.Archives;
public static class IAsyncArchiveExtensions
{
-/// <summary>
-/// Extract to specific directory asynchronously with progress reporting and cancellation support
-/// </summary>
-/// <param name="archive">The archive to extract.</param>
-/// <param name="destinationDirectory">The folder to extract into.</param>
-/// <param name="options">Extraction options.</param>
-/// <param name="progress">Optional progress reporter for tracking extraction progress.</param>
-/// <param name="cancellationToken">Optional cancellation token.</param>
-public static async Task WriteToDirectoryAsync(
-this IAsyncArchive archive,
-string destinationDirectory,
-ExtractionOptions? options = null,
-IProgress<ProgressReport>? progress = null,
-CancellationToken cancellationToken = default
-)
+extension(IAsyncArchive archive)
{
-// For solid archives (Rar, 7Zip), use the optimized reader-based approach
-if (await archive.IsSolidAsync() || archive.Type == ArchiveType.SevenZip)
+/// <summary>
+/// Extract to specific directory asynchronously with progress reporting and cancellation support
+/// </summary>
+/// <param name="archive">The archive to extract.</param>
+/// <param name="destinationDirectory">The folder to extract into.</param>
+/// <param name="options">Extraction options.</param>
+/// <param name="progress">Optional progress reporter for tracking extraction progress.</param>
+/// <param name="cancellationToken">Optional cancellation token.</param>
+public async ValueTask WriteToDirectoryAsync(
+string destinationDirectory,
+ExtractionOptions? options = null,
+IProgress<ProgressReport>? progress = null,
+CancellationToken cancellationToken = default
+)
{
-await using var reader = await archive.ExtractAllEntriesAsync();
-await reader.WriteAllToDirectoryAsync(destinationDirectory, options, cancellationToken);
-}
-else
-{
-// For non-solid archives, extract entries directly
-await archive.WriteToDirectoryAsyncInternal(
-destinationDirectory,
-options,
-progress,
-cancellationToken
-);
-}
-}
-private static async Task WriteToDirectoryAsyncInternal(
-this IAsyncArchive archive,
-string destinationDirectory,
-ExtractionOptions? options,
-IProgress<ProgressReport>? progress,
-CancellationToken cancellationToken
-)
-{
-// Prepare for progress reporting
-var totalBytes = await archive.TotalUncompressSizeAsync();
-var bytesRead = 0L;
-// Tracking for created directories.
-var seenDirectories = new HashSet<string>();
-// Extract
-await foreach (var entry in archive.EntriesAsync.WithCancellation(cancellationToken))
-{
-cancellationToken.ThrowIfCancellationRequested();
-if (entry.IsDirectory)
+if (await archive.IsSolidAsync() || archive.Type == ArchiveType.SevenZip)
{
-var dirPath = Path.Combine(
+await using var reader = await archive.ExtractAllEntriesAsync();
+await reader.WriteAllToDirectoryAsync(
destinationDirectory,
-entry.Key.NotNull("Entry Key is null")
+options,
+cancellationToken
);
-if (
-Path.GetDirectoryName(dirPath + "/") is { } parentDirectory
-&& seenDirectories.Add(dirPath)
-)
-{
-Directory.CreateDirectory(parentDirectory);
-}
-continue;
}
+else
+{
+await archive.WriteToDirectoryAsyncInternal(
+destinationDirectory,
+options,
+progress,
+cancellationToken
+);
+}
+}
-// Use the entry's WriteToDirectoryAsync method which respects ExtractionOptions
-await entry
-.WriteToDirectoryAsync(destinationDirectory, options, cancellationToken)
-.ConfigureAwait(false);
+private async ValueTask WriteToDirectoryAsyncInternal(
+string destinationDirectory,
+ExtractionOptions? options,
+IProgress<ProgressReport>? progress,
+CancellationToken cancellationToken
+)
+{
+var totalBytes = await archive.TotalUncompressedSizeAsync();
+var bytesRead = 0L;
+var seenDirectories = new HashSet<string>();
-// Update progress
-bytesRead += entry.Size;
-progress?.Report(new ProgressReport(entry.Key ?? string.Empty, bytesRead, totalBytes));
+await foreach (var entry in archive.EntriesAsync.WithCancellation(cancellationToken))
+{
+cancellationToken.ThrowIfCancellationRequested();
+if (entry.IsDirectory)
+{
+var dirPath = Path.Combine(
+destinationDirectory,
+entry.Key.NotNull("Entry Key is null")
+);
+if (
+Path.GetDirectoryName(dirPath + "/") is { } parentDirectory
+&& seenDirectories.Add(dirPath)
+)
+{
+Directory.CreateDirectory(parentDirectory);
+}
+continue;
+}
+await entry
+.WriteToDirectoryAsync(destinationDirectory, options, cancellationToken)
+.ConfigureAwait(false);
+bytesRead += entry.Size;
+progress?.Report(
+new ProgressReport(entry.Key ?? string.Empty, bytesRead, totalBytes)
+);
+}
+}
+}
}


@@ -1,7 +1,6 @@
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Factories;
using SharpCompress.Readers;
@@ -27,18 +26,16 @@ public interface IMultiArchiveFactory : IFactory
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions">reading options.</param>
-IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null);
+IArchive OpenArchive(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null);
/// <summary>
/// Opens a multi-part archive from streams asynchronously.
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
-ValueTask<IAsyncArchive> OpenAsync(
+IAsyncArchive OpenAsyncArchive(
IReadOnlyList<Stream> streams,
-ReaderOptions? readerOptions = null,
-CancellationToken cancellationToken = default
+ReaderOptions? readerOptions = null
);
/// <summary>
@@ -46,7 +43,7 @@ public interface IMultiArchiveFactory : IFactory
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions">reading options.</param>
-IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null);
+IArchive OpenArchive(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null);
/// <summary>
/// Opens a multi-part archive from files asynchronously.
@@ -54,7 +51,7 @@ public interface IMultiArchiveFactory : IFactory
/// <param name="fileInfos"></param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
-ValueTask<IAsyncArchive> OpenAsync(
+IAsyncArchive OpenAsyncArchive(
IReadOnlyList<FileInfo> fileInfos,
-ReaderOptions? readerOptions = null,
-CancellationToken cancellationToken = default
+ReaderOptions? readerOptions = null


@@ -0,0 +1,35 @@
#if NET8_0_OR_GREATER
using System.Collections.Generic;
using System.IO;
using System.Threading;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
public interface IMultiArchiveOpenable<TSync, TASync>
where TSync : IArchive
where TASync : IAsyncArchive
{
public static abstract TSync OpenArchive(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
);
public static abstract TSync OpenArchive(
IEnumerable<Stream> streams,
ReaderOptions? readerOptions = null
);
public static abstract TASync OpenAsyncArchive(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
public static abstract TASync OpenAsyncArchive(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
}
#endif


@@ -6,10 +6,17 @@ using SharpCompress.Writers;
namespace SharpCompress.Archives;
-public interface IWritableArchive : IArchive
+public interface IWritableArchiveCommon
{
void RemoveEntry(IArchiveEntry entry);
/// <summary>
/// Use this to pause entry rebuilding when adding large collections of entries. Dispose when complete. A using statement is recommended.
/// </summary>
/// <returns>IDisposable to resume entry rebuilding</returns>
IDisposable PauseEntryRebuilding();
}
public interface IWritableArchive : IArchive, IWritableArchiveCommon
{
IArchiveEntry AddEntry(
string key,
Stream source,
@@ -20,8 +27,22 @@ public interface IWritableArchive : IArchive
IArchiveEntry AddDirectoryEntry(string key, DateTime? modified = null);
/// <summary>
/// Saves the archive to the specified stream using the given writer options.
/// </summary>
void SaveTo(Stream stream, WriterOptions options);
/// <summary>
/// Removes the specified entry from the archive.
/// </summary>
void RemoveEntry(IArchiveEntry entry);
}
public interface IWritableAsyncArchive : IAsyncArchive, IWritableArchiveCommon
{
/// <summary>
/// Asynchronously saves the archive to the specified stream using the given writer options.
/// </summary>
ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
@@ -29,8 +50,28 @@ public interface IWritableArchive : IArchive
);
/// <summary>
-/// Use this to pause entry rebuilding when adding large collections of entries. Dispose when complete. A using statement is recommended.
+/// Asynchronously adds an entry to the archive with the specified key, source stream, and options.
/// </summary>
-/// <returns>IDisposeable to resume entry rebuilding</returns>
-IDisposable PauseEntryRebuilding();
ValueTask<IArchiveEntry> AddEntryAsync(
string key,
Stream source,
bool closeStream,
long size = 0,
DateTime? modified = null,
CancellationToken cancellationToken = default
);
/// <summary>
/// Asynchronously adds a directory entry to the archive with the specified key and modification time.
/// </summary>
ValueTask<IArchiveEntry> AddDirectoryEntryAsync(
string key,
DateTime? modified = null,
CancellationToken cancellationToken = default
);
/// <summary>
/// Removes the specified entry from the archive.
/// </summary>
ValueTask RemoveEntryAsync(IArchiveEntry entry);
}
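
A sketch of the split synchronous surface (paths hypothetical): PauseEntryRebuilding comes from the shared IWritableArchiveCommon, while AddEntry/SaveTo stay on IWritableArchive.

using System.IO;
using SharpCompress.Archives;
using SharpCompress.Archives.GZip;
using SharpCompress.Common;
using SharpCompress.Writers;

using var archive = GZipArchive.CreateArchive();
using (archive.PauseEntryRebuilding()) // suspend per-add rebuilding during bulk inserts
{
    archive.AddEntry("big.bin", new FileInfo("big.bin"));
}
archive.SaveTo("big.bin.gz", new WriterOptions(CompressionType.GZip));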


@@ -1,106 +1,70 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Writers;
namespace SharpCompress.Archives;
public static class IWritableArchiveExtensions
{
-public static void AddEntry(
-this IWritableArchive writableArchive,
-string entryPath,
-string filePath
-)
+extension(IWritableArchive writableArchive)
{
-var fileInfo = new FileInfo(filePath);
-if (!fileInfo.Exists)
+public void AddAllFromDirectory(
+string filePath,
+string searchPattern = "*.*",
+SearchOption searchOption = SearchOption.AllDirectories
+)
{
-throw new FileNotFoundException("Could not AddEntry: " + filePath);
-}
-writableArchive.AddEntry(
-entryPath,
-new FileInfo(filePath).OpenRead(),
-true,
-fileInfo.Length,
-fileInfo.LastWriteTime
-);
-}
-public static void SaveTo(
-this IWritableArchive writableArchive,
-string filePath,
-WriterOptions options
-) => writableArchive.SaveTo(new FileInfo(filePath), options);
-public static void SaveTo(
-this IWritableArchive writableArchive,
-FileInfo fileInfo,
-WriterOptions options
-)
-{
-using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
-writableArchive.SaveTo(stream, options);
-}
-public static ValueTask SaveToAsync(
-this IWritableArchive writableArchive,
-string filePath,
-WriterOptions options,
-CancellationToken cancellationToken = default
-) => writableArchive.SaveToAsync(new FileInfo(filePath), options, cancellationToken);
-public static async ValueTask SaveToAsync(
-this IWritableArchive writableArchive,
-FileInfo fileInfo,
-WriterOptions options,
-CancellationToken cancellationToken = default
-)
-{
-using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
-await writableArchive.SaveToAsync(stream, options, cancellationToken).ConfigureAwait(false);
-}
-public static void AddAllFromDirectory(
-this IWritableArchive writableArchive,
-string filePath,
-string searchPattern = "*.*",
-SearchOption searchOption = SearchOption.AllDirectories
-)
-{
-using (writableArchive.PauseEntryRebuilding())
-{
-foreach (var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption))
+using (writableArchive.PauseEntryRebuilding())
+{
-var fileInfo = new FileInfo(path);
-writableArchive.AddEntry(
-path.Substring(filePath.Length),
-fileInfo.OpenRead(),
-true,
-fileInfo.Length,
-fileInfo.LastWriteTime
-);
+foreach (
+var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption)
+)
+{
+var fileInfo = new FileInfo(path);
+writableArchive.AddEntry(
+path.Substring(filePath.Length),
+fileInfo.OpenRead(),
+true,
+fileInfo.Length,
+fileInfo.LastWriteTime
+);
+}
}
}
}
-public static IArchiveEntry AddEntry(
-this IWritableArchive writableArchive,
-string key,
-FileInfo fileInfo
-)
-{
-if (!fileInfo.Exists)
+public IArchiveEntry AddEntry(string key, string file) =>
+writableArchive.AddEntry(key, new FileInfo(file));
+public IArchiveEntry AddEntry(
+string key,
+Stream source,
+long size = 0,
+DateTime? modified = null
+) => writableArchive.AddEntry(key, source, false, size, modified);
+public IArchiveEntry AddEntry(string key, FileInfo fileInfo)
{
-throw new ArgumentException("FileInfo does not exist.");
+if (!fileInfo.Exists)
+{
+throw new ArgumentException("FileInfo does not exist.");
+}
+return writableArchive.AddEntry(
+key,
+fileInfo.OpenRead(),
+true,
+fileInfo.Length,
+fileInfo.LastWriteTime
+);
+}
+public void SaveTo(string filePath, WriterOptions? options = null) =>
+writableArchive.SaveTo(new FileInfo(filePath), options ?? new(CompressionType.Deflate));
+public void SaveTo(FileInfo fileInfo, WriterOptions? options = null)
+{
+using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
+writableArchive.SaveTo(stream, options ?? new(CompressionType.Deflate));
+}
-return writableArchive.AddEntry(
-key,
-fileInfo.OpenRead(),
-true,
-fileInfo.Length,
-fileInfo.LastWriteTime
-);
-}
}


@@ -0,0 +1,10 @@
#if NET8_0_OR_GREATER
namespace SharpCompress.Archives;
public interface IWritableArchiveOpenable
: IArchiveOpenable<IWritableArchive, IWritableAsyncArchive>
{
public static abstract IWritableArchive CreateArchive();
public static abstract IWritableAsyncArchive CreateAsyncArchive();
}
#endif


@@ -0,0 +1,86 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Writers;
namespace SharpCompress.Archives;
public static class IWritableAsyncArchiveExtensions
{
extension(IWritableAsyncArchive writableArchive)
{
public async ValueTask AddAllFromDirectoryAsync(
string filePath,
string searchPattern = "*.*",
SearchOption searchOption = SearchOption.AllDirectories
)
{
using (writableArchive.PauseEntryRebuilding())
{
foreach (
var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption)
)
{
var fileInfo = new FileInfo(path);
await writableArchive.AddEntryAsync(
path.Substring(filePath.Length),
fileInfo.OpenRead(),
true,
fileInfo.Length,
fileInfo.LastWriteTime
);
}
}
}
public ValueTask<IArchiveEntry> AddEntryAsync(string key, string file) =>
writableArchive.AddEntryAsync(key, new FileInfo(file));
public ValueTask<IArchiveEntry> AddEntryAsync(
string key,
Stream source,
long size = 0,
DateTime? modified = null
) => writableArchive.AddEntryAsync(key, source, false, size, modified);
public ValueTask<IArchiveEntry> AddEntryAsync(string key, FileInfo fileInfo)
{
if (!fileInfo.Exists)
{
throw new ArgumentException("FileInfo does not exist.");
}
return writableArchive.AddEntryAsync(
key,
fileInfo.OpenRead(),
true,
fileInfo.Length,
fileInfo.LastWriteTime
);
}
public ValueTask SaveToAsync(
string filePath,
WriterOptions? options = null,
CancellationToken cancellationToken = default
) =>
writableArchive.SaveToAsync(
new FileInfo(filePath),
options ?? new(CompressionType.Deflate),
cancellationToken
);
public async ValueTask SaveToAsync(
FileInfo fileInfo,
WriterOptions? options = null,
CancellationToken cancellationToken = default
)
{
using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
await writableArchive
.SaveToAsync(stream, options ?? new(CompressionType.Deflate), cancellationToken)
.ConfigureAwait(false);
}
}
}
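
The async counterpart, using the extension members just defined (paths hypothetical; GZip still permits only one entry):

using System.IO;
using SharpCompress.Archives;
using SharpCompress.Archives.GZip;
using SharpCompress.Common;
using SharpCompress.Writers;

await using var archive = GZipArchive.CreateAsyncArchive();
await archive.AddEntryAsync("report.json", new FileInfo("report.json"));
await archive.SaveToAsync("report.json.gz", new WriterOptions(CompressionType.GZip));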


@@ -16,5 +16,5 @@ public interface IWriteableArchiveFactory : Factories.IFactory
/// Creates a new, empty archive, ready to be written.
/// </summary>
/// <returns></returns>
IWritableArchive CreateWriteableArchive();
IWritableArchive CreateArchive();
}


@@ -36,4 +36,7 @@ internal class FileInfoRarArchiveVolume : RarVolume
new FileInfoRarFilePart(this, ReaderOptions.Password, markHeader, fileHeader, FileInfo);
internal override IEnumerable<RarFilePart> ReadFileParts() => FileParts;
internal override IAsyncEnumerable<RarFilePart> ReadFilePartsAsync() =>
FileParts.ToAsyncEnumerable();
}


@@ -0,0 +1,53 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives.Rar;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.Rar;
namespace SharpCompress.Archives.Rar;
public partial class RarArchive
{
public override async ValueTask DisposeAsync()
{
if (!_disposed)
{
if (UnpackV1.IsValueCreated && UnpackV1.Value is IDisposable unpackV1)
{
unpackV1.Dispose();
}
_disposed = true;
await base.DisposeAsync();
}
}
protected override async ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync()
{
if (await this.IsMultipartVolumeAsync())
{
var streams = await VolumesAsync
.Select(volume =>
{
volume.Stream.Position = 0;
return volume.Stream;
})
.ToListAsync();
return (RarReader)RarReader.OpenReader(streams, ReaderOptions);
}
var stream = (await VolumesAsync.FirstAsync()).Stream;
stream.Position = 0;
return (RarReader)RarReader.OpenReader(stream, ReaderOptions);
}
public override async ValueTask<bool> IsSolidAsync() =>
await (await VolumesAsync.CastAsync<RarVolume>().FirstAsync()).IsSolidArchiveAsync();
}


@@ -1,18 +1,36 @@
using System.Linq;
+using System.Threading.Tasks;
using SharpCompress.Common.Rar;
namespace SharpCompress.Archives.Rar;
public static class RarArchiveExtensions
{
-/// <summary>
-/// RarArchive is the first volume of a multi-part archive. If MultipartVolume is true and IsFirstVolume is false then the first volume file must be missing.
-/// </summary>
-public static bool IsFirstVolume(this RarArchive archive) =>
-archive.Volumes.First().IsFirstVolume;
+extension(IRarArchive archive)
+{
+/// <summary>
+/// RarArchive is the first volume of a multi-part archive. If MultipartVolume is true and IsFirstVolume is false then the first volume file must be missing.
+/// </summary>
+public bool IsFirstVolume() => archive.Volumes.Cast<RarVolume>().First().IsFirstVolume;
-/// <summary>
-/// RarArchive is part of a multi-part archive.
-/// </summary>
-public static bool IsMultipartVolume(this RarArchive archive) =>
-archive.Volumes.First().IsMultiVolume;
+/// <summary>
+/// RarArchive is part of a multi-part archive.
+/// </summary>
+public bool IsMultipartVolume() => archive.Volumes.Cast<RarVolume>().First().IsMultiVolume;
+}
+extension(IRarAsyncArchive archive)
+{
+/// <summary>
+/// RarArchive is the first volume of a multi-part archive. If MultipartVolume is true and IsFirstVolume is false then the first volume file must be missing.
+/// </summary>
+public async ValueTask<bool> IsFirstVolumeAsync() =>
+(await archive.VolumesAsync.CastAsync<RarVolume>().FirstAsync()).IsFirstVolume;
+/// <summary>
+/// RarArchive is part of a multi-part archive.
+/// </summary>
+public async ValueTask<bool> IsMultipartVolumeAsync() =>
+(await archive.VolumesAsync.CastAsync<RarVolume>().FirstAsync()).IsMultiVolume;
+}
}
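
Usage sketch for the volume checks above (the file name is hypothetical):

using System;
using SharpCompress.Archives.Rar;

using var archive = RarArchive.OpenArchive("movie.part02.rar");
if (archive.IsMultipartVolume() && !archive.IsFirstVolume())
{
    Console.WriteLine("Opened a later volume; movie.part01.rar appears to be missing.");
}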


@@ -0,0 +1,187 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Compressors.Rar;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.Rar;
namespace SharpCompress.Archives.Rar;
public partial class RarArchive
#if NET8_0_OR_GREATER
: IArchiveOpenable<IRarArchive, IRarAsyncArchive>,
IMultiArchiveOpenable<IRarArchive, IRarAsyncArchive>
#endif
{
public static IRarAsyncArchive OpenAsyncArchive(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty(nameof(path));
return (IRarAsyncArchive)OpenArchive(new FileInfo(path), readerOptions);
}
public static IRarArchive OpenArchive(string filePath, ReaderOptions? options = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
var fileInfo = new FileInfo(filePath);
return new RarArchive(
new SourceStream(
fileInfo,
i => RarArchiveVolumeFactory.GetFilePart(i, fileInfo),
options ?? new ReaderOptions()
)
);
}
public static IRarArchive OpenArchive(FileInfo fileInfo, ReaderOptions? options = null)
{
fileInfo.NotNull(nameof(fileInfo));
return new RarArchive(
new SourceStream(
fileInfo,
i => RarArchiveVolumeFactory.GetFilePart(i, fileInfo),
options ?? new ReaderOptions()
)
);
}
public static IRarArchive OpenArchive(Stream stream, ReaderOptions? options = null)
{
stream.NotNull(nameof(stream));
if (stream is not { CanSeek: true })
{
throw new ArgumentException("Stream must be seekable", nameof(stream));
}
return new RarArchive(new SourceStream(stream, _ => null, options ?? new ReaderOptions()));
}
public static IRarArchive OpenArchive(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
)
{
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
return new RarArchive(
new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
public static IRarArchive OpenArchive(
IEnumerable<Stream> streams,
ReaderOptions? readerOptions = null
)
{
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
return new RarArchive(
new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
public static IRarAsyncArchive OpenAsyncArchive(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IRarAsyncArchive)OpenArchive(stream, readerOptions);
}
public static IRarAsyncArchive OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IRarAsyncArchive)OpenArchive(fileInfo, readerOptions);
}
public static IRarAsyncArchive OpenAsyncArchive(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IRarAsyncArchive)OpenArchive(streams, readerOptions);
}
public static IRarAsyncArchive OpenAsyncArchive(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IRarAsyncArchive)OpenArchive(fileInfos, readerOptions);
}
public static bool IsRarFile(string filePath) => IsRarFile(new FileInfo(filePath));
public static bool IsRarFile(FileInfo fileInfo)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return IsRarFile(stream);
}
public static bool IsRarFile(Stream stream, ReaderOptions? options = null)
{
try
{
MarkHeader.Read(stream, true, false);
return true;
}
catch
{
return false;
}
}
public static async ValueTask<bool> IsRarFileAsync(
Stream stream,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
await MarkHeader
.ReadAsync(stream, true, false, cancellationToken)
.ConfigureAwait(false);
return true;
}
catch
{
return false;
}
}
}
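
A hypothetical multi-part open combining the probes above: IsRarFile reads the RAR mark header, and the IEnumerable<FileInfo> overload bypasses automatic part discovery.

using System.IO;
using SharpCompress.Archives.Rar;

if (RarArchive.IsRarFile("archive.part01.rar"))
{
    var parts = new[]
    {
        new FileInfo("archive.part01.rar"),
        new FileInfo("archive.part02.rar"),
    };
    using var archive = RarArchive.OpenArchive(parts);
}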


@@ -14,17 +14,26 @@ using SharpCompress.Readers.Rar;
namespace SharpCompress.Archives.Rar;
-public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
+public interface IRarArchiveCommon
{
int MinVersion { get; }
int MaxVersion { get; }
}
public interface IRarArchive : IArchive, IRarArchiveCommon { }
public interface IRarAsyncArchive : IAsyncArchive, IRarArchiveCommon { }
public partial class RarArchive
: AbstractArchive<RarArchiveEntry, RarVolume>,
IRarArchive,
IRarAsyncArchive
{
private bool _disposed;
internal Lazy<IRarUnpack> UnpackV2017 { get; } =
new(() => new Compressors.Rar.UnpackV2017.Unpack());
internal Lazy<IRarUnpack> UnpackV1 { get; } = new(() => new Compressors.Rar.UnpackV1.Unpack());
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="sourceStream"></param>
private RarArchive(SourceStream sourceStream)
: base(ArchiveType.Rar, sourceStream) { }
@@ -45,12 +54,17 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
protected override IEnumerable<RarArchiveEntry> LoadEntries(IEnumerable<RarVolume> volumes) =>
RarArchiveEntryFactory.GetEntries(this, volumes, ReaderOptions);
// Simple async property - kept in original file
protected override IAsyncEnumerable<RarArchiveEntry> LoadEntriesAsync(
IAsyncEnumerable<RarVolume> volumes
) => RarArchiveEntryFactory.GetEntriesAsync(this, volumes, ReaderOptions);
protected override IEnumerable<RarVolume> LoadVolumes(SourceStream sourceStream)
{
-sourceStream.LoadAllParts(); //request all streams
+sourceStream.LoadAllParts();
var streams = sourceStream.Streams.ToArray();
var i = 0;
-if (streams.Length > 1 && IsRarFile(streams[1], ReaderOptions)) //test part 2 - true = multipart not split
+if (streams.Length > 1 && IsRarFile(streams[1], ReaderOptions))
{
sourceStream.IsVolumes = true;
streams[1].Position = 0;
@@ -63,17 +77,10 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
));
}
//split mode or single file
return new StreamRarArchiveVolume(sourceStream, ReaderOptions, i++).AsEnumerable();
}
-protected override IReader CreateReaderForSolidExtraction() =>
-CreateReaderForSolidExtractionInternal();
-protected override ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync() =>
-new(CreateReaderForSolidExtractionInternal());
-private RarReader CreateReaderForSolidExtractionInternal()
+protected override IReader CreateReaderForSolidExtraction()
{
if (this.IsMultipartVolume())
{
@@ -82,12 +89,12 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
volume.Stream.Position = 0;
return volume.Stream;
});
-return RarReader.Open(streams, ReaderOptions);
+return (RarReader)RarReader.OpenReader(streams, ReaderOptions);
}
var stream = Volumes.First().Stream;
stream.Position = 0;
-return RarReader.Open(stream, ReaderOptions);
+return (RarReader)RarReader.OpenReader(stream, ReaderOptions);
}
public override bool IsSolid => Volumes.First().IsSolidArchive;
@@ -95,188 +102,6 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
public override bool IsEncrypted => Entries.First(x => !x.IsDirectory).IsEncrypted;
public virtual int MinVersion => Volumes.First().MinVersion;
public virtual int MaxVersion => Volumes.First().MaxVersion;
#region Creation
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="options"></param>
public static RarArchive Open(string filePath, ReaderOptions? options = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
var fileInfo = new FileInfo(filePath);
return new RarArchive(
new SourceStream(
fileInfo,
i => RarArchiveVolumeFactory.GetFilePart(i, fileInfo),
options ?? new ReaderOptions()
)
);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
public static RarArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
{
fileInfo.NotNull(nameof(fileInfo));
return new RarArchive(
new SourceStream(
fileInfo,
i => RarArchiveVolumeFactory.GetFilePart(i, fileInfo),
options ?? new ReaderOptions()
)
);
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="options"></param>
public static RarArchive Open(Stream stream, ReaderOptions? options = null)
{
stream.NotNull(nameof(stream));
if (stream is not { CanSeek: true })
{
throw new ArgumentException("Stream must be seekable", nameof(stream));
}
return new RarArchive(new SourceStream(stream, _ => null, options ?? new ReaderOptions()));
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
public static RarArchive Open(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
)
{
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
return new RarArchive(
new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
public static RarArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
{
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
return new RarArchive(
new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
/// <summary>
/// Opens a RarArchive asynchronously from a stream.
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(stream, readerOptions));
}
/// <summary>
/// Opens a RarArchive asynchronously from a FileInfo.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(fileInfo, readerOptions));
}
/// <summary>
/// Opens a RarArchive asynchronously from multiple streams.
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(streams, readerOptions));
}
/// <summary>
/// Opens a RarArchive asynchronously from multiple FileInfo objects.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(fileInfos, readerOptions));
}
public static bool IsRarFile(string filePath) => IsRarFile(new FileInfo(filePath));
public static bool IsRarFile(FileInfo fileInfo)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return IsRarFile(stream);
}
public static bool IsRarFile(Stream stream, ReaderOptions? options = null)
{
try
{
MarkHeader.Read(stream, true, false);
return true;
}
catch
{
return false;
}
}
#endregion
}


@@ -0,0 +1,43 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Compressors.Rar;
using SharpCompress.Readers;
namespace SharpCompress.Archives.Rar;
public partial class RarArchiveEntry
{
public async ValueTask<Stream> OpenEntryStreamAsync(
CancellationToken cancellationToken = default
)
{
RarStream stream;
if (IsRarV3)
{
stream = new RarStream(
archive.UnpackV1.Value,
FileHeader,
await MultiVolumeReadOnlyAsyncStream.Create(
Parts.ToAsyncEnumerable().CastAsync<RarFilePart>()
)
);
}
else
{
stream = new RarStream(
archive.UnpackV2017.Value,
FileHeader,
await MultiVolumeReadOnlyAsyncStream.Create(
Parts.ToAsyncEnumerable().CastAsync<RarFilePart>()
)
);
}
await stream.InitializeAsync(cancellationToken);
return stream;
}
}
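
A sketch of async entry streaming through the partial above (the file name is hypothetical, and entries are assumed to surface as RarArchiveEntry so OpenEntryStreamAsync is reachable):

using System.IO;
using SharpCompress.Archives.Rar;

await using var archive = RarArchive.OpenAsyncArchive("data.rar");
await foreach (var entry in archive.EntriesAsync)
{
    if (entry.IsDirectory) continue;
    await using var input = await ((RarArchiveEntry)entry).OpenEntryStreamAsync();
    await using var output = File.Create(Path.GetFileName(entry.Key ?? "entry.bin"));
    await input.CopyToAsync(output);
}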


@@ -12,7 +12,7 @@ using SharpCompress.Readers;
namespace SharpCompress.Archives.Rar;
public class RarArchiveEntry : RarEntry, IArchiveEntry
public partial class RarArchiveEntry : RarEntry, IArchiveEntry
{
private readonly ICollection<RarFilePart> parts;
private readonly RarArchive archive;
@@ -92,32 +92,6 @@ public class RarArchiveEntry : RarEntry, IArchiveEntry
return stream;
}
public async ValueTask<Stream> OpenEntryStreamAsync(
CancellationToken cancellationToken = default
)
{
RarStream stream;
if (IsRarV3)
{
stream = new RarStream(
archive.UnpackV1.Value,
FileHeader,
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>())
);
}
else
{
stream = new RarStream(
archive.UnpackV2017.Value,
FileHeader,
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>())
);
}
await stream.InitializeAsync(cancellationToken);
return stream;
}
public bool IsComplete
{
get


@@ -17,6 +17,19 @@ internal static class RarArchiveEntryFactory
}
}
private static async IAsyncEnumerable<RarFilePart> GetFilePartsAsync(
IAsyncEnumerable<RarVolume> parts
)
{
await foreach (var rarPart in parts)
{
await foreach (var fp in rarPart.ReadFilePartsAsync())
{
yield return fp;
}
}
}
private static IEnumerable<IEnumerable<RarFilePart>> GetMatchedFileParts(
IEnumerable<RarVolume> parts
)
@@ -38,6 +51,27 @@ internal static class RarArchiveEntryFactory
}
}
private static async IAsyncEnumerable<IEnumerable<RarFilePart>> GetMatchedFilePartsAsync(
IAsyncEnumerable<RarVolume> parts
)
{
var groupedParts = new List<RarFilePart>();
await foreach (var fp in GetFilePartsAsync(parts))
{
groupedParts.Add(fp);
if (!fp.FileHeader.IsSplitAfter)
{
yield return groupedParts;
groupedParts = new List<RarFilePart>();
}
}
if (groupedParts.Count > 0)
{
yield return groupedParts;
}
}
internal static IEnumerable<RarArchiveEntry> GetEntries(
RarArchive archive,
IEnumerable<RarVolume> rarParts,
@@ -49,4 +83,16 @@ internal static class RarArchiveEntryFactory
yield return new RarArchiveEntry(archive, groupedParts, readerOptions);
}
}
internal static async IAsyncEnumerable<RarArchiveEntry> GetEntriesAsync(
RarArchive archive,
IAsyncEnumerable<RarVolume> rarParts,
ReaderOptions readerOptions
)
{
await foreach (var groupedParts in GetMatchedFilePartsAsync(rarParts))
{
yield return new RarArchiveEntry(archive, groupedParts, readerOptions);
}
}
}
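
The async grouping mirrors the sync path: parts accumulate until a header whose IsSplitAfter flag is cleared closes the group, and the trailing count check flushes a final, possibly truncated group. A worked example with illustrative parts:

// input:  a.p1 (IsSplitAfter), a.p2 (IsSplitAfter), a.p3, b.p1
// yields: [a.p1, a.p2, a.p3], then [b.p1]
// a final group whose last part still has IsSplitAfter set (e.g. a
// truncated archive) is flushed by the "groupedParts.Count > 0" check.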

View File

@@ -13,6 +13,7 @@ internal static class RarArchiveVolumeFactory
//new style rar - ..part1 | /part01 | part001 ....
var m = Regex.Match(part1.Name, @"^(.*\.part)([0-9]+)(\.rar)$", RegexOptions.IgnoreCase);
if (m.Success)
{
item = new FileInfo(
Path.Combine(
part1.DirectoryName!,
@@ -23,11 +24,13 @@ internal static class RarArchiveVolumeFactory
)
)
);
}
else
{
//old style - ...rar, .r00, .r01 ...
m = Regex.Match(part1.Name, @"^(.*\.)([r-z{])(ar|[0-9]+)$", RegexOptions.IgnoreCase);
if (m.Success)
{
item = new FileInfo(
Path.Combine(
part1.DirectoryName!,
@@ -40,12 +43,17 @@ internal static class RarArchiveVolumeFactory
)
)
);
}
else //split .001, .002 ....
{
return ArchiveVolumeFactory.GetFilePart(index, part1);
}
}
if (item != null && item.Exists)
{
return item;
}
return null; //no more items
}
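
The two regexes encode RAR's volume naming schemes; checked here against illustrative names:

// new style: archive.part01.rar, archive.part002.rar, ...
var isNewStyle = Regex.IsMatch("archive.part01.rar", @"^(.*\.part)([0-9]+)(\.rar)$", RegexOptions.IgnoreCase); // true
// old style: archive.rar, archive.r00, archive.r01, ...
var isOldStyle = Regex.IsMatch("archive.r00", @"^(.*\.)([r-z{])(ar|[0-9]+)$", RegexOptions.IgnoreCase); // true
// anything else (.001, .002, ...) falls through to
// ArchiveVolumeFactory.GetFilePart as a plain split archive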

View File

@@ -14,6 +14,9 @@ internal class StreamRarArchiveVolume : RarVolume
internal override IEnumerable<RarFilePart> ReadFileParts() => GetVolumeFileParts();
internal override IAsyncEnumerable<RarFilePart> ReadFilePartsAsync() =>
GetVolumeFilePartsAsync();
internal override RarFilePart CreateFilePart(MarkHeader markHeader, FileHeader fileHeader) =>
new SeekableFilePart(markHeader, fileHeader, Index, Stream, ReaderOptions.Password);
}

View File

@@ -0,0 +1,73 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.SevenZip;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives.SevenZip;
public partial class SevenZipArchive
{
private async ValueTask LoadFactoryAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
if (_database is null)
{
stream.Position = 0;
var reader = new ArchiveReader();
await reader.OpenAsync(
stream,
lookForHeader: ReaderOptions.LookForHeader,
cancellationToken
);
_database = await reader.ReadDatabaseAsync(
new PasswordProvider(ReaderOptions.Password),
cancellationToken
);
}
}
protected override async IAsyncEnumerable<SevenZipArchiveEntry> LoadEntriesAsync(
IAsyncEnumerable<SevenZipVolume> volumes
)
{
var stream = (await volumes.SingleAsync()).Stream;
await LoadFactoryAsync(stream);
if (_database is null)
{
yield break;
}
var entries = new SevenZipArchiveEntry[_database._files.Count];
for (var i = 0; i < _database._files.Count; i++)
{
var file = _database._files[i];
entries[i] = new SevenZipArchiveEntry(
this,
new SevenZipFilePart(stream, _database, i, file, ReaderOptions.ArchiveEncoding)
);
}
foreach (var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder))
{
var isSolid = false;
foreach (var entry in group)
{
entry.IsSolid = isSolid;
isSolid = true;
}
}
foreach (var entry in entries)
{
yield return entry;
}
}
protected override ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync() =>
new(new SevenZipReader(ReaderOptions, this));
}

View File

@@ -0,0 +1,210 @@
using System;
using System.Buffers;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives.SevenZip;
public partial class SevenZipArchive
#if NET8_0_OR_GREATER
: IArchiveOpenable<IArchive, IAsyncArchive>,
IMultiArchiveOpenable<IArchive, IAsyncArchive>
#endif
{
public static IAsyncArchive OpenAsyncArchive(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
path.NotNullOrEmpty("path");
return (IAsyncArchive)OpenArchive(new FileInfo(path), readerOptions ?? new ReaderOptions());
}
public static IArchive OpenArchive(string filePath, ReaderOptions? readerOptions = null)
{
filePath.NotNullOrEmpty("filePath");
return OpenArchive(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
public static IArchive OpenArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.NotNull("fileInfo");
return new SevenZipArchive(
new SourceStream(
fileInfo,
i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
readerOptions ?? new ReaderOptions()
)
);
}
public static IArchive OpenArchive(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
)
{
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
return new SevenZipArchive(
new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
public static IArchive OpenArchive(
IEnumerable<Stream> streams,
ReaderOptions? readerOptions = null
)
{
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
return new SevenZipArchive(
new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
public static IArchive OpenArchive(Stream stream, ReaderOptions? readerOptions = null)
{
stream.NotNull("stream");
if (stream is not { CanSeek: true })
{
throw new ArgumentException("Stream must be seekable", nameof(stream));
}
return new SevenZipArchive(
new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
);
}
public static IAsyncArchive OpenAsyncArchive(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)OpenArchive(stream, readerOptions);
}
public static IAsyncArchive OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)OpenArchive(fileInfo, readerOptions);
}
public static IAsyncArchive OpenAsyncArchive(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)OpenArchive(streams, readerOptions);
}
public static IAsyncArchive OpenAsyncArchive(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IAsyncArchive)OpenArchive(fileInfos, readerOptions);
}
public static bool IsSevenZipFile(string filePath) => IsSevenZipFile(new FileInfo(filePath));
public static bool IsSevenZipFile(FileInfo fileInfo)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return IsSevenZipFile(stream);
}
public static bool IsSevenZipFile(Stream stream)
{
try
{
return SignatureMatch(stream);
}
catch
{
return false;
}
}
public static async ValueTask<bool> IsSevenZipFileAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
return await SignatureMatchAsync(stream, cancellationToken);
}
catch
{
return false;
}
}
private static ReadOnlySpan<byte> Signature => [(byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C];
private static bool SignatureMatch(Stream stream)
{
var buffer = ArrayPool<byte>.Shared.Rent(6);
try
{
stream.ReadExact(buffer, 0, 6);
return buffer.AsSpan().Slice(0, 6).SequenceEqual(Signature);
}
finally
{
ArrayPool<byte>.Shared.Return(buffer);
}
}
private static async ValueTask<bool> SignatureMatchAsync(
Stream stream,
CancellationToken cancellationToken
)
{
var buffer = ArrayPool<byte>.Shared.Rent(6);
try
{
if (!await stream.ReadFullyAsync(buffer, 0, 6, cancellationToken).ConfigureAwait(false))
{
return false;
}
return buffer.AsSpan().Slice(0, 6).SequenceEqual(Signature);
}
finally
{
ArrayPool<byte>.Shared.Return(buffer);
}
}
}
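
A probe sketch against the async signature check above. Per the code, the probe reads the six signature bytes without rewinding, so the caller resets the position:

using var fs = File.OpenRead("archive.7z");
if (await SevenZipArchive.IsSevenZipFileAsync(fs))
{
    fs.Position = 0; // SignatureMatchAsync consumed the six signature bytes
    var archive = SevenZipArchive.OpenArchive(fs);
}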

View File

@@ -12,163 +12,10 @@ using SharpCompress.Readers;
namespace SharpCompress.Archives.SevenZip;
public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVolume>
public partial class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVolume>
{
private ArchiveDatabase? _database;
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.NotNullOrEmpty("filePath");
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.NotNull("fileInfo");
return new SevenZipArchive(
new SourceStream(
fileInfo,
i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
readerOptions ?? new ReaderOptions()
)
);
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
)
{
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
return new SevenZipArchive(
new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(
IEnumerable<Stream> streams,
ReaderOptions? readerOptions = null
)
{
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
return new SevenZipArchive(
new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.NotNull("stream");
if (stream is not { CanSeek: true })
{
throw new ArgumentException("Stream must be seekable", nameof(stream));
}
return new SevenZipArchive(
new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
);
}
/// <summary>
/// Opens a SevenZipArchive asynchronously from a stream.
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(stream, readerOptions));
}
/// <summary>
/// Opens a SevenZipArchive asynchronously from a FileInfo.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(fileInfo, readerOptions));
}
/// <summary>
/// Opens a SevenZipArchive asynchronously from multiple streams.
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(streams, readerOptions));
}
/// <summary>
/// Opens a SevenZipArchive asynchronously from multiple FileInfo objects.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(fileInfos, readerOptions));
}
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
@@ -182,18 +29,6 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
return new SevenZipVolume(sourceStream, ReaderOptions, 0).AsEnumerable(); //simple single volume or split, multivolume not supported
}
public static bool IsSevenZipFile(string filePath) => IsSevenZipFile(new FileInfo(filePath));
public static bool IsSevenZipFile(FileInfo fileInfo)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return IsSevenZipFile(stream);
}
internal SevenZipArchive()
: base(ArchiveType.SevenZip) { }
@@ -201,32 +36,45 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
IEnumerable<SevenZipVolume> volumes
)
{
var stream = volumes.Single().Stream;
LoadFactory(stream);
if (_database is null)
foreach (var volume in volumes)
{
return Enumerable.Empty<SevenZipArchiveEntry>();
}
var entries = new SevenZipArchiveEntry[_database._files.Count];
for (var i = 0; i < _database._files.Count; i++)
{
var file = _database._files[i];
entries[i] = new SevenZipArchiveEntry(
this,
new SevenZipFilePart(stream, _database, i, file, ReaderOptions.ArchiveEncoding)
);
}
foreach (var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder))
{
var isSolid = false;
foreach (var entry in group)
LoadFactory(volume.Stream);
if (_database is null)
{
entry.IsSolid = isSolid;
isSolid = true; //mark others in this group as solid - same as rar behaviour.
yield break;
}
var entries = new SevenZipArchiveEntry[_database._files.Count];
for (var i = 0; i < _database._files.Count; i++)
{
var file = _database._files[i];
entries[i] = new SevenZipArchiveEntry(
this,
new SevenZipFilePart(
volume.Stream,
_database,
i,
file,
ReaderOptions.ArchiveEncoding
)
);
}
foreach (
var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder)
)
{
var isSolid = false;
foreach (var entry in group)
{
entry.IsSolid = isSolid;
isSolid = true;
}
}
foreach (var entry in entries)
{
yield return entry;
}
}
return entries;
}
private void LoadFactory(Stream stream)
@@ -240,34 +88,9 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
}
}
public static bool IsSevenZipFile(Stream stream)
{
try
{
return SignatureMatch(stream);
}
catch
{
return false;
}
}
private static ReadOnlySpan<byte> Signature =>
new byte[] { (byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C };
private static bool SignatureMatch(Stream stream)
{
var reader = new BinaryReader(stream);
ReadOnlySpan<byte> signatureBytes = reader.ReadBytes(6);
return signatureBytes.SequenceEqual(Signature);
}
protected override IReader CreateReaderForSolidExtraction() =>
new SevenZipReader(ReaderOptions, this);
protected override ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync() =>
new(new SevenZipReader(ReaderOptions, this));
public override bool IsSolid =>
Entries
.Where(x => !x.IsDirectory)
@@ -279,13 +102,34 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
public override long TotalSize =>
_database?._packSizes.Aggregate(0L, (total, packSize) => total + packSize) ?? 0;
private sealed class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
internal sealed class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
{
private readonly SevenZipArchive _archive;
private SevenZipEntry? _currentEntry;
private Stream? _currentFolderStream;
private CFolder? _currentFolder;
/// <summary>
/// Enables internal diagnostics for tests.
/// When disabled (default), diagnostics properties return null to avoid exposing internal state.
/// </summary>
internal bool DiagnosticsEnabled { get; set; }
/// <summary>
/// Current folder instance used to decide whether the solid folder stream should be reused.
/// Only available when <see cref="DiagnosticsEnabled"/> is true.
/// </summary>
internal object? DiagnosticsCurrentFolder => DiagnosticsEnabled ? _currentFolder : null;
/// <summary>
/// Current shared folder stream instance.
/// Only available when <see cref="DiagnosticsEnabled"/> is true.
/// </summary>
internal Stream? DiagnosticsCurrentFolderStream =>
DiagnosticsEnabled ? _currentFolderStream : null;
internal SevenZipReader(ReaderOptions readerOptions, SevenZipArchive archive)
: base(readerOptions, ArchiveType.SevenZip) => this._archive = archive;
: base(readerOptions, ArchiveType.SevenZip, false) => this._archive = archive;
public override SevenZipVolume Volume => _archive.Volumes.Single();
@@ -298,9 +142,10 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
_currentEntry = dir;
yield return dir;
}
// For non-directory entries, yield them without creating shared streams
// Each call to GetEntryStream() will create a fresh decompression stream
// to avoid state corruption issues with async operations
// For solid archives (entries in the same folder share a compressed stream),
// we must iterate entries sequentially and maintain the folder stream state
// across entries in the same folder to avoid recreating the decompression
// stream for each file, which breaks contiguous streaming.
foreach (var entry in entries.Where(x => !x.IsDirectory))
{
_currentEntry = entry;
@@ -310,19 +155,46 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
protected override EntryStream GetEntryStream()
{
// Create a fresh decompression stream for each file (no state sharing).
// However, the LZMA decoder has bugs in its async implementation that cause
// state corruption even on fresh streams. The SyncOnlyStream wrapper
// works around these bugs by forcing async operations to use sync equivalents.
//
// TODO: Fix the LZMA decoder async bugs (in LzmaStream, Decoder, OutWindow)
// so this wrapper is no longer necessary.
var entry = _currentEntry.NotNull("currentEntry is not null");
if (entry.IsDirectory)
{
return CreateEntryStream(Stream.Null);
}
return CreateEntryStream(new SyncOnlyStream(entry.FilePart.GetCompressedStream()));
var folder = entry.FilePart.Folder;
// Check if we're starting a new folder - dispose old folder stream if needed
if (folder != _currentFolder)
{
_currentFolderStream?.Dispose();
_currentFolderStream = null;
_currentFolder = folder;
}
// Create the folder stream once per folder
if (_currentFolderStream is null)
{
_currentFolderStream = _archive._database!.GetFolderStream(
_archive.Volumes.Single().Stream,
folder!,
_archive._database.PasswordProvider
);
}
// Wrap with SyncOnlyStream to work around LZMA async bugs
// Return a ReadOnlySubStream that reads from the shared folder stream
return CreateEntryStream(
new SyncOnlyStream(
new ReadOnlySubStream(_currentFolderStream, entry.Size, leaveOpen: true)
)
);
}
public override void Dispose()
{
_currentFolderStream?.Dispose();
_currentFolderStream = null;
base.Dispose();
}
}
@@ -395,7 +267,7 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
return Task.CompletedTask;
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !LEGACY_DOTNET
public override ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default

View File

@@ -14,7 +14,7 @@ public class SevenZipArchiveEntry : SevenZipEntry, IArchiveEntry
public async ValueTask<Stream> OpenEntryStreamAsync(
CancellationToken cancellationToken = default
) => OpenEntryStream();
) => (await FilePart.GetCompressedStreamAsync(cancellationToken)).NotNull();
public IArchive Archive { get; }

View File

@@ -0,0 +1,161 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Tar;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.Tar;
using SharpCompress.Writers;
using SharpCompress.Writers.Tar;
namespace SharpCompress.Archives.Tar;
public partial class TarArchive
{
protected override async ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
IAsyncEnumerable<TarArchiveEntry> oldEntries,
IEnumerable<TarArchiveEntry> newEntries,
CancellationToken cancellationToken = default
)
{
using var writer = new TarWriter(stream, new TarWriterOptions(options));
await foreach (
var entry in oldEntries.WithCancellation(cancellationToken).ConfigureAwait(false)
)
{
if (entry.IsDirectory)
{
await writer
.WriteDirectoryAsync(
entry.Key.NotNull("Entry Key is null"),
entry.LastModifiedTime,
cancellationToken
)
.ConfigureAwait(false);
}
else
{
using var entryStream = entry.OpenEntryStream();
await writer
.WriteAsync(
entry.Key.NotNull("Entry Key is null"),
entryStream,
entry.LastModifiedTime,
entry.Size,
cancellationToken
)
.ConfigureAwait(false);
}
}
foreach (var entry in newEntries)
{
if (entry.IsDirectory)
{
await writer
.WriteDirectoryAsync(
entry.Key.NotNull("Entry Key is null"),
entry.LastModifiedTime,
cancellationToken
)
.ConfigureAwait(false);
}
else
{
using var entryStream = entry.OpenEntryStream();
await writer
.WriteAsync(
entry.Key.NotNull("Entry Key is null"),
entryStream,
entry.LastModifiedTime,
entry.Size,
cancellationToken
)
.ConfigureAwait(false);
}
}
}
protected override ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync()
{
var stream = Volumes.Single().Stream;
stream.Position = 0;
return new((IAsyncReader)TarReader.OpenReader(stream));
}
protected override async IAsyncEnumerable<TarArchiveEntry> LoadEntriesAsync(
IAsyncEnumerable<TarVolume> volumes
)
{
var stream = (await volumes.SingleAsync()).Stream;
if (stream.CanSeek)
{
stream.Position = 0;
}
// Always use async header reading here; the underlying stream may be async-only.
TarHeader? previousHeader = null;
await foreach (
var header in TarHeaderFactory.ReadHeaderAsync(
StreamingMode.Seekable,
stream,
ReaderOptions.ArchiveEncoding
)
)
{
if (header != null)
{
if (header.EntryType == EntryType.LongName)
{
previousHeader = header;
}
else
{
if (previousHeader != null)
{
var entry = new TarArchiveEntry(
this,
new TarFilePart(previousHeader, stream),
CompressionType.None
);
var oldStreamPos = stream.Position;
using (var entryStream = entry.OpenEntryStream())
{
using var memoryStream = new MemoryStream();
await entryStream.CopyToAsync(memoryStream);
memoryStream.Position = 0;
var bytes = memoryStream.ToArray();
header.Name = ReaderOptions
.ArchiveEncoding.Decode(bytes)
.TrimNulls();
}
stream.Position = oldStreamPos;
previousHeader = null;
}
yield return new TarArchiveEntry(
this,
new TarFilePart(header, stream),
CompressionType.None
);
}
}
else
{
throw new IncompleteArchiveException("Failed to read TAR header");
}
}
}
}
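
The LongName branch above follows the GNU tar convention: a header typed LongName carries the real file name as its entry data, which is decoded and applied to the header that follows. Sketched as an on-disk sequence:

// on disk:  [LongName header][name bytes][actual header][file data]
// loader:   buffer the name bytes, set header.Name, restore the stream
//           position, then emit a single entry for the actual header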

View File

@@ -0,0 +1,202 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives.Tar;
public partial class TarArchive
#if NET8_0_OR_GREATER
: IWritableArchiveOpenable,
IMultiArchiveOpenable<IWritableArchive, IWritableAsyncArchive>
#endif
{
public static IWritableArchive OpenArchive(string filePath, ReaderOptions? readerOptions = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
return OpenArchive(new FileInfo(filePath), readerOptions);
}
public static IWritableArchive OpenArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null
)
{
fileInfo.NotNull(nameof(fileInfo));
return new TarArchive(
new SourceStream(
fileInfo,
i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
readerOptions ?? new ReaderOptions() { LeaveStreamOpen = false }
)
);
}
public static IWritableArchive OpenArchive(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
)
{
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
return new TarArchive(
new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions() { LeaveStreamOpen = false }
)
);
}
public static IWritableArchive OpenArchive(
IEnumerable<Stream> streams,
ReaderOptions? readerOptions = null
)
{
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
return new TarArchive(
new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
public static IWritableArchive OpenArchive(Stream stream, ReaderOptions? readerOptions = null)
{
stream.NotNull(nameof(stream));
if (stream is not { CanSeek: true })
{
throw new ArgumentException("Stream must be seekable", nameof(stream));
}
return new TarArchive(
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
);
}
public static IWritableAsyncArchive OpenAsyncArchive(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)OpenArchive(stream, readerOptions);
}
public static IWritableAsyncArchive OpenAsyncArchive(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)OpenArchive(new FileInfo(path), readerOptions);
}
public static IWritableAsyncArchive OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)OpenArchive(fileInfo, readerOptions);
}
public static IWritableAsyncArchive OpenAsyncArchive(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)OpenArchive(streams, readerOptions);
}
public static IWritableAsyncArchive OpenAsyncArchive(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)OpenArchive(fileInfos, readerOptions);
}
public static bool IsTarFile(string filePath) => IsTarFile(new FileInfo(filePath));
public static bool IsTarFile(FileInfo fileInfo)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return IsTarFile(stream);
}
public static bool IsTarFile(Stream stream)
{
try
{
var tarHeader = new TarHeader(new ArchiveEncoding());
var reader = new BinaryReader(stream, Encoding.UTF8, false);
var readSucceeded = tarHeader.Read(reader);
var isEmptyArchive =
tarHeader.Name?.Length == 0
&& tarHeader.Size == 0
&& Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
return readSucceeded || isEmptyArchive;
}
catch (Exception)
{
// Catch all exceptions during tar header reading to determine if this is a valid tar file
// Invalid tar files or corrupted streams will throw various exceptions
return false;
}
}
public static async ValueTask<bool> IsTarFileAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
try
{
var tarHeader = new TarHeader(new ArchiveEncoding());
#if NET8_0_OR_GREATER
await using var reader = new AsyncBinaryReader(stream, leaveOpen: true);
#else
using var reader = new AsyncBinaryReader(stream, leaveOpen: true);
#endif
var readSucceeded = await tarHeader.ReadAsync(reader);
var isEmptyArchive =
tarHeader.Name?.Length == 0
&& tarHeader.Size == 0
&& Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
return readSucceeded || isEmptyArchive;
}
catch (Exception)
{
// Catch all exceptions during tar header reading to determine if this is a valid tar file
// Invalid tar files or corrupted streams will throw various exceptions
return false;
}
}
public static IWritableArchive CreateArchive() => new TarArchive();
public static IWritableAsyncArchive CreateAsyncArchive() => new TarArchive();
}
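
A probe-and-open sketch for the members above. Like the sync variant, IsTarFileAsync reads a header from the current position without rewinding, so the caller resets it:

using var fs = File.OpenRead("archive.tar");
if (await TarArchive.IsTarFileAsync(fs))
{
    fs.Position = 0; // rewind after the header probe
    var archive = TarArchive.OpenArchive(fs);
}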

View File

@@ -15,196 +15,14 @@ using SharpCompress.Writers.Tar;
namespace SharpCompress.Archives.Tar;
public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
public partial class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
{
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.NotNull(nameof(fileInfo));
return new TarArchive(
new SourceStream(
fileInfo,
i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
readerOptions ?? new ReaderOptions()
)
);
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
)
{
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
return new TarArchive(
new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
{
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
return new TarArchive(
new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static TarArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.NotNull(nameof(stream));
if (stream is not { CanSeek: true })
{
throw new ArgumentException("Stream must be seekable", nameof(stream));
}
return new TarArchive(
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
);
}
/// <summary>
/// Opens a TarArchive asynchronously from a stream.
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(stream, readerOptions));
}
/// <summary>
/// Opens a TarArchive asynchronously from a FileInfo.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(fileInfo, readerOptions));
}
/// <summary>
/// Opens a TarArchive asynchronously from multiple streams.
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(streams, readerOptions));
}
/// <summary>
/// Opens a TarArchive asynchronously from multiple FileInfo objects.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(fileInfos, readerOptions));
}
public static bool IsTarFile(string filePath) => IsTarFile(new FileInfo(filePath));
public static bool IsTarFile(FileInfo fileInfo)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return IsTarFile(stream);
}
public static bool IsTarFile(Stream stream)
{
try
{
var tarHeader = new TarHeader(new ArchiveEncoding());
var readSucceeded = tarHeader.Read(new BinaryReader(stream));
var isEmptyArchive =
tarHeader.Name?.Length == 0
&& tarHeader.Size == 0
&& Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
return readSucceeded || isEmptyArchive;
}
catch { }
return false;
}
protected override IEnumerable<TarVolume> LoadVolumes(SourceStream sourceStream)
{
sourceStream.NotNull("SourceStream is null").LoadAllParts(); //request all streams
return new TarVolume(sourceStream, ReaderOptions, 1).AsEnumerable(); //simple single volume or split, multivolume not supported
sourceStream.NotNull("SourceStream is null").LoadAllParts();
return new TarVolume(sourceStream, ReaderOptions, 1).AsEnumerable();
}
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="sourceStream"></param>
private TarArchive(SourceStream sourceStream)
: base(ArchiveType.Tar, sourceStream) { }
@@ -214,6 +32,10 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
protected override IEnumerable<TarArchiveEntry> LoadEntries(IEnumerable<TarVolume> volumes)
{
var stream = volumes.Single().Stream;
if (stream.CanSeek)
{
stream.Position = 0;
}
TarHeader? previousHeader = null;
foreach (
var header in TarHeaderFactory.ReadHeader(
@@ -244,7 +66,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
using (var entryStream = entry.OpenEntryStream())
{
using var memoryStream = new MemoryStream();
entryStream.CopyTo(memoryStream);
entryStream.CopyTo(memoryStream, Constants.BufferSize);
memoryStream.Position = 0;
var bytes = memoryStream.ToArray();
@@ -269,8 +91,6 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
}
}
public static TarArchive Create() => new();
protected override TarArchiveEntry CreateEntryInternal(
string filePath,
Stream source,
@@ -323,54 +143,10 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
}
}
protected override async ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
IEnumerable<TarArchiveEntry> oldEntries,
IEnumerable<TarArchiveEntry> newEntries,
CancellationToken cancellationToken = default
)
{
using var writer = new TarWriter(stream, new TarWriterOptions(options));
foreach (var entry in oldEntries.Concat(newEntries))
{
if (entry.IsDirectory)
{
await writer
.WriteDirectoryAsync(
entry.Key.NotNull("Entry Key is null"),
entry.LastModifiedTime,
cancellationToken
)
.ConfigureAwait(false);
}
else
{
using var entryStream = entry.OpenEntryStream();
await writer
.WriteAsync(
entry.Key.NotNull("Entry Key is null"),
entryStream,
entry.LastModifiedTime,
entry.Size,
cancellationToken
)
.ConfigureAwait(false);
}
}
}
protected override IReader CreateReaderForSolidExtraction()
{
var stream = Volumes.Single().Stream;
stream.Position = 0;
return TarReader.Open(stream);
}
protected override ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync()
{
var stream = Volumes.Single().Stream;
stream.Position = 0;
return new(TarReader.Open(stream));
return TarReader.OpenReader(stream);
}
}

View File

@@ -16,7 +16,7 @@ public class TarArchiveEntry : TarEntry, IArchiveEntry
public async ValueTask<Stream> OpenEntryStreamAsync(
CancellationToken cancellationToken = default
) => OpenEntryStream();
) => (await Parts.Single().GetCompressedStreamAsync(cancellationToken)).NotNull();
#region IArchiveEntry Members

View File

@@ -79,7 +79,7 @@ internal sealed class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiv
}
//ensure new stream is at the start, this could be reset
stream.Seek(0, SeekOrigin.Begin);
return SharpCompressStream.Create(stream, leaveOpen: true);
return SharpCompressStream.CreateNonDisposing(stream);
}
internal override void Close()

View File

@@ -0,0 +1,132 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Zip;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Writers;
using SharpCompress.Writers.Zip;
namespace SharpCompress.Archives.Zip;
public partial class ZipArchive
{
protected override async IAsyncEnumerable<ZipArchiveEntry> LoadEntriesAsync(
IAsyncEnumerable<ZipVolume> volumes
)
{
var vols = await volumes.ToListAsync();
var volsArray = vols.ToArray();
await foreach (
var h in headerFactory.NotNull().ReadSeekableHeaderAsync(volsArray.Last().Stream)
)
{
if (h != null)
{
switch (h.ZipHeaderType)
{
case ZipHeaderType.DirectoryEntry:
{
var deh = (DirectoryEntryHeader)h;
Stream s;
if (
deh.RelativeOffsetOfEntryHeader + deh.CompressedSize
> volsArray[deh.DiskNumberStart].Stream.Length
)
{
var v = volsArray.Skip(deh.DiskNumberStart).ToArray();
s = new SourceStream(
v[0].Stream,
i => i < v.Length ? v[i].Stream : null,
new ReaderOptions() { LeaveStreamOpen = true }
);
}
else
{
s = volsArray[deh.DiskNumberStart].Stream;
}
yield return new ZipArchiveEntry(
this,
new SeekableZipFilePart(headerFactory.NotNull(), deh, s)
);
}
break;
case ZipHeaderType.DirectoryEnd:
{
var bytes = ((DirectoryEndHeader)h).Comment ?? Array.Empty<byte>();
volsArray.Last().Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
yield break;
}
}
}
}
}
protected override async ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
IAsyncEnumerable<ZipArchiveEntry> oldEntries,
IEnumerable<ZipArchiveEntry> newEntries,
CancellationToken cancellationToken = default
)
{
using var writer = new ZipWriter(stream, new ZipWriterOptions(options));
await foreach (
var entry in oldEntries.WithCancellation(cancellationToken).ConfigureAwait(false)
)
{
if (entry.IsDirectory)
{
await writer
.WriteDirectoryAsync(
entry.Key.NotNull("Entry Key is null"),
entry.LastModifiedTime,
cancellationToken
)
.ConfigureAwait(false);
}
else
{
using var entryStream = entry.OpenEntryStream();
await writer
.WriteAsync(
entry.Key.NotNull("Entry Key is null"),
entryStream,
cancellationToken
)
.ConfigureAwait(false);
}
}
foreach (var entry in newEntries)
{
if (entry.IsDirectory)
{
await writer
.WriteDirectoryAsync(
entry.Key.NotNull("Entry Key is null"),
entry.LastModifiedTime,
cancellationToken
)
.ConfigureAwait(false);
}
else
{
using var entryStream = entry.OpenEntryStream();
await writer
.WriteAsync(
entry.Key.NotNull("Entry Key is null"),
entryStream,
cancellationToken
)
.ConfigureAwait(false);
}
}
}
}

View File

@@ -0,0 +1,287 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Zip;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives.Zip;
public partial class ZipArchive
#if NET8_0_OR_GREATER
: IWritableArchiveOpenable,
IMultiArchiveOpenable<IWritableArchive, IWritableAsyncArchive>
#endif
{
public static IWritableArchive OpenArchive(string filePath, ReaderOptions? readerOptions = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
return OpenArchive(new FileInfo(filePath), readerOptions);
}
public static IWritableArchive OpenArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null
)
{
fileInfo.NotNull(nameof(fileInfo));
return new ZipArchive(
new SourceStream(
fileInfo,
i => ZipArchiveVolumeFactory.GetFilePart(i, fileInfo),
readerOptions ?? new ReaderOptions() { LeaveStreamOpen = false }
)
);
}
public static IWritableArchive OpenArchive(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
)
{
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
return new ZipArchive(
new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions() { LeaveStreamOpen = false }
)
);
}
public static IWritableArchive OpenArchive(
IEnumerable<Stream> streams,
ReaderOptions? readerOptions = null
)
{
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
return new ZipArchive(
new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
public static IWritableArchive OpenArchive(Stream stream, ReaderOptions? readerOptions = null)
{
stream.NotNull(nameof(stream));
if (stream is not { CanSeek: true })
{
throw new ArgumentException("Stream must be seekable", nameof(stream));
}
return new ZipArchive(
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
);
}
public static IWritableAsyncArchive OpenAsyncArchive(
string path,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)OpenArchive(path, readerOptions);
}
public static IWritableAsyncArchive OpenAsyncArchive(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)OpenArchive(stream, readerOptions);
}
public static IWritableAsyncArchive OpenAsyncArchive(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)OpenArchive(fileInfo, readerOptions);
}
public static IWritableAsyncArchive OpenAsyncArchive(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)OpenArchive(streams, readerOptions);
}
public static IWritableAsyncArchive OpenAsyncArchive(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return (IWritableAsyncArchive)OpenArchive(fileInfos, readerOptions);
}
public static bool IsZipFile(string filePath, string? password = null) =>
IsZipFile(new FileInfo(filePath), password);
public static bool IsZipFile(FileInfo fileInfo, string? password = null)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return IsZipFile(stream, password);
}
public static bool IsZipFile(Stream stream, string? password = null)
{
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
var header = headerFactory
.ReadStreamHeader(stream)
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
if (header is null)
{
return false;
}
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
return true;
}
catch
{
return false;
}
}
public static bool IsZipMulti(Stream stream, string? password = null)
{
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
var header = headerFactory
.ReadStreamHeader(stream)
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
if (header is null)
{
if (stream.CanSeek)
{
var z = new SeekableZipHeaderFactory(password, new ArchiveEncoding());
var x = z.ReadSeekableHeader(stream).FirstOrDefault();
return x?.ZipHeaderType == ZipHeaderType.DirectoryEntry;
}
else
{
return false;
}
}
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
return true;
}
catch
{
return false;
}
}
public static async ValueTask<bool> IsZipFileAsync(
Stream stream,
string? password = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
var header = await headerFactory
.ReadStreamHeaderAsync(stream)
.Where(x => x.ZipHeaderType != ZipHeaderType.Split)
.FirstOrDefaultAsync(cancellationToken);
if (header is null)
{
return false;
}
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
return true;
}
catch
{
return false;
}
}
public static IWritableArchive CreateArchive() => new ZipArchive();
public static IWritableAsyncArchive CreateAsyncArchive() => new ZipArchive();
public static async ValueTask<bool> IsZipMultiAsync(
Stream stream,
string? password = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
var header = headerFactory
.ReadStreamHeader(stream)
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
if (header is null)
{
if (stream.CanSeek)
{
var z = new SeekableZipHeaderFactory(password, new ArchiveEncoding());
ZipHeader? x = null;
await foreach (
var h in z.ReadSeekableHeaderAsync(stream)
.WithCancellation(cancellationToken)
)
{
x = h;
break;
}
return x?.ZipHeaderType == ZipHeaderType.DirectoryEntry;
}
else
{
return false;
}
}
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
return true;
}
catch
{
return false;
}
}
}
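
A sketch of the simplified probes above (the bufferSize parameter of the old API is gone; the stream is used as passed):

using var fs = File.OpenRead("archive.zip");
if (await ZipArchive.IsZipFileAsync(fs))
{
    fs.Position = 0; // rewind defensively; the probe may advance the stream
    var archive = ZipArchive.OpenArchive(fs);
}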

View File

@@ -16,21 +16,12 @@ using SharpCompress.Writers.Zip;
namespace SharpCompress.Archives.Zip;
public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
public partial class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
{
private readonly SeekableZipHeaderFactory? headerFactory;
/// <summary>
/// Gets or sets the compression level applied to files added to the archive,
/// if the compression method is set to deflate
/// </summary>
public CompressionLevel DeflateCompressionLevel { get; set; }
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="sourceStream"></param>
/// <param name="options"></param>
internal ZipArchive(SourceStream sourceStream)
: base(ArchiveType.Zip, sourceStream) =>
headerFactory = new SeekableZipHeaderFactory(
@@ -38,377 +29,43 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
sourceStream.ReaderOptions.ArchiveEncoding
);
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
public static ZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static ZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.NotNull(nameof(fileInfo));
return new ZipArchive(
new SourceStream(
fileInfo,
i => ZipArchiveVolumeFactory.GetFilePart(i, fileInfo),
readerOptions ?? new ReaderOptions()
)
);
}
/// <summary>
/// Constructor with all file parts passed in
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
public static ZipArchive Open(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? readerOptions = null
)
{
fileInfos.NotNull(nameof(fileInfos));
var files = fileInfos.ToArray();
return new ZipArchive(
new SourceStream(
files[0],
i => i < files.Length ? files[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
/// <summary>
/// Constructor with all stream parts passed in
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
public static ZipArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
{
streams.NotNull(nameof(streams));
var strms = streams.ToArray();
return new ZipArchive(
new SourceStream(
strms[0],
i => i < strms.Length ? strms[i] : null,
readerOptions ?? new ReaderOptions()
)
);
}
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static ZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.NotNull(nameof(stream));
if (stream is not { CanSeek: true })
{
throw new ArgumentException("Stream must be seekable", nameof(stream));
}
return new ZipArchive(
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
);
}
/// <summary>
/// Opens a ZipArchive asynchronously from a stream.
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(stream, readerOptions));
}
/// <summary>
/// Opens a ZipArchive asynchronously from a FileInfo.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(fileInfo, readerOptions));
}
/// <summary>
/// Opens a ZipArchive asynchronously from multiple streams.
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(streams, readerOptions));
}
/// <summary>
/// Opens a ZipArchive asynchronously from multiple FileInfo objects.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static ValueTask<IAsyncArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new(Open(fileInfos, readerOptions));
}
public static bool IsZipFile(
string filePath,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => IsZipFile(new FileInfo(filePath), password, bufferSize);
public static bool IsZipFile(
FileInfo fileInfo,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
if (!fileInfo.Exists)
{
return false;
}
using Stream stream = fileInfo.OpenRead();
return IsZipFile(stream, password, bufferSize);
}
public static bool IsZipFile(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
}
var header = headerFactory
.ReadStreamHeader(stream)
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
if (header is null)
{
return false;
}
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
return true;
}
catch
{
return false;
}
}
public static bool IsZipMulti(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
)
{
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
}
var header = headerFactory
.ReadStreamHeader(stream)
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
if (header is null)
{
if (stream.CanSeek) //could be multipart. Test for central directory - might not be z64 safe
{
var z = new SeekableZipHeaderFactory(password, new ArchiveEncoding());
var x = z.ReadSeekableHeader(stream, useSync: true).FirstOrDefault();
return x?.ZipHeaderType == ZipHeaderType.DirectoryEntry;
}
else
{
return false;
}
}
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
return true;
}
catch
{
return false;
}
}
public static async ValueTask<bool> IsZipFileAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
}
var header = await headerFactory
.ReadStreamHeaderAsync(stream)
.Where(x => x.ZipHeaderType != ZipHeaderType.Split)
.FirstOrDefaultAsync();
if (header is null)
{
return false;
}
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
return true;
}
catch
{
return false;
}
}
public static async ValueTask<bool> IsZipMultiAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
}
var header = headerFactory
.ReadStreamHeader(stream)
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
if (header is null)
{
if (stream.CanSeek) //could be multipart. Test for central directory - might not be z64 safe
{
var z = new SeekableZipHeaderFactory(password, new ArchiveEncoding());
ZipHeader? x = null;
await foreach (
var h in z.ReadSeekableHeaderAsync(stream)
.WithCancellation(cancellationToken)
)
{
x = h;
break;
}
return x?.ZipHeaderType == ZipHeaderType.DirectoryEntry;
}
else
{
return false;
}
}
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
return true;
}
catch
{
return false;
}
}
internal ZipArchive()
: base(ArchiveType.Zip) { }
protected override IEnumerable<ZipVolume> LoadVolumes(SourceStream stream)
{
stream.LoadAllParts(); //request all streams
stream.Position = 0;
stream.LoadAllParts();
//stream.Position = 0;
var streams = stream.Streams.ToList();
var idx = 0;
if (streams.Count() > 1) //test part 2 - true = multipart not split
if (streams.Count() > 1)
{
streams[1].Position += 4; //skip the POST_DATA_DESCRIPTOR to prevent an exception
var isZip = IsZipFile(streams[1], ReaderOptions.Password, ReaderOptions.BufferSize);
streams[1].Position -= 4;
//check if second stream is zip header without changing position
var headerProbeStream = streams[1];
var startPosition = headerProbeStream.Position;
headerProbeStream.Position = startPosition + 4;
var isZip = IsZipFile(headerProbeStream, ReaderOptions.Password);
headerProbeStream.Position = startPosition;
if (isZip)
{
stream.IsVolumes = true;
var tmp = streams[0]; //arcs as zip, z01 ... swap the zip the end
var tmp = streams[0];
streams.RemoveAt(0);
streams.Add(tmp);
//streams[0].Position = 4; //skip the POST_DATA_DESCRIPTOR to prevent an exception
return streams.Select(a => new ZipVolume(a, ReaderOptions, idx++));
}
}
//split mode or single file
return new ZipVolume(stream, ReaderOptions, idx++).AsEnumerable();
}
internal ZipArchive()
: base(ArchiveType.Zip) { }
protected override IEnumerable<ZipArchiveEntry> LoadEntries(IEnumerable<ZipVolume> volumes)
{
var vols = volumes.ToArray();
foreach (
var h in headerFactory.NotNull().ReadSeekableHeader(vols.Last().Stream, useSync: true)
)
foreach (var h in headerFactory.NotNull().ReadSeekableHeader(vols.Last().Stream))
{
if (h != null)
{
@@ -452,59 +109,6 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
}
}
protected override async IAsyncEnumerable<ZipArchiveEntry> LoadEntriesAsync(
IAsyncEnumerable<ZipVolume> volumes
)
{
var vols = await volumes.ToListAsync();
var volsArray = vols.ToArray();
await foreach (
var h in headerFactory.NotNull().ReadSeekableHeaderAsync(volsArray.Last().Stream)
)
{
if (h != null)
{
switch (h.ZipHeaderType)
{
case ZipHeaderType.DirectoryEntry:
{
var deh = (DirectoryEntryHeader)h;
Stream s;
if (
deh.RelativeOffsetOfEntryHeader + deh.CompressedSize
> volsArray[deh.DiskNumberStart].Stream.Length
)
{
var v = volsArray.Skip(deh.DiskNumberStart).ToArray();
s = new SourceStream(
v[0].Stream,
i => i < v.Length ? v[i].Stream : null,
new ReaderOptions() { LeaveStreamOpen = true }
);
}
else
{
s = volsArray[deh.DiskNumberStart].Stream;
}
yield return new ZipArchiveEntry(
this,
new SeekableZipFilePart(headerFactory.NotNull(), deh, s)
);
}
break;
case ZipHeaderType.DirectoryEnd:
{
var bytes = ((DirectoryEndHeader)h).Comment ?? Array.Empty<byte>();
volsArray.Last().Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
yield break;
}
}
}
}
}
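Read with illustrative numbers, the spanning check above works as follows (a sketch, not code from the diff):
// Suppose disk 2 is 1_000_000 bytes long and a directory entry records
// RelativeOffsetOfEntryHeader = 950_000 with CompressedSize = 100_000.
// 950_000 + 100_000 > 1_000_000, so the entry overruns disk 2 and a chained
// SourceStream over volumes 2..N is built; otherwise disk 2's stream is used directly.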
public void SaveTo(Stream stream) => SaveTo(stream, new WriterOptions(CompressionType.Deflate));
protected override void SaveTo(
@@ -536,41 +140,6 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
}
}
protected override async ValueTask SaveToAsync(
Stream stream,
WriterOptions options,
IEnumerable<ZipArchiveEntry> oldEntries,
IEnumerable<ZipArchiveEntry> newEntries,
CancellationToken cancellationToken = default
)
{
using var writer = new ZipWriter(stream, new ZipWriterOptions(options));
foreach (var entry in oldEntries.Concat(newEntries))
{
if (entry.IsDirectory)
{
await writer
.WriteDirectoryAsync(
entry.Key.NotNull("Entry Key is null"),
entry.LastModifiedTime,
cancellationToken
)
.ConfigureAwait(false);
}
else
{
using var entryStream = entry.OpenEntryStream();
await writer
.WriteAsync(
entry.Key.NotNull("Entry Key is null"),
entryStream,
cancellationToken
)
.ConfigureAwait(false);
}
}
}
protected override ZipArchiveEntry CreateEntryInternal(
string filePath,
Stream source,
@@ -584,19 +153,17 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
DateTime? modified
) => new ZipWritableArchiveEntry(this, directoryPath, modified);
public static ZipArchive Create() => new();
protected override IReader CreateReaderForSolidExtraction()
{
var stream = Volumes.Single().Stream;
((IStreamStack)stream).StackSeek(0);
return ZipReader.OpenReader(stream, ReaderOptions, Entries);
}
protected override ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync()
{
var stream = Volumes.Single().Stream;
stream.Position = 0;
return new((IAsyncReader)ZipReader.OpenReader(stream));
}
}

View File

@@ -0,0 +1,22 @@
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip;
namespace SharpCompress.Archives.Zip;
public partial class ZipArchiveEntry
{
public async ValueTask<Stream> OpenEntryStreamAsync(
CancellationToken cancellationToken = default
)
{
var part = Parts.Single();
if (part is SeekableZipFilePart seekablePart)
{
return (await seekablePart.GetCompressedStreamAsync(cancellationToken)).NotNull();
}
return OpenEntryStream();
}
}
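A hedged usage sketch (ZipArchive.Open and the Entries property are assumed surrounding API, not shown in this diff):
using var archive = ZipArchive.Open(File.OpenRead("data.zip")); // assumed factory
foreach (var entry in archive.Entries)
{
// Seekable parts take the true async path; everything else falls back to the sync open.
using var stream = await entry.OpenEntryStreamAsync();
}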

View File

@@ -6,25 +6,13 @@ using SharpCompress.Common.Zip;
namespace SharpCompress.Archives.Zip;
public partial class ZipArchiveEntry : ZipEntry, IArchiveEntry
{
internal ZipArchiveEntry(ZipArchive archive, SeekableZipFilePart? part)
: base(part) => Archive = archive;
public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream().NotNull();
// OpenEntryStreamAsync moved to the ZipArchiveEntry partial in the new file above
#region IArchiveEntry Members
public IArchive Archive { get; }

View File

@@ -14,6 +14,7 @@ internal static class ZipArchiveVolumeFactory
//new style .zip, z01.. | .zipx, zx01 - if the numbers go beyond 99 then they use 100 ...1000 etc
var m = Regex.Match(part1.Name, @"^(.*\.)(zipx?|zx?[0-9]+)$", RegexOptions.IgnoreCase);
if (m.Success)
{
item = new FileInfo(
Path.Combine(
part1.DirectoryName!,
@@ -24,11 +25,16 @@ internal static class ZipArchiveVolumeFactory
)
)
);
}
else //split - 001, 002 ...
{
return ArchiveVolumeFactory.GetFilePart(index, part1);
}
if (item != null && item.Exists)
{
return item;
}
return null; //no more items
}
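Illustrative matches for the volume-name pattern above (hypothetical file names):
// "data.zip"   -> matched, extension group = "zip"   (first part of new-style volumes)
// "data.z01"   -> matched, extension group = "z01"
// "data.zx100" -> matched, extension group = "zx100" (zipx volumes numbered past 99)
// "data.001"   -> not matched; falls through to ArchiveVolumeFactory.GetFilePart as a split volume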

View File

@@ -80,7 +80,7 @@ internal class ZipWritableArchiveEntry : ZipArchiveEntry, IWritableArchiveEntry
}
//ensure new stream is at the start, this could be reset
stream.Seek(0, SeekOrigin.Begin);
return SharpCompressStream.CreateNonDisposing(stream);
}
internal override void Close()

View File

@@ -4,58 +4,61 @@ using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace SharpCompress.Common.Ace;
public class AceCrc
{
// CRC-32 lookup table (standard polynomial 0xEDB88320, reflected)
private static readonly uint[] Crc32Table = GenerateTable();
private static uint[] GenerateTable()
{
var table = new uint[256];
for (int i = 0; i < 256; i++)
{
uint crc = (uint)i;
for (int j = 0; j < 8; j++)
{
if ((crc & 1) != 0)
{
crc = (crc >> 1) ^ 0xEDB88320u;
}
else
{
crc >>= 1;
}
}
table[i] = crc;
}
return table;
}
/// <summary>
/// Calculate ACE CRC-32 checksum.
/// ACE CRC-32 uses the standard CRC-32 polynomial (0xEDB88320, reflected)
/// with init=0xFFFFFFFF but NO final XOR.
/// </summary>
public static uint AceCrc32(ReadOnlySpan<byte> data)
{
uint crc = 0xFFFFFFFFu;
foreach (byte b in data)
{
crc = (crc >> 8) ^ Crc32Table[(crc ^ b) & 0xFF];
}
return crc; // No final XOR for ACE
}
/// <summary>
/// ACE CRC-16 is the lower 16 bits of the ACE CRC-32.
/// </summary>
public static ushort AceCrc16(ReadOnlySpan<byte> data)
{
return (ushort)(AceCrc32(data) & 0xFFFF);
}
}
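A quick property check of the no-final-XOR behavior described above; for empty input the loop never runs, so the initializer falls straight through:
// Sketch: empty input returns the init value, where a standard CRC-32 would return 0.
System.Diagnostics.Debug.Assert(AceCrc.AceCrc32(ReadOnlySpan<byte>.Empty) == 0xFFFFFFFFu);
System.Diagnostics.Debug.Assert(AceCrc.AceCrc16(ReadOnlySpan<byte>.Empty) == 0xFFFF);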

View File

@@ -6,63 +6,62 @@ using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common.Ace.Headers;
namespace SharpCompress.Common.Ace;
public class AceEntry : Entry
{
private readonly AceFilePart _filePart;
internal AceEntry(AceFilePart filePart)
{
_filePart = filePart;
}
public override long Crc
{
get
{
if (_filePart == null)
{
return 0;
}
return _filePart.Header.Crc32;
}
}
public override string? Key => _filePart?.Header.Filename;
public override string? LinkTarget => null;
public override long CompressedSize => _filePart?.Header.PackedSize ?? 0;
public override CompressionType CompressionType
{
get
{
if (_filePart.Header.CompressionType == Headers.CompressionType.Stored)
{
return CompressionType.None;
}
return CompressionType.AceLZ77;
}
}
public override long Size => _filePart?.Header.OriginalSize ?? 0;
public override DateTime? LastModifiedTime => _filePart.Header.DateTime;
public override DateTime? CreatedTime => null;
public override DateTime? LastAccessedTime => null;
public override DateTime? ArchivedTime => null;
public override bool IsEncrypted => _filePart.Header.IsFileEncrypted;
public override bool IsDirectory => _filePart.Header.IsDirectory;
public override bool IsSplitAfter => false;
internal override IEnumerable<FilePart> Parts => _filePart.Empty();
}

View File

@@ -7,46 +7,45 @@ using System.Threading.Tasks;
using SharpCompress.Common.Ace.Headers;
using SharpCompress.IO;
namespace SharpCompress.Common.Ace;
public class AceFilePart : FilePart
{
private readonly Stream _stream;
internal AceFileHeader Header { get; set; }
internal AceFilePart(AceFileHeader localAceHeader, Stream seekableStream)
: base(localAceHeader.ArchiveEncoding)
{
_stream = seekableStream;
Header = localAceHeader;
}
internal override string? FilePartName => Header.Filename;
internal override Stream GetCompressedStream()
{
if (_stream != null)
{
Stream compressedStream;
switch (Header.CompressionType)
{
case Headers.CompressionType.Stored:
compressedStream = new ReadOnlySubStream(
_stream,
Header.DataStartPosition,
Header.PackedSize
);
break;
default:
throw new NotSupportedException(
"CompressionMethod: " + Header.CompressionQuality
);
}
return compressedStream;
}
return _stream.NotNull();
}
internal override Stream? GetRawStream() => _stream;
}

View File

@@ -7,29 +7,28 @@ using System.Threading.Tasks;
using SharpCompress.Common.Arj;
using SharpCompress.Readers;
namespace SharpCompress.Common.Ace;
public class AceVolume : Volume
{
public AceVolume(Stream stream, ReaderOptions readerOptions, int index = 0)
: base(stream, readerOptions, index) { }
public override bool IsFirstVolume
{
get { return true; }
}
/// <summary>
/// True when this volume belongs to a multi-part ACE archive.
/// </summary>
public override bool IsMultiVolume
{
get { return false; }
}
internal IEnumerable<AceFilePart> GetVolumeFileParts()
{
return new List<AceFilePart>();
}
}

View File

@@ -0,0 +1,111 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Arc;
namespace SharpCompress.Common.Ace.Headers;
public sealed partial class AceFileHeader
{
/// <summary>
/// Asynchronously reads the next file entry header from the stream.
/// Returns null if no more entries or end of archive.
/// Supports both ACE 1.0 and ACE 2.0 formats.
/// </summary>
public override async ValueTask<AceHeader?> ReadAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
var headerData = await ReadHeaderAsync(stream, cancellationToken);
if (headerData.Length == 0)
{
return null;
}
int offset = 0;
// Header type (1 byte)
HeaderType = headerData[offset++];
// Skip recovery record headers (ACE 2.0 feature)
if (HeaderType == (byte)SharpCompress.Common.Ace.Headers.AceHeaderType.RECOVERY32)
{
// Skip to next header
return null;
}
if (HeaderType != (byte)SharpCompress.Common.Ace.Headers.AceHeaderType.FILE)
{
// Unknown header type - skip
return null;
}
// Header flags (2 bytes)
HeaderFlags = BitConverter.ToUInt16(headerData, offset);
offset += 2;
// Packed size (4 bytes)
PackedSize = BitConverter.ToUInt32(headerData, offset);
offset += 4;
// Original size (4 bytes)
OriginalSize = BitConverter.ToUInt32(headerData, offset);
offset += 4;
// File date/time in DOS format (4 bytes)
var dosDateTime = BitConverter.ToUInt32(headerData, offset);
DateTime = ConvertDosDateTime(dosDateTime);
offset += 4;
// File attributes (4 bytes)
Attributes = (int)BitConverter.ToUInt32(headerData, offset);
offset += 4;
// CRC32 (4 bytes)
Crc32 = BitConverter.ToUInt32(headerData, offset);
offset += 4;
// Compression type (1 byte)
byte compressionType = headerData[offset++];
CompressionType = GetCompressionType(compressionType);
// Compression quality/parameter (1 byte)
byte compressionQuality = headerData[offset++];
CompressionQuality = GetCompressionQuality(compressionQuality);
// Parameters (2 bytes)
Parameters = BitConverter.ToUInt16(headerData, offset);
offset += 2;
// Reserved (2 bytes) - skip
offset += 2;
// Filename length (2 bytes)
var filenameLength = BitConverter.ToUInt16(headerData, offset);
offset += 2;
// Filename
if (offset + filenameLength <= headerData.Length)
{
Filename = ArchiveEncoding.Decode(headerData, offset, filenameLength);
offset += filenameLength;
}
// Handle comment if present
if ((HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.COMMENT) != 0)
{
// Comment length (2 bytes)
if (offset + 2 <= headerData.Length)
{
ushort commentLength = BitConverter.ToUInt16(headerData, offset);
offset += 2 + commentLength; // Skip comment
}
}
// Store the data start position
DataStartPosition = stream.Position;
return this;
}
}

View File

@@ -2,170 +2,173 @@ using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using System.Xml.Linq;
using SharpCompress.Common.Arc;
namespace SharpCompress.Common.Ace.Headers;
/// <summary>
/// ACE file entry header
/// </summary>
public sealed partial class AceFileHeader : AceHeader
{
public long DataStartPosition { get; private set; }
public long PackedSize { get; set; }
public long OriginalSize { get; set; }
public DateTime DateTime { get; set; }
public int Attributes { get; set; }
public uint Crc32 { get; set; }
public CompressionType CompressionType { get; set; }
public CompressionQuality CompressionQuality { get; set; }
public ushort Parameters { get; set; }
public string Filename { get; set; } = string.Empty;
public List<byte> Comment { get; set; } = new();
/// <summary>
/// File data offset in the archive
/// </summary>
public ulong DataOffset { get; set; }
public bool IsDirectory => (Attributes & 0x10) != 0;
public bool IsContinuedFromPrev =>
(HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.CONTINUED_PREV) != 0;
public bool IsContinuedToNext =>
(HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.CONTINUED_NEXT) != 0;
public int DictionarySize
{
get
{
int bits = Parameters & 0x0F;
return bits < 10 ? 1024 : 1 << bits;
}
}
public AceFileHeader(IArchiveEncoding archiveEncoding)
: base(archiveEncoding, AceHeaderType.FILE) { }
/// <summary>
/// Reads the next file entry header from the stream.
/// Returns null if no more entries or end of archive.
/// Supports both ACE 1.0 and ACE 2.0 formats.
/// </summary>
public override AceHeader? Read(Stream stream)
{
var headerData = ReadHeader(stream);
if (headerData.Length == 0)
{
return null;
}
int offset = 0;
// Header type (1 byte)
HeaderType = headerData[offset++];
// Skip recovery record headers (ACE 2.0 feature)
if (HeaderType == (byte)SharpCompress.Common.Ace.Headers.AceHeaderType.RECOVERY32)
{
// Skip to next header
return null;
}
if (HeaderType != (byte)SharpCompress.Common.Ace.Headers.AceHeaderType.FILE)
{
// Unknown header type - skip
return null;
}
// Header flags (2 bytes)
HeaderFlags = BitConverter.ToUInt16(headerData, offset);
offset += 2;
// Packed size (4 bytes)
PackedSize = BitConverter.ToUInt32(headerData, offset);
offset += 4;
// Original size (4 bytes)
OriginalSize = BitConverter.ToUInt32(headerData, offset);
offset += 4;
// File date/time in DOS format (4 bytes)
var dosDateTime = BitConverter.ToUInt32(headerData, offset);
DateTime = ConvertDosDateTime(dosDateTime);
offset += 4;
// File attributes (4 bytes)
Attributes = (int)BitConverter.ToUInt32(headerData, offset);
offset += 4;
// CRC32 (4 bytes)
Crc32 = BitConverter.ToUInt32(headerData, offset);
offset += 4;
// Compression type (1 byte)
byte compressionType = headerData[offset++];
CompressionType = GetCompressionType(compressionType);
// Compression quality/parameter (1 byte)
byte compressionQuality = headerData[offset++];
CompressionQuality = GetCompressionQuality(compressionQuality);
// Parameters (2 bytes)
Parameters = BitConverter.ToUInt16(headerData, offset);
offset += 2;
// Reserved (2 bytes) - skip
offset += 2;
// Filename length (2 bytes)
var filenameLength = BitConverter.ToUInt16(headerData, offset);
offset += 2;
// Filename
if (offset + filenameLength <= headerData.Length)
{
Filename = ArchiveEncoding.Decode(headerData, offset, filenameLength);
offset += filenameLength;
}
// Handle comment if present
if ((HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.COMMENT) != 0)
{
// Comment length (2 bytes)
if (offset + 2 <= headerData.Length)
{
ushort commentLength = BitConverter.ToUInt16(headerData, offset);
offset += 2 + commentLength; // Skip comment
}
}
// Store the data start position
DataStartPosition = stream.Position;
return this;
}
// ReadAsync moved to AceFileHeader.Async.cs
public CompressionType GetCompressionType(byte value) =>
value switch
{
0 => CompressionType.Stored,
1 => CompressionType.Lz77,
2 => CompressionType.Blocked,
_ => CompressionType.Unknown,
};
public CompressionQuality GetCompressionQuality(byte value) =>
value switch
{
0 => CompressionQuality.None,
1 => CompressionQuality.Fastest,
2 => CompressionQuality.Fast,
3 => CompressionQuality.Normal,
4 => CompressionQuality.Good,
5 => CompressionQuality.Best,
_ => CompressionQuality.Unknown,
};
}
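Worked values for the DictionarySize getter above:
// Parameters & 0x0F = 5  -> below 10, clamped to the 1024-byte minimum
// Parameters & 0x0F = 10 -> 1 << 10 = 1024 bytes
// Parameters & 0x0F = 15 -> 1 << 15 = 32768 bytes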

View File

@@ -0,0 +1,69 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Ace.Headers;
public abstract partial class AceHeader
{
public abstract ValueTask<AceHeader?> ReadAsync(
Stream reader,
CancellationToken cancellationToken = default
);
public async ValueTask<byte[]> ReadHeaderAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
// Read header CRC (2 bytes) and header size (2 bytes)
var headerBytes = new byte[4];
if (!await stream.ReadFullyAsync(headerBytes, 0, 4, cancellationToken))
{
return Array.Empty<byte>();
}
HeaderCrc = BitConverter.ToUInt16(headerBytes, 0); // CRC for validation
HeaderSize = BitConverter.ToUInt16(headerBytes, 2);
if (HeaderSize == 0)
{
return Array.Empty<byte>();
}
// Read the header data
var body = new byte[HeaderSize];
if (!await stream.ReadFullyAsync(body, 0, HeaderSize, cancellationToken))
{
return Array.Empty<byte>();
}
// Verify crc
var checksum = AceCrc.AceCrc16(body);
if (checksum != HeaderCrc)
{
throw new InvalidDataException("Header checksum is invalid");
}
return body;
}
/// <summary>
/// Asynchronously checks if the stream is an ACE archive
/// </summary>
/// <param name="stream">The stream to read from</param>
/// <param name="cancellationToken">Cancellation token</param>
/// <returns>True if the stream is an ACE archive, false otherwise</returns>
public static async ValueTask<bool> IsArchiveAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
var bytes = new byte[14];
if (!await stream.ReadFullyAsync(bytes, 0, 14, cancellationToken))
{
return false;
}
return CheckMagicBytes(bytes, 7);
}
}
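Detection consumes up to 14 bytes and does not seek back, so a caller sketch looks like this (assumes a seekable stream):
var start = stream.Position;
var isAce = await AceHeader.IsArchiveAsync(stream);
stream.Position = start; // rewind before handing the stream to a parser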

View File

@@ -1,153 +1,156 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Arj.Headers;
using SharpCompress.Crypto;
namespace SharpCompress.Common.Ace.Headers;
/// <summary>
/// Header type constants
/// </summary>
public enum AceHeaderType
{
MAIN = 0,
FILE = 1,
RECOVERY32 = 2,
RECOVERY64A = 3,
RECOVERY64B = 4,
}
public abstract partial class AceHeader
{
// ACE signature: bytes at offset 7 should be "**ACE**"
private static readonly byte[] AceSignature =
[
(byte)'*',
(byte)'*',
(byte)'A',
(byte)'C',
(byte)'E',
(byte)'*',
(byte)'*',
];
public AceHeader(IArchiveEncoding archiveEncoding, AceHeaderType type)
{
AceHeaderType = type;
ArchiveEncoding = archiveEncoding;
}
public IArchiveEncoding ArchiveEncoding { get; }
public AceHeaderType AceHeaderType { get; }
public ushort HeaderFlags { get; set; }
public ushort HeaderCrc { get; set; }
public ushort HeaderSize { get; set; }
public byte HeaderType { get; set; }
public bool IsFileEncrypted =>
(HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.FILE_ENCRYPTED) != 0;
public bool Is64Bit =>
(HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.MEMORY_64BIT) != 0;
public bool IsSolid =>
(HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.SOLID_MAIN) != 0;
public bool IsMultiVolume =>
(HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.MULTIVOLUME) != 0;
public abstract AceHeader? Read(Stream reader);
// Async methods moved to AceHeader.Async.cs
public byte[] ReadHeader(Stream stream)
{
// Read header CRC (2 bytes) and header size (2 bytes)
var headerBytes = new byte[4];
if (!stream.ReadFully(headerBytes))
{
return Array.Empty<byte>();
}
HeaderCrc = BitConverter.ToUInt16(headerBytes, 0); // CRC for validation
HeaderSize = BitConverter.ToUInt16(headerBytes, 2);
if (HeaderSize == 0)
{
return Array.Empty<byte>();
}
// Read the header data
var body = new byte[HeaderSize];
if (!stream.ReadFully(body))
{
return Array.Empty<byte>();
}
// Verify crc
var checksum = AceCrc.AceCrc16(body);
if (checksum != HeaderCrc)
{
throw new InvalidDataException("Header checksum is invalid");
}
return body;
}
public static bool IsArchive(Stream stream)
{
// ACE files have a specific signature
// First two bytes are typically 0x60 0xEA (signature bytes)
// At offset 7, there should be "**ACE**" (7 bytes)
var bytes = new byte[14];
if (stream.Read(bytes, 0, 14) != 14)
{
return false;
}
// Check for "**ACE**" at offset 7
return CheckMagicBytes(bytes, 7);
}
protected static bool CheckMagicBytes(byte[] headerBytes, int offset)
{
// Check for "**ACE**" at specified offset
for (int i = 0; i < AceSignature.Length; i++)
{
if (headerBytes[offset + i] != AceSignature[i])
{
return false;
}
}
return true;
}
protected DateTime ConvertDosDateTime(uint dosDateTime)
{
try
{
int second = (int)(dosDateTime & 0x1F) * 2;
int minute = (int)((dosDateTime >> 5) & 0x3F);
int hour = (int)((dosDateTime >> 11) & 0x1F);
int day = (int)((dosDateTime >> 16) & 0x1F);
int month = (int)((dosDateTime >> 21) & 0x0F);
int year = (int)((dosDateTime >> 25) & 0x7F) + 1980;
if (
day < 1
|| day > 31
|| month < 1
|| month > 12
|| hour > 23
|| minute > 59
|| second > 59
)
{
return DateTime.MinValue;
}
return new DateTime(year, month, day, hour, minute, second);
}
catch
{
return DateTime.MinValue;
}
}
}
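A worked example of the DOS date/time unpacking above (values chosen for illustration):
// Packed bit fields, LSB first: seconds/2 (5 bits), minutes (6), hours (5),
// day (5), month (4), years since 1980 (7).
uint dos = 5u | (30u << 5) | (15u << 11) | (4u << 16) | (2u << 21) | (46u << 25);
// ConvertDosDateTime(dos) yields 2026-02-04 15:30:10 (seconds are stored halved: 5 * 2 = 10).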

View File

@@ -0,0 +1,83 @@
using System;
using System.Buffers.Binary;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Crypto;
namespace SharpCompress.Common.Ace.Headers;
public sealed partial class AceMainHeader
{
/// <summary>
/// Asynchronously reads the main archive header from the stream.
/// Returns header if this is a valid ACE archive.
/// Supports both ACE 1.0 and ACE 2.0 formats.
/// </summary>
public override async ValueTask<AceHeader?> ReadAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
var headerData = await ReadHeaderAsync(stream, cancellationToken);
if (headerData.Length == 0)
{
return null;
}
int offset = 0;
// Header type should be 0 for main header
if (headerData[offset++] != HeaderType)
{
return null;
}
// Header flags (2 bytes)
HeaderFlags = BitConverter.ToUInt16(headerData, offset);
offset += 2;
// Skip signature "**ACE**" (7 bytes)
if (!CheckMagicBytes(headerData, offset))
{
throw new InvalidDataException("Invalid ACE archive signature.");
}
offset += 7;
// ACE version (1 byte) - 10 for ACE 1.0, 20 for ACE 2.0
AceVersion = headerData[offset++];
ExtractVersion = headerData[offset++];
// Host OS (1 byte)
if (offset < headerData.Length)
{
var hostOsByte = headerData[offset++];
HostOS = hostOsByte <= 11 ? (HostOS)hostOsByte : HostOS.Unknown;
}
// Volume number (1 byte)
VolumeNumber = headerData[offset++];
// Creation date/time (4 bytes)
var dosDateTime = BitConverter.ToUInt32(headerData, offset);
DateTime = ConvertDosDateTime(dosDateTime);
offset += 4;
// Reserved fields (8 bytes)
if (offset + 8 <= headerData.Length)
{
offset += 8;
}
// Skip additional fields based on flags
// Handle comment if present
if ((HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.COMMENT) != 0)
{
if (offset + 2 <= headerData.Length)
{
ushort commentLength = BitConverter.ToUInt16(headerData, offset);
offset += 2 + commentLength;
}
}
return this;
}
}

View File

@@ -2,96 +2,99 @@ using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Ace.Headers;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Crypto;
namespace SharpCompress.Common.Ace.Headers;
/// <summary>
/// ACE main archive header
/// </summary>
public sealed partial class AceMainHeader : AceHeader
{
public byte ExtractVersion { get; set; }
public byte CreatorVersion { get; set; }
public HostOS HostOS { get; set; }
public byte VolumeNumber { get; set; }
public DateTime DateTime { get; set; }
public string Advert { get; set; } = string.Empty;
public List<byte> Comment { get; set; } = new();
public byte AceVersion { get; private set; }
public AceMainHeader(IArchiveEncoding archiveEncoding)
: base(archiveEncoding, AceHeaderType.MAIN) { }
/// <summary>
/// Reads the main archive header from the stream.
/// Returns header if this is a valid ACE archive.
/// Supports both ACE 1.0 and ACE 2.0 formats.
/// </summary>
public override AceHeader? Read(Stream stream)
{
var headerData = ReadHeader(stream);
if (headerData.Length == 0)
{
return null;
}
int offset = 0;
// Header type should be 0 for main header
if (headerData[offset++] != HeaderType)
{
return null;
}
// Header flags (2 bytes)
HeaderFlags = BitConverter.ToUInt16(headerData, offset);
offset += 2;
// Skip signature "**ACE**" (7 bytes)
if (!CheckMagicBytes(headerData, offset))
{
throw new InvalidDataException("Invalid ACE archive signature.");
}
offset += 7;
// ACE version (1 byte) - 10 for ACE 1.0, 20 for ACE 2.0
AceVersion = headerData[offset++];
ExtractVersion = headerData[offset++];
// Host OS (1 byte)
if (offset < headerData.Length)
{
var hostOsByte = headerData[offset++];
HostOS = hostOsByte <= 11 ? (HostOS)hostOsByte : HostOS.Unknown;
}
// Volume number (1 byte)
VolumeNumber = headerData[offset++];
// Creation date/time (4 bytes)
var dosDateTime = BitConverter.ToUInt32(headerData, offset);
DateTime = ConvertDosDateTime(dosDateTime);
offset += 4;
// Reserved fields (8 bytes)
if (offset + 8 <= headerData.Length)
{
offset += 8;
}
// Skip additional fields based on flags
// Handle comment if present
if ((HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.COMMENT) != 0)
{
if (offset + 2 <= headerData.Length)
{
ushort commentLength = BitConverter.ToUInt16(headerData, offset);
offset += 2 + commentLength;
}
}
return this;
}
// ReadAsync moved to AceMainHeader.Async.cs
}

View File

@@ -1,16 +1,15 @@
namespace SharpCompress.Common.Ace.Headers;
/// <summary>
/// Compression quality
/// </summary>
public enum CompressionQuality
{
None,
Fastest,
Fast,
Normal,
Good,
Best,
Unknown,
}

View File

@@ -1,13 +1,12 @@
namespace SharpCompress.Common.Ace.Headers;
/// <summary>
/// Compression types
/// </summary>
public enum CompressionType
{
Stored,
Lz77,
Blocked,
Unknown,
}

View File

@@ -1,33 +1,32 @@
namespace SharpCompress.Common.Ace.Headers;
/// <summary>
/// Header flags (main + file, overlapping meanings)
/// </summary>
public static class HeaderFlags
{
// Shared / low bits
public const ushort ADDSIZE = 0x0001; // extra size field present
public const ushort COMMENT = 0x0002; // comment present
public const ushort MEMORY_64BIT = 0x0004;
public const ushort AV_STRING = 0x0008; // AV string present
public const ushort SOLID = 0x0010; // solid file
public const ushort LOCKED = 0x0020;
public const ushort PROTECTED = 0x0040;
// Main header specific
public const ushort V20FORMAT = 0x0100;
public const ushort SFX = 0x0200;
public const ushort LIMITSFXJR = 0x0400;
public const ushort MULTIVOLUME = 0x0800;
public const ushort ADVERT = 0x1000;
public const ushort RECOVERY = 0x2000;
public const ushort LOCKED_MAIN = 0x4000;
public const ushort SOLID_MAIN = 0x8000;
// File header specific (same bits, different meaning)
public const ushort NTSECURITY = 0x0400;
public const ushort CONTINUED_PREV = 0x1000;
public const ushort CONTINUED_NEXT = 0x2000;
public const ushort FILE_ENCRYPTED = 0x4000; // file encrypted (file header)
}
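Because the same bit carries different meanings per header type, flag tests must know which header they are reading; a sketch (fileFlags is a hypothetical ushort taken from a file header):
// 0x1000 is ADVERT in a main header but CONTINUED_PREV in a file header.
bool spansFromPreviousVolume = (fileFlags & HeaderFlags.CONTINUED_PREV) != 0;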

View File

@@ -1,22 +1,21 @@
namespace SharpCompress.Common.Ace.Headers;
/// <summary>
/// Host OS type
/// </summary>
public enum HostOS
{
MsDos = 0,
Os2,
Windows,
Unix,
MacOs,
WinNt,
Primos,
AppleGs,
Atari,
Vax,
Amiga,
Next,
Unknown,
}

View File

@@ -7,54 +7,53 @@ using System.Threading.Tasks;
using SharpCompress.Common.GZip;
using SharpCompress.Common.Tar;
namespace SharpCompress.Common.Arc;
public class ArcEntry : Entry
{
private readonly ArcFilePart? _filePart;
internal ArcEntry(ArcFilePart? filePart)
{
_filePart = filePart;
}
public override long Crc
{
get
{
if (_filePart == null)
{
return 0;
}
return _filePart.Header.Crc16;
}
}
public override string? Key => _filePart?.Header.Name;
public override string? LinkTarget => null;
public override long CompressedSize => _filePart?.Header.CompressedSize ?? 0;
public override CompressionType CompressionType =>
_filePart?.Header.CompressionMethod ?? CompressionType.Unknown;
public override long Size => throw new NotImplementedException();
public override DateTime? LastModifiedTime => null;
public override DateTime? CreatedTime => null;
public override DateTime? LastAccessedTime => null;
public override DateTime? ArchivedTime => null;
public override bool IsEncrypted => false;
public override bool IsDirectory => false;
public override bool IsSplitAfter => false;
internal override IEnumerable<FilePart> Parts => _filePart.Empty();
}

View File

@@ -2,75 +2,93 @@ using System;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Arc;
public class ArcEntryHeader
{
public IArchiveEncoding ArchiveEncoding { get; }
public CompressionType CompressionMethod { get; private set; }
public string? Name { get; private set; }
public long CompressedSize { get; private set; }
public DateTime DateTime { get; private set; }
public int Crc16 { get; private set; }
public long OriginalSize { get; private set; }
public long DataStartPosition { get; private set; }
public ArcEntryHeader(IArchiveEncoding archiveEncoding)
{
this.ArchiveEncoding = archiveEncoding;
}
public ArcEntryHeader? ReadHeader(Stream stream)
{
byte[] headerBytes = new byte[29];
if (stream.Read(headerBytes, 0, headerBytes.Length) != headerBytes.Length)
{
return null;
}
DataStartPosition = stream.Position;
return LoadFrom(headerBytes);
}
public async ValueTask<ArcEntryHeader?> ReadHeaderAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
byte[] headerBytes = new byte[29];
if (
await stream.ReadAsync(headerBytes, 0, headerBytes.Length, cancellationToken)
!= headerBytes.Length
)
{
return null;
}
DataStartPosition = stream.Position;
return LoadFrom(headerBytes);
}
public ArcEntryHeader LoadFrom(byte[] headerBytes)
{
CompressionMethod = GetCompressionType(headerBytes[1]);
// Read name
int nameEnd = Array.IndexOf(headerBytes, (byte)0, 1); // Find null terminator
Name = Encoding.UTF8.GetString(headerBytes, 2, nameEnd > 0 ? nameEnd - 2 : 12);
int offset = 15;
CompressedSize = BitConverter.ToUInt32(headerBytes, offset);
offset += 4;
uint rawDateTime = BitConverter.ToUInt32(headerBytes, offset);
DateTime = ConvertToDateTime(rawDateTime);
offset += 4;
Crc16 = BitConverter.ToUInt16(headerBytes, offset);
offset += 2;
OriginalSize = BitConverter.ToUInt32(headerBytes, offset);
return this;
}
private CompressionType GetCompressionType(byte value)
{
return value switch
{
1 or 2 => CompressionType.None,
3 => CompressionType.Packed,
4 => CompressionType.Squeezed,
5 or 6 or 7 or 8 => CompressionType.Crunched,
9 => CompressionType.Squashed,
10 => CompressionType.Crushed,
11 => CompressionType.Distilled,
_ => CompressionType.Unknown,
};
}
public static DateTime ConvertToDateTime(long rawDateTime)
{
// Convert Unix timestamp to DateTime (UTC)
return DateTimeOffset.FromUnixTimeSeconds(rawDateTime).UtcDateTime;
}
}
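For reference, the fixed 29-byte entry layout implied by LoadFrom above (offsets in bytes; byte 0 is the ARC marker 0x1A in the on-disk format, which LoadFrom does not re-check):
// 0      marker byte
// 1      compression method
// 2..14  file name, null-terminated
// 15..18 compressed size (uint32, little-endian)
// 19..22 date/time stamp
// 23..24 CRC-16
// 25..28 original size (uint32, little-endian)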

View File

@@ -0,0 +1,58 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.Lzw;
using SharpCompress.Compressors.RLE90;
using SharpCompress.Compressors.Squeezed;
using SharpCompress.IO;
namespace SharpCompress.Common.Arc;
public partial class ArcFilePart
{
internal override async ValueTask<Stream?> GetCompressedStreamAsync(
CancellationToken cancellationToken = default
)
{
if (_stream != null)
{
Stream compressedStream;
switch (Header.CompressionMethod)
{
case CompressionType.None:
compressedStream = new ReadOnlySubStream(
_stream,
Header.DataStartPosition,
Header.CompressedSize
);
break;
case CompressionType.Packed:
compressedStream = new RunLength90Stream(_stream, (int)Header.CompressedSize);
break;
case CompressionType.Squeezed:
compressedStream = await SqueezeStream.CreateAsync(
_stream,
(int)Header.CompressedSize,
cancellationToken
);
break;
case CompressionType.Crunched:
if (Header.OriginalSize > 128 * 1024)
{
throw new NotSupportedException(
"CompressionMethod: " + Header.CompressionMethod + " with size > 128KB"
);
}
compressedStream = new ArcLzwStream(_stream, (int)Header.CompressedSize, true);
break;
default:
throw new NotSupportedException(
"CompressionMethod: " + Header.CompressionMethod
);
}
return compressedStream;
}
return _stream;
}
}

View File

@@ -13,71 +13,61 @@ using SharpCompress.Compressors.RLE90;
using SharpCompress.Compressors.Squeezed;
using SharpCompress.IO;
namespace SharpCompress.Common.Arc;
public partial class ArcFilePart : FilePart
{
private readonly Stream? _stream;
internal ArcFilePart(ArcEntryHeader localArcHeader, Stream? seekableStream)
: base(localArcHeader.ArchiveEncoding)
{
_stream = seekableStream;
Header = localArcHeader;
}
internal ArcEntryHeader Header { get; set; }
internal override string? FilePartName => Header.Name;
internal override Stream GetCompressedStream()
{
if (_stream != null)
{
Stream compressedStream;
switch (Header.CompressionMethod)
{
case CompressionType.None:
compressedStream = new ReadOnlySubStream(
_stream,
Header.DataStartPosition,
Header.CompressedSize
);
break;
case CompressionType.Packed:
compressedStream = new RunLength90Stream(_stream, (int)Header.CompressedSize);
break;
case CompressionType.Squeezed:
compressedStream = SqueezeStream.Create(_stream, (int)Header.CompressedSize);
break;
case CompressionType.Crunched:
if (Header.OriginalSize > 128 * 1024)
{
throw new NotSupportedException(
"CompressionMethod: " + Header.CompressionMethod + " with size > 128KB"
);
}
compressedStream = new ArcLzwStream(_stream, (int)Header.CompressedSize, true);
break;
default:
throw new NotSupportedException(
"CompressionMethod: " + Header.CompressionMethod
);
}
return compressedStream;
}
return _stream.NotNull();
}
internal override Stream? GetRawStream() => _stream;
}

View File

@@ -6,11 +6,10 @@ using System.Text;
using System.Threading.Tasks;
using SharpCompress.Readers;
namespace SharpCompress.Common.Arc;
public class ArcVolume : Volume
{
public ArcVolume(Stream stream, ReaderOptions readerOptions, int index = 0)
: base(stream, readerOptions, index) { }
}

View File

@@ -6,53 +6,52 @@ using System.Threading.Tasks;
using SharpCompress.Common.Arc;
using SharpCompress.Common.Arj.Headers;
namespace SharpCompress.Common.Arj;
public class ArjEntry : Entry
{
private readonly ArjFilePart _filePart;
internal ArjEntry(ArjFilePart filePart)
{
_filePart = filePart;
}
public override long Crc => _filePart.Header.OriginalCrc32;
public override string? Key => _filePart?.Header.Name;
public override string? LinkTarget => null;
public override long CompressedSize => _filePart?.Header.CompressedSize ?? 0;
public override CompressionType CompressionType
{
get
{
if (_filePart.Header.CompressionMethod == CompressionMethod.Stored)
{
return CompressionType.None;
}
return CompressionType.ArjLZ77;
}
}
public override long Size => _filePart?.Header.OriginalSize ?? 0;
public override DateTime? LastModifiedTime => _filePart.Header.DateTimeModified.DateTime;
public override DateTime? CreatedTime => _filePart.Header.DateTimeCreated.DateTime;
public override DateTime? LastAccessedTime => _filePart.Header.DateTimeAccessed.DateTime;
public override DateTime? ArchivedTime => null;
public override bool IsEncrypted => false;
public override bool IsDirectory => _filePart.Header.FileType == FileType.Directory;
public override bool IsSplitAfter => false;
internal override IEnumerable<FilePart> Parts => _filePart.Empty();
}

View File

@@ -8,65 +8,62 @@ using SharpCompress.Common.Arj.Headers;
using SharpCompress.Compressors.Arj;
using SharpCompress.IO;

namespace SharpCompress.Common.Arj;

public class ArjFilePart : FilePart
{
    private readonly Stream _stream;

    internal ArjLocalHeader Header { get; set; }

    internal ArjFilePart(ArjLocalHeader localArjHeader, Stream seekableStream)
        : base(localArjHeader.ArchiveEncoding)
    {
        _stream = seekableStream;
        Header = localArjHeader;
    }

    internal override string? FilePartName => Header.Name;

    internal override Stream GetCompressedStream()
    {
        if (_stream != null)
        {
            Stream compressedStream;
            switch (Header.CompressionMethod)
            {
                case CompressionMethod.Stored:
                    compressedStream = new ReadOnlySubStream(
                        _stream,
                        Header.DataStartPosition,
                        Header.CompressedSize
                    );
                    break;
                case CompressionMethod.CompressedMost:
                case CompressionMethod.Compressed:
                case CompressionMethod.CompressedFaster:
                    if (Header.OriginalSize > 128 * 1024)
                    {
                        throw new NotSupportedException(
                            "CompressionMethod: " + Header.CompressionMethod + " with size > 128KB"
                        );
                    }
                    compressedStream = new LhaStream<Lh7DecoderCfg>(
                        _stream,
                        (int)Header.OriginalSize
                    );
                    break;
                case CompressionMethod.CompressedFastest:
                    compressedStream = new LHDecoderStream(_stream, (int)Header.OriginalSize);
                    break;
                default:
                    throw new NotSupportedException(
                        "CompressionMethod: " + Header.CompressionMethod
                    );
            }
            return compressedStream;
        }
        return _stream.NotNull();
    }

    internal override Stream GetRawStream() => _stream;
}
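GetCompressedStream dispatches on the header's method byte: stored entries become a bounded window over the source stream, while the LH-compressed variants wrap it in a decoder. A toy sketch of the stored path's windowing idea (this illustrates what ReadOnlySubStream provides; it is not that type's implementation):

using System;
using System.IO;

// Toy illustration of the "stored" branch: read exactly
// Header.CompressedSize bytes starting at Header.DataStartPosition.
static byte[] ReadStoredEntry(Stream source, long dataStart, long size)
{
    source.Position = dataStart;        // assumes a seekable stream
    var buffer = new byte[size];        // stored entries need no decoding
    var total = 0;
    while (total < buffer.Length)
    {
        var read = source.Read(buffer, total, buffer.Length - total);
        if (read == 0)
        {
            throw new EndOfStreamException("entry truncated");
        }
        total += read;
    }
    return buffer;
}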


@@ -8,29 +8,28 @@ using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Readers;

namespace SharpCompress.Common.Arj;

public class ArjVolume : Volume
{
    public ArjVolume(Stream stream, ReaderOptions readerOptions, int index = 0)
        : base(stream, readerOptions, index) { }

    public override bool IsFirstVolume
    {
        get { return true; }
    }

    /// <summary>
    /// Whether this volume belongs to a multi-part archive.
    /// Multi-volume ARJ archives are not currently supported, so this is always false.
    /// </summary>
    public override bool IsMultiVolume
    {
        get { return false; }
    }

    internal IEnumerable<ArjFilePart> GetVolumeFileParts()
    {
        return new List<ArjFilePart>();
    }
}


@@ -0,0 +1,132 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Crypto;

namespace SharpCompress.Common.Arj.Headers;

public abstract partial class ArjHeader
{
    public abstract ValueTask<ArjHeader?> ReadAsync(
        Stream reader,
        CancellationToken cancellationToken = default
    );

    public async ValueTask<byte[]> ReadHeaderAsync(
        Stream stream,
        CancellationToken cancellationToken = default
    )
    {
        // check for magic bytes
        var magic = new byte[2];
        if (await stream.ReadAsync(magic, 0, 2, cancellationToken) != 2)
        {
            return Array.Empty<byte>();
        }
        if (!CheckMagicBytes(magic))
        {
            throw new InvalidDataException("Not an ARJ file (wrong magic bytes)");
        }
        // read header_size
        byte[] headerBytes = new byte[2];
        await stream.ReadAsync(headerBytes, 0, 2, cancellationToken);
        var headerSize = (ushort)(headerBytes[0] | headerBytes[1] << 8);
        if (headerSize < 1)
        {
            return Array.Empty<byte>();
        }
        var body = new byte[headerSize];
        var read = await stream.ReadAsync(body, 0, headerSize, cancellationToken);
        if (read < headerSize)
        {
            return Array.Empty<byte>();
        }
        byte[] crc = new byte[4];
        read = await stream.ReadAsync(crc, 0, 4, cancellationToken);
        // compare the stored CRC32 with the checksum computed over the body
        var checksum = Crc32Stream.Compute(body);
        if (checksum != BitConverter.ToUInt32(crc, 0))
        {
            throw new InvalidDataException("Header checksum is invalid");
        }
        return body;
    }

    protected async ValueTask<List<byte[]>> ReadExtendedHeadersAsync(
        Stream reader,
        CancellationToken cancellationToken = default
    )
    {
        List<byte[]> extendedHeader = new List<byte[]>();
        byte[] buffer = new byte[2];
        while (true)
        {
            int bytesRead = await reader.ReadAsync(buffer, 0, 2, cancellationToken);
            if (bytesRead < 2)
            {
                throw new EndOfStreamException(
                    "Unexpected end of stream while reading extended header size."
                );
            }
            var extHeaderSize = (ushort)(buffer[0] | (buffer[1] << 8));
            if (extHeaderSize == 0)
            {
                return extendedHeader;
            }
            byte[] header = new byte[extHeaderSize];
            bytesRead = await reader.ReadAsync(header, 0, extHeaderSize, cancellationToken);
            if (bytesRead < extHeaderSize)
            {
                throw new EndOfStreamException(
                    "Unexpected end of stream while reading extended header data."
                );
            }
            byte[] crcextended = new byte[4];
            bytesRead = await reader.ReadAsync(crcextended, 0, 4, cancellationToken);
            if (bytesRead < 4)
            {
                throw new EndOfStreamException(
                    "Unexpected end of stream while reading extended header CRC."
                );
            }
            var checksum = Crc32Stream.Compute(header);
            if (checksum != BitConverter.ToUInt32(crcextended, 0))
            {
                throw new InvalidDataException("Extended header checksum is invalid");
            }
            extendedHeader.Add(header);
        }
    }

    /// <summary>
    /// Asynchronously checks whether the stream starts with the ARJ magic bytes.
    /// Note that this consumes two bytes from the stream.
    /// </summary>
    /// <param name="stream">The stream to read from</param>
    /// <param name="cancellationToken">Cancellation token</param>
    /// <returns>True if the stream is an ARJ archive, false otherwise</returns>
    public static async ValueTask<bool> IsArchiveAsync(
        Stream stream,
        CancellationToken cancellationToken = default
    )
    {
        var bytes = new byte[2];
        if (await stream.ReadAsync(bytes, 0, 2, cancellationToken) != 2)
        {
            return false;
        }
        return CheckMagicBytes(bytes);
    }
}
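Because the probe consumes the two magic bytes, a caller working with a seekable stream will typically rewind before handing the stream to a reader. A minimal sketch; the path is illustrative:

using System.IO;
using SharpCompress.Common.Arj.Headers;

// Hedged sketch: probe a file for the ARJ magic, then rewind.
await using var stream = File.OpenRead("sample.arj"); // illustrative path
var isArj = await ArjHeader.IsArchiveAsync(stream);
stream.Position = 0; // the probe advanced the stream by two bytes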


@@ -3,156 +3,158 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Crypto;

namespace SharpCompress.Common.Arj.Headers;

public enum ArjHeaderType
{
    MainHeader,
    LocalHeader,
}

public abstract partial class ArjHeader
{
    private const int FIRST_HDR_SIZE = 34;
    private const ushort ARJ_MAGIC = 0xEA60;

    public ArjHeader(ArjHeaderType type)
    {
        ArjHeaderType = type;
    }

    public ArjHeaderType ArjHeaderType { get; }
    public byte Flags { get; set; }
    public FileType FileType { get; set; }

    public abstract ArjHeader? Read(Stream reader);

    // Async methods moved to ArjHeader.Async.cs

    public byte[] ReadHeader(Stream stream)
    {
        // check for magic bytes
        var magic = new byte[2];
        if (stream.Read(magic) != 2)
        {
            return Array.Empty<byte>();
        }
        if (!CheckMagicBytes(magic))
        {
            throw new InvalidDataException("Not an ARJ file (wrong magic bytes)");
        }
        // read header_size
        byte[] headerBytes = new byte[2];
        stream.Read(headerBytes, 0, 2);
        var headerSize = (ushort)(headerBytes[0] | headerBytes[1] << 8);
        if (headerSize < 1)
        {
            return Array.Empty<byte>();
        }
        var body = new byte[headerSize];
        var read = stream.Read(body, 0, headerSize);
        if (read < headerSize)
        {
            return Array.Empty<byte>();
        }
        byte[] crc = new byte[4];
        read = stream.Read(crc, 0, 4);
        // compare the stored CRC32 with the checksum computed over the body
        var checksum = Crc32Stream.Compute(body);
        if (checksum != BitConverter.ToUInt32(crc, 0))
        {
            throw new InvalidDataException("Header checksum is invalid");
        }
        return body;
    }

    // ReadHeaderAsync moved to ArjHeader.Async.cs

    protected List<byte[]> ReadExtendedHeaders(Stream reader)
    {
        List<byte[]> extendedHeader = new List<byte[]>();
        byte[] buffer = new byte[2];
        while (true)
        {
            int bytesRead = reader.Read(buffer, 0, 2);
            if (bytesRead < 2)
            {
                throw new EndOfStreamException(
                    "Unexpected end of stream while reading extended header size."
                );
            }
            var extHeaderSize = (ushort)(buffer[0] | (buffer[1] << 8));
            if (extHeaderSize == 0)
            {
                return extendedHeader;
            }
            byte[] header = new byte[extHeaderSize];
            bytesRead = reader.Read(header, 0, extHeaderSize);
            if (bytesRead < extHeaderSize)
            {
                throw new EndOfStreamException(
                    "Unexpected end of stream while reading extended header data."
                );
            }
            byte[] crc = new byte[4];
            bytesRead = reader.Read(crc, 0, 4);
            if (bytesRead < 4)
            {
                throw new EndOfStreamException(
                    "Unexpected end of stream while reading extended header CRC."
                );
            }
            var checksum = Crc32Stream.Compute(header);
            if (checksum != BitConverter.ToUInt32(crc, 0))
            {
                throw new InvalidDataException("Extended header checksum is invalid");
            }
            extendedHeader.Add(header);
        }
    }

    // Flag helpers
    public bool IsGabled => (Flags & 0x01) != 0;
    public bool IsAnsiPage => (Flags & 0x02) != 0;
    public bool IsVolume => (Flags & 0x04) != 0;
    public bool IsArjProtected => (Flags & 0x08) != 0;
    public bool IsPathSym => (Flags & 0x10) != 0;
    public bool IsBackup => (Flags & 0x20) != 0;
    public bool IsSecured => (Flags & 0x40) != 0;
    public bool IsAltName => (Flags & 0x80) != 0;

    public static FileType FileTypeFromByte(byte value)
    {
        return Enum.IsDefined(typeof(FileType), value) ? (FileType)value : Headers.FileType.Unknown;
    }

    public static bool IsArchive(Stream stream)
    {
        var bytes = new byte[2];
        if (stream.Read(bytes, 0, 2) != 2)
        {
            return false;
        }
        return CheckMagicBytes(bytes);
    }

    protected static bool CheckMagicBytes(byte[] headerBytes)
    {
        var magicValue = (ushort)(headerBytes[0] | headerBytes[1] << 8);
        return magicValue == ARJ_MAGIC;
    }
}
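Both the sync and async readers follow the same wire layout: a 2-byte little-endian magic (0xEA60), a 2-byte little-endian header size, the header body, then a 4-byte CRC32 computed over the body. A worked example for a 46-byte body:

offset  0: 60 EA            magic, little-endian 0xEA60
offset  2: 2E 00            header size = 0x002E = 46
offset  4: <46 body bytes>  parsed by the concrete header's LoadFrom
offset 50: xx xx xx xx      CRC32 of the 46 body bytes, little-endian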


@@ -0,0 +1,24 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;

namespace SharpCompress.Common.Arj.Headers;

public partial class ArjLocalHeader
{
    public override async ValueTask<ArjHeader?> ReadAsync(
        Stream stream,
        CancellationToken cancellationToken = default
    )
    {
        var body = await ReadHeaderAsync(stream, cancellationToken);
        if (body.Length > 0)
        {
            await ReadExtendedHeadersAsync(stream, cancellationToken);
            var header = LoadFrom(body);
            header.DataStartPosition = stream.Position;
            return header;
        }
        return null;
    }
}
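After ReadAsync returns, DataStartPosition points at the first byte of the entry's data, and the next local header begins CompressedSize bytes later; an empty header body (the end-of-archive marker) maps to null. A hypothetical reader loop built on that contract (archiveEncoding and stream are assumed to be in scope; this is not the library's actual reader):

// Hedged sketch: walk successive local headers until the end-of-archive
// marker. Assumes `stream` is seekable and `archiveEncoding` is configured.
while (await new ArjLocalHeader(archiveEncoding).ReadAsync(stream)
    is ArjLocalHeader header)
{
    // Skip over the entry's data to land on the next local header.
    stream.Position = header.DataStartPosition + header.CompressedSize;
}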


@@ -4,158 +4,159 @@ using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Text;
using System.Threading;
using System.Threading.Tasks;

namespace SharpCompress.Common.Arj.Headers;

public partial class ArjLocalHeader : ArjHeader
{
    public ArchiveEncoding ArchiveEncoding { get; }
    public long DataStartPosition { get; protected set; }
    public byte ArchiverVersionNumber { get; set; }
    public byte MinVersionToExtract { get; set; }
    public HostOS HostOS { get; set; }
    public CompressionMethod CompressionMethod { get; set; }
    public DosDateTime DateTimeModified { get; set; } = new DosDateTime(0);
    public long CompressedSize { get; set; }
    public long OriginalSize { get; set; }
    public long OriginalCrc32 { get; set; }
    public int FileSpecPosition { get; set; }
    public int FileAccessMode { get; set; }
    public byte FirstChapter { get; set; }
    public byte LastChapter { get; set; }
    public long ExtendedFilePosition { get; set; }
    public DosDateTime DateTimeAccessed { get; set; } = new DosDateTime(0);
    public DosDateTime DateTimeCreated { get; set; } = new DosDateTime(0);
    public long OriginalSizeEvenForVolumes { get; set; }
    public string Name { get; set; } = string.Empty;
    public string Comment { get; set; } = string.Empty;

    private const byte StdHdrSize = 30;
    private const byte R9HdrSize = 46;

    public ArjLocalHeader(ArchiveEncoding archiveEncoding)
        : base(ArjHeaderType.LocalHeader)
    {
        ArchiveEncoding =
            archiveEncoding ?? throw new ArgumentNullException(nameof(archiveEncoding));
    }

    public override ArjHeader? Read(Stream stream)
    {
        var body = ReadHeader(stream);
        if (body.Length > 0)
        {
            ReadExtendedHeaders(stream);
            var header = LoadFrom(body);
            header.DataStartPosition = stream.Position;
            return header;
        }
        return null;
    }

    // ReadAsync moved to ArjLocalHeader.Async.cs

    public ArjLocalHeader LoadFrom(byte[] headerBytes)
    {
        int offset = 0;

        int ReadInt16()
        {
            if (offset + 1 >= headerBytes.Length)
            {
                throw new EndOfStreamException();
            }
            var v = headerBytes[offset] & 0xFF | (headerBytes[offset + 1] & 0xFF) << 8;
            offset += 2;
            return v;
        }

        long ReadInt32()
        {
            if (offset + 3 >= headerBytes.Length)
            {
                throw new EndOfStreamException();
            }
            long v =
                headerBytes[offset] & 0xFF
                | (headerBytes[offset + 1] & 0xFF) << 8
                | (headerBytes[offset + 2] & 0xFF) << 16
                | (headerBytes[offset + 3] & 0xFF) << 24;
            offset += 4;
            return v;
        }

        byte headerSize = headerBytes[offset++];
        ArchiverVersionNumber = headerBytes[offset++];
        MinVersionToExtract = headerBytes[offset++];
        HostOS = (HostOS)headerBytes[offset++];
        Flags = headerBytes[offset++];
        CompressionMethod = CompressionMethodFromByte(headerBytes[offset++]);
        FileType = FileTypeFromByte(headerBytes[offset++]);
        offset++; // Skip 1 reserved byte
        var rawTimestamp = ReadInt32();
        DateTimeModified = rawTimestamp != 0 ? new DosDateTime(rawTimestamp) : new DosDateTime(0);
        CompressedSize = ReadInt32();
        OriginalSize = ReadInt32();
        OriginalCrc32 = ReadInt32();
        FileSpecPosition = ReadInt16();
        FileAccessMode = ReadInt16();
        FirstChapter = headerBytes[offset++];
        LastChapter = headerBytes[offset++];
        ExtendedFilePosition = 0;
        OriginalSizeEvenForVolumes = 0;
        if (headerSize > StdHdrSize)
        {
            ExtendedFilePosition = ReadInt32();
            if (headerSize >= R9HdrSize)
            {
                rawTimestamp = ReadInt32();
                DateTimeAccessed =
                    rawTimestamp != 0 ? new DosDateTime(rawTimestamp) : new DosDateTime(0);
                rawTimestamp = ReadInt32();
                DateTimeCreated =
                    rawTimestamp != 0 ? new DosDateTime(rawTimestamp) : new DosDateTime(0);
                OriginalSizeEvenForVolumes = ReadInt32();
            }
        }
        Name = Encoding.ASCII.GetString(
            headerBytes,
            offset,
            Array.IndexOf(headerBytes, (byte)0, offset) - offset
        );
        offset += Name.Length + 1;
        Comment = Encoding.ASCII.GetString(
            headerBytes,
            offset,
            Array.IndexOf(headerBytes, (byte)0, offset) - offset
        );
        offset += Comment.Length + 1;
        return this;
    }

    public static CompressionMethod CompressionMethodFromByte(byte value)
    {
        return value switch
        {
            0 => CompressionMethod.Stored,
            1 => CompressionMethod.CompressedMost,
            2 => CompressionMethod.Compressed,
            3 => CompressionMethod.CompressedFaster,
            4 => CompressionMethod.CompressedFastest,
            8 => CompressionMethod.NoDataNoCrc,
            9 => CompressionMethod.NoData,
            _ => CompressionMethod.Unknown,
        };
    }
}
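The local ReadInt16/ReadInt32 helpers assemble little-endian values by OR-ing shifted bytes, low byte first. A worked example:

byte[] b = { 0x34, 0x12, 0x78, 0x56, 0x34, 0x12 };

// Same composition as ReadInt16 above: 0x34 | 0x12 << 8 == 0x1234 (4660).
int v16 = b[0] & 0xFF | (b[1] & 0xFF) << 8;

// Same composition as ReadInt32: bytes 0x78 0x56 0x34 0x12 -> 0x12345678.
long v32 =
    b[2] & 0xFF
    | (b[3] & 0xFF) << 8
    | (b[4] & 0xFF) << 16
    | (b[5] & 0xFF) << 24;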


@@ -0,0 +1,18 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;

namespace SharpCompress.Common.Arj.Headers;

public partial class ArjMainHeader
{
    public override async ValueTask<ArjHeader?> ReadAsync(
        Stream stream,
        CancellationToken cancellationToken = default
    )
    {
        var body = await ReadHeaderAsync(stream, cancellationToken);
        await ReadExtendedHeadersAsync(stream, cancellationToken);
        return LoadFrom(body);
    }
}


@@ -1,138 +1,141 @@
using System;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Crypto;

namespace SharpCompress.Common.Arj.Headers;

public partial class ArjMainHeader : ArjHeader
{
    private const int FIRST_HDR_SIZE = 34;
    private const ushort ARJ_MAGIC = 0xEA60;

    public ArchiveEncoding ArchiveEncoding { get; }
    public int ArchiverVersionNumber { get; private set; }
    public int MinVersionToExtract { get; private set; }
    public HostOS HostOs { get; private set; }
    public int SecurityVersion { get; private set; }
    public DosDateTime CreationDateTime { get; private set; } = new DosDateTime(0);
    public long CompressedSize { get; private set; }
    public long ArchiveSize { get; private set; }
    public long SecurityEnvelope { get; private set; }
    public int FileSpecPosition { get; private set; }
    public int SecurityEnvelopeLength { get; private set; }
    public int EncryptionVersion { get; private set; }
    public int LastChapter { get; private set; }
    public int ArjProtectionFactor { get; private set; }
    public int Flags2 { get; private set; }
    public string Name { get; private set; } = string.Empty;
    public string Comment { get; private set; } = string.Empty;

    public ArjMainHeader(ArchiveEncoding archiveEncoding)
        : base(ArjHeaderType.MainHeader)
    {
        ArchiveEncoding =
            archiveEncoding ?? throw new ArgumentNullException(nameof(archiveEncoding));
    }

    public override ArjHeader? Read(Stream stream)
    {
        var body = ReadHeader(stream);
        ReadExtendedHeaders(stream);
        return LoadFrom(body);
    }

    // ReadAsync moved to ArjMainHeader.Async.cs

    public ArjMainHeader LoadFrom(byte[] headerBytes)
    {
        var offset = 1;

        byte ReadByte()
        {
            if (offset >= headerBytes.Length)
            {
                throw new EndOfStreamException();
            }
            return (byte)(headerBytes[offset++] & 0xFF);
        }

        int ReadInt16()
        {
            if (offset + 1 >= headerBytes.Length)
            {
                throw new EndOfStreamException();
            }
            var v = headerBytes[offset] & 0xFF | (headerBytes[offset + 1] & 0xFF) << 8;
            offset += 2;
            return v;
        }

        long ReadInt32()
        {
            if (offset + 3 >= headerBytes.Length)
            {
                throw new EndOfStreamException();
            }
            long v =
                headerBytes[offset] & 0xFF
                | (headerBytes[offset + 1] & 0xFF) << 8
                | (headerBytes[offset + 2] & 0xFF) << 16
                | (headerBytes[offset + 3] & 0xFF) << 24;
            offset += 4;
            return v;
        }

        string ReadNullTerminatedString(byte[] x, int startIndex)
        {
            var result = new StringBuilder();
            int i = startIndex;
            while (i < x.Length && x[i] != 0)
            {
                result.Append((char)x[i]);
                i++;
            }
            // Skip the null terminator
            i++;
            if (i < x.Length)
            {
                byte[] remainder = new byte[x.Length - i];
                Array.Copy(x, i, remainder, 0, remainder.Length);
                x = remainder;
            }
            return result.ToString();
        }

        ArchiverVersionNumber = ReadByte();
        MinVersionToExtract = ReadByte();
        var hostOsByte = ReadByte();
        HostOs = hostOsByte <= 11 ? (HostOS)hostOsByte : HostOS.Unknown;
        Flags = ReadByte();
        SecurityVersion = ReadByte();
        FileType = FileTypeFromByte(ReadByte());
        offset++; // skip reserved
        CreationDateTime = new DosDateTime((int)ReadInt32());
        CompressedSize = ReadInt32();
        ArchiveSize = ReadInt32();
        SecurityEnvelope = ReadInt32();
        FileSpecPosition = ReadInt16();
        SecurityEnvelopeLength = ReadInt16();
        EncryptionVersion = ReadByte();
        LastChapter = ReadByte();
        Name = ReadNullTerminatedString(headerBytes, offset);
        Comment = ReadNullTerminatedString(headerBytes, offset + 1 + Name.Length);
        return this;
    }
}
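Name and Comment sit back-to-back at the end of the header body, each with its own NUL terminator, which is why the Comment read starts at offset + 1 + Name.Length. The same extraction in miniature (the sample bytes are illustrative):

using System;
using System.Text;

static string ReadNullTerminated(byte[] x, int start)
{
    var end = Array.IndexOf(x, (byte)0, start);
    return Encoding.ASCII.GetString(x, start, end - start);
}

byte[] tail = Encoding.ASCII.GetBytes("BACKUP.ARJ\0weekly set\0");
var name = ReadNullTerminated(tail, 0);                  // "BACKUP.ARJ"
var comment = ReadNullTerminated(tail, name.Length + 1); // "weekly set"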


@@ -4,17 +4,16 @@ using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace SharpCompress.Common.Arj.Headers;

public enum CompressionMethod
{
    Stored = 0,
    CompressedMost = 1,
    Compressed = 2,
    CompressedFaster = 3,
    CompressedFastest = 4,
    NoDataNoCrc = 8,
    NoData = 9,
    Unknown,
}
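Method bytes without a matching enum value (for example 5 through 7) fall through to Unknown via ArjLocalHeader.CompressionMethodFromByte:

using SharpCompress.Common.Arj.Headers;

var stored = ArjLocalHeader.CompressionMethodFromByte(0);  // Stored
var fastest = ArjLocalHeader.CompressionMethodFromByte(4); // CompressedFastest
var bogus = ArjLocalHeader.CompressionMethodFromByte(7);   // Unknown (unassigned value)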

Some files were not shown because too many files have changed in this diff.