Compare commits


582 Commits

Author SHA1 Message Date
copilot-swe-agent[bot]
3cc4ccedfe Fix decompression performance by delaying stream wrapping
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-27 10:58:53 +00:00
copilot-swe-agent[bot]
f27ea2d5f6 Initial plan for decompression performance fix
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-27 10:53:39 +00:00
copilot-swe-agent[bot]
292729028b Initial plan 2025-10-27 10:45:19 +00:00
Adam Hathcock
b3a20d05c5 Merge pull request #978 from adamhathcock/copilot/enhance-stream-io-async-support
Add comprehensive async/await support for Stream I/O operations
2025-10-27 10:23:08 +00:00
Adam Hathcock
4cd024a2b2 Merge remote-tracking branch 'origin/master' into copilot/enhance-stream-io-async-support 2025-10-27 10:20:06 +00:00
Adam Hathcock
63d08ebfd2 update agents 2025-10-27 10:19:57 +00:00
Adam Hathcock
c696197b03 formatting 2025-10-27 10:19:24 +00:00
Adam Hathcock
738a72228b added fixes and more async tests 2025-10-27 10:15:06 +00:00
Adam Hathcock
90641f4488 Merge pull request #979 from adamhathcock/dependabot/github_actions/actions/upload-artifact-5
Bump actions/upload-artifact from 4 to 5
2025-10-27 10:06:02 +00:00
Adam Hathcock
a4cc7eaf9b fully use async for zlibbase 2025-10-27 09:51:39 +00:00
Adam Hathcock
fdca728fdc add some dispose async 2025-10-27 09:47:15 +00:00
dependabot[bot]
d2c4ae8cdf Bump actions/upload-artifact from 4 to 5
Bumps [actions/upload-artifact](https://github.com/actions/upload-artifact) from 4 to 5.
- [Release notes](https://github.com/actions/upload-artifact/releases)
- [Commits](https://github.com/actions/upload-artifact/compare/v4...v5)

---
updated-dependencies:
- dependency-name: actions/upload-artifact
  dependency-version: '5'
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-10-27 09:43:01 +00:00
Adam Hathcock
f3d3ac30a6 add gubbins 2025-10-27 09:39:08 +00:00
Adam Hathcock
f8cc4ade8a format 2025-10-27 09:37:00 +00:00
copilot-swe-agent[bot]
b3975b7bbd Add async tests for EntryStream and compression streams
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-27 09:28:34 +00:00
copilot-swe-agent[bot]
4f1b61f5bc Add async support to DeflateStream and GZipStream
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-27 09:20:37 +00:00
copilot-swe-agent[bot]
beeb37b4fd Add async support to EntryStream, ZlibStream, and ZlibBaseStream
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-27 09:11:29 +00:00
copilot-swe-agent[bot]
43aa2bad22 Integrate async/await support from PR #976 as baseline
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-27 09:00:38 +00:00
copilot-swe-agent[bot]
1b2ba921bb Initial plan 2025-10-27 08:48:01 +00:00
Adam Hathcock
f543da0ea8 Merge pull request #977 from adamhathcock/copilot/add-copilot-agent-config
Add Copilot agent manifest and usage documentation
2025-10-27 08:42:20 +00:00
copilot-swe-agent[bot]
e60c9efa84 Add copilot agent configuration and documentation
- Create .github/agents/copilot-agent.yml with agent manifest
- Replace AGENTS.md with agent usage and command documentation

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-25 18:16:47 +00:00
copilot-swe-agent[bot]
c52fc6f240 Initial plan 2025-10-25 18:12:45 +00:00
Adam Hathcock
ee136b024a Merge pull request #974 from adamhathcock/adam/enable-agent
chore: add Copilot coding agent config and CI workflow
2025-10-25 16:09:51 +01:00
Adam Hathcock
699bc5f34b chore: add Copilot coding agent config and CI workflow 2025-10-25 16:05:09 +01:00
Adam Hathcock
9eed8e842c Merge pull request #972 from TwanVanDongen/master
Handle vendor-specific and malformed ZIP extra fields safely
2025-10-25 13:53:10 +01:00
Twan van Dongen
6d652a12ee And again forgot to apply CSharpier
Adds bounds checks to prevent exceptions when extra fields are truncated or non-standard (e.g., 0x4341 "AC"/ARC0). Stops parsing gracefully, allowing other fields to be processed.
2025-10-24 17:18:37 +02:00
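The bounds-checking approach this commit describes can be illustrated with a minimal sketch, assuming the standard ZIP extra-field layout (2-byte header ID, 2-byte data size, then data) and a little-endian host; the parser and names here are hypothetical, not SharpCompress's actual internals:

```csharp
using System;
using System.Collections.Generic;

static class ExtraFieldParser
{
    // Each record: 2-byte header ID, 2-byte data size, then `size` bytes of data.
    public static IEnumerable<(ushort Id, byte[] Data)> Parse(byte[] extra)
    {
        var offset = 0;
        while (offset + 4 <= extra.Length) // need a full ID + size pair
        {
            var id = BitConverter.ToUInt16(extra, offset);
            var size = BitConverter.ToUInt16(extra, offset + 2);
            offset += 4;
            if (offset + size > extra.Length)
            {
                // Truncated or vendor-specific record (e.g. 0x4341 "AC"/ARC0):
                // stop gracefully so already-parsed fields still get processed.
                yield break;
            }
            var data = new byte[size];
            Array.Copy(extra, offset, data, 0, size);
            offset += size;
            yield return (id, data);
        }
    }
}
```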
Adam Hathcock
e043e06656 Merge pull request #969 from adamhathcock/adam/perf
Add JB perf testing project.
2025-10-23 14:34:43 +01:00
Adam Hathcock
14b52599f4 Update src/SharpCompress/Compressors/Rar/UnpackV1/Unpack.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-23 14:20:54 +01:00
Adam Hathcock
e3e2c0c567 Update tests/SharpCompress.Performance/LargeMemoryStream.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-23 14:19:16 +01:00
Adam Hathcock
4fc5d60f03 reduce visibility 2025-10-23 14:16:39 +01:00
Adam Hathcock
c37a9e0f82 Merge remote-tracking branch 'origin/adam/perf' into adam/perf 2025-10-23 13:50:31 +01:00
Adam Hathcock
fed17ebb96 fmt 2025-10-23 13:50:07 +01:00
Adam Hathcock
eeac678872 More usage of pool and better copy 2025-10-23 13:49:54 +01:00
Adam Hathcock
f9ed0f2df9 Update tests/SharpCompress.Performance/Program.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-23 11:47:42 +01:00
Adam Hathcock
0ddbacac85 Update src/SharpCompress/Compressors/Rar/UnpackV1/UnpackUtility.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-23 11:47:27 +01:00
Adam Hathcock
f0d28aa5cf fmt 2025-10-23 11:43:38 +01:00
Adam Hathcock
cc84f6fee4 more making rar faster 2025-10-23 11:43:21 +01:00
Adam Hathcock
00e6eef369 used AI to optimize some copys and shifting 2025-10-23 11:18:50 +01:00
Adam Hathcock
1ae71907bc don't need to clear everything 2025-10-23 10:53:54 +01:00
Adam Hathcock
3ff688fba2 clear and null check 2025-10-23 10:48:18 +01:00
Adam Hathcock
bb59b3d456 add pool to LZMA out window 2025-10-23 09:54:52 +01:00
Adam Hathcock
186ea74ada add some fixes for rar to pool memory 2025-10-23 09:40:15 +01:00
Adam Hathcock
c108f2dcf3 add perf testing project using JB memory and cpu 2025-10-23 09:39:57 +01:00
Adam Hathcock
4cca232d83 Merge pull request #959 from adamhathcock/adam/xz-wrapped-often
Removed wrappers that weren't needed (probably)
2025-10-22 11:54:47 +01:00
Adam Hathcock
1db511e9cb Merge branch 'master' into adam/xz-wrapped-often 2025-10-22 11:51:46 +01:00
Adam Hathcock
76afa7d3bf Merge pull request #968 from adamhathcock/adam/rework-deps
rework dependencies to be correct for frameworks and update
2025-10-22 11:51:30 +01:00
Adam Hathcock
3513f7b1cd Update src/SharpCompress/SharpCompress.csproj
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-22 10:51:12 +01:00
Adam Hathcock
4531fe39e6 Merge branch 'master' into adam/rework-deps 2025-10-22 10:48:16 +01:00
Adam Hathcock
8d276a85bc rework dependencies to be correct for frameworks and update 2025-10-22 10:47:43 +01:00
Adam Hathcock
5f0d042bc3 Merge pull request #967 from adamhathcock/adam/reduce-custom-utilities
Reduce custom utilities for arrays/bytes
2025-10-22 10:41:10 +01:00
Adam Hathcock
408f07e3c4 Merge branch 'master' into adam/reduce-custom-utilities 2025-10-22 10:38:01 +01:00
Adam Hathcock
d1a540c90c use windows instead of skippable fact 2025-10-22 10:32:47 +01:00
Adam Hathcock
00df8e930e add windows only compile constant 2025-10-22 10:30:40 +01:00
Adam Hathcock
3b768b1b77 Merge pull request #961 from adamhathcock/dependabot/nuget/AwesomeAssertions-9.2.1
Bump AwesomeAssertions from 9.2.0 to 9.2.1
2025-10-22 10:25:01 +01:00
Adam Hathcock
42a7ececa0 Merge branch 'master' into adam/xz-wrapped-often 2025-10-22 10:22:36 +01:00
Adam Hathcock
e8867de049 Merge branch 'master' into dependabot/nuget/AwesomeAssertions-9.2.1 2025-10-22 10:21:59 +01:00
Adam Hathcock
a1dfa3dfa3 xplat tests for path characters 2025-10-22 10:21:22 +01:00
Adam Hathcock
83917d4f79 Merge remote-tracking branch 'origin/master' into adam/reduce-custom-utilities 2025-10-22 10:17:20 +01:00
Adam Hathcock
513cd4f905 some AI suggestions 2025-10-22 10:16:45 +01:00
Adam Hathcock
eda0309df3 Merge pull request #966 from adamhathcock/adam/reduce-stackalloc
Remove a dynamically created stackalloc
2025-10-22 10:13:14 +01:00
Adam Hathcock
74e27c028e fix the span length 2025-10-22 10:10:07 +01:00
Adam Hathcock
36c06c4089 ugh, this is used because it shadows a field 2025-10-22 09:32:19 +01:00
Adam Hathcock
249b8a9cdd add AI generated tests 2025-10-22 09:28:07 +01:00
Adam Hathcock
62bee15f00 fmt 2025-10-22 09:19:30 +01:00
Adam Hathcock
d8797b69e4 remove do while 2025-10-22 09:19:09 +01:00
Adam Hathcock
084fe72b02 Consolidate not null 2025-10-22 09:17:13 +01:00
Adam Hathcock
c823acaa3f optimize ReadFully and Skip 2025-10-22 09:10:16 +01:00
Adam Hathcock
e0d6cd9cb7 Try to reduce custom functions for array/byte management 2025-10-22 09:00:21 +01:00
Adam Hathcock
01021e102b remove some extra stackallocs 2025-10-22 08:36:03 +01:00
Adam Hathcock
6de738ff17 reduce dynamic stackallocs in unpackv1 2025-10-22 08:32:19 +01:00
Adam Hathcock
c0612547eb Merge pull request #964 from adamhathcock/adam/extract-all-solid-only
Only allow extract all on archives that are solid (some rars and 7zip only)
2025-10-21 14:08:23 +01:00
Adam Hathcock
e960907698 Update src/SharpCompress/Archives/AbstractArchive.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-21 13:55:56 +01:00
Adam Hathcock
84e03b1b27 Allow 7zip files of all sizes? 2025-10-21 10:28:58 +01:00
Adam Hathcock
f1a80da34b fix tests that use extract all wrongly 2025-10-21 09:56:29 +01:00
Adam Hathcock
5a5a55e556 fmt 2025-10-21 09:22:35 +01:00
Adam Hathcock
e1f132b45b Only allow extract all on archives that are solid (some rars and 7zip only) 2025-10-21 09:21:46 +01:00
dependabot[bot]
087011aede Bump AwesomeAssertions from 9.2.0 to 9.2.1
---
updated-dependencies:
- dependency-name: AwesomeAssertions
  dependency-version: 9.2.1
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-10-20 10:44:17 +00:00
Adam Hathcock
1430bf9b31 fmt 2025-10-15 09:54:13 +01:00
Adam Hathcock
4e5de817ef Removed too many wrappers
# Conflicts:
#	src/SharpCompress/Compressors/Xz/XZIndex.cs
2025-10-15 09:53:46 +01:00
Adam Hathcock
5d6b94f8c3 Merge pull request #952 from adamhathcock/dependabot/github_actions/actions/checkout-5
Bump actions/checkout from 4 to 5
2025-10-14 08:25:53 +01:00
Adam Hathcock
8dfbe56f42 Merge branch 'master' into dependabot/github_actions/actions/checkout-5 2025-10-14 08:23:18 +01:00
Adam Hathcock
df79d983d7 Merge pull request #957 from adamhathcock/dependabot/github_actions/actions/setup-dotnet-5
Bump actions/setup-dotnet from 4 to 5
2025-10-14 08:22:47 +01:00
dependabot[bot]
6c23a28826 Bump actions/setup-dotnet from 4 to 5
Bumps [actions/setup-dotnet](https://github.com/actions/setup-dotnet) from 4 to 5.
- [Release notes](https://github.com/actions/setup-dotnet/releases)
- [Commits](https://github.com/actions/setup-dotnet/compare/v4...v5)

---
updated-dependencies:
- dependency-name: actions/setup-dotnet
  dependency-version: '5'
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-10-13 16:21:25 +00:00
dependabot[bot]
f72289570a Bump actions/checkout from 4 to 5
Bumps [actions/checkout](https://github.com/actions/checkout) from 4 to 5.
- [Release notes](https://github.com/actions/checkout/releases)
- [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md)
- [Commits](https://github.com/actions/checkout/compare/v4...v5)

---
updated-dependencies:
- dependency-name: actions/checkout
  dependency-version: '5'
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-10-13 16:12:51 +00:00
Adam Hathcock
51bc9dc20e Merge pull request #950 from adamhathcock/adamhathcock-patch-1
Configure Dependabot for NuGet updates
2025-10-13 16:52:07 +01:00
Adam Hathcock
e45ac6bfa9 Update .github/dependabot.yml
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-10-13 16:48:55 +01:00
Adam Hathcock
44d1cbdb0c Configure Dependabot for NuGet updates
Added NuGet package ecosystem configuration for Dependabot.
2025-10-13 16:47:46 +01:00
Adam Hathcock
3fa7e854d3 Merge pull request #948 from adamhathcock/adam/update-0410
update to 0.41.0 and change symbols type
2025-10-13 12:32:54 +01:00
Adam Hathcock
0f5c080be6 update to 0.41.0 and change symbols type 2025-10-13 12:29:02 +01:00
Adam Hathcock
871009ef8d Merge pull request #947 from adamhathcock/adam/update-deps
Update dependencies and csharpier
2025-10-13 12:13:51 +01:00
Adam Hathcock
89a565c6c4 format 2025-10-13 12:10:42 +01:00
Adam Hathcock
6ed60e4d5f adjust Microsoft.NETFramework.ReferenceAssemblies 2025-10-13 12:08:39 +01:00
Adam Hathcock
4dab1df3ae adjust usage of sourcelink 2025-10-13 12:06:34 +01:00
Adam Hathcock
7b7aa2cdf0 Merge remote-tracking branch 'origin/master' into adam/update-deps 2025-10-13 11:56:50 +01:00
Adam Hathcock
d1e1a65f32 add agents file 2025-10-13 11:55:41 +01:00
Adam Hathcock
9d332b4ac5 add agents file 2025-10-13 11:52:18 +01:00
Adam Hathcock
fc2462e281 Update dependencies and csharpier 2025-10-13 10:40:43 +01:00
Adam Hathcock
fb2cddabf0 Merge pull request #945 from mitchcapper/archive_extension_hinting_pr
Extension hinting for ReaderFactory for better first try factory success
2025-09-29 09:02:11 +01:00
Mitch Capper
33481a9465 Extension hinting for ReaderFactory for better first try factory success
Also used by TarFactory to hint which compression type to attempt first
2025-09-24 00:04:07 -07:00
Adam Hathcock
45de87cb97 Merge pull request #943 from mitchcapper/zstandard_reading_pr
ZStandard tar support
2025-09-23 13:34:26 +01:00
Mitch Capper
553c533ada ZStandard tar support 2025-09-23 03:26:33 -07:00
Adam Hathcock
634e562b93 Merge pull request #935 from Nanook/bugfix/StreamBufferFix
Rewind buffer fix for directory extract.
2025-08-22 14:08:39 +01:00
Adam Hathcock
c789ead590 Merge branch 'master' into bugfix/StreamBufferFix 2025-08-22 14:05:35 +01:00
Adam Hathcock
d23560a441 Merge pull request #939 from Morilli/fix-winaescryptostream-dispose
Fix WinzipAesCryptoStream potentially not getting disposed
2025-08-22 14:03:06 +01:00
Morilli
9f306966db Adjust logic in CreateDecompressionStream to not wrap the returned stream
A ReadOnlySubStream never disposes its passed-in stream, which is problematic when that stream needs to be disposed. It's hard to tell who exactly has ownership over which stream here, but I'll just assume that whatever stream is passed in here should be disposed.
2025-08-21 20:08:57 +02:00
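To make the ownership problem concrete, here is a minimal sketch of a substream that deliberately never disposes its passed-in stream, which is why a wrapper such as WinzipAesCryptoStream has to be disposed separately; this is an illustrative class, not the library's actual ReadOnlySubStream:

```csharp
using System;
using System.IO;

public sealed class NonOwningSubStream : Stream
{
    private readonly Stream _inner;
    public NonOwningSubStream(Stream inner) => _inner = inner;

    public override bool CanRead => _inner.CanRead;
    public override bool CanSeek => false;
    public override bool CanWrite => false;
    public override long Length => _inner.Length;
    public override long Position
    {
        get => _inner.Position;
        set => throw new NotSupportedException();
    }

    public override int Read(byte[] buffer, int offset, int count) =>
        _inner.Read(buffer, offset, count);

    public override void Flush() { }
    public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
    public override void SetLength(long value) => throw new NotSupportedException();
    public override void Write(byte[] buffer, int offset, int count) => throw new NotSupportedException();

    protected override void Dispose(bool disposing)
    {
        // Intentionally no _inner.Dispose(): ownership stays with the caller,
        // so an owning wrapper around _inner must be disposed on its own.
        base.Dispose(disposing);
    }
}
```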
Morilli
8eea2cef97 add failing test 2025-08-21 20:08:34 +02:00
Nanook
3abbb89c2e Comment typo edit. 2025-08-01 21:50:36 +01:00
Nanook
76de7d54c7 Rewind buffer fix for directory extract. 2025-08-01 21:47:09 +01:00
Adam Hathcock
35aee6e066 Merge pull request #934 from Nanook/feature/zstd_zip_writing
Zip ZStandard Writing with tests. Level support.
2025-07-24 08:24:32 +01:00
Nanook
8a0152fe7c Fixed up test issue after copilot "fixes" :S. 2025-07-24 01:33:18 +01:00
Nanook
7769435fc8 CSharpier fixes after copilot tweaks. 2025-07-24 01:31:30 +01:00
Nanook
ae311ea66e Update tests/SharpCompress.Test/Zip/TestPseudoTextStream.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-07-24 01:28:56 +01:00
Nanook
d15816f162 Update tests/SharpCompress.Test/Zip/TestPseudoTextStream.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-07-24 01:28:40 +01:00
Nanook
5cbb31c559 Update tests/SharpCompress.Test/Zip/TestPseudoTextStream.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-07-24 01:28:23 +01:00
Nanook
b7f5b36f2b Update src/SharpCompress/Writers/Zip/ZipWriter.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-07-24 01:28:11 +01:00
Nanook
e9dd413de6 CSharpier formatting. 2025-07-24 00:46:09 +01:00
Nanook
c5de3d8cc1 Zip ZStandard Writing with tests. Level support. 2025-07-24 00:23:55 +01:00
Adam Hathcock
624c385e27 Merge pull request #930 from Nanook/feature/StreamStack
Added IStreamStack for debugging and configurable buffer management. …
2025-07-23 08:48:58 +01:00
Adam Hathcock
893890c985 Merge branch 'master' into feature/StreamStack 2025-07-23 08:46:16 +01:00
Adam Hathcock
e12efc8b52 Merge pull request #933 from Morilli/zipentry-attribute
Implement `Attrib` for `ZipEntry`
2025-07-23 08:45:04 +01:00
Morilli
015dfa41b1 implement Attrib for ZipEntrys 2025-07-22 20:12:22 +02:00
Morilli
5e72ce5fbe add failing test 2025-07-22 20:11:13 +02:00
Adam Hathcock
236e653176 Merge branch 'master' into feature/StreamStack 2025-07-22 11:55:57 +01:00
Adam Hathcock
3946c3ba03 Merge pull request #931 from SimonCahill/feat/add-throw-on-unseekable-stream
Added ArgumentException to Archive.Open implementations
2025-07-22 11:54:23 +01:00
Adam Hathcock
5bdd39e6eb Merge branch 'master' into feat/add-throw-on-unseekable-stream 2025-07-22 11:50:04 +01:00
Adam Hathcock
46267c0531 Merge pull request #929 from Morilli/fix-zipentry-comment
Fix zipentry comment implementation
2025-07-22 11:49:12 +01:00
Adam Hathcock
7b96eb7704 Merge pull request #928 from Morilli/fix-dotsettings
fix DotSettings options to conform to current code style and editorconfig
2025-07-22 11:47:36 +01:00
Nanook
60d5955101 Final tidy. 2025-07-22 00:12:30 +01:00
Nanook
587675e488 Fixed some missing features caused by merging an earlier branch than required. 2025-07-21 23:45:57 +01:00
Simon Cahill
c0ae319c63 Reverted debug change 2025-07-21 15:41:33 +02:00
Simon Cahill
d810f8d8db Reverted back; I don't know how this file changed 2025-07-21 15:38:14 +02:00
Simon Cahill
a88390a546 Added ArgumentException which is thrown when a non-seekable Stream instance is passed 2025-07-21 15:34:03 +02:00
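A minimal sketch of the guard this commit describes (helper name hypothetical): random-access Archive.Open needs a seekable stream, while forward-only streams belong with the Reader API instead.

```csharp
using System;
using System.IO;

static class SeekableGuard
{
    public static void EnsureSeekable(Stream stream)
    {
        if (stream is null)
        {
            throw new ArgumentNullException(nameof(stream));
        }
        if (!stream.CanSeek)
        {
            // Archives need random access to headers; forward-only input
            // should go through the reader (streaming) API instead.
            throw new ArgumentException(
                "Archive.Open requires a seekable stream; use the Reader API for forward-only streams.",
                nameof(stream));
        }
    }
}
```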
Nanook
8575e5615d Formatting fix. 2025-07-20 19:18:31 +01:00
Nanook
5fe9516c09 Various copilot suggestions. Tests still passing. 2025-07-20 19:16:11 +01:00
Nanook
938775789d Formatted with Sharpier. 2025-07-20 18:11:37 +01:00
Nanook
f37bebe51f Merge branch 'master' into feature/StreamStack 2025-07-20 17:46:28 +01:00
Nanook
21f14cd3f2 Added IStreamStack for debugging and configurable buffer management. Added SharpCompressStream to consolidate streams to help simplify debugging. All unit tests passing. 2025-07-20 17:35:22 +01:00
Morilli
4e7baeb2c9 fix zipentry comment being lost after reading local entry header 2025-07-19 19:56:41 +02:00
Morilli
d78a682dd8 add failing test 2025-07-19 19:56:38 +02:00
Morilli
44021b7abc fix DotSettings options to conform to current code style and editorconfig 2025-07-19 19:30:52 +02:00
Adam Hathcock
7f9e543213 Merge pull request #924 from Morilli/7zip-1block-solid-archive
Fix 7-zip solid archive detection
2025-07-14 09:12:43 +01:00
Morilli
c489e5a59b fix SevenZipArchive solid detection 2025-07-13 15:09:59 +02:00
Morilli
8de30db7f9 add failing test 2025-07-13 15:08:45 +02:00
Adam Hathcock
415f0c6774 Merge pull request #921 from Morilli/fix-volume-filename
Fix volume FileName property potentially missing
2025-07-07 09:22:45 +01:00
Morilli
aa9cf195a5 Fix volume FileName property potentially missing 2025-07-06 21:56:54 +02:00
Adam Hathcock
6412fc8135 Mark for 0.40 2025-06-03 08:48:34 +01:00
Adam Hathcock
8b1ba9a00c Merge pull request #914 from adamhathcock/update-deps
Update dependencies and csharpier
2025-06-03 08:33:18 +01:00
Adam Hathcock
b02584ef9e Update deps again 2025-06-03 08:30:28 +01:00
Adam Hathcock
88cd6bfd1a Merge remote-tracking branch 'origin/master' into update-deps 2025-06-03 08:13:41 +01:00
Adam Hathcock
89a3da14c9 Merge pull request #918 from Morilli/fix-bzip2-selector-oob
[bzip2] fix possible out of bounds access due to unsanitized nSelectors usage
2025-06-03 08:11:55 +01:00
Morilli
619d987492 fix possible out of bounds access due to unsanitized nSelectors usage 2025-06-02 21:24:38 +02:00
Adam Hathcock
744e410a1a Merge pull request #916 from Morilli/rar-multipart-reader
Implement multipart rar handling for ExtractAllEntries
2025-05-14 11:33:12 +01:00
Morilli
6e51967993 format 2025-05-14 11:42:52 +02:00
Morilli
7989ab2e28 modify now-broken test
This test tested that skipping over entries using the reader interface for an encrypted multi-volume rar archive worked.
However, reading those entries doesn't work, and the skipping was also not working properly, so I believe it's fine to "break" this functionality.
2025-05-14 11:18:41 +02:00
Morilli
a3570a568d fix AbstractReader.Skip for multipart files 2025-05-14 10:27:58 +02:00
Morilli
c0cd998836 add failing test 2025-05-14 10:27:30 +02:00
Morilli
1a452acd1c implement ExtractAllEntries for multipart rar files 2025-05-14 10:27:18 +02:00
Adam Hathcock
6c54083b08 Merge remote-tracking branch 'origin/master' into update-deps 2025-04-28 16:30:40 +01:00
Adam Hathcock
76105ebdaf fix bullseye 2025-04-28 16:30:28 +01:00
Adam Hathcock
2c452201fa Merge pull request #854 from zgabi/patch-1
return Stream.Null when 7z entry has no stream
2025-04-28 16:26:58 +01:00
Adam Hathcock
9fc7fc73f9 Merge branch 'master' into patch-1 2025-04-28 16:21:20 +01:00
Adam Hathcock
e7417e35ba Update dependencies and csharpier 2025-04-28 16:18:01 +01:00
Adam Hathcock
19eb4c7cba Merge pull request #834 from adamhathcock/exception-normalization
Add SharpCompressException and use it or children in most places
2025-04-28 16:12:25 +01:00
Adam Hathcock
1f39a0c9da Merge remote-tracking branch 'origin/master' into exception-normalization
# Conflicts:
#	src/SharpCompress/Readers/ReaderFactory.cs
2025-04-28 16:08:08 +01:00
Adam Hathcock
a480a8893c format 2025-04-28 16:06:00 +01:00
Adam Hathcock
d3a9e341a5 Merge fixes 2025-04-28 16:05:31 +01:00
Adam Hathcock
95caffe607 Merge remote-tracking branch 'origin/master' into exception-normalization
# Conflicts:
#	src/SharpCompress/Common/Rar/RarVolume.cs
#	src/SharpCompress/Common/SevenZip/SevenZipFilePart.cs
#	src/SharpCompress/Compressors/LZMA/LZipStream.cs
2025-04-28 15:59:34 +01:00
Adam Hathcock
fdeca61284 Merge pull request #913 from jdpurcell/pr-bss-fix
Fix regression with BufferedSubStream calculation
2025-03-26 09:00:38 +00:00
J.D. Purcell
45dc653191 Fix regression with size calculation 2025-03-25 20:43:14 -04:00
Adam Hathcock
6e48302b7b Merge pull request #912 from jdpurcell/pr-bss-optim
Optimize BufferedSubStream.ReadByte
2025-03-25 08:04:38 +00:00
J.D. Purcell
a7918d7b11 Optimize BufferedSubStream.ReadByte 2025-03-24 21:28:08 -04:00
Adam Hathcock
ec21253af9 Merge pull request #909 from Morilli/update-usage
Update USAGE.md to remove problematic extraction example
2025-03-24 08:20:17 +00:00
Adam Hathcock
97cdda8663 Merge branch 'master' into update-usage 2025-03-24 08:18:16 +00:00
Adam Hathcock
35ac2b9d71 Merge pull request #910 from jdpurcell/pr-rangedecoderoptim
Optimize LZMA range decoder
2025-03-24 08:17:28 +00:00
J.D. Purcell
14affd8ffa Optimize LZMA range decoder 2025-03-22 17:14:38 -04:00
Morilli
c48409c903 Update USAGE.md
added an explanatory comment to the ExtractAllEntries usage and removed the problematic previous code.

I've added a new example for iterating over entries that doesn't extract them.
2025-03-22 16:50:15 +01:00
Adam Hathcock
227f66f299 Merge pull request #907 from jdpurcell/pr-copyblockoptim
Optimize LZ OutWindow.CopyBlock
2025-03-20 08:24:39 +00:00
J.D. Purcell
c2d9bf94d1 Optimize LZ OutWindow.CopyBlock 2025-03-19 22:29:38 -04:00
Adam Hathcock
8f03841161 Merge pull request #906 from TwanVanDongen/master
Added ARC's crunched methods 5, 6, 7 & 8
2025-03-17 08:30:12 +00:00
Twan van Dongen
344a1ed912 Merge branch 'master' of https://github.com/TwanVanDongen/sharpcompress 2025-03-14 19:09:56 +01:00
Twan van Dongen
ff71993b31 Trying to overcome differences in csharpier versions... 2025-03-14 19:09:52 +01:00
Twan van Dongen
18bb773b2c Merge branch 'master' of https://github.com/TwanVanDongen/sharpcompress 2025-03-14 19:06:14 +01:00
Twan van Dongen
7f905c7940 More csharpier stuff 2025-03-14 19:06:10 +01:00
Twan van Dongen
8a4ba6fc56 Removed empty line. Removed failing test. 2025-03-14 19:03:54 +01:00
Twan van Dongen
e0b275c01c Removed empty line for CSharpier 2025-03-14 19:01:07 +01:00
Twan van Dongen
5926db85df Added ARC's crunched methods 5, 6, 7 & 8 2025-03-14 18:58:34 +01:00
Adam Hathcock
a4715a10e7 Merge pull request #905 from TwanVanDongen/master
ARC decompression methods 3 and 4 added
2025-03-12 08:21:57 +00:00
Twan van Dongen
de0f5c0fcb Merge branch 'master' of https://github.com/TwanVanDongen/sharpcompress 2025-03-11 20:20:45 +01:00
Twan van Dongen
88d85ce6ac Extra line removed for csharpier 2025-03-11 20:20:41 +01:00
Twan van Dongen
131c171d3e Merge branch 'master' of https://github.com/TwanVanDongen/sharpcompress 2025-03-11 20:16:24 +01:00
Twan van Dongen
c5ddef6ef7 An exception occurred in ReadOnlySubStream when attempting to set the position to the same value. 2025-03-11 20:15:53 +01:00
Twan van Dongen
f36486d006 Merge branch 'master' of https://github.com/TwanVanDongen/sharpcompress 2025-03-11 18:57:52 +01:00
Twan van Dongen
eaf466c5c3 Implementation of squeezed and packed compression algorithms for .ARC archive format 2025-03-11 18:15:53 +01:00
Adam Hathcock
b41bcc349e Merge pull request #903 from TwanVanDongen/master
Base Reader implementation of .ARC format
2025-03-10 08:55:43 +00:00
Adam Hathcock
825c61bdcd Merge branch 'master' into master 2025-03-10 08:51:58 +00:00
Adam Hathcock
f13f49bd71 Merge pull request #904 from jdpurcell/pr-7zattr
Provide access to extended attributes for 7-zip
2025-03-10 08:49:33 +00:00
J.D. Purcell
88f5d4544b Add SevenZipEntry ExtendedAttrib property 2025-03-09 17:16:55 -04:00
J.D. Purcell
b7432a20f0 Remove some unused internal members 2025-03-09 17:16:55 -04:00
Twan van Dongen
ab57c85e66 Missing comma added for csharpier 2025-03-09 18:45:27 +01:00
Twan van Dongen
c7c41bc0d8 Missing comma added for csharpier checks. Base Reader implementation of .ARC format. 2025-03-09 18:40:43 +01:00
Twan van Dongen
c66f9b06a4 Base Reader implementation of .ARC format 2025-03-09 18:22:52 +01:00
Adam Hathcock
a3ac7e7ca7 Merge pull request #901 from ms264556/feature/sha256-quick-fix
Handle XZ CheckType SHA-256
2025-03-07 10:52:24 +00:00
ms264556
187b762f8a Corrected XZ BlockCheckSize formula 2025-03-07 19:53:49 +13:00
ms264556
3a7fbdfa52 Handle XZ CheckType SHA-256 2025-03-07 14:46:07 +13:00
Adam Hathcock
bbeb46b37f Merge pull request #900 from Morilli/WriteToDirectory-reader
make WriteToDirectory functions use ExtractAllEntries
2025-03-03 08:32:00 +00:00
Morilli
5450b9a700 make WriteToDirectory functions use ExtractAllEntries
for extracting files from solid archives this can be required to get acceptable extraction performance and should therefore always be called.
2025-02-28 21:46:04 +01:00
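As a usage sketch of the pattern this commit standardizes on, using the public SharpCompress reader API: a solid archive decompresses as one continuous stream, so ExtractAllEntries reads it once front-to-back instead of restarting decompression for every entry.

```csharp
using SharpCompress.Archives;
using SharpCompress.Common;
using SharpCompress.Readers;

// Open the archive, then walk it sequentially via the reader interface.
using var archive = ArchiveFactory.Open("archive.rar");
using var reader = archive.ExtractAllEntries();
while (reader.MoveToNextEntry())
{
    if (!reader.Entry.IsDirectory)
    {
        reader.WriteEntryToDirectory(
            "output",
            new ExtractionOptions { ExtractFullPath = true, Overwrite = true });
    }
}
```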
Adam Hathcock
353b28647c Merge pull request #897 from Morilli/bufferedsubstream-readbyte
Implement ReadByte for BufferedSubStream
2025-02-17 08:07:14 +00:00
Adam Hathcock
2411f4f870 Merge branch 'master' into bufferedsubstream-readbyte 2025-02-17 08:05:02 +00:00
Adam Hathcock
34cd059423 Merge pull request #898 from Morilli/lz-readbyte
Implement ReadByte for LzmaStream and LzOutWindow
2025-02-17 08:04:34 +00:00
Morilli
4b4ec12c87 Implement ReadByte for LzmaStream and LzOutWindow 2025-02-16 18:37:56 +01:00
Morilli
441147c0dc Implement ReadByte for BufferedSubStream 2025-02-16 13:41:46 +01:00
Adam Hathcock
08ceac9f46 Merge pull request #896 from adamhathcock/rar_unpack20_audio_fix
Rar2 v20,v26 Multimedia (Audio) decoder fix
2025-02-12 08:25:38 +00:00
Nanook
6bc690b50b Seriously? 2025-02-11 23:10:03 +00:00
Nanook
c41888b338 Rar2 v20,v26 Multimedia (Audio) decoder fix 2025-02-11 22:30:59 +00:00
Adam Hathcock
6d3c980b39 Merge pull request #894 from Morilli/fix-rare-solid-rar-failure
Fix condition in rar v3 code
2025-02-11 11:39:17 +00:00
Morilli
eb5db176fa revert change to rar5 code
... yeah... apparently the thing that fixes rar3 code breaks rar5 code
2025-02-11 11:21:51 +01:00
Morilli
a77c03fcaf add comment and change the same code in new Unpack50 2025-02-11 11:03:56 +01:00
Adam Hathcock
04b25011e9 Merge branch 'master' into fix-rare-solid-rar-failure 2025-02-11 08:31:49 +00:00
Adam Hathcock
96b34aec10 Merge pull request #895 from Morilli/fix-test-concurrency
use File.OpenRead instead of File.Open in tests to allow concurrent access
2025-02-11 08:30:12 +00:00
Morilli
d560b46c85 use File.OpenRead instead of File.Open to allow concurrent access 2025-02-11 03:31:49 +01:00
Morilli
94789ce455 Fix condition in rar v3 code 2025-02-11 03:04:18 +01:00
Adam Hathcock
55797e5873 Merge pull request #893 from adamhathcock/rar_unpack20
Fix for Rar4 v20 compression.
2025-02-10 09:27:27 +00:00
Nanook
675cab3074 Fix for Rar4 v20 compression. 2025-02-07 22:39:33 +00:00
Adam Hathcock
8d63ab646e Merge pull request #891 from Morilli/fix-zip-datadescriptor-header
Fix zip entry handling for entries with data descriptors
2025-01-28 08:12:40 +00:00
Adam Hathcock
37a2fa1cdc Merge branch 'master' into fix-zip-datadescriptor-header 2025-01-28 08:08:55 +00:00
Adam Hathcock
79ed9650b3 Merge pull request #892 from adamhathcock/fix-tests
don't run net48 on non-windows
2025-01-28 08:08:41 +00:00
Adam Hathcock
b6dc58164e don't run net48 on non-windows 2025-01-28 08:04:59 +00:00
Morilli
f9a974c1fe fix formatting 2025-01-28 02:24:42 +01:00
Morilli
91e672befb remove old hack trying to fix a similar thing
Introduced in af264cdc58c9d076bf83477cbdbfe7a5dad282b7; the test included in that commit still passes.
2025-01-28 02:12:16 +01:00
Morilli
c14d18b9df set local header data from directory header when flag is set
As described in section 4.4.7-4.4.9 of the zip specification when this flag is set the correct values will be in the data descriptor record and in the directory header.
2025-01-28 01:56:14 +01:00
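A sketch of that rule with hypothetical header types (not SharpCompress internals): when general-purpose bit 3 is set, the local header stores zeros for CRC-32 and sizes, and the trustworthy values live in the data descriptor and the central directory header.

```csharp
class EntryHeader
{
    public ushort Flags;
    public uint Crc;
    public long CompressedSize;
    public long UncompressedSize;
}

static class ZipHeaderFixup
{
    const ushort DataDescriptorFlag = 1 << 3; // general-purpose bit 3

    // Per APPNOTE sections 4.4.7-4.4.9, copy the real values from the
    // central directory header when the local header only wrote zeros.
    public static void FillFromDirectoryHeader(EntryHeader local, EntryHeader directory)
    {
        if ((local.Flags & DataDescriptorFlag) != 0)
        {
            local.Crc = directory.Crc;
            local.CompressedSize = directory.CompressedSize;
            local.UncompressedSize = directory.UncompressedSize;
        }
    }
}
```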
Morilli
d2cfc1844c add failing test 2025-01-28 01:49:49 +01:00
Adam Hathcock
2fb3243a1a Tests also on net48 2025-01-16 08:38:11 +00:00
Adam Hathcock
6f3124a84f Mark for 0.39 2025-01-16 08:30:21 +00:00
Adam Hathcock
59eb5bd9c4 Merge pull request #888 from adamhathcock/updates
Update to support net48, net481, netstandard2.0, net6 and net8
2025-01-16 08:19:50 +00:00
Adam Hathcock
ad2b6bb4f7 Merge branch 'master' into updates 2025-01-16 08:17:25 +00:00
Adam Hathcock
51d64ee10e Merge pull request #889 from majorro/internal-helpers
Make helper classes internal
2025-01-16 08:15:02 +00:00
majorro
1159efc6cc formatting 2025-01-15 21:43:10 +03:00
Adam Hathcock
3cfb76a56f Add back net48 and net481 support 2025-01-15 16:35:05 +00:00
majorro
7a1ad6688a make helpers internal 2025-01-15 04:46:22 +03:00
majorro
4f4af6e3fd make test friend assembly 2025-01-15 04:46:07 +03:00
majorro
2736b79097 signed test assembly 2025-01-15 04:43:16 +03:00
Adam Hathcock
8da2ffa433 Update dependencies 2025-01-14 09:12:03 +00:00
Adam Hathcock
5bf3d6dc32 update csharpier 2025-01-14 09:07:40 +00:00
Adam Hathcock
30d4380afe update csproj 2025-01-14 09:06:03 +00:00
Adam Hathcock
c8b5aad482 Update to support only netstandard2.0, net6 and net8 2025-01-14 09:04:39 +00:00
Adam Hathcock
fa1d440947 Merge pull request #887 from majorro/improve-rar-memory-usage
Improve rar memory usage
2025-01-14 08:58:18 +00:00
Adam Hathcock
a89fc3a276 formatting 2025-01-14 08:55:09 +00:00
majorro
3875f62453 deps fix 2025-01-14 00:36:26 +03:00
majorro
23e1447ab6 stackalloc addvmcode 2025-01-13 23:31:33 +03:00
majorro
91364c6a7c stackalloc readtables 2025-01-13 23:28:26 +03:00
majorro
a070493fba window null checks 2025-01-13 23:26:49 +03:00
majorro
ca0a6ab72c rollback ILLink.Tasks downgrade 2025-01-12 16:38:30 +03:00
majorro
10e0562a82 stackalloc rar unpackv1 2025-01-12 16:02:49 +03:00
majorro
44998a2a2b pooled window for rar unpackv1 2025-01-12 16:01:59 +03:00
majorro
f8e033e560 add explicit System.Buffers, remove redundant code 2025-01-12 15:36:53 +03:00
Adam Hathcock
5842da84af Merge pull request #878 from Morilli/fix-xzblock-padding
Fix XZBlock padding calculation when its stream's starting position % 4 != 0
2024-12-20 08:50:46 +00:00
Adam Hathcock
10cc270170 Merge branch 'master' into fix-xzblock-padding 2024-12-20 08:47:40 +00:00
Morilli
f51bdd56aa expose Position getter in ForwardOnlyStream 2024-12-19 20:41:00 +01:00
Adam Hathcock
7f79c49f93 Merge pull request #884 from YoshiRulz/exports-unclutter
Exports unclutter
2024-12-19 14:06:48 +00:00
YoshiRulz
e4920255f0 Replace ReadOnlyCollection with the one in the BCL
I changed the param type of the helper, but the stronger constraint
didn't seem to matter in practice
2024-12-19 22:11:16 +10:00
YoshiRulz
f8e8273d39 Move utility classes to new namespace 2024-12-19 22:11:16 +10:00
Adam Hathcock
ceddab98d1 Merge pull request #877 from StarkDirewolf/master
Fixed bug in zip time header flags
2024-11-01 14:07:24 +00:00
Robb Pryde
0faa176791 Sharpier styling 2024-10-31 17:35:47 +00:00
Morilli
9b0e0ee536 Fix padding calculation when initial position % 4 != 0 2024-10-29 02:57:26 +01:00
Morilli
881d2756db Add failing test 2024-10-29 02:57:25 +01:00
Robb Pryde
ab5af40999 Fixed bug in zip time header flags, and failing to parse it no longer throws an exception. 2024-10-26 01:22:07 +01:00
Adam Hathcock
6aeef8dcf9 Merge pull request #876 from Morilli/fix-isarchive-stream-seek
Restore stream position in ArchiveFactory.IsArchive
2024-10-24 08:28:52 +01:00
Morilli
5d62196dde Restore stream position in ArchiveFactory.IsArchive 2024-10-23 17:01:55 +02:00
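A sketch of the restore-position pattern behind this fix; `probe` stands in for the real format-detection logic (e.g. reading magic bytes) and the helper name is assumed:

```csharp
using System;
using System.IO;

static class FormatSniffer
{
    public static bool ProbeWithoutMoving(Stream stream, Func<Stream, bool> probe)
    {
        var start = stream.Position; // remember where the caller was
        try
        {
            return probe(stream);    // sniff the format, moving the position
        }
        finally
        {
            stream.Position = start; // leave the stream where we found it
        }
    }
}
```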
Adam Hathcock
7fe27ac310 Mark for 0.38 2024-09-02 09:09:57 +01:00
Adam Hathcock
1e300349ce Merge pull request #868 from kikaragyozov/patch-1
Fix small typo in USAGE.md
2024-09-02 07:43:30 +01:00
Kiril Karagyozov
6b01a7b08e Fix small typo in USAGE.md 2024-08-29 12:11:19 +03:00
Adam Hathcock
34d948df18 Merge pull request #866 from TwanVanDongen/master
Added shrink, reduce and implode to FORMATS
2024-08-22 16:07:23 +01:00
Twan
27091c4f1d Update FORMATS.md 2024-08-21 19:09:14 +02:00
Twan
970a3d7f2a Update FORMATS.md 2024-08-21 19:08:40 +02:00
Twan
2bedbbfc54 Update FORMATS.md 2024-08-21 19:06:14 +02:00
Adam Hathcock
8de33f0db3 Merge pull request #864 from adamhathcock/update-csproj
Update csproj to get green marks and update deps
2024-08-12 16:08:28 +01:00
Adam Hathcock
df4eab67dc Update csproj to get green marks and update deps 2024-08-08 08:41:51 +01:00
Adam Hathcock
2d13bc0046 Merge pull request #860 from lostmsu/7zSFX
Added support for 7zip SFX archives
2024-08-06 08:54:12 +01:00
Victor Nova
704a0cb35d added support for 7zip SFX archives by handling ReaderOptions.LookForHeader 2024-08-05 23:11:15 -07:00
Adam Hathcock
06a983e445 Merge pull request #859 from DineshSolanki/#858-fix-invalid-character-in-filename
Fix #858 - Replaces invalid filename characters
2024-07-30 08:22:01 +01:00
Dinesh Solanki
2d10df8b87 Fix #858 - Replaces invalid filename characters
Added a method to replace invalid characters in file names with underscores during file extraction. This prevents errors related to invalid file names.
2024-07-26 00:16:44 +05:30
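A minimal sketch of that sanitization using only the BCL (helper name hypothetical): every character the platform rejects in file names is swapped for an underscore.

```csharp
using System;
using System.IO;

Console.WriteLine(SanitizeFileName("report<draft>.txt")); // report_draft_.txt

static string SanitizeFileName(string name)
{
    var invalid = Path.GetInvalidFileNameChars();
    var chars = name.ToCharArray();
    for (var i = 0; i < chars.Length; i++)
    {
        if (Array.IndexOf(invalid, chars[i]) >= 0)
        {
            chars[i] = '_'; // replace rather than throw during extraction
        }
    }
    return new string(chars);
}
```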
Adam Hathcock
baf66db9dc format 2024-07-24 08:31:44 +01:00
GordonJ
3545693999 Added Tests and supporting Files. 2024-07-23 14:05:07 -05:00
gjefferyes
84fb99c2c8 Merge branch 'adamhathcock:master' into master 2024-07-23 13:58:48 -05:00
Adam Hathcock
21e2983ae1 Merge pull request #857 from alexprabhat99/master
Fix for missing empty directories when using ExtractToDirectory
2024-07-18 08:34:20 +01:00
Alex Prabhat Bara
004e0941d5 code formatted using csharpier 2024-07-16 20:12:01 +05:30
Alex Prabhat Bara
188a426dde fix for missing empty directories when using ExtractToDirectory 2024-07-16 16:20:04 +05:30
Adam Hathcock
6fcfae8bfe Merge pull request #855 from Erior/feature/Check-tar-crc-on-header
Check crc on tar header
2024-07-12 08:35:27 +01:00
Lars Vahlenberg
9515350f52 Remove using directive 2024-07-11 19:56:46 +02:00
Lars Vahlenberg
6b88f82656 Handle special case, empty file 2024-07-11 19:52:33 +02:00
Lars Vahlenberg
e42d953f47 Check crc on tar header 2024-07-10 19:53:32 +02:00
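For reference, the ustar checksum rule this commit implements: sum all 512 header bytes with the 8-byte checksum field (offset 148) treated as spaces, then compare against the stored octal value. A sketch with assumed names; callers typically check for the all-zero end-of-archive block (the "empty file" special case above) before validating:

```csharp
using System;
using System.Text;

static class TarHeader
{
    public static bool ChecksumOk(byte[] header)
    {
        if (header.Length < 512) return false;
        long sum = 0;
        for (var i = 0; i < 512; i++)
        {
            // The checksum field itself counts as eight ASCII spaces.
            sum += (i >= 148 && i < 156) ? (byte)' ' : header[i];
        }
        var stored = Encoding.ASCII.GetString(header, 148, 8).Trim('\0', ' ');
        return stored.Length > 0 && Convert.ToInt64(stored, 8) == sum;
    }
}
```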
Zavarkó Gábor
471a3f63fe return Stream.Null when 7z entry has no stream 2024-07-07 00:14:00 +02:00
gjefferyes
9c257faf26 Merge branch 'master' into master 2024-06-26 06:28:55 -05:00
Adam Hathcock
d18cad6d76 Merge pull request #852 from LANCommander/fix-post-zip64-entry-subsequent-extractions
Fixed extractions after first ZIP64 entry is read from stream
2024-06-26 08:31:58 +01:00
GordonJ
061273be22 Added Export and (un)Reduce to sharpCompress 2024-06-25 11:35:11 -05:00
Pat Hartl
b89de6caad Fix formatting 2024-06-24 17:19:53 -05:00
Pat Hartl
9bc0a1d7c7 Null reference checking
Reorders this null reference check to avoid throwing a null reference exception.
2024-06-23 22:30:34 -05:00
Pat Hartl
eee518b7fa Reworked ZIP64 handling to separate block
The last commit made in this branch messed up some ZIP reading and caused a bunch of tests to fail. These changes branch off ZIP64 logic into its own block so that data is read correctly for 64 and non-64 entries.
2024-06-23 22:29:33 -05:00
Pat Hartl
b7b78edaa3 Fixed extractions after first ZIP64 entry is read from stream 2024-06-22 00:09:25 -05:00
Adam Hathcock
3eaac68ab4 Merge pull request #850 from Erior/feature/Issue-842
Issue 842
2024-06-18 13:45:53 +01:00
Adam Hathcock
a7672190e9 Merge branch 'master' into feature/Issue-842 2024-06-18 13:43:22 +01:00
Adam Hathcock
4e4e89b6eb Merge pull request #849 from Erior/develop
Fix for issue #844
2024-06-18 13:41:52 +01:00
Lars Vahlenberg
33dd519f56 Throw exception when bzip2 is corrupt 2024-06-08 18:26:12 +02:00
Lars Vahlenberg
5c1149aa8b #844 2024-06-08 17:22:20 +02:00
Adam Hathcock
9061e92af6 Merge pull request #848 from Morilli/fix-gzip-archivetype
Fix gzip archives having a `Type` of `ArchiveType.Tar` instead of `ArchiveType.Gzip`
2024-06-06 08:21:14 +01:00
Morilli
49f5ceaa9b Fix GZipArchive getting Type set to ArchiveType.Tar 2024-06-04 10:34:06 +02:00
Morilli
525b309d37 Add failing test 2024-06-04 10:33:32 +02:00
Adam Hathcock
bdb3a787fc Merge pull request #847 from DannyBoyk/846_tar_longlinkname
Tar: Add processing for the LongLink header type
2024-06-04 08:47:57 +01:00
Daniel Nash
a9601ef848 Tar: Add processing for the LongLink header type
Fixes #846
2024-06-03 12:54:19 -04:00
Adam Hathcock
6fc4b045fd mark for 0.37.2 2024-04-27 09:34:32 +01:00
Adam Hathcock
446852c7d0 really fix source link and central usage 2024-04-27 09:34:05 +01:00
Adam Hathcock
c635f00899 mark as 0.37.1 2024-04-27 09:12:17 +01:00
Adam Hathcock
1393629bc5 Mark sourcelink as PrivateAssets="All" 2024-04-27 06:15:29 +01:00
Adam Hathcock
49ce17b759 update zstdsharp.port and net8 is only trimmable 2024-04-25 08:35:52 +01:00
Adam Hathcock
74888021c8 Merge pull request #835 from Blokyk/fix-736
Prevent infinite loop when reading corrupted archive
2024-04-24 09:20:44 +01:00
Adam Hathcock
9483856439 fmt 2024-04-24 09:17:42 +01:00
blokyk
dbbc7c8132 fix(tar): prevent infinite loop when reading corrupted archive 2024-04-24 03:13:13 +02:00
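The fix pattern here generalizes: Stream.Read returning 0 means end of stream, so a skip loop driven by a corrupted size field must treat 0 as a stop condition instead of spinning forever. A sketch (helper hypothetical):

```csharp
using System;
using System.IO;

static class StreamSkip
{
    public static long SkipExactly(Stream stream, long count)
    {
        var buffer = new byte[8192];
        var remaining = count;
        while (remaining > 0)
        {
            var read = stream.Read(buffer, 0, (int)Math.Min(buffer.Length, remaining));
            if (read == 0)
            {
                break; // truncated/corrupted archive: bail out, don't loop forever
            }
            remaining -= read;
        }
        return count - remaining; // bytes actually skipped
    }
}
```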
Adam Hathcock
5d9c99508d fmt 2024-04-23 15:08:50 +01:00
Adam Hathcock
e4d5b56951 fix more nulls and tests 2024-04-23 15:08:32 +01:00
Adam Hathcock
e31238d121 fmt 2024-04-23 14:48:50 +01:00
Adam Hathcock
a283d99e1b compiles 2024-04-23 14:48:33 +01:00
Adam Hathcock
8cb621ebed Add SharpCompressException and use it or children in most places 2024-04-23 14:47:40 +01:00
Adam Hathcock
b203d165f5 Mark for 0.37.0 2024-04-23 10:25:32 +01:00
Adam Hathcock
c695e1136d Merge pull request #828 from adamhathcock/remove-netstandard20
Remove ~netstandard20~ just net7.0
2024-04-23 10:18:24 +01:00
Adam Hathcock
d847202308 add back net standard 2.0 2024-04-23 09:59:30 +01:00
Adam Hathcock
9d24e0a4b8 set package locks and central management 2024-04-23 09:37:25 +01:00
Adam Hathcock
745fe1eb9f references 2024-04-23 09:28:33 +01:00
Adam Hathcock
3e52b85e9d Merge remote-tracking branch 'origin/master' into remove-netstandard20
# Conflicts:
#	.config/dotnet-tools.json
2024-04-23 09:26:44 +01:00
Adam Hathcock
d26f020b50 Merge pull request #832 from adamhathcock/remove-ignored-nulls
Remove ignored nulls
2024-04-23 09:25:08 +01:00
Adam Hathcock
095b5f702c get rid of another null! 2024-04-23 09:20:20 +01:00
Adam Hathcock
9622853b8d fix and fmt 2024-04-23 09:16:05 +01:00
Adam Hathcock
b94e75fabe try to fix more tests 2024-04-23 09:06:49 +01:00
Adam Hathcock
23dd041e2e fix some tests 2024-04-23 08:52:10 +01:00
Adam Hathcock
c73ca21b4d fmt 2024-04-22 15:19:05 +01:00
Adam Hathcock
7ebdc85ad2 more null clean up 2024-04-22 15:17:24 +01:00
Adam Hathcock
99e2c8c90d more clean up 2024-04-22 15:10:22 +01:00
Adam Hathcock
f24bfdf945 fix tests 2024-04-22 14:57:08 +01:00
Adam Hathcock
7963233702 add missing usings 2024-04-22 14:18:41 +01:00
Adam Hathcock
b550df2038 get rid of more ! and update csharpier 2024-04-22 14:17:08 +01:00
Adam Hathcock
fb55624f5f add more null handling 2024-04-18 14:25:10 +01:00
Adam Hathcock
e96366f489 Entry can be null and remove other ! usages 2024-04-18 13:24:03 +01:00
Adam Hathcock
900190cf54 Merge pull request #829 from NeuroXiq/patch-1
Update README.md - Change API Docs to DNDocs
2024-04-15 08:14:16 +01:00
Marek Węglarz
2af744b474 Update README.md 2024-04-15 04:28:15 +02:00
Adam Hathcock
11153084e2 update csharpier 2024-04-11 15:47:39 +01:00
Adam Hathcock
4b9c814bfc remove .netstandard 2.0 and clean up 2024-04-11 15:46:43 +01:00
Adam Hathcock
1b5d3a3b6e Merge pull request #825 from adamhathcock/tar-corruption
Fix tar corruption when sizes mismatch
2024-04-11 13:11:29 +01:00
Adam Hathcock
373637e6a7 more logic fixes 2024-04-11 09:05:45 +01:00
Adam Hathcock
cb223217c1 actually, transfer block is different than overall transfer 2024-04-10 16:12:01 +01:00
Adam Hathcock
eab97a3f8b calculate remaining afterwards 2024-04-10 08:53:20 +01:00
Adam Hathcock
fdfaa8ab45 add max transfer size to tar 2024-04-09 15:35:15 +01:00
Adam Hathcock
2321d9dbee fix patch 2024-04-09 08:56:15 +01:00
Adam Hathcock
bf74dd887a Fix tar corruption when sizes mismatch 2024-04-09 08:19:23 +01:00
Adam Hathcock
3612035894 Merge pull request #823 from klimatr26/new-7z-filters
Add support for 7z ARM64 and RISCV filters
2024-04-08 09:56:07 +01:00
Adam Hathcock
6553e9b0cd formatting 2024-04-08 09:50:37 +01:00
klimatr26
09f2410170 Add support for 7z ARM64 and RISCV filters 2024-04-05 15:00:43 -05:00
Adam Hathcock
fb73d8c0a7 Merge pull request #819 from TwanVanDongen/master
Support added for TAR LZW compression (Unix 'compress' resulting in .…
2024-03-25 08:41:34 +00:00
Twan van Dongen
f2b0368078 CSharpier reformat missed 2024-03-24 16:29:29 +01:00
Twan van Dongen
02301ecf6d Support added for TAR LZW compression (Unix 'compress' resulting in .Z files) 2024-03-24 16:23:25 +01:00
Adam Hathcock
bcb61ee3e4 Merge pull request #817 from btomblinson/master
#809 Add README.md to csproj for NuGet
2024-03-18 08:34:20 +00:00
btomblinson
6a824429d0 #809 Add README.md to csproj for NuGet 2024-03-16 22:52:36 -06:00
Adam Hathcock
6a52f9097f Merge pull request #815 from adamhathcock/code-clean-up
Code clean up
2024-03-14 16:01:34 +00:00
Adam Hathcock
3fa85fc516 Merge branch 'master' into code-clean-up 2024-03-14 15:58:29 +00:00
Adam Hathcock
498d132d8a Merge pull request #816 from coderb/pullreq-rar-memusage
rar5 improve memory usage
2024-03-14 15:58:15 +00:00
root
b6340f1458 rar5 improve memory usage
- use ArrayPool for stream buffer
- use stackalloc for methods on file decompression code path
2024-03-14 11:50:45 -04:00
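A minimal sketch of the rent/return pattern named in this commit, using System.Buffers.ArrayPool:

```csharp
using System.Buffers;

// Rent at least 64 KiB from the shared pool instead of allocating per entry;
// the returned array may be larger than requested, so track the logical
// length separately.
var buffer = ArrayPool<byte>.Shared.Rent(64 * 1024);
try
{
    // ... fill/drain `buffer` on the decompression path ...
}
finally
{
    ArrayPool<byte>.Shared.Return(buffer);
}
```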
Adam Hathcock
4afc7ae2e4 use complete namespace 2024-03-14 13:07:40 +00:00
Adam Hathcock
95975a4c33 even more clean up 2024-03-14 09:07:21 +00:00
Adam Hathcock
198a0673a2 more clean up 2024-03-14 09:00:44 +00:00
Adam Hathcock
94d1503c64 more clean up 2024-03-14 08:57:16 +00:00
Adam Hathcock
5f13e245f0 more clean up on tests 2024-03-14 08:53:08 +00:00
Adam Hathcock
2715ae645d use var 2024-03-14 08:38:12 +00:00
Adam Hathcock
0299232cb5 just using rider to clean up 2024-03-14 08:37:17 +00:00
Adam Hathcock
93e181cfd9 update csharpier 2024-03-14 08:29:30 +00:00
Adam Hathcock
8072eb1212 Merge pull request #814 from coderb/pullreq-rar5-redir
rar5 read FHEXTRA_REDIR and expose via RarEntry
2024-03-14 08:26:06 +00:00
root
226ce340f2 rar5 read FHEXTRA_REDIR and expose via RarEntry
NOTE: API users should skip entries where RarEntry.IsRedir is true and not call OpenEntryStream()
2024-03-14 04:17:31 -04:00
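A usage sketch of that note, assuming the RarArchive API plus the IsRedir property this commit adds:

```csharp
using SharpCompress.Archives.Rar;

using var archive = RarArchive.Open("archive.rar");
foreach (var entry in archive.Entries)
{
    if (entry.IsDirectory || entry.IsRedir)
    {
        continue; // redirected entries (links) have no data stream to open
    }
    using var stream = entry.OpenEntryStream();
    // ... consume stream ...
}
```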
Adam Hathcock
ab5535eba3 Merge pull request #807 from TwanVanDongen/master
Support for decompressing Zip Shrink (Method:1)
2024-01-29 08:27:32 +00:00
Adam Hathcock
8da2499495 Merge pull request #805 from DannyBoyk/804_Fix_ZIP_Decryption
Zip: Use last modified time from basic header when validating zip decryption
2024-01-29 08:26:17 +00:00
Twan van Dongen
c057ffb153 Reformatted using CSharpier 2024-01-27 18:59:56 +01:00
Twan van Dongen
fe13d29549 Merge branch 'master' of https://github.com/TwanVanDongen/sharpcompress 2024-01-27 18:31:13 +01:00
Twan van Dongen
225aaab4f4 Support for decompressing Zip Shrink (method:1) added 2024-01-27 18:28:46 +01:00
Daniel Nash
14c973558b Zip: Use last modified time from basic header when validating zip decryption
The last modified time used for zip decryption validation must be the
one from the basic header. If UnixTimeExtraFields are present, the
previous implementation was attempting to verify against that value
instead.
Fixed #804
2024-01-26 10:54:41 -05:00
Adam Hathcock
f515ff36b6 Mark for 0.36.0 2024-01-15 08:21:36 +00:00
Adam Hathcock
ed57cfd2f9 Merge pull request #803 from DannyBoyk/802_Add_UnixTimeExtraField_Support_Zips
Add support for the UnixTimeExtraField in Zip files
2024-01-15 08:19:29 +00:00
Daniel Nash
d69559e9c7 Add support for the UnixTimeExtraField in Zip files
Fixes #802
2024-01-12 09:34:13 -05:00
Adam Hathcock
396717efd1 Merge pull request #799 from Erior/feature/DataDescriptorStream-fix-report-size-position
Fix reporting size / position
2024-01-08 09:18:34 +00:00
Adam Hathcock
284fa24464 Merge pull request #800 from Erior/feature/Expose-file-attributes-for-rar-entries
Expose file attributes for rar
2024-01-08 09:17:53 +00:00
Adam Hathcock
0a20b9179a Merge pull request #798 from Erior/feature/Fix-crash-when-not-setting-password-for-rar5
Set Empty string for Rar5 password as default
2024-01-08 09:17:07 +00:00
Adam Hathcock
a0d5037885 Merge pull request #801 from Erior/feature/771
Issue 771, remove throw on flush for readonly streams
2024-01-08 09:13:39 +00:00
Lars Vahlenberg
4477833b1d Issue 771, remove throw on flush for readonly streams 2024-01-06 00:14:34 +01:00
Lars Vahlenberg
e0a5ed4bdb Expose file attributes 2024-01-05 09:50:58 +01:00
Lars Vahlenberg
46d4b26eba Fix testing under Linux 2024-01-05 00:35:24 +01:00
Lars Vahlenberg
f7c6edf849 Fix reporting size / position 2024-01-04 23:33:39 +01:00
Lars Vahlenberg
6c157def4b set empty string if password not set 2024-01-04 21:16:07 +01:00
Adam Hathcock
741712f89f Merge pull request #794 from Erior/feature/rar5-blake2
Feature/rar5 blake2
2024-01-03 08:34:54 +00:00
Lars Vahlenberg
4f749da628 Merge branch 'develop' into feature/rar5-blake2 2024-01-02 21:26:51 +01:00
Lars Vahlenberg
8b02795d69 CSharpier 2024-01-02 21:25:40 +01:00
Lars Vahlenberg
f8a0069a5d Calc checksum when encrypted is not working for RAR5, disable for now 2024-01-02 21:18:49 +01:00
Lars Vahlenberg
388bbe047e Blake2 Archive test OK 2024-01-02 20:46:55 +01:00
Adam Hathcock
2d4ce30e58 Merge pull request #792 from DannyBoyk/791_Correct_EOCD_ZipWriter
ZipWriter: Write correct EOCD record when more than 65,535 files
2023-12-27 08:55:02 +00:00
Daniel Nash
d4fb17cf66 ZipWriter: Write correct EOCD record when more than 65,535 files
0xFFFF will be written to the EOCD to signal to use the ZIP64
CentralDirectory record when the number of files is 65,535 or more.
Fixes #791
2023-12-22 11:26:01 -05:00
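The sentinel rule in one helper (name hypothetical): the classic EOCD record stores the entry count in 16 bits, so once the count no longer fits, the writer emits 0xFFFF and readers fall back to the ZIP64 end-of-central-directory record for the real value.

```csharp
static class Zip64Eocd
{
    // 0xFFFF in the classic EOCD means "see the ZIP64 EOCD for the real count".
    public static ushort EntryCountField(long actualCount) =>
        actualCount >= 0xFFFF ? (ushort)0xFFFF : (ushort)actualCount;
}
```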
Adam Hathcock
372a2c8375 Mark for 0.35.0 2023-12-18 09:59:46 +00:00
Adam Hathcock
8f27121f21 Merge pull request #789 from adamhathcock/dotnet8
Dotnet8
2023-12-18 09:57:50 +00:00
Adam Hathcock
b986bf675f just remove readme 2023-12-18 09:31:33 +00:00
Adam Hathcock
80718a461b fix readme? 2023-12-18 09:24:00 +00:00
Adam Hathcock
2d14ecf58b add readme 2023-12-18 09:20:44 +00:00
Adam Hathcock
32aa9877c0 remove caching 2023-12-18 09:16:02 +00:00
Adam Hathcock
cee3a9c11d Revert "add lock files"
This reverts commit 30a31de45b.
2023-12-18 09:15:26 +00:00
Adam Hathcock
b78643f2d8 update upload artifact 2023-12-18 09:15:15 +00:00
Adam Hathcock
30a31de45b add lock files 2023-12-18 09:13:14 +00:00
Adam Hathcock
e4c4db534c build for dotnet 8 2023-12-18 09:09:31 +00:00
Adam Hathcock
4f7a0d3ad0 CI to dotnet 8 2023-12-18 09:08:06 +00:00
Adam Hathcock
ea3a96eead update and rerun csharpier 2023-12-18 09:04:04 +00:00
Adam Hathcock
c0e01ac132 Use dotnet 8 and update deps 2023-12-18 09:01:54 +00:00
Adam Hathcock
28ea50bca4 Merge pull request #788 from Erior/develop
RAR5 decryption support
2023-12-18 08:51:25 +00:00
Lars Vahlenberg
619e44b30f CSharpier fixes 2023-12-16 03:08:51 +01:00
Lars Vahlenberg
d678275dee Implement RAR5 decryption 2023-12-16 02:53:09 +01:00
Adam Hathcock
08eed53595 Merge pull request #787 from adamhathcock/dependabot/github_actions/actions/setup-dotnet-4
Bump actions/setup-dotnet from 3 to 4
2023-12-11 10:09:28 +00:00
dependabot[bot]
ff40f7d262 Bump actions/setup-dotnet from 3 to 4
Bumps [actions/setup-dotnet](https://github.com/actions/setup-dotnet) from 3 to 4.
- [Release notes](https://github.com/actions/setup-dotnet/releases)
- [Commits](https://github.com/actions/setup-dotnet/compare/v3...v4)

---
updated-dependencies:
- dependency-name: actions/setup-dotnet
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-12-11 09:35:30 +00:00
Adam Hathcock
3c1ae51dae Merge pull request #786 from Erior/feature/Issue-774
LZMA EOS marker detection
2023-12-11 08:46:20 +00:00
Adam Hathcock
8a59fc9aaf Merge pull request #785 from Erior/feature/Issue-782
Handle tar files generated with tar -H oldgnu that has large uid/gid values
2023-12-11 08:44:54 +00:00
Adam Hathcock
b7ea9dd841 Merge pull request #784 from Erior/feature/rar-comment
Dont crash on reading rar5 comment #783
2023-12-11 08:44:01 +00:00
Lars Vahlenberg
0320db6b4a LZMA EOS marker detection 2023-12-09 13:41:35 +01:00
Lars Vahlenberg
18c7f58093 Handle tar files generated with tar -H oldgnu that has large uid/gid values 2023-12-04 22:35:11 +01:00
Lars Vahlenberg
7f6f7b1436 Resharpier fix 2023-12-04 20:28:16 +01:00
Lars Vahlenberg
ca49176b97 Dont crash on reading rar5 comment 2023-12-04 20:19:11 +01:00
Adam Hathcock
67be0cd9d7 Mark for 0.34.2 2023-11-15 11:32:51 +00:00
Adam Hathcock
902fadef83 Merge pull request #780 from caesay/cs/revert-disable-strongname
Revert change disabling strong name signing in 92df1ec
2023-11-15 11:22:02 +00:00
Adam Hathcock
2777b6411f Merge branch 'master' into cs/revert-disable-strongname 2023-11-15 11:18:30 +00:00
Adam Hathcock
e3235d7f04 Merge pull request #781 from adamhathcock/fix-formatting
Update csharpier and fix formatting
2023-11-15 11:18:04 +00:00
Adam Hathcock
dc89c8858e comment out more C++ bits 2023-11-15 11:14:39 +00:00
Adam Hathcock
d28a278d63 Comment out flag to allow formatting 2023-11-15 11:10:05 +00:00
Adam Hathcock
7080c2abd0 Update csharpier and fix formatting 2023-11-15 11:05:30 +00:00
Caelan Sayler
43f86bcab8 Revert change disabling strong name signing in 92df1ec 2023-11-14 16:34:58 +00:00
Adam Hathcock
7d9c875c4d Merge pull request #778 from LANCommander/throw-cancelled-exception
Throw ReaderCancelledException on reader cancelled
2023-11-13 08:34:15 +00:00
Pat Hartl
ed4099eb12 Throw ReaderCancelledException on reader cancelled 2023-11-10 23:36:14 -06:00
Adam Hathcock
632b83f75d Mark for 0.34.1 2023-10-02 08:44:14 +01:00
Adam Hathcock
66c92637f9 Merge pull request #769 from Erior/feature/766
Update Zstd to 0.7.2
2023-09-25 09:01:51 +01:00
Adam Hathcock
6bcaebc471 Merge pull request #768 from Erior/feature/761
Feature/761
2023-09-25 09:01:20 +01:00
Lars Vahlenberg
7feee1027c Update Zstd to 0.7.2 2023-09-23 16:02:27 +02:00
Lars Vahlenberg
4fd8c77fa9 CSharpier cleanup 2023-09-23 14:52:39 +02:00
Lars Vahlenberg
bc3bb2d323 Set FilePart properties for directory type 2023-09-23 14:51:08 +02:00
Adam Hathcock
7764684c68 Release for 0.34 2023-09-18 09:53:05 +01:00
Adam Hathcock
feb2c38572 fmt update 2023-09-18 09:48:31 +01:00
Adam Hathcock
6a97e82a2e Merge pull request #763 from btomblinson/master
#751 Add .tar.7z support
2023-09-18 09:47:01 +01:00
btomblinson
57c0d19cde #751 Add .tar.7z support 2023-09-17 17:46:56 -06:00
Adam Hathcock
e14ed89f3d Merge pull request #759 from Erior/feature/748
Feature/748
2023-09-14 09:05:48 +01:00
Adam Hathcock
6a14893a23 Merge branch 'master' into feature/748 2023-09-14 09:00:50 +01:00
Lars Vahlenberg
0f19735d33 Cleanup 2023-09-11 19:31:27 +02:00
Lars Vahlenberg
6a859ac65d Adding Filters to 7z 2023-09-11 19:24:54 +02:00
Adam Hathcock
4d9c24244c Merge pull request #758 from adamhathcock/dependabot/github_actions/actions/checkout-4
Bump actions/checkout from 3 to 4
2023-09-11 10:47:54 +01:00
dependabot[bot]
cdeb288c4f Bump actions/checkout from 3 to 4
Bumps [actions/checkout](https://github.com/actions/checkout) from 3 to 4.
- [Release notes](https://github.com/actions/checkout/releases)
- [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md)
- [Commits](https://github.com/actions/checkout/compare/v3...v4)

---
updated-dependencies:
- dependency-name: actions/checkout
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-09-11 09:38:10 +00:00
Adam Hathcock
7f8016497b Merge pull request #750 from FlsZen/master
Add fast `ExtractToDirectoryAsync` extension method on `IArchive`
2023-09-07 15:05:15 +01:00
FlsZen
ec01225225 It looks like build format wants this on one line 2023-08-13 12:52:14 -04:00
James Hood
4e23b84999 Remove Task aspect 2023-07-21 13:17:42 -04:00
James Hood
c7c143fed9 Add ExtractToDirectoryAsync extension method on IArchive that is very fast compared to WriteToDirectory. 2023-07-19 08:22:15 -04:00
Adam Hathcock
4932171834 update csharpier 2023-06-12 09:53:00 +01:00
Adam Hathcock
00fc6c8e2f formatting 2023-06-12 09:52:35 +01:00
Adam Hathcock
1b73dab341 Merge remote-tracking branch 'Nanook/fixes-zstd' 2023-06-12 09:52:17 +01:00
Adam Hathcock
99e99c1ccd Merge branch 'master' into fixes-zstd 2023-06-12 09:45:35 +01:00
Adam Hathcock
14ce479ab7 Merge pull request #746 from rodesfl/patch-1
Added simple example
2023-06-12 09:31:17 +01:00
Rodrigo Lotrário
e7e873a1b2 Added simple example
Had some difficulty figuring this one out; as this is so straightforward and is the simplest use case, maybe it should be included at the top of the docs?
2023-06-09 19:38:42 -03:00
Adam Hathcock
5a57428ec0 Merge pull request #745 from Erior/feature/Issue-743
Skip if we know the size, set blank password if not set for rar
2023-06-07 09:44:36 +01:00
Lars Vahlenberg
18e8e6ee98 Sharpier 2023-06-06 22:43:12 +02:00
Lars Vahlenberg
6bf6e51740 Skip if we know the size, set blank password if not set for rar 2023-06-06 22:27:53 +02:00
Adam Hathcock
b52a899a18 Merge pull request #740 from AlissaSabre/issue_739
Make ArchiveFactory.IsArchive(Stream, ...) public. Fix #739
2023-05-10 09:56:50 +01:00
Alissa Sabre
cf722a7120 Make ArchiveFactory.IsArchive(Stream, ...) public. Fix #739 2023-05-10 15:49:44 +09:00
Adam Hathcock
33cd1f3db8 Merge pull request #737 from Erior/feature/TarHeaderFactory-Infinite-loop
Check for broken file #736
2023-03-30 11:35:20 +01:00
Lars Vahlenberg
b7d2715ffd Remove whitespace 2023-03-29 19:39:03 +02:00
Lars Vahlenberg
fe63466d67 CSharpier 2023-03-29 18:18:17 +02:00
Lars Vahlenberg
0fb63eea99 Check for broken file 2023-03-28 23:07:21 +02:00
Adam Hathcock
59552804f6 fix badge 2023-03-21 14:11:18 +00:00
Adam Hathcock
579d6d73f8 build tests 2023-03-21 13:56:51 +00:00
Adam Hathcock
f83e3022ba add no restore to build 2023-03-21 13:54:45 +00:00
Adam Hathcock
bf93bbf5f8 fix restore 2023-03-21 13:44:02 +00:00
Adam Hathcock
fa2a52ff41 add caching and some cleanup 2023-03-21 13:41:36 +00:00
Adam Hathcock
813d9eace3 update dependencies 2023-03-21 13:16:31 +00:00
Adam Hathcock
16e8dd447b update csharpier 2023-03-21 13:14:08 +00:00
Adam Hathcock
0129b31dec Merge pull request #735 from Erior/feature/Expand-Delta-distance-tests
Remove check for minimal distance and add test case generated by 7z as compatibility check
2023-03-21 10:06:10 +00:00
Adam Hathcock
9544960314 Merge branch 'master' into feature/Expand-Delta-distance-tests 2023-03-21 08:34:54 +00:00
Lars Vahlenberg
23a9ca3140 Delta distance 2023-03-19 12:37:12 +01:00
Adam Hathcock
dfa4bb6ca4 Merge pull request #733 from Erior/feature/Handle-split-rar-archive-above-100-parts
Increase character value to support rar file with more than 100 parts…
2023-03-13 09:01:22 +00:00
Lars Vahlenberg
8aac3320be Increase character value to support rar files with more than 100 parts, old style 2023-03-11 02:41:50 +01:00
Adam Hathcock
42ddb0d5ed Merge pull request #727 from Erior/feature/UncompressedZipExtractall
Implement Searching Data Descriptor stream issue/pull #680
2023-03-02 09:47:44 +00:00
Lars Vahlenberg
f60728b537 ReadOnlySubStream span position bug 2023-03-01 20:17:43 +01:00
Lars Vahlenberg
9e6f9d50ef Sync with master 2023-03-01 20:13:13 +01:00
Adam Hathcock
d76a473324 Merge pull request #729 from TwanVanDongen/master
Fixed support for RAR 1.5 (algo15)
2023-03-01 08:56:40 +00:00
Adam Hathcock
5a2d2b86d5 Merge branch 'master' into master 2023-03-01 08:52:34 +00:00
Adam Hathcock
42e8f481d6 Merge pull request #726 from Erior/feature/7ZipDelta
Add support for 7ZipDelta decompress
2023-03-01 08:43:48 +00:00
Adam Hathcock
9ecf652745 Merge pull request #722 from Nanook/fixes-XZ-zip
Zip Multipart fix, XZ stream fix, XZ stream support added to zip/zipx
2023-03-01 08:36:21 +00:00
Twan van Dongen
8748314b5b Fixed support for RAR 1.5 (algo15) and added a test archive for unit testing. The RAR 1.5 archive was created using the DOS version, which only supports uppercase characters, thus requiring a 'UseCaseInsensitiveToVerify' toggle on the base test class. 2023-02-21 21:28:38 +01:00
Lars Vahlenberg
606923b374 Implement Searching Data Descriptor stream for supporting UncompressedZipExtractAll 2023-02-19 01:33:22 +01:00
Lars Vahlenberg
3ae830a637 Add support for 7ZipDelta decompress 2023-02-18 15:27:16 +01:00
Nanook
92df1ecd5f ZStandard support added to zip/zipx (WinZip) and 7zip (mcmilk 7zip) 2023-02-02 01:06:18 +00:00
Nanook
b9d019561f Added the XZ zipx unit test. 2023-02-02 00:39:34 +00:00
Nanook
48a341b79c Split archive handling fix. XZ stream decoding bug fix (in X64Convert filter). XZ stream support added to zip/zipx. 2023-02-02 00:18:37 +00:00
Adam Hathcock
d1ea8517d2 Merge pull request #717 from ds5678/lzma-improvements
Several improvements to the LZMA Compressor
2022-12-23 10:24:24 +00:00
Jeremy Pritts
cd23844e35 Several improvements to the LZMA compressor:
* Encapsulation and Immutability for LzmaEncoderProperties
* LzmaEncoderProperties.Default static property
* Encoding tests for LzmaStream
2022-12-23 00:00:04 -05:00
Adam Hathcock
3f24af3a99 Merge pull request #716 from itn3000/fix-reflection-aot-error
replace Activator.CreateInstance with Func to avoid errors in NativeAOT
2022-12-21 13:37:19 +00:00
itn3000
78421683fe remove redundant cast 2022-12-21 14:13:24 +09:00
itn3000
61bfbdb26e fix compilation error 2022-12-21 14:10:52 +09:00
itn3000
a7a5c41370 replace Activator.CreateInstance with Func to avoid errors in NativeAOT 2022-12-21 10:37:12 +09:00
Adam Hathcock
f0b4c13200 Merge pull request #715 from adamhathcock/net7
Update to dotnet 7.  Change net461 to net462.  Remove netcoreapp3.1
2022-12-20 16:00:31 +00:00
Adam Hathcock
b01e97b168 more clean up 2022-12-20 15:20:49 +00:00
Adam Hathcock
7da10795a1 csharpier 2022-12-20 15:14:22 +00:00
Adam Hathcock
959bbdcf1b big clean up 2022-12-20 15:06:44 +00:00
Adam Hathcock
970e31a1b1 cleanup and run csharpier 2022-12-20 13:45:47 +00:00
Adam Hathcock
2dae2b7984 update build targets and CI 2022-12-20 13:40:33 +00:00
Adam Hathcock
d6ac9a0363 Fix build 2022-12-20 13:09:16 +00:00
Adam Hathcock
18336c2b8e Update to dotnet 7. Change net461 to net462. Remove netcoreapp3.1 2022-12-20 11:17:45 +00:00
Adam Hathcock
fc02d32ac3 Merge pull request #709 from vpenades/master
Generalized factories to readers and writers.
2022-12-06 14:17:02 +00:00
vpenades
891d5d3c35 commented unsupported extensions 2022-12-06 10:13:09 +01:00
vpenades
17dab3df34 small refactor 2022-12-05 22:36:18 +01:00
vpenades
37c7251ec9 added additional TAR extensions 2022-12-05 22:29:28 +01:00
vpenades
a22393075f Generalized archive creation 2022-12-05 09:31:30 +01:00
Vicente Penades
ca3d088785 generalized multipart archive interface 2022-12-03 13:44:19 +01:00
vpenades
8775b65f58 Generalized factories to readers and writers. 2022-11-29 16:45:20 +01:00
Adam Hathcock
c1204a5efb Merge pull request #706 from andreas-eriksson/master
Use PackageLicenseExpression instead of PackageLicenseFile
2022-11-28 08:38:49 +00:00
Andreas Eriksson
941db572c7 Add Copyright. 2022-11-27 12:39:59 +01:00
Andreas Eriksson
addb7ca95d spaces 2022-11-27 12:37:28 +01:00
Andreas Eriksson
5a930930da Use PackageLicenseExpression instead of PackageLicenseFile 2022-11-27 12:36:19 +01:00
Adam Hathcock
df877e87bf Merge pull request #699 from adamhathcock/dependabot/github_actions/actions/setup-dotnet-3
Bump actions/setup-dotnet from 2 to 3
2022-10-03 12:31:33 +01:00
dependabot[bot]
adb200d885 Bump actions/setup-dotnet from 2 to 3
Bumps [actions/setup-dotnet](https://github.com/actions/setup-dotnet) from 2 to 3.
- [Release notes](https://github.com/actions/setup-dotnet/releases)
- [Commits](https://github.com/actions/setup-dotnet/compare/v2...v3)

---
updated-dependencies:
- dependency-name: actions/setup-dotnet
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-10-03 09:26:24 +00:00
Adam Hathcock
4f0840b8b8 Merge pull request #697 from IngBertolini/rar_comment
Added support for reading comment header for Rar v5 archives
2022-09-30 08:53:37 +01:00
IngBertolini
c37ea5c890 Forgot to consider the zero termination 2022-09-29 18:14:56 +02:00
IngBertolini
cac9366c86 Update RarVolume.cs 2022-09-29 17:44:15 +02:00
IngBertolini
1fc28f8cd1 Update RarVolume.cs 2022-09-29 17:42:43 +02:00
IngBertolini
2109643ec0 Update RarVolume.cs 2022-09-29 17:23:06 +02:00
Adam Hathcock
3caf5a5a6d Merge pull request #696 from stakira/master
Ignores UnicodePathExtra if forced encoding is specified
2022-09-15 10:16:18 +01:00
Sugita Akira
418c1256a8 Ignores UnicodePathExtra if forced encoding is specified 2022-09-14 21:45:41 -07:00
Adam Hathcock
3ae7ba89e8 Merge pull request #689 from Erior/master
64bit datadescriptors
2022-09-06 11:52:00 +01:00
Adam Hathcock
191bb9d916 Merge branch 'master' into master 2022-09-06 11:46:16 +01:00
Adam Hathcock
f1d8fadabf Merge pull request #671 from vpenades/master
Introduced IArchiveFactory
2022-09-05 09:46:24 +01:00
Erior
c898732739 Merge branch 'master' into master 2022-08-31 16:37:46 +02:00
Vicente Penades
6e4f54eaf6 Merge branch 'master' into master 2022-08-26 14:34:01 +02:00
Adam Hathcock
4ba2963cfe Merge pull request #690 from louis-michelbergeron/master
Access level to LzmaStream Decoder
2022-08-09 08:28:05 +01:00
Lars Vahlenberg
20e6209036 Merge 2022-08-04 21:24:27 +02:00
Lars Vahlenberg
44a5433af3 64bit DataDescriptor 2022-08-04 21:22:31 +02:00
Adam Hathcock
0f6f13ed3e Merge pull request #687 from Erior/feature/SourceStreamPositionCountingBug
SourceStream Position counting bug fix
2022-08-01 08:14:57 +01:00
Lars Vahlenberg
0ae75634b1 SourceStream Position counting bug fix 2022-07-30 16:28:43 +02:00
Vicente Penades
9458ea81da Merge branch 'adamhathcock:master' into master 2022-07-30 15:24:10 +02:00
louis-michel
9f539c1d08 Merge branch 'master' of https://github.com/louis-michelbergeron/sharpcompress 2022-07-29 14:07:17 -04:00
louis-michel
37b03c7d5a Accessibility of Decoder 2022-07-29 14:06:53 -04:00
Adam Hathcock
3009e6dcfd Mark for 0.32.2 2022-07-29 10:45:56 +01:00
Adam Hathcock
70343b17bc add more tests for uncompressed streaming zips 2022-07-29 09:47:35 +01:00
Adam Hathcock
3f6027ec2c Merge pull request #686 from Erior/477
Mitigation of problems
2022-07-29 09:41:24 +01:00
Lars Vahlenberg
5706732c55 Naive implementation of DataDescriptor searching; not compatible with big archives (>32-bit), but handles the test cases. 2022-07-28 23:03:06 +02:00
Lars Vahlenberg
ad633a9dd0 missing test file from error report 2022-07-28 21:20:42 +02:00
Lars Vahlenberg
7c56df1237 Mitigation of problems 2022-07-28 20:36:28 +02:00
Adam Hathcock
c1110f2897 Merge pull request #683 from OwnageIsMagic/patch-1
WriteAll: use delegate instead of Expression
2022-07-27 10:13:50 +01:00
Adam Hathcock
647642578b Merge branch 'master' into patch-1 2022-07-27 09:49:13 +01:00
OwnageIsMagic
5ca4efac31 WriteAll: revert 109a7c1 2022-07-26 21:36:00 +03:00
Adam Hathcock
deddf12b70 Merge pull request #684 from daverant/nuget-license
Include license in nuget package
2022-07-26 16:21:41 +01:00
OwnageIsMagic
109a7c12ea WriteAll: update delegate type 2022-07-19 04:03:26 +03:00
David Rant
f955031e27 Hide license in IDE 2022-07-18 17:16:22 +01:00
David Rant
6a69c6cd02 Reference bundled package license file 2022-07-18 17:11:06 +01:00
David Rant
c1d4ac45ab Include license when packing 2022-07-18 17:10:36 +01:00
OwnageIsMagic
2946a35b0e WriteAll: use delegate instead of Expression 2022-07-18 04:36:31 +03:00
Adam Hathcock
c73a8cb18f Merge pull request #682 from adamhathcock/RarFileVolIdx_RarArcVer_GzCrc 2022-07-16 11:29:48 +01:00
Nanook
574a093038 Minor tweak that got missed in the last tidy. 2022-07-15 21:25:39 +01:00
Nanook
4eb1fe0b80 RarArchive has Min/MaxVersion. RarEntry has Volume Indexes. GZ CRC fix. 2022-07-15 21:15:10 +01:00
Adam Hathcock
4c46cd725b Merge pull request #679 from louis-michelbergeron/master
Fix LZMADecoder Code function
2022-06-28 08:27:13 +01:00
Adam Hathcock
fdbd0e1fba Merge branch 'master' into master 2022-06-28 08:21:49 +01:00
louis-michel
5801168ce0 Merge branch 'master' of https://github.com/louis-michelbergeron/sharpcompress 2022-06-27 19:13:20 -04:00
louis-michel
d4c7551087 Fix LZMA Code function 2022-06-27 19:13:10 -04:00
Adam Hathcock
c9daf0c9f5 Merge pull request #675 from Erior/feature/#636
ReadOnlySubStream overrides and adds logic #636
2022-06-22 11:17:18 +01:00
Adam Hathcock
8cb566b031 Merge branch 'master' into feature/#636 2022-06-22 09:05:57 +01:00
Lars Vahlenberg
089b16326e ReadOnlySubStream overrides and adds logic to Read byte[]; the same logic is needed for Span<byte> for consistency. 2022-06-21 19:30:07 +02:00
Adam Hathcock
c0e43cc0e5 Mark for 0.32.1 2022-06-20 10:32:47 +01:00
Adam Hathcock
514c3539e6 Merge pull request #672 from MartinDemberger/Task_477
Corrected skip-marker when skipping an uncompressed ZIP file with missing size information.
2022-06-20 10:31:31 +01:00
Adam Hathcock
62c94a178c Merge branch 'master' into Task_477 2022-06-20 10:26:45 +01:00
Adam Hathcock
9fee38b18d Merge pull request #674 from MartinDemberger/DeduplicateNonDisposing
Suppress nested NonDisposingStream
2022-06-20 10:25:25 +01:00
Adam Hathcock
cd3114d39e Merge branch 'master' into DeduplicateNonDisposing 2022-06-20 10:20:02 +01:00
Adam Hathcock
12b4e15812 Merge pull request #673 from Erior/feature/Malformed-zip-file-generated
Feature/malformed zip file generated
2022-06-20 10:19:41 +01:00
Martin Demberger
35336a0827 Suppress nested NonDisposingStream 2022-06-19 22:05:52 +02:00
Martin Demberger
ece7cbfec3 Set skip-marker when stream is skipped 2022-06-18 14:35:14 +02:00
Lars Vahlenberg
a00075ee0d Wrong flags set, we do not expose this in the interface 2022-06-17 15:07:07 +02:00
Lars Vahlenberg
b6c4e28b4d Generated test case, however, don't see any problems 2022-06-16 23:32:46 +02:00
Martin Demberger
8b55cce39a Better handling of uncompressed zip files. 2022-06-15 16:28:14 +02:00
vpenades
1db216e6ec Merge branch 'master' of https://github.com/vpenades/sharpcompress 2022-06-14 11:40:33 +02:00
vpenades
20142f91fd Added remaining Open method variants 2022-06-14 11:39:52 +02:00
Vicente Penades
b34be141b7 Merge branch 'master' into master 2022-06-13 17:15:22 +02:00
vpenades
8ea8dcde19 Added Archive Name 2022-06-13 13:34:21 +02:00
vpenades
b205e2a84f Introduced IArchiveFactory 2022-06-13 11:56:32 +02:00
517 changed files with 72167 additions and 51395 deletions

View File

@@ -2,11 +2,12 @@
"version": 1,
"isRoot": true,
"tools": {
"dotnet-format": {
"version": "4.1.131201",
"csharpier": {
"version": "1.1.2",
"commands": [
"dotnet-format"
]
"csharpier"
],
"rollForward": false
}
}
}

7
.copilot-agent.yml Normal file
View File

@@ -0,0 +1,7 @@
enabled: true
agent:
  name: copilot-coding-agent
  allow:
    - paths: ["src/**/*", "tests/**/*", "README.md", "AGENTS.md"]
      actions: ["create", "modify"]
  require_review_before_merge: true

View File

@@ -70,7 +70,7 @@ indent_style = tab
[*.{cs,csx,cake,vb,vbx}]
# Default Severity for all .NET Code Style rules below
dotnet_analyzer_diagnostic.severity = warning
dotnet_analyzer_diagnostic.severity = silent
##########################################
# File Header (Uncomment to support file headers)
@@ -195,8 +195,6 @@ csharp_prefer_static_local_function = true:warning
csharp_prefer_simple_using_statement = true:suggestion
dotnet_diagnostic.IDE0063.severity = suggestion
csharp_style_namespace_declarations = file_scoped
##########################################
# .NET Formatting Conventions
# https://docs.microsoft.com/visualstudio/ide/editorconfig-code-style-settings-reference#formatting-conventions
@@ -251,6 +249,8 @@ csharp_space_between_square_brackets = false
csharp_preserve_single_line_statements = false
csharp_preserve_single_line_blocks = true
csharp_style_namespace_declarations = file_scoped
##########################################
# .NET Naming Conventions
# https://docs.microsoft.com/visualstudio/ide/editorconfig-naming-conventions
@@ -260,13 +260,17 @@ csharp_preserve_single_line_blocks = true
dotnet_diagnostic.CA1000.severity = suggestion
dotnet_diagnostic.CA1001.severity = error
dotnet_diagnostic.CA1018.severity = error
dotnet_diagnostic.CA1036.severity = silent
dotnet_diagnostic.CA1051.severity = suggestion
dotnet_diagnostic.CA1068.severity = error
dotnet_diagnostic.CA1069.severity = error
dotnet_diagnostic.CA1304.severity = error
dotnet_diagnostic.CA1305.severity = suggestion
dotnet_diagnostic.CA1307.severity = suggestion
dotnet_diagnostic.CA1309.severity = suggestion
dotnet_diagnostic.CA1310.severity = error
dotnet_diagnostic.CA1507.severity = suggestion
dotnet_diagnostic.CA1513.severity = suggestion
dotnet_diagnostic.CA1707.severity = suggestion
dotnet_diagnostic.CA1708.severity = suggestion
dotnet_diagnostic.CA1711.severity = suggestion
@@ -283,6 +287,8 @@ dotnet_diagnostic.CA1829.severity = suggestion
dotnet_diagnostic.CA1834.severity = error
dotnet_diagnostic.CA1845.severity = suggestion
dotnet_diagnostic.CA1848.severity = suggestion
dotnet_diagnostic.CA1852.severity = suggestion
dotnet_diagnostic.CA1860.severity = silent
dotnet_diagnostic.CA2016.severity = suggestion
dotnet_diagnostic.CA2201.severity = error
dotnet_diagnostic.CA2206.severity = error
@@ -295,11 +301,12 @@ dotnet_diagnostic.CA2254.severity = suggestion
dotnet_diagnostic.CS0169.severity = error
dotnet_diagnostic.CS0219.severity = error
dotnet_diagnostic.CS0649.severity = suggestion
dotnet_diagnostic.CS1998.severity = error
dotnet_diagnostic.CS8602.severity = error
dotnet_diagnostic.CS8604.severity = error
dotnet_diagnostic.CS8618.severity = error
dotnet_diagnostic.CS0618.severity = error
dotnet_diagnostic.CS0618.severity = suggestion
dotnet_diagnostic.CS1998.severity = error
dotnet_diagnostic.CS4014.severity = error
dotnet_diagnostic.CS8600.severity = error
@@ -310,38 +317,56 @@ dotnet_diagnostic.BL0005.severity = suggestion
dotnet_diagnostic.MVC1000.severity = suggestion
dotnet_diagnostic.IDE0055.severity = suggestion # Fix formatting
dotnet_diagnostic.IDE0023.severity = suggestion # use expression body for operators
dotnet_diagnostic.IDE0025.severity = suggestion # use expression body for properties
dotnet_diagnostic.IDE1006.severity = suggestion # Naming rule violation: These words cannot contain lower case characters
dotnet_diagnostic.IDE0072.severity = suggestion # Populate switch - forces population of all cases even when default specified
dotnet_diagnostic.IDE0027.severity = suggestion # Use expression body for accessors
dotnet_diagnostic.IDE0032.severity = suggestion # Use auto property
dotnet_diagnostic.IDE0007.severity = error # Use var
dotnet_diagnostic.IDE0160.severity = suggestion # Use block scoped
dotnet_diagnostic.IDE0011.severity = error # Use braces on if statements
dotnet_diagnostic.IDE0057.severity = suggestion # substring can be simplified
dotnet_diagnostic.RZ10012.severity = error
dotnet_diagnostic.IDE0004.severity = error # redundant cast
dotnet_diagnostic.IDE0005.severity = suggestion
dotnet_diagnostic.IDE0007.severity = error # Use var
dotnet_diagnostic.IDE0011.severity = error # Use braces on if statements
dotnet_diagnostic.IDE0010.severity = silent # populate switch
dotnet_diagnostic.IDE0017.severity = suggestion # initialization can be simplified
dotnet_diagnostic.IDE0021.severity = silent # expression body for constructors
dotnet_diagnostic.IDE0022.severity = silent # expression body for methods
dotnet_diagnostic.IDE0023.severity = suggestion # use expression body for operators
dotnet_diagnostic.IDE0024.severity = silent # expression body for operators
dotnet_diagnostic.IDE0028.severity = silent
dotnet_diagnostic.IDE0025.severity = suggestion # use expression body for properties
dotnet_diagnostic.IDE0027.severity = suggestion # Use expression body for accessors
dotnet_diagnostic.IDE0028.severity = silent # expression body for accessors
dotnet_diagnostic.IDE0032.severity = suggestion # Use auto property
dotnet_diagnostic.IDE0033.severity = error # prefer tuple name
dotnet_diagnostic.IDE0037.severity = suggestion # simplify anonymous type
dotnet_diagnostic.IDE0040.severity = error # modifiers required
dotnet_diagnostic.IDE0041.severity = error # simplify null
dotnet_diagnostic.IDE0042.severity = error # deconstruct variable
dotnet_diagnostic.IDE0044.severity = error # make field only when possible
dotnet_diagnostic.IDE0047.severity = suggestion # paratemeter name
dotnet_diagnostic.IDE0044.severity = suggestion # make field only when possible
dotnet_diagnostic.IDE0047.severity = suggestion # parameter name
dotnet_diagnostic.IDE0051.severity = error # unused field
dotnet_diagnostic.IDE0052.severity = error # unused member
dotnet_diagnostic.IDE0053.severity = suggestion # lambda not needed
dotnet_diagnostic.IDE0055.severity = suggestion # Fix formatting
dotnet_diagnostic.IDE0057.severity = suggestion # substring can be simplified
dotnet_diagnostic.IDE0060.severity = suggestion # unused parameters
dotnet_diagnostic.IDE0061.severity = suggestion # local expression body
dotnet_diagnostic.IDE0062.severity = suggestion # local to static
dotnet_diagnostic.IDE0063.severity = error # simplify using
dotnet_diagnostic.IDE0066.severity = suggestion # switch expression
dotnet_diagnostic.IDE0072.severity = suggestion # Populate switch - forces population of all cases even when default specified
dotnet_diagnostic.IDE0078.severity = suggestion # use pattern matching
dotnet_diagnostic.IDE0090.severity = suggestion # new can be simplified
dotnet_diagnostic.IDE0130.severity = suggestion # namespace folder structure
dotnet_diagnostic.IDE0160.severity = silent # Use block namespaces ARE NOT required
dotnet_diagnostic.IDE0161.severity = suggestion # Please use file namespaces
dotnet_diagnostic.IDE0161.severity = error # Please use file namespaces
dotnet_diagnostic.IDE0200.severity = suggestion # lambda not needed
dotnet_diagnostic.IDE1006.severity = suggestion # Naming rule violation: These words cannot contain lower case characters
dotnet_diagnostic.IDE0260.severity = suggestion # Use pattern matching
dotnet_diagnostic.IDE0270.severity = suggestion # Null check simplification
dotnet_diagnostic.IDE0290.severity = error # Primary Constructor
dotnet_diagnostic.IDE0300.severity = suggestion # Collection
dotnet_diagnostic.IDE0305.severity = suggestion # Collection ToList
dotnet_diagnostic.NX0001.severity = error
dotnet_diagnostic.NX0002.severity = silent
dotnet_diagnostic.NX0003.severity = silent
##########################################
# Styles

13
.github/COPILOT_AGENT_README.md vendored Normal file
View File

@@ -0,0 +1,13 @@
# Copilot Coding Agent Configuration
This repository includes a minimal opt-in configuration and CI workflow to allow the GitHub Copilot coding agent to open and validate PRs.
- .copilot-agent.yml: opt-in config for automated agents
- .github/workflows/dotnetcore.yml: CI runs on PRs touching the solution, source, or tests to validate changes
- AGENTS.yml: general information for this project
Maintainers can adjust the allowed paths or disable the agent by editing or removing .copilot-agent.yml.
Notes:
- Do not change any other files in the repository.
- If build/test paths are different, update the workflow accordingly; this workflow targets SharpCompress.sln and the SharpCompress.Tests test project.

17
.github/agents/copilot-agent.yml vendored Normal file
View File

@@ -0,0 +1,17 @@
enabled: true
agent:
  name: copilot-coding-agent
  allow:
    - paths: ["src/**/*", "tests/**/*", "README.md", "AGENTS.md"]
      actions: ["create", "modify", "delete"]
  require_review_before_merge: true
  required_approvals: 1
  allowed_merge_strategies:
    - squash
    - merge
  auto_merge_on_green: false
  run_workflows: true
notes: |
  - This manifest expresses the policy for the Copilot coding agent in this repository.
  - It does NOT install or authorize the agent; a repository admin must install the Copilot coding agent app and grant the repository the necessary permissions (contents: write, pull_requests: write, checks: write, actions: write/read, issues: write) to allow the agent to act.
  - Keep allow paths narrow and prefer require_review_before_merge during initial rollout.

View File

@@ -1,6 +1,13 @@
version: 2
updates:
- package-ecosystem: "github-actions" # search for actions - there are other options available
directory: "/" # search in .github/workflows under root `/`
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly" # check for action update every week
interval: "weekly"
- package-ecosystem: "nuget"
directory: "/" # change to "/src/YourProject" if .csproj files are in subfolders
schedule:
interval: "weekly"
open-pull-requests-limit: 5
# optional: target-branch: "master"

View File

@@ -1,5 +1,10 @@
name: SharpCompress
on: [push, pull_request]
on:
push:
branches:
- 'master'
pull_request:
types: [ opened, synchronize, reopened, ready_for_review ]
jobs:
build:
@@ -9,12 +14,12 @@ jobs:
os: [windows-latest, ubuntu-latest]
steps:
- uses: actions/checkout@v3
- uses: actions/setup-dotnet@v2
- uses: actions/checkout@v5
- uses: actions/setup-dotnet@v5
with:
dotnet-version: 6.0.x
- run: dotnet run -p build/build.csproj
- uses: actions/upload-artifact@v3
dotnet-version: 8.0.x
- run: dotnet run --project build/build.csproj
- uses: actions/upload-artifact@v5
with:
name: ${{ matrix.os }}-sharpcompress.nupkg
path: artifacts/*

65
AGENTS.md Normal file
View File

@@ -0,0 +1,65 @@
---
description: 'Guidelines for building C# applications'
applyTo: '**/*.cs'
---
# C# Development
## C# Instructions
- Always use the latest C# version; currently, C# 13 features.
- Write clear and concise comments for each function.
## General Instructions
- Make only high confidence suggestions when reviewing code changes.
- Write code with good maintainability practices, including comments on why certain design decisions were made.
- Handle edge cases and write clear exception handling.
- For libraries or external dependencies, mention their usage and purpose in comments.
## Naming Conventions
- Follow PascalCase for component names, method names, and public members.
- Use camelCase for private fields and local variables.
- Prefix interface names with "I" (e.g., IUserService).
## Code Formatting
- Use CSharpier for all code formatting to ensure consistent style across the project.
- Install CSharpier globally: `dotnet tool install -g csharpier`
- Format files with: `dotnet csharpier format .`
- **ALWAYS run `dotnet csharpier format .` after making code changes before committing.**
- Configure your IDE to format on save using CSharpier.
- CSharpier configuration can be customized via `.csharpierrc` file in the project root.
- Trust CSharpier's opinionated formatting decisions to maintain consistency.
## Project Setup and Structure
- Guide users through creating a new .NET project with the appropriate templates.
- Explain the purpose of each generated file and folder to build understanding of the project structure.
- Demonstrate how to organize code using feature folders or domain-driven design principles.
- Show proper separation of concerns with models, services, and data access layers.
- Explain the Program.cs and configuration system in ASP.NET Core 9 including environment-specific settings.
## Nullable Reference Types
- Declare variables non-nullable, and check for `null` at entry points.
- Always use `is null` or `is not null` instead of `== null` or `!= null`.
- Trust the C# null annotations and don't add null checks when the type system says a value cannot be null.
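For instance, a minimal sketch of these conventions (the `IUserService`, `FindById`, and `DisplayName` names are hypothetical placeholders, not part of this repository):
```csharp
public static string GetDisplayName(IUserService userService, string userId)
{
    // Null check at the entry point, using pattern matching instead of == / !=
    if (userService is null)
    {
        throw new ArgumentNullException(nameof(userService));
    }

    // FindById is assumed to return a nullable User; after the pattern match
    // the compiler treats user as non-null, so no extra checks are needed
    var user = userService.FindById(userId);
    return user is not null ? user.DisplayName : "unknown";
}
```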
## Testing
- Always include test cases for critical paths of the application.
- Guide users through creating unit tests.
- Do not emit "Act", "Arrange" or "Assert" comments.
- Copy existing style in nearby files for test method names and capitalization.
- Explain integration testing approaches for API endpoints.
- Demonstrate how to mock dependencies for effective testing.
- Show how to test authentication and authorization logic.
- Explain test-driven development principles as applied to API development.
## Performance Optimization
- Guide users on implementing caching strategies (in-memory, distributed, response caching).
- Explain asynchronous programming patterns and why they matter for API performance.
- Demonstrate pagination, filtering, and sorting for large data sets.
- Show how to implement compression and other performance optimizations.
- Explain how to measure and benchmark API performance.

16
Directory.Build.props Normal file
View File

@@ -0,0 +1,16 @@
<Project>
  <PropertyGroup>
    <LangVersion>latest</LangVersion>
    <Nullable>enable</Nullable>
    <AnalysisMode>Recommended</AnalysisMode>
    <WarningsAsErrors>true</WarningsAsErrors>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
    <CodeAnalysisTreatWarningsAsErrors>true</CodeAnalysisTreatWarningsAsErrors>
    <EnforceCodeStyleInBuild>true</EnforceCodeStyleInBuild>
    <AllowUnsafeBlocks>true</AllowUnsafeBlocks>
    <RunAnalyzersDuringLiveAnalysis>False</RunAnalyzersDuringLiveAnalysis>
    <RunAnalyzersDuringBuild>False</RunAnalyzersDuringBuild>
    <ManagePackageVersionsCentrally>true</ManagePackageVersionsCentrally>
    <RestorePackagesWithLockFile>true</RestorePackagesWithLockFile>
  </PropertyGroup>
</Project>

20
Directory.Packages.props Normal file
View File

@@ -0,0 +1,20 @@
<Project>
  <ItemGroup>
    <PackageVersion Include="Bullseye" Version="6.0.0" />
    <PackageVersion Include="AwesomeAssertions" Version="9.2.1" />
    <PackageVersion Include="Glob" Version="1.1.9" />
    <PackageVersion Include="JetBrains.Profiler.SelfApi" Version="2.5.14" />
    <PackageVersion Include="Microsoft.Bcl.AsyncInterfaces" Version="8.0.0" />
    <PackageVersion Include="Microsoft.NET.Test.Sdk" Version="18.0.0" />
    <PackageVersion Include="Mono.Posix.NETStandard" Version="1.0.0" />
    <PackageVersion Include="SimpleExec" Version="12.0.0" />
    <PackageVersion Include="System.Buffers" Version="4.6.1" />
    <PackageVersion Include="System.Memory" Version="4.6.3" />
    <PackageVersion Include="System.Text.Encoding.CodePages" Version="8.0.0" />
    <PackageVersion Include="xunit" Version="2.9.3" />
    <PackageVersion Include="xunit.runner.visualstudio" Version="3.1.5" />
    <PackageVersion Include="ZstdSharp.Port" Version="0.8.6" />
    <PackageVersion Include="Microsoft.SourceLink.GitHub" Version="8.0.0" />
    <PackageVersion Include="Microsoft.NETFramework.ReferenceAssemblies" Version="1.0.3" />
  </ItemGroup>
</Project>

View File

@@ -11,10 +11,11 @@
| Archive Format | Compression Format(s) | Compress/Decompress | Archive API | Reader API | Writer API |
| ---------------------- | ------------------------------------------------- | ------------------- | --------------- | ---------- | ------------- |
| Rar | Rar | Decompress (1) | RarArchive | RarReader | N/A |
| Zip (2) | None, DEFLATE, Deflate64, BZip2, LZMA/LZMA2, PPMd | Both | ZipArchive | ZipReader | ZipWriter |
| Zip (2) | None, Shrink, Reduce, Implode, DEFLATE, Deflate64, BZip2, LZMA/LZMA2, PPMd | Both | ZipArchive | ZipReader | ZipWriter |
| Tar | None | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.GZip | DEFLATE | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.BZip2 | BZip2 | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.Zstandard | ZStandard | Decompress | TarArchive | TarReader | N/A |
| Tar.LZip | LZMA | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.XZ | LZMA2 | Decompress | TarArchive | TarReader | TarWriter (3) |
| GZip (single file) | DEFLATE | Both | GZipArchive | GZipReader | GZipWriter |
@@ -41,6 +42,7 @@ For those who want to directly compress/decompress bits. The single file formats
| ADCStream | Decompress |
| LZipStream | Both |
| XZStream | Decompress |
| ZStandardStream | Decompress |
## Archive Formats vs Compression

9
NuGet.config Normal file
View File

@@ -0,0 +1,9 @@
<?xml version="1.0" encoding="utf-8"?>
<configuration>
  <packageSourceMapping>
    <!-- key value for <packageSource> should match key values from <packageSources> element -->
    <packageSource key="nuget.org">
      <package pattern="*" />
    </packageSource>
  </packageSourceMapping>
</configuration>

View File

@@ -1,11 +1,14 @@
# SharpCompress
SharpCompress is a compression library in pure C# for .NET Standard 2.0, 2.1, .NET Core 3.1 and .NET 5.0 that can unrar, un7zip, unzip, untar unbzip2, ungzip, unlzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip are implemented.
SharpCompress is a compression library in pure C# for .NET Framework 4.6.2, .NET Standard 2.1, .NET 6.0 and .NET 8.0 that can unrar, un7zip, unzip, untar, unbzip2, ungzip, unlzip and unzstd with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip is implemented.
The major feature is support for non-seekable streams so large files can be processed on the fly (i.e. download stream).
**NEW:** All I/O operations now support async/await for improved performance and scalability. See the [Async Usage](#async-usage) section below.
GitHub Actions Build -
[![GitHubActions](https://github.com/adamhathcock/sharpcompress/workflows/SharpCompress/badge.svg)](https://circleci.com/gh/adamhathcock/sharpcompress)
[![SharpCompress](https://github.com/adamhathcock/sharpcompress/actions/workflows/dotnetcore.yml/badge.svg)](https://github.com/adamhathcock/sharpcompress/actions/workflows/dotnetcore.yml)
[![Static Badge](https://img.shields.io/badge/API%20Docs-DNDocs-190088?logo=readme&logoColor=white)](https://dndocs.com/d/sharpcompress/api/index.html)
## Need Help?
@@ -19,29 +22,110 @@ In general, I recommend GZip (Deflate)/BZip2 (BZip)/LZip (LZMA) as the simplicit
Zip is okay, but it's a very haphazard format and the variation in headers and implementations makes it hard to get correct. Uses Deflate by default but supports a lot of compression methods.
RAR is not recommended as it's a propriatory format and the compression is closed source. Use Tar/LZip for LZMA
RAR is not recommended as it's a proprietary format and the compression is closed source. Use Tar/LZip for LZMA
7Zip and XZ both are overly complicated. 7Zip does not support streamable formats. XZ has known holes explained here: (http://www.nongnu.org/lzip/xz_inadequate.html) Use Tar/LZip for LZMA compression instead.
ZStandard is an efficient format that works well for streaming with a flexible compression level to tweak the speed/performance trade off you are looking for. We currently only implement decompression for ZStandard but as we leverage the [ZstdSharp](https://github.com/oleg-st/ZstdSharp) library one could likely add compression support without much trouble (PRs are welcome!).
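For instance, a minimal sketch of reading a ZStandard-compressed tar through the Reader API (assuming `ReaderFactory` detects the format, per the table above; decompression only, as noted):
```csharp
using SharpCompress.Readers;

using (Stream stream = File.OpenRead("archive.tar.zst"))
using (var reader = ReaderFactory.Open(stream))
{
    while (reader.MoveToNextEntry())
    {
        if (!reader.Entry.IsDirectory)
        {
            // List entries; WriteEntryToDirectory etc. work the same way
            Console.WriteLine(reader.Entry.Key);
        }
    }
}
```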
## A Simple Request
Hi everyone. I hope you're using SharpCompress and finding it useful. Please give me feedback on what you'd like to see changed especially as far as usability goes. New feature suggestions are always welcome as well. I would also like to know what projects SharpCompress is being used in. I like seeing how it is used to give me ideas for future versions. Thanks!
Please do not email me directly to ask for help. If you think there is a real issue, please report it here.
## Async Usage
SharpCompress now provides full async/await support for all I/O operations, allowing for better performance and scalability in modern applications.
### Async Reading Examples
Extract entries asynchronously:
```csharp
using (Stream stream = File.OpenRead("archive.zip"))
using (var reader = ReaderFactory.Open(stream))
{
    while (reader.MoveToNextEntry())
    {
        if (!reader.Entry.IsDirectory)
        {
            // Async extraction
            await reader.WriteEntryToDirectoryAsync(
                @"C:\temp",
                new ExtractionOptions() { ExtractFullPath = true, Overwrite = true },
                cancellationToken
            );
        }
    }
}
```
Extract all entries to directory asynchronously:
```csharp
using (Stream stream = File.OpenRead("archive.tar.gz"))
using (var reader = ReaderFactory.Open(stream))
{
    await reader.WriteAllToDirectoryAsync(
        @"C:\temp",
        new ExtractionOptions() { ExtractFullPath = true, Overwrite = true },
        cancellationToken
    );
}
```
Open entry stream asynchronously:
```csharp
using (var archive = ZipArchive.Open("archive.zip"))
{
    foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
    {
        using (var entryStream = await entry.OpenEntryStreamAsync(cancellationToken))
        {
            // Process stream asynchronously
            await entryStream.CopyToAsync(outputStream, cancellationToken);
        }
    }
}
```
### Async Writing Examples
Write files asynchronously:
```csharp
using (Stream stream = File.OpenWrite("output.zip"))
using (var writer = WriterFactory.Open(stream, ArchiveType.Zip, CompressionType.Deflate))
{
    await writer.WriteAsync("file1.txt", fileStream, DateTime.Now, cancellationToken);
}
```
Write all files from directory asynchronously:
```csharp
using (Stream stream = File.OpenWrite("output.tar.gz"))
using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, new WriterOptions(CompressionType.GZip)))
{
    await writer.WriteAllAsync(@"D:\files", "*", SearchOption.AllDirectories, cancellationToken);
}
```
All async methods support `CancellationToken` for graceful cancellation of long-running operations.
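For example, a token wired to a timeout can bound a long-running extraction (a minimal sketch using the standard `CancellationTokenSource`):
```csharp
using var cts = new CancellationTokenSource(TimeSpan.FromMinutes(5));
try
{
    using (Stream stream = File.OpenRead("archive.zip"))
    using (var reader = ReaderFactory.Open(stream))
    {
        await reader.WriteAllToDirectoryAsync(
            @"C:\temp",
            new ExtractionOptions() { ExtractFullPath = true, Overwrite = true },
            cts.Token
        );
    }
}
catch (OperationCanceledException)
{
    // Extraction was cancelled before completion
}
```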
## Want to contribute?
I'm always looking for help or ideas. Please submit code or email with ideas. Unfortunately, just letting me know you'd like to help is not enough because I really have no overall plan of what needs to be done. I'll definitely accept code submissions and add you as a member of the project!
## TODOs (always lots)
* RAR 5 decryption support
* RAR 5 decryption crc check support
* 7Zip writing
* Zip64 (Need writing and extend Reading)
* Multi-volume Zip support.
* ZStandard writing
## Version Log
* [Releases](https://github.com/adamhathcock/sharpcompress/releases)
### Version 0.18
* [Now on Github releases](https://github.com/adamhathcock/sharpcompress/releases/tag/0.18)

View File

@@ -3,8 +3,6 @@ Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio 15
VisualStudioVersion = 15.0.26430.6
MinimumVisualStudioVersion = 10.0.40219.1
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{F18F1765-4A02-42FD-9BEF-F0E2FCBD9D17}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{3C5BE746-03E5-4895-9988-0B57F162F86C}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "tests", "tests", "{0F0901FF-E8D9-426A-B5A2-17C7F47C1529}"
@@ -15,6 +13,22 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SharpCompress.Test", "tests
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "build", "build\build.csproj", "{D4D613CB-5E94-47FB-85BE-B8423D20C545}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Config", "Config", "{CDB42573-7D22-4490-BA12-1B7FB99CE7FB}"
ProjectSection(SolutionItems) = preProject
Directory.Build.props = Directory.Build.props
global.json = global.json
.editorconfig = .editorconfig
Directory.Packages.props = Directory.Packages.props
NuGet.config = NuGet.config
.github\workflows\dotnetcore.yml = .github\workflows\dotnetcore.yml
USAGE.md = USAGE.md
README.md = README.md
FORMATS.md = FORMATS.md
AGENTS.md = AGENTS.md
EndProjectSection
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "SharpCompress.Performance", "tests\SharpCompress.Performance\SharpCompress.Performance.csproj", "{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -33,6 +47,10 @@ Global
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Debug|Any CPU.Build.0 = Debug|Any CPU
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Release|Any CPU.ActiveCfg = Release|Any CPU
{D4D613CB-5E94-47FB-85BE-B8423D20C545}.Release|Any CPU.Build.0 = Release|Any CPU
{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}.Debug|Any CPU.Build.0 = Debug|Any CPU
{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}.Release|Any CPU.ActiveCfg = Release|Any CPU
{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
@@ -40,5 +58,6 @@ Global
GlobalSection(NestedProjects) = preSolution
{FD19DDD8-72B2-4024-8665-0D1F7A2AA998} = {3C5BE746-03E5-4895-9988-0B57F162F86C}
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F} = {0F0901FF-E8D9-426A-B5A2-17C7F47C1529}
{5BDE6DBC-9E5F-4E21-AB71-F138A3E72B17} = {0F0901FF-E8D9-426A-B5A2-17C7F47C1529}
EndGlobalSection
EndGlobal

View File

@@ -15,17 +15,17 @@
<s:String x:Key="/Default/CodeStyle/CodeCleanup/SilentCleanupProfile/@EntryValue">Basic Clean</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/APPLY_ON_COMPLETION/@EntryValue">True</s:Boolean>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/ARGUMENTS_NAMED/@EntryValue">Named</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/ARGUMENTS_NAMED/@EntryValue">Positional</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/BRACES_FOR_FOR/@EntryValue">Required</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/BRACES_FOR_FOREACH/@EntryValue">Required</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/BRACES_FOR_IFELSE/@EntryValue">Required</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpCodeStyle/BRACES_FOR_WHILE/@EntryValue">Required</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_FIRST_ARG_BY_PAREN/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_FIRST_ARG_BY_PAREN/@EntryValue">False</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_LINQ_QUERY/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_ARGUMENT/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_ARRAY_AND_OBJECT_INITIALIZER/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_ARRAY_AND_OBJECT_INITIALIZER/@EntryValue">False</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_CALLS_CHAIN/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_EXPRESSION/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_EXPRESSION/@EntryValue">False</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_EXTENDS_LIST/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_FOR_STMT/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/ALIGN_MULTILINE_PARAMETER/@EntryValue">True</s:Boolean>
@@ -42,7 +42,7 @@
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/FORCE_IFELSE_BRACES_STYLE/@EntryValue">ALWAYS_ADD</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/FORCE_USING_BRACES_STYLE/@EntryValue">ALWAYS_ADD</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/FORCE_WHILE_BRACES_STYLE/@EntryValue">ALWAYS_ADD</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/INDENT_ANONYMOUS_METHOD_BLOCK/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/INDENT_ANONYMOUS_METHOD_BLOCK/@EntryValue">False</s:Boolean>
<s:Int64 x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/KEEP_BLANK_LINES_IN_CODE/@EntryValue">1</s:Int64>
<s:Int64 x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/KEEP_BLANK_LINES_IN_DECLARATIONS/@EntryValue">1</s:Int64>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_ACCESSOR_ATTRIBUTE_ON_SAME_LINE_EX/@EntryValue">NEVER</s:String>
@@ -50,12 +50,12 @@
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_CONSTRUCTOR_INITIALIZER_ON_SAME_LINE/@EntryValue">False</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_FIELD_ATTRIBUTE_ON_SAME_LINE/@EntryValue">False</s:Boolean>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_FIELD_ATTRIBUTE_ON_SAME_LINE_EX/@EntryValue">NEVER</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_ACCESSORHOLDER_ON_SINGLE_LINE/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_ACCESSORHOLDER_ON_SINGLE_LINE/@EntryValue">False</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_ACCESSOR_ATTRIBUTE_ON_SAME_LINE/@EntryValue">False</s:Boolean>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_EMBEDDED_STATEMENT_ON_SAME_LINE/@EntryValue">NEVER</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_INITIALIZER_ON_SINGLE_LINE/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_SIMPLE_INITIALIZER_ON_SINGLE_LINE/@EntryValue">False</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_WHILE_ON_NEW_LINE/@EntryValue">True</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/PLACE_WHILE_ON_NEW_LINE/@EntryValue">False</s:Boolean>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/SIMPLE_EMBEDDED_STATEMENT_STYLE/@EntryValue">LINE_BREAK</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/SPACE_AFTER_TYPECAST_PARENTHESES/@EntryValue">False</s:Boolean>
@@ -67,18 +67,22 @@
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/SPACE_BEFORE_TYPEOF_PARENTHESES/@EntryValue">False</s:Boolean>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/STICK_COMMENT/@EntryValue">False</s:Boolean>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_ARGUMENTS_STYLE/@EntryValue">CHOP_IF_LONG</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_ARRAY_INITIALIZER_STYLE/@EntryValue">CHOP_IF_LONG</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_ARRAY_INITIALIZER_STYLE/@EntryValue">CHOP_ALWAYS</s:String>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_EXTENDS_LIST_STYLE/@EntryValue">CHOP_IF_LONG</s:String>
<s:Boolean x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_LINES/@EntryValue">False</s:Boolean>
<s:String x:Key="/Default/CodeStyle/CodeFormatting/CSharpFormat/WRAP_PARAMETERS_STYLE/@EntryValue">CHOP_IF_LONG</s:String>
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForBuiltInTypes/@EntryValue">UseVarWhenEvident</s:String>
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForOtherTypes/@EntryValue">UseVarWhenEvident</s:String>
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForSimpleTypes/@EntryValue">UseVarWhenEvident</s:String>
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForBuiltInTypes/@EntryValue">UseVar</s:String>
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForOtherTypes/@EntryValue">UseVar</s:String>
<s:String x:Key="/Default/CodeStyle/CSharpVarKeywordUsage/ForSimpleTypes/@EntryValue">UseVar</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=PrivateInstanceFields/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="_" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=PrivateStaticFields/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=PrivateStaticReadonly/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=StaticReadonly/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/UserRules/=15b5b1f1_002D457c_002D4ca6_002Db278_002D5615aedc07d3/@EntryIndexedValue">&lt;Policy&gt;&lt;Descriptor Staticness="Static" AccessRightKinds="Private" Description="Static readonly fields (private)"&gt;&lt;ElementKinds&gt;&lt;Kind Name="READONLY_FIELD" /&gt;&lt;/ElementKinds&gt;&lt;/Descriptor&gt;&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;&lt;/Policy&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/UserRules/=4a98fdf6_002D7d98_002D4f5a_002Dafeb_002Dea44ad98c70c/@EntryIndexedValue">&lt;Policy&gt;&lt;Descriptor Staticness="Instance" AccessRightKinds="Private" Description="Instance fields (private)"&gt;&lt;ElementKinds&gt;&lt;Kind Name="FIELD" /&gt;&lt;Kind Name="READONLY_FIELD" /&gt;&lt;/ElementKinds&gt;&lt;/Descriptor&gt;&lt;Policy Inspect="True" Prefix="_" Suffix="" Style="aaBb" /&gt;&lt;/Policy&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/UserRules/=c873eafb_002Dd57f_002D481d_002D8c93_002D77f6863c2f88/@EntryIndexedValue">&lt;Policy&gt;&lt;Descriptor Staticness="Static" AccessRightKinds="Protected, ProtectedInternal, Internal, Public, PrivateProtected" Description="Static readonly fields (not private)"&gt;&lt;ElementKinds&gt;&lt;Kind Name="READONLY_FIELD" /&gt;&lt;/ElementKinds&gt;&lt;/Descriptor&gt;&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;&lt;/Policy&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/UserRules/=f9fce829_002De6f4_002D4cb2_002D80f1_002D5497c44f51df/@EntryIndexedValue">&lt;Policy&gt;&lt;Descriptor Staticness="Static" AccessRightKinds="Private" Description="Static fields (private)"&gt;&lt;ElementKinds&gt;&lt;Kind Name="FIELD" /&gt;&lt;/ElementKinds&gt;&lt;/Descriptor&gt;&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;&lt;/Policy&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FBLOCK_005FSCOPE_005FCONSTANT/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FBLOCK_005FSCOPE_005FFUNCTION/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FBLOCK_005FSCOPE_005FVARIABLE/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
@@ -118,6 +122,7 @@
<s:String x:Key="/Default/CodeStyle/Naming/XamlNaming/UserRules/=NAMESPACE_005FALIAS/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/XamlNaming/UserRules/=XAML_005FFIELD/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/XamlNaming/UserRules/=XAML_005FRESOURCE/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AaBb" /&gt;</s:String>
<s:String x:Key="/Default/CustomTools/CustomToolsData/@EntryValue"></s:String>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ECSharpAttributeForSingleLineMethodUpgrade/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ECSharpKeepExistingMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ECSharpPlaceEmbeddedOnSameLineMigration/@EntryIndexedValue">True</s:Boolean>
@@ -127,6 +132,7 @@
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002ECSharpPlaceAttributeOnSameLineMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateBlankLinesAroundFieldToBlankLinesAroundProperty/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateThisQualifierSettings/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EPredefinedNamingRulesToUserRulesUpgrade/@EntryIndexedValue">True</s:Boolean>
<s:String x:Key="/Default/Environment/UnitTesting/UnitTestSessionStore/Sessions/=6af8f80e_002D9fdd_002D4223_002D8e02_002D473db916f9b2/@EntryIndexedValue">&lt;SessionState ContinuousTestingIsOn="False" ContinuousTestingMode="0" FrameworkVersion="{x:Null}" IsLocked="False" Name="All tests from Solution" PlatformMonoPreference="{x:Null}" PlatformType="{x:Null}" xmlns="urn:schemas-jetbrains-com:jetbrains-ut-session" xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"&gt;
&lt;Solution /&gt;
&lt;/SessionState&gt;</s:String></wpf:ResourceDictionary>

187
USAGE.md
View File

@@ -1,5 +1,18 @@
# SharpCompress Usage
## Async/Await Support
SharpCompress now provides full async/await support for all I/O operations. All `Read`, `Write`, and extraction operations have async equivalents ending in `Async` that accept an optional `CancellationToken`. This enables better performance and scalability for I/O-bound operations.
**Key Async Methods:**
- `reader.WriteEntryToAsync(stream, cancellationToken)` - Extract entry asynchronously
- `reader.WriteAllToDirectoryAsync(path, options, cancellationToken)` - Extract all asynchronously
- `writer.WriteAsync(filename, stream, modTime, cancellationToken)` - Write entry asynchronously
- `writer.WriteAllAsync(directory, pattern, searchOption, cancellationToken)` - Write directory asynchronously
- `entry.OpenEntryStreamAsync(cancellationToken)` - Open entry stream asynchronously
See [Async Examples](#async-examples) section below for usage patterns.
## Stream Rules (changed with 0.21)
When dealing with Streams, the rule should be that you don't close a stream you didn't create. This, in effect, should mean you should always put a Stream in a using block to dispose it.
@@ -27,12 +40,24 @@ To deal with the "correct" rules as well as the expectations of users, I've deci
To be explicit though, consider always using the overloads that use `ReaderOptions` or `WriterOptions` and explicitly set `LeaveStreamOpen` the way you want.
If using Compression Stream classes directly and you don't want the wrapped stream to be closed. Use the `NonDisposingStream` as a wrapped to prevent the stream being disposed. The change in 0.21 simplified a lot even though the usage is a bit more convoluted.
If you are using the Compression Stream classes directly and you don't want the wrapped stream to be closed, use `NonDisposingStream` as a wrapper to prevent the stream being disposed. The change in 0.21 simplified a lot even though the usage is a bit more convoluted.
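For example, a minimal sketch (assuming the `SharpCompress.IO.NonDisposingStream` wrapper; the exact construction may vary by version):
```C#
using (var memoryStream = new MemoryStream())
{
    // GZipStream disposes its inner stream on Dispose; the wrapper absorbs that
    using (var gzip = new GZipStream(new NonDisposingStream(memoryStream), CompressionMode.Compress))
    {
        gzip.Write(new byte[] { 1, 2, 3 }, 0, 3);
    }

    // memoryStream is still open and usable here
    memoryStream.Position = 0;
}
```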
## Samples
Also, look over the tests for more thorough [examples](https://github.com/adamhathcock/sharpcompress/tree/master/tests/SharpCompress.Test)
### Create Zip Archive from multiple files
```C#
using (var archive = ZipArchive.Create())
{
    archive.AddEntry("file01.txt", "C:\\file01.txt");
    archive.AddEntry("file02.txt", "C:\\file02.txt");
    ...
    archive.SaveTo("C:\\temp.zip", CompressionType.Deflate);
}
```
### Create Zip Archive from all files in a directory to a file
```C#
@@ -59,18 +84,34 @@ using (var archive = ZipArchive.Create())
memoryStream.Position = 0;
```
### Extract all files from a Rar file to a directory using RarArchive
### Extract all files from a rar file to a directory using RarArchive
Note: Extracting a solid rar or 7z file needs to be done in sequential order to get acceptable decompression speed.
It is explicitly recommended to use `ExtractAllEntries` when extracting an entire `IArchive` instead of iterating over all its `Entries`.
Alternatively, use `IArchive.WriteToDirectory`.
```C#
using (var archive = RarArchive.Open("Test.rar"))
{
    using (var reader = archive.ExtractAllEntries())
    {
        reader.WriteAllToDirectory(@"D:\temp", new ExtractionOptions()
        {
            ExtractFullPath = true,
            Overwrite = true
        });
    }
}
```
### Iterate over all files from a Rar file using RarArchive
```C#
using (var archive = RarArchive.Open("Test.rar"))
{
    foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
    {
        Console.WriteLine($"{entry.Key}: {entry.Size} bytes");
    }
}
```
@@ -143,4 +184,134 @@ foreach(var entry in tr.Entries)
{
    Console.WriteLine($"{entry.Key}");
}
```
## Async Examples
### Async Reader Examples
**Extract single entry asynchronously:**
```C#
using (Stream stream = File.OpenRead("archive.zip"))
using (var reader = ReaderFactory.Open(stream))
{
    while (reader.MoveToNextEntry())
    {
        if (!reader.Entry.IsDirectory)
        {
            using (var outputStream = File.Create("output.bin"))
            {
                await reader.WriteEntryToAsync(outputStream, cancellationToken);
            }
        }
    }
}
```
**Extract all entries asynchronously:**
```C#
using (Stream stream = File.OpenRead("archive.tar.gz"))
using (var reader = ReaderFactory.Open(stream))
{
    await reader.WriteAllToDirectoryAsync(
        @"D:\temp",
        new ExtractionOptions()
        {
            ExtractFullPath = true,
            Overwrite = true
        },
        cancellationToken
    );
}
```
**Open and process entry stream asynchronously:**
```C#
using (var archive = ZipArchive.Open("archive.zip"))
{
    foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
    {
        using (var entryStream = await entry.OpenEntryStreamAsync(cancellationToken))
        {
            // Process the decompressed stream asynchronously
            await ProcessStreamAsync(entryStream, cancellationToken);
        }
    }
}
```
### Async Writer Examples
**Write single file asynchronously:**
```C#
using (Stream archiveStream = File.OpenWrite("output.zip"))
using (var writer = WriterFactory.Open(archiveStream, ArchiveType.Zip, CompressionType.Deflate))
{
    using (Stream fileStream = File.OpenRead("input.txt"))
    {
        await writer.WriteAsync("entry.txt", fileStream, DateTime.Now, cancellationToken);
    }
}
```
**Write entire directory asynchronously:**
```C#
using (Stream stream = File.OpenWrite("backup.tar.gz"))
using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, new WriterOptions(CompressionType.GZip)))
{
    await writer.WriteAllAsync(
        @"D:\files",
        "*",
        SearchOption.AllDirectories,
        cancellationToken
    );
}
```
**Write with progress tracking and cancellation:**
```C#
var cts = new CancellationTokenSource();
// Set timeout or cancel from UI
cts.CancelAfter(TimeSpan.FromMinutes(5));

using (Stream stream = File.OpenWrite("archive.zip"))
using (var writer = WriterFactory.Open(stream, ArchiveType.Zip, CompressionType.Deflate))
{
    try
    {
        await writer.WriteAllAsync(@"D:\data", "*", SearchOption.AllDirectories, cts.Token);
    }
    catch (OperationCanceledException)
    {
        Console.WriteLine("Operation was cancelled");
    }
}
```
### Archive Async Examples
**Extract from archive asynchronously:**
```C#
using (var archive = ZipArchive.Open("archive.zip"))
{
    using (var reader = archive.ExtractAllEntries())
    {
        await reader.WriteAllToDirectoryAsync(
            @"C:\output",
            new ExtractionOptions() { ExtractFullPath = true, Overwrite = true },
            cancellationToken
        );
    }
}
```
**Benefits of Async Operations:**
- Non-blocking I/O for better application responsiveness
- Improved scalability for server applications
- Support for cancellation via CancellationToken
- Better resource utilization in async/await contexts
- Compatible with modern .NET async patterns

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.Runtime.InteropServices;
@@ -7,77 +7,89 @@ using static Bullseye.Targets;
using static SimpleExec.Command;
const string Clean = "clean";
const string Format = "format";
const string Restore = "restore";
const string Build = "build";
const string Test = "test";
const string Publish = "publish";
Target(Clean,
ForEach("**/bin", "**/obj"),
dir =>
{
IEnumerable<string> GetDirectories(string d)
{
return Glob.Directories(".", d);
}
Target(
Clean,
["**/bin", "**/obj"],
dir =>
{
IEnumerable<string> GetDirectories(string d)
{
return Glob.Directories(".", d);
}
void RemoveDirectory(string d)
{
if (Directory.Exists(d))
{
Console.WriteLine(d);
Directory.Delete(d, true);
}
}
void RemoveDirectory(string d)
{
if (Directory.Exists(d))
{
Console.WriteLine(d);
Directory.Delete(d, true);
}
}
foreach (var d in GetDirectories(dir))
{
RemoveDirectory(d);
}
});
foreach (var d in GetDirectories(dir))
{
RemoveDirectory(d);
}
}
);
Target(Format,
() =>
{
Run("dotnet", "tool restore");
Run("dotnet", "format --check");
});
Target(
Format,
() =>
{
Run("dotnet", "tool restore");
Run("dotnet", "csharpier check .");
}
);
Target(Restore, [Format], () => Run("dotnet", "restore"));
Target(Build,
DependsOn(Format),
framework =>
{
Run("dotnet", "build src/SharpCompress/SharpCompress.csproj -c Release");
});
Target(
Build,
[Restore],
() =>
{
Run("dotnet", "build src/SharpCompress/SharpCompress.csproj -c Release --no-restore");
}
);
Target(Test,
DependsOn(Build),
ForEach("net6.0", "net461"),
framework =>
{
IEnumerable<string> GetFiles(string d)
{
return Glob.Files(".", d);
}
Target(
Test,
[Build],
["net8.0", "net48"],
framework =>
{
IEnumerable<string> GetFiles(string d)
{
return Glob.Files(".", d);
}
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows) && framework == "net461")
{
return;
}
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows) && framework == "net48")
{
return;
}
foreach (var file in GetFiles("**/*.Test.csproj"))
{
Run("dotnet", $"test {file} -c Release -f {framework}");
}
});
foreach (var file in GetFiles("**/*.Test.csproj"))
{
Run("dotnet", $"test {file} -c Release -f {framework} --no-restore --verbosity=normal");
}
}
);
Target(Publish,
DependsOn(Test),
() =>
{
Run("dotnet", "pack src/SharpCompress/SharpCompress.csproj -c Release -o artifacts/");
});
Target(
Publish,
[Test],
() =>
{
Run("dotnet", "pack src/SharpCompress/SharpCompress.csproj -c Release -o artifacts/");
}
);
Target("default", DependsOn(Publish), () => Console.WriteLine("Done!"));
Target("default", [Publish], () => Console.WriteLine("Done!"));
await RunTargetsAndExitAsync(args);
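With Bullseye 6's collection-expression overloads shown above, a target takes a name, an optional dependency list, optional inputs, and an action, all declared before `RunTargetsAndExitAsync`. A hypothetical extra target in the same style (the name and command are illustrative, not part of this script):
```C#
// Hypothetical target following the same Bullseye pattern as above.
Target(
    "smoke",
    [Build],
    () =>
    {
        Run("dotnet", "run --project samples/Smoke -c Release");
    }
);
```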

View File

@@ -1,14 +1,11 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net6.0</TargetFramework>
<TargetFramework>net8.0</TargetFramework>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Bullseye" Version="4.0.0" />
<PackageReference Include="Glob" Version="1.1.9" />
<PackageReference Include="SimpleExec" Version="10.0.0" />
<PackageReference Include="Bullseye" />
<PackageReference Include="Glob" />
<PackageReference Include="SimpleExec" />
</ItemGroup>
</Project>

build/packages.lock.json Normal file
View File

@@ -0,0 +1,25 @@
{
"version": 2,
"dependencies": {
"net8.0": {
"Bullseye": {
"type": "Direct",
"requested": "[6.0.0, )",
"resolved": "6.0.0",
"contentHash": "vgwwXfzs7jJrskWH7saHRMgPzziq/e86QZNWY1MnMxd7e+De7E7EX4K3C7yrvaK9y02SJoLxNxcLG/q5qUAghw=="
},
"Glob": {
"type": "Direct",
"requested": "[1.1.9, )",
"resolved": "1.1.9",
"contentHash": "AfK5+ECWYTP7G3AAdnU8IfVj+QpGjrh9GC2mpdcJzCvtQ4pnerAGwHsxJ9D4/RnhDUz2DSzd951O/lQjQby2Sw=="
},
"SimpleExec": {
"type": "Direct",
"requested": "[12.0.0, )",
"resolved": "12.0.0",
"contentHash": "ptxlWtxC8vM6Y6e3h9ZTxBBkOWnWrm/Sa1HT+2i1xcXY3Hx2hmKDZP5RShPf8Xr9D+ivlrXNy57ktzyH8kyt+Q=="
}
}
}
}

View File

@@ -1,6 +1,6 @@
{
"sdk": {
"version": "6.0.200",
"version": "8.0.100",
"rollForward": "latestFeature"
}
}

View File

@@ -15,305 +15,256 @@ using System.Runtime.Intrinsics.X86;
#endif
#pragma warning disable IDE0007 // Use implicit type
namespace SharpCompress.Algorithms
namespace SharpCompress.Algorithms;
/// <summary>
/// Calculates the 32 bit Adler checksum of a given buffer according to
/// RFC 1950 (ZLIB Compressed Data Format Specification version 3.3).
/// </summary>
internal static class Adler32 // From https://github.com/SixLabors/ImageSharp/blob/main/src/ImageSharp/Compression/Zlib/Adler32.cs
{
/// <summary>
/// Global inlining options. Helps temporarily disable inlining for better profiler output.
/// </summary>
private static class InliningOptions // From https://github.com/SixLabors/ImageSharp/blob/main/src/ImageSharp/Common/Helpers/InliningOptions.cs
{
/// <summary>
/// <see cref="MethodImplOptions.AggressiveInlining"/> regardless of the build conditions.
/// </summary>
public const MethodImplOptions AlwaysInline = MethodImplOptions.AggressiveInlining;
#if PROFILING
public const MethodImplOptions HotPath = MethodImplOptions.NoInlining;
public const MethodImplOptions ShortMethod = MethodImplOptions.NoInlining;
#else
#if SUPPORTS_HOTPATH
public const MethodImplOptions HotPath = MethodImplOptions.AggressiveOptimization;
#else
public const MethodImplOptions HotPath = MethodImplOptions.AggressiveInlining;
#endif
public const MethodImplOptions ShortMethod = MethodImplOptions.AggressiveInlining;
#endif
public const MethodImplOptions ColdPath = MethodImplOptions.NoInlining;
}
#if SUPPORTS_RUNTIME_INTRINSICS
/// <summary>
/// Provides optimized static methods for trigonometric, logarithmic,
/// and other common mathematical functions.
/// </summary>
private static class Numerics // From https://github.com/SixLabors/ImageSharp/blob/main/src/ImageSharp/Common/Helpers/Numerics.cs
{
/// <summary>
/// Reduces elements of the vector into one sum.
/// </summary>
/// <param name="accumulator">The accumulator to reduce.</param>
/// <returns>The sum of all elements.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static int ReduceSum(Vector256<int> accumulator)
{
// Add upper lane to lower lane.
var vsum = Sse2.Add(accumulator.GetLower(), accumulator.GetUpper());
// Add odd to even.
vsum = Sse2.Add(vsum, Sse2.Shuffle(vsum, 0b_11_11_01_01));
// Add high to low.
vsum = Sse2.Add(vsum, Sse2.Shuffle(vsum, 0b_11_10_11_10));
return Sse2.ConvertToInt32(vsum);
}
/// <summary>
/// Reduces even elements of the vector into one sum.
/// </summary>
/// <param name="accumulator">The accumulator to reduce.</param>
/// <returns>The sum of even elements.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static int EvenReduceSum(Vector256<int> accumulator)
{
var vsum = Sse2.Add(accumulator.GetLower(), accumulator.GetUpper()); // add upper lane to lower lane
vsum = Sse2.Add(vsum, Sse2.Shuffle(vsum, 0b_11_10_11_10)); // add high to low
// Vector128<int>.ToScalar() isn't optimized pre-net5.0 https://github.com/dotnet/runtime/pull/37882
return Sse2.ConvertToInt32(vsum);
}
}
#endif
/// <summary>
/// The default initial seed value of an Adler32 checksum calculation.
/// </summary>
public const uint SeedValue = 1U;
// Largest prime smaller than 65536
private const uint BASE = 65521;
// NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1
private const uint NMAX = 5552;
#if SUPPORTS_RUNTIME_INTRINSICS
private const int MinBufferSize = 64;
private const int BlockSize = 1 << 5;
// The C# compiler emits this as a compile-time constant embedded in the PE file.
private static ReadOnlySpan<byte> Tap1Tap2 => new byte[]
// The C# compiler emits this as a compile-time constant embedded in the PE file.
private static ReadOnlySpan<byte> Tap1Tap2 =>
new byte[]
{
32, 31, 30, 29, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 17, // tap1
16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1 // tap2
32,
31,
30,
29,
28,
27,
26,
25,
24,
23,
22,
21,
20,
19,
18,
17, // tap1
16,
15,
14,
13,
12,
11,
10,
9,
8,
7,
6,
5,
4,
3,
2,
1, // tap2
};
#endif
/// <summary>
/// Calculates the Adler32 checksum with the bytes taken from the span.
/// </summary>
/// <param name="buffer">The readonly span of bytes.</param>
/// <returns>The <see cref="uint"/>.</returns>
[MethodImpl(InliningOptions.ShortMethod)]
public static uint Calculate(ReadOnlySpan<byte> buffer)
=> Calculate(SeedValue, buffer);
/// <summary>
/// Calculates the Adler32 checksum with the bytes taken from the span.
/// </summary>
/// <param name="buffer">The readonly span of bytes.</param>
/// <returns>The <see cref="uint"/>.</returns>
[MethodImpl(InliningOptions.ShortMethod)]
public static uint Calculate(ReadOnlySpan<byte> buffer) => Calculate(SeedValue, buffer);
/// <summary>
/// Calculates the Adler32 checksum with the bytes taken from the span and seed.
/// </summary>
/// <param name="adler">The input Adler32 value.</param>
/// <param name="buffer">The readonly span of bytes.</param>
/// <returns>The <see cref="uint"/>.</returns>
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
public static uint Calculate(uint adler, ReadOnlySpan<byte> buffer)
{
if (buffer.IsEmpty)
{
return adler;
}
#if SUPPORTS_RUNTIME_INTRINSICS
if (Avx2.IsSupported && buffer.Length >= MinBufferSize)
{
return CalculateAvx2(adler, buffer);
}
if (Ssse3.IsSupported && buffer.Length >= MinBufferSize)
{
return CalculateSse(adler, buffer);
}
return CalculateScalar(adler, buffer);
#else
return CalculateScalar(adler, buffer);
#endif
}
// Based on https://github.com/chromium/chromium/blob/master/third_party/zlib/adler32_simd.c
#if SUPPORTS_RUNTIME_INTRINSICS
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
private static unsafe uint CalculateSse(uint adler, ReadOnlySpan<byte> buffer)
{
var s1 = adler & 0xFFFF;
var s2 = (adler >> 16) & 0xFFFF;
// Process the data in blocks.
var length = (uint)buffer.Length;
var blocks = length / BlockSize;
length -= blocks * BlockSize;
fixed (byte* bufferPtr = &MemoryMarshal.GetReference(buffer))
{
uint s1 = adler & 0xFFFF;
uint s2 = (adler >> 16) & 0xFFFF;
// Process the data in blocks.
uint length = (uint)buffer.Length;
uint blocks = length / BlockSize;
length -= blocks * BlockSize;
fixed (byte* bufferPtr = &MemoryMarshal.GetReference(buffer))
fixed (byte* tapPtr = &MemoryMarshal.GetReference(Tap1Tap2))
{
fixed (byte* tapPtr = &MemoryMarshal.GetReference(Tap1Tap2))
var localBufferPtr = bufferPtr;
// _mm_setr_epi8 on x86
var tap1 = Sse2.LoadVector128((sbyte*)tapPtr);
var tap2 = Sse2.LoadVector128((sbyte*)(tapPtr + 0x10));
var zero = Vector128<byte>.Zero;
var ones = Vector128.Create((short)1);
while (blocks > 0)
{
byte* localBufferPtr = bufferPtr;
// _mm_setr_epi8 on x86
Vector128<sbyte> tap1 = Sse2.LoadVector128((sbyte*)tapPtr);
Vector128<sbyte> tap2 = Sse2.LoadVector128((sbyte*)(tapPtr + 0x10));
Vector128<byte> zero = Vector128<byte>.Zero;
var ones = Vector128.Create((short)1);
while (blocks > 0)
var n = NMAX / BlockSize; /* The NMAX constraint. */
if (n > blocks)
{
uint n = NMAX / BlockSize; /* The NMAX constraint. */
if (n > blocks)
{
n = blocks;
}
blocks -= n;
// Process n blocks of data. At most NMAX data bytes can be
// processed before s2 must be reduced modulo BASE.
Vector128<uint> v_ps = Vector128.CreateScalar(s1 * n);
Vector128<uint> v_s2 = Vector128.CreateScalar(s2);
Vector128<uint> v_s1 = Vector128<uint>.Zero;
do
{
// Load 32 input bytes.
Vector128<byte> bytes1 = Sse3.LoadDquVector128(localBufferPtr);
Vector128<byte> bytes2 = Sse3.LoadDquVector128(localBufferPtr + 0x10);
// Add previous block byte sum to v_ps.
v_ps = Sse2.Add(v_ps, v_s1);
// Horizontally add the bytes for s1, multiply-adds the
// bytes by [ 32, 31, 30, ... ] for s2.
v_s1 = Sse2.Add(v_s1, Sse2.SumAbsoluteDifferences(bytes1, zero).AsUInt32());
Vector128<short> mad1 = Ssse3.MultiplyAddAdjacent(bytes1, tap1);
v_s2 = Sse2.Add(v_s2, Sse2.MultiplyAddAdjacent(mad1, ones).AsUInt32());
v_s1 = Sse2.Add(v_s1, Sse2.SumAbsoluteDifferences(bytes2, zero).AsUInt32());
Vector128<short> mad2 = Ssse3.MultiplyAddAdjacent(bytes2, tap2);
v_s2 = Sse2.Add(v_s2, Sse2.MultiplyAddAdjacent(mad2, ones).AsUInt32());
localBufferPtr += BlockSize;
}
while (--n > 0);
v_s2 = Sse2.Add(v_s2, Sse2.ShiftLeftLogical(v_ps, 5));
// Sum epi32 ints v_s1(s2) and accumulate in s1(s2).
const byte S2301 = 0b1011_0001; // A B C D -> B A D C
const byte S1032 = 0b0100_1110; // A B C D -> C D A B
v_s1 = Sse2.Add(v_s1, Sse2.Shuffle(v_s1, S1032));
s1 += v_s1.ToScalar();
v_s2 = Sse2.Add(v_s2, Sse2.Shuffle(v_s2, S2301));
v_s2 = Sse2.Add(v_s2, Sse2.Shuffle(v_s2, S1032));
s2 = v_s2.ToScalar();
// Reduce.
s1 %= BASE;
s2 %= BASE;
n = blocks;
}
if (length > 0)
{
HandleLeftOver(localBufferPtr, length, ref s1, ref s2);
}
blocks -= n;
return s1 | (s2 << 16);
}
}
}
// Process n blocks of data. At most NMAX data bytes can be
// processed before s2 must be reduced modulo BASE.
var v_ps = Vector128.CreateScalar(s1 * n);
var v_s2 = Vector128.CreateScalar(s2);
var v_s1 = Vector128<uint>.Zero;
// Based on: https://github.com/zlib-ng/zlib-ng/blob/develop/arch/x86/adler32_avx2.c
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
public static unsafe uint CalculateAvx2(uint adler, ReadOnlySpan<byte> buffer)
{
uint s1 = adler & 0xFFFF;
uint s2 = (adler >> 16) & 0xFFFF;
uint length = (uint)buffer.Length;
fixed (byte* bufferPtr = &MemoryMarshal.GetReference(buffer))
{
byte* localBufferPtr = bufferPtr;
Vector256<byte> zero = Vector256<byte>.Zero;
var dot3v = Vector256.Create((short)1);
var dot2v = Vector256.Create(32, 31, 30, 29, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 17, 16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1);
// Process n blocks of data. At most NMAX data bytes can be
// processed before s2 must be reduced modulo BASE.
var vs1 = Vector256.CreateScalar(s1);
var vs2 = Vector256.CreateScalar(s2);
while (length >= 32)
{
int k = length < NMAX ? (int)length : (int)NMAX;
k -= k % 32;
length -= (uint)k;
Vector256<uint> vs10 = vs1;
Vector256<uint> vs3 = Vector256<uint>.Zero;
while (k >= 32)
do
{
// Load 32 input bytes.
Vector256<byte> block = Avx.LoadVector256(localBufferPtr);
var bytes1 = Sse3.LoadDquVector128(localBufferPtr);
var bytes2 = Sse3.LoadDquVector128(localBufferPtr + 0x10);
// Sum of abs diff, resulting in 2 x int32's
Vector256<ushort> vs1sad = Avx2.SumAbsoluteDifferences(block, zero);
// Add previous block byte sum to v_ps.
v_ps = Sse2.Add(v_ps, v_s1);
vs1 = Avx2.Add(vs1, vs1sad.AsUInt32());
vs3 = Avx2.Add(vs3, vs10);
// Horizontally add the bytes for s1, multiply-adds the
// bytes by [ 32, 31, 30, ... ] for s2.
v_s1 = Sse2.Add(v_s1, Sse2.SumAbsoluteDifferences(bytes1, zero).AsUInt32());
var mad1 = Ssse3.MultiplyAddAdjacent(bytes1, tap1);
v_s2 = Sse2.Add(v_s2, Sse2.MultiplyAddAdjacent(mad1, ones).AsUInt32());
// sum 32 uint8s to 16 shorts.
Vector256<short> vshortsum2 = Avx2.MultiplyAddAdjacent(block, dot2v);
// sum 16 shorts to 8 uint32s.
Vector256<int> vsum2 = Avx2.MultiplyAddAdjacent(vshortsum2, dot3v);
vs2 = Avx2.Add(vsum2.AsUInt32(), vs2);
vs10 = vs1;
v_s1 = Sse2.Add(v_s1, Sse2.SumAbsoluteDifferences(bytes2, zero).AsUInt32());
var mad2 = Ssse3.MultiplyAddAdjacent(bytes2, tap2);
v_s2 = Sse2.Add(v_s2, Sse2.MultiplyAddAdjacent(mad2, ones).AsUInt32());
localBufferPtr += BlockSize;
k -= 32;
}
} while (--n > 0);
// Defer the multiplication with 32 to outside of the loop.
vs3 = Avx2.ShiftLeftLogical(vs3, 5);
vs2 = Avx2.Add(vs2, vs3);
v_s2 = Sse2.Add(v_s2, Sse2.ShiftLeftLogical(v_ps, 5));
s1 = (uint)Numerics.EvenReduceSum(vs1.AsInt32());
s2 = (uint)Numerics.ReduceSum(vs2.AsInt32());
// Sum epi32 ints v_s1(s2) and accumulate in s1(s2).
const byte S2301 = 0b1011_0001; // A B C D -> B A D C
const byte S1032 = 0b0100_1110; // A B C D -> C D A B
v_s1 = Sse2.Add(v_s1, Sse2.Shuffle(v_s1, S1032));
s1 += v_s1.ToScalar();
v_s2 = Sse2.Add(v_s2, Sse2.Shuffle(v_s2, S2301));
v_s2 = Sse2.Add(v_s2, Sse2.Shuffle(v_s2, S1032));
s2 = v_s2.ToScalar();
// Reduce.
s1 %= BASE;
s2 %= BASE;
vs1 = Vector256.CreateScalar(s1);
vs2 = Vector256.CreateScalar(s2);
}
if (length > 0)
@@ -324,97 +275,212 @@ namespace SharpCompress.Algorithms
return s1 | (s2 << 16);
}
}
}
private static unsafe void HandleLeftOver(byte* localBufferPtr, uint length, ref uint s1, ref uint s2)
// Based on: https://github.com/zlib-ng/zlib-ng/blob/develop/arch/x86/adler32_avx2.c
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
public static unsafe uint CalculateAvx2(uint adler, ReadOnlySpan<byte> buffer)
{
var s1 = adler & 0xFFFF;
var s2 = (adler >> 16) & 0xFFFF;
var length = (uint)buffer.Length;
fixed (byte* bufferPtr = &MemoryMarshal.GetReference(buffer))
{
if (length >= 16)
var localBufferPtr = bufferPtr;
var zero = Vector256<byte>.Zero;
var dot3v = Vector256.Create((short)1);
var dot2v = Vector256.Create(
32,
31,
30,
29,
28,
27,
26,
25,
24,
23,
22,
21,
20,
19,
18,
17,
16,
15,
14,
13,
12,
11,
10,
9,
8,
7,
6,
5,
4,
3,
2,
1
);
// Process n blocks of data. At most NMAX data bytes can be
// processed before s2 must be reduced modulo BASE.
var vs1 = Vector256.CreateScalar(s1);
var vs2 = Vector256.CreateScalar(s2);
while (length >= 32)
{
s2 += s1 += localBufferPtr[0];
s2 += s1 += localBufferPtr[1];
s2 += s1 += localBufferPtr[2];
s2 += s1 += localBufferPtr[3];
s2 += s1 += localBufferPtr[4];
s2 += s1 += localBufferPtr[5];
s2 += s1 += localBufferPtr[6];
s2 += s1 += localBufferPtr[7];
s2 += s1 += localBufferPtr[8];
s2 += s1 += localBufferPtr[9];
s2 += s1 += localBufferPtr[10];
s2 += s1 += localBufferPtr[11];
s2 += s1 += localBufferPtr[12];
s2 += s1 += localBufferPtr[13];
s2 += s1 += localBufferPtr[14];
s2 += s1 += localBufferPtr[15];
var k = length < NMAX ? (int)length : (int)NMAX;
k -= k % 32;
length -= (uint)k;
localBufferPtr += 16;
length -= 16;
}
var vs10 = vs1;
var vs3 = Vector256<uint>.Zero;
while (length-- > 0)
{
s2 += s1 += *localBufferPtr++;
}
if (s1 >= BASE)
{
s1 -= BASE;
}
s2 %= BASE;
}
#endif
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
private static unsafe uint CalculateScalar(uint adler, ReadOnlySpan<byte> buffer)
{
uint s1 = adler & 0xFFFF;
uint s2 = (adler >> 16) & 0xFFFF;
uint k;
fixed (byte* bufferPtr = buffer)
{
var localBufferPtr = bufferPtr;
uint length = (uint)buffer.Length;
while (length > 0)
while (k >= 32)
{
k = length < NMAX ? length : NMAX;
length -= k;
// Load 32 input bytes.
var block = Avx.LoadVector256(localBufferPtr);
while (k >= 16)
{
s2 += s1 += localBufferPtr[0];
s2 += s1 += localBufferPtr[1];
s2 += s1 += localBufferPtr[2];
s2 += s1 += localBufferPtr[3];
s2 += s1 += localBufferPtr[4];
s2 += s1 += localBufferPtr[5];
s2 += s1 += localBufferPtr[6];
s2 += s1 += localBufferPtr[7];
s2 += s1 += localBufferPtr[8];
s2 += s1 += localBufferPtr[9];
s2 += s1 += localBufferPtr[10];
s2 += s1 += localBufferPtr[11];
s2 += s1 += localBufferPtr[12];
s2 += s1 += localBufferPtr[13];
s2 += s1 += localBufferPtr[14];
s2 += s1 += localBufferPtr[15];
// Sum of abs diff, resulting in 2 x int32's
var vs1sad = Avx2.SumAbsoluteDifferences(block, zero);
localBufferPtr += 16;
k -= 16;
}
vs1 = Avx2.Add(vs1, vs1sad.AsUInt32());
vs3 = Avx2.Add(vs3, vs10);
while (k-- > 0)
{
s2 += s1 += *localBufferPtr++;
}
// sum 32 uint8s to 16 shorts.
var vshortsum2 = Avx2.MultiplyAddAdjacent(block, dot2v);
s1 %= BASE;
s2 %= BASE;
// sum 16 shorts to 8 uint32s.
var vsum2 = Avx2.MultiplyAddAdjacent(vshortsum2, dot3v);
vs2 = Avx2.Add(vsum2.AsUInt32(), vs2);
vs10 = vs1;
localBufferPtr += BlockSize;
k -= 32;
}
return (s2 << 16) | s1;
// Defer the multiplication with 32 to outside of the loop.
vs3 = Avx2.ShiftLeftLogical(vs3, 5);
vs2 = Avx2.Add(vs2, vs3);
s1 = (uint)Numerics.EvenReduceSum(vs1.AsInt32());
s2 = (uint)Numerics.ReduceSum(vs2.AsInt32());
s1 %= BASE;
s2 %= BASE;
vs1 = Vector256.CreateScalar(s1);
vs2 = Vector256.CreateScalar(s2);
}
if (length > 0)
{
HandleLeftOver(localBufferPtr, length, ref s1, ref s2);
}
return s1 | (s2 << 16);
}
}
private static unsafe void HandleLeftOver(
byte* localBufferPtr,
uint length,
ref uint s1,
ref uint s2
)
{
if (length >= 16)
{
s2 += s1 += localBufferPtr[0];
s2 += s1 += localBufferPtr[1];
s2 += s1 += localBufferPtr[2];
s2 += s1 += localBufferPtr[3];
s2 += s1 += localBufferPtr[4];
s2 += s1 += localBufferPtr[5];
s2 += s1 += localBufferPtr[6];
s2 += s1 += localBufferPtr[7];
s2 += s1 += localBufferPtr[8];
s2 += s1 += localBufferPtr[9];
s2 += s1 += localBufferPtr[10];
s2 += s1 += localBufferPtr[11];
s2 += s1 += localBufferPtr[12];
s2 += s1 += localBufferPtr[13];
s2 += s1 += localBufferPtr[14];
s2 += s1 += localBufferPtr[15];
localBufferPtr += 16;
length -= 16;
}
while (length-- > 0)
{
s2 += s1 += *localBufferPtr++;
}
if (s1 >= BASE)
{
s1 -= BASE;
}
s2 %= BASE;
}
#endif
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
private static unsafe uint CalculateScalar(uint adler, ReadOnlySpan<byte> buffer)
{
var s1 = adler & 0xFFFF;
var s2 = (adler >> 16) & 0xFFFF;
uint k;
fixed (byte* bufferPtr = buffer)
{
var localBufferPtr = bufferPtr;
var length = (uint)buffer.Length;
while (length > 0)
{
k = length < NMAX ? length : NMAX;
length -= k;
while (k >= 16)
{
s2 += s1 += localBufferPtr[0];
s2 += s1 += localBufferPtr[1];
s2 += s1 += localBufferPtr[2];
s2 += s1 += localBufferPtr[3];
s2 += s1 += localBufferPtr[4];
s2 += s1 += localBufferPtr[5];
s2 += s1 += localBufferPtr[6];
s2 += s1 += localBufferPtr[7];
s2 += s1 += localBufferPtr[8];
s2 += s1 += localBufferPtr[9];
s2 += s1 += localBufferPtr[10];
s2 += s1 += localBufferPtr[11];
s2 += s1 += localBufferPtr[12];
s2 += s1 += localBufferPtr[13];
s2 += s1 += localBufferPtr[14];
s2 += s1 += localBufferPtr[15];
localBufferPtr += 16;
k -= 16;
}
while (k-- > 0)
{
s2 += s1 += *localBufferPtr++;
}
s1 %= BASE;
s2 %= BASE;
}
return (s2 << 16) | s1;
}
}
}
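For orientation, every code path above (AVX2, SSE, scalar) computes the same checksum; a minimal unoptimized sketch of the Adler-32 definition from RFC 1950, equivalent in result but not the library's API:
```C#
// Reference Adler-32 (RFC 1950): s1 is the running byte sum, s2 the running
// sum of s1 values, both modulo 65521; s2 is packed into the high 16 bits.
static uint Adler32Reference(ReadOnlySpan<byte> buffer)
{
    const uint BASE = 65521;
    uint s1 = 1, s2 = 0;
    foreach (var b in buffer)
    {
        s1 = (s1 + b) % BASE;
        s2 = (s2 + s1) % BASE;
    }
    return (s2 << 16) | s1;
}
```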

View File

@@ -6,161 +6,170 @@ using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives
namespace SharpCompress.Archives;
public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtractionListener
where TEntry : IArchiveEntry
where TVolume : IVolume
{
public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtractionListener
where TEntry : IArchiveEntry
where TVolume : IVolume
private readonly LazyReadOnlyCollection<TVolume> _lazyVolumes;
private readonly LazyReadOnlyCollection<TEntry> _lazyEntries;
private bool _disposed;
private readonly SourceStream? _sourceStream;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionBegin;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionEnd;
public event EventHandler<CompressedBytesReadEventArgs>? CompressedBytesRead;
public event EventHandler<FilePartExtractionBeginEventArgs>? FilePartExtractionBegin;
protected ReaderOptions ReaderOptions { get; }
internal AbstractArchive(ArchiveType type, SourceStream sourceStream)
{
private readonly LazyReadOnlyCollection<TVolume> lazyVolumes;
private readonly LazyReadOnlyCollection<TEntry> lazyEntries;
Type = type;
ReaderOptions = sourceStream.ReaderOptions;
_sourceStream = sourceStream;
_lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(_sourceStream));
_lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
}
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionBegin;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionEnd;
internal AbstractArchive(ArchiveType type)
{
Type = type;
ReaderOptions = new();
_lazyVolumes = new LazyReadOnlyCollection<TVolume>(Enumerable.Empty<TVolume>());
_lazyEntries = new LazyReadOnlyCollection<TEntry>(Enumerable.Empty<TEntry>());
}
public event EventHandler<CompressedBytesReadEventArgs>? CompressedBytesRead;
public event EventHandler<FilePartExtractionBeginEventArgs>? FilePartExtractionBegin;
public ArchiveType Type { get; }
protected ReaderOptions ReaderOptions { get; }
void IArchiveExtractionListener.FireEntryExtractionBegin(IArchiveEntry entry) =>
EntryExtractionBegin?.Invoke(this, new ArchiveExtractionEventArgs<IArchiveEntry>(entry));
private bool disposed;
protected SourceStream SrcStream;
void IArchiveExtractionListener.FireEntryExtractionEnd(IArchiveEntry entry) =>
EntryExtractionEnd?.Invoke(this, new ArchiveExtractionEventArgs<IArchiveEntry>(entry));
internal AbstractArchive(ArchiveType type, SourceStream srcStream)
private static Stream CheckStreams(Stream stream)
{
if (!stream.CanSeek || !stream.CanRead)
{
Type = type;
ReaderOptions = srcStream.ReaderOptions;
SrcStream = srcStream;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(SrcStream));
lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
throw new ArchiveException("Archive streams must be Readable and Seekable");
}
return stream;
}
#nullable disable
internal AbstractArchive(ArchiveType type)
/// <summary>
/// Returns a ReadOnlyCollection of all the RarArchiveEntries across the one or many parts of the RarArchive.
/// </summary>
public virtual ICollection<TEntry> Entries => _lazyEntries;
/// <summary>
/// Returns a ReadOnlyCollection of all the RarArchiveVolumes across the one or many parts of the RarArchive.
/// </summary>
public ICollection<TVolume> Volumes => _lazyVolumes;
/// <summary>
/// The total size of the files compressed in the archive.
/// </summary>
public virtual long TotalSize =>
Entries.Aggregate(0L, (total, cf) => total + cf.CompressedSize);
/// <summary>
/// The total size of the files as uncompressed in the archive.
/// </summary>
public virtual long TotalUncompressSize =>
Entries.Aggregate(0L, (total, cf) => total + cf.Size);
protected abstract IEnumerable<TVolume> LoadVolumes(SourceStream sourceStream);
protected abstract IEnumerable<TEntry> LoadEntries(IEnumerable<TVolume> volumes);
IEnumerable<IArchiveEntry> IArchive.Entries => Entries.Cast<IArchiveEntry>();
IEnumerable<IVolume> IArchive.Volumes => _lazyVolumes.Cast<IVolume>();
public virtual void Dispose()
{
if (!_disposed)
{
Type = type;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(Enumerable.Empty<TVolume>());
lazyEntries = new LazyReadOnlyCollection<TEntry>(Enumerable.Empty<TEntry>());
_lazyVolumes.ForEach(v => v.Dispose());
_lazyEntries.GetLoaded().Cast<Entry>().ForEach(x => x.Close());
_sourceStream?.Dispose();
_disposed = true;
}
#nullable enable
}
public ArchiveType Type { get; }
void IArchiveExtractionListener.EnsureEntriesLoaded()
{
_lazyEntries.EnsureFullyLoaded();
_lazyVolumes.EnsureFullyLoaded();
}
void IArchiveExtractionListener.FireEntryExtractionBegin(IArchiveEntry entry)
{
EntryExtractionBegin?.Invoke(this, new ArchiveExtractionEventArgs<IArchiveEntry>(entry));
}
void IArchiveExtractionListener.FireEntryExtractionEnd(IArchiveEntry entry)
{
EntryExtractionEnd?.Invoke(this, new ArchiveExtractionEventArgs<IArchiveEntry>(entry));
}
private static Stream CheckStreams(Stream stream)
{
if (!stream.CanSeek || !stream.CanRead)
{
throw new ArgumentException("Archive streams must be Readable and Seekable");
}
return stream;
}
/// <summary>
/// Returns a ReadOnlyCollection of all the RarArchiveEntries across the one or many parts of the RarArchive.
/// </summary>
public virtual ICollection<TEntry> Entries => lazyEntries;
/// <summary>
/// Returns a ReadOnlyCollection of all the RarArchiveVolumes across the one or many parts of the RarArchive.
/// </summary>
public ICollection<TVolume> Volumes => lazyVolumes;
/// <summary>
/// The total size of the files compressed in the archive.
/// </summary>
public virtual long TotalSize => Entries.Aggregate(0L, (total, cf) => total + cf.CompressedSize);
/// <summary>
/// The total size of the files as uncompressed in the archive.
/// </summary>
public virtual long TotalUncompressSize => Entries.Aggregate(0L, (total, cf) => total + cf.Size);
protected abstract IEnumerable<TVolume> LoadVolumes(SourceStream srcStream);
protected abstract IEnumerable<TEntry> LoadEntries(IEnumerable<TVolume> volumes);
IEnumerable<IArchiveEntry> IArchive.Entries => Entries.Cast<IArchiveEntry>();
IEnumerable<IVolume> IArchive.Volumes => lazyVolumes.Cast<IVolume>();
public virtual void Dispose()
{
if (!disposed)
{
lazyVolumes.ForEach(v => v.Dispose());
lazyEntries.GetLoaded().Cast<Entry>().ForEach(x => x.Close());
if (SrcStream != null)
SrcStream.Dispose();
disposed = true;
}
}
void IArchiveExtractionListener.EnsureEntriesLoaded()
{
lazyEntries.EnsureFullyLoaded();
lazyVolumes.EnsureFullyLoaded();
}
void IExtractionListener.FireCompressedBytesRead(long currentPartCompressedBytes, long compressedReadBytes)
{
CompressedBytesRead?.Invoke(this, new CompressedBytesReadEventArgs(
void IExtractionListener.FireCompressedBytesRead(
long currentPartCompressedBytes,
long compressedReadBytes
) =>
CompressedBytesRead?.Invoke(
this,
new CompressedBytesReadEventArgs(
currentFilePartCompressedBytesRead: currentPartCompressedBytes,
compressedBytesRead: compressedReadBytes
));
}
)
);
void IExtractionListener.FireFilePartExtractionBegin(string name, long size, long compressedSize)
{
FilePartExtractionBegin?.Invoke(this, new FilePartExtractionBeginEventArgs(
void IExtractionListener.FireFilePartExtractionBegin(
string name,
long size,
long compressedSize
) =>
FilePartExtractionBegin?.Invoke(
this,
new FilePartExtractionBeginEventArgs(
compressedSize: compressedSize,
size: size,
name: name
));
}
)
);
/// <summary>
/// Use this method to extract all entries in an archive in order.
/// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
/// extracted sequentially for the best performance.
///
/// This method will load all entry information from the archive.
///
/// WARNING: this will reuse the underlying stream for the archive. Errors may
/// occur if this is used at the same time as other extraction methods on this instance.
/// </summary>
/// <returns></returns>
public IReader ExtractAllEntries()
{
if (!IsSolid && Type != ArchiveType.SevenZip)
{
throw new InvalidOperationException(
"ExtractAllEntries can only be used on solid archives or 7Zip archives (which require random access)."
);
}
((IArchiveExtractionListener)this).EnsureEntriesLoaded();
return CreateReaderForSolidExtraction();
}
protected abstract IReader CreateReaderForSolidExtraction();
/// <summary>
/// Archive is SOLID (this means the Archive saved bytes by reusing information which helps for archives containing many small files).
/// </summary>
public virtual bool IsSolid => false;
/// <summary>
/// The archive can find all the parts of the archive needed to fully extract the archive. This forces the parsing of the entire archive.
/// </summary>
public bool IsComplete
{
get
{
((IArchiveExtractionListener)this).EnsureEntriesLoaded();
return CreateReaderForSolidExtraction();
}
protected abstract IReader CreateReaderForSolidExtraction();
/// <summary>
/// Archive is SOLID (this means the Archive saved bytes by reusing information which helps for archives containing many small files).
/// </summary>
public virtual bool IsSolid => false;
/// <summary>
/// The archive can find all the parts of the archive needed to fully extract the archive. This forces the parsing of the entire archive.
/// </summary>
public bool IsComplete
{
get
{
((IArchiveExtractionListener)this).EnsureEntriesLoaded();
return Entries.All(x => x.IsComplete);
}
return Entries.All(x => x.IsComplete);
}
}
}
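As a consumption sketch of the `ExtractAllEntries` contract above (solid or 7Zip archives, read sequentially; the file name and output path are illustrative):
```C#
// Sequential extraction of a solid archive via ExtractAllEntries.
using (var archive = SevenZipArchive.Open("archive.7z"))
using (var reader = archive.ExtractAllEntries())
{
    while (reader.MoveToNextEntry())
    {
        if (!reader.Entry.IsDirectory)
        {
            reader.WriteEntryToDirectory(
                @"D:\output",
                new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
            );
        }
    }
}
```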

View File

@@ -4,163 +4,180 @@ using System.IO;
using System.Linq;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Writers;
namespace SharpCompress.Archives
namespace SharpCompress.Archives;
public abstract class AbstractWritableArchive<TEntry, TVolume>
: AbstractArchive<TEntry, TVolume>,
IWritableArchive
where TEntry : IArchiveEntry
where TVolume : IVolume
{
public abstract class AbstractWritableArchive<TEntry, TVolume> : AbstractArchive<TEntry, TVolume>, IWritableArchive
where TEntry : IArchiveEntry
where TVolume : IVolume
private class RebuildPauseDisposable : IDisposable
{
private class RebuildPauseDisposable : IDisposable
{
private readonly AbstractWritableArchive<TEntry, TVolume> archive;
public RebuildPauseDisposable(AbstractWritableArchive<TEntry, TVolume> archive)
{
this.archive = archive;
archive.pauseRebuilding = true;
}
public void Dispose()
{
archive.pauseRebuilding = false;
archive.RebuildModifiedCollection();
}
}
private readonly List<TEntry> newEntries = new List<TEntry>();
private readonly List<TEntry> removedEntries = new List<TEntry>();
private readonly List<TEntry> modifiedEntries = new List<TEntry>();
private bool hasModifications;
private bool pauseRebuilding;
internal AbstractWritableArchive(ArchiveType type)
: base(type)
private readonly AbstractWritableArchive<TEntry, TVolume> archive;
public RebuildPauseDisposable(AbstractWritableArchive<TEntry, TVolume> archive)
{
this.archive = archive;
archive.pauseRebuilding = true;
}
internal AbstractWritableArchive(ArchiveType type, SourceStream srcStream)
: base(type, srcStream)
public void Dispose()
{
}
public override ICollection<TEntry> Entries
{
get
{
if (hasModifications)
{
return modifiedEntries;
}
return base.Entries;
}
}
public IDisposable PauseEntryRebuilding()
{
return new RebuildPauseDisposable(this);
}
private void RebuildModifiedCollection()
{
if (pauseRebuilding)
{
return;
}
hasModifications = true;
newEntries.RemoveAll(v => removedEntries.Contains(v));
modifiedEntries.Clear();
modifiedEntries.AddRange(OldEntries.Concat(newEntries));
}
private IEnumerable<TEntry> OldEntries { get { return base.Entries.Where(x => !removedEntries.Contains(x)); } }
public void RemoveEntry(TEntry entry)
{
if (!removedEntries.Contains(entry))
{
removedEntries.Add(entry);
RebuildModifiedCollection();
}
}
void IWritableArchive.RemoveEntry(IArchiveEntry entry)
{
RemoveEntry((TEntry)entry);
}
public TEntry AddEntry(string key, Stream source,
long size = 0, DateTime? modified = null)
{
return AddEntry(key, source, false, size, modified);
}
IArchiveEntry IWritableArchive.AddEntry(string key, Stream source, bool closeStream, long size, DateTime? modified)
{
return AddEntry(key, source, closeStream, size, modified);
}
public TEntry AddEntry(string key, Stream source, bool closeStream,
long size = 0, DateTime? modified = null)
{
if (key.Length > 0 && key[0] is '/' or '\\')
{
key = key.Substring(1);
}
if (DoesKeyMatchExisting(key))
{
throw new ArchiveException("Cannot add entry with duplicate key: " + key);
}
var entry = CreateEntry(key, source, size, modified, closeStream);
newEntries.Add(entry);
RebuildModifiedCollection();
return entry;
}
private bool DoesKeyMatchExisting(string key)
{
foreach (var path in Entries.Select(x => x.Key))
{
var p = path.Replace('/', '\\');
if (p.Length > 0 && p[0] == '\\')
{
p = p.Substring(1);
}
return string.Equals(p, key, StringComparison.OrdinalIgnoreCase);
}
return false;
}
public void SaveTo(Stream stream, WriterOptions options)
{
//reset streams of new entries
newEntries.Cast<IWritableArchiveEntry>().ForEach(x => x.Stream.Seek(0, SeekOrigin.Begin));
SaveTo(stream, options, OldEntries, newEntries);
}
protected TEntry CreateEntry(string key, Stream source, long size, DateTime? modified,
bool closeStream)
{
if (!source.CanRead || !source.CanSeek)
{
throw new ArgumentException("Streams must be readable and seekable to use the Writing Archive API");
}
return CreateEntryInternal(key, source, size, modified, closeStream);
}
protected abstract TEntry CreateEntryInternal(string key, Stream source, long size, DateTime? modified,
bool closeStream);
protected abstract void SaveTo(Stream stream, WriterOptions options, IEnumerable<TEntry> oldEntries, IEnumerable<TEntry> newEntries);
public override void Dispose()
{
base.Dispose();
newEntries.Cast<Entry>().ForEach(x => x.Close());
removedEntries.Cast<Entry>().ForEach(x => x.Close());
modifiedEntries.Cast<Entry>().ForEach(x => x.Close());
archive.pauseRebuilding = false;
archive.RebuildModifiedCollection();
}
}
private readonly List<TEntry> newEntries = new();
private readonly List<TEntry> removedEntries = new();
private readonly List<TEntry> modifiedEntries = new();
private bool hasModifications;
private bool pauseRebuilding;
internal AbstractWritableArchive(ArchiveType type)
: base(type) { }
internal AbstractWritableArchive(ArchiveType type, SourceStream sourceStream)
: base(type, sourceStream) { }
public override ICollection<TEntry> Entries
{
get
{
if (hasModifications)
{
return modifiedEntries;
}
return base.Entries;
}
}
public IDisposable PauseEntryRebuilding() => new RebuildPauseDisposable(this);
private void RebuildModifiedCollection()
{
if (pauseRebuilding)
{
return;
}
hasModifications = true;
newEntries.RemoveAll(v => removedEntries.Contains(v));
modifiedEntries.Clear();
modifiedEntries.AddRange(OldEntries.Concat(newEntries));
}
private IEnumerable<TEntry> OldEntries => base.Entries.Where(x => !removedEntries.Contains(x));
public void RemoveEntry(TEntry entry)
{
if (!removedEntries.Contains(entry))
{
removedEntries.Add(entry);
RebuildModifiedCollection();
}
}
void IWritableArchive.RemoveEntry(IArchiveEntry entry) => RemoveEntry((TEntry)entry);
public TEntry AddEntry(string key, Stream source, long size = 0, DateTime? modified = null) =>
AddEntry(key, source, false, size, modified);
IArchiveEntry IWritableArchive.AddEntry(
string key,
Stream source,
bool closeStream,
long size,
DateTime? modified
) => AddEntry(key, source, closeStream, size, modified);
public TEntry AddEntry(
string key,
Stream source,
bool closeStream,
long size = 0,
DateTime? modified = null
)
{
if (key.Length > 0 && key[0] is '/' or '\\')
{
key = key.Substring(1);
}
if (DoesKeyMatchExisting(key))
{
throw new ArchiveException("Cannot add entry with duplicate key: " + key);
}
var entry = CreateEntry(key, source, size, modified, closeStream);
newEntries.Add(entry);
RebuildModifiedCollection();
return entry;
}
private bool DoesKeyMatchExisting(string key)
{
foreach (var path in Entries.Select(x => x.Key))
{
if (path is null)
{
continue;
}
var p = path.Replace('/', '\\');
if (p.Length > 0 && p[0] == '\\')
{
p = p.Substring(1);
}
return string.Equals(p, key, StringComparison.OrdinalIgnoreCase);
}
return false;
}
public void SaveTo(Stream stream, WriterOptions options)
{
//reset streams of new entries
newEntries.Cast<IWritableArchiveEntry>().ForEach(x => x.Stream.Seek(0, SeekOrigin.Begin));
SaveTo(stream, options, OldEntries, newEntries);
}
protected TEntry CreateEntry(
string key,
Stream source,
long size,
DateTime? modified,
bool closeStream
)
{
if (!source.CanRead || !source.CanSeek)
{
throw new ArchiveException(
"Streams must be readable and seekable to use the Writing Archive API"
);
}
return CreateEntryInternal(key, source, size, modified, closeStream);
}
protected abstract TEntry CreateEntryInternal(
string key,
Stream source,
long size,
DateTime? modified,
bool closeStream
);
protected abstract void SaveTo(
Stream stream,
WriterOptions options,
IEnumerable<TEntry> oldEntries,
IEnumerable<TEntry> newEntries
);
public override void Dispose()
{
base.Dispose();
newEntries.Cast<Entry>().ForEach(x => x.Close());
removedEntries.Cast<Entry>().ForEach(x => x.Close());
modifiedEntries.Cast<Entry>().ForEach(x => x.Close());
}
}
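`PauseEntryRebuilding` above lets callers batch many entry mutations and pay the `RebuildModifiedCollection` cost once on dispose. A hedged sketch against the public API (paths and names illustrative):
```C#
// Batch additions without rebuilding the modified-entry collection per call.
using (var archive = ZipArchive.Create())
{
    using (archive.PauseEntryRebuilding())
    {
        foreach (var file in Directory.EnumerateFiles(@"D:\files"))
        {
            archive.AddEntry(Path.GetFileName(file), File.OpenRead(file), closeStream: true);
        }
    } // disposing resumes rebuilding and refreshes the collection once
    using (var output = File.Create("output.zip"))
    {
        archive.SaveTo(output, new WriterOptions(CompressionType.Deflate));
    }
}
```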

View File

@@ -2,295 +2,251 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Archives.GZip;
using SharpCompress.Archives.Rar;
using SharpCompress.Archives.SevenZip;
using SharpCompress.Archives.Tar;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.Factories;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Archives
namespace SharpCompress.Archives;
public static class ArchiveFactory
{
public static class ArchiveFactory
/// <summary>
/// Opens an Archive for random access
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <returns></returns>
public static IArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
/// <summary>
/// Opens an Archive for random access
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <returns></returns>
public static IArchive Open(Stream stream, ReaderOptions? readerOptions = null)
readerOptions ??= new ReaderOptions();
var factory = FindFactory<IArchiveFactory>(stream);
stream = new SharpCompressStream(stream, bufferSize: readerOptions.BufferSize);
return factory.Open(stream, readerOptions);
}
public static IWritableArchive Create(ArchiveType type)
{
var factory = Factory
.Factories.OfType<IWriteableArchiveFactory>()
.FirstOrDefault(item => item.KnownArchiveType == type);
if (factory != null)
{
stream.CheckNotNull(nameof(stream));
if (!stream.CanRead || !stream.CanSeek)
{
throw new ArgumentException("Stream should be readable and seekable");
}
readerOptions ??= new ReaderOptions();
ArchiveType? type;
IsArchive(stream, out type); //test and reset stream position
if (type != null)
{
switch (type.Value)
{
case ArchiveType.Zip:
return ZipArchive.Open(stream, readerOptions);
case ArchiveType.SevenZip:
return SevenZipArchive.Open(stream, readerOptions);
case ArchiveType.GZip:
return GZipArchive.Open(stream, readerOptions);
case ArchiveType.Rar:
return RarArchive.Open(stream, readerOptions);
case ArchiveType.Tar:
return TarArchive.Open(stream, readerOptions);
}
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip, LZip");
return factory.CreateWriteableArchive();
}
public static IWritableArchive Create(ArchiveType type)
throw new NotSupportedException("Cannot create Archives of type: " + type);
}
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="options"></param>
public static IArchive Open(string filePath, ReaderOptions? options = null)
{
filePath.NotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), options);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
public static IArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
{
options ??= new ReaderOptions { LeaveStreamOpen = false };
return FindFactory<IArchiveFactory>(fileInfo).Open(fileInfo, options);
}
/// <summary>
/// Constructor with IEnumerable FileInfo objects, multi and split support.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="options"></param>
public static IArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? options = null)
{
fileInfos.NotNull(nameof(fileInfos));
var filesArray = fileInfos.ToArray();
if (filesArray.Length == 0)
{
return type switch
{
ArchiveType.Zip => ZipArchive.Create(),
ArchiveType.Tar => TarArchive.Create(),
ArchiveType.GZip => GZipArchive.Create(),
_ => throw new NotSupportedException("Cannot create Archives of type: " + type)
};
throw new InvalidOperationException("No files to open");
}
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="options"></param>
public static IArchive Open(string filePath, ReaderOptions? options = null)
var fileInfo = filesArray[0];
if (filesArray.Length == 1)
{
filePath.CheckNotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), options);
return Open(fileInfo, options);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
public static IArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
fileInfo.NotNull(nameof(fileInfo));
options ??= new ReaderOptions { LeaveStreamOpen = false };
return FindFactory<IMultiArchiveFactory>(fileInfo).Open(filesArray, options);
}
/// <summary>
/// Constructor with IEnumerable FileInfo objects, multi and split support.
/// </summary>
/// <param name="streams"></param>
/// <param name="options"></param>
public static IArchive Open(IEnumerable<Stream> streams, ReaderOptions? options = null)
{
streams.NotNull(nameof(streams));
var streamsArray = streams.ToArray();
if (streamsArray.Length == 0)
{
fileInfo.CheckNotNull(nameof(fileInfo));
options ??= new ReaderOptions { LeaveStreamOpen = false };
ArchiveType? type;
using (Stream stream = fileInfo.OpenRead())
{
IsArchive(stream, out type); //test and reset stream position
if (type != null)
{
switch (type.Value)
{
case ArchiveType.Zip:
return ZipArchive.Open(fileInfo, options);
case ArchiveType.SevenZip:
return SevenZipArchive.Open(fileInfo, options);
case ArchiveType.GZip:
return GZipArchive.Open(fileInfo, options);
case ArchiveType.Rar:
return RarArchive.Open(fileInfo, options);
case ArchiveType.Tar:
return TarArchive.Open(fileInfo, options);
}
}
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip");
throw new InvalidOperationException("No streams");
}
/// <summary>
/// Constructor with IEnumerable FileInfo objects, multi and split support.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="options"></param>
public static IArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? options = null)
var firstStream = streamsArray[0];
if (streamsArray.Length == 1)
{
fileInfos.CheckNotNull(nameof(fileInfos));
FileInfo[] files = fileInfos.ToArray();
if (files.Length == 0)
throw new InvalidOperationException("No files to open");
FileInfo fileInfo = files[0];
if (files.Length == 1)
return Open(fileInfo, options);
fileInfo.CheckNotNull(nameof(fileInfo));
options ??= new ReaderOptions { LeaveStreamOpen = false };
ArchiveType? type;
using (Stream stream = fileInfo.OpenRead())
IsArchive(stream, out type); //test and reset stream position
if (type != null)
{
switch (type.Value)
{
case ArchiveType.Zip:
return ZipArchive.Open(files, options);
case ArchiveType.SevenZip:
return SevenZipArchive.Open(files, options);
case ArchiveType.GZip:
return GZipArchive.Open(files, options);
case ArchiveType.Rar:
return RarArchive.Open(files, options);
case ArchiveType.Tar:
return TarArchive.Open(files, options);
}
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip");
return Open(firstStream, options);
}
/// <summary>
/// Constructor with IEnumerable FileInfo objects, multi and split support.
/// </summary>
/// <param name="streams"></param>
/// <param name="options"></param>
public static IArchive Open(IEnumerable<Stream> streams, ReaderOptions? options = null)
firstStream.NotNull(nameof(firstStream));
options ??= new ReaderOptions();
return FindFactory<IMultiArchiveFactory>(firstStream).Open(streamsArray, options);
}
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteToDirectory(
string sourceArchive,
string destinationDirectory,
ExtractionOptions? options = null
)
{
using var archive = Open(sourceArchive);
archive.WriteToDirectory(destinationDirectory, options);
}
private static T FindFactory<T>(FileInfo finfo)
where T : IFactory
{
finfo.NotNull(nameof(finfo));
using Stream stream = finfo.OpenRead();
return FindFactory<T>(stream);
}
private static T FindFactory<T>(Stream stream)
where T : IFactory
{
stream.NotNull(nameof(stream));
if (!stream.CanRead || !stream.CanSeek)
{
streams.CheckNotNull(nameof(streams));
if (streams.Count() == 0)
throw new InvalidOperationException("No streams");
if (streams.Count() == 1)
return Open(streams.First(), options);
options ??= new ReaderOptions();
ArchiveType? type;
using (Stream stream = streams.First())
IsArchive(stream, out type); //test and reset stream position
if (type != null)
{
switch (type.Value)
{
case ArchiveType.Zip:
return ZipArchive.Open(streams, options);
case ArchiveType.SevenZip:
return SevenZipArchive.Open(streams, options);
case ArchiveType.GZip:
return GZipArchive.Open(streams, options);
case ArchiveType.Rar:
return RarArchive.Open(streams, options);
case ArchiveType.Tar:
return TarArchive.Open(streams, options);
}
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip");
throw new ArgumentException("Stream should be readable and seekable");
}
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteToDirectory(string sourceArchive, string destinationDirectory,
ExtractionOptions? options = null)
var factories = Factory.Factories.OfType<T>();
var startPosition = stream.Position;
foreach (var factory in factories)
{
using IArchive archive = Open(sourceArchive);
foreach (IArchiveEntry entry in archive.Entries)
stream.Seek(startPosition, SeekOrigin.Begin);
if (factory.IsArchive(stream))
{
entry.WriteToDirectory(destinationDirectory, options);
stream.Seek(startPosition, SeekOrigin.Begin);
return factory;
}
}
public static bool IsArchive(string filePath, out ArchiveType? type)
var extensions = string.Join(", ", factories.Select(item => item.Name));
throw new InvalidOperationException(
$"Cannot determine compressed stream type. Supported Archive Formats: {extensions}"
);
}
public static bool IsArchive(
    string filePath,
    out ArchiveType? type,
    int bufferSize = ReaderOptions.DefaultBufferSize
)
{
    filePath.NotNullOrEmpty(nameof(filePath));
    using Stream s = File.OpenRead(filePath);
    return IsArchive(s, out type, bufferSize);
}
public static bool IsArchive(
    Stream stream,
    out ArchiveType? type,
    int bufferSize = ReaderOptions.DefaultBufferSize
)
{
    type = null;
    stream.NotNull(nameof(stream));
    if (!stream.CanRead || !stream.CanSeek)
    {
        throw new ArgumentException("Stream should be readable and seekable");
    }
    var startPosition = stream.Position;
    foreach (var factory in Factory.Factories)
    {
        var isArchive = factory.IsArchive(stream);
        stream.Position = startPosition;
        if (isArchive)
        {
            type = factory.KnownArchiveType;
            return true;
        }
    }
    return false;
}
/// <summary>
/// From a passed in archive (zip, rar, 7z, 001), return all parts.
/// </summary>
/// <param name="part1"></param>
/// <returns></returns>
public static IEnumerable<string> GetFileParts(string part1)
{
    part1.NotNullOrEmpty(nameof(part1));
    return GetFileParts(new FileInfo(part1)).Select(a => a.FullName);
}
/// <summary>
/// From a passed in archive (zip, rar, 7z, 001), return all parts.
/// </summary>
/// <param name="part1"></param>
/// <returns></returns>
public static IEnumerable<FileInfo> GetFileParts(FileInfo part1)
{
    part1.NotNull(nameof(part1));
    yield return part1;
    foreach (var factory in Factory.Factories.OfType<IFactory>())
    {
        var i = 1;
        var part = factory.GetFilePart(i++, part1);
        if (part != null)
        {
            yield return part;
            while ((part = factory.GetFilePart(i++, part1)) != null) //tests split too
            {
                yield return part;
            }
            yield break;
        }
    }
}
public static IArchiveFactory AutoFactory { get; } = new AutoArchiveFactory();
}
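
A minimal usage sketch for the detection helpers above, assuming C# top-level statements; "sample.zip" is a hypothetical path, not a file from this changeset.

using System;
using SharpCompress.Archives;
using SharpCompress.Common;

if (ArchiveFactory.IsArchive("sample.zip", out ArchiveType? detected))
{
    Console.WriteLine($"Detected: {detected}");
    // GetFileParts yields the first part plus any .001/.002-style siblings on disk.
    foreach (var part in ArchiveFactory.GetFileParts("sample.zip"))
    {
        Console.WriteLine(part);
    }
}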

View File

@@ -1,29 +1,30 @@
using System;
using System.IO;
using System.Text.RegularExpressions;

namespace SharpCompress.Archives;

internal abstract class ArchiveVolumeFactory
{
    internal static FileInfo? GetFilePart(int index, FileInfo part1) //base the name on the first part
    {
        FileInfo? item = null;

        //split 001, 002 ...
        var m = Regex.Match(part1.Name, @"^(.*\.)([0-9]+)$", RegexOptions.IgnoreCase);
        if (m.Success)
            item = new FileInfo(
                Path.Combine(
                    part1.DirectoryName!,
                    String.Concat(
                        m.Groups[1].Value,
                        (index + 1).ToString().PadLeft(m.Groups[2].Value.Length, '0')
                    )
                )
            );

        if (item != null && item.Exists)
            return item;

        return null;
    }
}

View File

@@ -0,0 +1,30 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
class AutoArchiveFactory : IArchiveFactory
{
public string Name => nameof(AutoArchiveFactory);
public ArchiveType? KnownArchiveType => null;
public IEnumerable<string> GetSupportedExtensions() => throw new NotSupportedException();
public bool IsArchive(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize
) => throw new NotSupportedException();
public FileInfo? GetFilePart(int index, FileInfo part1) => throw new NotSupportedException();
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
ArchiveFactory.Open(stream, readerOptions);
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
ArchiveFactory.Open(fileInfo, readerOptions);
}
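
A short sketch of how this decorator is consumed: callers hold a single IArchiveFactory and let ArchiveFactory.Open detect the format, deferring the choice ("unknown.bin" is a hypothetical path).

using System.IO;
using SharpCompress.Archives;

IArchiveFactory factory = ArchiveFactory.AutoFactory;
// Open delegates straight to ArchiveFactory.Open, which detects the format.
using var archive = factory.Open(new FileInfo("unknown.bin"));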

View File

@@ -10,182 +10,205 @@ using SharpCompress.Readers.GZip;
using SharpCompress.Writers;
using SharpCompress.Writers.GZip;
namespace SharpCompress.Archives.GZip;

public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
{
    /// <summary>
    /// Constructor expects a filepath to an existing file.
    /// </summary>
    /// <param name="filePath"></param>
    /// <param name="readerOptions"></param>
    public static GZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
    {
        filePath.NotNullOrEmpty(nameof(filePath));
        return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
    }

    /// <summary>
    /// Constructor with a FileInfo object to an existing file.
    /// </summary>
    /// <param name="fileInfo"></param>
    /// <param name="readerOptions"></param>
    public static GZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
    {
        fileInfo.NotNull(nameof(fileInfo));
        return new GZipArchive(
            new SourceStream(
                fileInfo,
                i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
                readerOptions ?? new ReaderOptions()
            )
        );
    }

    /// <summary>
    /// Constructor with all file parts passed in
    /// </summary>
    /// <param name="fileInfos"></param>
    /// <param name="readerOptions"></param>
    public static GZipArchive Open(
        IEnumerable<FileInfo> fileInfos,
        ReaderOptions? readerOptions = null
    )
    {
        fileInfos.NotNull(nameof(fileInfos));
        var files = fileInfos.ToArray();
        return new GZipArchive(
            new SourceStream(
                files[0],
                i => i < files.Length ? files[i] : null,
                readerOptions ?? new ReaderOptions()
            )
        );
    }

    /// <summary>
    /// Constructor with all stream parts passed in
    /// </summary>
    /// <param name="streams"></param>
    /// <param name="readerOptions"></param>
    public static GZipArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
    {
        streams.NotNull(nameof(streams));
        var strms = streams.ToArray();
        return new GZipArchive(
            new SourceStream(
                strms[0],
                i => i < strms.Length ? strms[i] : null,
                readerOptions ?? new ReaderOptions()
            )
        );
    }

    /// <summary>
    /// Takes a seekable Stream as a source
    /// </summary>
    /// <param name="stream"></param>
    /// <param name="readerOptions"></param>
    public static GZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
    {
        stream.NotNull(nameof(stream));
        if (stream is not { CanSeek: true })
        {
            throw new ArgumentException("Stream must be seekable", nameof(stream));
        }
        return new GZipArchive(
            new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
        );
    }

    public static GZipArchive Create() => new();

    /// <summary>
    /// Constructor with a SourceStream able to handle FileInfo and Streams.
    /// </summary>
    /// <param name="sourceStream"></param>
    private GZipArchive(SourceStream sourceStream)
        : base(ArchiveType.GZip, sourceStream) { }

    protected override IEnumerable<GZipVolume> LoadVolumes(SourceStream sourceStream)
    {
        sourceStream.LoadAllParts();
        return sourceStream.Streams.Select(a => new GZipVolume(a, ReaderOptions, 0));
    }

    public static bool IsGZipFile(string filePath) => IsGZipFile(new FileInfo(filePath));

    public static bool IsGZipFile(FileInfo fileInfo)
    {
        if (!fileInfo.Exists)
        {
            return false;
        }
        using Stream stream = fileInfo.OpenRead();
        return IsGZipFile(stream);
    }

    public void SaveTo(string filePath) => SaveTo(new FileInfo(filePath));

    public void SaveTo(FileInfo fileInfo)
    {
        using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
        SaveTo(stream, new WriterOptions(CompressionType.GZip));
    }

    public static bool IsGZipFile(Stream stream)
    {
        // read the header on the first read
        Span<byte> header = stackalloc byte[10];

        // workitem 8501: handle edge case (decompress empty stream)
        if (!stream.ReadFully(header))
        {
            return false;
        }

        if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
        {
            return false;
        }

        return true;
    }

    internal GZipArchive()
        : base(ArchiveType.GZip) { }

    protected override GZipArchiveEntry CreateEntryInternal(
        string filePath,
        Stream source,
        long size,
        DateTime? modified,
        bool closeStream
    )
    {
        if (Entries.Any())
        {
            throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
        }
        return new GZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream);
    }

    protected override void SaveTo(
        Stream stream,
        WriterOptions options,
        IEnumerable<GZipArchiveEntry> oldEntries,
        IEnumerable<GZipArchiveEntry> newEntries
    )
    {
        if (Entries.Count > 1)
        {
            throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
        }
        using var writer = new GZipWriter(stream, new GZipWriterOptions(options));
        foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
        {
            using var entryStream = entry.OpenEntryStream();
            writer.Write(
                entry.Key.NotNull("Entry Key is null"),
                entryStream,
                entry.LastModifiedTime
            );
        }
    }

    protected override IEnumerable<GZipArchiveEntry> LoadEntries(IEnumerable<GZipVolume> volumes)
    {
        var stream = volumes.Single().Stream;
        yield return new GZipArchiveEntry(
            this,
            new GZipFilePart(stream, ReaderOptions.ArchiveEncoding)
        );
    }

    protected override IReader CreateReaderForSolidExtraction()
    {
        var stream = Volumes.Single().Stream;
        stream.Position = 0;
        return GZipReader.Open(stream);
    }
}
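
A sketch of the single-entry rule that CreateEntryInternal and SaveTo enforce above; paths are hypothetical, and AddEntry(string, string) is the extension method from IWritableArchiveExtensions.

using SharpCompress.Archives;
using SharpCompress.Archives.GZip;

using var archive = GZipArchive.Create();
archive.AddEntry("data.txt", "/tmp/data.txt"); // the one allowed entry
archive.SaveTo("/tmp/data.txt.gz");            // saved with CompressionType.GZip
// A second AddEntry would throw InvalidFormatException:
// a GZip archive holds exactly one entry.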

View File

@@ -1,34 +1,38 @@
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.GZip;
namespace SharpCompress.Archives.GZip;

public class GZipArchiveEntry : GZipEntry, IArchiveEntry
{
    internal GZipArchiveEntry(GZipArchive archive, GZipFilePart? part)
        : base(part) => Archive = archive;

    public virtual Stream OpenEntryStream()
    {
        //this is to reset the stream to be read multiple times
        var part = (GZipFilePart)Parts.Single();
        if (part.GetRawStream().Position != part.EntryStartPosition)
        {
            part.GetRawStream().Position = part.EntryStartPosition;
        }
        return Parts.Single().GetCompressedStream().NotNull();
    }

    public virtual Task<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
    {
        // GZip synchronous implementation is fast enough, just wrap it
        return Task.FromResult(OpenEntryStream());
    }

    #region IArchiveEntry Members

    public IArchive Archive { get; }

    public bool IsComplete => true;

    #endregion
}
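
Because GZip inflation is cheap, the async overload above only wraps the synchronous call, yet consumers still get a uniform async surface. A consumption sketch, assuming entry (IArchiveEntry) and token (CancellationToken) are in scope and "out.bin" is a hypothetical destination:

using var output = System.IO.File.Create("out.bin");
using var entryStream = await entry.OpenEntryStreamAsync(token);
await entryStream.CopyToAsync(output, token);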

View File

@@ -1,68 +1,71 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.IO;

namespace SharpCompress.Archives.GZip;

internal sealed class GZipWritableArchiveEntry : GZipArchiveEntry, IWritableArchiveEntry
{
    private readonly bool closeStream;
    private readonly Stream stream;

    internal GZipWritableArchiveEntry(
        GZipArchive archive,
        Stream stream,
        string path,
        long size,
        DateTime? lastModified,
        bool closeStream
    )
        : base(archive, null)
    {
        this.stream = stream;
        Key = path;
        Size = size;
        LastModifiedTime = lastModified;
        this.closeStream = closeStream;
    }

    public override long Crc => 0;

    public override string? Key { get; }

    public override long CompressedSize => 0;

    public override long Size { get; }

    public override DateTime? LastModifiedTime { get; }

    public override DateTime? CreatedTime => null;

    public override DateTime? LastAccessedTime => null;

    public override DateTime? ArchivedTime => null;

    public override bool IsEncrypted => false;

    public override bool IsDirectory => false;

    public override bool IsSplitAfter => false;

    internal override IEnumerable<FilePart> Parts => throw new NotImplementedException();

    Stream IWritableArchiveEntry.Stream => stream;

    public override Stream OpenEntryStream()
    {
        //ensure new stream is at the start, this could be reset
        stream.Seek(0, SeekOrigin.Begin);
        return SharpCompressStream.Create(stream, leaveOpen: true);
    }

    internal override void Close()
    {
        if (closeStream)
        {
            stream.Dispose();
        }
    }
}

View File

@@ -1,49 +1,48 @@
using System;
using System.Collections.Generic;
using SharpCompress.Common;
using SharpCompress.Readers;

namespace SharpCompress.Archives;

public interface IArchive : IDisposable
{
    event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionBegin;
    event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionEnd;

    event EventHandler<CompressedBytesReadEventArgs> CompressedBytesRead;
    event EventHandler<FilePartExtractionBeginEventArgs> FilePartExtractionBegin;

    IEnumerable<IArchiveEntry> Entries { get; }
    IEnumerable<IVolume> Volumes { get; }

    ArchiveType Type { get; }

    /// <summary>
    /// Use this method to extract all entries in an archive in order.
    /// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
    /// extracted sequentially for the best performance.
    /// </summary>
    IReader ExtractAllEntries();

    /// <summary>
    /// Archive is SOLID (this means the Archive saved bytes by reusing information which helps for archives containing many small files).
    /// Rar Archives can be SOLID while all 7Zip archives are considered SOLID.
    /// </summary>
    bool IsSolid { get; }

    /// <summary>
    /// This checks to see if all the known entries have IsComplete = true
    /// </summary>
    bool IsComplete { get; }

    /// <summary>
    /// The total size of the files compressed in the archive.
    /// </summary>
    long TotalSize { get; }

    /// <summary>
    /// The total size of the files as uncompressed in the archive.
    /// </summary>
    long TotalUncompressSize { get; }
}
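
A sketch of the sequential path the ExtractAllEntries summary recommends for SOLID rar/7z archives; MoveToNextEntry and WriteEntryToDirectory come from SharpCompress.Readers, and "archive.7z" is a hypothetical path.

using SharpCompress.Archives;
using SharpCompress.Readers;

using var archive = ArchiveFactory.Open("archive.7z");
using var reader = archive.ExtractAllEntries();
while (reader.MoveToNextEntry())
{
    if (!reader.Entry.IsDirectory)
    {
        reader.WriteEntryToDirectory("/tmp/out");
    }
}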

View File

@@ -1,24 +1,31 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;

namespace SharpCompress.Archives;

public interface IArchiveEntry : IEntry
{
    /// <summary>
    /// Opens the current entry as a stream that will decompress as it is read.
    /// Read the entire stream or use SkipEntry on EntryStream.
    /// </summary>
    Stream OpenEntryStream();

    /// <summary>
    /// Opens the current entry as a stream that will decompress as it is read asynchronously.
    /// Read the entire stream or use SkipEntry on EntryStream.
    /// </summary>
    Task<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default);

    /// <summary>
    /// The archive can find all the parts of the archive needed to extract this entry.
    /// </summary>
    bool IsComplete { get; }

    /// <summary>
    /// The archive instance this entry belongs to
    /// </summary>
    IArchive Archive { get; }
}

View File

@@ -1,63 +1,66 @@
using System.IO;
using SharpCompress.Common;
using SharpCompress.IO;

namespace SharpCompress.Archives;

public static class IArchiveEntryExtensions
{
    public static void WriteTo(this IArchiveEntry archiveEntry, Stream streamToWriteTo)
    {
        if (archiveEntry.IsDirectory)
        {
            throw new ExtractionException("Entry is a file directory and cannot be extracted.");
        }

        var streamListener = (IArchiveExtractionListener)archiveEntry.Archive;
        streamListener.EnsureEntriesLoaded();
        streamListener.FireEntryExtractionBegin(archiveEntry);
        streamListener.FireFilePartExtractionBegin(
            archiveEntry.Key ?? "Key",
            archiveEntry.Size,
            archiveEntry.CompressedSize
        );
        var entryStream = archiveEntry.OpenEntryStream();
        using (entryStream)
        {
            using Stream s = new ListeningStream(streamListener, entryStream);
            s.CopyTo(streamToWriteTo);
        }
        streamListener.FireEntryExtractionEnd(archiveEntry);
    }

    /// <summary>
    /// Extract to specific directory, retaining filename
    /// </summary>
    public static void WriteToDirectory(
        this IArchiveEntry entry,
        string destinationDirectory,
        ExtractionOptions? options = null
    ) =>
        ExtractionMethods.WriteEntryToDirectory(
            entry,
            destinationDirectory,
            options,
            entry.WriteToFile
        );

    /// <summary>
    /// Extract to specific file
    /// </summary>
    public static void WriteToFile(
        this IArchiveEntry entry,
        string destinationFileName,
        ExtractionOptions? options = null
    ) =>
        ExtractionMethods.WriteEntryToFile(
            entry,
            destinationFileName,
            options,
            (x, fm) =>
            {
                using var fs = File.Open(destinationFileName, fm);
                entry.WriteTo(fs);
            }
        );
}

View File

@@ -1,20 +1,85 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using SharpCompress.Common;
using SharpCompress.Readers;

namespace SharpCompress.Archives;

public static class IArchiveExtensions
{
    /// <summary>
    /// Extract to specific directory, retaining filename
    /// </summary>
    public static void WriteToDirectory(
        this IArchive archive,
        string destinationDirectory,
        ExtractionOptions? options = null
    )
    {
        using var reader = archive.ExtractAllEntries();
        reader.WriteAllToDirectory(destinationDirectory, options);
    }

    /// <summary>
    /// Extracts the archive to the destination directory. Directories will be created as needed.
    /// </summary>
    /// <param name="archive">The archive to extract.</param>
    /// <param name="destination">The folder to extract into.</param>
    /// <param name="progressReport">Optional progress report callback.</param>
    /// <param name="cancellationToken">Optional cancellation token.</param>
    public static void ExtractToDirectory(
        this IArchive archive,
        string destination,
        Action<double>? progressReport = null,
        CancellationToken cancellationToken = default
    )
    {
        // Prepare for progress reporting
        var totalBytes = archive.TotalUncompressSize;
        var bytesRead = 0L;

        // Tracking for created directories.
        var seenDirectories = new HashSet<string>();

        // Extract
        foreach (var entry in archive.Entries)
        {
            cancellationToken.ThrowIfCancellationRequested();

            if (entry.IsDirectory)
            {
                var dirPath = Path.Combine(destination, entry.Key.NotNull("Entry Key is null"));
                if (
                    Path.GetDirectoryName(dirPath + "/") is { } emptyDirectory
                    && seenDirectories.Add(dirPath)
                )
                {
                    Directory.CreateDirectory(emptyDirectory);
                }
                continue;
            }

            // Create each directory if not already created
            var path = Path.Combine(destination, entry.Key.NotNull("Entry Key is null"));
            if (Path.GetDirectoryName(path) is { } directory)
            {
                if (!Directory.Exists(directory) && !seenDirectories.Contains(directory))
                {
                    Directory.CreateDirectory(directory);
                    seenDirectories.Add(directory);
                }
            }

            // Write file
            using var fs = File.OpenWrite(path);
            entry.WriteTo(fs);

            // Update progress
            bytesRead += entry.Size;
            progressReport?.Invoke(bytesRead / (double)totalBytes);
        }
    }
}
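
A usage sketch for ExtractToDirectory above (paths hypothetical). Note that bytesRead only advances after an entry finishes, so the reported fraction is coarse for archives with a few large files.

using System;
using System.Threading;
using SharpCompress.Archives;

using var archive = ArchiveFactory.Open("big.rar");
archive.ExtractToDirectory(
    "/tmp/out",
    progressReport: p => Console.WriteLine($"{p:P0} extracted"),
    cancellationToken: CancellationToken.None
);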

View File

@@ -1,11 +1,10 @@
using SharpCompress.Common;

namespace SharpCompress.Archives;

internal interface IArchiveExtractionListener : IExtractionListener
{
    void EnsureEntriesLoaded();
    void FireEntryExtractionBegin(IArchiveEntry entry);
    void FireEntryExtractionEnd(IArchiveEntry entry);
}

View File

@@ -0,0 +1,35 @@
using System.IO;
using SharpCompress.Factories;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
/// <summary>
/// Represents a factory used to identify and open archives.
/// </summary>
/// <remarks>
/// Currently implemented by:<br/>
/// <list type="table">
/// <item><see cref="TarFactory"/></item>
/// <item><see cref="RarFactory"/></item>
/// <item><see cref="ZipFactory"/></item>
/// <item><see cref="GZipFactory"/></item>
/// <item><see cref="SevenZipFactory"/></item>
/// </list>
/// </remarks>
public interface IArchiveFactory : IFactory
{
/// <summary>
/// Opens an Archive for random access.
/// </summary>
/// <param name="stream">An open, readable and seekable stream.</param>
/// <param name="readerOptions">reading options.</param>
IArchive Open(Stream stream, ReaderOptions? readerOptions = null);
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo">the file to open.</param>
/// <param name="readerOptions">reading options.</param>
IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null);
}

View File

@@ -0,0 +1,36 @@
using System.Collections.Generic;
using System.IO;
using SharpCompress.Factories;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
/// <summary>
/// Represents a factory used to identify and open archives.
/// </summary>
/// <remarks>
/// Implemented by:<br/>
/// <list type="table">
/// <item><see cref="TarFactory"/></item>
/// <item><see cref="RarFactory"/></item>
/// <item><see cref="ZipFactory"/></item>
/// <item><see cref="GZipFactory"/></item>
/// <item><see cref="SevenZipFactory"/></item>
/// </list>
/// </remarks>
public interface IMultiArchiveFactory : IFactory
{
/// <summary>
/// Constructor with IEnumerable Stream objects, multi and split support.
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions">reading options.</param>
IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null);
/// <summary>
/// Constructor with IEnumerable FileInfo objects, multi and split support.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions">reading options.</param>
IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null);
}
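
A sketch of opening a split archive through this interface; the factory choice and file names are hypothetical, and an array satisfies IReadOnlyList&lt;FileInfo&gt;.

using System.IO;
using SharpCompress.Archives;
using SharpCompress.Readers;

IMultiArchiveFactory factory = new SharpCompress.Factories.ZipFactory();
var parts = new[] { new FileInfo("data.zip.001"), new FileInfo("data.zip.002") };
using var archive = factory.Open(parts, new ReaderOptions());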

View File

@@ -2,20 +2,25 @@ using System;
using System.IO;
using SharpCompress.Writers;
namespace SharpCompress.Archives;

public interface IWritableArchive : IArchive
{
    void RemoveEntry(IArchiveEntry entry);

    IArchiveEntry AddEntry(
        string key,
        Stream source,
        bool closeStream,
        long size = 0,
        DateTime? modified = null
    );

    void SaveTo(Stream stream, WriterOptions options);

    /// <summary>
    /// Use this to pause entry rebuilding when adding large collections of entries. Dispose when complete. A using statement is recommended.
    /// </summary>
    /// <returns>IDisposable to resume entry rebuilding</returns>
    IDisposable PauseEntryRebuilding();
}

View File

@@ -1,9 +1,8 @@
using System.IO;

namespace SharpCompress.Archives;

internal interface IWritableArchiveEntry
{
    Stream Stream { get; }
}

View File

@@ -2,56 +2,85 @@
using System.IO;
using SharpCompress.Writers;
namespace SharpCompress.Archives;

public static class IWritableArchiveExtensions
{
    public static void AddEntry(
        this IWritableArchive writableArchive,
        string entryPath,
        string filePath
    )
    {
        var fileInfo = new FileInfo(filePath);
        if (!fileInfo.Exists)
        {
            throw new FileNotFoundException("Could not AddEntry: " + filePath);
        }
        writableArchive.AddEntry(
            entryPath,
            new FileInfo(filePath).OpenRead(),
            true,
            fileInfo.Length,
            fileInfo.LastWriteTime
        );
    }

    public static void SaveTo(
        this IWritableArchive writableArchive,
        string filePath,
        WriterOptions options
    ) => writableArchive.SaveTo(new FileInfo(filePath), options);

    public static void SaveTo(
        this IWritableArchive writableArchive,
        FileInfo fileInfo,
        WriterOptions options
    )
    {
        using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
        writableArchive.SaveTo(stream, options);
    }

    public static void AddAllFromDirectory(
        this IWritableArchive writableArchive,
        string filePath,
        string searchPattern = "*.*",
        SearchOption searchOption = SearchOption.AllDirectories
    )
    {
        using (writableArchive.PauseEntryRebuilding())
        {
            foreach (var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption))
            {
                var fileInfo = new FileInfo(path);
                writableArchive.AddEntry(
                    path.Substring(filePath.Length),
                    fileInfo.OpenRead(),
                    true,
                    fileInfo.Length,
                    fileInfo.LastWriteTime
                );
            }
        }
    }

    public static IArchiveEntry AddEntry(
        this IWritableArchive writableArchive,
        string key,
        FileInfo fileInfo
    )
    {
        if (!fileInfo.Exists)
        {
            throw new ArgumentException("FileInfo does not exist.");
        }
        return writableArchive.AddEntry(
            key,
            fileInfo.OpenRead(),
            true,
            fileInfo.Length,
            fileInfo.LastWriteTime
        );
    }
}
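
A sketch combining the helpers above; AddAllFromDirectory already wraps its loop in PauseEntryRebuilding, so the entry list is rebuilt once rather than per file (paths hypothetical).

using SharpCompress.Archives;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.Writers;

using var archive = ZipArchive.Create();
archive.AddAllFromDirectory("/tmp/input");
archive.SaveTo("/tmp/out.zip", new WriterOptions(CompressionType.Deflate));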

View File

@@ -0,0 +1,20 @@
namespace SharpCompress.Archives;
/// <summary>
/// Decorator for <see cref="Factories.Factory"/> used to declare an archive format as able to create writeable archives
/// </summary>
/// <remarks>
/// Implemented by:<br/>
/// <list type="table">
/// <item><see cref="Factories.TarFactory"/></item>
/// <item><see cref="Factories.ZipFactory"/></item>
/// <item><see cref="Factories.GZipFactory"/></item>
/// </list>
/// </remarks>
public interface IWriteableArchiveFactory : Factories.IFactory
{
/// <summary>
/// Creates a new, empty archive, ready to be written.
/// </summary>
/// <returns></returns>
IWritableArchive CreateWriteableArchive();
}

View File

@@ -1,4 +1,5 @@
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Linq;
using SharpCompress.Common.Rar;
@@ -6,39 +7,33 @@ using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
using SharpCompress.Readers;

namespace SharpCompress.Archives.Rar;

/// <summary>
/// A rar part based on a FileInfo object
/// </summary>
internal class FileInfoRarArchiveVolume : RarVolume
{
    internal FileInfoRarArchiveVolume(FileInfo fileInfo, ReaderOptions options, int index)
        : base(StreamingMode.Seekable, fileInfo.OpenRead(), FixOptions(options), index)
    {
        FileInfo = fileInfo;
        FileParts = GetVolumeFileParts().ToArray().ToReadOnly();
    }

    private static ReaderOptions FixOptions(ReaderOptions options)
    {
        //make sure we're closing streams with fileinfo
        options.LeaveStreamOpen = false;
        return options;
    }

    internal ReadOnlyCollection<RarFilePart> FileParts { get; }

    internal FileInfo FileInfo { get; }

    internal override RarFilePart CreateFilePart(MarkHeader markHeader, FileHeader fileHeader) =>
        new FileInfoRarFilePart(this, ReaderOptions.Password, markHeader, fileHeader, FileInfo);

    internal override IEnumerable<RarFilePart> ReadFileParts() => FileParts;
}

View File

@@ -1,25 +1,21 @@
using System.IO;
using SharpCompress.Common.Rar.Headers;

namespace SharpCompress.Archives.Rar;

internal sealed class FileInfoRarFilePart : SeekableFilePart
{
    internal FileInfoRarFilePart(
        FileInfoRarArchiveVolume volume,
        string? password,
        MarkHeader mh,
        FileHeader fh,
        FileInfo fi
    )
        : base(mh, fh, volume.Index, volume.Stream, password) => FileInfo = fi;

    internal FileInfo FileInfo { get; }

    internal override string FilePartName =>
        "Rar File: " + FileInfo.FullName + " File Entry: " + FileHeader.FileName;
}

View File

@@ -1,23 +1,18 @@
using System.Linq;

namespace SharpCompress.Archives.Rar;

public static class RarArchiveExtensions
{
    /// <summary>
    /// RarArchive is the first volume of a multi-part archive. If MultipartVolume is true and IsFirstVolume is false then the first volume file must be missing.
    /// </summary>
    public static bool IsFirstVolume(this RarArchive archive) =>
        archive.Volumes.First().IsFirstVolume;

    /// <summary>
    /// RarArchive is part of a multi-part archive.
    /// </summary>
    public static bool IsMultipartVolume(this RarArchive archive) =>
        archive.Volumes.First().IsMultiVolume;
}
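
A sketch of the guard these extensions enable: multipart-but-not-first means earlier volumes are missing ("data.part02.rar" is a hypothetical path).

using System;
using SharpCompress.Archives.Rar;

using var archive = RarArchive.Open("data.part02.rar");
if (archive.IsMultipartVolume() && !archive.IsFirstVolume())
{
    throw new InvalidOperationException(
        "Earlier volumes are missing; open the .part01 file instead.");
}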

View File

@@ -1,3 +1,4 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
@@ -9,145 +10,199 @@ using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.Rar;
namespace SharpCompress.Archives.Rar;

public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
{
    private bool _disposed;

    internal Lazy<IRarUnpack> UnpackV2017 { get; } =
        new(() => new Compressors.Rar.UnpackV2017.Unpack());

    internal Lazy<IRarUnpack> UnpackV1 { get; } = new(() => new Compressors.Rar.UnpackV1.Unpack());

    /// <summary>
    /// Constructor with a SourceStream able to handle FileInfo and Streams.
    /// </summary>
    /// <param name="sourceStream"></param>
    private RarArchive(SourceStream sourceStream)
        : base(ArchiveType.Rar, sourceStream) { }

    public override void Dispose()
    {
        if (!_disposed)
        {
            if (UnpackV1.IsValueCreated && UnpackV1.Value is IDisposable unpackV1)
            {
                unpackV1.Dispose();
            }
            _disposed = true;
        }
        base.Dispose();
    }

    protected override IEnumerable<RarArchiveEntry> LoadEntries(IEnumerable<RarVolume> volumes) =>
        RarArchiveEntryFactory.GetEntries(this, volumes, ReaderOptions);

    protected override IEnumerable<RarVolume> LoadVolumes(SourceStream sourceStream)
    {
        sourceStream.LoadAllParts(); //request all streams
        var streams = sourceStream.Streams.ToArray();
        var i = 0;
        if (streams.Length > 1 && IsRarFile(streams[1], ReaderOptions)) //test part 2 - true = multipart not split
        {
            sourceStream.IsVolumes = true;
            streams[1].Position = 0;
            sourceStream.Position = 0;
            return sourceStream.Streams.Select(a => new StreamRarArchiveVolume(
                a,
                ReaderOptions,
                i++
            ));
        }
        //split mode or single file
        return new StreamRarArchiveVolume(sourceStream, ReaderOptions, i++).AsEnumerable();
    }

    protected override IReader CreateReaderForSolidExtraction()
    {
        if (this.IsMultipartVolume())
        {
            var streams = Volumes.Select(volume =>
            {
                volume.Stream.Position = 0;
                return volume.Stream;
            });
            return RarReader.Open(streams, ReaderOptions);
        }
        var stream = Volumes.First().Stream;
        stream.Position = 0;
        return RarReader.Open(stream, ReaderOptions);
    }

    public override bool IsSolid => Volumes.First().IsSolidArchive;
    public virtual int MinVersion => Volumes.First().MinVersion;
    public virtual int MaxVersion => Volumes.First().MaxVersion;

    #region Creation

    /// <summary>
    /// Constructor with a FileInfo object to an existing file.
    /// </summary>
    /// <param name="filePath"></param>
    /// <param name="options"></param>
    public static RarArchive Open(string filePath, ReaderOptions? options = null)
    {
        filePath.NotNullOrEmpty(nameof(filePath));
        var fileInfo = new FileInfo(filePath);
        return new RarArchive(
            new SourceStream(
                fileInfo,
                i => RarArchiveVolumeFactory.GetFilePart(i, fileInfo),
                options ?? new ReaderOptions()
            )
        );
    }

    /// <summary>
    /// Constructor with a FileInfo object to an existing file.
    /// </summary>
    /// <param name="fileInfo"></param>
    /// <param name="options"></param>
    public static RarArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
    {
        fileInfo.NotNull(nameof(fileInfo));
        return new RarArchive(
            new SourceStream(
                fileInfo,
                i => RarArchiveVolumeFactory.GetFilePart(i, fileInfo),
                options ?? new ReaderOptions()
            )
        );
    }

    /// <summary>
    /// Takes a seekable Stream as a source
    /// </summary>
    /// <param name="stream"></param>
    /// <param name="options"></param>
    public static RarArchive Open(Stream stream, ReaderOptions? options = null)
    {
        stream.NotNull(nameof(stream));
        if (stream is not { CanSeek: true })
        {
            throw new ArgumentException("Stream must be seekable", nameof(stream));
        }
        return new RarArchive(new SourceStream(stream, _ => null, options ?? new ReaderOptions()));
    }

    /// <summary>
    /// Constructor with all file parts passed in
    /// </summary>
    /// <param name="fileInfos"></param>
    /// <param name="readerOptions"></param>
    public static RarArchive Open(
        IEnumerable<FileInfo> fileInfos,
        ReaderOptions? readerOptions = null
    )
    {
        fileInfos.NotNull(nameof(fileInfos));
        var files = fileInfos.ToArray();
        return new RarArchive(
            new SourceStream(
                files[0],
                i => i < files.Length ? files[i] : null,
                readerOptions ?? new ReaderOptions()
            )
        );
    }

    /// <summary>
    /// Constructor with all stream parts passed in
    /// </summary>
    /// <param name="streams"></param>
    /// <param name="readerOptions"></param>
    public static RarArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
    {
        streams.NotNull(nameof(streams));
        var strms = streams.ToArray();
        return new RarArchive(
            new SourceStream(
                strms[0],
                i => i < strms.Length ? strms[i] : null,
                readerOptions ?? new ReaderOptions()
            )
        );
    }

    public static bool IsRarFile(string filePath) => IsRarFile(new FileInfo(filePath));

    public static bool IsRarFile(FileInfo fileInfo)
    {
        if (!fileInfo.Exists)
        {
            return false;
        }
        using Stream stream = fileInfo.OpenRead();
        return IsRarFile(stream);
    }

    public static bool IsRarFile(Stream stream, ReaderOptions? options = null)
    {
        try
        {
            MarkHeader.Read(stream, true, false);
            return true;
        }
        catch
        {
            return false;
        }
    }

    #endregion
}

View File

@@ -2,88 +2,109 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Compressors.Rar;
using SharpCompress.Readers;
namespace SharpCompress.Archives.Rar;

public class RarArchiveEntry : RarEntry, IArchiveEntry
{
    private readonly ICollection<RarFilePart> parts;
    private readonly RarArchive archive;
    private readonly ReaderOptions readerOptions;

    internal RarArchiveEntry(
        RarArchive archive,
        IEnumerable<RarFilePart> parts,
        ReaderOptions readerOptions
    )
    {
        this.parts = parts.ToList();
        this.archive = archive;
        this.readerOptions = readerOptions;
        IsSolid = FileHeader.IsSolid;
    }

    public override CompressionType CompressionType => CompressionType.Rar;

    public IArchive Archive => archive;

    internal override IEnumerable<FilePart> Parts => parts.Cast<FilePart>();

    internal override FileHeader FileHeader => parts.First().FileHeader;

    public override long Crc
    {
        get
        {
            CheckIncomplete();
            return BitConverter.ToUInt32(
                parts.Select(fp => fp.FileHeader).Single(fh => !fh.IsSplitAfter).FileCrc.NotNull(),
                0
            );
        }
    }

    public override long Size
    {
        get
        {
            CheckIncomplete();
            return parts.First().FileHeader.UncompressedSize;
        }
    }

    public override long CompressedSize
    {
        get
        {
            CheckIncomplete();
            return parts.Aggregate(0L, (total, fp) => total + fp.FileHeader.CompressedSize);
        }
    }

    public Stream OpenEntryStream()
    {
        if (IsRarV3)
        {
            return new RarStream(
                archive.UnpackV1.Value,
                FileHeader,
                new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)
            );
        }
        return new RarStream(
            archive.UnpackV2017.Value,
            FileHeader,
            new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)
        );
    }

    public Task<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default) =>
        Task.FromResult(OpenEntryStream());

    public bool IsComplete
    {
        get
        {
            var headers = parts.Select(x => x.FileHeader);
            return !headers.First().IsSplitBefore && !headers.Last().IsSplitAfter;
        }
    }

    private void CheckIncomplete()
    {
        if (!readerOptions.DisableCheckIncomplete && !IsComplete)
        {
            throw new IncompleteArchiveException(
                "ArchiveEntry is incomplete and cannot perform this operation."
            );
        }
    }
}

View File

@@ -1,49 +1,52 @@
using System.Collections.Generic;
using SharpCompress.Common.Rar;
using SharpCompress.Readers;

namespace SharpCompress.Archives.Rar;

internal static class RarArchiveEntryFactory
{
    private static IEnumerable<RarFilePart> GetFileParts(IEnumerable<RarVolume> parts)
    {
        foreach (var rarPart in parts)
        {
            foreach (var fp in rarPart.ReadFileParts())
            {
                yield return fp;
            }
        }
    }

    private static IEnumerable<IEnumerable<RarFilePart>> GetMatchedFileParts(
        IEnumerable<RarVolume> parts
    )
    {
        var groupedParts = new List<RarFilePart>();
        foreach (var fp in GetFileParts(parts))
        {
            groupedParts.Add(fp);

            if (!fp.FileHeader.IsSplitAfter)
            {
                yield return groupedParts;
                groupedParts = new List<RarFilePart>();
            }
        }
        if (groupedParts.Count > 0)
        {
            yield return groupedParts;
        }
    }

    internal static IEnumerable<RarArchiveEntry> GetEntries(
        RarArchive archive,
        IEnumerable<RarVolume> rarParts,
        ReaderOptions readerOptions
    )
    {
        foreach (var groupedParts in GetMatchedFileParts(rarParts))
        {
            yield return new RarArchiveEntry(archive, groupedParts, readerOptions);
        }
    }
}

View File

@@ -1,40 +1,52 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Readers;

namespace SharpCompress.Archives.Rar;

internal static class RarArchiveVolumeFactory
{
    internal static FileInfo? GetFilePart(int index, FileInfo part1) //base the name on the first part
    {
        FileInfo? item = null;
        //new style rar - ..part1 | /part01 | part001 ....
        var m = Regex.Match(part1.Name, @"^(.*\.part)([0-9]+)(\.rar)$", RegexOptions.IgnoreCase);
        if (m.Success)
            item = new FileInfo(
                Path.Combine(
                    part1.DirectoryName!,
                    String.Concat(
                        m.Groups[1].Value,
                        (index + 1).ToString().PadLeft(m.Groups[2].Value.Length, '0'),
                        m.Groups[3].Value
                    )
                )
            );
        else
        {
            //old style - ...rar, .r00, .r01 ...
            m = Regex.Match(part1.Name, @"^(.*\.)([r-z{])(ar|[0-9]+)$", RegexOptions.IgnoreCase);
            if (m.Success)
                item = new FileInfo(
                    Path.Combine(
                        part1.DirectoryName!,
                        String.Concat(
                            m.Groups[1].Value,
                            index == 0
                                ? m.Groups[2].Value + m.Groups[3].Value
                                : (char)(m.Groups[2].Value[0] + ((index - 1) / 100))
                                    + (index - 1).ToString("D4").Substring(2)
                        )
                    )
                );
            else //split .001, .002 ....
                return ArchiveVolumeFactory.GetFilePart(index, part1);
        }

        if (item != null && item.Exists)
            return item;

        return null; //no more items
    }
}
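An illustrative walk of the "new style" branch above, using nothing beyond the regex shown: for index 1 the factory derives "archive.part02.rar", preserving the zero-padding width of the first part.

using System;
using System.Text.RegularExpressions;

var m = Regex.Match("archive.part01.rar", @"^(.*\.part)([0-9]+)(\.rar)$", RegexOptions.IgnoreCase);
var index = 1; // 0-based; part 1 itself is index 0
var next = m.Groups[1].Value
    + (index + 1).ToString().PadLeft(m.Groups[2].Value.Length, '0')
    + m.Groups[3].Value;
Console.WriteLine(next); // archive.part02.rar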

View File

@@ -1,31 +1,45 @@
using System.IO;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;

namespace SharpCompress.Archives.Rar;

internal class SeekableFilePart : RarFilePart
{
    private readonly Stream _stream;
    private readonly string? _password;

    internal SeekableFilePart(
        MarkHeader mh,
        FileHeader fh,
        int index,
        Stream stream,
        string? password
    )
        : base(mh, fh, index)
    {
        _stream = stream;
        _password = password;
    }

    internal override Stream GetCompressedStream()
    {
        _stream.Position = FileHeader.DataStartPosition;
        if (FileHeader.R4Salt != null)
        {
            var cryptKey = new CryptKey3(_password!);
            return new RarCryptoWrapper(_stream, FileHeader.R4Salt, cryptKey);
        }
        if (FileHeader.Rar5CryptoInfo != null)
        {
            var cryptKey = new CryptKey5(_password!, FileHeader.Rar5CryptoInfo);
            return new RarCryptoWrapper(_stream, FileHeader.Rar5CryptoInfo.Salt, cryptKey);
        }
        return _stream;
    }

    internal override string FilePartName => "Unknown Stream - File Entry: " + FileHeader.FileName;
}
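The password consumed by CryptKey3 (RAR4, R4Salt) or CryptKey5 (RAR5, Rar5CryptoInfo) above enters through ReaderOptions; a minimal sketch with a hypothetical archive name:

using SharpCompress.Archives.Rar;
using SharpCompress.Readers;

// The right key class is selected per file header, as shown above.
using var archive = RarArchive.Open("secret.rar", new ReaderOptions { Password = "p@ssw0rd" });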

View File

@@ -1,27 +1,19 @@
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
using SharpCompress.Readers;

namespace SharpCompress.Archives.Rar;

internal class StreamRarArchiveVolume : RarVolume
{
    internal StreamRarArchiveVolume(Stream stream, ReaderOptions options, int index)
        : base(StreamingMode.Seekable, stream, options, index) { }

    internal override IEnumerable<RarFilePart> ReadFileParts() => GetVolumeFileParts();

    internal override RarFilePart CreateFilePart(MarkHeader markHeader, FileHeader fileHeader) =>
        new SeekableFilePart(markHeader, fileHeader, Index, Stream, ReaderOptions.Password);
}

View File

@@ -1,5 +1,3 @@
using System;
using System.Collections.Generic;
using System.IO;
@@ -10,238 +8,266 @@ using SharpCompress.Compressors.LZMA.Utilites;
using SharpCompress.IO;
using SharpCompress.Readers;

namespace SharpCompress.Archives.SevenZip;

public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVolume>
{
    private ArchiveDatabase? _database;

    /// <summary>
    /// Constructor expects a filepath to an existing file.
    /// </summary>
    /// <param name="filePath"></param>
    /// <param name="readerOptions"></param>
    public static SevenZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
    {
        filePath.NotNullOrEmpty("filePath");
        return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
    }

    /// <summary>
    /// Constructor with a FileInfo object to an existing file.
    /// </summary>
    /// <param name="fileInfo"></param>
    /// <param name="readerOptions"></param>
    public static SevenZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
    {
        fileInfo.NotNull("fileInfo");
        return new SevenZipArchive(
            new SourceStream(
                fileInfo,
                i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
                readerOptions ?? new ReaderOptions()
            )
        );
    }

    /// <summary>
    /// Constructor with all file parts passed in
    /// </summary>
    /// <param name="fileInfos"></param>
    /// <param name="readerOptions"></param>
    public static SevenZipArchive Open(
        IEnumerable<FileInfo> fileInfos,
        ReaderOptions? readerOptions = null
    )
    {
        fileInfos.NotNull(nameof(fileInfos));
        var files = fileInfos.ToArray();
        return new SevenZipArchive(
            new SourceStream(
                files[0],
                i => i < files.Length ? files[i] : null,
                readerOptions ?? new ReaderOptions()
            )
        );
    }

    /// <summary>
    /// Constructor with all stream parts passed in
    /// </summary>
    /// <param name="streams"></param>
    /// <param name="readerOptions"></param>
    public static SevenZipArchive Open(
        IEnumerable<Stream> streams,
        ReaderOptions? readerOptions = null
    )
    {
        streams.NotNull(nameof(streams));
        var strms = streams.ToArray();
        return new SevenZipArchive(
            new SourceStream(
                strms[0],
                i => i < strms.Length ? strms[i] : null,
                readerOptions ?? new ReaderOptions()
            )
        );
    }

    /// <summary>
    /// Takes a seekable Stream as a source
    /// </summary>
    /// <param name="stream"></param>
    /// <param name="readerOptions"></param>
    public static SevenZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
    {
        stream.NotNull("stream");
        if (stream is not { CanSeek: true })
        {
            throw new ArgumentException("Stream must be seekable", nameof(stream));
        }
        return new SevenZipArchive(
            new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
        );
    }

    /// <summary>
    /// Constructor with a SourceStream able to handle FileInfo and Streams.
    /// </summary>
    /// <param name="sourceStream"></param>
    private SevenZipArchive(SourceStream sourceStream)
        : base(ArchiveType.SevenZip, sourceStream) { }

    protected override IEnumerable<SevenZipVolume> LoadVolumes(SourceStream sourceStream)
    {
        sourceStream.NotNull("SourceStream is null").LoadAllParts(); //request all streams
        return new SevenZipVolume(sourceStream, ReaderOptions, 0).AsEnumerable(); //simple single volume or split, multivolume not supported
    }

    public static bool IsSevenZipFile(string filePath) => IsSevenZipFile(new FileInfo(filePath));

    public static bool IsSevenZipFile(FileInfo fileInfo)
    {
        if (!fileInfo.Exists)
        {
            return false;
        }
        using Stream stream = fileInfo.OpenRead();
        return IsSevenZipFile(stream);
    }

    internal SevenZipArchive()
        : base(ArchiveType.SevenZip) { }

    protected override IEnumerable<SevenZipArchiveEntry> LoadEntries(
        IEnumerable<SevenZipVolume> volumes
    )
    {
        var stream = volumes.Single().Stream;
        LoadFactory(stream);
        if (_database is null)
        {
            return Enumerable.Empty<SevenZipArchiveEntry>();
        }
        var entries = new SevenZipArchiveEntry[_database._files.Count];
        for (var i = 0; i < _database._files.Count; i++)
        {
            var file = _database._files[i];
            entries[i] = new SevenZipArchiveEntry(
                this,
                new SevenZipFilePart(stream, _database, i, file, ReaderOptions.ArchiveEncoding)
            );
        }
        foreach (var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder))
        {
            var isSolid = false;
            foreach (var entry in group)
            {
                entry.IsSolid = isSolid;
                isSolid = true; //mark others in this group as solid - same as rar behaviour.
            }
        }
        return entries;
    }

    private void LoadFactory(Stream stream)
    {
        if (_database is null)
        {
            stream.Position = 0;
            var reader = new ArchiveReader();
            reader.Open(stream, lookForHeader: ReaderOptions.LookForHeader);
            _database = reader.ReadDatabase(new PasswordProvider(ReaderOptions.Password));
        }
    }

    public static bool IsSevenZipFile(Stream stream)
    {
        try
        {
            return SignatureMatch(stream);
        }
        catch
        {
            return false;
        }
    }

    private static ReadOnlySpan<byte> Signature =>
        new byte[] { (byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C };

    private static bool SignatureMatch(Stream stream)
    {
        var reader = new BinaryReader(stream);
        ReadOnlySpan<byte> signatureBytes = reader.ReadBytes(6);
        return signatureBytes.SequenceEqual(Signature);
    }

    protected override IReader CreateReaderForSolidExtraction() =>
        new SevenZipReader(ReaderOptions, this);

    public override bool IsSolid =>
        Entries
            .Where(x => !x.IsDirectory)
            .GroupBy(x => x.FilePart.Folder)
            .Any(folder => folder.Count() > 1);

    public override long TotalSize =>
        _database?._packSizes.Aggregate(0L, (total, packSize) => total + packSize) ?? 0;

    private sealed class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
    {
        private readonly SevenZipArchive _archive;
        private CFolder? _currentFolder;
        private Stream? _currentStream;
        private CFileItem? _currentItem;

        internal SevenZipReader(ReaderOptions readerOptions, SevenZipArchive archive)
            : base(readerOptions, ArchiveType.SevenZip) => this._archive = archive;

        public override SevenZipVolume Volume => _archive.Volumes.Single();

        protected override IEnumerable<SevenZipEntry> GetEntries(Stream stream)
        {
            var entries = _archive.Entries.ToList();
            stream.Position = 0;
            foreach (var dir in entries.Where(x => x.IsDirectory))
            {
                yield return dir;
            }
            foreach (
                var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder)
            )
            {
                _currentFolder = group.Key;
                if (group.Key is null)
                {
                    _currentStream = Stream.Null;
                }
                else
                {
                    _currentStream = _archive._database?.GetFolderStream(
                        stream,
                        _currentFolder,
                        new PasswordProvider(Options.Password)
                    );
                }
                foreach (var entry in group)
                {
                    _currentItem = entry.FilePart.Header;
                    yield return entry;
                }
            }
        }

        protected override EntryStream GetEntryStream() =>
            CreateEntryStream(
                new ReadOnlySubStream(
                    _currentStream.NotNull("currentStream is not null"),
                    _currentItem?.Size ?? 0
                )
            );
    }

    private class PasswordProvider : IPasswordProvider
    {
        private readonly string? _password;

        public PasswordProvider(string? password) => _password = password;

        public string? CryptoGetTextPassword() => _password;
    }
}
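Why CreateReaderForSolidExtraction exists: in a solid 7z, random access re-decompresses a whole folder per entry, while a forward-only reader makes a single pass. A sketch under that assumption (hypothetical paths):

using SharpCompress.Archives;
using SharpCompress.Archives.SevenZip;
using SharpCompress.Readers;

using var archive = SevenZipArchive.Open("bundle.7z");
if (archive.IsSolid)
{
    using var reader = archive.ExtractAllEntries(); // forward-only pass over each folder
    while (reader.MoveToNextEntry())
    {
        if (!reader.Entry.IsDirectory)
        {
            reader.WriteEntryToDirectory("out"); // hypothetical output directory
        }
    }
}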

View File

@@ -1,28 +1,26 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.SevenZip;

namespace SharpCompress.Archives.SevenZip;

public class SevenZipArchiveEntry : SevenZipEntry, IArchiveEntry
{
    internal SevenZipArchiveEntry(SevenZipArchive archive, SevenZipFilePart part)
        : base(part) => Archive = archive;

    public Stream OpenEntryStream() => FilePart.GetCompressedStream();

    public Task<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default) =>
        Task.FromResult(OpenEntryStream());

    public IArchive Archive { get; }

    public bool IsComplete => true;

    /// <summary>
    /// This is a 7Zip Anti item
    /// </summary>
    public bool IsAnti => FilePart.Header.IsAnti;
}
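A minimal sketch of the new async surface (the helper type is hypothetical): OpenEntryStreamAsync is a Task.FromResult wrapper today, so this completes synchronously but keeps call sites ready for a genuinely async implementation.

using System.IO;
using System.Threading.Tasks;
using SharpCompress.Archives.SevenZip;

internal static class EntryCopier
{
    public static async Task CopyEntryAsync(SevenZipArchiveEntry entry, Stream destination)
    {
        // Completes synchronously for now; see Task.FromResult above.
        // (await using requires a runtime where Stream implements IAsyncDisposable.)
        await using var source = await entry.OpenEntryStreamAsync();
        await source.CopyToAsync(destination);
    }
}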

View File

@@ -11,197 +11,241 @@ using SharpCompress.Readers.Tar;
using SharpCompress.Writers;
using SharpCompress.Writers.Tar;

namespace SharpCompress.Archives.Tar;

public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
{
    /// <summary>
    /// Constructor expects a filepath to an existing file.
    /// </summary>
    /// <param name="filePath"></param>
    /// <param name="readerOptions"></param>
    public static TarArchive Open(string filePath, ReaderOptions? readerOptions = null)
    {
        filePath.NotNullOrEmpty(nameof(filePath));
        return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
    }

    /// <summary>
    /// Constructor with a FileInfo object to an existing file.
    /// </summary>
    /// <param name="fileInfo"></param>
    /// <param name="readerOptions"></param>
    public static TarArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
    {
        fileInfo.NotNull(nameof(fileInfo));
        return new TarArchive(
            new SourceStream(
                fileInfo,
                i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
                readerOptions ?? new ReaderOptions()
            )
        );
    }

    /// <summary>
    /// Constructor with all file parts passed in
    /// </summary>
    /// <param name="fileInfos"></param>
    /// <param name="readerOptions"></param>
    public static TarArchive Open(
        IEnumerable<FileInfo> fileInfos,
        ReaderOptions? readerOptions = null
    )
    {
        fileInfos.NotNull(nameof(fileInfos));
        var files = fileInfos.ToArray();
        return new TarArchive(
            new SourceStream(
                files[0],
                i => i < files.Length ? files[i] : null,
                readerOptions ?? new ReaderOptions()
            )
        );
    }

    /// <summary>
    /// Constructor with all stream parts passed in
    /// </summary>
    /// <param name="streams"></param>
    /// <param name="readerOptions"></param>
    public static TarArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
    {
        streams.NotNull(nameof(streams));
        var strms = streams.ToArray();
        return new TarArchive(
            new SourceStream(
                strms[0],
                i => i < strms.Length ? strms[i] : null,
                readerOptions ?? new ReaderOptions()
            )
        );
    }

    /// <summary>
    /// Takes a seekable Stream as a source
    /// </summary>
    /// <param name="stream"></param>
    /// <param name="readerOptions"></param>
    public static TarArchive Open(Stream stream, ReaderOptions? readerOptions = null)
    {
        stream.NotNull(nameof(stream));
        if (stream is not { CanSeek: true })
        {
            throw new ArgumentException("Stream must be seekable", nameof(stream));
        }
        return new TarArchive(
            new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
        );
    }

    public static bool IsTarFile(string filePath) => IsTarFile(new FileInfo(filePath));

    public static bool IsTarFile(FileInfo fileInfo)
    {
        if (!fileInfo.Exists)
        {
            return false;
        }
        using Stream stream = fileInfo.OpenRead();
        return IsTarFile(stream);
    }

    public static bool IsTarFile(Stream stream)
    {
        try
        {
            var tarHeader = new TarHeader(new ArchiveEncoding());
            var readSucceeded = tarHeader.Read(new BinaryReader(stream));
            var isEmptyArchive =
                tarHeader.Name?.Length == 0
                && tarHeader.Size == 0
                && Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
            return readSucceeded || isEmptyArchive;
        }
        catch { }
        return false;
    }

    protected override IEnumerable<TarVolume> LoadVolumes(SourceStream sourceStream)
    {
        sourceStream.NotNull("SourceStream is null").LoadAllParts(); //request all streams
        return new TarVolume(sourceStream, ReaderOptions, 1).AsEnumerable(); //simple single volume or split, multivolume not supported
    }

    /// <summary>
    /// Constructor with a SourceStream able to handle FileInfo and Streams.
    /// </summary>
    /// <param name="sourceStream"></param>
    private TarArchive(SourceStream sourceStream)
        : base(ArchiveType.Tar, sourceStream) { }

    private TarArchive()
        : base(ArchiveType.Tar) { }

    protected override IEnumerable<TarArchiveEntry> LoadEntries(IEnumerable<TarVolume> volumes)
    {
        var stream = volumes.Single().Stream;
        TarHeader? previousHeader = null;
        foreach (
            var header in TarHeaderFactory.ReadHeader(
                StreamingMode.Seekable,
                stream,
                ReaderOptions.ArchiveEncoding
            )
        )
        {
            if (header != null)
            {
                if (header.EntryType == EntryType.LongName)
                {
                    previousHeader = header;
                }
                else
                {
                    if (previousHeader != null)
                    {
                        var entry = new TarArchiveEntry(
                            this,
                            new TarFilePart(previousHeader, stream),
                            CompressionType.None
                        );
                        var oldStreamPos = stream.Position;
                        using (var entryStream = entry.OpenEntryStream())
                        {
                            using var memoryStream = new MemoryStream();
                            entryStream.CopyTo(memoryStream);
                            memoryStream.Position = 0;
                            var bytes = memoryStream.ToArray();
                            header.Name = ReaderOptions.ArchiveEncoding.Decode(bytes).TrimNulls();
                        }
                        stream.Position = oldStreamPos;
                        previousHeader = null;
                    }
                    yield return new TarArchiveEntry(
                        this,
                        new TarFilePart(header, stream),
                        CompressionType.None
                    );
                }
            }
            else
            {
                throw new IncompleteArchiveException("Failed to read TAR header");
            }
        }
    }

    public static TarArchive Create() => new();

    protected override TarArchiveEntry CreateEntryInternal(
        string filePath,
        Stream source,
        long size,
        DateTime? modified,
        bool closeStream
    ) =>
        new TarWritableArchiveEntry(
            this,
            source,
            CompressionType.Unknown,
            filePath,
            size,
            modified,
            closeStream
        );

    protected override void SaveTo(
        Stream stream,
        WriterOptions options,
        IEnumerable<TarArchiveEntry> oldEntries,
        IEnumerable<TarArchiveEntry> newEntries
    )
    {
        using var writer = new TarWriter(stream, new TarWriterOptions(options));
        foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
        {
            using var entryStream = entry.OpenEntryStream();
            writer.Write(
                entry.Key.NotNull("Entry Key is null"),
                entryStream,
                entry.LastModifiedTime,
                entry.Size
            );
        }
    }

    protected override IReader CreateReaderForSolidExtraction()
    {
        var stream = Volumes.Single().Stream;
        stream.Position = 0;
        return TarReader.Open(stream);
    }
}
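A probe-then-open sketch for the IsTarFile overloads above (hypothetical path):

using System;
using SharpCompress.Archives.Tar;

if (TarArchive.IsTarFile("backup.tar"))
{
    using var archive = TarArchive.Open("backup.tar");
    foreach (var entry in archive.Entries)
    {
        Console.WriteLine(entry.Key);
    }
}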

View File

@@ -1,29 +1,28 @@
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Tar;

namespace SharpCompress.Archives.Tar;

public class TarArchiveEntry : TarEntry, IArchiveEntry
{
    internal TarArchiveEntry(TarArchive archive, TarFilePart? part, CompressionType compressionType)
        : base(part, compressionType) => Archive = archive;

    public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream().NotNull();

    #region IArchiveEntry Members

    public virtual Task<Stream> OpenEntryStreamAsync(
        CancellationToken cancellationToken = default
    ) => Task.FromResult(OpenEntryStream());

    public IArchive Archive { get; }

    public bool IsComplete => true;

    #endregion
}

View File

@@ -1,67 +1,71 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.IO;

namespace SharpCompress.Archives.Tar;

internal sealed class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiveEntry
{
    private readonly bool closeStream;
    private readonly Stream stream;

    internal TarWritableArchiveEntry(
        TarArchive archive,
        Stream stream,
        CompressionType compressionType,
        string path,
        long size,
        DateTime? lastModified,
        bool closeStream
    )
        : base(archive, null, compressionType)
    {
        this.stream = stream;
        Key = path;
        Size = size;
        LastModifiedTime = lastModified;
        this.closeStream = closeStream;
    }

    public override long Crc => 0;
    public override string Key { get; }
    public override long CompressedSize => 0;
    public override long Size { get; }
    public override DateTime? LastModifiedTime { get; }
    public override DateTime? CreatedTime => null;
    public override DateTime? LastAccessedTime => null;
    public override DateTime? ArchivedTime => null;
    public override bool IsEncrypted => false;
    public override bool IsDirectory => false;
    public override bool IsSplitAfter => false;

    internal override IEnumerable<FilePart> Parts => throw new NotImplementedException();

    Stream IWritableArchiveEntry.Stream => stream;

    public override Stream OpenEntryStream()
    {
        //ensure new stream is at the start, this could be reset
        stream.Seek(0, SeekOrigin.Begin);
        return SharpCompressStream.Create(stream, leaveOpen: true);
    }

    internal override void Close()
    {
        if (closeStream)
        {
            stream.Dispose();
        }
    }
}
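A write-path sketch (hypothetical file names), assuming the AddEntry(key, source, closeStream, size) overload on IWritableArchive: OpenEntryStream above hands the writer a leave-open wrapper, so the source stream survives writing and is disposed by Close() only when closeStream was true.

using System.IO;
using SharpCompress.Archives.Tar;
using SharpCompress.Common;
using SharpCompress.Writers;

using var archive = TarArchive.Create();
using var data = File.OpenRead("notes.txt");
archive.AddEntry("notes.txt", data, false, data.Length); // closeStream: false - caller disposes
using var output = File.Create("notes.tar");
archive.SaveTo(output, new WriterOptions(CompressionType.None));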

View File

@@ -12,261 +12,325 @@ using SharpCompress.Readers.Zip;
using SharpCompress.Writers;
using SharpCompress.Writers.Zip;

namespace SharpCompress.Archives.Zip;

public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
{
    private readonly SeekableZipHeaderFactory? headerFactory;

    /// <summary>
    /// Gets or sets the compression level applied to files added to the archive,
    /// if the compression method is set to deflate
    /// </summary>
    public CompressionLevel DeflateCompressionLevel { get; set; }

    /// <summary>
    /// Constructor with a SourceStream able to handle FileInfo and Streams.
    /// </summary>
    /// <param name="sourceStream"></param>
    internal ZipArchive(SourceStream sourceStream)
        : base(ArchiveType.Zip, sourceStream) =>
        headerFactory = new SeekableZipHeaderFactory(
            sourceStream.ReaderOptions.Password,
            sourceStream.ReaderOptions.ArchiveEncoding
        );

    /// <summary>
    /// Constructor expects a filepath to an existing file.
    /// </summary>
    /// <param name="filePath"></param>
    /// <param name="readerOptions"></param>
    public static ZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
    {
        filePath.NotNullOrEmpty(nameof(filePath));
        return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
    }

    /// <summary>
    /// Constructor with a FileInfo object to an existing file.
    /// </summary>
    /// <param name="fileInfo"></param>
    /// <param name="readerOptions"></param>
    public static ZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
    {
        fileInfo.NotNull(nameof(fileInfo));
        return new ZipArchive(
            new SourceStream(
                fileInfo,
                i => ZipArchiveVolumeFactory.GetFilePart(i, fileInfo),
                readerOptions ?? new ReaderOptions()
            )
        );
    }

    /// <summary>
    /// Constructor with all file parts passed in
    /// </summary>
    /// <param name="fileInfos"></param>
    /// <param name="readerOptions"></param>
    public static ZipArchive Open(
        IEnumerable<FileInfo> fileInfos,
        ReaderOptions? readerOptions = null
    )
    {
        fileInfos.NotNull(nameof(fileInfos));
        var files = fileInfos.ToArray();
        return new ZipArchive(
            new SourceStream(
                files[0],
                i => i < files.Length ? files[i] : null,
                readerOptions ?? new ReaderOptions()
            )
        );
    }

    /// <summary>
    /// Constructor with all stream parts passed in
    /// </summary>
    /// <param name="streams"></param>
    /// <param name="readerOptions"></param>
    public static ZipArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
    {
        streams.NotNull(nameof(streams));
        var strms = streams.ToArray();
        return new ZipArchive(
            new SourceStream(
                strms[0],
                i => i < strms.Length ? strms[i] : null,
                readerOptions ?? new ReaderOptions()
            )
        );
    }

    /// <summary>
    /// Takes a seekable Stream as a source
    /// </summary>
    /// <param name="stream"></param>
    /// <param name="readerOptions"></param>
    public static ZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
    {
        stream.NotNull(nameof(stream));
        if (stream is not { CanSeek: true })
        {
            throw new ArgumentException("Stream must be seekable", nameof(stream));
        }
        return new ZipArchive(
            new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
        );
    }

    public static bool IsZipFile(
        string filePath,
        string? password = null,
        int bufferSize = ReaderOptions.DefaultBufferSize
    ) => IsZipFile(new FileInfo(filePath), password, bufferSize);

    public static bool IsZipFile(
        FileInfo fileInfo,
        string? password = null,
        int bufferSize = ReaderOptions.DefaultBufferSize
    )
    {
        if (!fileInfo.Exists)
        {
            return false;
        }
        using Stream stream = fileInfo.OpenRead();
        return IsZipFile(stream, password, bufferSize);
    }

    public static bool IsZipFile(
        Stream stream,
        string? password = null,
        int bufferSize = ReaderOptions.DefaultBufferSize
    )
    {
        var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
        try
        {
            if (stream is not SharpCompressStream)
            {
                stream = new SharpCompressStream(stream, bufferSize: bufferSize);
            }
            var header = headerFactory
                .ReadStreamHeader(stream)
                .FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
            if (header is null)
            {
                return false;
            }
            return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
        }
        catch (CryptographicException)
        {
            return true;
        }
        catch
        {
            return false;
        }
    }

    public static bool IsZipMulti(
        Stream stream,
        string? password = null,
        int bufferSize = ReaderOptions.DefaultBufferSize
    )
    {
        var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
        try
        {
            if (stream is not SharpCompressStream)
            {
                stream = new SharpCompressStream(stream, bufferSize: bufferSize);
            }
            var header = headerFactory
                .ReadStreamHeader(stream)
                .FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
            if (header is null)
            {
                if (stream.CanSeek) //could be multipart. Test for central directory - might not be z64 safe
                {
                    var z = new SeekableZipHeaderFactory(password, new ArchiveEncoding());
                    var x = z.ReadSeekableHeader(stream).FirstOrDefault();
                    return x?.ZipHeaderType == ZipHeaderType.DirectoryEntry;
                }
                else
                {
                    return false;
                }
            }
            return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
        }
        catch (CryptographicException)
        {
            return true;
        }
        catch
        {
            return false;
        }
    }

    protected override IEnumerable<ZipVolume> LoadVolumes(SourceStream stream)
    {
        stream.LoadAllParts(); //request all streams
        stream.Position = 0;
        var streams = stream.Streams.ToList();
        var idx = 0;
        if (streams.Count() > 1) //test part 2 - true = multipart not split
        {
            streams[1].Position += 4; //skip the POST_DATA_DESCRIPTOR to prevent an exception
            var isZip = IsZipFile(streams[1], ReaderOptions.Password, ReaderOptions.BufferSize);
            streams[1].Position -= 4;
            if (isZip)
            {
                stream.IsVolumes = true;
                var tmp = streams[0]; //arcs as zip, z01 ... swap the zip the end
                streams.RemoveAt(0);
                streams.Add(tmp);
                //streams[0].Position = 4; //skip the POST_DATA_DESCRIPTOR to prevent an exception
                return streams.Select(a => new ZipVolume(a, ReaderOptions, idx++));
            }
        }
        //split mode or single file
        return new ZipVolume(stream, ReaderOptions, idx++).AsEnumerable();
    }

    internal ZipArchive()
        : base(ArchiveType.Zip) { }

    protected override IEnumerable<ZipArchiveEntry> LoadEntries(IEnumerable<ZipVolume> volumes)
    {
        var vols = volumes.ToArray();
        foreach (var h in headerFactory.NotNull().ReadSeekableHeader(vols.Last().Stream))
        {
            if (h != null)
            {
                switch (h.ZipHeaderType)
                {
                    case ZipHeaderType.DirectoryEntry:
                    {
                        var deh = (DirectoryEntryHeader)h;
                        Stream s;
                        if (
                            deh.RelativeOffsetOfEntryHeader + deh.CompressedSize
                            > vols[deh.DiskNumberStart].Stream.Length
                        )
                        {
                            var v = vols.Skip(deh.DiskNumberStart).ToArray();
                            s = new SourceStream(
                                v[0].Stream,
                                i => i < v.Length ? v[i].Stream : null,
                                new ReaderOptions() { LeaveStreamOpen = true }
                            );
                        }
                        else
                        {
                            s = vols[deh.DiskNumberStart].Stream;
                        }
                        yield return new ZipArchiveEntry(
                            this,
                            new SeekableZipFilePart(headerFactory.NotNull(), deh, s)
                        );
                    }
                    break;
                    case ZipHeaderType.DirectoryEnd:
                    {
                        var bytes = ((DirectoryEndHeader)h).Comment ?? Array.Empty<byte>();
                        vols.Last().Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
                        yield break;
                    }
                }
            }
        }
    }

    public void SaveTo(Stream stream) => SaveTo(stream, new WriterOptions(CompressionType.Deflate));

    protected override void SaveTo(
        Stream stream,
        WriterOptions options,
        IEnumerable<ZipArchiveEntry> oldEntries,
        IEnumerable<ZipArchiveEntry> newEntries
    )
    {
        using var writer = new ZipWriter(stream, new ZipWriterOptions(options));
        foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
        {
            using var entryStream = entry.OpenEntryStream();
            writer.Write(
                entry.Key.NotNull("Entry Key is null"),
                entryStream,
                entry.LastModifiedTime
            );
        }
    }

    protected override ZipArchiveEntry CreateEntryInternal(
        string filePath,
        Stream source,
        long size,
        DateTime? modified,
        bool closeStream
    ) => new ZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream);

    public static ZipArchive Create() => new();

    protected override IReader CreateReaderForSolidExtraction()
    {
        var stream = Volumes.Single().Stream;
        ((IStreamStack)stream).StackSeek(0);
        return ZipReader.Open(stream, ReaderOptions, Entries);
    }
}
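Note the probe semantics above: a wrong or missing password surfaces as CryptographicException inside IsZipFile, which is reported as true, i.e. the data is a zip, just not readable yet. A sketch with a hypothetical path:

using System.IO;
using SharpCompress.Archives.Zip;

using var fs = File.OpenRead("maybe.zip");
var looksLikeZip = ZipArchive.IsZipFile(fs, password: null); // true even if entries are encrypted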

View File

@@ -1,30 +1,27 @@
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip;

namespace SharpCompress.Archives.Zip;

public class ZipArchiveEntry : ZipEntry, IArchiveEntry
{
    internal ZipArchiveEntry(ZipArchive archive, SeekableZipFilePart? part)
        : base(part) => Archive = archive;

    public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream().NotNull();

    #region IArchiveEntry Members

    public virtual Task<Stream> OpenEntryStreamAsync(
        CancellationToken cancellationToken = default
    ) => Task.FromResult(OpenEntryStream());

    public IArchive Archive { get; }

    public bool IsComplete => true;

    public string? Comment => ((SeekableZipFilePart)Parts.Single()).Comment;

    #endregion
}

View File

@@ -1,34 +1,35 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Readers;

namespace SharpCompress.Archives.Zip;

internal static class ZipArchiveVolumeFactory
{
    internal static FileInfo? GetFilePart(int index, FileInfo part1) //base the name on the first part
    {
        FileInfo? item = null;

        //load files with zip/zipx first. Swapped to end once loaded in ZipArchive
        //new style .zip, z01.. | .zipx, zx01 - if the numbers go beyond 99 then they use 100 ...1000 etc
        var m = Regex.Match(part1.Name, @"^(.*\.)(zipx?|zx?[0-9]+)$", RegexOptions.IgnoreCase);
        if (m.Success)
            item = new FileInfo(
                Path.Combine(
                    part1.DirectoryName!,
                    String.Concat(
                        m.Groups[1].Value,
                        Regex.Replace(m.Groups[2].Value, @"[^xz]", ""),
                        index.ToString().PadLeft(2, '0')
                    )
                )
            );
        else //split - 001, 002 ...
            return ArchiveVolumeFactory.GetFilePart(index, part1);

        if (item != null && item.Exists)
            return item;

        return null; //no more items
    }
}
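Illustrative walk of the pattern above, using only the regex shown: stripping everything but x/z from the extension turns "data.zip" into the "data.z01" volume name at index 1 (and "data.zipx" into "data.zx01").

using System;
using System.Text.RegularExpressions;

var m = Regex.Match("data.zip", @"^(.*\.)(zipx?|zx?[0-9]+)$", RegexOptions.IgnoreCase);
var index = 1;
var next = m.Groups[1].Value
    + Regex.Replace(m.Groups[2].Value, @"[^xz]", "") // "zip" -> "z", "zipx" -> "zx"
    + index.ToString().PadLeft(2, '0');
Console.WriteLine(next); // data.z01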

View File

@@ -1,68 +1,73 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.IO;

namespace SharpCompress.Archives.Zip;

internal class ZipWritableArchiveEntry : ZipArchiveEntry, IWritableArchiveEntry
{
    private readonly bool closeStream;
    private readonly Stream stream;
    private bool isDisposed;

    internal ZipWritableArchiveEntry(
        ZipArchive archive,
        Stream stream,
        string path,
        long size,
        DateTime? lastModified,
        bool closeStream
    )
        : base(archive, null)
    {
        this.stream = stream;
        Key = path;
        Size = size;
        LastModifiedTime = lastModified;
        this.closeStream = closeStream;
    }

    public override long Crc => 0;
    public override string Key { get; }
    public override long CompressedSize => 0;
    public override long Size { get; }
    public override DateTime? LastModifiedTime { get; }
    public override DateTime? CreatedTime => null;
    public override DateTime? LastAccessedTime => null;
    public override DateTime? ArchivedTime => null;
    public override bool IsEncrypted => false;
    public override bool IsDirectory => false;
    public override bool IsSplitAfter => false;

    internal override IEnumerable<FilePart> Parts => throw new NotImplementedException();

    Stream IWritableArchiveEntry.Stream => stream;

    public override Stream OpenEntryStream()
    {
        //ensure new stream is at the start, this could be reset
        stream.Seek(0, SeekOrigin.Begin);
        return SharpCompressStream.Create(stream, leaveOpen: true);
    }

    internal override void Close()
    {
        if (closeStream && !isDisposed)
        {
            stream.Dispose();
            isDisposed = true;
        }
    }
}

View File

@@ -1,24 +1,7 @@
using System;
using System.Runtime.CompilerServices;

[assembly: CLSCompliant(true)]
[assembly: InternalsVisibleTo(
    "SharpCompress.Test,PublicKey=0024000004800000940000000602000000240000525341310004000001000100158bebf1433f76dffc356733c138babea7a47536c65ed8009b16372c6f4edbb20554db74a62687f56b97c20a6ce8c4b123280279e33c894e7b3aa93ab3c573656fde4db576cfe07dba09619ead26375b25d2c4a8e43f7be257d712b0dd2eb546f67adb09281338618a58ac834fc038dd7e2740a7ab3591826252e4f4516306dc"
)]

View File

@@ -0,0 +1,60 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common.GZip;
using SharpCompress.Common.Tar;

namespace SharpCompress.Common.Arc
{
    public class ArcEntry : Entry
    {
        private readonly ArcFilePart? _filePart;

        internal ArcEntry(ArcFilePart? filePart)
        {
            _filePart = filePart;
        }

        public override long Crc
        {
            get
            {
                if (_filePart == null)
                {
                    return 0;
                }
                return _filePart.Header.Crc16;
            }
        }

        public override string? Key => _filePart?.Header.Name;
        public override string? LinkTarget => null;
        public override long CompressedSize => _filePart?.Header.CompressedSize ?? 0;
        public override CompressionType CompressionType =>
            _filePart?.Header.CompressionMethod ?? CompressionType.Unknown;
        public override long Size => throw new NotImplementedException();
        public override DateTime? LastModifiedTime => null;
        public override DateTime? CreatedTime => null;
        public override DateTime? LastAccessedTime => null;
        public override DateTime? ArchivedTime => null;
        public override bool IsEncrypted => false;
        public override bool IsDirectory => false;
        public override bool IsSplitAfter => false;

        internal override IEnumerable<FilePart> Parts => _filePart.Empty();
    }
}

View File

@@ -0,0 +1,76 @@
using System;
using System.IO;
using System.Linq;
using System.Text;
namespace SharpCompress.Common.Arc
{
public class ArcEntryHeader
{
public ArchiveEncoding ArchiveEncoding { get; }
public CompressionType CompressionMethod { get; private set; }
public string? Name { get; private set; }
public long CompressedSize { get; private set; }
public DateTime DateTime { get; private set; }
public int Crc16 { get; private set; }
public long OriginalSize { get; private set; }
public long DataStartPosition { get; private set; }
public ArcEntryHeader(ArchiveEncoding archiveEncoding)
{
this.ArchiveEncoding = archiveEncoding;
}
public ArcEntryHeader? ReadHeader(Stream stream)
{
byte[] headerBytes = new byte[29];
if (stream.Read(headerBytes, 0, headerBytes.Length) != headerBytes.Length)
{
return null;
}
DataStartPosition = stream.Position;
return LoadFrom(headerBytes);
}
public ArcEntryHeader LoadFrom(byte[] headerBytes)
{
CompressionMethod = GetCompressionType(headerBytes[1]);
// Read the name: bytes 2..14 hold a 13-byte, null-terminated file name
int nameEnd = Array.IndexOf(headerBytes, (byte)0, 2, 13); // search only the name field
Name = Encoding.UTF8.GetString(headerBytes, 2, nameEnd > 0 ? nameEnd - 2 : 13);
int offset = 15;
CompressedSize = BitConverter.ToUInt32(headerBytes, offset);
offset += 4;
uint rawDateTime = BitConverter.ToUInt32(headerBytes, offset);
DateTime = ConvertToDateTime(rawDateTime);
offset += 4;
Crc16 = BitConverter.ToUInt16(headerBytes, offset);
offset += 2;
OriginalSize = BitConverter.ToUInt32(headerBytes, offset);
return this;
}
private CompressionType GetCompressionType(byte value)
{
return value switch
{
1 or 2 => CompressionType.None,
3 => CompressionType.RLE90,
4 => CompressionType.Squeezed,
5 or 6 or 7 or 8 => CompressionType.Crunched,
9 => CompressionType.Squashed,
10 => CompressionType.Crushed,
11 => CompressionType.Distilled,
_ => CompressionType.Unknown,
};
}
public static DateTime ConvertToDateTime(long rawDateTime)
{
// Convert Unix timestamp to DateTime (UTC)
return DateTimeOffset.FromUnixTimeSeconds(rawDateTime).UtcDateTime;
}
}
}
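ReadHeader consumes a fixed 29-byte record (0x1A marker, method byte, 13-byte name, then the size/date/CRC fields parsed in LoadFrom), so a seekable stream can be walked header by header. A rough sketch of that loop (illustrative only; it ignores the end-of-archive marker, a method byte of 0):

using System;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Common.Arc;

using var stream = File.OpenRead("archive.arc");
var encoding = new ArchiveEncoding();
ArcEntryHeader? header;
while ((header = new ArcEntryHeader(encoding).ReadHeader(stream)) != null)
{
    Console.WriteLine($"{header.Name}: {header.CompressedSize} -> {header.OriginalSize}");
    // DataStartPosition was captured immediately after the 29 header bytes.
    stream.Position = header.DataStartPosition + header.CompressedSize;
}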

View File

@@ -0,0 +1,75 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common.GZip;
using SharpCompress.Common.Tar;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors.Lzw;
using SharpCompress.Compressors.RLE90;
using SharpCompress.Compressors.Squeezed;
using SharpCompress.IO;
namespace SharpCompress.Common.Arc
{
public class ArcFilePart : FilePart
{
private readonly Stream? _stream;
internal ArcFilePart(ArcEntryHeader localArcHeader, Stream? seekableStream)
: base(localArcHeader.ArchiveEncoding)
{
_stream = seekableStream;
Header = localArcHeader;
}
internal ArcEntryHeader Header { get; set; }
internal override string? FilePartName => Header.Name;
internal override Stream GetCompressedStream()
{
if (_stream != null)
{
Stream compressedStream;
switch (Header.CompressionMethod)
{
case CompressionType.None:
compressedStream = new ReadOnlySubStream(
_stream,
Header.DataStartPosition,
Header.CompressedSize
);
break;
case CompressionType.RLE90:
compressedStream = new RunLength90Stream(
_stream,
(int)Header.CompressedSize
);
break;
case CompressionType.Squeezed:
compressedStream = new SqueezeStream(_stream, (int)Header.CompressedSize);
break;
case CompressionType.Crunched:
compressedStream = new ArcLzwStream(
_stream,
(int)Header.CompressedSize,
true
);
break;
default:
throw new NotSupportedException(
"CompressionMethod: " + Header.CompressionMethod
);
}
return compressedStream;
}
return _stream.NotNull();
}
internal override Stream? GetRawStream() => _stream;
}
}

View File

@@ -0,0 +1,16 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Readers;
namespace SharpCompress.Common.Arc
{
public class ArcVolume : Volume
{
public ArcVolume(Stream stream, ReaderOptions readerOptions, int index = 0)
: base(stream, readerOptions, index) { }
}
}

View File

@@ -1,76 +1,57 @@
using System;
using System.Text;
namespace SharpCompress.Common
namespace SharpCompress.Common;
public class ArchiveEncoding
{
public class ArchiveEncoding
/// <summary>
/// Default encoding to use when archive format doesn't specify one.
/// </summary>
public Encoding? Default { get; set; }
/// <summary>
/// ArchiveEncoding used by encryption schemes which don't comply with RFC 2898.
/// </summary>
public Encoding? Password { get; set; }
/// <summary>
/// Set this encoding when you want to force it for all encoding operations.
/// </summary>
public Encoding? Forced { get; set; }
/// <summary>
/// Set this when you want to use a custom method for all decoding operations.
/// </summary>
/// <returns>string Func(bytes, index, length)</returns>
public Func<byte[], int, int, string>? CustomDecoder { get; set; }
public ArchiveEncoding()
: this(Encoding.Default, Encoding.Default) { }
public ArchiveEncoding(Encoding def, Encoding password)
{
/// <summary>
/// Default encoding to use when archive format doesn't specify one.
/// </summary>
public Encoding Default { get; set; }
/// <summary>
/// ArchiveEncoding used by encryption schemes which don't comply with RFC 2898.
/// </summary>
public Encoding Password { get; set; }
/// <summary>
/// Set this encoding when you want to force it for all encoding operations.
/// </summary>
public Encoding? Forced { get; set; }
/// <summary>
/// Set this when you want to use a custom method for all decoding operations.
/// </summary>
/// <returns>string Func(bytes, index, length)</returns>
public Func<byte[], int, int, string>? CustomDecoder { get; set; }
public ArchiveEncoding()
: this(Encoding.Default, Encoding.Default)
{
}
public ArchiveEncoding(Encoding def, Encoding password)
{
Default = def;
Password = password;
}
Default = def;
Password = password;
}
#if !NETFRAMEWORK
static ArchiveEncoding()
{
Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
}
static ArchiveEncoding() => Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
#endif
public string Decode(byte[] bytes)
{
return Decode(bytes, 0, bytes.Length);
}
public string Decode(byte[] bytes) => Decode(bytes, 0, bytes.Length);
public string Decode(byte[] bytes, int start, int length)
{
return GetDecoder().Invoke(bytes, start, length);
}
public string Decode(byte[] bytes, int start, int length) =>
GetDecoder().Invoke(bytes, start, length);
public string DecodeUTF8(byte[] bytes)
{
return Encoding.UTF8.GetString(bytes, 0, bytes.Length);
}
public string DecodeUTF8(byte[] bytes) => Encoding.UTF8.GetString(bytes, 0, bytes.Length);
public byte[] Encode(string str)
{
return GetEncoding().GetBytes(str);
}
public byte[] Encode(string str) => GetEncoding().GetBytes(str);
public Encoding GetEncoding()
{
return Forced ?? Default ?? Encoding.UTF8;
}
public Encoding GetEncoding() => Forced ?? Default ?? Encoding.UTF8;
public Func<byte[], int, int, string> GetDecoder()
{
return CustomDecoder ?? ((bytes, index, count) => GetEncoding().GetString(bytes, index, count));
}
}
public Encoding GetPasswordEncoding() => Password ?? Encoding.UTF8;
public Func<byte[], int, int, string> GetDecoder() =>
CustomDecoder ?? ((bytes, index, count) => GetEncoding().GetString(bytes, index, count));
}
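The resolution order here is worth noting: CustomDecoder wins for decoding, otherwise Forced, then Default, then UTF-8. For example, to read entry names written with a DOS code page (a sketch; the options type that carries this is OptionsBase.ArchiveEncoding):

using System.Text;
using SharpCompress.Common;

// Force code page 437 for all name decoding; the static constructor has already
// registered CodePagesEncodingProvider on non-.NET Framework targets.
var encoding = new ArchiveEncoding { Forced = Encoding.GetEncoding(437) };

// Or take over decoding entirely; CustomDecoder overrides every Encoding property.
encoding.CustomDecoder = (bytes, index, count) =>
    Encoding.UTF8.GetString(bytes, index, count).Replace('\\', '/');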

View File

@@ -1,12 +0,0 @@
using System;
namespace SharpCompress.Common
{
public class ArchiveException : Exception
{
public ArchiveException(string message)
: base(message)
{
}
}
}

View File

@@ -1,14 +1,10 @@
using System;
namespace SharpCompress.Common
{
public class ArchiveExtractionEventArgs<T> : EventArgs
{
internal ArchiveExtractionEventArgs(T entry)
{
Item = entry;
}
namespace SharpCompress.Common;
public T Item { get; }
}
}
public class ArchiveExtractionEventArgs<T> : EventArgs
{
internal ArchiveExtractionEventArgs(T entry) => Item = entry;
public T Item { get; }
}

View File

@@ -1,11 +1,11 @@
namespace SharpCompress.Common
namespace SharpCompress.Common;
public enum ArchiveType
{
Rar,
Zip,
Tar,
SevenZip,
GZip
}
}
Rar,
Zip,
Tar,
SevenZip,
GZip,
Arc,
}

View File

@@ -1,23 +1,25 @@
using System;
namespace SharpCompress.Common
namespace SharpCompress.Common;
public sealed class CompressedBytesReadEventArgs : EventArgs
{
public sealed class CompressedBytesReadEventArgs : EventArgs
public CompressedBytesReadEventArgs(
long compressedBytesRead,
long currentFilePartCompressedBytesRead
)
{
public CompressedBytesReadEventArgs(long compressedBytesRead, long currentFilePartCompressedBytesRead)
{
CompressedBytesRead = compressedBytesRead;
CurrentFilePartCompressedBytesRead = currentFilePartCompressedBytesRead;
}
/// <summary>
/// Compressed bytes read for the current entry
/// </summary>
public long CompressedBytesRead { get; }
/// <summary>
/// Current file part read for Multipart files (e.g. Rar)
/// </summary>
public long CurrentFilePartCompressedBytesRead { get; }
CompressedBytesRead = compressedBytesRead;
CurrentFilePartCompressedBytesRead = currentFilePartCompressedBytesRead;
}
}
/// <summary>
/// Compressed bytes read for the current entry
/// </summary>
public long CompressedBytesRead { get; }
/// <summary>
/// Current file part read for Multipart files (e.g. Rar)
/// </summary>
public long CurrentFilePartCompressedBytesRead { get; }
}
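These args surface progress while an entry is being extracted. A hedged subscriber sketch, assuming the archive-level CompressedBytesRead event exposed elsewhere in the library:

using System;
using SharpCompress.Archives;

using var archive = ArchiveFactory.Open("big.rar");
archive.CompressedBytesRead += (_, e) =>
    Console.WriteLine(
        $"entry: {e.CompressedBytesRead} B, current part: {e.CurrentFilePartCompressedBytesRead} B");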

View File

@@ -1,19 +1,32 @@
namespace SharpCompress.Common
namespace SharpCompress.Common;
public enum CompressionType
{
None,
GZip,
BZip2,
PPMd,
Deflate,
Rar,
LZMA,
BCJ,
BCJ2,
LZip,
Xz,
Unknown,
Deflate64
}
}
None,
GZip,
BZip2,
PPMd,
Deflate,
Rar,
LZMA,
BCJ,
BCJ2,
LZip,
Xz,
Unknown,
Deflate64,
Shrink,
Lzw,
Reduce1,
Reduce2,
Reduce3,
Reduce4,
Explode,
Squeezed,
RLE90,
Crunched,
Squashed,
Crushed,
Distilled,
ZStandard,
}

View File

@@ -1,12 +0,0 @@
using System;
namespace SharpCompress.Common
{
public class CryptographicException : Exception
{
public CryptographicException(string message)
: base(message)
{
}
}
}

View File

@@ -1,89 +1,90 @@
using System;
using System.Collections.Generic;
using System.Linq;
namespace SharpCompress.Common
namespace SharpCompress.Common;
public abstract class Entry : IEntry
{
/// <summary>
/// The File's 32 bit CRC Hash
/// </summary>
public abstract long Crc { get; }
/// <summary>
/// The string key of the file internal to the Archive.
/// </summary>
public abstract string Key { get; }
/// <summary>
/// The string key of the file internal to the Archive.
/// </summary>
public abstract string? Key { get; }
/// <summary>
/// The target of a symlink entry internal to the Archive. Will be null if not a symlink.
/// </summary>
public abstract string? LinkTarget { get; }
/// <summary>
/// The compressed file size
/// </summary>
public abstract long CompressedSize { get; }
/// <summary>
/// The compression type
/// </summary>
public abstract CompressionType CompressionType { get; }
/// <summary>
/// The uncompressed file size
/// </summary>
public abstract long Size { get; }
/// <summary>
/// The entry last modified time in the archive, if recorded
/// </summary>
public abstract DateTime? LastModifiedTime { get; }
/// <summary>
/// The entry create time in the archive, if recorded
/// </summary>
public abstract DateTime? CreatedTime { get; }
/// <summary>
/// The entry last accessed time in the archive, if recorded
/// </summary>
public abstract DateTime? LastAccessedTime { get; }
/// <summary>
/// The entry time when archived, if recorded
/// </summary>
public abstract DateTime? ArchivedTime { get; }
/// <summary>
/// Entry is password protected and encrypted and cannot be extracted.
/// </summary>
public abstract bool IsEncrypted { get; }
/// <summary>
/// Entry is directory.
/// </summary>
public abstract bool IsDirectory { get; }
/// <summary>
/// Entry is split among multiple volumes
/// </summary>
public abstract bool IsSplitAfter { get; }
/// <inheritdoc/>
public override string ToString() => Key;
public int VolumeIndexFirst => Parts.FirstOrDefault()?.Index ?? 0;
public int VolumeIndexLast => Parts.LastOrDefault()?.Index ?? 0;
internal abstract IEnumerable<FilePart> Parts { get; }
/// <inheritdoc/>
public override string ToString() => Key ?? "Entry";
public bool IsSolid { get; set; }
internal abstract IEnumerable<FilePart> Parts { get; }
internal virtual void Close()
{
}
public bool IsSolid { get; set; }
/// <summary>
/// Entry file attribute.
/// </summary>
public virtual int? Attrib => throw new NotImplementedException();
}
internal virtual void Close() { }
/// <summary>
/// Entry file attribute.
/// </summary>
public virtual int? Attrib => throw new NotImplementedException();
}
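Consumers normally reach these members through archive entries rather than by subclassing. A short sketch of typical metadata access (ArchiveFactory.Open and IArchive.Entries are the assumed public surface):

using System;
using SharpCompress.Archives;

using var archive = ArchiveFactory.Open("archive.zip");
foreach (var entry in archive.Entries)
{
    if (entry.IsDirectory)
    {
        continue;
    }
    // Key is now nullable; formats such as raw gzip may not record a name.
    Console.WriteLine($"{entry.Key ?? "<unnamed>"}: {entry.Size} bytes ({entry.CompressionType})");
}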

View File

@@ -1,93 +1,207 @@
using System;
using System.IO;
using System.IO.Compression;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common
{
public class EntryStream : Stream
{
private readonly IReader _reader;
private readonly Stream _stream;
private bool _completed;
private bool _isDisposed;
namespace SharpCompress.Common;
internal EntryStream(IReader reader, Stream stream)
public class EntryStream : Stream, IStreamStack
{
#if DEBUG_STREAMS
long IStreamStack.InstanceId { get; set; }
#endif
int IStreamStack.DefaultBufferSize { get; set; }
Stream IStreamStack.BaseStream() => _stream;
int IStreamStack.BufferSize
{
get => 0;
set { }
}
int IStreamStack.BufferPosition
{
get => 0;
set { }
}
void IStreamStack.SetPosition(long position) { }
private readonly IReader _reader;
private readonly Stream _stream;
private bool _completed;
private bool _isDisposed;
internal EntryStream(IReader reader, Stream stream)
{
_reader = reader;
_stream = stream;
#if DEBUG_STREAMS
this.DebugConstruct(typeof(EntryStream));
#endif
}
/// <summary>
/// A stream returned by OpenEntryStream must be read to completion; call this to skip the remainder of the entry.
/// </summary>
public void SkipEntry()
{
this.Skip();
_completed = true;
}
/// <summary>
/// Asynchronously skip the rest of the entry stream.
/// </summary>
public async Task SkipEntryAsync(CancellationToken cancellationToken = default)
{
await this.SkipAsync(cancellationToken).ConfigureAwait(false);
_completed = true;
}
protected override void Dispose(bool disposing)
{
if (!(_completed || _reader.Cancelled))
{
_reader = reader;
_stream = stream;
SkipEntry();
}
/// <summary>
/// When reading a stream from OpenEntryStream, the stream must be completed so use this to finish reading the entire entry.
/// </summary>
public void SkipEntry()
// Needs a standard approach: some decompressors over-read past the entry boundary, so the excess must be handled consistently.
if (_stream is IStreamStack ss)
{
if (ss.BaseStream() is SharpCompress.Compressors.Deflate.DeflateStream deflateStream)
{
deflateStream.Flush(); // Deflate over-reads; knock the position back
}
else if (ss.BaseStream() is SharpCompress.Compressors.LZMA.LzmaStream lzmaStream)
{
lzmaStream.Flush(); // LZMA over-reads; knock the position back
}
}
if (_isDisposed)
{
return;
}
_isDisposed = true;
#if DEBUG_STREAMS
this.DebugDispose(typeof(EntryStream));
#endif
base.Dispose(disposing);
_stream.Dispose();
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override async ValueTask DisposeAsync()
{
if (!(_completed || _reader.Cancelled))
{
await SkipEntryAsync().ConfigureAwait(false);
}
// Needs a standard approach: some decompressors over-read past the entry boundary, so the excess must be handled consistently.
if (_stream is IStreamStack ss)
{
if (ss.BaseStream() is SharpCompress.Compressors.Deflate.DeflateStream deflateStream)
{
await deflateStream.FlushAsync().ConfigureAwait(false);
}
else if (ss.BaseStream() is SharpCompress.Compressors.LZMA.LzmaStream lzmaStream)
{
await lzmaStream.FlushAsync().ConfigureAwait(false);
}
}
if (_isDisposed)
{
return;
}
_isDisposed = true;
#if DEBUG_STREAMS
this.DebugDispose(typeof(EntryStream));
#endif
await base.DisposeAsync().ConfigureAwait(false);
await _stream.DisposeAsync().ConfigureAwait(false);
}
#endif
public override bool CanRead => true;
public override bool CanSeek => false;
public override bool CanWrite => false;
public override void Flush() { }
public override Task FlushAsync(CancellationToken cancellationToken) => Task.CompletedTask;
public override long Length => _stream.Length;
public override long Position
{
get => _stream.Position;
set => throw new NotSupportedException();
}
public override int Read(byte[] buffer, int offset, int count)
{
var read = _stream.Read(buffer, offset, count);
if (read <= 0)
{
this.Skip();
_completed = true;
}
protected override void Dispose(bool disposing)
{
if (!(_completed || _reader.Cancelled))
{
SkipEntry();
}
if (_isDisposed)
{
return;
}
_isDisposed = true;
base.Dispose(disposing);
_stream.Dispose();
}
public override bool CanRead => true;
public override bool CanSeek => false;
public override bool CanWrite => false;
public override void Flush()
{
}
public override long Length => _stream.Length;
public override long Position { get => throw new NotSupportedException(); set => throw new NotSupportedException(); }
public override int Read(byte[] buffer, int offset, int count)
{
int read = _stream.Read(buffer, offset, count);
if (read <= 0)
{
_completed = true;
}
return read;
}
public override int ReadByte()
{
int value = _stream.ReadByte();
if (value == -1)
{
_completed = true;
}
return value;
}
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotSupportedException();
}
public override void SetLength(long value)
{
throw new NotSupportedException();
}
public override void Write(byte[] buffer, int offset, int count)
{
throw new NotSupportedException();
}
return read;
}
}
public override async Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
var read = await _stream
.ReadAsync(buffer, offset, count, cancellationToken)
.ConfigureAwait(false);
if (read <= 0)
{
_completed = true;
}
return read;
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
)
{
var read = await _stream.ReadAsync(buffer, cancellationToken).ConfigureAwait(false);
if (read <= 0)
{
_completed = true;
}
return read;
}
#endif
public override int ReadByte()
{
var value = _stream.ReadByte();
if (value == -1)
{
_completed = true;
}
return value;
}
public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
public override void SetLength(long value) => throw new NotSupportedException();
public override void Write(byte[] buffer, int offset, int count) =>
throw new NotSupportedException();
}
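With the overrides above, an entry can be consumed without blocking: CopyToAsync lands in the new ReadAsync path, and disposing the stream skips any unread remainder. A sketch using the reader API:

using System.IO;
using System.Threading.Tasks;
using SharpCompress.Readers;

public static async Task ExtractFirstFileAsync(Stream archiveStream, Stream destination)
{
    using var reader = ReaderFactory.Open(archiveStream);
    while (reader.MoveToNextEntry())
    {
        if (reader.Entry.IsDirectory)
        {
            continue;
        }
        using var entryStream = reader.OpenEntryStream();
        await entryStream.CopyToAsync(destination); // drives EntryStream.ReadAsync
        return;
    }
}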

View File

@@ -1,17 +0,0 @@
using System;
namespace SharpCompress.Common
{
public class ExtractionException : Exception
{
public ExtractionException(string message)
: base(message)
{
}
public ExtractionException(string message, Exception inner)
: base(message, inner)
{
}
}
}

View File

@@ -1,104 +1,232 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common
namespace SharpCompress.Common;
internal static class ExtractionMethods
{
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteEntryToDirectory(
IEntry entry,
string destinationDirectory,
ExtractionOptions? options,
Action<string, ExtractionOptions?> write
)
{
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteEntryToDirectory(IEntry entry,
string destinationDirectory,
ExtractionOptions? options,
Action<string, ExtractionOptions?> write)
string destinationFileName;
var fullDestinationDirectoryPath = Path.GetFullPath(destinationDirectory);
//check for trailing slash.
if (
fullDestinationDirectoryPath[fullDestinationDirectoryPath.Length - 1]
!= Path.DirectorySeparatorChar
)
{
string destinationFileName;
string fullDestinationDirectoryPath = Path.GetFullPath(destinationDirectory);
//check for trailing slash.
if (fullDestinationDirectoryPath[fullDestinationDirectoryPath.Length - 1] != Path.DirectorySeparatorChar)
{
fullDestinationDirectoryPath += Path.DirectorySeparatorChar;
}
if (!Directory.Exists(fullDestinationDirectoryPath))
{
throw new ExtractionException($"Directory does not exist to extract to: {fullDestinationDirectoryPath}");
}
options ??= new ExtractionOptions()
{
Overwrite = true
};
string file = Path.GetFileName(entry.Key);
if (options.ExtractFullPath)
{
string folder = Path.GetDirectoryName(entry.Key)!;
string destdir = Path.GetFullPath(Path.Combine(fullDestinationDirectoryPath, folder));
if (!Directory.Exists(destdir))
{
if (!destdir.StartsWith(fullDestinationDirectoryPath, StringComparison.Ordinal))
{
throw new ExtractionException("Entry is trying to create a directory outside of the destination directory.");
}
Directory.CreateDirectory(destdir);
}
destinationFileName = Path.Combine(destdir, file);
}
else
{
destinationFileName = Path.Combine(fullDestinationDirectoryPath, file);
}
if (!entry.IsDirectory)
{
destinationFileName = Path.GetFullPath(destinationFileName);
if (!destinationFileName.StartsWith(fullDestinationDirectoryPath, StringComparison.Ordinal))
{
throw new ExtractionException("Entry is trying to write a file outside of the destination directory.");
}
write(destinationFileName, options);
}
else if (options.ExtractFullPath && !Directory.Exists(destinationFileName))
{
Directory.CreateDirectory(destinationFileName);
}
fullDestinationDirectoryPath += Path.DirectorySeparatorChar;
}
public static void WriteEntryToFile(IEntry entry, string destinationFileName,
ExtractionOptions? options,
Action<string, FileMode> openAndWrite)
if (!Directory.Exists(fullDestinationDirectoryPath))
{
if (entry.LinkTarget != null)
{
if (options?.WriteSymbolicLink is null)
{
throw new ExtractionException("Entry is a symbolic link but ExtractionOptions.WriteSymbolicLink delegate is null");
}
options.WriteSymbolicLink(destinationFileName, entry.LinkTarget);
}
else
{
FileMode fm = FileMode.Create;
options ??= new ExtractionOptions()
{
Overwrite = true
};
throw new ExtractionException(
$"Directory does not exist to extract to: {fullDestinationDirectoryPath}"
);
}
if (!options.Overwrite)
options ??= new ExtractionOptions() { Overwrite = true };
var file = Path.GetFileName(entry.Key.NotNull("Entry Key is null")).NotNull("File is null");
file = Utility.ReplaceInvalidFileNameChars(file);
if (options.ExtractFullPath)
{
var folder = Path.GetDirectoryName(entry.Key.NotNull("Entry Key is null"))
.NotNull("Directory is null");
var destdir = Path.GetFullPath(Path.Combine(fullDestinationDirectoryPath, folder));
if (!Directory.Exists(destdir))
{
if (!destdir.StartsWith(fullDestinationDirectoryPath, StringComparison.Ordinal))
{
fm = FileMode.CreateNew;
throw new ExtractionException(
"Entry is trying to create a directory outside of the destination directory."
);
}
openAndWrite(destinationFileName, fm);
entry.PreserveExtractionOptions(destinationFileName, options);
Directory.CreateDirectory(destdir);
}
destinationFileName = Path.Combine(destdir, file);
}
else
{
destinationFileName = Path.Combine(fullDestinationDirectoryPath, file);
}
if (!entry.IsDirectory)
{
destinationFileName = Path.GetFullPath(destinationFileName);
if (
!destinationFileName.StartsWith(
fullDestinationDirectoryPath,
StringComparison.Ordinal
)
)
{
throw new ExtractionException(
"Entry is trying to write a file outside of the destination directory."
);
}
write(destinationFileName, options);
}
else if (options.ExtractFullPath && !Directory.Exists(destinationFileName))
{
Directory.CreateDirectory(destinationFileName);
}
}
}
public static void WriteEntryToFile(
IEntry entry,
string destinationFileName,
ExtractionOptions? options,
Action<string, FileMode> openAndWrite
)
{
if (entry.LinkTarget != null)
{
if (options?.WriteSymbolicLink is null)
{
throw new ExtractionException(
"Entry is a symbolic link but ExtractionOptions.WriteSymbolicLink delegate is null"
);
}
options.WriteSymbolicLink(destinationFileName, entry.LinkTarget);
}
else
{
var fm = FileMode.Create;
options ??= new ExtractionOptions() { Overwrite = true };
if (!options.Overwrite)
{
fm = FileMode.CreateNew;
}
openAndWrite(destinationFileName, fm);
entry.PreserveExtractionOptions(destinationFileName, options);
}
}
public static async Task WriteEntryToDirectoryAsync(
IEntry entry,
string destinationDirectory,
ExtractionOptions? options,
Func<string, ExtractionOptions?, Task> writeAsync,
CancellationToken cancellationToken = default
)
{
string destinationFileName;
var fullDestinationDirectoryPath = Path.GetFullPath(destinationDirectory);
//check for trailing slash.
if (
fullDestinationDirectoryPath[fullDestinationDirectoryPath.Length - 1]
!= Path.DirectorySeparatorChar
)
{
fullDestinationDirectoryPath += Path.DirectorySeparatorChar;
}
if (!Directory.Exists(fullDestinationDirectoryPath))
{
throw new ExtractionException(
$"Directory does not exist to extract to: {fullDestinationDirectoryPath}"
);
}
options ??= new ExtractionOptions() { Overwrite = true };
var file = Path.GetFileName(entry.Key.NotNull("Entry Key is null")).NotNull("File is null");
file = Utility.ReplaceInvalidFileNameChars(file);
if (options.ExtractFullPath)
{
var folder = Path.GetDirectoryName(entry.Key.NotNull("Entry Key is null"))
.NotNull("Directory is null");
var destdir = Path.GetFullPath(Path.Combine(fullDestinationDirectoryPath, folder));
if (!Directory.Exists(destdir))
{
if (!destdir.StartsWith(fullDestinationDirectoryPath, StringComparison.Ordinal))
{
throw new ExtractionException(
"Entry is trying to create a directory outside of the destination directory."
);
}
Directory.CreateDirectory(destdir);
}
destinationFileName = Path.Combine(destdir, file);
}
else
{
destinationFileName = Path.Combine(fullDestinationDirectoryPath, file);
}
if (!entry.IsDirectory)
{
destinationFileName = Path.GetFullPath(destinationFileName);
if (
!destinationFileName.StartsWith(
fullDestinationDirectoryPath,
StringComparison.Ordinal
)
)
{
throw new ExtractionException(
"Entry is trying to write a file outside of the destination directory."
);
}
await writeAsync(destinationFileName, options).ConfigureAwait(false);
}
else if (options.ExtractFullPath && !Directory.Exists(destinationFileName))
{
Directory.CreateDirectory(destinationFileName);
}
}
public static async Task WriteEntryToFileAsync(
IEntry entry,
string destinationFileName,
ExtractionOptions? options,
Func<string, FileMode, Task> openAndWriteAsync,
CancellationToken cancellationToken = default
)
{
if (entry.LinkTarget != null)
{
if (options?.WriteSymbolicLink is null)
{
throw new ExtractionException(
"Entry is a symbolic link but ExtractionOptions.WriteSymbolicLink delegate is null"
);
}
options.WriteSymbolicLink(destinationFileName, entry.LinkTarget);
}
else
{
var fm = FileMode.Create;
options ??= new ExtractionOptions() { Overwrite = true };
if (!options.Overwrite)
{
fm = FileMode.CreateNew;
}
await openAndWriteAsync(destinationFileName, fm).ConfigureAwait(false);
entry.PreserveExtractionOptions(destinationFileName, options);
}
}
}
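The public reader and archive extension methods funnel into these helpers, which is where path-traversal ("zip slip") is blocked: every destination is normalised with Path.GetFullPath and must stay under the destination directory. A typical call site (a sketch; WriteEntryToDirectory on IReader is the assumed extension):

using System.IO;
using SharpCompress.Common;
using SharpCompress.Readers;

using var reader = ReaderFactory.Open(File.OpenRead("archive.tar.gz"));
while (reader.MoveToNextEntry())
{
    // Resolves to WriteEntryToDirectory above, including the escape checks
    // for both directories and files.
    reader.WriteEntryToDirectory(
        "output",
        new ExtractionOptions { ExtractFullPath = true, Overwrite = true });
}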

View File

@@ -1,40 +1,40 @@
using System;
namespace SharpCompress.Common
namespace SharpCompress.Common;
public class ExtractionOptions
{
/// <summary>
/// overwrite target if it exists
/// </summary>
public bool Overwrite { get; set; }
/// <summary>
/// extract with internal directory structure
/// </summary>
public bool ExtractFullPath { get; set; }
/// <summary>
/// preserve file time
/// </summary>
public bool PreserveFileTime { get; set; }
/// <summary>
/// preserve windows file attributes
/// </summary>
public bool PreserveAttributes { get; set; }
/// <summary>
/// Delegate for writing symbolic links to disk.
/// sourcePath is where the symlink is created.
/// targetPath is what the symlink refers to.
/// </summary>
public delegate void SymbolicLinkWriterDelegate(string sourcePath, string targetPath);
public SymbolicLinkWriterDelegate WriteSymbolicLink = (sourcePath, targetPath) =>
{
/// <summary>
/// overwrite target if it exists
/// </summary>
public bool Overwrite { get; set; }
/// <summary>
/// extract with internal directory structure
/// </summary>
public bool ExtractFullPath { get; set; }
/// <summary>
/// preserve file time
/// </summary>
public bool PreserveFileTime { get; set; }
/// <summary>
/// preserve windows file attributes
/// </summary>
public bool PreserveAttributes { get; set; }
/// <summary>
/// Delegate for writing symbolic links to disk.
/// sourcePath is where the symlink is created.
/// targetPath is what the symlink refers to.
/// </summary>
public delegate void SymbolicLinkWriterDelegate(string sourcePath, string targetPath);
public SymbolicLinkWriterDelegate WriteSymbolicLink =
(sourcePath, targetPath) =>
{
Console.WriteLine($"Could not write symlink {sourcePath} -> {targetPath}, for more information please see https://github.com/dotnet/runtime/issues/24271");
};
}
}
Console.WriteLine(
$"Could not write symlink {sourcePath} -> {targetPath}, for more information please see https://github.com/dotnet/runtime/issues/24271"
);
};
}
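The default WriteSymbolicLink delegate only logs a warning. On .NET 6 or later, callers can opt into real links; File.CreateSymbolicLink in this sketch assumes that target:

using System.IO;
using SharpCompress.Common;

var options = new ExtractionOptions
{
    ExtractFullPath = true,
    Overwrite = true,
    PreserveFileTime = true,
    // Materialise symlinks instead of printing the default warning.
    WriteSymbolicLink = (sourcePath, targetPath) =>
        File.CreateSymbolicLink(sourcePath, targetPath),
};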

View File

@@ -1,20 +1,17 @@
using System.IO;
namespace SharpCompress.Common
namespace SharpCompress.Common;
public abstract class FilePart
{
protected FilePart(ArchiveEncoding archiveEncoding)
{
ArchiveEncoding = archiveEncoding;
}
protected FilePart(ArchiveEncoding archiveEncoding) => ArchiveEncoding = archiveEncoding;
internal ArchiveEncoding ArchiveEncoding { get; }
internal abstract string FilePartName { get; }
internal abstract string? FilePartName { get; }
public int Index { get; set; }
internal abstract Stream GetCompressedStream();
internal abstract Stream? GetRawStream();
internal bool Skipped { get; set; }
}
internal abstract Stream? GetCompressedStream();
internal abstract Stream? GetRawStream();
internal bool Skipped { get; set; }
}

View File

@@ -1,29 +1,28 @@
using System;
namespace SharpCompress.Common
namespace SharpCompress.Common;
public sealed class FilePartExtractionBeginEventArgs : EventArgs
{
public FilePartExtractionBeginEventArgs(string name, long size, long compressedSize)
{
public FilePartExtractionBeginEventArgs(string name, long size, long compressedSize)
{
Name = name;
Size = size;
CompressedSize = compressedSize;
}
/// <summary>
/// File name for the part for the current entry
/// </summary>
public string Name { get; }
/// <summary>
/// Uncompressed size of the current entry in the part
/// </summary>
public long Size { get; }
/// <summary>
/// Compressed size of the current entry in the part
/// </summary>
public long CompressedSize { get; }
Name = name;
Size = size;
CompressedSize = compressedSize;
}
}
/// <summary>
/// File name for the part for the current entry
/// </summary>
public string Name { get; }
/// <summary>
/// Uncompressed size of the current entry in the part
/// </summary>
public long Size { get; }
/// <summary>
/// Compressed size of the current entry in the part
/// </summary>
public long CompressedSize { get; }
}

View File

@@ -1,108 +1,86 @@
using System;
namespace SharpCompress.Common
namespace SharpCompress.Common;
internal static class FlagUtility
{
/// <summary>
/// Returns true if the flag is set on the specified bit field.
/// Currently only works with 32-bit bitfields.
/// </summary>
/// <typeparam name="T">Enumeration with Flags attribute</typeparam>
/// <param name="bitField">Flagged variable</param>
/// <param name="flag">Flag to test</param>
/// <returns></returns>
public static bool HasFlag<T>(long bitField, T flag)
where T : struct => HasFlag(bitField, Convert.ToInt64(flag)); // convert first; passing flag through unconverted would recurse into this overload
/// <summary>
/// Returns true if the flag is set on the specified bit field.
/// Currently only works with 32-bit bitfields.
/// </summary>
/// <typeparam name="T">Enumeration with Flags attribute</typeparam>
/// <param name="bitField">Flagged variable</param>
/// <param name="flag">Flag to test</param>
/// <returns></returns>
public static bool HasFlag<T>(ulong bitField, T flag)
where T : struct => HasFlag(bitField, Convert.ToUInt64(flag)); // convert first to hit the (ulong, ulong) overload instead of recursing
/// <summary>
/// Returns true if the flag is set on the specified bit field.
/// Currently only works with 32-bit bitfields.
/// </summary>
/// <param name="bitField">Flagged variable</param>
/// <param name="flag">Flag to test</param>
/// <returns></returns>
public static bool HasFlag(ulong bitField, ulong flag) => ((bitField & flag) == flag);
public static bool HasFlag(short bitField, short flag) => ((bitField & flag) == flag);
/// <summary>
/// Returns true if the flag is set on the specified bit field.
/// Currently only works with 32-bit bitfields.
/// </summary>
/// <typeparam name="T">Enumeration with Flags attribute</typeparam>
/// <param name="bitField">Flagged variable</param>
/// <param name="flag">Flag to test</param>
/// <returns></returns>
public static bool HasFlag<T>(T bitField, T flag)
where T : struct => HasFlag(Convert.ToInt64(bitField), Convert.ToInt64(flag));
/// <summary>
/// Returns true if the flag is set on the specified bit field.
/// Currently only works with 32-bit bitfields.
/// </summary>
/// <param name="bitField">Flagged variable</param>
/// <param name="flag">Flag to test</param>
/// <returns></returns>
public static bool HasFlag(long bitField, long flag) => ((bitField & flag) == flag);
/// <summary>
/// Sets a bit-field to either on or off for the specified flag.
/// </summary>
/// <param name="bitField">Flagged variable</param>
/// <param name="flag">Flag to change</param>
/// <param name="on">bool</param>
/// <returns>The flagged variable with the flag changed</returns>
public static long SetFlag(long bitField, long flag, bool on)
{
/// <summary>
/// Returns true if the flag is set on the specified bit field.
/// Currently only works with 32-bit bitfields.
/// </summary>
/// <typeparam name="T">Enumeration with Flags attribute</typeparam>
/// <param name="bitField">Flagged variable</param>
/// <param name="flag">Flag to test</param>
/// <returns></returns>
public static bool HasFlag<T>(long bitField, T flag)
where T : struct
if (on)
{
return HasFlag(bitField, flag);
}
/// <summary>
/// Returns true if the flag is set on the specified bit field.
/// Currently only works with 32-bit bitfields.
/// </summary>
/// <typeparam name="T">Enumeration with Flags attribute</typeparam>
/// <param name="bitField">Flagged variable</param>
/// <param name="flag">Flag to test</param>
/// <returns></returns>
public static bool HasFlag<T>(ulong bitField, T flag)
where T : struct
{
return HasFlag(bitField, flag);
}
/// <summary>
/// Returns true if the flag is set on the specified bit field.
/// Currently only works with 32-bit bitfields.
/// </summary>
/// <param name="bitField">Flagged variable</param>
/// <param name="flag">Flag to test</param>
/// <returns></returns>
public static bool HasFlag(ulong bitField, ulong flag)
{
return ((bitField & flag) == flag);
}
public static bool HasFlag(short bitField, short flag)
{
return ((bitField & flag) == flag);
}
/// <summary>
/// Returns true if the flag is set on the specified bit field.
/// Currently only works with 32-bit bitfields.
/// </summary>
/// <typeparam name="T">Enumeration with Flags attribute</typeparam>
/// <param name="bitField">Flagged variable</param>
/// <param name="flag">Flag to test</param>
/// <returns></returns>
public static bool HasFlag<T>(T bitField, T flag)
where T : struct
{
return HasFlag(Convert.ToInt64(bitField), Convert.ToInt64(flag));
}
/// <summary>
/// Returns true if the flag is set on the specified bit field.
/// Currently only works with 32-bit bitfields.
/// </summary>
/// <param name="bitField">Flagged variable</param>
/// <param name="flag">Flag to test</param>
/// <returns></returns>
public static bool HasFlag(long bitField, long flag)
{
return ((bitField & flag) == flag);
}
/// <summary>
/// Sets a bit-field to either on or off for the specified flag.
/// </summary>
/// <param name="bitField">Flagged variable</param>
/// <param name="flag">Flag to change</param>
/// <param name="on">bool</param>
/// <returns>The flagged variable with the flag changed</returns>
public static long SetFlag(long bitField, long flag, bool on)
{
if (on)
{
return bitField | flag;
}
return bitField & (~flag);
}
/// <summary>
/// Sets a bit-field to either on or off for the specified flag.
/// </summary>
/// <typeparam name="T">Enumeration with Flags attribute</typeparam>
/// <param name="bitField">Flagged variable</param>
/// <param name="flag">Flag to change</param>
/// <param name="on">bool</param>
/// <returns>The flagged variable with the flag changed</returns>
public static long SetFlag<T>(T bitField, T flag, bool on)
where T : struct
{
return SetFlag(Convert.ToInt64(bitField), Convert.ToInt64(flag), on);
return bitField | flag;
}
return bitField & (~flag);
}
}
/// <summary>
/// Sets a bit-field to either on or off for the specified flag.
/// </summary>
/// <typeparam name="T">Enumeration with Flags attribute</typeparam>
/// <param name="bitField">Flagged variable</param>
/// <param name="flag">Flag to change</param>
/// <param name="on">bool</param>
/// <returns>The flagged variable with the flag changed</returns>
public static long SetFlag<T>(T bitField, T flag, bool on)
where T : struct => SetFlag(Convert.ToInt64(bitField), Convert.ToInt64(flag), on);
}
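A quick in-assembly sketch of the intended call pattern (the enum and wrapper are illustrative; FlagUtility itself is internal):

using System;

[Flags]
internal enum TestFlags : long
{
    None = 0,
    Encrypted = 0x1,
    Solid = 0x2,
}

internal static class FlagDemo
{
    public static bool Run()
    {
        // SetFlag returns the modified bit field; HasFlag masks and compares.
        var field = FlagUtility.SetFlag(TestFlags.None, TestFlags.Solid, true);
        return FlagUtility.HasFlag(field, (long)TestFlags.Solid); // true
    }
}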

View File

@@ -2,48 +2,44 @@
using System.Collections.Generic;
using System.IO;
namespace SharpCompress.Common.GZip
namespace SharpCompress.Common.GZip;
public class GZipEntry : Entry
{
private readonly GZipFilePart? _filePart;
internal GZipEntry(GZipFilePart? filePart) => _filePart = filePart;
public override CompressionType CompressionType => CompressionType.GZip;
public override long Crc => _filePart?.Crc ?? 0;
public override string? Key => _filePart?.FilePartName;
public override string? LinkTarget => null;
public override long CompressedSize => 0;
public override long Size => _filePart?.UncompressedSize ?? 0;
public override DateTime? LastModifiedTime => _filePart?.DateModified;
public override DateTime? CreatedTime => null;
public override DateTime? LastAccessedTime => null;
public override DateTime? ArchivedTime => null;
public override bool IsEncrypted => false;
public override bool IsDirectory => false;
public override bool IsSplitAfter => false;
internal override IEnumerable<FilePart> Parts => _filePart.Empty();
internal static IEnumerable<GZipEntry> GetEntries(Stream stream, OptionsBase options)
{
private readonly GZipFilePart _filePart;
internal GZipEntry(GZipFilePart filePart)
{
_filePart = filePart;
}
public override CompressionType CompressionType => CompressionType.GZip;
public override long Crc => _filePart.Crc ?? 0;
public override string Key => _filePart.FilePartName;
public override string? LinkTarget => null;
public override long CompressedSize => 0;
public override long Size => _filePart.UncompressedSize ?? 0;
public override DateTime? LastModifiedTime => _filePart.DateModified;
public override DateTime? CreatedTime => null;
public override DateTime? LastAccessedTime => null;
public override DateTime? ArchivedTime => null;
public override bool IsEncrypted => false;
public override bool IsDirectory => false;
public override bool IsSplitAfter => false;
internal override IEnumerable<FilePart> Parts => _filePart.AsEnumerable<FilePart>();
internal static IEnumerable<GZipEntry> GetEntries(Stream stream, OptionsBase options)
{
yield return new GZipEntry(new GZipFilePart(stream, options.ArchiveEncoding));
}
yield return new GZipEntry(new GZipFilePart(stream, options.ArchiveEncoding));
}
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
@@ -6,133 +6,126 @@ using SharpCompress.Common.Tar.Headers;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;
namespace SharpCompress.Common.GZip
namespace SharpCompress.Common.GZip;
internal sealed class GZipFilePart : FilePart
{
private string? _name;
private readonly Stream _stream;
internal GZipFilePart(Stream stream, ArchiveEncoding archiveEncoding)
: base(archiveEncoding)
{
private string? _name;
private readonly Stream _stream;
internal GZipFilePart(Stream stream, ArchiveEncoding archiveEncoding)
: base(archiveEncoding)
_stream = stream;
ReadAndValidateGzipHeader();
if (stream.CanSeek)
{
_stream = stream;
ReadAndValidateGzipHeader();
if (stream.CanSeek)
{
long position = stream.Position;
stream.Position = stream.Length - 8;
ReadTrailer();
stream.Position = position;
}
EntryStartPosition = stream.Position;
var position = stream.Position;
stream.Position = stream.Length - 8;
ReadTrailer();
stream.Position = position;
}
EntryStartPosition = stream.Position;
}
internal long EntryStartPosition { get; }
internal DateTime? DateModified { get; private set; }
internal uint? Crc { get; private set; }
internal uint? UncompressedSize { get; private set; }
internal override string? FilePartName => _name;
internal override Stream GetCompressedStream() =>
new DeflateStream(_stream, CompressionMode.Decompress, CompressionLevel.Default);
internal override Stream GetRawStream() => _stream;
private void ReadTrailer()
{
// Read and potentially verify the GZIP trailer: CRC32 and size mod 2^32
Span<byte> trailer = stackalloc byte[8];
var n = _stream.Read(trailer);
Crc = BinaryPrimitives.ReadUInt32LittleEndian(trailer);
UncompressedSize = BinaryPrimitives.ReadUInt32LittleEndian(trailer.Slice(4));
}
private void ReadAndValidateGzipHeader()
{
// read the header on the first read
Span<byte> header = stackalloc byte[10];
var n = _stream.Read(header);
// workitem 8501: handle edge case (decompress empty stream)
if (n == 0)
{
return;
}
internal long EntryStartPosition { get; }
internal DateTime? DateModified { get; private set; }
internal int? Crc { get; private set; }
internal int? UncompressedSize { get; private set; }
internal override string FilePartName => _name!;
internal override Stream GetCompressedStream()
if (n != 10)
{
return new DeflateStream(_stream, CompressionMode.Decompress, CompressionLevel.Default);
throw new ZlibException("Not a valid GZIP stream.");
}
internal override Stream GetRawStream()
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
{
return _stream;
throw new ZlibException("Bad GZIP header.");
}
private void ReadTrailer()
var timet = BinaryPrimitives.ReadInt32LittleEndian(header.Slice(4));
DateModified = TarHeader.EPOCH.AddSeconds(timet);
if ((header[3] & 0x04) == 0x04)
{
// Read and potentially verify the GZIP trailer: CRC32 and size mod 2^32
Span<byte> trailer = stackalloc byte[8];
int n = _stream.Read(trailer);
// read and discard extra field
n = _stream.Read(header.Slice(0, 2)); // 2-byte length field
Crc = BinaryPrimitives.ReadInt32LittleEndian(trailer);
UncompressedSize = BinaryPrimitives.ReadInt32LittleEndian(trailer.Slice(4));
var extraLength = (short)(header[0] + (header[1] * 256));
var extra = new byte[extraLength];
if (!_stream.ReadFully(extra))
{
throw new ZlibException("Unexpected end-of-file reading GZIP header.");
}
n = extraLength;
}
private void ReadAndValidateGzipHeader()
if ((header[3] & 0x08) == 0x08)
{
// read the header on the first read
Span<byte> header = stackalloc byte[10];
int n = _stream.Read(header);
// workitem 8501: handle edge case (decompress empty stream)
if (n == 0)
{
return;
}
if (n != 10)
{
throw new ZlibException("Not a valid GZIP stream.");
}
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
{
throw new ZlibException("Bad GZIP header.");
}
int timet = BinaryPrimitives.ReadInt32LittleEndian(header.Slice(4));
DateModified = TarHeader.EPOCH.AddSeconds(timet);
if ((header[3] & 0x04) == 0x04)
{
// read and discard extra field
n = _stream.Read(header.Slice(0, 2)); // 2-byte length field
short extraLength = (short)(header[0] + header[1] * 256);
byte[] extra = new byte[extraLength];
if (!_stream.ReadFully(extra))
{
throw new ZlibException("Unexpected end-of-file reading GZIP header.");
}
n = extraLength;
}
if ((header[3] & 0x08) == 0x08)
{
_name = ReadZeroTerminatedString(_stream);
}
if ((header[3] & 0x10) == 0x010)
{
ReadZeroTerminatedString(_stream);
}
if ((header[3] & 0x02) == 0x02)
{
_stream.ReadByte(); // CRC16, ignore
}
_name = ReadZeroTerminatedString(_stream);
}
private string ReadZeroTerminatedString(Stream stream)
if ((header[3] & 0x10) == 0x010)
{
Span<byte> buf1 = stackalloc byte[1];
var list = new List<byte>();
bool done = false;
do
{
// workitem 7740
int n = stream.Read(buf1);
if (n != 1)
{
throw new ZlibException("Unexpected EOF reading GZIP header.");
}
if (buf1[0] == 0)
{
done = true;
}
else
{
list.Add(buf1[0]);
}
}
while (!done);
byte[] buffer = list.ToArray();
return ArchiveEncoding.Decode(buffer);
ReadZeroTerminatedString(_stream);
}
if ((header[3] & 0x02) == 0x02)
{
// FHCRC is a two-byte CRC16 of the header; skip both bytes
_stream.ReadByte();
_stream.ReadByte();
}
}
private string ReadZeroTerminatedString(Stream stream)
{
Span<byte> buf1 = stackalloc byte[1];
var list = new List<byte>();
var done = false;
do
{
// workitem 7740
var n = stream.Read(buf1);
if (n != 1)
{
throw new ZlibException("Unexpected EOF reading GZIP header.");
}
if (buf1[0] == 0)
{
done = true;
}
else
{
list.Add(buf1[0]);
}
} while (!done);
var buffer = list.ToArray();
return ArchiveEncoding.Decode(buffer);
}
}

View File

@@ -1,23 +1,17 @@
using System.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common.GZip
namespace SharpCompress.Common.GZip;
public class GZipVolume : Volume
{
public GZipVolume(Stream stream, ReaderOptions options)
: base(stream, options)
{
}
public GZipVolume(Stream stream, ReaderOptions? options, int index)
: base(stream, options, index) { }
public GZipVolume(FileInfo fileInfo, ReaderOptions options)
: base(fileInfo.OpenRead(), options)
{
options.LeaveStreamOpen = false;
}
public GZipVolume(FileInfo fileInfo, ReaderOptions options)
: base(fileInfo.OpenRead(), options) => options.LeaveStreamOpen = false;
public override bool IsFirstVolume => true;
public override bool IsMultiVolume => true;
}
}
public override bool IsMultiVolume => true;
}

View File

@@ -1,45 +1,48 @@
using System.IO;
namespace SharpCompress.Common
namespace SharpCompress.Common;
internal static class EntryExtensions
{
internal static void PreserveExtractionOptions(
this IEntry entry,
string destinationFileName,
ExtractionOptions options
)
{
internal static void PreserveExtractionOptions(this IEntry entry, string destinationFileName,
ExtractionOptions options)
if (options.PreserveFileTime || options.PreserveAttributes)
{
if (options.PreserveFileTime || options.PreserveAttributes)
var nf = new FileInfo(destinationFileName);
if (!nf.Exists)
{
FileInfo nf = new FileInfo(destinationFileName);
if (!nf.Exists)
return;
}
// update file time to original packed time
if (options.PreserveFileTime)
{
if (entry.CreatedTime.HasValue)
{
return;
nf.CreationTime = entry.CreatedTime.Value;
}
// update file time to original packed time
if (options.PreserveFileTime)
if (entry.LastModifiedTime.HasValue)
{
if (entry.CreatedTime.HasValue)
{
nf.CreationTime = entry.CreatedTime.Value;
}
if (entry.LastModifiedTime.HasValue)
{
nf.LastWriteTime = entry.LastModifiedTime.Value;
}
if (entry.LastAccessedTime.HasValue)
{
nf.LastAccessTime = entry.LastAccessedTime.Value;
}
nf.LastWriteTime = entry.LastModifiedTime.Value;
}
if (options.PreserveAttributes)
if (entry.LastAccessedTime.HasValue)
{
if (entry.Attrib.HasValue)
{
nf.Attributes = (FileAttributes)System.Enum.ToObject(typeof(FileAttributes), entry.Attrib.Value);
}
nf.LastAccessTime = entry.LastAccessedTime.Value;
}
}
if (options.PreserveAttributes)
{
if (entry.Attrib.HasValue)
{
nf.Attributes = (FileAttributes)
System.Enum.ToObject(typeof(FileAttributes), entry.Attrib.Value);
}
}
}

View File

@@ -1,23 +1,24 @@
using System;
namespace SharpCompress.Common
namespace SharpCompress.Common;
public interface IEntry
{
CompressionType CompressionType { get; }
DateTime? ArchivedTime { get; }
long CompressedSize { get; }
long Crc { get; }
DateTime? CreatedTime { get; }
string Key { get; }
string? LinkTarget { get; }
bool IsDirectory { get; }
bool IsEncrypted { get; }
bool IsSplitAfter { get; }
bool IsSolid { get; }
DateTime? LastAccessedTime { get; }
DateTime? LastModifiedTime { get; }
long Size { get; }
int? Attrib { get; }
}
}
CompressionType CompressionType { get; }
DateTime? ArchivedTime { get; }
long CompressedSize { get; }
long Crc { get; }
DateTime? CreatedTime { get; }
string? Key { get; }
string? LinkTarget { get; }
bool IsDirectory { get; }
bool IsEncrypted { get; }
bool IsSplitAfter { get; }
bool IsSolid { get; }
int VolumeIndexFirst { get; }
int VolumeIndexLast { get; }
DateTime? LastAccessedTime { get; }
DateTime? LastModifiedTime { get; }
long Size { get; }
int? Attrib { get; }
}

View File

@@ -1,8 +1,7 @@
namespace SharpCompress.Common
namespace SharpCompress.Common;
public interface IExtractionListener
{
internal interface IExtractionListener
{
void FireFilePartExtractionBegin(string name, long size, long compressedSize);
void FireCompressedBytesRead(long currentPartCompressedBytes, long compressedReadBytes);
}
}
void FireFilePartExtractionBegin(string name, long size, long compressedSize);
void FireCompressedBytesRead(long currentPartCompressedBytes, long compressedReadBytes);
}

View File

@@ -1,8 +1,10 @@
using System;
namespace SharpCompress.Common
namespace SharpCompress.Common;
public interface IVolume : IDisposable
{
}
}
int Index { get; }
string? FileName { get; }
}

View File

@@ -1,10 +0,0 @@
namespace SharpCompress.Common
{
public class IncompleteArchiveException : ArchiveException
{
public IncompleteArchiveException(string message)
: base(message)
{
}
}
}

View File

@@ -1,17 +0,0 @@
using System;
namespace SharpCompress.Common
{
public class InvalidFormatException : ExtractionException
{
public InvalidFormatException(string message)
: base(message)
{
}
public InvalidFormatException(string message, Exception inner)
: base(message, inner)
{
}
}
}

View File

@@ -1,17 +0,0 @@
using System;
namespace SharpCompress.Common
{
public class MultiVolumeExtractionException : ExtractionException
{
public MultiVolumeExtractionException(string message)
: base(message)
{
}
public MultiVolumeExtractionException(string message, Exception inner)
: base(message, inner)
{
}
}
}

View File

@@ -1,10 +0,0 @@
namespace SharpCompress.Common
{
public class MultipartStreamRequiredException : ExtractionException
{
public MultipartStreamRequiredException(string message)
: base(message)
{
}
}
}

View File

@@ -1,13 +1,11 @@
namespace SharpCompress.Common
{
public class OptionsBase
{
/// <summary>
/// SharpCompress will keep the supplied streams open. Default is true.
/// </summary>
public bool LeaveStreamOpen { get; set; } = true;
namespace SharpCompress.Common;
public ArchiveEncoding ArchiveEncoding { get; set; } = new ArchiveEncoding();
}
}
public class OptionsBase
{
/// <summary>
/// SharpCompress will keep the supplied streams open. Default is true.
/// </summary>
public bool LeaveStreamOpen { get; set; } = true;
public ArchiveEncoding ArchiveEncoding { get; set; } = new();
}

View File

@@ -1,17 +0,0 @@
using System;
namespace SharpCompress.Common
{
public class PasswordProtectedException : ExtractionException
{
public PasswordProtectedException(string message)
: base(message)
{
}
public PasswordProtectedException(string message, Exception inner)
: base(message, inner)
{
}
}
}

View File

@@ -0,0 +1,84 @@
#nullable disable
using System.Security.Cryptography;
using System.Text;
using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Common.Rar;
internal class CryptKey3 : ICryptKey
{
const int AES_128 = 128;
private string _password;
public CryptKey3(string password) => _password = password ?? "";
public ICryptoTransform Transformer(byte[] salt)
{
var aesIV = new byte[EncryptionConstV5.SIZE_INITV];
var rawLength = 2 * _password.Length;
var rawPassword = new byte[rawLength + EncryptionConstV5.SIZE_SALT30];
var passwordBytes = Encoding.UTF8.GetBytes(_password);
for (var i = 0; i < _password.Length; i++)
{
rawPassword[i * 2] = passwordBytes[i];
rawPassword[(i * 2) + 1] = 0;
}
for (var i = 0; i < salt.Length; i++)
{
rawPassword[i + rawLength] = salt[i];
}
var msgDigest = SHA1.Create();
const int noOfRounds = (1 << 18);
const int iblock = 3;
byte[] digest;
var data = new byte[(rawPassword.Length + iblock) * noOfRounds];
//TODO slow code below, find ways to optimize
for (var i = 0; i < noOfRounds; i++)
{
rawPassword.CopyTo(data, i * (rawPassword.Length + iblock));
data[(i * (rawPassword.Length + iblock)) + rawPassword.Length + 0] = (byte)i;
data[(i * (rawPassword.Length + iblock)) + rawPassword.Length + 1] = (byte)(i >> 8);
data[(i * (rawPassword.Length + iblock)) + rawPassword.Length + 2] = (byte)(i >> 16);
if (i % (noOfRounds / EncryptionConstV5.SIZE_INITV) == 0)
{
digest = msgDigest.ComputeHash(data, 0, (i + 1) * (rawPassword.Length + iblock));
aesIV[i / (noOfRounds / EncryptionConstV5.SIZE_INITV)] = digest[19];
}
}
digest = msgDigest.ComputeHash(data);
//slow code ends
var aesKey = new byte[EncryptionConstV5.SIZE_INITV];
for (var i = 0; i < 4; i++)
{
for (var j = 0; j < 4; j++)
{
aesKey[(i * 4) + j] = (byte)(
(
((digest[i * 4] * 0x1000000) & 0xff000000)
| (uint)((digest[(i * 4) + 1] * 0x10000) & 0xff0000)
| (uint)((digest[(i * 4) + 2] * 0x100) & 0xff00)
| (uint)(digest[(i * 4) + 3] & 0xff)
) >> (j * 8)
);
}
}
var aes = Aes.Create();
aes.KeySize = AES_128;
aes.Mode = CipherMode.CBC;
aes.Padding = PaddingMode.None;
aes.Key = aesKey;
aes.IV = aesIV;
return aes.CreateDecryptor();
}
}

View File

@@ -0,0 +1,95 @@
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Common.Rar;
internal class CryptKey5 : ICryptKey
{
const int AES_256 = 256;
const int DERIVED_KEY_LENGTH = 0x10;
const int SHA256_DIGEST_SIZE = 32;
private string _password;
private Rar5CryptoInfo _cryptoInfo;
private byte[] _pswCheck = { };
private byte[] _hashKey = { };
public CryptKey5(string? password, Rar5CryptoInfo rar5CryptoInfo)
{
_password = password ?? "";
_cryptoInfo = rar5CryptoInfo;
}
public byte[] PswCheck => _pswCheck;
public byte[] HashKey => _hashKey;
private static List<byte[]> GenerateRarPBKDF2Key(
string password,
byte[] salt,
int iterations,
int keyLength
)
{
using var hmac = new HMACSHA256(Encoding.UTF8.GetBytes(password));
var block = hmac.ComputeHash(salt);
var finalHash = (byte[])block.Clone();
var loop = new int[] { iterations, 17, 17 };
var res = new List<byte[]> { };
for (var x = 0; x < 3; x++)
{
for (var i = 1; i < loop[x]; i++)
{
block = hmac.ComputeHash(block);
for (var j = 0; j < finalHash.Length; j++)
{
finalHash[j] ^= block[j];
}
}
res.Add((byte[])finalHash.Clone());
}
return res;
}
public ICryptoTransform Transformer(byte[] salt)
{
var iterations = 1 << _cryptoInfo.LG2Count; // iteration count is 2^LG2Count, read from the archive header
var salt_rar5 = salt.Concat(new byte[] { 0, 0, 0, 1 });
var derivedKey = GenerateRarPBKDF2Key(
_password,
salt_rar5.ToArray(),
iterations,
DERIVED_KEY_LENGTH
);
_hashKey = derivedKey[1];
_pswCheck = new byte[EncryptionConstV5.SIZE_PSWCHECK];
for (var i = 0; i < SHA256_DIGEST_SIZE; i++)
{
_pswCheck[i % EncryptionConstV5.SIZE_PSWCHECK] ^= derivedKey[2][i];
}
if (_cryptoInfo.UsePswCheck && !_cryptoInfo.PswCheck.SequenceEqual(_pswCheck))
{
throw new CryptographicException("The password did not match.");
}
var aes = Aes.Create();
aes.KeySize = AES_256;
aes.Mode = CipherMode.CBC;
aes.Padding = PaddingMode.None;
aes.Key = derivedKey[0];
aes.IV = _cryptoInfo.InitV;
return aes.CreateDecryptor();
}
}
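
For clarity, the password-check loop in `Transformer` XOR-folds the 32-byte third derived value down to the 8-byte check stored in the archive. A standalone sketch of the same folding, with sizes taken from the constants in this file:

```csharp
// XOR-fold a SHA256_DIGEST_SIZE (32) byte value into SIZE_PSWCHECK (8) bytes.
static byte[] FoldPswCheck(byte[] derived)
{
    var check = new byte[8];
    for (var i = 0; i < derived.Length; i++)
    {
        check[i % check.Length] ^= derived[i];
    }
    return check;
}
```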

View File

@@ -1,32 +1,31 @@
using SharpCompress.IO;

namespace SharpCompress.Common.Rar.Headers;

internal class AvHeader : RarHeader
{
    public AvHeader(RarHeader header, RarCrcBinaryReader reader)
        : base(header, reader, HeaderType.Av)
    {
        if (IsRar5)
        {
            throw new InvalidFormatException("unexpected rar5 record");
        }
    }

    protected override void ReadFinish(MarkingBinaryReader reader)
    {
        UnpackVersion = reader.ReadByte();
        Method = reader.ReadByte();
        AvVersion = reader.ReadByte();
        AvInfoCrc = reader.ReadInt32();
    }

    internal int AvInfoCrc { get; private set; }

    internal byte UnpackVersion { get; private set; }

    internal byte Method { get; private set; }

    internal byte AvVersion { get; private set; }
}

View File

@@ -1,59 +1,16 @@
#nullable disable

using SharpCompress.IO;

namespace SharpCompress.Common.Rar.Headers;

internal class ArchiveCryptHeader : RarHeader
{
    public ArchiveCryptHeader(RarHeader header, RarCrcBinaryReader reader)
        : base(header, reader, HeaderType.Crypt) { }

    public Rar5CryptoInfo CryptInfo = new();

    protected override void ReadFinish(MarkingBinaryReader reader) =>
        CryptInfo = new Rar5CryptoInfo(reader, false);
}
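
The field parsing this header previously did inline (crypt version vint, encryption-flags vint, log2 iteration count, 16-byte salt, optional 8-byte password check plus 4-byte checksum) now lives in `Rar5CryptoInfo`. A hypothetical sketch of that read order, reconstructed from the code this diff removes; the actual `Rar5CryptoInfo` constructor may validate more:

```csharp
// Hypothetical reconstruction of what Rar5CryptoInfo(reader, false) consumes.
var version = reader.ReadRarVIntUInt32();   // CRYPT_VERSION, currently 0
var flags = reader.ReadRarVIntUInt32();     // e.g. CHFL_CRYPT_PSWCHECK
var lg2Count = reader.ReadRarVIntByte(1);   // log2 of the PBKDF2 iteration count
var salt = reader.ReadBytes(16);            // SIZE_SALT50
if (FlagUtility.HasFlag(flags, EncryptionFlagsV5.CHFL_CRYPT_PSWCHECK))
{
    var pswCheck = reader.ReadBytes(8);     // SIZE_PSWCHECK
    var pswCheckCsum = reader.ReadBytes(4); // SIZE_PSWCHECK_CSUM
}
```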

View File

@@ -1,88 +1,59 @@
using SharpCompress.IO;

namespace SharpCompress.Common.Rar.Headers;

internal sealed class ArchiveHeader : RarHeader
{
    public ArchiveHeader(RarHeader header, RarCrcBinaryReader reader)
        : base(header, reader, HeaderType.Archive) { }

    protected override void ReadFinish(MarkingBinaryReader reader)
    {
        if (IsRar5)
        {
            Flags = reader.ReadRarVIntUInt16();
            if (HasFlag(ArchiveFlagsV5.HAS_VOLUME_NUMBER))
            {
                VolumeNumber = (int)reader.ReadRarVIntUInt32();
            }
            // later: we may have a locator record if we need it
            //if (ExtraSize != 0) {
            //    ReadLocator(reader);
            //}
        }
        else
        {
            Flags = HeaderFlags;
            HighPosAv = reader.ReadInt16();
            PosAv = reader.ReadInt32();
            if (HasFlag(ArchiveFlagsV4.ENCRYPT_VER))
            {
                EncryptionVersion = reader.ReadByte();
            }
        }
    }

    private ushort Flags { get; set; }

    private bool HasFlag(ushort flag) => (Flags & flag) == flag;

    internal int? VolumeNumber { get; private set; }

    internal short? HighPosAv { get; private set; }

    internal int? PosAv { get; private set; }

    private byte? EncryptionVersion { get; set; }

    public bool? IsEncrypted => IsRar5 ? null : HasFlag(ArchiveFlagsV4.PASSWORD);

    public bool OldNumberingFormat => !IsRar5 && !HasFlag(ArchiveFlagsV4.NEW_NUMBERING);

    public bool IsVolume => HasFlag(IsRar5 ? ArchiveFlagsV5.VOLUME : ArchiveFlagsV4.VOLUME);

    // RAR5: Volume number field is present. True for all volumes except first.
    public bool IsFirstVolume =>
        IsRar5 ? VolumeNumber is null : HasFlag(ArchiveFlagsV4.FIRST_VOLUME);

    public bool IsSolid => HasFlag(IsRar5 ? ArchiveFlagsV5.SOLID : ArchiveFlagsV4.SOLID);
}

View File

@@ -1,31 +1,30 @@
using SharpCompress.IO;

namespace SharpCompress.Common.Rar.Headers;

internal class CommentHeader : RarHeader
{
    protected CommentHeader(RarHeader header, RarCrcBinaryReader reader)
        : base(header, reader, HeaderType.Comment)
    {
        if (IsRar5)
        {
            throw new InvalidFormatException("unexpected rar5 record");
        }
    }

    protected override void ReadFinish(MarkingBinaryReader reader)
    {
        UnpSize = reader.ReadInt16();
        UnpVersion = reader.ReadByte();
        UnpMethod = reader.ReadByte();
        CommCrc = reader.ReadInt16();
    }

    internal short UnpSize { get; private set; }

    internal byte UnpVersion { get; private set; }

    internal byte UnpMethod { get; private set; }

    internal short CommCrc { get; private set; }
}

View File

@@ -1,43 +1,37 @@
using SharpCompress.IO;

namespace SharpCompress.Common.Rar.Headers;

internal class EndArchiveHeader : RarHeader
{
    public EndArchiveHeader(RarHeader header, RarCrcBinaryReader reader)
        : base(header, reader, HeaderType.EndArchive) { }

    protected override void ReadFinish(MarkingBinaryReader reader)
    {
        if (IsRar5)
        {
            Flags = reader.ReadRarVIntUInt16();
        }
        else
        {
            Flags = HeaderFlags;
            if (HasFlag(EndArchiveFlagsV4.DATA_CRC))
            {
                ArchiveCrc = reader.ReadInt32();
            }
            if (HasFlag(EndArchiveFlagsV4.VOLUME_NUMBER))
            {
                VolumeNumber = reader.ReadInt16();
            }
        }
    }

    private ushort Flags { get; set; }

    private bool HasFlag(ushort flag) => (Flags & flag) == flag;

    internal int? ArchiveCrc { get; private set; }

    internal short? VolumeNumber { get; private set; }
}

View File

@@ -1,8 +1,9 @@
using System;
using System.IO;
using System.Linq;
using System.Text;
using SharpCompress.IO;
#if !Rar2017_64bit
using nint = System.Int32;
using nuint = System.UInt32;
using size_t = System.UInt32;
#else
using nint = System.Int64;
using nuint = System.UInt64;
@@ -10,447 +11,476 @@ using nuint = System.UInt64;
using size_t = System.UInt64;
#endif

namespace SharpCompress.Common.Rar.Headers;

internal class FileHeader : RarHeader
{
    private byte[]? _hash;

    public FileHeader(RarHeader header, RarCrcBinaryReader reader, HeaderType headerType)
        : base(header, reader, headerType) { }

    protected override void ReadFinish(MarkingBinaryReader reader)
    {
        if (IsRar5)
        {
            ReadFromReaderV5(reader);
        }
        else
        {
            ReadFromReaderV4(reader);
        }
    }

    private void ReadFromReaderV5(MarkingBinaryReader reader)
    {
        Flags = reader.ReadRarVIntUInt16();
        var lvalue = checked((long)reader.ReadRarVInt());
        // long.MaxValue causes the unpack code to finish when the input stream is exhausted
        UncompressedSize = HasFlag(FileFlagsV5.UNPACKED_SIZE_UNKNOWN) ? long.MaxValue : lvalue;
        FileAttributes = reader.ReadRarVIntUInt32();
        if (HasFlag(FileFlagsV5.HAS_MOD_TIME))
        {
            FileLastModifiedTime = Utility.UnixTimeToDateTime(reader.ReadUInt32());
        }
        if (HasFlag(FileFlagsV5.HAS_CRC32))
        {
            FileCrc = reader.ReadBytes(4);
        }
        var compressionInfo = reader.ReadRarVIntUInt16();
        // Lower 6 bits (0x003f mask) contain the version of the compression algorithm, resulting in possible 0 - 63 values. Current version is 0.
        // "+ 50" to not mix with old RAR format algorithms. For example,
        // we may need to use compression algorithm 15 in the future,
        // but it was already used in RAR 1.5 and Unpack needs to distinguish them.
        CompressionAlgorithm = (byte)((compressionInfo & 0x3f) + 50);
        // 7th bit (0x0040) defines the solid flag. If it is set, RAR continues to use the compression dictionary left after processing preceding files.
        // It can be set only for file headers and is never set for service headers.
        IsSolid = (compressionInfo & 0x40) == 0x40;
        // Bits 8 - 10 (0x0380 mask) define the compression method. Currently only values 0 - 5 are used. 0 means no compression.
        CompressionMethod = (byte)((compressionInfo >> 7) & 0x7);
        // Bits 11 - 14 (0x3c00 mask) define the minimum dictionary size required to extract data. Value 0 means 128 KB, 1 - 256 KB, ..., 14 - 2048 MB, 15 - 4096 MB.
        WindowSize = IsDirectory ? 0 : ((size_t)0x20000) << ((compressionInfo >> 10) & 0xf);
        HostOs = reader.ReadRarVIntByte();
        var nameSize = reader.ReadRarVIntUInt16();
        // Variable length field containing Name length bytes in UTF-8 format without a trailing zero.
        // For file headers this is the name of the archived file. The forward slash is used as the path separator both for Unix and Windows names.
        // Backslashes are treated as part of the name for Unix names and as an invalid character for Windows file names. The type of name is defined by the Host OS field.
        //
        // TODO: not sure if anything needs to be done to handle the following:
        // If a Unix file name contains any high ASCII characters which cannot be correctly converted to Unicode and UTF-8,
        // we map such characters to the 0xE080 - 0xE0FF private use Unicode area and insert the 0xFFFE Unicode non-character
        // into the resulting string to indicate that it contains mapped characters, which need to be converted back when extracting.
        // The concrete position of 0xFFFE is not defined; we need to search the entire string for it. Such mapped names are not
        // portable and can be correctly unpacked only on the same system where they were created.
        //
        // For service headers this field contains the name of the service header. The following names are currently used:
        // CMT Archive comment
        // QO Archive quick open data
        // ACL NTFS file permissions
        // STM NTFS alternate data stream
        // RR Recovery record
        var b = reader.ReadBytes(nameSize);
        FileName = ConvertPathV5(Encoding.UTF8.GetString(b, 0, b.Length));
        // extra size seems to be redundant since we know the total header size
        if (ExtraSize != RemainingHeaderBytes(reader))
        {
            throw new InvalidFormatException("rar5 header size / extra size inconsistency");
        }

        const ushort FHEXTRA_CRYPT = 0x01;
        const ushort FHEXTRA_HASH = 0x02;
        const ushort FHEXTRA_HTIME = 0x03;
        // const ushort FHEXTRA_VERSION = 0x04;
        const ushort FHEXTRA_REDIR = 0x05;
        // const ushort FHEXTRA_UOWNER = 0x06;
        // const ushort FHEXTRA_SUBDATA = 0x07;

        while (RemainingHeaderBytes(reader) > 0)
        {
            var size = reader.ReadRarVIntUInt16();
            var n = RemainingHeaderBytes(reader);
            var type = reader.ReadRarVIntUInt16();
            switch (type)
            {
                case FHEXTRA_CRYPT: // file encryption
                {
                    Rar5CryptoInfo = new Rar5CryptoInfo(reader, true);
                    if (Rar5CryptoInfo.PswCheck.All(singleByte => singleByte == 0))
                    {
                        Rar5CryptoInfo = null;
                    }
                }
                break;
                case FHEXTRA_HASH:
                {
                    const uint FHEXTRA_HASH_BLAKE2 = 0x0;
                    // const uint HASH_BLAKE2 = 0x03;
                    const int BLAKE2_DIGEST_SIZE = 0x20;
                    if ((uint)reader.ReadRarVInt() == FHEXTRA_HASH_BLAKE2)
                    {
                        // var hash = HASH_BLAKE2;
                        _hash = reader.ReadBytes(BLAKE2_DIGEST_SIZE);
                    }
                    // enum HASH_TYPE {HASH_NONE,HASH_RAR14,HASH_CRC32,HASH_BLAKE2};
                }
                break;
                case FHEXTRA_HTIME: // file time
                {
                    var flags = reader.ReadRarVIntUInt16();
                    var isWindowsTime = (flags & 1) == 0;
                    if ((flags & 0x2) == 0x2)
                    {
                        FileLastModifiedTime = ReadExtendedTimeV5(reader, isWindowsTime);
                    }
                    if ((flags & 0x4) == 0x4)
                    {
                        FileCreatedTime = ReadExtendedTimeV5(reader, isWindowsTime);
                    }
                    if ((flags & 0x8) == 0x8)
                    {
                        FileLastAccessedTime = ReadExtendedTimeV5(reader, isWindowsTime);
                    }
                }
                break;
                //TODO
                // case FHEXTRA_VERSION: // file version
                // {
                //
                // }
                // break;
                case FHEXTRA_REDIR: // file system redirection
                {
                    RedirType = reader.ReadRarVIntByte();
                    RedirFlags = reader.ReadRarVIntByte();
                    var nn = reader.ReadRarVIntUInt16();
                    var bb = reader.ReadBytes(nn);
                    RedirTargetName = ConvertPathV5(Encoding.UTF8.GetString(bb, 0, bb.Length));
                }
                break;
                // case FHEXTRA_UOWNER: // unix owner
                // {
                //
                // }
                // break;
                // case FHEXTRA_SUBDATA: // service data
                // {
                //
                // }
                // break;
                default:
                    // skip unknown record types to allow new record types to be added in the future
                    //Console.WriteLine($"unhandled rar header field type {type}");
                    break;
            }
            // drain any trailing bytes of the extra record
            var did = n - RemainingHeaderBytes(reader);
            var drain = size - did;
            if (drain > 0)
            {
                reader.ReadBytes(drain);
            }
        }
        if (AdditionalDataSize != 0)
        {
            CompressedSize = AdditionalDataSize;
        }
    }

    private static DateTime ReadExtendedTimeV5(MarkingBinaryReader reader, bool isWindowsTime)
    {
        if (isWindowsTime)
        {
            return DateTime.FromFileTime(reader.ReadInt64());
        }
        else
        {
            return Utility.UnixTimeToDateTime(reader.ReadUInt32());
        }
    }

    private static string ConvertPathV5(string path)
    {
        if (Path.DirectorySeparatorChar == '\\')
        {
            // replace embedded \\ with a valid filename char
            return path.Replace('\\', '-').Replace('/', '\\');
        }
        return path;
    }

    private void ReadFromReaderV4(MarkingBinaryReader reader)
    {
        Flags = HeaderFlags;
        IsSolid = HasFlag(FileFlagsV4.SOLID);
        WindowSize = IsDirectory
            ? 0U
            : ((size_t)0x10000) << ((Flags & FileFlagsV4.WINDOW_MASK) >> 5);
        var lowUncompressedSize = reader.ReadUInt32();
        HostOs = reader.ReadByte();
        FileCrc = reader.ReadBytes(4);
        FileLastModifiedTime = Utility.DosDateToDateTime(reader.ReadUInt32());
        CompressionAlgorithm = reader.ReadByte();
        CompressionMethod = (byte)(reader.ReadByte() - 0x30);
        var nameSize = reader.ReadInt16();
        FileAttributes = reader.ReadUInt32();
        uint highCompressedSize = 0;
        uint highUncompressedSize = 0;
        if (HasFlag(FileFlagsV4.LARGE))
        {
            highCompressedSize = reader.ReadUInt32();
            highUncompressedSize = reader.ReadUInt32();
        }
        else
        {
            if (lowUncompressedSize == 0xffffffff)
            {
                lowUncompressedSize = 0xffffffff;
                highUncompressedSize = int.MaxValue;
            }
        }
        CompressedSize = UInt32To64(highCompressedSize, checked((uint)AdditionalDataSize));
        UncompressedSize = UInt32To64(highUncompressedSize, lowUncompressedSize);
        nameSize = nameSize > 4 * 1024 ? (short)(4 * 1024) : nameSize;
        var fileNameBytes = reader.ReadBytes(nameSize);
        const int newLhdSize = 32;
        switch (HeaderCode)
        {
            case HeaderCodeV.RAR4_FILE_HEADER:
            {
                if (HasFlag(FileFlagsV4.UNICODE))
                {
                    var length = 0;
                    while (length < fileNameBytes.Length && fileNameBytes[length] != 0)
                    {
                        length++;
                    }
                    if (length != nameSize)
                    {
                        length++;
                        FileName = FileNameDecoder.Decode(fileNameBytes, length);
                    }
                    else
                    {
                        FileName = ArchiveEncoding.Decode(fileNameBytes);
                    }
                }
                else
                {
                    FileName = ArchiveEncoding.Decode(fileNameBytes);
                }
                FileName = ConvertPathV4(FileName);
            }
            break;
            case HeaderCodeV.RAR4_NEW_SUB_HEADER:
            {
                var datasize = HeaderSize - newLhdSize - nameSize;
                if (HasFlag(FileFlagsV4.SALT))
                {
                    datasize -= EncryptionConstV5.SIZE_SALT30;
                }
                if (datasize > 0)
                {
                    SubData = reader.ReadBytes(datasize);
                }
                if (NewSubHeaderType.SUBHEAD_TYPE_RR.Equals(fileNameBytes))
                {
                    if (SubData is null)
                    {
                        throw new InvalidFormatException();
                    }
                    RecoverySectors =
                        SubData[8]
                        + (SubData[9] << 8)
                        + (SubData[10] << 16)
                        + (SubData[11] << 24);
                }
            }
            break;
        }
        if (HasFlag(FileFlagsV4.SALT))
        {
            R4Salt = reader.ReadBytes(EncryptionConstV5.SIZE_SALT30);
        }
        if (HasFlag(FileFlagsV4.EXT_TIME))
        {
            // verify that the end of the header hasn't been reached before reading the Extended Time.
            // some tools incorrectly omit Extended Time despite specifying FileFlags.EXTTIME, which most parsers tolerate.
            if (RemainingHeaderBytes(reader) >= 2)
            {
                var extendedFlags = reader.ReadUInt16();
                if (FileLastModifiedTime is not null)
                {
                    FileLastModifiedTime = ProcessExtendedTimeV4(
                        extendedFlags,
                        FileLastModifiedTime,
                        reader,
                        0
                    );
                }
                FileCreatedTime = ProcessExtendedTimeV4(extendedFlags, null, reader, 1);
                FileLastAccessedTime = ProcessExtendedTimeV4(extendedFlags, null, reader, 2);
                FileArchivedTime = ProcessExtendedTimeV4(extendedFlags, null, reader, 3);
            }
        }
    }

    private static long UInt32To64(uint x, uint y)
    {
        long l = x;
        l <<= 32;
        return l + y;
    }

    private static DateTime? ProcessExtendedTimeV4(
        ushort extendedFlags,
        DateTime? time,
        MarkingBinaryReader reader,
        int i
    )
    {
        var rmode = (uint)extendedFlags >> ((3 - i) * 4);
        if ((rmode & 8) == 0)
        {
            return null;
        }
        if (i != 0)
        {
            var dosTime = reader.ReadUInt32();
            time = Utility.DosDateToDateTime(dosTime);
        }
        if ((rmode & 4) == 0 && time is not null)
        {
            time = time.Value.AddSeconds(1);
        }
        uint nanosecondHundreds = 0;
        var count = (int)rmode & 3;
        for (var j = 0; j < count; j++)
        {
            var b = reader.ReadByte();
            nanosecondHundreds |= (((uint)b) << ((j + 3 - count) * 8));
        }
        // nanosecondHundreds is in 100 ns (10^-7 s) units; scale by 10^-4 to get milliseconds (10^-3 s)
        if (time is not null)
        {
            return time.Value.AddMilliseconds(nanosecondHundreds * Math.Pow(10, -4));
        }
        return null;
    }

    private static string ConvertPathV4(string path)
    {
        if (Path.DirectorySeparatorChar == '/')
        {
            return path.Replace('\\', '/');
        }
        else if (Path.DirectorySeparatorChar == '\\')
        {
            return path.Replace('/', '\\');
        }
        return path;
    }

    public override string ToString() => FileName ?? "FileHeader";

    private ushort Flags { get; set; }

    private bool HasFlag(ushort flag) => (Flags & flag) == flag;

    internal byte[]? FileCrc
    {
        get => _hash;
        private set => _hash = value;
    }

    // 0 - storing
    // 1 - fastest compression
    // 2 - fast compression
    // 3 - normal compression
    // 4 - good compression
    // 5 - best compression
    internal byte CompressionMethod { get; private set; }

    internal bool IsStored => CompressionMethod == 0;

    // eg (see DoUnpack())
    //case 15: // rar 1.5 compression
    //case 20: // rar 2.x compression
    //case 26: // files larger than 2GB
    //case 29: // rar 3.x compression
    //case 50: // RAR 5.0 compression algorithm.
    internal byte CompressionAlgorithm { get; private set; }

    public bool IsSolid { get; private set; }

    public byte RedirType { get; private set; }

    public bool IsRedir => RedirType != 0;

    public byte RedirFlags { get; private set; }

    public bool IsRedirDirectory => (RedirFlags & RedirFlagV5.DIRECTORY) != 0;

    public string? RedirTargetName { get; private set; }

    // unused for UnpackV1 implementation (limitation)
    internal size_t WindowSize { get; private set; }

    internal byte[]? R4Salt { get; private set; }

    internal Rar5CryptoInfo? Rar5CryptoInfo { get; private set; }

    private byte HostOs { get; set; }

    internal uint FileAttributes { get; private set; }

    internal long CompressedSize { get; private set; }

    internal long UncompressedSize { get; private set; }

    internal string? FileName { get; private set; }

    internal byte[]? SubData { get; private set; }

    internal int RecoverySectors { get; private set; }

    internal long DataStartPosition { get; set; }

    public Stream? PackedStream { get; set; }

    public bool IsSplitBefore =>
        IsRar5 ? HasHeaderFlag(HeaderFlagsV5.SPLIT_BEFORE) : HasFlag(FileFlagsV4.SPLIT_BEFORE);

    public bool IsSplitAfter =>
        IsRar5 ? HasHeaderFlag(HeaderFlagsV5.SPLIT_AFTER) : HasFlag(FileFlagsV4.SPLIT_AFTER);

    public bool IsDirectory => HasFlag(IsRar5 ? FileFlagsV5.DIRECTORY : FileFlagsV4.DIRECTORY);

    public bool IsEncrypted => IsRar5 ? Rar5CryptoInfo != null : HasFlag(FileFlagsV4.PASSWORD);

    internal DateTime? FileLastModifiedTime { get; private set; }

    internal DateTime? FileCreatedTime { get; private set; }

    internal DateTime? FileLastAccessedTime { get; private set; }

    internal DateTime? FileArchivedTime { get; private set; }
}
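
As a worked example of the compressionInfo bit layout documented in ReadFromReaderV5 (the value 0x0CC0 here is hypothetical):

```csharp
const ushort compressionInfo = 0x0CC0; // hypothetical sample value
var algorithm = (byte)((compressionInfo & 0x3f) + 50);        // 0 + 50 -> RAR 5.0 algorithm
var isSolid = (compressionInfo & 0x40) == 0x40;               // bit set -> solid
var method = (byte)((compressionInfo >> 7) & 0x7);            // 1 -> fastest compression
var windowSize = 0x20000L << ((compressionInfo >> 10) & 0xf); // 0x20000 << 3 = 1 MB dictionary
```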

View File

@@ -1,78 +1,78 @@
using System.Text;

namespace SharpCompress.Common.Rar.Headers;

/// <summary>
/// This is for the crazy Rar encoding that I don't understand
/// </summary>
internal static class FileNameDecoder
{
    internal static int GetChar(byte[] name, int pos) => name[pos] & 0xff;

    internal static string Decode(byte[] name, int encPos)
    {
        var decPos = 0;
        var flags = 0;
        var flagBits = 0;

        var low = 0;
        var high = 0;
        var highByte = GetChar(name, encPos++);
        var buf = new StringBuilder();
        while (encPos < name.Length)
        {
            if (flagBits == 0)
            {
                flags = GetChar(name, encPos++);
                flagBits = 8;
            }
            switch (flags >> 6)
            {
                case 0:
                    buf.Append((char)(GetChar(name, encPos++)));
                    ++decPos;
                    break;
                case 1:
                    buf.Append((char)(GetChar(name, encPos++) + (highByte << 8)));
                    ++decPos;
                    break;
                case 2:
                    low = GetChar(name, encPos);
                    high = GetChar(name, encPos + 1);
                    buf.Append((char)((high << 8) + low));
                    ++decPos;
                    encPos += 2;
                    break;
                case 3:
                    var length = GetChar(name, encPos++);
                    if ((length & 0x80) != 0)
                    {
                        var correction = GetChar(name, encPos++);
                        for (
                            length = (length & 0x7f) + 2;
                            length > 0 && decPos < name.Length;
                            length--, decPos++
                        )
                        {
                            low = (GetChar(name, decPos) + correction) & 0xff;
                            buf.Append((char)((highByte << 8) + low));
                        }
                    }
                    else
                    {
                        for (length += 2; length > 0 && decPos < name.Length; length--, decPos++)
                        {
                            buf.Append((char)(GetChar(name, decPos)));
                        }
                    }
                    break;
            }
            flags = (flags << 2) & 0xff;
            flagBits -= 2;
        }
        return buf.ToString();
    }
}
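
A tiny worked example of the decoder's simplest path (a hypothetical buffer; with highByte = 0 and a zero flags byte, each 2-bit opcode 0 copies one raw byte):

```csharp
// Layout: ASCII name bytes, zero terminator, high byte, flags byte, data bytes.
byte[] name = { (byte)'A', (byte)'B', 0, 0x00, 0x00, (byte)'A', (byte)'B' };
// encPos = 3 points just past the zero terminator of the ASCII section.
var decoded = FileNameDecoder.Decode(name, 3); // "AB"
```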

Some files were not shown because too many files have changed in this diff