Compare commits


351 Commits

Author SHA1 Message Date
copilot-swe-agent[bot]
ea02d31096 Add IsArchiveAsync overloads for Zip and GZip factories
- Added IsArchiveAsync interface method to IFactory
- Implemented async versions of IsZipFile, IsZipMulti, IsGZipFile
- Updated ZipFactory and GZipFactory to override IsArchiveAsync
- Updated ReaderFactory.OpenAsync to use IsArchiveAsync
- Fixed Zip_Reader_Disposal_Test2_Async to use ReaderFactory.OpenAsync
- Fixed TestStream to properly forward ReadAsync calls
- Removed BufferedStream wrapping from AsyncBinaryReader as it uses sync Read
- Added default implementation in Factory base class

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-02 18:10:54 +00:00
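A minimal sketch of the async probe hook this commit describes. The member names (IFactory, IsArchive, IsArchiveAsync, Factory) come from the bullet points above, but the exact signatures and parameters are assumptions, not SharpCompress's actual API:

    using System.IO;
    using System.Threading;
    using System.Threading.Tasks;

    public interface IFactory
    {
        // Existing synchronous signature probe (assumed shape).
        bool IsArchive(Stream stream);

        // Async counterpart added by the commit above (assumed shape).
        ValueTask<bool> IsArchiveAsync(Stream stream, CancellationToken cancellationToken = default);
    }

    public abstract class Factory : IFactory
    {
        public abstract bool IsArchive(Stream stream);

        // Default implementation in the base class: fall back to the sync
        // check so factories that have not overridden it keep working.
        public virtual ValueTask<bool> IsArchiveAsync(
            Stream stream,
            CancellationToken cancellationToken = default
        ) => new(IsArchive(stream));
    }

With a default interface implementation pushed into the base class, ReaderFactory.OpenAsync can call IsArchiveAsync on every factory without each format needing async code up front.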
Adam Hathcock
d04830ba90 Add some OpenAsync 2026-01-02 17:52:18 +00:00
Adam Hathcock
8533b09091 start of implementing zip reading async 2025-12-31 14:53:55 +00:00
Adam Hathcock
44b7955d85 reader tests 2025-12-31 14:43:15 +00:00
Adam Hathcock
038b9f18c6 Merge remote-tracking branch 'origin/master' into copilot/add-buffered-stream-async-read 2025-12-31 14:24:31 +00:00
Adam Hathcock
5667595587 Merge pull request #1091 from adamhathcock/adam/update-deps2
Update dependencies
2025-12-31 14:23:58 +00:00
copilot-swe-agent[bot]
6e0e20ba6e Fix zip64_locator to use ReadUInt32Async instead of ReadUInt16Async
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-30 11:50:41 +00:00
copilot-swe-agent[bot]
ec31cb9987 Fix Zip headers to support both sync and async reading
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-30 11:47:31 +00:00
Adam Hathcock
32d5b61c4a Merge pull request #1093 from adamhathcock/copilot/sub-pr-1091 2025-12-30 11:32:05 +00:00
copilot-swe-agent[bot]
128c9e639f Add back System.Buffers and System.Memory packages
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-30 11:30:12 +00:00
copilot-swe-agent[bot]
5e3f01dc03 Initial plan 2025-12-30 11:26:39 +00:00
copilot-swe-agent[bot]
39a0b4ce78 Use BufferedStream for async reading in AsyncBinaryReader
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-30 11:23:55 +00:00
Adam Hathcock
af719707bf Merge branch 'adam/async-binary-reader' into copilot/add-buffered-stream-async-read 2025-12-30 11:17:16 +00:00
copilot-swe-agent[bot]
8415a19912 Initial plan 2025-12-30 11:15:28 +00:00
Adam Hathcock
1607d2768e Merge branch 'master' into adam/async-binary-reader 2025-12-30 11:13:14 +00:00
Adam Hathcock
c97c05a3a7 Update dependencies 2025-12-30 11:11:40 +00:00
Adam Hathcock
b2beea9c4e Merge pull request #1077 from adamhathcock/copilot/provide-interface-for-entries
Remove ExtractAllEntries restriction for non-SOLID archives
2025-12-23 15:47:52 +00:00
Adam Hathcock
41fbaa1c28 Merge pull request #1085 from adamhathcock/adam/edit-md-files
add some markdown files for planning
2025-12-23 15:38:56 +00:00
Adam Hathcock
d9274cf794 Update tests/SharpCompress.Test/ExtractAllEntriesTests.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-23 15:38:17 +00:00
Adam Hathcock
583b048046 Update USAGE.md
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-23 15:38:00 +00:00
Adam Hathcock
ead5916eae Update USAGE.md
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-23 15:37:28 +00:00
Adam Hathcock
d15ab92da3 add some markdown files for planning 2025-12-23 15:37:04 +00:00
Adam Hathcock
1ab30f2af5 Merge pull request #1084 from adamhathcock/copilot/fix-handled-system-exception
Avoid NotSupportedException overhead in SharpCompressStream for non-seekable streams
2025-12-23 15:15:15 +00:00
Adam Hathcock
4dbe0b91f1 Update src/SharpCompress/IO/SharpCompressStream.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-23 15:06:24 +00:00
Adam Hathcock
a972d3784e Merge pull request #1082 from adamhathcock/dependabot/nuget/JetBrains.Profiler.SelfApi-2.5.15
Bump JetBrains.Profiler.SelfApi from 2.5.14 to 2.5.15
2025-12-23 15:04:17 +00:00
copilot-swe-agent[bot]
6991900eb0 Remove try/catch blocks, just check CanSeek as requested
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-23 15:01:41 +00:00
copilot-swe-agent[bot]
d614beb9eb Add explanatory comments for CanSeek checks and try-catch blocks
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-23 14:41:44 +00:00
copilot-swe-agent[bot]
253a46d458 Fix NotSupportedException in SharpCompressStream by checking CanSeek
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-23 14:38:11 +00:00
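The pattern behind this fix, sketched; names are illustrative rather than taken from SharpCompressStream itself. Querying CanSeek up front replaces exception-driven control flow:

    using System.IO;

    static class SeekableProbe
    {
        // Before the fix: Position was read unconditionally and the
        // NotSupportedException thrown by non-seekable streams was caught.
        // After: a cheap capability check avoids the throw entirely.
        public static long GetStartPosition(Stream stream) =>
            stream.CanSeek ? stream.Position : 0;
    }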
copilot-swe-agent[bot]
32b1ec32c6 Initial plan 2025-12-23 14:32:38 +00:00
Adam Hathcock
eb2cba09b2 update usage 2025-12-23 09:34:55 +00:00
Adam Hathcock
e79dceb67e check should be there 2025-12-23 09:31:51 +00:00
Adam Hathcock
87c38d6dab fix ordering and token passing 2025-12-23 09:22:38 +00:00
dependabot[bot]
9e98d9c45c Bump JetBrains.Profiler.SelfApi from 2.5.14 to 2.5.15
---
updated-dependencies:
- dependency-name: JetBrains.Profiler.SelfApi
  dependency-version: 2.5.15
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-22 09:39:01 +00:00
Adam Hathcock
0e9a4b0511 Merge remote-tracking branch 'origin/master' into copilot/provide-interface-for-entries
# Conflicts:
#	src/SharpCompress/Archives/AbstractArchive.cs
2025-12-19 15:41:51 +00:00
Adam Hathcock
eae25aff64 Merge pull request #1076 from adamhathcock/adam/extract-all-test
add extract all test
2025-12-19 14:58:33 +00:00
Adam Hathcock
b8c06ff36e oops 2025-12-19 14:39:37 +00:00
Adam Hathcock
6cf2e054bf fmt 2025-12-19 14:33:24 +00:00
Adam Hathcock
95749234f5 Use proper polyfill for net48 2025-12-19 14:33:06 +00:00
Adam Hathcock
b976961434 Merge fixes 2025-12-19 14:22:09 +00:00
Adam Hathcock
e1aa727513 Merge remote-tracking branch 'origin/master' into adam/extract-all-test
# Conflicts:
#	src/SharpCompress/Archives/IArchiveExtensions.cs
2025-12-19 14:16:29 +00:00
Adam Hathcock
1f71ce1be2 Merge pull request #1080 from adamhathcock/copilot/refactor-write-to-directory-consistency
Standardize extraction API to WriteToDirectory with IProgress support
2025-12-19 14:15:43 +00:00
Adam Hathcock
cf13de6ac1 Merge pull request #1081 from adamhathcock/copilot/sub-pr-1076-again
Fix async LZMA extraction bug for 7Zip archives
2025-12-19 14:14:16 +00:00
Adam Hathcock
c2e01798f8 refactor archive extensions 2025-12-19 14:09:01 +00:00
copilot-swe-agent[bot]
8fc8295a89 Add TODO and explanation for SyncOnlyStream workaround
The SyncOnlyStream wrapper is necessary because the LZMA decoder has bugs in its async implementation that cause state corruption (IndexOutOfRangeException, DataErrorException) even with fresh, non-shared streams.

Without this wrapper, async operations on LZMA streams fail. The proper fix would be to repair the async bugs in LzmaStream.ReadAsync, Decoder.CodeAsync, and OutWindow async operations, but that requires deep changes to the decoder state machine.

Added detailed comments explaining this is a workaround and where the real fix should go.

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-19 13:47:49 +00:00
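A minimal sketch of what a sync-forcing wrapper like the SyncOnlyStream described above could look like. This is illustrative only, not the actual SharpCompress type:

    using System;
    using System.IO;
    using System.Threading;
    using System.Threading.Tasks;

    // Route ReadAsync through the (known-good) sync Read so the buggy
    // async decoder path is never exercised.
    public sealed class SyncOnlyStreamSketch : Stream
    {
        private readonly Stream _inner;
        public SyncOnlyStreamSketch(Stream inner) => _inner = inner;

        public override int Read(byte[] buffer, int offset, int count) =>
            _inner.Read(buffer, offset, count);

        public override Task<int> ReadAsync(
            byte[] buffer, int offset, int count, CancellationToken cancellationToken)
        {
            cancellationToken.ThrowIfCancellationRequested();
            // Deliberately synchronous: correctness over async throughput.
            return Task.FromResult(_inner.Read(buffer, offset, count));
        }

        public override bool CanRead => _inner.CanRead;
        public override bool CanSeek => false;
        public override bool CanWrite => false;
        public override long Length => _inner.Length;
        public override long Position
        {
            get => _inner.Position;
            set => throw new NotSupportedException();
        }
        public override void Flush() { }
        public override long Seek(long offset, SeekOrigin origin) =>
            throw new NotSupportedException();
        public override void SetLength(long value) =>
            throw new NotSupportedException();
        public override void Write(byte[] buffer, int offset, int count) =>
            throw new NotSupportedException();
    }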
copilot-swe-agent[bot]
d392991764 Remove CancellationToken from synchronous WriteToDirectory overload
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-19 13:40:22 +00:00
copilot-swe-agent[bot]
e57e87090f Implement option 2: Avoid async LZMA state corruption for 7Zip extraction
- Each 7Zip file now gets a fresh decompression stream instead of reusing shared streams
- Added SyncOnlyStream wrapper to force async operations to use synchronous equivalents
- This avoids LZMA decoder state corruption bugs in async operations
- Performance trade-off: slower async extraction for 7Zip but correctness guaranteed
- Documented behavior and performance implications in FORMATS.md

All 14 ExtractAll tests now pass (both async and sync)

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-19 13:33:04 +00:00
copilot-swe-agent[bot]
c701bbbee3 Run CSharpier formatting and document check-format process
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-19 13:31:35 +00:00
copilot-swe-agent[bot]
2f0eb0bd4b Replace Action<double> with IProgress<ProgressReport> for progress reporting
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-19 13:25:36 +00:00
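Why IProgress<T> over a bare Action<double>: it carries structured context and is a standard BCL contract. A self-contained sketch, with a hypothetical report shape since the real ProgressReport's members may differ:

    using System;

    // Hypothetical report shape for illustration only.
    public readonly record struct ProgressReport(string EntryPath, long BytesRead, long TotalBytes);

    public sealed class ConsoleProgress : IProgress<ProgressReport>
    {
        // Reports which entry is in flight and how many bytes have moved,
        // instead of a single context-free percentage.
        public void Report(ProgressReport value) =>
            Console.WriteLine($"{value.EntryPath}: {value.BytesRead}/{value.TotalBytes}");
    }

A caller would pass an IProgress<ProgressReport> into the extraction call and receive per-entry byte counts rather than a bare fraction.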
copilot-swe-agent[bot]
17bde8da8a Merge branch 'master' of https://github.com/adamhathcock/sharpcompress into copilot/refactor-write-to-directory-consistency 2025-12-19 13:10:33 +00:00
Adam Hathcock
99d355e6ca Merge remote-tracking branch 'origin/master' into copilot/sub-pr-1076-again 2025-12-19 13:05:33 +00:00
Adam Hathcock
c790fd21a4 reading a single byte shouldn't be async 2025-12-19 13:05:04 +00:00
Adam Hathcock
bee51af48b Merge pull request #1044 from adamhathcock/copilot/add-progress-reporting
Unified progress reporting for compression and extraction operations
2025-12-19 12:54:55 +00:00
Adam Hathcock
ca743eae22 fix for running net 10 tests 2025-12-19 12:17:08 +00:00
copilot-swe-agent[bot]
93504cf82f Add sync test and attempt to fix async LZMA extraction bug
- Restored original async ExtractAllEntries test with using statement
- Added new ExtractAllEntriesSync test (all tests pass)
- Fixed potential partial read bug in LzmaStream.DecodeChunkHeaderAsync
  - Added ReadFullyAsync helper to ensure complete reads
  - ReadAsync is not guaranteed to return all requested bytes
- Async tests for 7Zip still failing with Data Error
  - Issue appears related to LZMA2 stream state management
  - _needDictReset flag not being cleared correctly in async flow
  - Further investigation needed

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-19 12:15:54 +00:00
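"ReadAsync is not guaranteed to return all requested bytes" is the key point above: a single call may legally return any positive count. A helper in the spirit of the ReadFullyAsync mentioned in the commit, sketched without assuming anything about the real implementation:

    using System.IO;
    using System.Threading;
    using System.Threading.Tasks;

    public static class StreamReadSketch
    {
        // Loop until 'count' bytes are read or the stream ends.
        public static async Task<int> ReadFullyAsync(
            this Stream stream,
            byte[] buffer,
            int offset,
            int count,
            CancellationToken cancellationToken = default)
        {
            var total = 0;
            while (total < count)
            {
                var read = await stream
                    .ReadAsync(buffer, offset + total, count - total, cancellationToken)
                    .ConfigureAwait(false);
                if (read == 0)
                {
                    break; // end of stream before the buffer filled
                }
                total += read;
            }
            return total;
        }
    }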
Adam Hathcock
6d3e4e842b Merge remote-tracking branch 'origin/master' into copilot/add-progress-reporting 2025-12-19 12:13:34 +00:00
copilot-swe-agent[bot]
54b64a8c3b Fix misleading variable name: emptyDirectory -> parentDirectory
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-19 12:06:36 +00:00
copilot-swe-agent[bot]
0e59bf39f4 Add test for IArchive.WriteToDirectoryAsync
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-19 12:04:47 +00:00
copilot-swe-agent[bot]
8b95e0a76d Standardize on WriteToDirectory naming and add async support
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-19 11:57:01 +00:00
copilot-swe-agent[bot]
48a2ad7b57 Fix ExtractAll test to use synchronous extraction methods for 7Zip archives
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-19 11:55:00 +00:00
copilot-swe-agent[bot]
cfc6651fff Update documentation to reflect ExtractAllEntries universal compatibility
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-19 11:49:35 +00:00
copilot-swe-agent[bot]
b23827a8db Initial plan 2025-12-19 11:48:49 +00:00
copilot-swe-agent[bot]
3f9986c13c Initial plan 2025-12-19 11:47:53 +00:00
copilot-swe-agent[bot]
224989f19b Remove restriction on ExtractAllEntries and add comprehensive tests
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-19 11:45:21 +00:00
Adam Hathcock
c7010b75c1 fix the test targets 2025-12-19 11:41:08 +00:00
Adam Hathcock
00cfeee56e Fix logic to match ExtractAllEntries 2025-12-19 11:37:04 +00:00
copilot-swe-agent[bot]
aaa97e2ce2 Merge master branch - add ZStandard compression support and TarHeaderWriteFormat
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-19 11:36:44 +00:00
copilot-swe-agent[bot]
1d52618137 Initial plan 2025-12-19 11:32:17 +00:00
Adam Hathcock
34309f17f4 fmt 2025-12-18 15:33:26 +00:00
Adam Hathcock
220ba67faa add extract all test 2025-12-18 15:26:35 +00:00
Adam Hathcock
230f96e8e8 Merge pull request #1052 from adamhathcock/copilot/move-zstdsharp-into-sharpcompress
Move ZstdSharp into SharpCompress - Complete Integration
2025-12-18 14:48:03 +00:00
Adam Hathcock
930c8899d2 Merge remote-tracking branch 'origin/master' into copilot/move-zstdsharp-into-sharpcompress
# Conflicts:
#	src/SharpCompress/packages.lock.json
#	tests/SharpCompress.Test/packages.lock.json
2025-12-18 12:43:19 +00:00
copilot-swe-agent[bot]
7c0cef7dd8 Clean up unused using statements in ZstandardConstants.cs
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-18 12:32:48 +00:00
copilot-swe-agent[bot]
951ebb3fa2 Complete ZstdSharp integration: Add all Unsafe implementation files and remove ZstdSharp.Port dependency
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-18 12:30:38 +00:00
copilot-swe-agent[bot]
2a4d098b41 Initial plan 2025-12-18 12:30:38 +00:00
dependabot[bot]
5839b87f98 Bump csharpier from 1.2.1 to 1.2.3
---
updated-dependencies:
- dependency-name: csharpier
  dependency-version: 1.2.3
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-18 12:30:38 +00:00
dependabot[bot]
425a2bd680 Bump actions/upload-artifact from 5 to 6
Bumps [actions/upload-artifact](https://github.com/actions/upload-artifact) from 5 to 6.
- [Release notes](https://github.com/actions/upload-artifact/releases)
- [Commits](https://github.com/actions/upload-artifact/compare/v5...v6)

---
updated-dependencies:
- dependency-name: actions/upload-artifact
  dependency-version: '6'
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-18 12:30:38 +00:00
drone1400
939c2497c8 Reformatted modified files with csharpier 2025-12-18 12:30:38 +00:00
drone1400
8995ba56b8 Add alternative option for writing TAR archives with USTAR header format
- TarWriterOptions now has a property that allows the user to select writing the TAR using the USTAR header format
- if unspecified, will default to the original modern GNU TAR header format
- default behavior is unchanged
2025-12-18 12:30:38 +00:00
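A sketch of opting into USTAR headers per the commit above. The TarHeaderWriteFormat name appears elsewhere in this log, but the exact enum values, constructor parameters, and write overloads here are assumptions:

    using System.IO;
    using SharpCompress.Common;
    using SharpCompress.Writers.Tar;

    // Assumed property/enum spellings; when the option is not set, the
    // default (GNU header format) behavior is unchanged.
    var options = new TarWriterOptions(CompressionType.None, finalizeArchiveOnClose: true)
    {
        TarHeaderWriteFormat = TarHeaderWriteFormat.UStar,
    };

    using var output = File.Create("archive.tar");
    using var writer = new TarWriter(output, options);
    using var source = File.OpenRead("hello.txt");
    writer.Write("hello.txt", source, null);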
HeroponRikIBestest
e941ab60ca Csharpier 2025-12-18 12:30:38 +00:00
HeroponRikIBestest
48860f1349 Improve logic 2025-12-18 12:30:38 +00:00
HeroponRikIBestest
94b2c5c593 Add tests 2025-12-18 12:30:38 +00:00
HeroponRikIBestest
f1d8ae5a22 Add archive-level IsEncrypted flag 2025-12-18 12:30:38 +00:00
copilot-swe-agent[bot]
e44d2093e5 Consolidate agent instructions into AGENTS.md
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-18 12:30:38 +00:00
copilot-swe-agent[bot]
9fa1201a4c Initial plan 2025-12-18 12:30:38 +00:00
Adam Hathcock
7800808648 first pass of instructions...consolidate? 2025-12-18 12:30:38 +00:00
copilot-swe-agent[bot]
2789e86d21 Update launch.json to use net10.0 instead of net8.0
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-18 12:30:38 +00:00
copilot-swe-agent[bot]
afd1e39b88 Initial plan 2025-12-18 12:30:38 +00:00
Adam Hathcock
aec4c738ef add vscode config 2025-12-18 12:30:38 +00:00
copilot-swe-agent[bot]
63ecc8c842 Apply code review feedback: improve documentation structure and add explicit DirectoryEntry case
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-18 12:30:38 +00:00
Adam Hathcock
5f6d583521 Update tests/SharpCompress.Test/Zip/ZipReaderTests.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-18 12:30:38 +00:00
copilot-swe-agent[bot]
0341984f10 Add documentation about ZipReader directory and central directory handling
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-18 12:30:38 +00:00
copilot-swe-agent[bot]
fe757486ae Add multi-volume ZIP documentation to FORMATS.md
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-18 12:30:38 +00:00
copilot-swe-agent[bot]
46d480c9a1 Changes before error encountered
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-18 12:30:38 +00:00
copilot-swe-agent[bot]
8fe3cba7a8 Initial plan 2025-12-18 12:30:38 +00:00
Adam Hathcock
aa19f4da8b update dependencies 2025-12-18 12:30:38 +00:00
copilot-swe-agent[bot]
a08f95326c Remove netstandard2.0 and net481 targets, keep net48, net8.0, net10.0
- Updated target frameworks to net48;net8.0;net10.0 only
- Removed netstandard2.0 and net481 package conditions
- Updated conditional compilation to use NETFRAMEWORK only
- Updated package description and README

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-18 12:30:38 +00:00
copilot-swe-agent[bot]
b4c5437c92 Fix .NET 10.0 formatting in README
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-18 12:30:38 +00:00
copilot-swe-agent[bot]
8680e3b39e Drop .NET 6 support and add .NET 10 support
- Updated SharpCompress.csproj target frameworks from net48;net481;netstandard2.0;net6.0;net8.0 to net48;net481;netstandard2.0;net8.0;net10.0
- Updated test and build projects to use .NET 10
- Updated global.json to .NET 10 SDK
- Updated CI workflow to use .NET 10
- Fixed deprecated Rfc2898DeriveBytes constructor for .NET 10 (SYSLIB0060)
- Updated package description and README to reflect new supported frameworks
- Updated package versions for .NET 10 compatibility

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-18 12:30:38 +00:00
copilot-swe-agent[bot]
1b3002c8df Initial plan 2025-12-18 12:30:38 +00:00
Adam Hathcock
394fd2e7db Merge pull request #1072 from adamhathcock/dependabot/nuget/dot-config/csharpier-1.2.3
Bump csharpier from 1.2.1 to 1.2.3
2025-12-15 11:02:17 +00:00
Adam Hathcock
d83af56d28 Merge pull request #1071 from adamhathcock/dependabot/github_actions/actions/upload-artifact-6
Bump actions/upload-artifact from 5 to 6
2025-12-15 10:58:08 +00:00
dependabot[bot]
28c93d6841 Bump csharpier from 1.2.1 to 1.2.3
---
updated-dependencies:
- dependency-name: csharpier
  dependency-version: 1.2.3
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-15 09:42:59 +00:00
dependabot[bot]
5f52fc2176 Bump actions/upload-artifact from 5 to 6
Bumps [actions/upload-artifact](https://github.com/actions/upload-artifact) from 5 to 6.
- [Release notes](https://github.com/actions/upload-artifact/releases)
- [Commits](https://github.com/actions/upload-artifact/compare/v5...v6)

---
updated-dependencies:
- dependency-name: actions/upload-artifact
  dependency-version: '6'
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-15 09:02:14 +00:00
Adam Hathcock
8fba579e3a Merge pull request #1063 from drone1400/tar-ustar-header-format-support
Add alternative option for writing TAR archives with USTAR header format
2025-12-09 08:37:18 +00:00
drone1400
40b1aadeb2 Reformatted modified files with csharpier 2025-12-08 17:49:58 +02:00
Adam Hathcock
40e72ad199 fix AI edit 2025-12-08 11:11:51 +00:00
Adam Hathcock
618b4bbb83 try to tell agents to format 2025-12-08 11:04:08 +00:00
Adam Hathcock
1eaf3e6294 format with csharpier 2025-12-08 11:00:29 +00:00
Adam Hathcock
fd453e946d Update src/SharpCompress/IO/ProgressReportingStream.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-08 10:55:24 +00:00
Adam Hathcock
c294071015 Update src/SharpCompress/Archives/IArchiveEntryExtensions.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-08 10:55:06 +00:00
Adam Hathcock
c2f6055e33 format 2025-12-08 10:26:45 +00:00
drone1400
5161f4df33 Add alternative option for writing TAR archives with USTAR header format
- TarWriterOptions now has a property that allows the user to select writing the TAR using the USTAR header format
- if unspecified, will default to the original modern GNU TAR header format
- default behavior is unchanged
2025-12-06 17:33:01 +02:00
copilot-swe-agent[bot]
3396f8fe00 Refactor to use ProgressReportingStream for progress tracking
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-05 11:55:29 +00:00
copilot-swe-agent[bot]
9291f58091 Merge master and add comprehensive tests for archive and reader progress
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-05 11:44:17 +00:00
copilot-swe-agent[bot]
85f3b17c42 Merge remote-tracking branch 'origin/master' into copilot/add-progress-reporting 2025-12-05 11:33:39 +00:00
Adam Hathcock
2a3086a0d7 Merge pull request #1060 from HeroponRikiBestest/rar-7z-password 2025-12-03 19:58:47 +00:00
HeroponRikIBestest
41c3cc1a18 Csharpier 2025-12-03 12:05:16 -05:00
HeroponRikIBestest
1b1df86a11 Improve logic 2025-12-02 10:02:49 -05:00
HeroponRikIBestest
e0660e7775 Add tests 2025-12-02 09:55:24 -05:00
HeroponRikIBestest
99a6c4de88 Add archive-level IsEncrypted flag 2025-12-02 09:47:06 -05:00
Adam Hathcock
ffa765bd97 Merge pull request #1057 from adamhathcock/adam/add-copilot-instructions 2025-11-30 13:48:55 +00:00
Adam Hathcock
b1696524b3 Merge pull request #1058 from adamhathcock/copilot/sub-pr-1057 2025-11-30 13:48:31 +00:00
copilot-swe-agent[bot]
14d432e22d Pass progress as parameter to WriteTo/WriteToAsync instead of storing on archive
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-30 13:04:40 +00:00
copilot-swe-agent[bot]
6a37c55085 Consolidate agent instructions into AGENTS.md
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-30 12:52:22 +00:00
copilot-swe-agent[bot]
9c1c6fff9f Initial plan 2025-11-30 12:49:36 +00:00
Adam Hathcock
db8c6f4bcb first pass of instructions...consolidate? 2025-11-30 12:47:57 +00:00
Adam Hathcock
ff17ecda7d Merge pull request #1055 from adamhathcock/adam/vscode-fixes
add vscode config
2025-11-30 12:47:16 +00:00
Adam Hathcock
692058677c Merge pull request #1056 from adamhathcock/copilot/sub-pr-1055
Fix launch.json debug configurations to use net10.0
2025-11-30 12:41:38 +00:00
copilot-swe-agent[bot]
1e90d69912 Update launch.json to use net10.0 instead of net8.0
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-30 12:22:31 +00:00
copilot-swe-agent[bot]
64a1cc68e1 Initial plan 2025-11-30 12:20:37 +00:00
copilot-swe-agent[bot]
0fdf9c74a8 Address code review: Replace dynamic with IArchiveProgressInfo interface
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-30 12:07:17 +00:00
Adam Hathcock
20353f35ff add vscode config 2025-11-30 12:05:08 +00:00
copilot-swe-agent[bot]
e2df7894f9 Remove IArchiveExtractionListener and add IProgress support to Archive Entry extraction
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-30 12:00:54 +00:00
copilot-swe-agent[bot]
7af029b5de Address code review: properly handle zero-sized entries in progress reporting
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-29 17:48:50 +00:00
copilot-swe-agent[bot]
8fc5ca5a71 Unify progress reporting: remove IExtractionListener and add IProgress support for reading
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-29 17:40:10 +00:00
copilot-swe-agent[bot]
aa0356de9f Changes before error encountered
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-29 15:55:21 +00:00
Adam Hathcock
e44a43d2b1 Merge pull request #1054 from adamhathcock/copilot/fix-zipreader-directory-entry
Document ZipReader DirectoryEntry behavior and add verification test
2025-11-29 15:46:51 +00:00
Adam Hathcock
8997f00b9b Merge pull request #1049 from adamhathcock/copilot/drop-dotnet-6-support
Drop .NET 6, .NET Standard 2.0, .NET 4.8.1, add .NET 10 support
2025-11-29 15:46:27 +00:00
copilot-swe-agent[bot]
c5da416764 Apply code review feedback: improve documentation structure and add explicit DirectoryEntry case
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-29 15:39:36 +00:00
Adam Hathcock
840e58fc03 Update tests/SharpCompress.Test/Zip/ZipReaderTests.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-29 15:36:46 +00:00
copilot-swe-agent[bot]
7f911c5219 Add documentation about ZipReader directory and central directory handling
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-29 15:29:07 +00:00
copilot-swe-agent[bot]
a887390c23 Add multi-volume ZIP documentation to FORMATS.md
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-29 15:25:35 +00:00
copilot-swe-agent[bot]
f4dddcec8e Changes before error encountered
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-29 14:06:03 +00:00
copilot-swe-agent[bot]
0d9d82d7e6 Initial plan 2025-11-29 13:42:43 +00:00
copilot-swe-agent[bot]
3a6d24b1d9 Add ZStandard frame and dictionary types
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-29 13:36:01 +00:00
copilot-swe-agent[bot]
b9b159be4c Add ZStandard compression parameter types and enums
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-29 13:34:18 +00:00
copilot-swe-agent[bot]
40212083a5 Add core ZStandard infrastructure: UnsafeHelper, ThrowHelper, and Unsafe Methods
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-29 13:28:16 +00:00
copilot-swe-agent[bot]
d3428b066e Fix XML documentation comments in buffer structs
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-29 13:14:52 +00:00
copilot-swe-agent[bot]
94c64b2a45 Add initial ZStandard infrastructure types for ZstdSharp integration
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-29 13:11:19 +00:00
copilot-swe-agent[bot]
0d671a0bb2 Initial plan 2025-11-29 13:03:20 +00:00
Adam Hathcock
d34a47c148 update dependencies 2025-11-29 11:56:20 +00:00
copilot-swe-agent[bot]
5aa216bd21 Remove netstandard2.0 and net481 targets, keep net48, net8.0, net10.0
- Updated target frameworks to net48;net8.0;net10.0 only
- Removed netstandard2.0 and net481 package conditions
- Updated conditional compilation to use NETFRAMEWORK only
- Updated package description and README

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-29 11:42:05 +00:00
copilot-swe-agent[bot]
8af47548fe Fix .NET 10.0 formatting in README
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-29 11:04:54 +00:00
copilot-swe-agent[bot]
131bd2b7b8 Drop .NET 6 support and add .NET 10 support
- Updated SharpCompress.csproj target frameworks from net48;net481;netstandard2.0;net6.0;net8.0 to net48;net481;netstandard2.0;net8.0;net10.0
- Updated test and build projects to use .NET 10
- Updated global.json to .NET 10 SDK
- Updated CI workflow to use .NET 10
- Fixed deprecated Rfc2898DeriveBytes constructor for .NET 10 (SYSLIB0060)
- Updated package description and README to reflect new supported frameworks
- Updated package versions for .NET 10 compatibility

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-29 11:03:48 +00:00
copilot-swe-agent[bot]
1993673a22 Initial plan 2025-11-29 10:42:07 +00:00
Adam Hathcock
30e036f9ec Mark for 0.42.0 2025-11-28 13:24:03 +00:00
copilot-swe-agent[bot]
0f374b27cf Address code review: ProgressReportingStream now throws on writes
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-27 19:17:10 +00:00
copilot-swe-agent[bot]
0d487df61b Add IProgress support for compression operations with tests
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-27 19:15:10 +00:00
copilot-swe-agent[bot]
c082d4203b Changes before error encountered
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-27 16:58:32 +00:00
copilot-swe-agent[bot]
d4380b6bb6 Initial plan 2025-11-27 16:48:09 +00:00
Adam Hathcock
095c871174 Merge pull request #1043 from adamhathcock/copilot/fix-divide-by-zero-exception
Fix DivideByZeroException when compressing empty files with BZip2
2025-11-27 16:47:39 +00:00
copilot-swe-agent[bot]
6d73c5b295 Fix DivideByZeroException when using BZip2 with empty files
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-27 15:59:39 +00:00
copilot-swe-agent[bot]
cc4d28193c Initial plan 2025-11-27 15:53:13 +00:00
Adam Hathcock
fb76bd82f2 first commit of async reader 2025-11-26 08:09:20 +00:00
Adam Hathcock
3bdaba46a9 fmt 2025-11-25 15:39:43 +00:00
Adam Hathcock
9433e06b93 Merge pull request #1023 from adamhathcock/copilot/fix-zip64-validation-issue
Fix version mismatch between Local File Header and Central Directory File Header in Zip archives
2025-11-25 15:30:48 +00:00
copilot-swe-agent[bot]
a92aaa51d5 Remove ZipCompressionMethod.None from version 63 check
None (stored) compression only requires version 10/20, not version 63.
Version 63 is specifically for advanced compression methods like LZMA,
PPMd, BZip2, and ZStandard.

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-25 14:54:13 +00:00
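Together with the Zip64 fix further down this log (version 45 so the Central Directory matches the Local File Header), the "version needed to extract" rule these commits converge on looks roughly like this. The values come from the commit messages; the method and enum names are assumed:

    enum ZipCompressionMethod { None, Deflate, BZip2, LZMA, PPMd, ZStandard }

    static class ZipVersionRules
    {
        public static int VersionNeededToExtract(ZipCompressionMethod method, bool useZip64) =>
            method switch
            {
                // Advanced compression methods require 6.3 (63).
                ZipCompressionMethod.LZMA
                or ZipCompressionMethod.PPMd
                or ZipCompressionMethod.BZip2
                or ZipCompressionMethod.ZStandard => 63,
                // Zip64 structures require 4.5 (45) in both LFH and CDFH.
                _ when useZip64 => 45,
                // Deflate requires 2.0 (20); stored (None) only 1.0 (10).
                ZipCompressionMethod.Deflate => 20,
                _ => 10,
            };
    }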
Adam Hathcock
7c3c94ed7f Add ArcReaderAsync tests 2025-11-25 14:44:03 +00:00
Adam Hathcock
d41908adeb fixes for clarity 2025-11-25 14:25:58 +00:00
Adam Hathcock
81ca15b567 Update src/SharpCompress/Writers/Zip/ZipCentralDirectoryEntry.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-25 14:13:10 +00:00
Adam Hathcock
b81d0fd730 Merge pull request #1009 from adamhathcock/dependabot/nuget/AwesomeAssertions-9.3.0
Bump AwesomeAssertions from 9.2.1 to 9.3.0
2025-11-25 11:55:41 +00:00
Adam Hathcock
3a1bb187e8 Merge pull request #1031 from adamhathcock/dependabot/github_actions/actions/checkout-6
Bump actions/checkout from 5 to 6
2025-11-25 11:55:21 +00:00
Adam Hathcock
3fee14a070 Merge pull request #1035 from adamhathcock/adam/update-csharpier
Update csharpier and reformat
2025-11-25 11:54:56 +00:00
Adam Hathcock
5bf789ac65 Update csharpier and reformat 2025-11-25 11:50:21 +00:00
dependabot[bot]
be06049db3 Bump actions/checkout from 5 to 6
Bumps [actions/checkout](https://github.com/actions/checkout) from 5 to 6.
- [Release notes](https://github.com/actions/checkout/releases)
- [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md)
- [Commits](https://github.com/actions/checkout/compare/v5...v6)

---
updated-dependencies:
- dependency-name: actions/checkout
  dependency-version: '6'
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-11-24 09:20:23 +00:00
Adam Hathcock
a0435f6a60 Merge pull request #1030 from TwanVanDongen/master
Added buffer boundary tests.
2025-11-23 15:53:36 +00:00
Twan van Dongen
2321e2c90b Added buffer boundary tests. Changed largefile to Alice29.txt as it's sufficient for the tests. 2025-11-22 12:32:25 +01:00
Adam Hathcock
97e98d8629 Merge pull request #1028 from TwanVanDongen/master
Buffer boundary tests
2025-11-21 08:35:54 +00:00
Twan van Dongen
d96e7362d2 Buffer boundary test for ARC's Squeezed method 2025-11-20 21:56:07 +01:00
Twan van Dongen
7dd46fe5ed More buffer boundary tests 2025-11-20 19:43:07 +01:00
Adam Hathcock
04c044cb2b Update tests/SharpCompress.Test/Zip/Zip64VersionConsistencyTests.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-19 15:49:55 +00:00
Adam Hathcock
cc10a12fbc Update tests/SharpCompress.Test/Zip/Zip64VersionConsistencyTests.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-19 15:49:27 +00:00
Adam Hathcock
8b0a1c699f Update tests/SharpCompress.Test/Zip/Zip64VersionConsistencyTests.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-19 15:49:17 +00:00
Adam Hathcock
15ca7c9807 Merge pull request #1025 from adamhathcock/copilot/fix-exception-disposing-rararchive
Fix ArgumentNullException when disposing RarArchive with damaged archives
2025-11-19 15:48:44 +00:00
Adam Hathcock
2b4da7e39b Merge pull request #1024 from adamhathcock/copilot/fix-memory-exhaustion-bug
Fix memory exhaustion in TAR header auto-detection
2025-11-19 15:33:52 +00:00
copilot-swe-agent[bot]
31f81f38af Fix null reference exception in UnpackV1.Unpack.Dispose()
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-19 15:16:18 +00:00
copilot-swe-agent[bot]
72cf77b7c7 Initial plan 2025-11-19 15:09:19 +00:00
copilot-swe-agent[bot]
0fe48c647e Enhance fix to handle LZMA/PPMd/BZip2/ZStandard compression methods
Also fixes pre-existing version mismatch for advanced compression methods that require version 63. Added tests for LZMA and PPMd to verify version consistency.

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-19 11:29:43 +00:00
copilot-swe-agent[bot]
7b06652bff Add validation to prevent memory exhaustion in TAR long name headers
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-19 11:20:59 +00:00
copilot-swe-agent[bot]
434ce05416 Fix Zip64 version mismatch between LFH and CDFH
When UseZip64=true but files are small, ensure Central Directory File Header uses version 45 to match Local File Header. This fixes validation failures in System.IO.Packaging and other strict readers.

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-19 11:14:06 +00:00
copilot-swe-agent[bot]
0698031ed4 Initial plan 2025-11-19 11:04:50 +00:00
copilot-swe-agent[bot]
51237a34eb Initial plan 2025-11-19 11:02:28 +00:00
Adam Hathcock
b8264a8131 Merge pull request #1004 from adamhathcock/adam/async-xz
Async XZ
2025-11-19 10:53:24 +00:00
Adam Hathcock
cad923018e Merge remote-tracking branch 'origin/master' into adam/async-xz 2025-11-19 09:56:01 +00:00
Adam Hathcock
db94b49941 fix misspellings 2025-11-19 09:55:43 +00:00
Adam Hathcock
72d15d9cbf Merge pull request #1019 from TwanVanDongen/master
ARJ's methods 1, 2 and 3 implemented for streaming
2025-11-18 13:41:49 +00:00
Twan van Dongen
e0186eadc0 Add buffer boundary tests for streaming decompression. Methods 1, 2 and 3 still need fixes for full streaming; an exception is now thrown to prevent corrupt output 2025-11-18 08:14:34 +01:00
Twan van Dongen
4cfa5b04af Included ARC and ARJ in readme 2025-11-14 15:57:22 +01:00
Twan van Dongen
f2c54b1f8b Merge branch 'master' of https://github.com/TwanVanDongen/sharpcompress 2025-11-14 15:43:51 +01:00
Twan van Dongen
d7d0bc6582 Missed formatting FilePart. 2025-11-14 15:43:47 +01:00
Twan
dd9dc2500b Merge branch 'adamhathcock:master' into master 2025-11-14 15:40:36 +01:00
Twan van Dongen
4efb109da8 Arj's methods CompressedMost (1), Compressed (2) and CompressedFaster (3) implemented. 2025-11-14 15:39:58 +01:00
Adam Hathcock
bcf9a6bdf1 Merge pull request #1017 from Morilli/fix-streamstack
Fix some IStreamStack and SharpCompressStream functions
2025-11-14 08:33:38 +00:00
Morilli
e3a25ecdc0 make formatting worse with csharpier 2025-11-14 03:54:26 +01:00
Morilli
783521928d fix SharpCompressStream BufferSize setter and Seek as well
the BufferSize setter was completely broken and trashed the `_internalPosition` value on set; for `Seek`, this is just an off-by-one fix allowing seeking to immediately past the last byte in the buffer
2025-11-14 03:37:08 +01:00
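The off-by-one in question, sketched with illustrative names: a position equal to the buffered length is a legal seek target (it points just past the last byte), so the guard must reject only positions strictly beyond it.

    static class BufferSeekGuard
    {
        public static void ValidateSeekTarget(long relativeOffset, long bufferedLength)
        {
            // was: relativeOffset >= bufferedLength, which wrongly rejected
            // the legal "just past the last byte" target (== bufferedLength).
            if (relativeOffset < 0 || relativeOffset > bufferedLength)
            {
                throw new System.IO.IOException("Seek target outside the buffered range.");
            }
        }
    }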
Morilli
9a876abd31 fix IStreamStack.StackSeek with buffering streams 2025-11-14 03:37:08 +01:00
Morilli
97f58b412e Add test for StackSeek behavior 2025-11-14 03:37:08 +01:00
dependabot[bot]
4c61628078 Bump AwesomeAssertions from 9.2.1 to 9.3.0
---
updated-dependencies:
- dependency-name: AwesomeAssertions
  dependency-version: 9.3.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-11-10 11:57:30 +00:00
Adam Hathcock
99a8b0f750 Merge pull request #1007 from TwanVanDongen/master
ArjReader throws exception for password protected archives.
2025-11-06 09:58:24 +00:00
Twan van Dongen
a9017d7c25 ArjReader throws exception for password protected archives. 2025-11-06 09:32:12 +01:00
Adam Hathcock
d9e4b26648 Merge pull request #1006 from TwanVanDongen/master
ARJ multi-part archive handling improved
2025-11-05 08:37:54 +00:00
Twan van Dongen
0d03bafe49 Merge conflicts resolved 2025-11-05 08:19:30 +01:00
Twan
fee15a31f9 Merge branch 'adamhathcock:master' into master 2025-11-05 08:12:53 +01:00
Twan van Dongen
997d3910d4 CSharpier applied. ArjReader throws exception for multi-part archives. Method4 (decodefastest) refactored to support Stream. 2025-11-05 08:05:30 +01:00
Twan van Dongen
a3918cc0d7 ArjReader throws exception for multi-part archives. Method4 (decodefastest) refactored to support Stream. 2025-11-05 08:02:20 +01:00
Adam Hathcock
f056986b07 Merge pull request #1005 from TwanVanDongen/master
Refactor SqueezeStream for CLS Compliance, Streaming, and Generic Test Coverage
2025-11-04 15:50:36 +00:00
Twan van Dongen
59c1f02f98 Still difficult to run CSharpier... 2025-11-02 19:23:30 +01:00
Twan van Dongen
3a71a2b1f8 Ran CSharpier. 2025-11-02 19:22:01 +01:00
Twan van Dongen
2ef1215b49 Merge branch 'master' of https://github.com/TwanVanDongen/sharpcompress 2025-11-02 19:20:47 +01:00
Twan van Dongen
130ac83076 Revert "CSharpier...Refactors the SqueezeStream class to ensure full CLS compliance and proper stream behavior. It replaces the previous one-shot decoding logic with a true streaming implementation by piping Huffman-decoded output into the existing RunLength90Stream, enabling real-time decompression."
This reverts commit dd606a0702.
2025-11-02 19:20:34 +01:00
Twan van Dongen
dd606a0702 CSharpier...Refactors the SqueezeStream class to ensure full CLS compliance and proper stream behavior. It replaces the previous one-shot decoding logic with a true streaming implementation by piping Huffman-decoded output into the existing RunLength90Stream, enabling real-time decompression. 2025-11-02 19:15:17 +01:00
Twan van Dongen
84cd772f50 Refactors the SqueezeStream class to ensure full CLS compliance and proper stream behavior. It replaces the previous one-shot decoding logic with a true streaming implementation by piping Huffman-decoded output into the existing RunLength90Stream, enabling real-time decompression. 2025-11-02 19:09:59 +01:00
Adam Hathcock
fa1d7af22f Merge remote-tracking branch 'origin/master' into adam/async-xz 2025-11-01 10:35:22 +00:00
Adam Hathcock
a771ba3bc0 async tests 2025-11-01 10:35:07 +00:00
Adam Hathcock
8b612c658d Merge pull request #1003 from adamhathcock/adam/async-lzma
async lzma
2025-11-01 10:34:54 +00:00
Adam Hathcock
7dd0da5fd7 Merge branch 'adam/async-lzma' into adam/async-xz 2025-11-01 10:28:27 +00:00
Adam Hathcock
f7b3525c4e fix tests and fmt 2025-11-01 10:25:14 +00:00
Adam Hathcock
de83bdae48 Merge remote-tracking branch 'origin/master' into adam/async-lzma 2025-11-01 10:21:25 +00:00
Adam Hathcock
d90b610767 Merge pull request #994 from TwanVanDongen/master
Adding the ARJ (Archived by Robert Jung) format
2025-11-01 10:20:54 +00:00
Adam Hathcock
2d41de6b72 add async tests 2025-11-01 09:57:26 +00:00
Adam Hathcock
f391c3caf3 make an async code 2025-11-01 09:42:17 +00:00
Twan van Dongen
9bdf150676 Merge branch 'master' of https://github.com/TwanVanDongen/sharpcompress 2025-10-31 16:17:26 +01:00
Twan van Dongen
0c199609eb CSharpier...Merge branch 'master' of https://github.com/TwanVanDongen/sharpcompress 2025-10-31 16:17:23 +01:00
Twan van Dongen
6eff9d3753 Merge branch 'master' of https://github.com/TwanVanDongen/sharpcompress 2025-10-31 16:15:51 +01:00
Twan
7ab16457c7 Added ARJ's compression method4 (compressed fastest).
Refactored TestBase to support archives with mixed compression algorithms. Merge branch 'adamhathcock:master' into master
2025-10-31 16:14:48 +01:00
Twan
e7ad8132b5 Merge branch 'adamhathcock:master' into master 2025-10-31 14:33:14 +01:00
Adam Hathcock
da87e45534 add async xzstream 2025-10-31 11:57:49 +00:00
Adam Hathcock
2ffaef5563 async xzblock 2025-10-31 11:47:27 +00:00
Adam Hathcock
55cb350d2c remove needless variable 2025-10-31 11:44:32 +00:00
Adam Hathcock
7fa271a1b4 fix ILLink 2025-10-31 11:43:35 +00:00
Adam Hathcock
c53ca372f2 don't use pools in tests 2025-10-31 11:39:57 +00:00
Adam Hathcock
75bc8501f4 Merge pull request #997 from adamhathcock/copilot/fix-ziparchive-linux-issue
Fix ArchiveFactory.Open double-wrapping causing "Cannot determine compressed stream type" on Linux
2025-10-31 11:24:59 +00:00
Adam Hathcock
1e22b47fe1 some fixes 2025-10-31 11:23:30 +00:00
Adam Hathcock
74e2dca207 fmt 2025-10-31 11:15:42 +00:00
Adam Hathcock
a669de24b7 Merge remote-tracking branch 'origin/master' into adam/async-lzma 2025-10-31 11:13:17 +00:00
Adam Hathcock
e1e9c449e9 use proper async versions 2025-10-31 11:12:46 +00:00
Adam Hathcock
60e1dc0239 review fixes 2025-10-31 11:10:55 +00:00
Adam Hathcock
10eb94fd82 Merge pull request #1002 from adamhathcock/adam/async-bzip2
async bzip2 and add
2025-10-31 11:09:39 +00:00
Adam Hathcock
ccc8587e5f review fixes 2025-10-31 10:55:33 +00:00
Adam Hathcock
53c96193c1 format 2025-10-30 16:40:59 +00:00
Adam Hathcock
d4f11e00b1 some more async changes 2025-10-30 16:31:45 +00:00
Adam Hathcock
321233b82c some async implementations 2025-10-30 15:08:57 +00:00
Adam Hathcock
eb188051d4 fmt 2025-10-30 14:53:11 +00:00
Adam Hathcock
a136084e11 add adc async 2025-10-30 14:52:51 +00:00
Adam Hathcock
bc06f3179d add basics for async bzip2 2025-10-30 14:42:46 +00:00
Adam Hathcock
ee84d971b2 Merge remote-tracking branch 'origin/master' into copilot/fix-ziparchive-linux-issue
# Conflicts:
#	src/SharpCompress/packages.lock.json
2025-10-30 14:30:31 +00:00
Twan
264d80ef4c Merge branch 'master' into master 2025-10-30 07:53:09 +01:00
Adam Hathcock
dba68187ac Merge pull request #996 from adamhathcock/adam/async-rar-ai 2025-10-29 14:26:15 +00:00
Adam Hathcock
ca4a1936b3 Merge remote-tracking branch 'origin/adam/async-rar-ai' into adam/async-rar-ai 2025-10-29 14:11:02 +00:00
Adam Hathcock
77c8d31a90 Merge pull request #1000 from adamhathcock/copilot/sub-pr-996
Fix Windows test failures due to ArrayPool buffer sizing
2025-10-29 14:10:39 +00:00
Adam Hathcock
ab7196f86c some review fixes 2025-10-29 14:07:12 +00:00
copilot-swe-agent[bot]
88b3a66bf9 Fix Windows test failures in SharpCompressStreamTests
ArrayPool.Rent() can return buffers larger than requested. The tests were using test.Length (the actual buffer size) instead of the requested size (0x1000), causing failures on Windows where ArrayPool returns larger buffers than on Linux.

Fixed by:
- Using explicit size (0x1000) instead of test.Length in Read() calls
- Using test.Take(0x1000) instead of test when comparing arrays

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-29 13:40:44 +00:00
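This is documented ArrayPool behavior, not a platform quirk; the pool simply handed out different bucket sizes on the two OSes. A self-contained demonstration of the rule the fix applies:

    using System;
    using System.Buffers;

    const int requested = 0x1000;
    var buffer = ArrayPool<byte>.Shared.Rent(requested);
    try
    {
        // Rent guarantees *at least* `requested` bytes; buffer.Length is
        // frequently larger, so reads and comparisons must use the
        // requested count, never buffer.Length.
        Console.WriteLine($"requested 0x{requested:X}, rented {buffer.Length}");
    }
    finally
    {
        ArrayPool<byte>.Shared.Return(buffer);
    }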
copilot-swe-agent[bot]
ea77666b4a Final verification: All tests pass, no security issues
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-29 13:38:38 +00:00
copilot-swe-agent[bot]
db98e5f39b Fix ArchiveFactory.Open to avoid double-wrapping SharpCompressStream
Use SharpCompressStream.Create instead of constructor to properly handle
streams that are already wrapped. This prevents potential buffering issues
when opening ZIP files, particularly on Linux systems.

Added tests to verify both raw FileStream and pre-wrapped stream scenarios.

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-29 13:34:53 +00:00
copilot-swe-agent[bot]
df59c5cb9d Plan: Fix potential ZipArchive.IsZipFile failure on Linux
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-29 13:26:00 +00:00
copilot-swe-agent[bot]
e786e95358 Initial plan 2025-10-29 13:25:48 +00:00
Adam Hathcock
75ada5623c add async tests for compress stream 2025-10-29 13:23:50 +00:00
copilot-swe-agent[bot]
ad5c655c45 Initial plan 2025-10-29 13:09:05 +00:00
Adam Hathcock
65e607454e add async for UnpWriteBufAsync and remove comments 2025-10-29 13:07:43 +00:00
Adam Hathcock
f238be6003 add arraypool usage in init 2025-10-29 10:35:03 +00:00
Adam Hathcock
dc31e4c5fa fmt 2025-10-29 10:27:30 +00:00
Adam Hathcock
665d8cd266 ran out of tokens, things work but need one more conversion 2025-10-29 10:23:57 +00:00
Adam Hathcock
8324114e84 remove unused code 2025-10-29 10:11:34 +00:00
Adam Hathcock
b83e6ee4ce fix async usage 2025-10-29 09:55:11 +00:00
Adam Hathcock
58bab0d310 async unpack2.0 2025-10-29 09:53:14 +00:00
Adam Hathcock
1af51aaaba async unpack1.5 2025-10-29 09:50:12 +00:00
Adam Hathcock
a09327b831 unpack50async 2025-10-29 09:46:10 +00:00
Adam Hathcock
16543bf74c async UnpReadBufAsync 2025-10-29 09:38:55 +00:00
Adam Hathcock
aa4cd373ac added more async methods 2025-10-29 09:28:34 +00:00
Adam Hathcock
351e294362 convert unpack15 and unpack20 2025-10-29 09:23:42 +00:00
Adam Hathcock
a0c5b1cd9d add archive tests 2025-10-29 09:08:08 +00:00
Adam Hathcock
df2ed1e584 fix RarStream 2025-10-29 09:00:23 +00:00
Adam Hathcock
b354f7a3a5 fix logic mistake 2025-10-29 08:47:18 +00:00
Adam Hathcock
bb53d1e1c6 entrystream fixes and fmt 2025-10-29 08:41:05 +00:00
Twan van Dongen
2aabd8d0e1 Initial implementation of the ARJ (Archived by Robert Jung) format, supporting 'store' for single file archives. Multi-file archives and all compression algorithms still require implementations. 2025-10-28 20:38:29 +01:00
Adam Hathcock
aca97c2c6c add rarcrc tests 2025-10-28 16:48:05 +00:00
Adam Hathcock
8e7d959cf4 add async creations 2025-10-28 16:07:16 +00:00
Adam Hathcock
b23f031db9 add async reads 2025-10-28 15:52:18 +00:00
Adam Hathcock
1ba529a9d5 first pass of async files 2025-10-28 15:29:41 +00:00
Adam Hathcock
3d29c183ef basic async usage 2025-10-28 15:00:11 +00:00
Adam Hathcock
8a108b590d Merge pull request #993 from adamhathcock/adam/macos-fixes
make test linux only
2025-10-28 14:52:05 +00:00
Adam Hathcock
bca0f67344 make test linux only 2025-10-28 12:18:05 +00:00
Adam Hathcock
f3dad51134 Merge pull request #991 from adamhathcock/async-reader-methods
Add more Async tests and complete Zip tests
2025-10-28 11:50:03 +00:00
Adam Hathcock
f51840829c Merge branch 'master' into async-reader-methods 2025-10-28 11:39:35 +00:00
Adam Hathcock
aa1c0d0870 Merge pull request #988 from adamhathcock/copilot/fix-file-write-error
Fix extraction failure on Windows due to case-sensitive path comparison
2025-10-28 11:39:17 +00:00
Adam Hathcock
dee5ee6589 Merge pull request #989 from adamhathcock/copilot/add-support-empty-directories
Add support for empty directory entries in archives
2025-10-28 11:38:41 +00:00
Adam Hathcock
b799f479c4 Merge branch 'master' into copilot/add-support-empty-directories 2025-10-28 11:35:56 +00:00
copilot-swe-agent[bot]
b4352fefa5 Fix code formatting per CSharpier standards
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-28 11:32:39 +00:00
Adam Hathcock
77d06fb60e Merge branch 'master' into copilot/fix-file-write-error 2025-10-28 11:32:05 +00:00
Adam Hathcock
00b647457c Merge pull request #990 from adamhathcock/copilot/add-common-exception-type
Make all library exceptions inherit from SharpCompressException
2025-10-28 11:31:32 +00:00
Adam Hathcock
153d10a35c add async to forward only streams 2025-10-28 11:30:12 +00:00
Adam Hathcock
06713c641e async deflate 64 2025-10-28 11:26:31 +00:00
copilot-swe-agent[bot]
210978ec2d Format code with CSharpier
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-28 11:13:51 +00:00
Adam Hathcock
42f7d43139 enable zip64 tests that pass 2025-10-28 11:07:53 +00:00
Adam Hathcock
19967f5ad7 allow forward only write 2025-10-28 11:01:27 +00:00
Adam Hathcock
a1de3eb47d add async tests and clean up deflate64stream 2025-10-28 11:01:12 +00:00
copilot-swe-agent[bot]
e88841bdec Add support for empty directory entries in archives
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-28 10:34:03 +00:00
copilot-swe-agent[bot]
c8e4915f8e Final progress report
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-28 10:29:36 +00:00
copilot-swe-agent[bot]
a93a3f0598 Address code review feedback - fix formatting
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-28 10:26:31 +00:00
copilot-swe-agent[bot]
084f81fc8d Format code with CSharpier 2025-10-28 10:23:57 +00:00
copilot-swe-agent[bot]
d148f36e87 Add support for empty directory entries in archives
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-28 10:22:58 +00:00
copilot-swe-agent[bot]
150d9c35b7 Complete fix for case-sensitive path comparison on Windows
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-28 10:22:56 +00:00
copilot-swe-agent[bot]
e11198616e Address code review feedback: use RuntimeInformation for platform detection
- Replace Environment.OSVersion.Platform with RuntimeInformation.IsOSPlatform(OSPlatform.Windows)
- Clarify test comment about platform-specific behavior
- Add using System.Runtime.InteropServices for RuntimeInformation

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-28 10:20:05 +00:00
copilot-swe-agent[bot]
2f27f1e6f9 Complete exception hierarchy implementation
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-28 10:18:42 +00:00
copilot-swe-agent[bot]
5392ca9794 Fix case-sensitive path comparison on Windows for file extraction
- Add PathComparison property that uses OrdinalIgnoreCase on Windows and Ordinal on Unix
- Update all path comparison checks in ExtractionMethods to use PathComparison
- Add comprehensive tests for extraction with case-insensitive paths
- Ensure security check for path traversal still works correctly

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-28 10:16:07 +00:00
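A sketch combining the two review points above: RuntimeInformation-based platform detection and a PathComparison property. The member names follow the commit messages, but their exact placement in the library is assumed:

    using System;
    using System.IO;
    using System.Runtime.InteropServices;

    static class ExtractionPathsSketch
    {
        // Windows paths are case-insensitive; Unix paths are not.
        static StringComparison PathComparison =>
            RuntimeInformation.IsOSPlatform(OSPlatform.Windows)
                ? StringComparison.OrdinalIgnoreCase
                : StringComparison.Ordinal;

        // Path-traversal guard: the resolved destination must stay under
        // the destination directory, compared with the platform's rules.
        public static bool IsUnderRoot(string destinationDirectory, string entryPath)
        {
            var root = Path.GetFullPath(destinationDirectory + Path.DirectorySeparatorChar);
            var full = Path.GetFullPath(Path.Combine(destinationDirectory, entryPath));
            return full.StartsWith(root, PathComparison);
        }
    }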
copilot-swe-agent[bot]
46672eb583 Update exceptions to inherit from SharpCompressException
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-28 10:15:40 +00:00
Adam Hathcock
79653eee80 Merge remote-tracking branch 'origin/master' into async-reader-methods 2025-10-28 10:11:11 +00:00
Adam Hathcock
16ad86c52a add async implementations to readonlysubstream 2025-10-28 10:09:46 +00:00
copilot-swe-agent[bot]
6b7c6be5f5 Initial plan 2025-10-28 10:02:58 +00:00
copilot-swe-agent[bot]
fda1c2cc79 Initial plan 2025-10-28 10:01:37 +00:00
copilot-swe-agent[bot]
ef2fee0ee3 Initial plan 2025-10-28 10:00:50 +00:00
Adam Hathcock
e287d0811d minor clean up 2025-10-28 09:58:01 +00:00
Adam Hathcock
a7164f3c9f Merge pull request #987 from adamhathcock/copilot/fix-gzip-extract-not-supported-exception
Fix GZip extraction NotSupportedException for non-seekable streams
2025-10-27 14:26:04 +00:00
Adam Hathcock
c55060039a Merge branch 'master' into copilot/fix-gzip-extract-not-supported-exception 2025-10-27 14:21:49 +00:00
Adam Hathcock
c68d8deddd add async tests 2025-10-27 12:34:24 +00:00
Adam Hathcock
f6eabc5db1 Merge remote-tracking branch 'origin/master' into async-reader-methods
# Conflicts:
#	src/SharpCompress/packages.lock.json
2025-10-27 12:24:14 +00:00
Adam Hathcock
72d5884db6 added async reader overloads 2025-10-27 12:23:54 +00:00
Adam Hathcock
3595c89c79 Merge pull request #983 from adamhathcock/copilot/set-up-copilot-instructions
Configure Copilot coding agent instructions for SharpCompress
2025-10-27 12:13:50 +00:00
Adam Hathcock
9ebbc718c5 Merge branch 'master' into copilot/fix-gzip-extract-not-supported-exception 2025-10-27 12:11:32 +00:00
Adam Hathcock
e862480b86 Merge branch 'master' into copilot/set-up-copilot-instructions 2025-10-27 12:09:55 +00:00
Adam Hathcock
1f3d8fe6f1 Merge pull request #986 from adamhathcock/copilot/fix-compressiontype-none-bug
Support CompressionType.None for uncompressed 7z files
2025-10-27 12:06:52 +00:00
Adam Hathcock
41ae036ab4 Merge branch 'master' into copilot/set-up-copilot-instructions 2025-10-27 12:04:58 +00:00
copilot-swe-agent[bot]
588d176b96 Final verification - all tests pass
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-27 11:15:16 +00:00
copilot-swe-agent[bot]
f8697120a0 Add support for CompressionType.None for uncompressed 7z files
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-27 11:11:38 +00:00
copilot-swe-agent[bot]
1a767105e6 Add explanatory comment for EntryStartPosition initialization
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-27 11:09:52 +00:00
copilot-swe-agent[bot]
4067b6ed2c Fix GZip extraction for non-seekable streams
- Modified GZipFilePart to only access stream.Position when stream.CanSeek is true
- Modified GZipArchiveEntry.OpenEntryStream to check CanSeek before accessing Position
- Added test case GZip_Archive_NonSeekableStream to verify non-seekable stream support
- All existing tests pass

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-27 11:08:38 +00:00
copilot-swe-agent[bot]
b272dbfd1f Clarify CSharpier should be run from project root
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-27 11:07:02 +00:00
copilot-swe-agent[bot]
48be7bbf86 Correct formatting instructions to use CSharpier
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-27 11:05:47 +00:00
copilot-swe-agent[bot]
51e22cea71 Initial plan for fixing GZip non-seekable stream support
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-27 11:02:53 +00:00
copilot-swe-agent[bot]
2241e27e68 Initial exploration: Understanding the issue
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-27 11:02:29 +00:00
copilot-swe-agent[bot]
11c90ae879 Update Copilot configuration to reflect actual project setup
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-27 11:00:02 +00:00
copilot-swe-agent[bot]
cf55125202 Initial plan 2025-10-27 10:58:07 +00:00
copilot-swe-agent[bot]
9cefb85905 Enhance AGENTS.md with SharpCompress-specific guidelines
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-10-27 10:57:22 +00:00
copilot-swe-agent[bot]
fc672da0e0 Initial plan 2025-10-27 10:55:53 +00:00
Adam Hathcock
25b297b142 Merge pull request #980 from adamhathcock/async-gzip-tests
adds more async tests and overloads to make things writable and async
2025-10-27 10:54:42 +00:00
Adam Hathcock
ab03c12fa8 add more tests 2025-10-27 10:52:03 +00:00
copilot-swe-agent[bot]
3095c805ad Initial plan 2025-10-27 10:48:24 +00:00
Adam Hathcock
9c18daafb8 Merge remote-tracking branch 'origin/async-gzip-tests' into async-gzip-tests 2025-10-27 10:46:15 +00:00
Adam Hathcock
16182417fb add tar specific tests 2025-10-27 10:46:08 +00:00
Adam Hathcock
9af35201e4 Merge branch 'master' into async-gzip-tests 2025-10-27 10:31:51 +00:00
Adam Hathcock
f21b982955 adds more async tests and overloads to make things writable and async 2025-10-27 10:31:10 +00:00
497 changed files with 88083 additions and 2544 deletions


@@ -3,11 +3,11 @@
"isRoot": true,
"tools": {
"csharpier": {
"version": "1.1.2",
"version": "1.2.4",
"commands": [
"csharpier"
],
"rollForward": false
}
}
}
}


@@ -1,13 +0,0 @@
# Copilot Coding Agent Configuration
This repository includes a minimal opt-in configuration and CI workflow to allow the GitHub Copilot coding agent to open and validate PRs.
- .copilot-agent.yml: opt-in config for automated agents
- .github/workflows/dotnetcore.yml: CI runs on PRs touching the solution, source, or tests to validate changes
- AGENTS.yml: general information for this project
Maintainers can adjust the allowed paths or disable the agent by editing or removing .copilot-agent.yml.
Notes:
- Do not change any other files in the repository.
- If build/test paths are different, update the workflow accordingly; this workflow targets SharpCompress.sln and the SharpCompress.Tests test project.

.github/prompts/plan-async.prompt.md vendored Normal file (+25 lines)

@@ -0,0 +1,25 @@
# Plan: Implement Missing Async Functionality in SharpCompress
SharpCompress has async support for low-level stream operations and Reader/Writer APIs, but critical entry points (Archive.Open, factory methods, initialization) remain synchronous. This plan adds async overloads for all user-facing I/O operations and fixes existing async bugs, enabling full end-to-end async workflows.
## Steps
1. **Add async factory methods** to [ArchiveFactory.cs](src/SharpCompress/Factories/ArchiveFactory.cs), [ReaderFactory.cs](src/SharpCompress/Factories/ReaderFactory.cs), and [WriterFactory.cs](src/SharpCompress/Factories/WriterFactory.cs) with `OpenAsync` and `CreateAsync` overloads accepting `CancellationToken`
2. **Implement async Open methods** on concrete archive types ([ZipArchive.cs](src/SharpCompress/Archives/Zip/ZipArchive.cs), [TarArchive.cs](src/SharpCompress/Archives/Tar/TarArchive.cs), [RarArchive.cs](src/SharpCompress/Archives/Rar/RarArchive.cs), [GZipArchive.cs](src/SharpCompress/Archives/GZip/GZipArchive.cs), [SevenZipArchive.cs](src/SharpCompress/Archives/SevenZip/SevenZipArchive.cs)) and reader types ([ZipReader.cs](src/SharpCompress/Readers/Zip/ZipReader.cs), [TarReader.cs](src/SharpCompress/Readers/Tar/TarReader.cs), etc.)
3. **Convert archive initialization logic to async** including header reading, volume loading, and format signature detection across archive constructors and internal initialization methods
4. **Fix LZMA decoder async bugs** in [LzmaStream.cs](src/SharpCompress/Compressors/LZMA/LzmaStream.cs), [Decoder.cs](src/SharpCompress/Compressors/LZMA/Decoder.cs), and [OutWindow.cs](src/SharpCompress/Compressors/LZMA/OutWindow.cs) to enable true async 7Zip support and remove `NonDisposingStream` workaround
5. **Complete Rar async implementation** by converting `UnpackV2017` methods to async in [UnpackV2017.cs](src/SharpCompress/Compressors/Rar/UnpackV2017.cs) and updating Rar20 decompression
6. **Add comprehensive async tests** covering all new async entry points, cancellation scenarios, and concurrent operations across all archive formats in test files
## Further Considerations
1. **Breaking changes** - Should new async methods be added alongside existing sync methods (non-breaking), or should sync methods eventually be deprecated? Recommend the additive approach for backward compatibility (see the sketch after this list).
2. **Performance impact** - Header data for formats like Zip/Tar is often small; consider whether truly async parsing adds value over sync parsing wrapped in a Task, or make it conditional on stream type (network vs file).
3. **7Zip complexity** - The LZMA async bug fix (Step 4) may be challenging due to state management in the decoder; consider whether to scope it separately or implement a simpler workaround that maintains correctness.
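As a rough illustration of the additive shape from Step 1 and Consideration 1 — `SomeFactory` is hypothetical, and a real implementation would perform async header reads instead of wrapping sync work in `Task.Run`:
```csharp
using System;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;

public static class SomeFactory
{
    // Existing sync entry point stays untouched (non-breaking).
    public static IArchive Open(string filePath) => throw new NotImplementedException();

    // The async overload is purely additive and accepts a CancellationToken.
    public static async Task<IArchive> OpenAsync(
        string filePath,
        CancellationToken cancellationToken = default)
    {
        cancellationToken.ThrowIfCancellationRequested();
        // Placeholder body: real code would read headers asynchronously here.
        return await Task.Run(() => Open(filePath), cancellationToken).ConfigureAwait(false);
    }
}
```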

123
.github/prompts/plan-for-next.prompt.md vendored Normal file
View File

@@ -0,0 +1,123 @@
# Plan: Modernize SharpCompress Public API
Based on comprehensive analysis, the API has several inconsistencies around factory patterns, async support, format capabilities, and options classes. Most improvements can be done incrementally without breaking changes.
## Steps
1. **Standardize factory patterns** by deprecating format-specific static `Open` methods in [Archives/Zip/ZipArchive.cs](src/SharpCompress/Archives/Zip/ZipArchive.cs), [Archives/Tar/TarArchive.cs](src/SharpCompress/Archives/Tar/TarArchive.cs), etc. in favor of centralized [Factories/ArchiveFactory.cs](src/SharpCompress/Factories/ArchiveFactory.cs)
2. **Complete async implementation** in [Writers/Zip/ZipWriter.cs](src/SharpCompress/Writers/Zip/ZipWriter.cs) and other writers that currently use sync-over-async, implementing true async I/O throughout the writer hierarchy
3. **Unify options classes** by making [Common/ExtractionOptions.cs](src/SharpCompress/Common/ExtractionOptions.cs) inherit from `OptionsBase` and adding progress reporting to extraction methods consistently
4. **Clarify GZip semantics** in [Archives/GZip/GZipArchive.cs](src/SharpCompress/Archives/GZip/GZipArchive.cs) by adding XML documentation explaining single-entry limitation and relationship to GZip compression used in Tar.gz
## Further Considerations
1. **Breaking changes roadmap** - Should we plan a major version (2.0) to remove deprecated factory methods, clean up `ArchiveType` enum (remove Arc/Arj or add full support), and consolidate naming patterns?
2. **Progress reporting consistency** - Should `IProgress<ArchiveExtractionProgress<IEntry>>` be added to all extraction extension methods or consolidated into options classes?
## Detailed Analysis
### Factory Pattern Issues
Three different factory patterns exist with overlapping functionality:
1. **Static Factories**: ArchiveFactory, ReaderFactory, WriterFactory
2. **Instance Factories**: IArchiveFactory, IReaderFactory, IWriterFactory
3. **Format-specific static methods**: Each archive class has static `Open` methods
**Example confusion:**
```csharp
// Three ways to open a Zip archive - which is recommended?
var archive1 = ArchiveFactory.Open("file.zip");
var archive2 = ZipArchive.Open("file.zip");
var archive3 = ArchiveFactory.AutoFactory.Open(fileInfo, options);
```
### Async Support Gaps
The base `IWriter` interface has async methods, but writer implementations provide minimal async support. Most writers just call the synchronous methods:
```csharp
public virtual async Task WriteAsync(...)
{
// Default implementation calls synchronous version
Write(filename, source, modificationTime);
await Task.CompletedTask.ConfigureAwait(false);
}
```
Real async implementations exist in only a few places:
- `TarWriter` - proper async implementation
- Most other writers use sync-over-async (see the sketch below)
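A minimal, self-contained sketch of the difference — simplified types, not SharpCompress's actual writer hierarchy:
```csharp
using System.IO;
using System.Threading;
using System.Threading.Tasks;

public abstract class WriterBase
{
    protected abstract Stream OutputStream { get; }

    // Sync-over-async: blocks on I/O, then returns a completed Task.
    public virtual Task WriteAsync(string name, Stream source, CancellationToken ct = default)
    {
        source.CopyTo(OutputStream);
        return Task.CompletedTask;
    }
}

public sealed class TrueAsyncWriter : WriterBase
{
    private readonly MemoryStream _output = new();
    protected override Stream OutputStream => _output;

    // True async: the payload copy awaits real async I/O and honors cancellation.
    public override async Task WriteAsync(string name, Stream source, CancellationToken ct = default)
    {
        await source.CopyToAsync(_output, 81920, ct).ConfigureAwait(false);
    }
}
```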
### GZip Archive Special Case
GZip is treated as both a compression format and an archive format, but only supports single-entry archives:
```csharp
protected override GZipArchiveEntry CreateEntryInternal(...)
{
if (Entries.Any())
{
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
}
// ...
}
```
### Options Class Hierarchy
```
OptionsBase (LeaveStreamOpen, ArchiveEncoding)
├─ ReaderOptions (LookForHeader, Password, DisableCheckIncomplete, BufferSize, ExtensionHint, Progress)
├─ WriterOptions (CompressionType, CompressionLevel, Progress)
│ ├─ ZipWriterOptions (ArchiveComment, UseZip64)
│ ├─ TarWriterOptions (FinalizeArchiveOnClose, HeaderFormat)
│ └─ GZipWriterOptions (no additional properties)
└─ ExtractionOptions (standalone - Overwrite, ExtractFullPath, PreserveFileTime, PreserveAttributes)
```
**Issues:**
- `ExtractionOptions` doesn't inherit from `OptionsBase` - no encoding support during extraction (a sketch of the proposed fix follows this list)
- Progress reporting inconsistency between readers and extraction
- Obsolete properties (`ChecksumIsValid`, `Version`) with unclear migration path
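A sketch of the proposed change, with stub types standing in for the real ones in `SharpCompress.Common`:
```csharp
// Stubs so the sketch is self-contained; the real types live in SharpCompress.Common.
public class ArchiveEncoding { }

public class OptionsBase
{
    public bool LeaveStreamOpen { get; set; } = true;
    public ArchiveEncoding ArchiveEncoding { get; set; } = new();
}

// Proposed: ExtractionOptions joins the OptionsBase hierarchy, so extraction
// code gains access to LeaveStreamOpen and encoding settings.
public class ExtractionOptions : OptionsBase
{
    public bool Overwrite { get; set; }
    public bool ExtractFullPath { get; set; }
    public bool PreserveFileTime { get; set; }
    public bool PreserveAttributes { get; set; }
}
```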
### Implementation Priorities
**High Priority (Non-Breaking):**
1. Add API usage guide (Archive vs Reader, factory recommendations, async best practices)
2. Fix progress reporting consistency
3. Complete async implementation in writers
**Medium Priority (Next Major Version):**
1. Unify factory pattern - deprecate format-specific static `Open` methods
2. Clean up options classes - make `ExtractionOptions` inherit from `OptionsBase`
3. Clarify archive types - remove Arc/Arj from `ArchiveType` enum or add full support
4. Standardize naming across archive types
**Low Priority:**
1. Add BZip2 archive support similar to GZipArchive
2. Complete obsolete property cleanup with migration guide
### Backward Compatibility Strategy
**Safe (Non-Breaking) Changes:**
- Add new methods to interfaces (use default implementations)
- Add new options properties (with defaults)
- Add new factory methods
- Improve async implementations
- Add progress reporting support
**Breaking Changes to Avoid:**
- ❌ Removing format-specific `Open` methods (deprecate instead)
- ❌ Changing `LeaveStreamOpen` default (currently `true`)
- ❌ Removing obsolete properties before major version bump
- ❌ Changing return types or signatures of existing methods
**Deprecation Pattern:**
- Use `[Obsolete]` for one major version
- Use `[EditorBrowsable(EditorBrowsableState.Never)]` in next major version
- Remove in following major version
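The pattern above, expressed as attributes on a hypothetical deprecated member:
```csharp
using System;
using System.ComponentModel;

public static class DeprecationExample
{
    // Major version N: compiler warning, member still works.
    [Obsolete("Use ArchiveFactory.Open instead.")]
    public static void OpenV1() { }

    // Major version N+1: still compiles, but hidden from IntelliSense.
    [Obsolete("Use ArchiveFactory.Open instead.")]
    [EditorBrowsable(EditorBrowsableState.Never)]
    public static void OpenV2() { }

    // Major version N+2: the member is removed entirely.
}
```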

View File

@@ -14,12 +14,12 @@ jobs:
os: [windows-latest, ubuntu-latest]
steps:
- uses: actions/checkout@v5
- uses: actions/checkout@v6
- uses: actions/setup-dotnet@v5
with:
dotnet-version: 8.0.x
dotnet-version: 10.0.x
- run: dotnet run --project build/build.csproj
- uses: actions/upload-artifact@v5
- uses: actions/upload-artifact@v6
with:
name: ${{ matrix.os }}-sharpcompress.nupkg
path: artifacts/*

4
.gitignore vendored
View File

@@ -11,11 +11,11 @@ TestResults/
packages/*/
project.lock.json
tests/TestArchives/Scratch
tests/TestArchives/*/Scratch
tests/TestArchives/*/Scratch2
.vs
tools
.vscode
.idea/
.DS_Store
*.snupkg
/tests/TestArchives/6d23a38c-f064-4ef1-ad89-b942396f53b9/Scratch

9
.vscode/extensions.json vendored Normal file
View File

@@ -0,0 +1,9 @@
{
"recommendations": [
"ms-dotnettools.csdevkit",
"ms-dotnettools.csharp",
"ms-dotnettools.vscode-dotnet-runtime",
"csharpier.csharpier-vscode",
"formulahendry.dotnet-test-explorer"
]
}

97
.vscode/launch.json vendored Normal file
View File

@@ -0,0 +1,97 @@
{
"version": "0.2.0",
"configurations": [
{
"name": "Debug Tests (net10.0)",
"type": "coreclr",
"request": "launch",
"preLaunchTask": "build",
"program": "dotnet",
"args": [
"test",
"${workspaceFolder}/tests/SharpCompress.Test/SharpCompress.Test.csproj",
"-f",
"net10.0",
"--no-build",
"--verbosity=normal"
],
"cwd": "${workspaceFolder}",
"console": "internalConsole",
"stopAtEntry": false
},
{
"name": "Debug Specific Test (net10.0)",
"type": "coreclr",
"request": "launch",
"preLaunchTask": "build",
"program": "dotnet",
"args": [
"test",
"${workspaceFolder}/tests/SharpCompress.Test/SharpCompress.Test.csproj",
"-f",
"net10.0",
"--no-build",
"--filter",
"FullyQualifiedName~${input:testName}"
],
"cwd": "${workspaceFolder}",
"console": "internalConsole",
"stopAtEntry": false
},
{
"name": "Debug Performance Tests",
"type": "coreclr",
"request": "launch",
"preLaunchTask": "build",
"program": "dotnet",
"args": [
"run",
"--project",
"${workspaceFolder}/tests/SharpCompress.Performance/SharpCompress.Performance.csproj",
"--no-build"
],
"cwd": "${workspaceFolder}",
"console": "internalConsole",
"stopAtEntry": false
},
{
"name": "Debug Build Script",
"type": "coreclr",
"request": "launch",
"program": "dotnet",
"args": [
"run",
"--project",
"${workspaceFolder}/build/build.csproj",
"--",
"${input:buildTarget}"
],
"cwd": "${workspaceFolder}",
"console": "internalConsole",
"stopAtEntry": false
}
],
"inputs": [
{
"id": "testName",
"type": "promptString",
"description": "Enter test name or pattern (e.g., TestMethodName or ClassName)",
"default": ""
},
{
"id": "buildTarget",
"type": "pickString",
"description": "Select build target",
"options": [
"clean",
"restore",
"build",
"test",
"format",
"publish",
"default"
],
"default": "build"
}
]
}

29
.vscode/settings.json vendored Normal file
View File

@@ -0,0 +1,29 @@
{
"dotnet.defaultSolution": "SharpCompress.sln",
"files.exclude": {
"**/bin": true,
"**/obj": true
},
"files.watcherExclude": {
"**/bin/**": true,
"**/obj/**": true,
"**/artifacts/**": true
},
"search.exclude": {
"**/bin": true,
"**/obj": true,
"**/artifacts": true
},
"editor.formatOnSave": false,
"[csharp]": {
"editor.defaultFormatter": "csharpier.csharpier-vscode",
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.fixAll": "explicit"
}
},
"csharpier.enableDebugLogs": false,
"omnisharp.enableRoslynAnalyzers": true,
"omnisharp.enableEditorConfigSupport": true,
"dotnet-test-explorer.testProjectPath": "tests/**/*.csproj"
}

178
.vscode/tasks.json vendored Normal file
View File

@@ -0,0 +1,178 @@
{
"version": "2.0.0",
"tasks": [
{
"label": "build",
"command": "dotnet",
"type": "process",
"args": [
"build",
"${workspaceFolder}/SharpCompress.sln",
"/property:GenerateFullPaths=true",
"/consoleloggerparameters:NoSummary;ForceNoAlign"
],
"problemMatcher": "$msCompile",
"group": {
"kind": "build",
"isDefault": true
}
},
{
"label": "build-release",
"command": "dotnet",
"type": "process",
"args": [
"build",
"${workspaceFolder}/SharpCompress.sln",
"-c",
"Release",
"/property:GenerateFullPaths=true",
"/consoleloggerparameters:NoSummary;ForceNoAlign"
],
"problemMatcher": "$msCompile",
"group": "build"
},
{
"label": "build-library",
"command": "dotnet",
"type": "process",
"args": [
"build",
"${workspaceFolder}/src/SharpCompress/SharpCompress.csproj",
"/property:GenerateFullPaths=true",
"/consoleloggerparameters:NoSummary;ForceNoAlign"
],
"problemMatcher": "$msCompile",
"group": "build"
},
{
"label": "restore",
"command": "dotnet",
"type": "process",
"args": [
"restore",
"${workspaceFolder}/SharpCompress.sln"
],
"problemMatcher": "$msCompile"
},
{
"label": "clean",
"command": "dotnet",
"type": "process",
"args": [
"clean",
"${workspaceFolder}/SharpCompress.sln"
],
"problemMatcher": "$msCompile"
},
{
"label": "test",
"command": "dotnet",
"type": "process",
"args": [
"test",
"${workspaceFolder}/tests/SharpCompress.Test/SharpCompress.Test.csproj",
"--no-build",
"--verbosity=normal"
],
"problemMatcher": "$msCompile",
"group": {
"kind": "test",
"isDefault": true
},
"dependsOn": "build"
},
{
"label": "test-net10",
"command": "dotnet",
"type": "process",
"args": [
"test",
"${workspaceFolder}/tests/SharpCompress.Test/SharpCompress.Test.csproj",
"-f",
"net10.0",
"--no-build",
"--verbosity=normal"
],
"problemMatcher": "$msCompile",
"group": "test",
"dependsOn": "build"
},
{
"label": "test-net48",
"command": "dotnet",
"type": "process",
"args": [
"test",
"${workspaceFolder}/tests/SharpCompress.Test/SharpCompress.Test.csproj",
"-f",
"net48",
"--no-build",
"--verbosity=normal"
],
"problemMatcher": "$msCompile",
"group": "test",
"dependsOn": "build"
},
{
"label": "format",
"command": "dotnet",
"type": "process",
"args": [
"csharpier",
"."
],
"problemMatcher": []
},
{
"label": "format-check",
"command": "dotnet",
"type": "process",
"args": [
"csharpier",
"check",
"."
],
"problemMatcher": []
},
{
"label": "run-build-script",
"command": "dotnet",
"type": "process",
"args": [
"run",
"--project",
"${workspaceFolder}/build/build.csproj"
],
"problemMatcher": "$msCompile"
},
{
"label": "pack",
"command": "dotnet",
"type": "process",
"args": [
"pack",
"${workspaceFolder}/src/SharpCompress/SharpCompress.csproj",
"-c",
"Release",
"-o",
"${workspaceFolder}/artifacts/"
],
"problemMatcher": "$msCompile",
"dependsOn": "build-release"
},
{
"label": "performance-tests",
"command": "dotnet",
"type": "process",
"args": [
"run",
"--project",
"${workspaceFolder}/tests/SharpCompress.Performance/SharpCompress.Performance.csproj",
"-c",
"Release"
],
"problemMatcher": "$msCompile"
}
]
}

166
AGENTS.md
View File

@@ -1,19 +1,24 @@
---
description: 'Guidelines for building C# applications'
description: 'Guidelines for building SharpCompress - A C# compression library'
applyTo: '**/*.cs'
---
# C# Development
# SharpCompress Development
## About SharpCompress
SharpCompress is a pure C# compression library supporting multiple archive formats (Zip, Tar, GZip, BZip2, 7Zip, Rar, LZip, XZ, ZStandard) for .NET Framework 4.6.2, .NET Standard 2.1, .NET 6.0, and .NET 8.0. The library provides both seekable Archive APIs and forward-only Reader/Writer APIs for streaming scenarios.
## C# Instructions
- Always use the latest C# version and its features (currently C# 13).
- Write clear and concise comments for each function.
- Follow the existing code style and patterns in the codebase.
## General Instructions
- Make only high confidence suggestions when reviewing code changes.
- Write code with good maintainability practices, including comments on why certain design decisions were made.
- Handle edge cases and write clear exception handling.
- For libraries or external dependencies, mention their usage and purpose in comments.
- Preserve backward compatibility when making changes to public APIs.
## Naming Conventions
@@ -23,21 +28,74 @@ applyTo: '**/*.cs'
## Code Formatting
- Use CSharpier for all code formatting to ensure consistent style across the project.
- Install CSharpier globally: `dotnet tool install -g csharpier`
- Format files with: `dotnet csharpier format .`
- **ALWAYS run `dotnet csharpier format .` after making code changes before committing.**
- Configure your IDE to format on save using CSharpier.
- CSharpier configuration can be customized via `.csharpierrc` file in the project root.
- Trust CSharpier's opinionated formatting decisions to maintain consistency.
**Copilot agents: You MUST run the `format` task after making code changes to ensure consistency.**
- Use CSharpier for code formatting to ensure consistent style across the project
- CSharpier is configured as a local tool in `.config/dotnet-tools.json`
### Commands
1. **Restore tools** (first time only):
```bash
dotnet tool restore
```
2. **Check if files are formatted correctly** (doesn't modify files):
```bash
dotnet csharpier check .
```
- Exit code 0: All files are properly formatted
- Exit code 1: Some files need formatting (will show which files and differences)
3. **Format files** (modifies files):
```bash
dotnet csharpier format .
```
- Formats all files in the project to match CSharpier style
- Run from project root directory
4. **Configure your IDE** to format on save using CSharpier for the best experience
### Additional Notes
- The project also uses `.editorconfig` for editor settings (indentation, encoding, etc.)
- Let CSharpier handle code style while `.editorconfig` handles editor behavior
- Always run `dotnet csharpier check .` before committing to verify formatting
## Project Setup and Structure
- Guide users through creating a new .NET project with the appropriate templates.
- Explain the purpose of each generated file and folder to build understanding of the project structure.
- Demonstrate how to organize code using feature folders or domain-driven design principles.
- Show proper separation of concerns with models, services, and data access layers.
- Explain the Program.cs and configuration system in ASP.NET Core 9 including environment-specific settings.
- The project targets multiple frameworks: .NET Framework 4.6.2, .NET Standard 2.1, .NET 6.0, and .NET 8.0
- Main library is in `src/SharpCompress/`
- Tests are in `tests/SharpCompress.Test/`
- Performance tests are in `tests/SharpCompress.Performance/`
- Test archives are in `tests/TestArchives/`
- Build project is in `build/`
- Use `dotnet build` to build the solution
- Use `dotnet test` to run tests
- Solution file: `SharpCompress.sln`
### Directory Structure
```
src/SharpCompress/
├── Archives/ # IArchive implementations (Zip, Tar, Rar, 7Zip, GZip)
├── Readers/ # IReader implementations (forward-only)
├── Writers/ # IWriter implementations (forward-only)
├── Compressors/ # Low-level compression streams (BZip2, Deflate, LZMA, etc.)
├── Factories/ # Format detection and factory pattern
├── Common/ # Shared types (ArchiveType, Entry, Options)
├── Crypto/ # Encryption implementations
└── IO/ # Stream utilities and wrappers
tests/SharpCompress.Test/
├── Zip/, Tar/, Rar/, SevenZip/, GZip/, BZip2/ # Format-specific tests
├── TestBase.cs # Base test class with helper methods
└── TestArchives/ # Test data (not checked into main test project)
```
### Factory Pattern
All format types implement factory interfaces (`IArchiveFactory`, `IReaderFactory`, `IWriterFactory`) for auto-detection:
- `ReaderFactory.Open()` - Auto-detects format by probing stream
- `WriterFactory.Open()` - Creates writer for specified `ArchiveType`
- Factories located in: `src/SharpCompress/Factories/`
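For example — a minimal sketch using the `WriterFactory.Open` signature shown in this repo's USAGE examples; file names are placeholders:
```csharp
using System.IO;
using SharpCompress.Common;
using SharpCompress.Writers;

using var output = File.Create("out.zip");
using var writer = WriterFactory.Open(output, ArchiveType.Zip, CompressionType.Deflate);
using var source = File.OpenRead("data.bin");
// IWriter.Write(filename, source, modificationTime)
writer.Write("data.bin", source, null);
```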
## Nullable Reference Types
@@ -45,21 +103,79 @@ applyTo: '**/*.cs'
- Always use `is null` or `is not null` instead of `== null` or `!= null`.
- Trust the C# null annotations and don't add null checks when the type system says a value cannot be null.
## SharpCompress-Specific Guidelines
### Supported Formats
SharpCompress supports multiple archive and compression formats:
- **Archive Formats**: Zip, Tar, 7Zip, Rar (read-only)
- **Compression**: DEFLATE, BZip2, LZMA/LZMA2, PPMd, ZStandard (decompress only), Deflate64 (decompress only)
- **Combined Formats**: Tar.GZip, Tar.BZip2, Tar.LZip, Tar.XZ, Tar.ZStandard
- See FORMATS.md for complete format support matrix
### Stream Handling Rules
- **Disposal**: As of version 0.21, SharpCompress closes wrapped streams by default
- Use `ReaderOptions` or `WriterOptions` with `LeaveStreamOpen = true` to control stream disposal (see the sketch after this list)
- Use `NonDisposingStream` wrapper when working with compression streams directly to prevent disposal
- Always dispose of readers, writers, and archives in `using` blocks
- For forward-only operations, use Reader/Writer APIs; for random access, use Archive APIs
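A minimal sketch of the `LeaveStreamOpen` rule mentioned above (archive name is a placeholder):
```csharp
using System.IO;
using SharpCompress.Readers;

using var stream = File.OpenRead("archive.tar.gz");
using (var reader = ReaderFactory.Open(stream, new ReaderOptions { LeaveStreamOpen = true }))
{
    while (reader.MoveToNextEntry())
    {
        // process reader.Entry ...
    }
}
// stream is still open here because LeaveStreamOpen = true.
```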
### Async/Await Patterns
- All I/O operations support async/await with `CancellationToken`
- Async methods follow the naming convention: `MethodNameAsync`
- Key async methods:
- `WriteEntryToAsync` - Extract entry asynchronously
- `WriteAllToDirectoryAsync` - Extract all entries asynchronously
- `WriteAsync` - Write entry asynchronously
- `WriteAllAsync` - Write directory asynchronously
- `OpenEntryStreamAsync` - Open entry stream asynchronously
- Always provide `CancellationToken` parameter in async methods
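A short sketch tying these together — method names per the list above, paths are placeholders:
```csharp
using System.IO;
using System.Threading;
using SharpCompress.Common;
using SharpCompress.Readers;

using var cts = new CancellationTokenSource();
using var stream = File.OpenRead("archive.zip");
using var reader = ReaderFactory.Open(stream);
// Forward-only async extraction with cancellation support.
await reader.WriteAllToDirectoryAsync(
    @"C:\output",
    new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
    cts.Token);
```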
### Archive APIs vs Reader/Writer APIs
- **Archive API**: Use for random access with seekable streams (e.g., `ZipArchive`, `TarArchive`)
- **Reader API**: Use for forward-only reading on non-seekable streams (e.g., `ZipReader`, `TarReader`)
- **Writer API**: Use for forward-only writing on streams (e.g., `ZipWriter`, `TarWriter`)
- 7Zip only supports Archive API due to format limitations
### Tar-Specific Considerations
- Tar format requires file size in the header
- If no size is specified to TarWriter and the stream is not seekable, an exception will be thrown
- Tar combined with compression (GZip, BZip2, LZip, XZ) is supported
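A sketch of supplying the size explicitly — the `Write` overload with a `size` parameter is assumed here:
```csharp
using System.IO;
using SharpCompress.Common;
using SharpCompress.Writers.Tar;

using var output = File.Create("out.tar");
using var writer = new TarWriter(output, new TarWriterOptions(CompressionType.None, finalizeArchiveOnClose: true));
using var source = File.OpenRead("data.bin");
// Tar needs the entry size up front when the output stream is not seekable.
writer.Write("data.bin", source, modificationTime: null, size: source.Length);
```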
### Zip-Specific Considerations
- Supports Zip64 for large files (seekable streams only)
- Supports PKWare and WinZip AES encryption
- Multiple compression methods: None, Shrink, Reduce, Implode, DEFLATE, Deflate64, BZip2, LZMA, PPMd
- Encrypted LZMA is not supported
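A sketch of opening an encrypted Zip via `ReaderOptions.Password` (file name and password are placeholders):
```csharp
using System;
using SharpCompress.Archives.Zip;
using SharpCompress.Readers;

using var archive = ZipArchive.Open("secret.zip", new ReaderOptions { Password = "password" });
foreach (var entry in archive.Entries)
{
    // Entries decrypt transparently as they are read.
    Console.WriteLine(entry.Key);
}
```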
### Performance Considerations
- For large files, use Reader/Writer APIs with non-seekable streams to avoid loading the entire file into memory
- Leverage async I/O for better scalability
- Consider compression level trade-offs (speed vs. size)
- Use appropriate buffer sizes for stream operations
## Testing
- Always include test cases for critical paths of the application.
- Guide users through creating unit tests.
- Test with multiple archive formats when making changes to core functionality.
- Include tests for both Archive and Reader/Writer APIs when applicable.
- Test async operations with cancellation tokens.
- Do not emit "Act", "Arrange" or "Assert" comments.
- Copy existing style in nearby files for test method names and capitalization.
- Explain integration testing approaches for API endpoints.
- Demonstrate how to mock dependencies for effective testing.
- Show how to test authentication and authorization logic.
- Explain test-driven development principles as applied to API development.
- Use test archives from `tests/TestArchives` directory for consistency.
- Test stream disposal and `LeaveStreamOpen` behavior.
- Test edge cases: empty archives, large files, corrupted archives, encrypted archives.
## Performance Optimization
### Test Organization
- Base class: `TestBase` - Provides `TEST_ARCHIVES_PATH`, `SCRATCH_FILES_PATH`, temp directory management
- Framework: xUnit with AwesomeAssertions
- Test archives: `tests/TestArchives/` - Use existing archives, don't create new ones unnecessarily
- Match naming style of nearby test files
- Guide users on implementing caching strategies (in-memory, distributed, response caching).
- Explain asynchronous programming patterns and why they matter for API performance.
- Demonstrate pagination, filtering, and sorting for large data sets.
- Show how to implement compression and other performance optimizations.
- Explain how to measure and benchmark API performance.
## Common Pitfalls
1. **Don't mix Archive and Reader APIs** - Archive needs seekable stream, Reader doesn't
2. **Solid archives (Rar, 7Zip)** - Use `ExtractAllEntries()` for best performance, not individual entry extraction
3. **Stream disposal** - Always set `LeaveStreamOpen` explicitly when needed (default is to close)
4. **Tar + non-seekable stream** - Must provide file size or it will throw
5. **Multi-framework differences** - Some features differ between .NET Framework and modern .NET (e.g., Mono.Posix)
6. **Format detection** - Use `ReaderFactory.Open()` for auto-detection, test with actual archive files

View File

@@ -1,19 +1,19 @@
<Project>
<ItemGroup>
<PackageVersion Include="Bullseye" Version="6.0.0" />
<PackageVersion Include="AwesomeAssertions" Version="9.2.1" />
<PackageVersion Include="Bullseye" Version="6.1.0" />
<PackageVersion Include="AwesomeAssertions" Version="9.3.0" />
<PackageVersion Include="Glob" Version="1.1.9" />
<PackageVersion Include="JetBrains.Profiler.SelfApi" Version="2.5.14" />
<PackageVersion Include="Microsoft.Bcl.AsyncInterfaces" Version="8.0.0" />
<PackageVersion Include="Microsoft.NET.Test.Sdk" Version="18.0.0" />
<PackageVersion Include="JetBrains.Profiler.SelfApi" Version="2.5.15" />
<PackageVersion Include="Microsoft.Bcl.AsyncInterfaces" Version="10.0.0" />
<PackageVersion Include="Microsoft.NET.Test.Sdk" Version="18.0.1" />
<PackageVersion Include="Mono.Posix.NETStandard" Version="1.0.0" />
<PackageVersion Include="SimpleExec" Version="12.0.0" />
<PackageVersion Include="SimpleExec" Version="12.1.0" />
<PackageVersion Include="System.Text.Encoding.CodePages" Version="10.0.0" />
<PackageVersion Include="System.Buffers" Version="4.6.1" />
<PackageVersion Include="System.Memory" Version="4.6.3" />
<PackageVersion Include="System.Text.Encoding.CodePages" Version="8.0.0" />
<PackageVersion Include="xunit" Version="2.9.3" />
<PackageVersion Include="xunit.runner.visualstudio" Version="3.1.5" />
<PackageVersion Include="ZstdSharp.Port" Version="0.8.6" />
<PackageVersion Include="Microsoft.NET.ILLink.Tasks" Version="10.0.0" />
<PackageVersion Include="Microsoft.SourceLink.GitHub" Version="8.0.0" />
<PackageVersion Include="Microsoft.NETFramework.ReferenceAssemblies" Version="1.0.3" />
</ItemGroup>

View File

@@ -22,11 +22,28 @@
| 7Zip (4) | LZMA, LZMA2, BZip2, PPMd, BCJ, BCJ2, Deflate | Decompress | SevenZipArchive | N/A | N/A |
1. SOLID Rars are only supported in the RarReader API.
2. Zip format supports pkware and WinzipAES encryption. However, encrypted LZMA is not supported. Zip64 reading/writing is supported but only with seekable streams as the Zip spec doesn't support Zip64 data in post data descriptors. Deflate64 is only supported for reading.
2. Zip format supports PKWare and WinZip AES encryption. However, encrypted LZMA is not supported. Zip64 reading/writing is supported but only with seekable streams as the Zip spec doesn't support Zip64 data in post data descriptors. Deflate64 is only supported for reading. See [Zip Format Notes](#zip-format-notes) for details on multi-volume archives and streaming behavior.
3. The Tar format requires a file size in the header. If no size is specified to the TarWriter and the stream is not seekable, then an exception will be thrown.
4. The 7Zip format doesn't allow for reading as a forward-only stream so 7Zip is only supported through the Archive API
4. The 7Zip format doesn't allow for reading as a forward-only stream so 7Zip is only supported through the Archive API. See [7Zip Format Notes](#7zip-format-notes) for details on async extraction behavior.
5. LZip has no support for extra data like the file name or timestamp. There is a default filename used when looking at the entry Key on the archive.
### Zip Format Notes
- Multi-volume/split ZIP archives require ZipArchive (seekable streams) as ZipReader cannot seek across volume files.
- ZipReader processes entries from LocalEntry headers (which include directory entries ending with `/`) and intentionally skips DirectoryEntry headers from the central directory, as they are redundant in streaming mode - all entry data comes from LocalEntry headers which ZipReader has already processed.
### 7Zip Format Notes
- **Async Extraction Performance**: When using async extraction methods (e.g., `ExtractAllEntries()` with `MoveToNextEntryAsync()`), each file creates its own decompression stream to avoid state corruption in the LZMA decoder. This is less efficient than synchronous extraction, which can reuse a single decompression stream for multiple files in the same folder.
**Performance Impact**: For archives with many small files in the same compression folder, async extraction will be slower than synchronous extraction because it must:
1. Create a new LZMA decoder for each file
2. Skip through the decompressed data to reach each file's starting position
**Recommendation**: For best performance with 7Zip archives, use synchronous extraction methods (`MoveToNextEntry()` and `WriteEntryToDirectory()`) when possible. Use async methods only when you need to avoid blocking the thread (e.g., in UI applications or async-only contexts).
**Technical Details**: 7Zip archives group files into "folders" (compression units), where all files in a folder share one continuous LZMA-compressed stream. The LZMA decoder maintains internal state (dictionary window, decoder positions) that assumes sequential, non-interruptible processing. Async operations can yield control during awaits, which would corrupt this shared state. To avoid this, async extraction creates a fresh decoder stream for each file.
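A sketch of the recommended synchronous path (mirrors the solid-archive example in USAGE.md; paths are placeholders):
```csharp
using SharpCompress.Archives.SevenZip;
using SharpCompress.Common;

using var archive = SevenZipArchive.Open("archive.7z");
using var reader = archive.ExtractAllEntries();
while (reader.MoveToNextEntry())
{
    if (!reader.Entry.IsDirectory)
    {
        reader.WriteEntryToDirectory(
            @"C:\output",
            new ExtractionOptions { ExtractFullPath = true, Overwrite = true });
    }
}
```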
## Compression Streams
For those who want to directly compress/decompress bits, the single file formats are represented here as well. However, BZip2, LZip and XZ have no metadata (GZip has a little), so using them without something like a Tar file makes little sense.

View File

@@ -1,6 +1,6 @@
# SharpCompress
SharpCompress is a compression library in pure C# for .NET Framework 4.62, .NET Standard 2.1, .NET 6.0 and NET 8.0 that can unrar, un7zip, unzip, untar unbzip2, ungzip, unlzip, unzstd with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip are implemented.
SharpCompress is a compression library in pure C# for .NET Framework 4.8, .NET 8.0 and .NET 10.0 that can unrar, un7zip, unzip, untar, unbzip2, ungzip, unlzip, unzstd, unarc and unarj with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip is implemented.
The major feature is support for non-seekable streams so large files can be processed on the fly (i.e. download stream).

View File

@@ -87,20 +87,17 @@ memoryStream.Position = 0;
### Extract all files from a rar file to a directory using RarArchive
Note: Extracting a solid rar or 7z file needs to be done in sequential order to get acceptable decompression speed.
It is explicitly recommended to use `ExtractAllEntries` when extracting an entire `IArchive` instead of iterating over all its `Entries`.
Alternatively, use `IArchive.WriteToDirectory`.
`ExtractAllEntries` is primarily intended for solid archives (like solid Rar) or 7Zip archives, where sequential extraction provides the best performance. For general/simple extraction with any supported archive type, use `archive.WriteToDirectory()` instead.
```C#
using (var archive = RarArchive.Open("Test.rar"))
{
using (var reader = archive.ExtractAllEntries())
// Simple extraction with RarArchive; this WriteToDirectory pattern works for all archive types
archive.WriteToDirectory(@"D:\temp", new ExtractionOptions()
{
reader.WriteAllToDirectory(@"D:\temp", new ExtractionOptions()
{
ExtractFullPath = true,
Overwrite = true
});
}
ExtractFullPath = true,
Overwrite = true
});
}
```
@@ -116,6 +113,41 @@ using (var archive = RarArchive.Open("Test.rar"))
}
```
### Extract solid Rar or 7Zip archives with manual progress reporting
`ExtractAllEntries` only works for solid archives (Rar) or 7Zip archives. For optimal performance with these archive types, use this method:
```C#
using (var archive = RarArchive.Open("archive.rar")) // Must be solid Rar or 7Zip
{
if (archive.IsSolid || archive.Type == ArchiveType.SevenZip)
{
// Calculate total size for progress reporting
double totalSize = archive.Entries.Where(e => !e.IsDirectory).Sum(e => e.Size);
long completed = 0;
using (var reader = archive.ExtractAllEntries())
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
reader.WriteEntryToDirectory(@"D:\output", new ExtractionOptions()
{
ExtractFullPath = true,
Overwrite = true
});
completed += reader.Entry.Size;
double progress = completed / totalSize;
Console.WriteLine($"Progress: {progress:P}");
}
}
}
}
}
```
### Use ReaderFactory to autodetect archive type and Open the entry stream
```C#
@@ -298,14 +330,12 @@ using (var writer = WriterFactory.Open(stream, ArchiveType.Zip, CompressionType.
```C#
using (var archive = ZipArchive.Open("archive.zip"))
{
using (var reader = archive.ExtractAllEntries())
{
await reader.WriteAllToDirectoryAsync(
@"C:\output",
new ExtractionOptions() { ExtractFullPath = true, Overwrite = true },
cancellationToken
);
}
// Simple async extraction - works for all archive types
await archive.WriteToDirectoryAsync(
@"C:\output",
new ExtractionOptions() { ExtractFullPath = true, Overwrite = true },
cancellationToken
);
}
```

View File

@@ -11,6 +11,7 @@ const string Restore = "restore";
const string Build = "build";
const string Test = "test";
const string Format = "format";
const string CheckFormat = "check-format";
const string Publish = "publish";
Target(
@@ -42,12 +43,20 @@ Target(
Target(
Format,
() =>
{
Run("dotnet", "tool restore");
Run("dotnet", "csharpier format .");
}
);
Target(
CheckFormat,
() =>
{
Run("dotnet", "tool restore");
Run("dotnet", "csharpier check .");
}
);
Target(Restore, [Format], () => Run("dotnet", "restore"));
Target(Restore, [CheckFormat], () => Run("dotnet", "restore"));
Target(
Build,
@@ -61,7 +70,7 @@ Target(
Target(
Test,
[Build],
["net8.0", "net48"],
["net10.0", "net48"],
framework =>
{
IEnumerable<string> GetFiles(string d)

View File

@@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net8.0</TargetFramework>
<TargetFramework>net10.0</TargetFramework>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Bullseye" />

View File

@@ -1,12 +1,12 @@
{
"version": 2,
"dependencies": {
"net8.0": {
"net10.0": {
"Bullseye": {
"type": "Direct",
"requested": "[6.0.0, )",
"resolved": "6.0.0",
"contentHash": "vgwwXfzs7jJrskWH7saHRMgPzziq/e86QZNWY1MnMxd7e+De7E7EX4K3C7yrvaK9y02SJoLxNxcLG/q5qUAghw=="
"requested": "[6.1.0, )",
"resolved": "6.1.0",
"contentHash": "fltnAJDe0BEX5eymXGUq+il2rSUA0pHqUonNDRH2TrvRu8SkU17mYG0IVpdmG2ibtfhdjNrv4CuTCxHOwcozCA=="
},
"Glob": {
"type": "Direct",
@@ -16,9 +16,9 @@
},
"SimpleExec": {
"type": "Direct",
"requested": "[12.0.0, )",
"resolved": "12.0.0",
"contentHash": "ptxlWtxC8vM6Y6e3h9ZTxBBkOWnWrm/Sa1HT+2i1xcXY3Hx2hmKDZP5RShPf8Xr9D+ivlrXNy57ktzyH8kyt+Q=="
"requested": "[12.1.0, )",
"resolved": "12.1.0",
"contentHash": "PcCSAlMcKr5yTd571MgEMoGmoSr+omwziq2crB47lKP740lrmjuBocAUXHj+Q6LR6aUDFyhszot2wbtFJTClkA=="
}
}
}

View File

@@ -1,6 +1,6 @@
{
"sdk": {
"version": "8.0.100",
"version": "10.0.100",
"rollForward": "latestFeature"
}
}

View File

@@ -8,7 +8,7 @@ using SharpCompress.Readers;
namespace SharpCompress.Archives;
public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtractionListener
public abstract class AbstractArchive<TEntry, TVolume> : IArchive
where TEntry : IArchiveEntry
where TVolume : IVolume
{
@@ -17,11 +17,6 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
private bool _disposed;
private readonly SourceStream? _sourceStream;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionBegin;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionEnd;
public event EventHandler<CompressedBytesReadEventArgs>? CompressedBytesRead;
public event EventHandler<FilePartExtractionBeginEventArgs>? FilePartExtractionBegin;
protected ReaderOptions ReaderOptions { get; }
internal AbstractArchive(ArchiveType type, SourceStream sourceStream)
@@ -43,12 +38,6 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
public ArchiveType Type { get; }
void IArchiveExtractionListener.FireEntryExtractionBegin(IArchiveEntry entry) =>
EntryExtractionBegin?.Invoke(this, new ArchiveExtractionEventArgs<IArchiveEntry>(entry));
void IArchiveExtractionListener.FireEntryExtractionEnd(IArchiveEntry entry) =>
EntryExtractionEnd?.Invoke(this, new ArchiveExtractionEventArgs<IArchiveEntry>(entry));
private static Stream CheckStreams(Stream stream)
{
if (!stream.CanSeek || !stream.CanRead)
@@ -99,38 +88,12 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
}
}
void IArchiveExtractionListener.EnsureEntriesLoaded()
private void EnsureEntriesLoaded()
{
_lazyEntries.EnsureFullyLoaded();
_lazyVolumes.EnsureFullyLoaded();
}
void IExtractionListener.FireCompressedBytesRead(
long currentPartCompressedBytes,
long compressedReadBytes
) =>
CompressedBytesRead?.Invoke(
this,
new CompressedBytesReadEventArgs(
currentFilePartCompressedBytesRead: currentPartCompressedBytes,
compressedBytesRead: compressedReadBytes
)
);
void IExtractionListener.FireFilePartExtractionBegin(
string name,
long size,
long compressedSize
) =>
FilePartExtractionBegin?.Invoke(
this,
new FilePartExtractionBeginEventArgs(
compressedSize: compressedSize,
size: size,
name: name
)
);
/// <summary>
/// Use this method to extract all entries in an archive in order.
/// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
@@ -146,11 +109,11 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
{
if (!IsSolid && Type != ArchiveType.SevenZip)
{
throw new InvalidOperationException(
throw new SharpCompressException(
"ExtractAllEntries can only be used on solid archives or 7Zip archives (which require random access)."
);
}
((IArchiveExtractionListener)this).EnsureEntriesLoaded();
EnsureEntriesLoaded();
return CreateReaderForSolidExtraction();
}
@@ -161,6 +124,11 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
/// </summary>
public virtual bool IsSolid => false;
/// <summary>
/// Archive is ENCRYPTED (this means the Archive has password-protected files).
/// </summary>
public virtual bool IsEncrypted => false;
/// <summary>
/// The archive can find all the parts of the archive needed to fully extract the archive. This forces the parsing of the entire archive.
/// </summary>
@@ -168,7 +136,7 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
{
get
{
((IArchiveExtractionListener)this).EnsureEntriesLoaded();
EnsureEntriesLoaded();
return Entries.All(x => x.IsComplete);
}
}

View File

@@ -2,6 +2,8 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Writers;
@@ -94,6 +96,9 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
DateTime? modified
) => AddEntry(key, source, closeStream, size, modified);
IArchiveEntry IWritableArchive.AddDirectoryEntry(string key, DateTime? modified) =>
AddDirectoryEntry(key, modified);
public TEntry AddEntry(
string key,
Stream source,
@@ -134,6 +139,22 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
return false;
}
public TEntry AddDirectoryEntry(string key, DateTime? modified = null)
{
if (key.Length > 0 && key[0] is '/' or '\\')
{
key = key.Substring(1);
}
if (DoesKeyMatchExisting(key))
{
throw new ArchiveException("Cannot add entry with duplicate key: " + key);
}
var entry = CreateDirectoryEntry(key, modified);
newEntries.Add(entry);
RebuildModifiedCollection();
return entry;
}
public void SaveTo(Stream stream, WriterOptions options)
{
//reset streams of new entries
@@ -141,6 +162,18 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
SaveTo(stream, options, OldEntries, newEntries);
}
public async Task SaveToAsync(
Stream stream,
WriterOptions options,
CancellationToken cancellationToken = default
)
{
//reset streams of new entries
newEntries.Cast<IWritableArchiveEntry>().ForEach(x => x.Stream.Seek(0, SeekOrigin.Begin));
await SaveToAsync(stream, options, OldEntries, newEntries, cancellationToken)
.ConfigureAwait(false);
}
protected TEntry CreateEntry(
string key,
Stream source,
@@ -166,6 +199,8 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
bool closeStream
);
protected abstract TEntry CreateDirectoryEntry(string key, DateTime? modified);
protected abstract void SaveTo(
Stream stream,
WriterOptions options,
@@ -173,6 +208,14 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
IEnumerable<TEntry> newEntries
);
protected abstract Task SaveToAsync(
Stream stream,
WriterOptions options,
IEnumerable<TEntry> oldEntries,
IEnumerable<TEntry> newEntries,
CancellationToken cancellationToken = default
);
public override void Dispose()
{
base.Dispose();

View File

@@ -2,6 +2,8 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Factories;
using SharpCompress.IO;
@@ -20,9 +22,29 @@ public static class ArchiveFactory
public static IArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
readerOptions ??= new ReaderOptions();
stream = SharpCompressStream.Create(stream, bufferSize: readerOptions.BufferSize);
return FindFactory<IArchiveFactory>(stream).Open(stream, readerOptions);
}
/// <summary>
/// Opens an Archive for random access asynchronously
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
/// <returns></returns>
public static async Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
readerOptions ??= new ReaderOptions();
stream = SharpCompressStream.Create(stream, bufferSize: readerOptions.BufferSize);
var factory = FindFactory<IArchiveFactory>(stream);
stream = new SharpCompressStream(stream, bufferSize: readerOptions.BufferSize);
return factory.Open(stream, readerOptions);
return await factory
.OpenAsync(stream, readerOptions, cancellationToken)
.ConfigureAwait(false);
}
public static IWritableArchive Create(ArchiveType type)
@@ -50,6 +72,22 @@ public static class ArchiveFactory
return Open(new FileInfo(filePath), options);
}
/// <summary>
/// Opens an Archive from a filepath asynchronously.
/// </summary>
/// <param name="filePath"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
public static Task<IArchive> OpenAsync(
string filePath,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
filePath.NotNullOrEmpty(nameof(filePath));
return OpenAsync(new FileInfo(filePath), options, cancellationToken);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
@@ -62,6 +100,24 @@ public static class ArchiveFactory
return FindFactory<IArchiveFactory>(fileInfo).Open(fileInfo, options);
}
/// <summary>
/// Opens an Archive from a FileInfo object asynchronously.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
options ??= new ReaderOptions { LeaveStreamOpen = false };
var factory = FindFactory<IArchiveFactory>(fileInfo);
return await factory.OpenAsync(fileInfo, options, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Constructor with IEnumerable FileInfo objects, multi and split support.
/// </summary>
@@ -88,6 +144,40 @@ public static class ArchiveFactory
return FindFactory<IMultiArchiveFactory>(fileInfo).Open(filesArray, options);
}
/// <summary>
/// Opens a multi-part archive from files asynchronously.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
fileInfos.NotNull(nameof(fileInfos));
var filesArray = fileInfos.ToArray();
if (filesArray.Length == 0)
{
throw new InvalidOperationException("No files to open");
}
var fileInfo = filesArray[0];
if (filesArray.Length == 1)
{
return await OpenAsync(fileInfo, options, cancellationToken).ConfigureAwait(false);
}
fileInfo.NotNull(nameof(fileInfo));
options ??= new ReaderOptions { LeaveStreamOpen = false };
var factory = FindFactory<IMultiArchiveFactory>(fileInfo);
return await factory
.OpenAsync(filesArray, options, cancellationToken)
.ConfigureAwait(false);
}
/// <summary>
/// Constructor with IEnumerable FileInfo objects, multi and split support.
/// </summary>
@@ -114,6 +204,41 @@ public static class ArchiveFactory
return FindFactory<IMultiArchiveFactory>(firstStream).Open(streamsArray, options);
}
/// <summary>
/// Opens a multi-part archive from streams asynchronously.
/// </summary>
/// <param name="streams"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
IEnumerable<Stream> streams,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
streams.NotNull(nameof(streams));
var streamsArray = streams.ToArray();
if (streamsArray.Length == 0)
{
throw new InvalidOperationException("No streams");
}
var firstStream = streamsArray[0];
if (streamsArray.Length == 1)
{
return await OpenAsync(firstStream, options, cancellationToken).ConfigureAwait(false);
}
firstStream.NotNull(nameof(firstStream));
options ??= new ReaderOptions();
var factory = FindFactory<IMultiArchiveFactory>(firstStream);
return await factory
.OpenAsync(streamsArray, options, cancellationToken)
.ConfigureAwait(false);
}
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>

View File

@@ -1,6 +1,8 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
@@ -20,11 +22,30 @@ class AutoArchiveFactory : IArchiveFactory
int bufferSize = ReaderOptions.DefaultBufferSize
) => throw new NotSupportedException();
public Task<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
) => throw new NotSupportedException();
public FileInfo? GetFilePart(int index, FileInfo part1) => throw new NotSupportedException();
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
ArchiveFactory.Open(stream, readerOptions);
public Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => ArchiveFactory.OpenAsync(stream, readerOptions, cancellationToken);
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
ArchiveFactory.Open(fileInfo, readerOptions);
public Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => ArchiveFactory.OpenAsync(fileInfo, readerOptions, cancellationToken);
}

View File

@@ -2,6 +2,8 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.GZip;
using SharpCompress.IO;
@@ -100,6 +102,70 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
);
}
/// <summary>
/// Opens a GZipArchive asynchronously from a stream.
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return await Task.FromResult(Open(stream, readerOptions)).ConfigureAwait(false);
}
/// <summary>
/// Opens a GZipArchive asynchronously from a FileInfo.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return await Task.FromResult(Open(fileInfo, readerOptions)).ConfigureAwait(false);
}
/// <summary>
/// Opens a GZipArchive asynchronously from multiple streams.
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return await Task.FromResult(Open(streams, readerOptions)).ConfigureAwait(false);
}
/// <summary>
/// Opens a GZipArchive asynchronously from multiple FileInfo objects.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return await Task.FromResult(Open(fileInfos, readerOptions)).ConfigureAwait(false);
}
public static GZipArchive Create() => new();
/// <summary>
@@ -136,6 +202,16 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
SaveTo(stream, new WriterOptions(CompressionType.GZip));
}
public Task SaveToAsync(string filePath, CancellationToken cancellationToken = default) =>
SaveToAsync(new FileInfo(filePath), cancellationToken);
public async Task SaveToAsync(FileInfo fileInfo, CancellationToken cancellationToken = default)
{
using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
await SaveToAsync(stream, new WriterOptions(CompressionType.GZip), cancellationToken)
.ConfigureAwait(false);
}
public static bool IsGZipFile(Stream stream)
{
// read the header on the first read
@@ -155,6 +231,28 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
return true;
}
public static async Task<bool> IsGZipFileAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
// read the header on the first read
byte[] header = new byte[10];
// workitem 8501: handle edge case (decompress empty stream)
if (!await stream.ReadFullyAsync(header, cancellationToken).ConfigureAwait(false))
{
return false;
}
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
{
return false;
}
return true;
}
internal GZipArchive()
: base(ArchiveType.GZip) { }
@@ -173,6 +271,11 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
return new GZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream);
}
protected override GZipArchiveEntry CreateDirectoryEntry(
string directoryPath,
DateTime? modified
) => throw new NotSupportedException("GZip archives do not support directory entries.");
protected override void SaveTo(
Stream stream,
WriterOptions options,
@@ -196,6 +299,28 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
}
}
protected override async Task SaveToAsync(
Stream stream,
WriterOptions options,
IEnumerable<GZipArchiveEntry> oldEntries,
IEnumerable<GZipArchiveEntry> newEntries,
CancellationToken cancellationToken = default
)
{
if (Entries.Count > 1)
{
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
}
using var writer = new GZipWriter(stream, new GZipWriterOptions(options));
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
{
using var entryStream = entry.OpenEntryStream();
await writer
.WriteAsync(entry.Key.NotNull("Entry Key is null"), entryStream, cancellationToken)
.ConfigureAwait(false);
}
}
protected override IEnumerable<GZipArchiveEntry> LoadEntries(IEnumerable<GZipVolume> volumes)
{
var stream = volumes.Single().Stream;

View File

@@ -15,9 +15,10 @@ public class GZipArchiveEntry : GZipEntry, IArchiveEntry
{
//this is to reset the stream to be read multiple times
var part = (GZipFilePart)Parts.Single();
if (part.GetRawStream().Position != part.EntryStartPosition)
var rawStream = part.GetRawStream();
if (rawStream.CanSeek && rawStream.Position != part.EntryStartPosition)
{
part.GetRawStream().Position = part.EntryStartPosition;
rawStream.Position = part.EntryStartPosition;
}
return Parts.Single().GetCompressedStream().NotNull();
}

View File

@@ -7,12 +7,6 @@ namespace SharpCompress.Archives;
public interface IArchive : IDisposable
{
event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionBegin;
event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionEnd;
event EventHandler<CompressedBytesReadEventArgs> CompressedBytesRead;
event EventHandler<FilePartExtractionBeginEventArgs> FilePartExtractionBegin;
IEnumerable<IArchiveEntry> Entries { get; }
IEnumerable<IVolume> Volumes { get; }

View File

@@ -1,4 +1,7 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
@@ -6,61 +9,153 @@ namespace SharpCompress.Archives;
public static class IArchiveEntryExtensions
{
public static void WriteTo(this IArchiveEntry archiveEntry, Stream streamToWriteTo)
private const int BufferSize = 81920;
/// <param name="archiveEntry">The archive entry to extract.</param>
extension(IArchiveEntry archiveEntry)
{
if (archiveEntry.IsDirectory)
/// <summary>
/// Extract entry to the specified stream.
/// </summary>
/// <param name="streamToWriteTo">The stream to write the entry content to.</param>
/// <param name="progress">Optional progress reporter for tracking extraction progress.</param>
public void WriteTo(Stream streamToWriteTo, IProgress<ProgressReport>? progress = null)
{
throw new ExtractionException("Entry is a file directory and cannot be extracted.");
if (archiveEntry.IsDirectory)
{
throw new ExtractionException("Entry is a file directory and cannot be extracted.");
}
using var entryStream = archiveEntry.OpenEntryStream();
var sourceStream = WrapWithProgress(entryStream, archiveEntry, progress);
sourceStream.CopyTo(streamToWriteTo, BufferSize);
}
var streamListener = (IArchiveExtractionListener)archiveEntry.Archive;
streamListener.EnsureEntriesLoaded();
streamListener.FireEntryExtractionBegin(archiveEntry);
streamListener.FireFilePartExtractionBegin(
archiveEntry.Key ?? "Key",
archiveEntry.Size,
archiveEntry.CompressedSize
);
var entryStream = archiveEntry.OpenEntryStream();
using (entryStream)
/// <summary>
/// Extract entry to the specified stream asynchronously.
/// </summary>
/// <param name="streamToWriteTo">The stream to write the entry content to.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <param name="progress">Optional progress reporter for tracking extraction progress.</param>
public async Task WriteToAsync(
Stream streamToWriteTo,
IProgress<ProgressReport>? progress = null,
CancellationToken cancellationToken = default
)
{
using Stream s = new ListeningStream(streamListener, entryStream);
s.CopyTo(streamToWriteTo);
if (archiveEntry.IsDirectory)
{
throw new ExtractionException("Entry is a file directory and cannot be extracted.");
}
using var entryStream = await archiveEntry.OpenEntryStreamAsync(cancellationToken);
var sourceStream = WrapWithProgress(entryStream, archiveEntry, progress);
await sourceStream
.CopyToAsync(streamToWriteTo, BufferSize, cancellationToken)
.ConfigureAwait(false);
}
streamListener.FireEntryExtractionEnd(archiveEntry);
}
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteToDirectory(
this IArchiveEntry entry,
string destinationDirectory,
ExtractionOptions? options = null
) =>
ExtractionMethods.WriteEntryToDirectory(
entry,
destinationDirectory,
options,
entry.WriteToFile
);
private static Stream WrapWithProgress(
Stream source,
IArchiveEntry entry,
IProgress<ProgressReport>? progress
)
{
if (progress is null)
{
return source;
}
/// <summary>
/// Extract to specific file
/// </summary>
public static void WriteToFile(
this IArchiveEntry entry,
string destinationFileName,
ExtractionOptions? options = null
) =>
ExtractionMethods.WriteEntryToFile(
entry,
destinationFileName,
options,
(x, fm) =>
{
using var fs = File.Open(destinationFileName, fm);
entry.WriteTo(fs);
}
var entryPath = entry.Key ?? string.Empty;
var totalBytes = GetEntrySizeSafe(entry);
return new ProgressReportingStream(
source,
progress,
entryPath,
totalBytes,
leaveOpen: true
);
}
private static long? GetEntrySizeSafe(IArchiveEntry entry)
{
try
{
var size = entry.Size;
return size >= 0 ? size : null;
}
catch (NotImplementedException)
{
return null;
}
}
extension(IArchiveEntry entry)
{
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public void WriteToDirectory(
string destinationDirectory,
ExtractionOptions? options = null
) =>
ExtractionMethods.WriteEntryToDirectory(
entry,
destinationDirectory,
options,
entry.WriteToFile
);
/// <summary>
/// Extract to specific directory asynchronously, retaining filename
/// </summary>
public Task WriteToDirectoryAsync(
string destinationDirectory,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default
) =>
ExtractionMethods.WriteEntryToDirectoryAsync(
entry,
destinationDirectory,
options,
entry.WriteToFileAsync,
cancellationToken
);
/// <summary>
/// Extract to specific file
/// </summary>
public void WriteToFile(string destinationFileName, ExtractionOptions? options = null) =>
ExtractionMethods.WriteEntryToFile(
entry,
destinationFileName,
options,
(x, fm) =>
{
using var fs = File.Open(destinationFileName, fm);
entry.WriteTo(fs);
}
);
/// <summary>
/// Extract to specific file asynchronously
/// </summary>
public Task WriteToFileAsync(
string destinationFileName,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default
) =>
ExtractionMethods.WriteEntryToFileAsync(
entry,
destinationFileName,
options,
async (x, fm, ct) =>
{
using var fs = File.Open(destinationFileName, fm);
await entry.WriteToAsync(fs, null, ct).ConfigureAwait(false);
},
cancellationToken
);
}
}

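A minimal usage sketch of the extension members above, not part of this changeset (the file names are illustrative; ArchiveFactory.Open is the library's existing entry point, and only members shown in this diff are otherwise assumed):

using System;
using System.IO;
using System.Linq;
using System.Threading;
using SharpCompress.Archives;
using SharpCompress.Common;

// Extract the first file entry with progress reporting and cancellation.
using var archive = ArchiveFactory.Open("archive.zip");
var entry = archive.Entries.First(e => !e.IsDirectory);
var progress = new Progress<ProgressReport>(r => Console.WriteLine(r));
using var output = File.Create("extracted.bin");
await entry.WriteToAsync(output, progress, CancellationToken.None);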
View File

@@ -1,8 +1,8 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
@@ -10,76 +10,159 @@ namespace SharpCompress.Archives;
public static class IArchiveExtensions
{
    extension(IArchive archive)
    {
        /// <summary>
        /// Extract to specific directory with progress reporting
        /// </summary>
        /// <param name="destinationDirectory">The folder to extract into.</param>
        /// <param name="options">Extraction options.</param>
        /// <param name="progress">Optional progress reporter for tracking extraction progress.</param>
        public void WriteToDirectory(
            string destinationDirectory,
            ExtractionOptions? options = null,
            IProgress<ProgressReport>? progress = null
        )
        {
            // For solid archives (Rar, 7Zip), use the optimized reader-based approach
            if (archive.IsSolid || archive.Type == ArchiveType.SevenZip)
            {
                using var reader = archive.ExtractAllEntries();
                reader.WriteAllToDirectory(destinationDirectory, options);
            }
            else
            {
                // For non-solid archives, extract entries directly
                archive.WriteToDirectoryInternal(destinationDirectory, options, progress);
            }
        }

        private void WriteToDirectoryInternal(
            string destinationDirectory,
            ExtractionOptions? options,
            IProgress<ProgressReport>? progress
        )
        {
            // Prepare for progress reporting
            var totalBytes = archive.TotalUncompressSize;
            var bytesRead = 0L;
            // Tracking for created directories.
            var seenDirectories = new HashSet<string>();
            // Extract
            foreach (var entry in archive.Entries)
            {
                if (entry.IsDirectory)
                {
                    var dirPath = Path.Combine(
                        destinationDirectory,
                        entry.Key.NotNull("Entry Key is null")
                    );
                    if (
                        Path.GetDirectoryName(dirPath + "/") is { } parentDirectory
                        && seenDirectories.Add(dirPath)
                    )
                    {
                        Directory.CreateDirectory(parentDirectory);
                    }
                    continue;
                }
                // Use the entry's WriteToDirectory method which respects ExtractionOptions
                entry.WriteToDirectory(destinationDirectory, options);
                // Update progress
                bytesRead += entry.Size;
                progress?.Report(
                    new ProgressReport(entry.Key ?? string.Empty, bytesRead, totalBytes)
                );
            }
        }
/// <summary>
/// Extract to specific directory asynchronously with progress reporting and cancellation support
/// </summary>
/// <param name="destinationDirectory">The folder to extract into.</param>
/// <param name="options">Extraction options.</param>
/// <param name="progress">Optional progress reporter for tracking extraction progress.</param>
/// <param name="cancellationToken">Optional cancellation token.</param>
public async Task WriteToDirectoryAsync(
string destinationDirectory,
ExtractionOptions? options = null,
IProgress<ProgressReport>? progress = null,
CancellationToken cancellationToken = default
)
{
// For solid archives (Rar, 7Zip), use the optimized reader-based approach
if (archive.IsSolid || archive.Type == ArchiveType.SevenZip)
{
using var reader = archive.ExtractAllEntries();
await reader.WriteAllToDirectoryAsync(
destinationDirectory,
options,
cancellationToken
);
}
else
{
// For non-solid archives, extract entries directly
await archive.WriteToDirectoryAsyncInternal(
destinationDirectory,
options,
progress,
cancellationToken
);
}
}
private async Task WriteToDirectoryAsyncInternal(
string destinationDirectory,
ExtractionOptions? options,
IProgress<ProgressReport>? progress,
CancellationToken cancellationToken
)
{
// Prepare for progress reporting
var totalBytes = archive.TotalUncompressSize;
var bytesRead = 0L;
// Tracking for created directories.
var seenDirectories = new HashSet<string>();
// Extract
foreach (var entry in archive.Entries)
{
cancellationToken.ThrowIfCancellationRequested();
if (entry.IsDirectory)
{
var dirPath = Path.Combine(
destinationDirectory,
entry.Key.NotNull("Entry Key is null")
);
if (
Path.GetDirectoryName(dirPath + "/") is { } parentDirectory
&& seenDirectories.Add(dirPath)
)
{
Directory.CreateDirectory(parentDirectory);
}
continue;
}
// Use the entry's WriteToDirectoryAsync method which respects ExtractionOptions
await entry
.WriteToDirectoryAsync(destinationDirectory, options, cancellationToken)
.ConfigureAwait(false);
// Update progress
bytesRead += entry.Size;
progress?.Report(
new ProgressReport(entry.Key ?? string.Empty, bytesRead, totalBytes)
);
}
}
}
}
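A sketch of the whole-archive async path added above, not part of the diff (paths are illustrative; ExtractFullPath and Overwrite are existing ExtractionOptions members):

using var archive = ArchiveFactory.Open("backup.7z");
await archive.WriteToDirectoryAsync(
    "out",
    new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
    new Progress<ProgressReport>(r => Console.WriteLine(r)),
    CancellationToken.None);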

View File

@@ -1,10 +0,0 @@
using SharpCompress.Common;
namespace SharpCompress.Archives;
internal interface IArchiveExtractionListener : IExtractionListener
{
void EnsureEntriesLoaded();
void FireEntryExtractionBegin(IArchiveEntry entry);
void FireEntryExtractionEnd(IArchiveEntry entry);
}

View File

@@ -1,4 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Factories;
using SharpCompress.Readers;
@@ -26,10 +28,34 @@ public interface IArchiveFactory : IFactory
/// <param name="readerOptions">reading options.</param>
IArchive Open(Stream stream, ReaderOptions? readerOptions = null);
/// <summary>
/// Opens an Archive for random access asynchronously.
/// </summary>
/// <param name="stream">An open, readable and seekable stream.</param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo">the file to open.</param>
/// <param name="readerOptions">reading options.</param>
IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null);
/// <summary>
/// Opens an Archive from a FileInfo object asynchronously.
/// </summary>
/// <param name="fileInfo">the file to open.</param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
}
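Because OpenAsync is now part of IArchiveFactory, code handed any factory can open archives without blocking. A hedged sketch (the helper name is invented for illustration; only interface members shown above are used):

static async Task<long> CountEntriesAsync(
    IArchiveFactory factory,
    Stream seekableStream,
    CancellationToken ct)
{
    // OpenAsync mirrors Open but accepts a cancellation token.
    using var archive = await factory.OpenAsync(seekableStream, readerOptions: null, ct);
    return archive.Entries.LongCount();
}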

View File

@@ -1,5 +1,7 @@
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Factories;
using SharpCompress.Readers;
@@ -27,10 +29,34 @@ public interface IMultiArchiveFactory : IFactory
/// <param name="readerOptions">reading options.</param>
IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null);
/// <summary>
/// Opens a multi-part archive from streams asynchronously.
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task<IArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
/// <summary>
/// Constructor with IEnumerable Stream objects, multi and split support.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions">reading options.</param>
IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null);
/// <summary>
/// Opens a multi-part archive from files asynchronously.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task<IArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
}

View File

@@ -1,5 +1,7 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Writers;
namespace SharpCompress.Archives;
@@ -16,8 +18,16 @@ public interface IWritableArchive : IArchive
DateTime? modified = null
);
IArchiveEntry AddDirectoryEntry(string key, DateTime? modified = null);
void SaveTo(Stream stream, WriterOptions options);
Task SaveToAsync(
Stream stream,
WriterOptions options,
CancellationToken cancellationToken = default
);
/// <summary>
/// Use this to pause entry rebuilding when adding large collections of entries. Dispose when complete. A using statement is recommended.
/// </summary>

View File

@@ -1,5 +1,7 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Writers;
namespace SharpCompress.Archives;
@@ -42,6 +44,24 @@ public static class IWritableArchiveExtensions
writableArchive.SaveTo(stream, options);
}
public static Task SaveToAsync(
this IWritableArchive writableArchive,
string filePath,
WriterOptions options,
CancellationToken cancellationToken = default
) => writableArchive.SaveToAsync(new FileInfo(filePath), options, cancellationToken);
public static async Task SaveToAsync(
this IWritableArchive writableArchive,
FileInfo fileInfo,
WriterOptions options,
CancellationToken cancellationToken = default
)
{
using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
await writableArchive.SaveToAsync(stream, options, cancellationToken).ConfigureAwait(false);
}
public static void AddAllFromDirectory(
this IWritableArchive writableArchive,
string filePath,

View File

@@ -2,6 +2,8 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
@@ -84,6 +86,8 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
public override bool IsSolid => Volumes.First().IsSolidArchive;
public override bool IsEncrypted => Entries.First(x => !x.IsDirectory).IsEncrypted;
public virtual int MinVersion => Volumes.First().MinVersion;
public virtual int MaxVersion => Volumes.First().MaxVersion;
@@ -179,6 +183,70 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
);
}
/// <summary>
/// Opens a RarArchive asynchronously from a stream.
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return await Task.FromResult(Open(stream, readerOptions)).ConfigureAwait(false);
}
/// <summary>
/// Opens a RarArchive asynchronously from a FileInfo.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return await Task.FromResult(Open(fileInfo, readerOptions)).ConfigureAwait(false);
}
/// <summary>
/// Opens a RarArchive asynchronously from multiple streams.
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return await Task.FromResult(Open(streams, readerOptions)).ConfigureAwait(false);
}
/// <summary>
/// Opens a RarArchive asynchronously from multiple FileInfo objects.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return await Task.FromResult(Open(fileInfos, readerOptions)).ConfigureAwait(false);
}
public static bool IsRarFile(string filePath) => IsRarFile(new FileInfo(filePath));
public static bool IsRarFile(FileInfo fileInfo)

View File

@@ -70,24 +70,51 @@ public class RarArchiveEntry : RarEntry, IArchiveEntry
public Stream OpenEntryStream()
{
        RarStream stream;
        if (IsRarV3)
        {
            stream = new RarStream(
                archive.UnpackV1.Value,
                FileHeader,
                new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>())
            );
        }
        else
        {
            stream = new RarStream(
                archive.UnpackV2017.Value,
                FileHeader,
                new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>())
            );
        }
        stream.Initialize();
        return stream;
    }
public async Task<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
{
RarStream stream;
if (IsRarV3)
{
stream = new RarStream(
archive.UnpackV1.Value,
FileHeader,
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>())
);
}
else
{
stream = new RarStream(
archive.UnpackV2017.Value,
FileHeader,
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>())
);
}
await stream.InitializeAsync(cancellationToken);
return stream;
}
public bool IsComplete
{

View File

@@ -2,6 +2,8 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.SevenZip;
using SharpCompress.Compressors.LZMA.Utilites;
@@ -103,6 +105,70 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
);
}
/// <summary>
/// Opens a SevenZipArchive asynchronously from a stream.
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return await Task.FromResult(Open(stream, readerOptions)).ConfigureAwait(false);
}
/// <summary>
/// Opens a SevenZipArchive asynchronously from a FileInfo.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return await Task.FromResult(Open(fileInfo, readerOptions)).ConfigureAwait(false);
}
/// <summary>
/// Opens a SevenZipArchive asynchronously from multiple streams.
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return await Task.FromResult(Open(streams, readerOptions)).ConfigureAwait(false);
}
/// <summary>
/// Opens a SevenZipArchive asynchronously from multiple FileInfo objects.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return await Task.FromResult(Open(fileInfos, readerOptions)).ConfigureAwait(false);
}
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
@@ -205,15 +271,15 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
.GroupBy(x => x.FilePart.Folder)
.Any(folder => folder.Count() > 1);
public override bool IsEncrypted => Entries.First(x => !x.IsDirectory).IsEncrypted;
public override long TotalSize =>
_database?._packSizes.Aggregate(0L, (total, packSize) => total + packSize) ?? 0;
private sealed class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
{
private readonly SevenZipArchive _archive;
        private SevenZipEntry? _currentEntry;
internal SevenZipReader(ReaderOptions readerOptions, SevenZipArchive archive)
: base(readerOptions, ArchiveType.SevenZip) => this._archive = archive;
@@ -226,40 +292,135 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
stream.Position = 0;
foreach (var dir in entries.Where(x => x.IsDirectory))
{
_currentEntry = dir;
yield return dir;
}
            // For non-directory entries, yield them without creating shared streams.
            // Each call to GetEntryStream() will create a fresh decompression stream
            // to avoid state corruption issues with async operations.
            foreach (var entry in entries.Where(x => !x.IsDirectory))
            {
                _currentEntry = entry;
                yield return entry;
            }
        }

        protected override EntryStream GetEntryStream()
        {
            // Create a fresh decompression stream for each file (no state sharing).
            // However, the LZMA decoder has bugs in its async implementation that cause
            // state corruption even on fresh streams. The SyncOnlyStream wrapper
            // works around these bugs by forcing async operations to use sync equivalents.
            //
            // TODO: Fix the LZMA decoder async bugs (in LzmaStream, Decoder, OutWindow)
            // so this wrapper is no longer necessary.
            var entry = _currentEntry.NotNull("currentEntry is not null");
            if (entry.IsDirectory)
            {
                return CreateEntryStream(Stream.Null);
            }
            return CreateEntryStream(new SyncOnlyStream(entry.FilePart.GetCompressedStream()));
        }
}
/// <summary>
/// WORKAROUND: Forces async operations to use synchronous equivalents.
/// This is necessary because the LZMA decoder has bugs in its async implementation
/// that cause state corruption (IndexOutOfRangeException, DataErrorException).
///
/// The proper fix would be to repair the LZMA decoder's async methods
/// (LzmaStream.ReadAsync, Decoder.CodeAsync, OutWindow async operations),
/// but that requires deep changes to the decoder state machine.
/// </summary>
private sealed class SyncOnlyStream : Stream
{
private readonly Stream _baseStream;
public SyncOnlyStream(Stream baseStream) => _baseStream = baseStream;
public override bool CanRead => _baseStream.CanRead;
public override bool CanSeek => _baseStream.CanSeek;
public override bool CanWrite => _baseStream.CanWrite;
public override long Length => _baseStream.Length;
public override long Position
{
get => _baseStream.Position;
set => _baseStream.Position = value;
}
public override void Flush() => _baseStream.Flush();
public override int Read(byte[] buffer, int offset, int count) =>
_baseStream.Read(buffer, offset, count);
public override long Seek(long offset, SeekOrigin origin) =>
_baseStream.Seek(offset, origin);
public override void SetLength(long value) => _baseStream.SetLength(value);
public override void Write(byte[] buffer, int offset, int count) =>
_baseStream.Write(buffer, offset, count);
// Force async operations to use sync equivalents to avoid LZMA decoder bugs
public override Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
cancellationToken.ThrowIfCancellationRequested();
return Task.FromResult(_baseStream.Read(buffer, offset, count));
}
public override Task WriteAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
cancellationToken.ThrowIfCancellationRequested();
_baseStream.Write(buffer, offset, count);
return Task.CompletedTask;
}
public override Task FlushAsync(CancellationToken cancellationToken)
{
cancellationToken.ThrowIfCancellationRequested();
_baseStream.Flush();
return Task.CompletedTask;
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return new ValueTask<int>(_baseStream.Read(buffer.Span));
}
public override ValueTask WriteAsync(
ReadOnlyMemory<byte> buffer,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
_baseStream.Write(buffer.Span);
return ValueTask.CompletedTask;
}
#endif
protected override void Dispose(bool disposing)
{
if (disposing)
{
_baseStream.Dispose();
}
base.Dispose(disposing);
}
}
private class PasswordProvider : IPasswordProvider

View File

@@ -2,6 +2,8 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Tar;
using SharpCompress.Common.Tar.Headers;
@@ -101,6 +103,70 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
);
}
/// <summary>
/// Opens a TarArchive asynchronously from a stream.
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return await Task.FromResult(Open(stream, readerOptions)).ConfigureAwait(false);
}
/// <summary>
/// Opens a TarArchive asynchronously from a FileInfo.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return await Task.FromResult(Open(fileInfo, readerOptions)).ConfigureAwait(false);
}
/// <summary>
/// Opens a TarArchive asynchronously from multiple streams.
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return await Task.FromResult(Open(streams, readerOptions)).ConfigureAwait(false);
}
/// <summary>
/// Opens a TarArchive asynchronously from multiple FileInfo objects.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return await Task.FromResult(Open(fileInfos, readerOptions)).ConfigureAwait(false);
}
public static bool IsTarFile(string filePath) => IsTarFile(new FileInfo(filePath));
public static bool IsTarFile(FileInfo fileInfo)
@@ -222,6 +288,11 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
closeStream
);
protected override TarArchiveEntry CreateDirectoryEntry(
string directoryPath,
DateTime? modified
) => new TarWritableArchiveEntry(this, directoryPath, modified);
protected override void SaveTo(
Stream stream,
WriterOptions options,
@@ -230,15 +301,62 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
)
{
using var writer = new TarWriter(stream, new TarWriterOptions(options));
        foreach (var entry in oldEntries.Concat(newEntries))
        {
if (entry.IsDirectory)
{
writer.WriteDirectory(
entry.Key.NotNull("Entry Key is null"),
entry.LastModifiedTime
);
}
else
{
using var entryStream = entry.OpenEntryStream();
writer.Write(
entry.Key.NotNull("Entry Key is null"),
entryStream,
entry.LastModifiedTime,
entry.Size
);
}
}
}
protected override async Task SaveToAsync(
Stream stream,
WriterOptions options,
IEnumerable<TarArchiveEntry> oldEntries,
IEnumerable<TarArchiveEntry> newEntries,
CancellationToken cancellationToken = default
)
{
using var writer = new TarWriter(stream, new TarWriterOptions(options));
foreach (var entry in oldEntries.Concat(newEntries))
{
if (entry.IsDirectory)
{
await writer
.WriteDirectoryAsync(
entry.Key.NotNull("Entry Key is null"),
entry.LastModifiedTime,
cancellationToken
)
.ConfigureAwait(false);
}
else
{
using var entryStream = entry.OpenEntryStream();
await writer
.WriteAsync(
entry.Key.NotNull("Entry Key is null"),
entryStream,
entry.LastModifiedTime,
entry.Size,
cancellationToken
)
.ConfigureAwait(false);
}
}
}

View File

@@ -9,7 +9,8 @@ namespace SharpCompress.Archives.Tar;
internal sealed class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiveEntry
{
private readonly bool closeStream;
    private readonly Stream? stream;
private readonly bool isDirectory;
internal TarWritableArchiveEntry(
TarArchive archive,
@@ -27,6 +28,22 @@ internal sealed class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiv
Size = size;
LastModifiedTime = lastModified;
this.closeStream = closeStream;
isDirectory = false;
}
internal TarWritableArchiveEntry(
TarArchive archive,
string directoryPath,
DateTime? lastModified
)
: base(archive, null, CompressionType.None)
{
stream = null;
Key = directoryPath;
Size = 0;
LastModifiedTime = lastModified;
closeStream = false;
isDirectory = true;
}
public override long Crc => 0;
@@ -47,15 +64,19 @@ internal sealed class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiv
public override bool IsEncrypted => false;
    public override bool IsDirectory => isDirectory;
public override bool IsSplitAfter => false;
internal override IEnumerable<FilePart> Parts => throw new NotImplementedException();
    Stream IWritableArchiveEntry.Stream => stream ?? Stream.Null;
public override Stream OpenEntryStream()
{
if (stream is null)
{
return Stream.Null;
}
//ensure new stream is at the start, this could be reset
stream.Seek(0, SeekOrigin.Begin);
return SharpCompressStream.Create(stream, leaveOpen: true);
@@ -63,7 +84,7 @@ internal sealed class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiv
internal override void Close()
{
        if (closeStream && stream is not null)
{
stream.Dispose();
}

View File

@@ -2,6 +2,8 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Zip;
using SharpCompress.Common.Zip.Headers;
@@ -122,6 +124,70 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
);
}
/// <summary>
/// Opens a ZipArchive asynchronously from a stream.
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return await Task.FromResult(Open(stream, readerOptions)).ConfigureAwait(false);
}
/// <summary>
/// Opens a ZipArchive asynchronously from a FileInfo.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return await Task.FromResult(Open(fileInfo, readerOptions)).ConfigureAwait(false);
}
/// <summary>
/// Opens a ZipArchive asynchronously from multiple streams.
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return await Task.FromResult(Open(streams, readerOptions)).ConfigureAwait(false);
}
/// <summary>
/// Opens a ZipArchive asynchronously from multiple FileInfo objects.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return await Task.FromResult(Open(fileInfos, readerOptions)).ConfigureAwait(false);
}
public static bool IsZipFile(
string filePath,
string? password = null,
@@ -197,7 +263,93 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
if (stream.CanSeek) //could be multipart. Test for central directory - might not be z64 safe
{
var z = new SeekableZipHeaderFactory(password, new ArchiveEncoding());
            var x = z.ReadSeekableHeader(stream, useSync: true).FirstOrDefault();
return x?.ZipHeaderType == ZipHeaderType.DirectoryEntry;
}
else
{
return false;
}
}
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
return true;
}
catch
{
return false;
}
}
public static async Task<bool> IsZipFileAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
}
var header = headerFactory
.ReadStreamHeader(stream)
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
if (header is null)
{
return false;
}
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
return true;
}
catch
{
return false;
}
}
public static async Task<bool> IsZipMultiAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
}
var header = headerFactory
.ReadStreamHeader(stream)
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
if (header is null)
{
if (stream.CanSeek) //could be multipart. Test for central directory - might not be z64 safe
{
var z = new SeekableZipHeaderFactory(password, new ArchiveEncoding());
ZipHeader? x = null;
await foreach (
var h in z.ReadSeekableHeader(stream).WithCancellation(cancellationToken)
)
{
x = h;
break;
}
return x?.ZipHeaderType == ZipHeaderType.DirectoryEntry;
}
else
@@ -252,7 +404,9 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
protected override IEnumerable<ZipArchiveEntry> LoadEntries(IEnumerable<ZipVolume> volumes)
{
var vols = volumes.ToArray();
        foreach (
            var h in headerFactory.NotNull().ReadSeekableHeader(vols.Last().Stream, useSync: true)
        )
{
if (h != null)
{
@@ -306,14 +460,59 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
)
{
using var writer = new ZipWriter(stream, new ZipWriterOptions(options));
        foreach (var entry in oldEntries.Concat(newEntries))
        {
if (entry.IsDirectory)
{
writer.WriteDirectory(
entry.Key.NotNull("Entry Key is null"),
entry.LastModifiedTime
);
}
else
{
using var entryStream = entry.OpenEntryStream();
writer.Write(
entry.Key.NotNull("Entry Key is null"),
entryStream,
entry.LastModifiedTime
);
}
}
}
protected override async Task SaveToAsync(
Stream stream,
WriterOptions options,
IEnumerable<ZipArchiveEntry> oldEntries,
IEnumerable<ZipArchiveEntry> newEntries,
CancellationToken cancellationToken = default
)
{
using var writer = new ZipWriter(stream, new ZipWriterOptions(options));
foreach (var entry in oldEntries.Concat(newEntries))
{
if (entry.IsDirectory)
{
await writer
.WriteDirectoryAsync(
entry.Key.NotNull("Entry Key is null"),
entry.LastModifiedTime,
cancellationToken
)
.ConfigureAwait(false);
}
else
{
using var entryStream = entry.OpenEntryStream();
await writer
.WriteAsync(
entry.Key.NotNull("Entry Key is null"),
entryStream,
cancellationToken
)
.ConfigureAwait(false);
}
}
}
@@ -325,6 +524,11 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
bool closeStream
) => new ZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream);
protected override ZipArchiveEntry CreateDirectoryEntry(
string directoryPath,
DateTime? modified
) => new ZipWritableArchiveEntry(this, directoryPath, modified);
public static ZipArchive Create() => new();
protected override IReader CreateReaderForSolidExtraction()

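The directory-entry support and SaveToAsync above combine as follows; a sketch, not part of the diff, assuming in-memory content (AddDirectoryEntry and SaveToAsync are from this changeset; the AddEntry overload with a closeStream flag is assumed from the existing writable-archive API):

using var archive = ZipArchive.Create();
archive.AddDirectoryEntry("docs", DateTime.UtcNow);
archive.AddEntry(
    "docs/readme.txt",
    new MemoryStream(Encoding.UTF8.GetBytes("hello")),
    closeStream: true);
using var output = File.Create("out.zip");
await archive.SaveToAsync(output, new WriterOptions(CompressionType.Deflate));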
View File

@@ -9,7 +9,8 @@ namespace SharpCompress.Archives.Zip;
internal class ZipWritableArchiveEntry : ZipArchiveEntry, IWritableArchiveEntry
{
private readonly bool closeStream;
    private readonly Stream? stream;
private readonly bool isDirectory;
private bool isDisposed;
internal ZipWritableArchiveEntry(
@@ -27,6 +28,22 @@ internal class ZipWritableArchiveEntry : ZipArchiveEntry, IWritableArchiveEntry
Size = size;
LastModifiedTime = lastModified;
this.closeStream = closeStream;
isDirectory = false;
}
internal ZipWritableArchiveEntry(
ZipArchive archive,
string directoryPath,
DateTime? lastModified
)
: base(archive, null)
{
stream = null;
Key = directoryPath;
Size = 0;
LastModifiedTime = lastModified;
closeStream = false;
isDirectory = true;
}
public override long Crc => 0;
@@ -47,16 +64,20 @@ internal class ZipWritableArchiveEntry : ZipArchiveEntry, IWritableArchiveEntry
public override bool IsEncrypted => false;
    public override bool IsDirectory => isDirectory;
public override bool IsSplitAfter => false;
internal override IEnumerable<FilePart> Parts => throw new NotImplementedException();
    Stream IWritableArchiveEntry.Stream => stream ?? Stream.Null;
public override Stream OpenEntryStream()
{
if (stream is null)
{
return Stream.Null;
}
//ensure new stream is at the start, this could be reset
stream.Seek(0, SeekOrigin.Begin);
return SharpCompressStream.Create(stream, leaveOpen: true);
@@ -64,7 +85,7 @@ internal class ZipWritableArchiveEntry : ZipArchiveEntry, IWritableArchiveEntry
internal override void Close()
{
        if (closeStream && !isDisposed && stream is not null)
{
stream.Dispose();
isDisposed = true;

View File

@@ -1,7 +1,8 @@
using System;
using System.Runtime.CompilerServices;
// CLSCompliant(false) is required because ZStandard integration uses unsafe code
[assembly: CLSCompliant(false)]
[assembly: InternalsVisibleTo(
"SharpCompress.Test,PublicKey=0024000004800000940000000602000000240000525341310004000001000100158bebf1433f76dffc356733c138babea7a47536c65ed8009b16372c6f4edbb20554db74a62687f56b97c20a6ce8c4b123280279e33c894e7b3aa93ab3c573656fde4db576cfe07dba09619ead26375b25d2c4a8e43f7be257d712b0dd2eb546f67adb09281338618a58ac834fc038dd7e2740a7ab3591826252e4f4516306dc"
)]

View File

@@ -57,7 +57,7 @@ namespace SharpCompress.Common.Arc
return value switch
{
1 or 2 => CompressionType.None,
    3 => CompressionType.Packed,
4 => CompressionType.Squeezed,
5 or 6 or 7 or 8 => CompressionType.Crunched,
9 => CompressionType.Squashed,

View File

@@ -44,7 +44,7 @@ namespace SharpCompress.Common.Arc
Header.CompressedSize
);
break;
    case CompressionType.Packed:
compressedStream = new RunLength90Stream(
_stream,
(int)Header.CompressedSize
@@ -54,6 +54,14 @@ namespace SharpCompress.Common.Arc
compressedStream = new SqueezeStream(_stream, (int)Header.CompressedSize);
break;
case CompressionType.Crunched:
if (Header.OriginalSize > 128 * 1024)
{
throw new NotSupportedException(
"CompressionMethod: "
+ Header.CompressionMethod
+ " with size > 128KB"
);
}
compressedStream = new ArcLzwStream(
_stream,
(int)Header.CompressedSize,

View File

@@ -1,10 +0,0 @@
using System;
namespace SharpCompress.Common;
public class ArchiveExtractionEventArgs<T> : EventArgs
{
internal ArchiveExtractionEventArgs(T entry) => Item = entry;
public T Item { get; }
}

View File

@@ -8,4 +8,5 @@ public enum ArchiveType
SevenZip,
GZip,
Arc,
Arj,
}

View File

@@ -0,0 +1,58 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common.Arc;
using SharpCompress.Common.Arj.Headers;
namespace SharpCompress.Common.Arj
{
public class ArjEntry : Entry
{
private readonly ArjFilePart _filePart;
internal ArjEntry(ArjFilePart filePart)
{
_filePart = filePart;
}
public override long Crc => _filePart.Header.OriginalCrc32;
public override string? Key => _filePart?.Header.Name;
public override string? LinkTarget => null;
public override long CompressedSize => _filePart?.Header.CompressedSize ?? 0;
public override CompressionType CompressionType
{
get
{
if (_filePart.Header.CompressionMethod == CompressionMethod.Stored)
{
return CompressionType.None;
}
return CompressionType.ArjLZ77;
}
}
public override long Size => _filePart?.Header.OriginalSize ?? 0;
public override DateTime? LastModifiedTime => _filePart.Header.DateTimeModified.DateTime;
public override DateTime? CreatedTime => _filePart.Header.DateTimeCreated.DateTime;
public override DateTime? LastAccessedTime => _filePart.Header.DateTimeAccessed.DateTime;
public override DateTime? ArchivedTime => null;
public override bool IsEncrypted => false;
public override bool IsDirectory => _filePart.Header.FileType == FileType.Directory;
public override bool IsSplitAfter => false;
internal override IEnumerable<FilePart> Parts => _filePart.Empty();
}
}

View File

@@ -0,0 +1,72 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common.Arj.Headers;
using SharpCompress.Compressors.Arj;
using SharpCompress.IO;
namespace SharpCompress.Common.Arj
{
public class ArjFilePart : FilePart
{
private readonly Stream _stream;
internal ArjLocalHeader Header { get; set; }
internal ArjFilePart(ArjLocalHeader localArjHeader, Stream seekableStream)
: base(localArjHeader.ArchiveEncoding)
{
_stream = seekableStream;
Header = localArjHeader;
}
internal override string? FilePartName => Header.Name;
internal override Stream GetCompressedStream()
{
if (_stream != null)
{
Stream compressedStream;
switch (Header.CompressionMethod)
{
case CompressionMethod.Stored:
compressedStream = new ReadOnlySubStream(
_stream,
Header.DataStartPosition,
Header.CompressedSize
);
break;
case CompressionMethod.CompressedMost:
case CompressionMethod.Compressed:
case CompressionMethod.CompressedFaster:
if (Header.OriginalSize > 128 * 1024)
{
throw new NotSupportedException(
"CompressionMethod: "
+ Header.CompressionMethod
+ " with size > 128KB"
);
}
compressedStream = new LhaStream<Lh7DecoderCfg>(
_stream,
(int)Header.OriginalSize
);
break;
case CompressionMethod.CompressedFastest:
compressedStream = new LHDecoderStream(_stream, (int)Header.OriginalSize);
break;
default:
throw new NotSupportedException(
"CompressionMethod: " + Header.CompressionMethod
);
}
return compressedStream;
}
return _stream.NotNull();
}
internal override Stream GetRawStream() => _stream;
}
}

View File

@@ -0,0 +1,36 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Readers;
namespace SharpCompress.Common.Arj
{
public class ArjVolume : Volume
{
public ArjVolume(Stream stream, ReaderOptions readerOptions, int index = 0)
: base(stream, readerOptions, index) { }
public override bool IsFirstVolume
{
get { return true; }
}
/// <summary>
        /// Whether this ArjVolume is part of a multi-part archive. Multi-volume ARJ archives are not currently supported, so this is always false.
/// </summary>
public override bool IsMultiVolume
{
get { return false; }
}
internal IEnumerable<ArjFilePart> GetVolumeFileParts()
{
return new List<ArjFilePart>();
}
}
}

View File

@@ -0,0 +1,142 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Crypto;
namespace SharpCompress.Common.Arj.Headers
{
public enum ArjHeaderType
{
MainHeader,
LocalHeader,
}
public abstract class ArjHeader
{
private const int FIRST_HDR_SIZE = 34;
private const ushort ARJ_MAGIC = 0xEA60;
public ArjHeader(ArjHeaderType type)
{
ArjHeaderType = type;
}
public ArjHeaderType ArjHeaderType { get; }
public byte Flags { get; set; }
public FileType FileType { get; set; }
public abstract ArjHeader? Read(Stream reader);
public byte[] ReadHeader(Stream stream)
{
// check for magic bytes
Span<byte> magic = stackalloc byte[2];
if (stream.Read(magic) != 2)
{
return Array.Empty<byte>();
}
var magicValue = (ushort)(magic[0] | magic[1] << 8);
if (magicValue != ARJ_MAGIC)
{
throw new InvalidDataException("Not an ARJ file (wrong magic bytes)");
}
// read header_size
byte[] headerBytes = new byte[2];
stream.Read(headerBytes, 0, 2);
var headerSize = (ushort)(headerBytes[0] | headerBytes[1] << 8);
if (headerSize < 1)
{
return Array.Empty<byte>();
}
var body = new byte[headerSize];
var read = stream.Read(body, 0, headerSize);
if (read < headerSize)
{
return Array.Empty<byte>();
}
byte[] crc = new byte[4];
read = stream.Read(crc, 0, 4);
var checksum = Crc32Stream.Compute(body);
// Compute the hash value
if (checksum != BitConverter.ToUInt32(crc, 0))
{
throw new InvalidDataException("Header checksum is invalid");
}
return body;
}
protected List<byte[]> ReadExtendedHeaders(Stream reader)
{
List<byte[]> extendedHeader = new List<byte[]>();
byte[] buffer = new byte[2];
while (true)
{
int bytesRead = reader.Read(buffer, 0, 2);
if (bytesRead < 2)
{
throw new EndOfStreamException(
"Unexpected end of stream while reading extended header size."
);
}
var extHeaderSize = (ushort)(buffer[0] | (buffer[1] << 8));
if (extHeaderSize == 0)
{
return extendedHeader;
}
byte[] header = new byte[extHeaderSize];
bytesRead = reader.Read(header, 0, extHeaderSize);
if (bytesRead < extHeaderSize)
{
throw new EndOfStreamException(
"Unexpected end of stream while reading extended header data."
);
}
byte[] crc = new byte[4];
bytesRead = reader.Read(crc, 0, 4);
if (bytesRead < 4)
{
throw new EndOfStreamException(
"Unexpected end of stream while reading extended header CRC."
);
}
var checksum = Crc32Stream.Compute(header);
if (checksum != BitConverter.ToUInt32(crc, 0))
{
throw new InvalidDataException("Extended header checksum is invalid");
}
extendedHeader.Add(header);
}
}
// Flag helpers
public bool IsGabled => (Flags & 0x01) != 0;
public bool IsAnsiPage => (Flags & 0x02) != 0;
public bool IsVolume => (Flags & 0x04) != 0;
public bool IsArjProtected => (Flags & 0x08) != 0;
public bool IsPathSym => (Flags & 0x10) != 0;
public bool IsBackup => (Flags & 0x20) != 0;
public bool IsSecured => (Flags & 0x40) != 0;
public bool IsAltName => (Flags & 0x80) != 0;
public static FileType FileTypeFromByte(byte value)
{
return Enum.IsDefined(typeof(FileType), value)
? (FileType)value
: Headers.FileType.Unknown;
}
}
}
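For reference, the on-disk layout that ReadHeader and ReadExtendedHeaders walk, reconstructed from the code above (a summary only, no additional parsing logic):

// Basic header:
//   [magic 0xEA60, 2 bytes LE] [header size, 2 bytes LE]
//   [header body, "header size" bytes] [CRC-32 of body, 4 bytes LE]
// Extended headers then repeat:
//   [ext size, 2 bytes LE] [ext data, "ext size" bytes] [CRC-32 of data, 4 bytes LE]
// until an ext size of zero terminates the list.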

View File

@@ -0,0 +1,161 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Text;
using System.Threading.Tasks;
namespace SharpCompress.Common.Arj.Headers
{
public class ArjLocalHeader : ArjHeader
{
public ArchiveEncoding ArchiveEncoding { get; }
public long DataStartPosition { get; protected set; }
public byte ArchiverVersionNumber { get; set; }
public byte MinVersionToExtract { get; set; }
public HostOS HostOS { get; set; }
public CompressionMethod CompressionMethod { get; set; }
public DosDateTime DateTimeModified { get; set; } = new DosDateTime(0);
public long CompressedSize { get; set; }
public long OriginalSize { get; set; }
public long OriginalCrc32 { get; set; }
public int FileSpecPosition { get; set; }
public int FileAccessMode { get; set; }
public byte FirstChapter { get; set; }
public byte LastChapter { get; set; }
public long ExtendedFilePosition { get; set; }
public DosDateTime DateTimeAccessed { get; set; } = new DosDateTime(0);
public DosDateTime DateTimeCreated { get; set; } = new DosDateTime(0);
public long OriginalSizeEvenForVolumes { get; set; }
public string Name { get; set; } = string.Empty;
public string Comment { get; set; } = string.Empty;
private const byte StdHdrSize = 30;
private const byte R9HdrSize = 46;
public ArjLocalHeader(ArchiveEncoding archiveEncoding)
: base(ArjHeaderType.LocalHeader)
{
ArchiveEncoding =
archiveEncoding ?? throw new ArgumentNullException(nameof(archiveEncoding));
}
public override ArjHeader? Read(Stream stream)
{
var body = ReadHeader(stream);
if (body.Length > 0)
{
ReadExtendedHeaders(stream);
var header = LoadFrom(body);
header.DataStartPosition = stream.Position;
return header;
}
return null;
}
public ArjLocalHeader LoadFrom(byte[] headerBytes)
{
int offset = 0;
int ReadInt16()
{
if (offset + 1 >= headerBytes.Length)
{
throw new EndOfStreamException();
}
var v = headerBytes[offset] & 0xFF | (headerBytes[offset + 1] & 0xFF) << 8;
offset += 2;
return v;
}
long ReadInt32()
{
if (offset + 3 >= headerBytes.Length)
{
throw new EndOfStreamException();
}
long v =
headerBytes[offset] & 0xFF
| (headerBytes[offset + 1] & 0xFF) << 8
| (headerBytes[offset + 2] & 0xFF) << 16
| (headerBytes[offset + 3] & 0xFF) << 24;
offset += 4;
return v;
}
byte headerSize = headerBytes[offset++];
ArchiverVersionNumber = headerBytes[offset++];
MinVersionToExtract = headerBytes[offset++];
HostOS hostOS = (HostOS)headerBytes[offset++];
Flags = headerBytes[offset++];
CompressionMethod = CompressionMethodFromByte(headerBytes[offset++]);
FileType = FileTypeFromByte(headerBytes[offset++]);
offset++; // Skip 1 byte
var rawTimestamp = ReadInt32();
DateTimeModified =
rawTimestamp != 0 ? new DosDateTime(rawTimestamp) : new DosDateTime(0);
CompressedSize = ReadInt32();
OriginalSize = ReadInt32();
OriginalCrc32 = ReadInt32();
FileSpecPosition = ReadInt16();
FileAccessMode = ReadInt16();
FirstChapter = headerBytes[offset++];
LastChapter = headerBytes[offset++];
ExtendedFilePosition = 0;
OriginalSizeEvenForVolumes = 0;
if (headerSize > StdHdrSize)
{
ExtendedFilePosition = ReadInt32();
if (headerSize >= R9HdrSize)
{
rawTimestamp = ReadInt32();
DateTimeAccessed =
rawTimestamp != 0 ? new DosDateTime(rawTimestamp) : new DosDateTime(0);
rawTimestamp = ReadInt32();
DateTimeCreated =
rawTimestamp != 0 ? new DosDateTime(rawTimestamp) : new DosDateTime(0);
OriginalSizeEvenForVolumes = ReadInt32();
}
}
Name = Encoding.ASCII.GetString(
headerBytes,
offset,
Array.IndexOf(headerBytes, (byte)0, offset) - offset
);
offset += Name.Length + 1;
Comment = Encoding.ASCII.GetString(
headerBytes,
offset,
Array.IndexOf(headerBytes, (byte)0, offset) - offset
);
offset += Comment.Length + 1;
return this;
}
public static CompressionMethod CompressionMethodFromByte(byte value)
{
return value switch
{
0 => CompressionMethod.Stored,
1 => CompressionMethod.CompressedMost,
2 => CompressionMethod.Compressed,
3 => CompressionMethod.CompressedFaster,
4 => CompressionMethod.CompressedFastest,
8 => CompressionMethod.NoDataNoCrc,
9 => CompressionMethod.NoData,
_ => CompressionMethod.Unknown,
};
}
}
}

View File

@@ -0,0 +1,138 @@
using System;
using System.IO;
using System.Text;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Crypto;
namespace SharpCompress.Common.Arj.Headers
{
public class ArjMainHeader : ArjHeader
{
private const int FIRST_HDR_SIZE = 34;
private const ushort ARJ_MAGIC = 0xEA60;
public ArchiveEncoding ArchiveEncoding { get; }
public int ArchiverVersionNumber { get; private set; }
public int MinVersionToExtract { get; private set; }
public HostOS HostOs { get; private set; }
public int SecurityVersion { get; private set; }
public DosDateTime CreationDateTime { get; private set; } = new DosDateTime(0);
public long CompressedSize { get; private set; }
public long ArchiveSize { get; private set; }
public long SecurityEnvelope { get; private set; }
public int FileSpecPosition { get; private set; }
public int SecurityEnvelopeLength { get; private set; }
public int EncryptionVersion { get; private set; }
public int LastChapter { get; private set; }
public int ArjProtectionFactor { get; private set; }
public int Flags2 { get; private set; }
public string Name { get; private set; } = string.Empty;
public string Comment { get; private set; } = string.Empty;
public ArjMainHeader(ArchiveEncoding archiveEncoding)
: base(ArjHeaderType.MainHeader)
{
ArchiveEncoding =
archiveEncoding ?? throw new ArgumentNullException(nameof(archiveEncoding));
}
public override ArjHeader? Read(Stream stream)
{
var body = ReadHeader(stream);
ReadExtendedHeaders(stream);
return LoadFrom(body);
}
public ArjMainHeader LoadFrom(byte[] headerBytes)
{
var offset = 1;
byte ReadByte()
{
if (offset >= headerBytes.Length)
{
throw new EndOfStreamException();
}
return (byte)(headerBytes[offset++] & 0xFF);
}
int ReadInt16()
{
if (offset + 1 >= headerBytes.Length)
{
throw new EndOfStreamException();
}
var v = headerBytes[offset] & 0xFF | (headerBytes[offset + 1] & 0xFF) << 8;
offset += 2;
return v;
}
long ReadInt32()
{
if (offset + 3 >= headerBytes.Length)
{
throw new EndOfStreamException();
}
long v =
headerBytes[offset] & 0xFF
| (headerBytes[offset + 1] & 0xFF) << 8
| (headerBytes[offset + 2] & 0xFF) << 16
| (headerBytes[offset + 3] & 0xFF) << 24;
offset += 4;
return v;
}
string ReadNullTerminatedString(byte[] x, int startIndex)
{
var result = new StringBuilder();
int i = startIndex;
while (i < x.Length && x[i] != 0)
{
result.Append((char)x[i]);
i++;
}
// Skip the null terminator
i++;
if (i < x.Length)
{
byte[] remainder = new byte[x.Length - i];
Array.Copy(x, i, remainder, 0, remainder.Length);
x = remainder;
}
return result.ToString();
}
ArchiverVersionNumber = ReadByte();
MinVersionToExtract = ReadByte();
var hostOsByte = ReadByte();
HostOs = hostOsByte <= 11 ? (HostOS)hostOsByte : HostOS.Unknown;
Flags = ReadByte();
SecurityVersion = ReadByte();
FileType = FileTypeFromByte(ReadByte());
offset++; // skip reserved
CreationDateTime = new DosDateTime((int)ReadInt32());
CompressedSize = ReadInt32();
ArchiveSize = ReadInt32();
SecurityEnvelope = ReadInt32();
FileSpecPosition = ReadInt16();
SecurityEnvelopeLength = ReadInt16();
EncryptionVersion = ReadByte();
LastChapter = ReadByte();
Name = ReadNullTerminatedString(headerBytes, offset);
Comment = ReadNullTerminatedString(headerBytes, offset + 1 + Name.Length);
return this;
}
}
}

View File

@@ -0,0 +1,20 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace SharpCompress.Common.Arj.Headers
{
public enum CompressionMethod
{
Stored = 0,
CompressedMost = 1,
Compressed = 2,
CompressedFaster = 3,
CompressedFastest = 4,
NoDataNoCrc = 8,
NoData = 9,
Unknown,
}
}

View File

@@ -0,0 +1,37 @@
using System;
namespace SharpCompress.Common.Arj.Headers
{
public class DosDateTime
{
public DateTime DateTime { get; }
public DosDateTime(long dosValue)
{
// Ensure only the lower 32 bits are used
int value = unchecked((int)(dosValue & 0xFFFFFFFF));
var date = (value >> 16) & 0xFFFF;
var time = value & 0xFFFF;
var day = date & 0x1F;
var month = (date >> 5) & 0x0F;
var year = ((date >> 9) & 0x7F) + 1980;
var second = (time & 0x1F) * 2;
var minute = (time >> 5) & 0x3F;
var hour = (time >> 11) & 0x1F;
try
{
DateTime = new DateTime(year, month, day, hour, minute, second);
}
catch
{
DateTime = DateTime.MinValue;
}
}
public override string ToString() => DateTime.ToString("yyyy-MM-dd HH:mm:ss");
}
}
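A worked example of the bit unpacking above (the value is arbitrary): for dosValue = 0x58CF63C5, the date word 0x58CF gives day = 15, month = 6, year = 44 + 1980 = 2024, and the time word 0x63C5 gives hour = 12, minute = 30, second = 5 * 2 = 10, so:

var dt = new DosDateTime(0x58CF63C5);
// dt.DateTime == new DateTime(2024, 6, 15, 12, 30, 10)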

View File

@@ -0,0 +1,13 @@
namespace SharpCompress.Common.Arj.Headers
{
public enum FileType : byte
{
Binary = 0,
Text7Bit = 1,
CommentHeader = 2,
Directory = 3,
VolumeLabel = 4,
ChapterLabel = 5,
Unknown = 255,
}
}

View File

@@ -0,0 +1,19 @@
namespace SharpCompress.Common.Arj.Headers
{
public enum HostOS
{
MsDos = 0,
PrimOS = 1,
Unix = 2,
Amiga = 3,
MacOs = 4,
OS2 = 5,
AppleGS = 6,
AtariST = 7,
NeXT = 8,
VaxVMS = 9,
Win95 = 10,
Win32 = 11,
Unknown = 255,
}
}

View File

@@ -0,0 +1,117 @@
using System;
using System.Buffers.Binary;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common
{
public sealed class AsyncBinaryReader : IDisposable
{
private readonly Stream _stream;
private readonly Stream _originalStream;
private readonly bool _leaveOpen;
private readonly byte[] _buffer = new byte[8];
private bool _disposed;
public AsyncBinaryReader(Stream stream, bool leaveOpen = false, int bufferSize = 4096)
{
_originalStream = stream ?? throw new ArgumentNullException(nameof(stream));
_leaveOpen = leaveOpen;
// Use the stream directly without wrapping in BufferedStream
// BufferedStream uses synchronous Read internally which doesn't work with async-only streams
// SharpCompress uses SharpCompressStream for buffering which supports true async reads
_stream = stream;
}
public Stream BaseStream => _stream;
public async ValueTask<byte> ReadByteAsync(CancellationToken ct = default)
{
await ReadExactAsync(_buffer, 0, 1, ct).ConfigureAwait(false);
return _buffer[0];
}
public async ValueTask<ushort> ReadUInt16Async(CancellationToken ct = default)
{
await ReadExactAsync(_buffer, 0, 2, ct).ConfigureAwait(false);
return BinaryPrimitives.ReadUInt16LittleEndian(_buffer);
}
public async ValueTask<uint> ReadUInt32Async(CancellationToken ct = default)
{
await ReadExactAsync(_buffer, 0, 4, ct).ConfigureAwait(false);
return BinaryPrimitives.ReadUInt32LittleEndian(_buffer);
}
public async ValueTask<ulong> ReadUInt64Async(CancellationToken ct = default)
{
await ReadExactAsync(_buffer, 0, 8, ct).ConfigureAwait(false);
return BinaryPrimitives.ReadUInt64LittleEndian(_buffer);
}
public async ValueTask<byte[]> ReadBytesAsync(int count, CancellationToken ct = default)
{
var result = new byte[count];
await ReadExactAsync(result, 0, count, ct).ConfigureAwait(false);
return result;
}
private async ValueTask ReadExactAsync(
byte[] destination,
int offset,
int length,
CancellationToken ct
)
{
var read = 0;
while (read < length)
{
var n = await _stream
.ReadAsync(destination, offset + read, length - read, ct)
.ConfigureAwait(false);
if (n == 0)
{
throw new EndOfStreamException();
}
read += n;
}
}
public void Dispose()
{
if (_disposed)
{
return;
}
_disposed = true;
// Dispose the original stream if we own it
if (!_leaveOpen)
{
_originalStream.Dispose();
}
}
#if NET6_0_OR_GREATER
public async ValueTask DisposeAsync()
{
if (_disposed)
{
return;
}
_disposed = true;
// Dispose the original stream if we own it
if (!_leaveOpen)
{
await _originalStream.DisposeAsync().ConfigureAwait(false);
}
}
#endif
}
}
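A minimal usage sketch for the reader above (the byte values are illustrative); all multi-byte reads are little-endian:

using var ms = new MemoryStream(new byte[] { 0x2A, 0x78, 0x56, 0x34, 0x12 });
using var reader = new AsyncBinaryReader(ms);
var tag = await reader.ReadByteAsync(); // 0x2A
var value = await reader.ReadUInt32Async(); // 0x12345678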

View File

@@ -1,25 +0,0 @@
using System;
namespace SharpCompress.Common;
public sealed class CompressedBytesReadEventArgs : EventArgs
{
public CompressedBytesReadEventArgs(
long compressedBytesRead,
long currentFilePartCompressedBytesRead
)
{
CompressedBytesRead = compressedBytesRead;
CurrentFilePartCompressedBytesRead = currentFilePartCompressedBytesRead;
}
/// <summary>
/// Compressed bytes read for the current entry
/// </summary>
public long CompressedBytesRead { get; }
/// <summary>
/// Current file part read for Multipart files (e.g. Rar)
/// </summary>
public long CurrentFilePartCompressedBytesRead { get; }
}

View File

@@ -23,10 +23,11 @@ public enum CompressionType
Reduce4,
Explode,
Squeezed,
RLE90,
Packed,
Crunched,
Squashed,
Crushed,
Distilled,
ZStandard,
ArjLZ77,
}

View File

@@ -64,6 +64,11 @@ public class EntryStream : Stream, IStreamStack
protected override void Dispose(bool disposing)
{
if (_isDisposed)
{
return;
}
_isDisposed = true;
if (!(_completed || _reader.Cancelled))
{
SkipEntry();
@@ -81,12 +86,6 @@ public class EntryStream : Stream, IStreamStack
lzmaStream.Flush(); //Lzma over reads. Knock it back
}
}
if (_isDisposed)
{
return;
}
_isDisposed = true;
#if DEBUG_STREAMS
this.DebugDispose(typeof(EntryStream));
#endif
@@ -97,6 +96,11 @@ public class EntryStream : Stream, IStreamStack
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override async ValueTask DisposeAsync()
{
if (_isDisposed)
{
return;
}
_isDisposed = true;
if (!(_completed || _reader.Cancelled))
{
await SkipEntryAsync().ConfigureAwait(false);
@@ -114,12 +118,6 @@ public class EntryStream : Stream, IStreamStack
await lzmaStream.FlushAsync().ConfigureAwait(false);
}
}
if (_isDisposed)
{
return;
}
_isDisposed = true;
#if DEBUG_STREAMS
this.DebugDispose(typeof(EntryStream));
#endif
@@ -204,4 +202,11 @@ public class EntryStream : Stream, IStreamStack
public override void Write(byte[] buffer, int offset, int count) =>
throw new NotSupportedException();
public override Task WriteAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
) => throw new NotSupportedException();
}

View File

@@ -1,5 +1,6 @@
using System;
using System.IO;
using System.Runtime.InteropServices;
using System.Threading;
using System.Threading.Tasks;
@@ -7,6 +8,15 @@ namespace SharpCompress.Common;
internal static class ExtractionMethods
{
/// <summary>
/// Gets the appropriate StringComparison for path checks based on the file system.
/// Windows file systems are typically case-insensitive, while most Unix-like systems are case-sensitive.
/// </summary>
private static StringComparison PathComparison =>
RuntimeInformation.IsOSPlatform(OSPlatform.Windows)
? StringComparison.OrdinalIgnoreCase
: StringComparison.Ordinal;
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
@@ -48,7 +58,7 @@ internal static class ExtractionMethods
if (!Directory.Exists(destdir))
{
if (!destdir.StartsWith(fullDestinationDirectoryPath, StringComparison.Ordinal))
if (!destdir.StartsWith(fullDestinationDirectoryPath, PathComparison))
{
throw new ExtractionException(
"Entry is trying to create a directory outside of the destination directory."
@@ -68,12 +78,7 @@ internal static class ExtractionMethods
{
destinationFileName = Path.GetFullPath(destinationFileName);
if (
!destinationFileName.StartsWith(
fullDestinationDirectoryPath,
StringComparison.Ordinal
)
)
if (!destinationFileName.StartsWith(fullDestinationDirectoryPath, PathComparison))
{
throw new ExtractionException(
"Entry is trying to write a file outside of the destination directory."
@@ -123,7 +128,7 @@ internal static class ExtractionMethods
IEntry entry,
string destinationDirectory,
ExtractionOptions? options,
Func<string, ExtractionOptions?, Task> writeAsync,
Func<string, ExtractionOptions?, CancellationToken, Task> writeAsync,
CancellationToken cancellationToken = default
)
{
@@ -158,7 +163,7 @@ internal static class ExtractionMethods
if (!Directory.Exists(destdir))
{
if (!destdir.StartsWith(fullDestinationDirectoryPath, StringComparison.Ordinal))
if (!destdir.StartsWith(fullDestinationDirectoryPath, PathComparison))
{
throw new ExtractionException(
"Entry is trying to create a directory outside of the destination directory."
@@ -178,18 +183,13 @@ internal static class ExtractionMethods
{
destinationFileName = Path.GetFullPath(destinationFileName);
if (
!destinationFileName.StartsWith(
fullDestinationDirectoryPath,
StringComparison.Ordinal
)
)
if (!destinationFileName.StartsWith(fullDestinationDirectoryPath, PathComparison))
{
throw new ExtractionException(
"Entry is trying to write a file outside of the destination directory."
);
}
await writeAsync(destinationFileName, options).ConfigureAwait(false);
await writeAsync(destinationFileName, options, cancellationToken).ConfigureAwait(false);
}
else if (options.ExtractFullPath && !Directory.Exists(destinationFileName))
{
@@ -201,7 +201,7 @@ internal static class ExtractionMethods
IEntry entry,
string destinationFileName,
ExtractionOptions? options,
Func<string, FileMode, Task> openAndWriteAsync,
Func<string, FileMode, CancellationToken, Task> openAndWriteAsync,
CancellationToken cancellationToken = default
)
{
@@ -225,7 +225,8 @@ internal static class ExtractionMethods
fm = FileMode.CreateNew;
}
await openAndWriteAsync(destinationFileName, fm).ConfigureAwait(false);
await openAndWriteAsync(destinationFileName, fm, cancellationToken)
.ConfigureAwait(false);
entry.PreserveExtractionOptions(destinationFileName, options);
}
}

View File

@@ -1,4 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common;
@@ -14,4 +16,8 @@ public abstract class FilePart
internal abstract Stream? GetCompressedStream();
internal abstract Stream? GetRawStream();
internal bool Skipped { get; set; }
internal virtual Task<Stream?> GetCompressedStreamAsync(
CancellationToken cancellationToken = default
) => Task.FromResult(GetCompressedStream());
}

View File

@@ -1,28 +0,0 @@
using System;
namespace SharpCompress.Common;
public sealed class FilePartExtractionBeginEventArgs : EventArgs
{
public FilePartExtractionBeginEventArgs(string name, long size, long compressedSize)
{
Name = name;
Size = size;
CompressedSize = compressedSize;
}
/// <summary>
/// File name for the part for the current entry
/// </summary>
public string Name { get; }
/// <summary>
/// Uncompressed size of the current entry in the part
/// </summary>
public long Size { get; }
/// <summary>
/// Compressed size of the current entry in the part
/// </summary>
public long CompressedSize { get; }
}

View File

@@ -24,8 +24,14 @@ internal sealed class GZipFilePart : FilePart
stream.Position = stream.Length - 8;
ReadTrailer();
stream.Position = position;
EntryStartPosition = position;
}
else
{
// For non-seekable streams, we can't read the trailer or track position.
// Set to 0 since the stream will be read sequentially from its current position.
EntryStartPosition = 0;
}
EntryStartPosition = stream.Position;
}
internal long EntryStartPosition { get; }

View File

@@ -1,7 +0,0 @@
namespace SharpCompress.Common;
public interface IExtractionListener
{
void FireFilePartExtractionBegin(string name, long size, long compressedSize);
void FireCompressedBytesRead(long currentPartCompressedBytes, long compressedReadBytes);
}

View File

@@ -0,0 +1,43 @@
namespace SharpCompress.Common;
/// <summary>
/// Represents progress information for compression or extraction operations.
/// </summary>
public sealed class ProgressReport
{
/// <summary>
/// Initializes a new instance of the <see cref="ProgressReport"/> class.
/// </summary>
/// <param name="entryPath">The path of the entry being processed.</param>
/// <param name="bytesTransferred">Number of bytes transferred so far.</param>
/// <param name="totalBytes">Total bytes to be transferred, or null if unknown.</param>
public ProgressReport(string entryPath, long bytesTransferred, long? totalBytes)
{
EntryPath = entryPath;
BytesTransferred = bytesTransferred;
TotalBytes = totalBytes;
}
/// <summary>
/// Gets the path of the entry being processed.
/// </summary>
public string EntryPath { get; }
/// <summary>
/// Gets the number of bytes transferred so far.
/// </summary>
public long BytesTransferred { get; }
/// <summary>
/// Gets the total number of bytes to be transferred, or null if unknown.
/// </summary>
public long? TotalBytes { get; }
/// <summary>
/// Gets the progress percentage (0-100), or null if total bytes is unknown.
/// </summary>
public double? PercentComplete =>
TotalBytes.HasValue && TotalBytes.Value > 0
? (double)BytesTransferred / TotalBytes.Value * 100
: null;
}
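Illustrative usage of the progress type above:

var report = new ProgressReport("docs/readme.txt", bytesTransferred: 512, totalBytes: 2048);
Console.WriteLine(report.PercentComplete); // 25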

View File

@@ -1,17 +0,0 @@
using System;
using SharpCompress.Readers;
namespace SharpCompress.Common;
public sealed class ReaderExtractionEventArgs<T> : EventArgs
{
internal ReaderExtractionEventArgs(T entry, ReaderProgress? readerProgress = null)
{
Item = entry;
ReaderProgress = readerProgress;
}
public T Item { get; }
public ReaderProgress? ReaderProgress { get; }
}

View File

@@ -67,6 +67,7 @@ internal class SevenZipFilePart : FilePart
}
}
private const uint K_COPY = 0x0;
private const uint K_LZMA2 = 0x21;
private const uint K_LZMA = 0x030101;
private const uint K_PPMD = 0x030401;
@@ -82,6 +83,7 @@ internal class SevenZipFilePart : FilePart
var coder = Folder.NotNull()._coders.First();
return coder._methodId._id switch
{
K_COPY => CompressionType.None,
K_LZMA or K_LZMA2 => CompressionType.LZMA,
K_PPMD => CompressionType.PPMd,
K_B_ZIP2 => CompressionType.BZip2,

View File

@@ -1,5 +1,6 @@
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.Text;
@@ -9,8 +10,16 @@ internal sealed class TarHeader
{
internal static readonly DateTime EPOCH = new(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
public TarHeader(ArchiveEncoding archiveEncoding) => ArchiveEncoding = archiveEncoding;
public TarHeader(
ArchiveEncoding archiveEncoding,
TarHeaderWriteFormat writeFormat = TarHeaderWriteFormat.GNU_TAR_LONG_LINK
)
{
ArchiveEncoding = archiveEncoding;
WriteFormat = writeFormat;
}
internal TarHeaderWriteFormat WriteFormat { get; set; }
internal string? Name { get; set; }
internal string? LinkName { get; set; }
@@ -25,7 +34,119 @@ internal sealed class TarHeader
internal const int BLOCK_SIZE = 512;
// Maximum size for long name/link headers to prevent memory exhaustion attacks
// This is generous enough for most real-world scenarios (32KB)
private const int MAX_LONG_NAME_SIZE = 32768;
internal void Write(Stream output)
{
switch (WriteFormat)
{
case TarHeaderWriteFormat.GNU_TAR_LONG_LINK:
WriteGnuTarLongLink(output);
break;
case TarHeaderWriteFormat.USTAR:
WriteUstar(output);
break;
default:
throw new Exception("This should be impossible...");
}
}
internal void WriteUstar(Stream output)
{
var buffer = new byte[BLOCK_SIZE];
WriteOctalBytes(511, buffer, 100, 8); // file mode
WriteOctalBytes(0, buffer, 108, 8); // owner ID
WriteOctalBytes(0, buffer, 116, 8); // group ID
//ArchiveEncoding.UTF8.GetBytes("magic").CopyTo(buffer, 257);
var nameByteCount = ArchiveEncoding
.GetEncoding()
.GetByteCount(Name.NotNull("Name is null"));
if (nameByteCount > 100)
{
// if name is longer, try to split it into name and namePrefix
string fullName = Name.NotNull("Name is null");
// find all directory separators
List<int> dirSeps = new List<int>();
for (int i = 0; i < fullName.Length; i++)
{
if (fullName[i] == Path.DirectorySeparatorChar)
{
dirSeps.Add(i);
}
}
// find the right place to split the name
int splitIndex = -1;
for (int i = 0; i < dirSeps.Count; i++)
{
int count = ArchiveEncoding
.GetEncoding()
.GetByteCount(fullName.Substring(0, dirSeps[i]));
if (count < 155)
{
splitIndex = dirSeps[i];
}
else
{
break;
}
}
if (splitIndex == -1)
{
throw new Exception(
$"Tar header USTAR format can not fit file name \"{fullName}\" of length {nameByteCount}! Directory separator not found! Try using GNU Tar format instead!"
);
}
string namePrefix = fullName.Substring(0, splitIndex);
string name = fullName.Substring(splitIndex + 1);
if (this.ArchiveEncoding.GetEncoding().GetByteCount(namePrefix) >= 155)
{
throw new Exception(
$"Tar header USTAR format cannot fit file name \"{fullName}\" of length {nameByteCount}: prefix exceeds 155 bytes. Try the GNU tar format instead."
);
}
if (this.ArchiveEncoding.GetEncoding().GetByteCount(name) >= 100)
{
throw new Exception(
$"Tar header USTAR format cannot fit file name \"{fullName}\" of length {nameByteCount}: name exceeds 100 bytes. Try the GNU tar format instead."
);
}
// write name prefix (USTAR prefix field: 155 bytes at offset 345)
WriteStringBytes(ArchiveEncoding.Encode(namePrefix), buffer, 345, 155);
// write partial name
WriteStringBytes(ArchiveEncoding.Encode(name), buffer, 100);
}
else
{
WriteStringBytes(ArchiveEncoding.Encode(Name.NotNull("Name is null")), buffer, 100);
}
WriteOctalBytes(Size, buffer, 124, 12);
var time = (long)(LastModifiedTime.ToUniversalTime() - EPOCH).TotalSeconds;
WriteOctalBytes(time, buffer, 136, 12);
buffer[156] = (byte)EntryType;
// write ustar magic field
WriteStringBytes(Encoding.ASCII.GetBytes("ustar"), buffer, 257, 6);
// write ustar version "00"
buffer[263] = 0x30;
buffer[264] = 0x30;
var crc = RecalculateChecksum(buffer);
WriteOctalBytes(crc, buffer, 148, 8);
output.Write(buffer, 0, buffer.Length);
}
internal void WriteGnuTarLongLink(Stream output)
{
var buffer = new byte[BLOCK_SIZE];
@@ -81,7 +202,7 @@ internal sealed class TarHeader
0,
100 - ArchiveEncoding.GetEncoding().GetMaxByteCount(1)
);
Write(output);
WriteGnuTarLongLink(output);
}
}
@@ -186,6 +307,15 @@ internal sealed class TarHeader
private string ReadLongName(BinaryReader reader, byte[] buffer)
{
var size = ReadSize(buffer);
// Validate size to prevent memory exhaustion from malformed headers
if (size < 0 || size > MAX_LONG_NAME_SIZE)
{
throw new InvalidFormatException(
$"Long name size {size} is invalid or exceeds maximum allowed size of {MAX_LONG_NAME_SIZE} bytes"
);
}
var nameLength = (int)size;
var nameBytes = reader.ReadBytes(nameLength);
var remainingBytesToRead = BLOCK_SIZE - (nameLength % BLOCK_SIZE);
@@ -228,6 +358,18 @@ internal sealed class TarHeader
buffer.Slice(i, length - i).Clear();
}
private static void WriteStringBytes(
ReadOnlySpan<byte> name,
Span<byte> buffer,
int offset,
int length
)
{
var i = Math.Min(length, name.Length);
name.Slice(0, i).CopyTo(buffer.Slice(offset, length)); // never write past the field
buffer.Slice(offset + i, length - i).Clear();
}
private static void WriteStringBytes(string name, byte[] buffer, int offset, int length)
{
int i;

View File

@@ -0,0 +1,7 @@
namespace SharpCompress.Common.Tar.Headers;
public enum TarHeaderWriteFormat
{
GNU_TAR_LONG_LINK,
USTAR,
}
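A sketch of how the format is selected via the constructor added above (TarHeader is internal, so this is illustrative; EntryType and the remaining fields are assumed to be set as usual):

var header = new TarHeader(archiveEncoding, TarHeaderWriteFormat.USTAR);
header.Name = "some/deeply/nested/path/file.txt";
header.Write(output); // dispatches to WriteUstar; GNU_TAR_LONG_LINK instead emits an extra long-link block for over-long names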

View File

@@ -1,4 +1,5 @@
using System.IO;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
@@ -19,6 +20,18 @@ internal class DirectoryEndHeader : ZipHeader
Comment = reader.ReadBytes(CommentLength);
}
internal override async ValueTask Read(AsyncBinaryReader reader)
{
VolumeNumber = await reader.ReadUInt16Async();
FirstVolumeWithDirectory = await reader.ReadUInt16Async();
TotalNumberOfEntriesInDisk = await reader.ReadUInt16Async();
TotalNumberOfEntries = await reader.ReadUInt16Async();
DirectorySize = await reader.ReadUInt32Async();
DirectoryStartOffsetRelativeToDisk = await reader.ReadUInt32Async();
CommentLength = await reader.ReadUInt16Async();
Comment = await reader.ReadBytesAsync(CommentLength);
}
public ushort VolumeNumber { get; private set; }
public ushort FirstVolumeWithDirectory { get; private set; }

View File

@@ -1,5 +1,6 @@
using System.IO;
using System.Linq;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
@@ -31,7 +32,37 @@ internal class DirectoryEntryHeader : ZipFileEntry
var extra = reader.ReadBytes(extraLength);
var comment = reader.ReadBytes(commentLength);
// According to .ZIP File Format Specification
ProcessReadData(name, extra, comment);
}
internal override async ValueTask Read(AsyncBinaryReader reader)
{
Version = await reader.ReadUInt16Async();
VersionNeededToExtract = await reader.ReadUInt16Async();
Flags = (HeaderFlags)await reader.ReadUInt16Async();
CompressionMethod = (ZipCompressionMethod)await reader.ReadUInt16Async();
OriginalLastModifiedTime = LastModifiedTime = await reader.ReadUInt16Async();
OriginalLastModifiedDate = LastModifiedDate = await reader.ReadUInt16Async();
Crc = await reader.ReadUInt32Async();
CompressedSize = await reader.ReadUInt32Async();
UncompressedSize = await reader.ReadUInt32Async();
var nameLength = await reader.ReadUInt16Async();
var extraLength = await reader.ReadUInt16Async();
var commentLength = await reader.ReadUInt16Async();
DiskNumberStart = await reader.ReadUInt16Async();
InternalFileAttributes = await reader.ReadUInt16Async();
ExternalFileAttributes = await reader.ReadUInt32Async();
RelativeOffsetOfEntryHeader = await reader.ReadUInt32Async();
var name = await reader.ReadBytesAsync(nameLength);
var extra = await reader.ReadBytesAsync(extraLength);
var comment = await reader.ReadBytesAsync(commentLength);
ProcessReadData(name, extra, comment);
}
private void ProcessReadData(byte[] name, byte[] extra, byte[] comment)
{
//
// For example: https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT
//

View File

@@ -1,4 +1,5 @@
using System.IO;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
@@ -8,4 +9,6 @@ internal class IgnoreHeader : ZipHeader
: base(type) { }
internal override void Read(BinaryReader reader) { }
internal override ValueTask Read(AsyncBinaryReader reader) => default;
}

View File

@@ -1,13 +1,12 @@
using System.IO;
using System.Linq;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
internal class LocalEntryHeader : ZipFileEntry
internal class LocalEntryHeader(ArchiveEncoding archiveEncoding)
: ZipFileEntry(ZipHeaderType.LocalEntry, archiveEncoding)
{
public LocalEntryHeader(ArchiveEncoding archiveEncoding)
: base(ZipHeaderType.LocalEntry, archiveEncoding) { }
internal override void Read(BinaryReader reader)
{
Version = reader.ReadUInt16();
@@ -23,7 +22,29 @@ internal class LocalEntryHeader : ZipFileEntry
var name = reader.ReadBytes(nameLength);
var extra = reader.ReadBytes(extraLength);
// According to .ZIP File Format Specification
ProcessReadData(name, extra);
}
internal override async ValueTask Read(AsyncBinaryReader reader)
{
Version = await reader.ReadUInt16Async();
Flags = (HeaderFlags)await reader.ReadUInt16Async();
CompressionMethod = (ZipCompressionMethod)await reader.ReadUInt16Async();
OriginalLastModifiedTime = LastModifiedTime = await reader.ReadUInt16Async();
OriginalLastModifiedDate = LastModifiedDate = await reader.ReadUInt16Async();
Crc = await reader.ReadUInt32Async();
CompressedSize = await reader.ReadUInt32Async();
UncompressedSize = await reader.ReadUInt32Async();
var nameLength = await reader.ReadUInt16Async();
var extraLength = await reader.ReadUInt16Async();
var name = await reader.ReadBytesAsync(nameLength);
var extra = await reader.ReadBytesAsync(extraLength);
ProcessReadData(name, extra);
}
private void ProcessReadData(byte[] name, byte[] extra)
{
//
// For example: https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT
//

View File

@@ -1,5 +1,6 @@
using System;
using System.IO;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
@@ -9,4 +10,7 @@ internal class SplitHeader : ZipHeader
: base(ZipHeaderType.Split) { }
internal override void Read(BinaryReader reader) => throw new NotImplementedException();
internal override ValueTask Read(AsyncBinaryReader reader) =>
throw new NotImplementedException();
}

View File

@@ -1,4 +1,5 @@
using System.IO;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
@@ -26,6 +27,25 @@ internal class Zip64DirectoryEndHeader : ZipHeader
);
}
internal override async ValueTask Read(AsyncBinaryReader reader)
{
SizeOfDirectoryEndRecord = (long)await reader.ReadUInt64Async();
VersionMadeBy = await reader.ReadUInt16Async();
VersionNeededToExtract = await reader.ReadUInt16Async();
VolumeNumber = await reader.ReadUInt32Async();
FirstVolumeWithDirectory = await reader.ReadUInt32Async();
TotalNumberOfEntriesInDisk = (long)await reader.ReadUInt64Async();
TotalNumberOfEntries = (long)await reader.ReadUInt64Async();
DirectorySize = (long)await reader.ReadUInt64Async();
DirectoryStartOffsetRelativeToDisk = (long)await reader.ReadUInt64Async();
DataSector = await reader.ReadBytesAsync(
(int)(
SizeOfDirectoryEndRecord
- SIZE_OF_FIXED_HEADER_DATA_EXCEPT_SIGNATURE_AND_SIZE_FIELDS
)
);
}
private const int SIZE_OF_FIXED_HEADER_DATA_EXCEPT_SIGNATURE_AND_SIZE_FIELDS = 44;
public long SizeOfDirectoryEndRecord { get; private set; }

View File

@@ -1,12 +1,10 @@
using System.IO;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
internal class Zip64DirectoryEndLocatorHeader : ZipHeader
internal class Zip64DirectoryEndLocatorHeader() : ZipHeader(ZipHeaderType.Zip64DirectoryEndLocator)
{
public Zip64DirectoryEndLocatorHeader()
: base(ZipHeaderType.Zip64DirectoryEndLocator) { }
internal override void Read(BinaryReader reader)
{
FirstVolumeWithDirectory = reader.ReadUInt32();
@@ -14,6 +12,13 @@ internal class Zip64DirectoryEndLocatorHeader : ZipHeader
TotalNumberOfVolumes = reader.ReadUInt32();
}
internal override async ValueTask Read(AsyncBinaryReader reader)
{
FirstVolumeWithDirectory = await reader.ReadUInt32Async();
RelativeOffsetOfTheEndOfDirectoryRecord = (long)await reader.ReadUInt64Async();
TotalNumberOfVolumes = await reader.ReadUInt32Async();
}
public uint FirstVolumeWithDirectory { get; private set; }
public long RelativeOffsetOfTheEndOfDirectoryRecord { get; private set; }

View File

@@ -2,18 +2,14 @@ using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
internal abstract class ZipFileEntry : ZipHeader
internal abstract class ZipFileEntry(ZipHeaderType type, ArchiveEncoding archiveEncoding)
: ZipHeader(type)
{
protected ZipFileEntry(ZipHeaderType type, ArchiveEncoding archiveEncoding)
: base(type)
{
Extra = new List<ExtraData>();
ArchiveEncoding = archiveEncoding;
}
internal bool IsDirectory
{
get
@@ -30,7 +26,7 @@ internal abstract class ZipFileEntry : ZipHeader
internal Stream? PackedStream { get; set; }
internal ArchiveEncoding ArchiveEncoding { get; }
internal ArchiveEncoding ArchiveEncoding { get; } = archiveEncoding;
internal string? Name { get; set; }
@@ -44,7 +40,7 @@ internal abstract class ZipFileEntry : ZipHeader
internal long UncompressedSize { get; set; }
internal List<ExtraData> Extra { get; set; }
internal List<ExtraData> Extra { get; set; } = new();
public string? Password { get; set; }
@@ -63,6 +59,24 @@ internal abstract class ZipFileEntry : ZipHeader
return encryptionData;
}
internal async Task<PkwareTraditionalEncryptionData> ComposeEncryptionDataAsync(
Stream archiveStream,
CancellationToken cancellationToken = default
)
{
if (archiveStream is null)
{
throw new ArgumentNullException(nameof(archiveStream));
}
var buffer = new byte[12];
await archiveStream.ReadFullyAsync(buffer, 0, 12, cancellationToken).ConfigureAwait(false);
var encryptionData = PkwareTraditionalEncryptionData.ForRead(Password!, this, buffer);
return encryptionData;
}
internal WinzipAesEncryptionData? WinzipAesEncryptionData { get; set; }
/// <summary>

View File

@@ -1,18 +1,14 @@
using System.IO;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
internal abstract class ZipHeader
internal abstract class ZipHeader(ZipHeaderType type)
{
protected ZipHeader(ZipHeaderType type)
{
ZipHeaderType = type;
HasData = true;
}
internal ZipHeaderType ZipHeaderType { get; }
internal ZipHeaderType ZipHeaderType { get; } = type;
internal abstract void Read(BinaryReader reader);
internal abstract ValueTask Read(AsyncBinaryReader reader);
internal bool HasData { get; set; }
internal bool HasData { get; set; } = true;
}

View File

@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
@@ -18,7 +19,74 @@ internal sealed class SeekableZipHeaderFactory : ZipHeaderFactory
internal SeekableZipHeaderFactory(string? password, ArchiveEncoding archiveEncoding)
: base(StreamingMode.Seekable, password, archiveEncoding) { }
internal IEnumerable<ZipHeader> ReadSeekableHeader(Stream stream)
internal async IAsyncEnumerable<ZipHeader> ReadSeekableHeader(Stream stream)
{
var reader = new AsyncBinaryReader(stream);
await SeekBackToHeader(stream, reader);
var eocd_location = stream.Position;
var entry = new DirectoryEndHeader();
await entry.Read(reader);
if (entry.IsZip64)
{
_zip64 = true;
// ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR should be before the EOCD
stream.Seek(eocd_location - ZIP64_EOCD_LENGTH - 4, SeekOrigin.Begin);
uint zip64_locator = await reader.ReadUInt32Async();
if (zip64_locator != ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR)
{
throw new ArchiveException("Failed to locate the Zip64 Directory Locator");
}
var zip64Locator = new Zip64DirectoryEndLocatorHeader();
await zip64Locator.Read(reader);
stream.Seek(zip64Locator.RelativeOffsetOfTheEndOfDirectoryRecord, SeekOrigin.Begin);
var zip64Signature = await reader.ReadUInt32Async();
if (zip64Signature != ZIP64_END_OF_CENTRAL_DIRECTORY)
{
throw new ArchiveException("Failed to locate the Zip64 Header");
}
var zip64Entry = new Zip64DirectoryEndHeader();
await zip64Entry.Read(reader);
stream.Seek(zip64Entry.DirectoryStartOffsetRelativeToDisk, SeekOrigin.Begin);
}
else
{
stream.Seek(entry.DirectoryStartOffsetRelativeToDisk, SeekOrigin.Begin);
}
var position = stream.Position;
while (true)
{
stream.Position = position;
var signature = await reader.ReadUInt32Async();
var nextHeader = await ReadHeader(signature, reader, _zip64);
position = stream.Position;
if (nextHeader is null)
{
yield break;
}
if (nextHeader is DirectoryEntryHeader entryHeader)
{
//entry could be zero bytes so we need to know that.
entryHeader.HasData = entryHeader.CompressedSize != 0;
yield return entryHeader;
}
else if (nextHeader is DirectoryEndHeader endHeader)
{
yield return endHeader;
}
}
}
internal IEnumerable<ZipHeader> ReadSeekableHeader(Stream stream, bool useSync)
{
var reader = new BinaryReader(stream);
@@ -98,6 +166,45 @@ internal sealed class SeekableZipHeaderFactory : ZipHeaderFactory
return true;
}
private static async ValueTask SeekBackToHeader(Stream stream, AsyncBinaryReader reader)
{
// Minimum EOCD length
if (stream.Length < MINIMUM_EOCD_LENGTH)
{
throw new ArchiveException(
"Could not find Zip file Directory at the end of the file. File may be corrupted."
);
}
var len =
stream.Length < MAX_SEARCH_LENGTH_FOR_EOCD
? (int)stream.Length
: MAX_SEARCH_LENGTH_FOR_EOCD;
// Search in reverse so we find the occurrence closest to the end of the file;
// the needle is the EOCD signature 0x06054B50 with its bytes reversed.
byte[] needle = { 0x06, 0x05, 0x4b, 0x50 };
stream.Seek(-len, SeekOrigin.End);
var seek = await reader.ReadBytesAsync(len);
// Search in reverse
Array.Reverse(seek);
// don't exclude the minimum eocd region, otherwise you fail to locate the header in empty zip files
var max_search_area = len; // - MINIMUM_EOCD_LENGTH;
for (var pos_from_end = 0; pos_from_end < max_search_area; ++pos_from_end)
{
if (IsMatch(seek, pos_from_end, needle))
{
stream.Seek(-pos_from_end, SeekOrigin.End);
return;
}
}
throw new ArchiveException("Failed to locate the Zip Header");
}
private static void SeekBackToHeader(Stream stream, BinaryReader reader)
{
// Minimum EOCD length

View File

@@ -1,4 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors.Deflate;
using SharpCompress.IO;
@@ -31,6 +33,28 @@ internal sealed class StreamingZipFilePart : ZipFilePart
return _decompressionStream;
}
internal override async Task<Stream?> GetCompressedStreamAsync(
CancellationToken cancellationToken = default
)
{
if (!Header.HasData)
{
return Stream.Null;
}
_decompressionStream = await CreateDecompressionStreamAsync(
await GetCryptoStreamAsync(CreateBaseStream(), cancellationToken)
.ConfigureAwait(false),
Header.CompressionMethod,
cancellationToken
)
.ConfigureAwait(false);
if (LeaveStreamOpen)
{
return SharpCompressStream.Create(_decompressionStream, leaveOpen: true);
}
return _decompressionStream;
}
internal BinaryReader FixStreamedFileLocation(ref SharpCompressStream rewindableStream)
{
if (Header.IsDirectory)

View File

@@ -36,11 +36,10 @@ internal class StreamingZipHeaderFactory : ZipHeaderFactory
throw new ArgumentException("Stream must be a SharpCompressStream", nameof(stream));
}
}
SharpCompressStream rewindableStream = (SharpCompressStream)stream;
var rewindableStream = (SharpCompressStream)stream;
while (true)
{
ZipHeader? header;
var reader = new BinaryReader(rewindableStream);
uint headerBytes = 0;
if (
@@ -155,7 +154,7 @@ internal class StreamingZipHeaderFactory : ZipHeaderFactory
}
_lastEntryHeader = null;
header = ReadHeader(headerBytes, reader);
var header = ReadHeader(headerBytes, reader);
if (header is null)
{
yield break;

View File

@@ -1,6 +1,7 @@
using System;
using System.Buffers.Binary;
using System.Security.Cryptography;
using System.Text;
namespace SharpCompress.Common.Zip;
@@ -19,8 +20,24 @@ internal class WinzipAesEncryptionData
{
_keySize = keySize;
#if NETFRAMEWORK || NETSTANDARD2_0
#if NETFRAMEWORK
var rfc2898 = new Rfc2898DeriveBytes(password, salt, RFC2898_ITERATIONS);
KeyBytes = rfc2898.GetBytes(KeySizeInBytes);
IvBytes = rfc2898.GetBytes(KeySizeInBytes);
var generatedVerifyValue = rfc2898.GetBytes(2);
#elif NET10_0_OR_GREATER
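// A single PBKDF2-SHA1 pass yields key || iv || 2-byte password verifier.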
var derivedKeySize = (KeySizeInBytes * 2) + 2;
var passwordBytes = Encoding.UTF8.GetBytes(password);
var derivedKey = Rfc2898DeriveBytes.Pbkdf2(
passwordBytes,
salt,
RFC2898_ITERATIONS,
HashAlgorithmName.SHA1,
derivedKeySize
);
KeyBytes = derivedKey.AsSpan(0, KeySizeInBytes).ToArray();
IvBytes = derivedKey.AsSpan(KeySizeInBytes, KeySizeInBytes).ToArray();
var generatedVerifyValue = derivedKey.AsSpan((KeySizeInBytes * 2), 2).ToArray();
#else
var rfc2898 = new Rfc2898DeriveBytes(
password,
@@ -28,11 +45,10 @@ internal class WinzipAesEncryptionData
RFC2898_ITERATIONS,
HashAlgorithmName.SHA1
);
#endif
KeyBytes = rfc2898.GetBytes(KeySizeInBytes); // 16 or 24 or 32 ???
KeyBytes = rfc2898.GetBytes(KeySizeInBytes);
IvBytes = rfc2898.GetBytes(KeySizeInBytes);
var generatedVerifyValue = rfc2898.GetBytes(2);
#endif
var verify = BinaryPrimitives.ReadInt16LittleEndian(passwordVerifyValue);
var generated = BinaryPrimitives.ReadInt16LittleEndian(generatedVerifyValue);

View File

@@ -2,6 +2,8 @@ using System;
using System.Buffers.Binary;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors;
using SharpCompress.Compressors.BZip2;
@@ -13,8 +15,8 @@ using SharpCompress.Compressors.PPMd;
using SharpCompress.Compressors.Reduce;
using SharpCompress.Compressors.Shrink;
using SharpCompress.Compressors.Xz;
using SharpCompress.Compressors.ZStandard;
using SharpCompress.IO;
using ZstdSharp;
namespace SharpCompress.Common.Zip;
@@ -264,4 +266,220 @@ internal abstract class ZipFilePart : FilePart
}
return plainStream;
}
internal override async Task<Stream?> GetCompressedStreamAsync(
CancellationToken cancellationToken = default
)
{
if (!Header.HasData)
{
return Stream.Null;
}
var decompressionStream = await CreateDecompressionStreamAsync(
await GetCryptoStreamAsync(CreateBaseStream(), cancellationToken)
.ConfigureAwait(false),
Header.CompressionMethod,
cancellationToken
)
.ConfigureAwait(false);
if (LeaveStreamOpen)
{
return SharpCompressStream.Create(decompressionStream, leaveOpen: true);
}
return decompressionStream;
}
protected async Task<Stream> GetCryptoStreamAsync(
Stream plainStream,
CancellationToken cancellationToken = default
)
{
var isFileEncrypted = FlagUtility.HasFlag(Header.Flags, HeaderFlags.Encrypted);
if (Header.CompressedSize == 0 && isFileEncrypted)
{
throw new NotSupportedException("Cannot encrypt file with unknown size at start.");
}
if (
(
Header.CompressedSize == 0
&& FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor)
) || Header.IsZip64
)
{
plainStream = SharpCompressStream.Create(plainStream, leaveOpen: true); //make sure AES doesn't close
}
else
{
plainStream = new ReadOnlySubStream(plainStream, Header.CompressedSize); //make sure AES doesn't close
}
if (isFileEncrypted)
{
switch (Header.CompressionMethod)
{
case ZipCompressionMethod.None:
case ZipCompressionMethod.Shrink:
case ZipCompressionMethod.Reduce1:
case ZipCompressionMethod.Reduce2:
case ZipCompressionMethod.Reduce3:
case ZipCompressionMethod.Reduce4:
case ZipCompressionMethod.Deflate:
case ZipCompressionMethod.Deflate64:
case ZipCompressionMethod.BZip2:
case ZipCompressionMethod.LZMA:
case ZipCompressionMethod.PPMd:
{
return new PkwareTraditionalCryptoStream(
plainStream,
await Header
.ComposeEncryptionDataAsync(plainStream, cancellationToken)
.ConfigureAwait(false),
CryptoMode.Decrypt
);
}
case ZipCompressionMethod.WinzipAes:
{
if (Header.WinzipAesEncryptionData != null)
{
return new WinzipAesCryptoStream(
plainStream,
Header.WinzipAesEncryptionData,
Header.CompressedSize - 10
);
}
return plainStream;
}
default:
{
throw new InvalidOperationException("Header.CompressionMethod is invalid");
}
}
}
return plainStream;
}
protected async Task<Stream> CreateDecompressionStreamAsync(
Stream stream,
ZipCompressionMethod method,
CancellationToken cancellationToken = default
)
{
switch (method)
{
case ZipCompressionMethod.None:
{
if (Header.CompressedSize is 0)
{
return new DataDescriptorStream(stream);
}
return stream;
}
case ZipCompressionMethod.Shrink:
{
return new ShrinkStream(
stream,
CompressionMode.Decompress,
Header.CompressedSize,
Header.UncompressedSize
);
}
case ZipCompressionMethod.Reduce1:
{
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 1);
}
case ZipCompressionMethod.Reduce2:
{
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 2);
}
case ZipCompressionMethod.Reduce3:
{
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 3);
}
case ZipCompressionMethod.Reduce4:
{
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 4);
}
case ZipCompressionMethod.Explode:
{
return new ExplodeStream(
stream,
Header.CompressedSize,
Header.UncompressedSize,
Header.Flags
);
}
case ZipCompressionMethod.Deflate:
{
return new DeflateStream(stream, CompressionMode.Decompress);
}
case ZipCompressionMethod.Deflate64:
{
return new Deflate64Stream(stream, CompressionMode.Decompress);
}
case ZipCompressionMethod.BZip2:
{
return new BZip2Stream(stream, CompressionMode.Decompress, false);
}
case ZipCompressionMethod.LZMA:
{
if (FlagUtility.HasFlag(Header.Flags, HeaderFlags.Encrypted))
{
throw new NotSupportedException("LZMA with pkware encryption.");
}
var buffer = new byte[4];
await stream.ReadFullyAsync(buffer, 0, 4, cancellationToken).ConfigureAwait(false);
var version = BinaryPrimitives.ReadUInt16LittleEndian(buffer.AsSpan(0, 2));
var propsSize = BinaryPrimitives.ReadUInt16LittleEndian(buffer.AsSpan(2, 2));
var props = new byte[propsSize];
await stream
.ReadFullyAsync(props, 0, propsSize, cancellationToken)
.ConfigureAwait(false);
return new LzmaStream(
props,
stream,
Header.CompressedSize > 0 ? Header.CompressedSize - 4 - props.Length : -1,
FlagUtility.HasFlag(Header.Flags, HeaderFlags.Bit1)
? -1
: Header.UncompressedSize
);
}
case ZipCompressionMethod.Xz:
{
return new XZStream(stream);
}
case ZipCompressionMethod.ZStandard:
{
return new DecompressionStream(stream);
}
case ZipCompressionMethod.PPMd:
{
var props = new byte[2];
await stream.ReadFullyAsync(props, 0, 2, cancellationToken).ConfigureAwait(false);
return new PpmdStream(new PpmdProperties(props), stream, false);
}
case ZipCompressionMethod.WinzipAes:
{
var data = Header.Extra.SingleOrDefault(x => x.Type == ExtraDataType.WinZipAes);
if (data is null)
{
throw new InvalidFormatException("No Winzip AES extra data found.");
}
if (data.Length != 7)
{
throw new InvalidFormatException("Winzip data length is not 7.");
}
throw new NotSupportedException("WinzipAes isn't supported for streaming");
}
default:
{
throw new NotSupportedException("CompressionMethod: " + Header.CompressionMethod);
}
}
}
}

View File

@@ -1,6 +1,7 @@
using System;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
@@ -34,6 +35,82 @@ internal class ZipHeaderFactory
_archiveEncoding = archiveEncoding;
}
protected async ValueTask<ZipHeader?> ReadHeader(
uint headerBytes,
AsyncBinaryReader reader,
bool zip64 = false
)
{
switch (headerBytes)
{
case ENTRY_HEADER_BYTES:
{
var entryHeader = new LocalEntryHeader(_archiveEncoding);
await entryHeader.Read(reader);
LoadHeader(entryHeader, reader.BaseStream);
_lastEntryHeader = entryHeader;
return entryHeader;
}
case DIRECTORY_START_HEADER_BYTES:
{
var entry = new DirectoryEntryHeader(_archiveEncoding);
await entry.Read(reader);
return entry;
}
case POST_DATA_DESCRIPTOR:
{
if (
_lastEntryHeader != null
&& FlagUtility.HasFlag(
_lastEntryHeader.NotNull().Flags,
HeaderFlags.UsePostDataDescriptor
)
)
{
_lastEntryHeader.Crc = await reader.ReadUInt32Async();
_lastEntryHeader.CompressedSize = zip64
? (long)await reader.ReadUInt64Async()
: await reader.ReadUInt32Async();
_lastEntryHeader.UncompressedSize = zip64
? (long)await reader.ReadUInt64Async()
: await reader.ReadUInt32Async();
}
else
{
await reader.ReadBytesAsync(zip64 ? 20 : 12);
}
return null;
}
case DIGITAL_SIGNATURE:
return null;
case DIRECTORY_END_HEADER_BYTES:
{
var entry = new DirectoryEndHeader();
await entry.Read(reader);
return entry;
}
case SPLIT_ARCHIVE_HEADER_BYTES:
{
return new SplitHeader();
}
case ZIP64_END_OF_CENTRAL_DIRECTORY:
{
var entry = new Zip64DirectoryEndHeader();
await entry.Read(reader);
return entry;
}
case ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR:
{
var entry = new Zip64DirectoryEndLocatorHeader();
await entry.Read(reader);
return entry;
}
default:
return null;
}
}
protected ZipHeader? ReadHeader(uint headerBytes, BinaryReader reader, bool zip64 = false)
{
switch (headerBytes)

View File

@@ -24,10 +24,29 @@
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
using System;
using System.Buffers;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Compressors.ADC;
/// <summary>
/// Result of an ADC decompression operation
/// </summary>
public class AdcDecompressResult
{
/// <summary>
/// Number of bytes read from input
/// </summary>
public int BytesRead { get; set; }
/// <summary>
/// Decompressed output buffer
/// </summary>
public byte[]? Output { get; set; }
}
/// <summary>
/// Provides static methods for decompressing Apple Data Compression data
/// </summary>
@@ -78,6 +97,173 @@ public static class ADCBase
public static int Decompress(byte[] input, out byte[]? output, int bufferSize = 262144) =>
Decompress(new MemoryStream(input), out output, bufferSize);
/// <summary>
/// Asynchronously decompresses a byte buffer that is compressed with ADC
/// </summary>
/// <param name="input">Compressed buffer</param>
/// <param name="bufferSize">Max size for decompressed data</param>
/// <param name="cancellationToken">Cancellation token</param>
/// <returns>Result containing bytes read and decompressed data</returns>
public static async Task<AdcDecompressResult> DecompressAsync(
byte[] input,
int bufferSize = 262144,
CancellationToken cancellationToken = default
) => await DecompressAsync(new MemoryStream(input), bufferSize, cancellationToken);
/// <summary>
/// Asynchronously decompresses a stream that is compressed with ADC
/// </summary>
/// <param name="input">Stream containing compressed data</param>
/// <param name="bufferSize">Max size for decompressed data</param>
/// <param name="cancellationToken">Cancellation token</param>
/// <returns>Result containing bytes read and decompressed data</returns>
public static async Task<AdcDecompressResult> DecompressAsync(
Stream input,
int bufferSize = 262144,
CancellationToken cancellationToken = default
)
{
var result = new AdcDecompressResult();
if (input is null || input.Length == 0)
{
result.BytesRead = 0;
result.Output = null;
return result;
}
var start = (int)input.Position;
var position = (int)input.Position;
int chunkSize;
int offset;
int chunkType;
var buffer = ArrayPool<byte>.Shared.Rent(bufferSize);
var outPosition = 0;
var full = false;
byte[] temp = ArrayPool<byte>.Shared.Rent(3);
try
{
while (position < input.Length)
{
cancellationToken.ThrowIfCancellationRequested();
var readByte = input.ReadByte();
if (readByte == -1)
{
break;
}
chunkType = GetChunkType((byte)readByte);
switch (chunkType)
{
case PLAIN:
chunkSize = GetChunkSize((byte)readByte);
if (outPosition + chunkSize > bufferSize)
{
full = true;
break;
}
var readCount = await input.ReadAsync(
buffer,
outPosition,
chunkSize,
cancellationToken
);
outPosition += readCount;
position += readCount + 1;
break;
case TWO_BYTE:
chunkSize = GetChunkSize((byte)readByte);
temp[0] = (byte)readByte;
temp[1] = (byte)input.ReadByte();
offset = GetOffset(temp.AsSpan(0, 2));
if (outPosition + chunkSize > bufferSize)
{
full = true;
break;
}
if (offset == 0)
{
var lastByte = buffer[outPosition - 1];
for (var i = 0; i < chunkSize; i++)
{
buffer[outPosition] = lastByte;
outPosition++;
}
position += 2;
}
else
{
for (var i = 0; i < chunkSize; i++)
{
buffer[outPosition] = buffer[outPosition - offset - 1];
outPosition++;
}
position += 2;
}
break;
case THREE_BYTE:
chunkSize = GetChunkSize((byte)readByte);
temp[0] = (byte)readByte;
temp[1] = (byte)input.ReadByte();
temp[2] = (byte)input.ReadByte();
offset = GetOffset(temp.AsSpan(0, 3));
if (outPosition + chunkSize > bufferSize)
{
full = true;
break;
}
if (offset == 0)
{
var lastByte = buffer[outPosition - 1];
for (var i = 0; i < chunkSize; i++)
{
buffer[outPosition] = lastByte;
outPosition++;
}
position += 3;
}
else
{
for (var i = 0; i < chunkSize; i++)
{
buffer[outPosition] = buffer[outPosition - offset - 1];
outPosition++;
}
position += 3;
}
break;
}
if (full)
{
break;
}
}
var output = new byte[outPosition];
Array.Copy(buffer, output, outPosition);
result.BytesRead = position - start;
result.Output = output;
return result;
}
finally
{
ArrayPool<byte>.Shared.Return(buffer);
ArrayPool<byte>.Shared.Return(temp);
}
}
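// Illustrative usage of the async API above:
//   var result = await ADCBase.DecompressAsync(compressedBytes);
//   // result.BytesRead -> bytes consumed from the input
//   // result.Output    -> decompressed data (null for empty input)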
/// <summary>
/// Decompresses a stream that's compressed with ADC
/// </summary>

View File

@@ -28,6 +28,8 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Compressors.ADC;
@@ -187,6 +189,76 @@ public sealed class ADCStream : Stream, IStreamStack
return copied;
}
public override async Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken = default
)
{
if (count == 0)
{
return 0;
}
if (buffer is null)
{
throw new ArgumentNullException(nameof(buffer));
}
if (count < 0)
{
throw new ArgumentOutOfRangeException(nameof(count));
}
if (offset < buffer.GetLowerBound(0))
{
throw new ArgumentOutOfRangeException(nameof(offset));
}
if ((offset + count) > buffer.GetLength(0))
{
throw new ArgumentOutOfRangeException(nameof(count));
}
if (_outBuffer is null)
{
var result = await ADCBase.DecompressAsync(
_stream,
cancellationToken: cancellationToken
);
_outBuffer = result.Output;
_outPosition = 0;
}
if (_outBuffer is null)
{
return 0; // nothing could be decompressed
}
var inPosition = offset;
var toCopy = count;
var copied = 0;
while (_outPosition + toCopy >= _outBuffer.Length)
{
cancellationToken.ThrowIfCancellationRequested();
var piece = _outBuffer.Length - _outPosition;
Array.Copy(_outBuffer, _outPosition, buffer, inPosition, piece);
inPosition += piece;
copied += piece;
_position += piece;
toCopy -= piece;
var result = await ADCBase.DecompressAsync(
_stream,
cancellationToken: cancellationToken
);
_outBuffer = result.Output;
_outPosition = 0;
if (result.BytesRead == 0 || _outBuffer is null || _outBuffer.Length == 0)
{
return copied;
}
}
Array.Copy(_outBuffer, _outPosition, buffer, inPosition, toCopy);
_outPosition += toCopy;
_position += toCopy;
copied += toCopy;
return copied;
}
public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
public override void SetLength(long value) => throw new NotSupportedException();

View File

@@ -0,0 +1,72 @@
using System;
using System.IO;
namespace SharpCompress.Compressors.Arj
{
[CLSCompliant(true)]
public class BitReader
{
private readonly Stream _input;
private int _bitBuffer; // currently buffered bits
private int _bitCount; // number of bits in buffer
public BitReader(Stream input)
{
_input = input ?? throw new ArgumentNullException(nameof(input));
_bitBuffer = 0;
_bitCount = 0;
}
/// <summary>
/// Reads a single bit from the stream. Returns 0 or 1.
/// </summary>
public int ReadBit()
{
if (_bitCount == 0)
{
int nextByte = _input.ReadByte();
if (nextByte < 0)
{
throw new EndOfStreamException("No more data available in BitReader.");
}
_bitBuffer = nextByte;
_bitCount = 8;
}
int bit = (_bitBuffer >> (_bitCount - 1)) & 1;
_bitCount--;
return bit;
}
/// <summary>
/// Reads n bits (up to 32) from the stream.
/// </summary>
public int ReadBits(int count)
{
if (count < 0 || count > 32)
{
throw new ArgumentOutOfRangeException(
nameof(count),
"Count must be between 0 and 32."
);
}
int result = 0;
for (int i = 0; i < count; i++)
{
result = (result << 1) | ReadBit();
}
return result;
}
/// <summary>
/// Resets any buffered bits.
/// </summary>
public void AlignToByte()
{
_bitCount = 0;
_bitBuffer = 0;
}
}
}
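Bits are consumed MSB-first, which the following sketch illustrates:

var br = new BitReader(new MemoryStream(new byte[] { 0b1011_0001 }));
Console.WriteLine(br.ReadBits(4)); // 11 (0b1011)
Console.WriteLine(br.ReadBit()); // 0
br.AlignToByte(); // discards the remaining buffered bits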

View File

@@ -0,0 +1,43 @@
using System;
using System.Collections;
using System.Collections.Generic;
namespace SharpCompress.Compressors.Arj
{
/// <summary>
/// Iterator that reads history values and pushes each one back into the ring
/// buffer, so LZ77 copies whose source overlaps the write cursor stay valid.
/// </summary>
public class HistoryIterator : IEnumerator<byte>
{
private int _index;
private readonly IRingBuffer _ring;
public HistoryIterator(IRingBuffer ring, int startIndex)
{
_ring = ring;
_index = startIndex;
}
public bool MoveNext()
{
Current = _ring[_index];
_index = unchecked(_index + 1);
// Push value back into the ring buffer
_ring.Push(Current);
return true; // iterator is infinite
}
public void Reset()
{
throw new NotSupportedException();
}
public byte Current { get; private set; }
object IEnumerator.Current => Current;
public void Dispose() { }
}
}

View File

@@ -0,0 +1,218 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
namespace SharpCompress.Compressors.Arj
{
[CLSCompliant(true)]
public enum NodeType
{
Leaf,
Branch,
}
[CLSCompliant(true)]
public sealed class TreeEntry
{
public readonly NodeType Type;
public readonly int LeafValue;
public readonly int BranchIndex;
public const int MAX_INDEX = 4096;
private TreeEntry(NodeType type, int leafValue, int branchIndex)
{
Type = type;
LeafValue = leafValue;
BranchIndex = branchIndex;
}
public static TreeEntry Leaf(int value)
{
return new TreeEntry(NodeType.Leaf, value, -1);
}
public static TreeEntry Branch(int index)
{
if (index >= MAX_INDEX)
{
throw new ArgumentOutOfRangeException(
nameof(index),
"Branch index exceeds MAX_INDEX"
);
}
return new TreeEntry(NodeType.Branch, 0, index);
}
}
[CLSCompliant(true)]
public sealed class HuffTree
{
private readonly List<TreeEntry> _tree;
public HuffTree(int capacity = 0)
{
_tree = new List<TreeEntry>(capacity);
}
public void SetSingle(int value)
{
_tree.Clear();
_tree.Add(TreeEntry.Leaf(value));
}
public void BuildTree(byte[] lengths, int count)
{
if (lengths == null)
{
throw new ArgumentNullException(nameof(lengths));
}
if (count < 0 || count > lengths.Length)
{
throw new ArgumentOutOfRangeException(nameof(count));
}
if (count > TreeEntry.MAX_INDEX / 2)
{
throw new ArgumentException(
$"Count exceeds maximum allowed: {TreeEntry.MAX_INDEX / 2}"
);
}
byte[] slice = new byte[count];
Array.Copy(lengths, slice, count);
BuildTree(slice);
}
public void BuildTree(byte[] valueLengths)
{
if (valueLengths == null)
{
throw new ArgumentNullException(nameof(valueLengths));
}
if (valueLengths.Length > TreeEntry.MAX_INDEX / 2)
{
throw new InvalidOperationException("Too many code lengths");
}
_tree.Clear();
int maxAllocated = 1; // start with a single (root) node
for (byte currentLen = 1; ; currentLen++)
{
// add missing branches up to current limit
int maxLimit = maxAllocated;
for (int i = _tree.Count; i < maxLimit; i++)
{
// TreeEntry.Branch may throw if index too large
try
{
_tree.Add(TreeEntry.Branch(maxAllocated));
}
catch (ArgumentOutOfRangeException e)
{
_tree.Clear();
throw new InvalidOperationException("Branch index exceeds limit", e);
}
// each branch node allocates two children
maxAllocated += 2;
}
// fill tree with leaves found in the lengths table at the current length
bool moreLeaves = false;
for (int value = 0; value < valueLengths.Length; value++)
{
byte len = valueLengths[value];
if (len == currentLen)
{
_tree.Add(TreeEntry.Leaf(value));
}
else if (len > currentLen)
{
moreLeaves = true; // there are more leaves to process
}
}
// sanity check (too many leaves)
if (_tree.Count > maxAllocated)
{
throw new InvalidOperationException("Too many leaves");
}
// stop once no codes longer than currentLen remain
if (!moreLeaves)
{
break;
}
}
// ensure tree is complete
if (_tree.Count != maxAllocated)
{
throw new InvalidOperationException(
$"Missing some leaves: tree count = {_tree.Count}, expected = {maxAllocated}"
);
}
}
public int ReadEntry(BitReader reader)
{
if (_tree.Count == 0)
{
throw new InvalidOperationException("Tree not initialized");
}
TreeEntry node = _tree[0];
while (true)
{
if (node.Type == NodeType.Leaf)
{
return node.LeafValue;
}
int bit = reader.ReadBit();
int index = node.BranchIndex + bit;
if (index >= _tree.Count)
{
throw new InvalidOperationException("Invalid branch index during read");
}
node = _tree[index];
}
}
public override string ToString()
{
var result = new StringBuilder();
void FormatStep(int index, string prefix)
{
var node = _tree[index];
if (node.Type == NodeType.Leaf)
{
result.AppendLine($"{prefix} -> {node.LeafValue}");
}
else
{
FormatStep(node.BranchIndex, prefix + "0");
FormatStep(node.BranchIndex + 1, prefix + "1");
}
}
if (_tree.Count > 0)
{
FormatStep(0, "");
}
return result.ToString();
}
}
}
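A worked example of the length-driven construction above (a sketch, not from the diff): canonical code lengths { 1, 2, 2 } for symbols 0..2 produce the codes 0, 10 and 11, which ReadEntry then walks bit by bit:

var tree = new HuffTree();
tree.BuildTree(new byte[] { 1, 2, 2 }); // symbol 0 -> "0", 1 -> "10", 2 -> "11"
var reader = new BitReader(new MemoryStream(new byte[] { 0b0101_1000 })); // bits 0, 10, 11
Console.WriteLine(tree.ReadEntry(reader)); // 0
Console.WriteLine(tree.ReadEntry(reader)); // 1
Console.WriteLine(tree.ReadEntry(reader)); // 2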

View File

@@ -0,0 +1,9 @@
namespace SharpCompress.Compressors.Arj
{
public interface ILhaDecoderConfig
{
int HistoryBits { get; }
int OffsetBits { get; }
RingBuffer RingBuffer { get; }
}
}

View File

@@ -0,0 +1,17 @@
namespace SharpCompress.Compressors.Arj
{
public interface IRingBuffer
{
int BufferSize { get; }
int Cursor { get; }
void SetCursor(int pos);
void Push(byte value);
HistoryIterator IterFromOffset(int offset);
HistoryIterator IterFromPos(int pos);
byte this[int index] { get; }
}
}

View File

@@ -0,0 +1,191 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.IO;
namespace SharpCompress.Compressors.Arj
{
[CLSCompliant(true)]
public sealed class LHDecoderStream : Stream, IStreamStack
{
#if DEBUG_STREAMS
long IStreamStack.InstanceId { get; set; }
#endif
int IStreamStack.DefaultBufferSize { get; set; }
Stream IStreamStack.BaseStream() => _stream;
int IStreamStack.BufferSize
{
get => 0;
set { }
}
int IStreamStack.BufferPosition
{
get => 0;
set { }
}
void IStreamStack.SetPosition(long position) { }
private readonly BitReader _bitReader;
private readonly Stream _stream;
// Buffer containing *all* bytes decoded so far.
private readonly List<byte> _buffer = new();
private long _readPosition;
private readonly int _originalSize;
private bool _finishedDecoding;
private bool _disposed;
private const int THRESHOLD = 3;
public LHDecoderStream(Stream compressedStream, int originalSize)
{
_stream = compressedStream ?? throw new ArgumentNullException(nameof(compressedStream));
if (!compressedStream.CanRead)
throw new ArgumentException(
"compressedStream must be readable.",
nameof(compressedStream)
);
_bitReader = new BitReader(compressedStream);
_originalSize = originalSize;
_readPosition = 0;
_finishedDecoding = (originalSize == 0);
}
public Stream BaseStream => _stream;
public override bool CanRead => true;
public override bool CanSeek => false;
public override bool CanWrite => false;
public override long Length => _originalSize;
public override long Position
{
get => _readPosition;
set => throw new NotSupportedException();
}
/// <summary>
/// Decodes a single element (literal or back-reference) and appends it to _buffer.
/// Returns true if data was added, or false if all input has already been decoded.
/// </summary>
private bool DecodeNext()
{
if (_buffer.Count >= _originalSize)
{
_finishedDecoding = true;
return false;
}
int len = DecodeVal(0, 7);
if (len == 0)
{
byte nextChar = (byte)_bitReader.ReadBits(8);
_buffer.Add(nextChar);
}
else
{
int repCount = len + THRESHOLD - 1;
int backPtr = DecodeVal(9, 13);
if (backPtr >= _buffer.Count)
throw new InvalidDataException("Invalid back_ptr in LH stream");
int srcIndex = _buffer.Count - 1 - backPtr;
for (int j = 0; j < repCount && _buffer.Count < _originalSize; j++)
{
byte b = _buffer[srcIndex];
_buffer.Add(b);
srcIndex++;
// srcIndex may grow; it's allowed (source region can overlap destination)
}
}
if (_buffer.Count >= _originalSize)
{
_finishedDecoding = true;
}
return true;
}
private int DecodeVal(int from, int to)
{
int add = 0;
int bit = from;
while (bit < to && _bitReader.ReadBits(1) == 1)
{
add |= 1 << bit;
bit++;
}
int res = bit > 0 ? _bitReader.ReadBits(bit) : 0;
return res + add;
}
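// Worked example for DecodeVal(from: 0, to: 7): each leading 1-bit sets the
// next bit of `add` (starting at `from`) until a 0-bit or `to` stops the run,
// then `bit` literal bits are read. Input bits 1 1 0 0 1 give add = 0b11,
// res = 0b01, so the decoded value is 3 + 1 = 4.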
/// <summary>
/// Reads decompressed bytes into buffer[offset..offset+count].
/// The method decodes additional data on demand when needed.
/// </summary>
public override int Read(byte[] buffer, int offset, int count)
{
if (_disposed)
throw new ObjectDisposedException(nameof(LHDecoderStream));
if (buffer == null)
throw new ArgumentNullException(nameof(buffer));
if (offset < 0 || count < 0 || offset + count > buffer.Length)
throw new ArgumentOutOfRangeException("offset/count");
if (_readPosition >= _originalSize)
return 0; // EOF
int totalRead = 0;
while (totalRead < count && _readPosition < _originalSize)
{
if (_readPosition >= _buffer.Count)
{
bool had = DecodeNext();
if (!had)
{
break;
}
}
int available = _buffer.Count - (int)_readPosition;
if (available <= 0)
{
if (!_finishedDecoding)
{
continue;
}
break;
}
int toCopy = Math.Min(available, count - totalRead);
_buffer.CopyTo((int)_readPosition, buffer, offset + totalRead, toCopy);
_readPosition += toCopy;
totalRead += toCopy;
}
return totalRead;
}
public override void Flush() { } // no-op: Flush should not throw on a read-only stream
public override long Seek(long offset, SeekOrigin origin) =>
throw new NotSupportedException();
public override void SetLength(long value) => throw new NotSupportedException();
public override void Write(byte[] buffer, int offset, int count) =>
throw new NotSupportedException();
}
}
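
A minimal usage sketch (not part of the diff): draining an LHDecoderStream into memory. The compressed bytes and original size are assumed to come from a parsed ARJ entry header.

using System.IO;
using SharpCompress.Compressors.Arj;

static byte[] DecompressLh(byte[] compressed, int originalSize)
{
using var input = new MemoryStream(compressed);
using var decoder = new LHDecoderStream(input, originalSize);
using var output = new MemoryStream();
decoder.CopyTo(output); // Read() decodes literals and back-references on demand
return output.ToArray();
}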

View File

@@ -0,0 +1,9 @@
namespace SharpCompress.Compressors.Arj
{
public class Lh5DecoderCfg : ILhaDecoderConfig
{
public int HistoryBits => 14;
public int OffsetBits => 4;
public RingBuffer RingBuffer { get; } = new RingBuffer(1 << 14);
}
}

View File

@@ -0,0 +1,9 @@
namespace SharpCompress.Compressors.Arj
{
public class Lh7DecoderCfg : ILhaDecoderConfig
{
public int HistoryBits => 17;
public int OffsetBits => 5;
public RingBuffer RingBuffer { get; } = new RingBuffer(1 << 17);
}
}
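
The two configs differ only in window geometry; LhaStream<C> takes its config as a type parameter, so another LH variant is just another ILhaDecoderConfig. A hypothetical sketch (not in this diff; the bit widths are assumptions) of a 16-bit-window config:

namespace SharpCompress.Compressors.Arj
{
public class Lh6DecoderCfg : ILhaDecoderConfig
{
public int HistoryBits => 16;
public int OffsetBits => 5;
public RingBuffer RingBuffer { get; } = new RingBuffer(1 << 16);
}
}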

View File

@@ -0,0 +1,363 @@
using System;
using System.IO;
using SharpCompress.IO;
namespace SharpCompress.Compressors.Arj
{
[CLSCompliant(true)]
public sealed class LhaStream<C> : Stream, IStreamStack
where C : ILhaDecoderConfig, new()
{
private readonly BitReader _bitReader;
private readonly Stream _stream;
private readonly HuffTree _commandTree;
private readonly HuffTree _offsetTree;
private int _remainingCommands;
private (int offset, int count)? _copyProgress;
private readonly RingBuffer _ringBuffer;
private readonly C _config = new C();
private const int NUM_COMMANDS = 510;
private const int NUM_TEMP_CODELEN = 20;
private readonly int _originalSize;
private int _producedBytes = 0;
#if DEBUG_STREAMS
long IStreamStack.InstanceId { get; set; }
#endif
int IStreamStack.DefaultBufferSize { get; set; }
Stream IStreamStack.BaseStream() => _stream;
int IStreamStack.BufferSize
{
get => 0;
set { }
}
int IStreamStack.BufferPosition
{
get => 0;
set { }
}
void IStreamStack.SetPosition(long position) { }
public LhaStream(Stream compressedStream, int originalSize)
{
_stream = compressedStream ?? throw new ArgumentNullException(nameof(compressedStream));
_bitReader = new BitReader(compressedStream);
_ringBuffer = _config.RingBuffer;
_commandTree = new HuffTree(NUM_COMMANDS * 2);
_offsetTree = new HuffTree(NUM_TEMP_CODELEN * 2);
_remainingCommands = 0;
_copyProgress = null;
_originalSize = originalSize;
}
public override bool CanRead => true;
public override bool CanSeek => false;
public override bool CanWrite => false;
public override long Length => throw new NotSupportedException();
public override long Position
{
get => throw new NotSupportedException();
set => throw new NotSupportedException();
}
public override void Flush() { }
public override long Seek(long offset, SeekOrigin origin) =>
throw new NotSupportedException();
public override void SetLength(long value) => throw new NotSupportedException();
public override void Write(byte[] buffer, int offset, int count) =>
throw new NotSupportedException();
public override int Read(byte[] buffer, int offset, int count)
{
if (buffer == null)
{
throw new ArgumentNullException(nameof(buffer));
}
if (offset < 0 || count < 0 || count > buffer.Length - offset)
{
throw new ArgumentOutOfRangeException(nameof(count), "offset/count out of range");
}
if (_producedBytes >= _originalSize)
{
return 0; // EOF
}
if (count == 0)
{
return 0;
}
// FillBuffer always writes from index 0, so honor offset/count with a bounded slice
if (offset == 0 && count == buffer.Length)
{
return FillBuffer(buffer);
}
var temp = new byte[count];
int bytesRead = FillBuffer(temp);
Array.Copy(temp, 0, buffer, offset, bytesRead);
return bytesRead;
}
private byte ReadCodeLength()
{
// accumulate in an int: a byte counter would wrap to 0 before the overflow check could fire
int len = _bitReader.ReadBits(3);
if (len == 7)
{
while (_bitReader.ReadBit() != 0)
{
len++;
if (len > 255)
{
throw new InvalidOperationException("Code length overflow");
}
}
}
return (byte)len;
}
private int ReadCodeSkip(int skipRange)
{
int bits;
int increment;
switch (skipRange)
{
case 0:
return 1;
case 1:
bits = 4;
increment = 3; // 3..=18
break;
default:
bits = 9;
increment = 20; // 20..=531
break;
}
int skip = _bitReader.ReadBits(bits);
return skip + increment;
}
private void ReadTempTree()
{
byte[] codeLengths = new byte[NUM_TEMP_CODELEN];
// number of codes to read (5 bits)
int numCodes = _bitReader.ReadBits(5);
// single code only
if (numCodes == 0)
{
int code = _bitReader.ReadBits(5);
_offsetTree.SetSingle((byte)code);
return;
}
if (numCodes > NUM_TEMP_CODELEN)
{
throw new Exception("temporary codelen table has invalid size");
}
// read actual lengths
int count = Math.Min(3, numCodes);
for (int i = 0; i < count; i++)
{
codeLengths[i] = (byte)ReadCodeLength();
}
// 2-bit skip value follows
int skip = _bitReader.ReadBits(2);
if (3 + skip > numCodes)
{
throw new Exception("temporary codelen table has invalid size");
}
for (int i = 3 + skip; i < numCodes; i++)
{
codeLengths[i] = (byte)ReadCodeLength();
}
_offsetTree.BuildTree(codeLengths, numCodes);
}
private void ReadCommandTree()
{
byte[] codeLengths = new byte[NUM_COMMANDS];
// number of codes to read (9 bits)
int numCodes = _bitReader.ReadBits(9);
// single code only
if (numCodes == 0)
{
int code = _bitReader.ReadBits(9);
_commandTree.SetSingle((ushort)code);
return;
}
if (numCodes > NUM_COMMANDS)
{
throw new InvalidDataException("commands codelen table has invalid size");
}
// flat decode loop: a skip code advances the write index, anything else records a code length
int index = 0;
while (index < numCodes)
{
int code = _offsetTree.ReadEntry(_bitReader);
if (code >= 0 && code <= 2) // skip range
{
index += ReadCodeSkip(code);
}
else
{
codeLengths[index++] = (byte)(code - 2);
}
}
_commandTree.BuildTree(codeLengths, numCodes);
}
private void ReadOffsetTree()
{
int numCodes = _bitReader.ReadBits(_config.OffsetBits);
if (numCodes == 0)
{
int code = _bitReader.ReadBits(_config.OffsetBits);
_offsetTree.SetSingle((ushort)code); // match the cast used for the command tree
return;
}
if (numCodes > _config.HistoryBits)
{
throw new InvalidDataException("Offset code table too large");
}
byte[] codeLengths = new byte[NUM_TEMP_CODELEN];
for (int i = 0; i < numCodes; i++)
{
codeLengths[i] = (byte)ReadCodeLength();
}
_offsetTree.BuildTree(codeLengths, numCodes);
}
private void BeginNewBlock()
{
ReadTempTree();
ReadCommandTree();
ReadOffsetTree();
}
private int ReadCommand() => _commandTree.ReadEntry(_bitReader);
private int ReadOffset()
{
int bits = _offsetTree.ReadEntry(_bitReader);
if (bits <= 1)
{
return bits;
}
int res = _bitReader.ReadBits(bits - 1);
return res | (1 << (bits - 1));
}
private int CopyFromHistory(byte[] target, int targetIndex, int offset, int count)
{
var historyIter = _ringBuffer.IterFromOffset(offset);
int copied = 0;
while (
copied < count && historyIter.MoveNext() && (targetIndex + copied) < target.Length
)
{
target[targetIndex + copied] = historyIter.Current;
copied++;
}
if (copied < count)
{
_copyProgress = (offset, count - copied);
}
return copied;
}
public int FillBuffer(byte[] buffer)
{
int bufLen = buffer.Length;
int bufIndex = 0;
// stop when we reached original size
if (_producedBytes >= _originalSize)
{
return 0;
}
// calculate limit, so that we don't go over the original size
int remaining = (int)Math.Min(bufLen, _originalSize - _producedBytes);
while (bufIndex < remaining)
{
if (_copyProgress.HasValue)
{
var (offset, count) = _copyProgress.Value;
_copyProgress = null;
int copied = CopyFromHistory(
buffer,
bufIndex,
offset,
Math.Min(count, remaining - bufIndex)
);
bufIndex += copied;
if (count > copied)
{
// keep the unfinished part of the back-reference; clearing it unconditionally would drop data
_copyProgress = (offset, count - copied);
}
}
if (_remainingCommands == 0)
{
_remainingCommands = _bitReader.ReadBits(16);
if (bufIndex + _remainingCommands > remaining)
{
break;
}
BeginNewBlock();
}
_remainingCommands--;
int command = ReadCommand();
if (command >= 0 && command <= 0xFF)
{
byte value = (byte)command;
buffer[bufIndex++] = value;
_ringBuffer.Push(value);
}
else
{
int count = command - 0x100 + 3;
int offset = ReadOffset();
int copied = CopyFromHistory(
buffer,
bufIndex,
offset,
Math.Min(count, remaining - bufIndex)
);
bufIndex += copied;
if (count > copied)
{
// the back-reference did not fit in this read; resume it on the next call
_copyProgress = (offset, count - copied);
}
}
}
_producedBytes += bufIndex;
return bufIndex;
}
}
}
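
A minimal usage sketch (assumed inputs): decoding an lh5-compressed payload through the generic stream.

using System.IO;
using SharpCompress.Compressors.Arj;

static byte[] DecompressLh5(byte[] compressed, int originalSize)
{
using var input = new MemoryStream(compressed);
using var lha = new LhaStream<Lh5DecoderCfg>(input, originalSize);
using var output = new MemoryStream();
lha.CopyTo(output); // FillBuffer() decodes Huffman blocks on demand
return output.ToArray();
}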

View File

@@ -0,0 +1,67 @@
using System;
namespace SharpCompress.Compressors.Arj
{
/// <summary>
/// A fixed-size ring buffer where N must be a power of two.
/// </summary>
public class RingBuffer : IRingBuffer
{
private readonly byte[] _buffer;
private int _cursor;
public int BufferSize { get; }
public int Cursor => _cursor;
private readonly int _mask;
public RingBuffer(int size)
{
if (size <= 0 || (size & (size - 1)) != 0)
{
throw new ArgumentException("RingBuffer size must be a positive power of two", nameof(size));
}
BufferSize = size;
_buffer = new byte[size];
_cursor = 0;
_mask = size - 1;
// Fill with spaces
for (int i = 0; i < size; i++)
{
_buffer[i] = (byte)' ';
}
}
public void SetCursor(int pos)
{
_cursor = pos & _mask;
}
public void Push(byte value)
{
int index = _cursor;
_buffer[index & _mask] = value;
_cursor = (index + 1) & _mask;
}
public byte this[int index] => _buffer[index & _mask];
public HistoryIterator IterFromOffset(int offset)
{
int masked = (offset & _mask) + 1;
int startIndex = _cursor + BufferSize - masked;
return new HistoryIterator(this, startIndex);
}
public HistoryIterator IterFromPos(int pos)
{
int startIndex = pos & _mask;
return new HistoryIterator(this, startIndex);
}
}
}
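
A small sketch of the index conventions above (iterator behavior inferred from IterFromOffset's arithmetic): the indexer masks every index, so positions wrap, and back-offset 0 addresses the most recently pushed byte.

var ring = new RingBuffer(16);
ring.Push((byte)'a');
ring.Push((byte)'b');
ring.Push((byte)'c');
byte last = ring[ring.Cursor - 1]; // 'c': masking makes negative or overflowed indices wrap
// IterFromOffset(0) starts at cursor + 16 - 1 = 18, which masks to the same slot as 'c'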

View File

@@ -1,5 +1,7 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Compressors.BZip2;
@@ -96,13 +98,37 @@ public sealed class BZip2Stream : Stream, IStreamStack
public override void SetLength(long value) => stream.SetLength(value);
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override int Read(Span<byte> buffer) => stream.Read(buffer);
public override void Write(ReadOnlySpan<byte> buffer) => stream.Write(buffer);
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
) => await stream.ReadAsync(buffer, cancellationToken).ConfigureAwait(false);
public override async ValueTask WriteAsync(
ReadOnlyMemory<byte> buffer,
CancellationToken cancellationToken = default
) => await stream.WriteAsync(buffer, cancellationToken).ConfigureAwait(false);
#endif
public override async Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken = default
) => await stream.ReadAsync(buffer, offset, count, cancellationToken).ConfigureAwait(false);
public override async Task WriteAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken = default
) => await stream.WriteAsync(buffer, offset, count, cancellationToken).ConfigureAwait(false);
public override void Write(byte[] buffer, int offset, int count) =>
stream.Write(buffer, offset, count);
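
A hedged usage sketch of the new delegating overrides (the file name is an assumption; the Memory<byte> overload only compiles on modern target frameworks):

using System;
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Compressors;
using SharpCompress.Compressors.BZip2;

static async Task<int> ReadFirstChunkAsync(string path)
{
using var file = File.OpenRead(path);
using var bzip2 = new BZip2Stream(file, CompressionMode.Decompress, decompressConcatenated: false);
var buffer = new byte[8192];
// forwards to the wrapped decoder stream's ReadAsync added in this change
return await bzip2.ReadAsync(buffer.AsMemory());
}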

View File

@@ -2,6 +2,8 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
/*
@@ -1127,6 +1129,28 @@ internal class CBZip2InputStream : Stream, IStreamStack
return k;
}
public override Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken = default
)
{
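// bzip2 decoding here is CPU-bound, byte-oriented work, so this override
// completes synchronously and returns a finished task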
var c = -1;
int k;
for (k = 0; k < count; ++k)
{
cancellationToken.ThrowIfCancellationRequested();
c = ReadByte();
if (c == -1)
{
break;
}
buffer[k + offset] = (byte)c;
}
return Task.FromResult(k);
}
public override long Seek(long offset, SeekOrigin origin) => 0;
public override void SetLength(long value) { }

View File

@@ -1,5 +1,7 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
/*
@@ -542,6 +544,12 @@ internal sealed class CBZip2OutputStream : Stream, IStreamStack
private void EndBlock()
{
// Skip block processing for empty input (no data written)
if (last < 0)
{
return;
}
blockCRC = mCrc.GetFinalCRC();
combinedCRC = (combinedCRC << 1) | (int)(((uint)combinedCRC) >> 31);
combinedCRC ^= blockCRC;
@@ -2022,6 +2030,21 @@ internal sealed class CBZip2OutputStream : Stream, IStreamStack
}
}
public override Task WriteAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken = default
)
{
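// the block-sorting compressor consumes input one byte at a time, so this
// override runs synchronously and returns Task.CompletedTask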
for (var k = 0; k < count; ++k)
{
cancellationToken.ThrowIfCancellationRequested();
WriteByte(buffer[k + offset]);
}
return Task.CompletedTask;
}
public override bool CanRead => false;
public override bool CanSeek => false;

View File

@@ -2,12 +2,12 @@
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
#nullable disable
using System;
using System.Diagnostics;
using System.IO;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Zip;
using SharpCompress.IO;
@@ -39,7 +39,6 @@ public sealed class Deflate64Stream : Stream, IStreamStack
private const int DEFAULT_BUFFER_SIZE = 8192;
private Stream _stream;
private CompressionMode _mode;
private InflaterManaged _inflater;
private byte[] _buffer;
@@ -62,61 +61,23 @@ public sealed class Deflate64Stream : Stream, IStreamStack
throw new ArgumentException("Deflate64: input stream is not readable", nameof(stream));
}
InitializeInflater(stream, ZipCompressionMethod.Deflate64);
#if DEBUG_STREAMS
this.DebugConstruct(typeof(Deflate64Stream));
#endif
}
/// <summary>
/// Sets up this DeflateManagedStream to be used for Inflation/Decompression
/// </summary>
private void InitializeInflater(
Stream stream,
ZipCompressionMethod method = ZipCompressionMethod.Deflate
)
{
Debug.Assert(stream != null);
Debug.Assert(
method == ZipCompressionMethod.Deflate || method == ZipCompressionMethod.Deflate64
);
if (!stream.CanRead)
{
throw new ArgumentException("Deflate64: input stream is not readable", nameof(stream));
}
_inflater = new InflaterManaged(method == ZipCompressionMethod.Deflate64);
_inflater = new InflaterManaged(true);
_stream = stream;
_mode = CompressionMode.Decompress;
_buffer = new byte[DEFAULT_BUFFER_SIZE];
#if DEBUG_STREAMS
this.DebugConstruct(typeof(Deflate64Stream));
#endif
}
public override bool CanRead
{
get
{
if (_stream is null)
{
return false;
}
return (_mode == CompressionMode.Decompress && _stream.CanRead);
}
}
public override bool CanRead => _stream.CanRead;
public override bool CanWrite
{
get
{
if (_stream is null)
{
return false;
}
return (_mode == CompressionMode.Compress && _stream.CanWrite);
}
}
public override bool CanWrite => false;
public override bool CanSeek => false;
@@ -138,7 +99,6 @@ public sealed class Deflate64Stream : Stream, IStreamStack
public override int Read(byte[] array, int offset, int count)
{
EnsureDecompressionMode();
ValidateParameters(array, offset, count);
EnsureNotDisposed();
@@ -185,6 +145,106 @@ public sealed class Deflate64Stream : Stream, IStreamStack
return count - remainingCount;
}
public override async Task<int> ReadAsync(
byte[] array,
int offset,
int count,
CancellationToken cancellationToken
)
{
ValidateParameters(array, offset, count);
EnsureNotDisposed();
int bytesRead;
var currentOffset = offset;
var remainingCount = count;
while (true)
{
bytesRead = _inflater.Inflate(array, currentOffset, remainingCount);
currentOffset += bytesRead;
remainingCount -= bytesRead;
if (remainingCount == 0)
{
break;
}
if (_inflater.Finished())
{
// if we finished decompressing, we can't have anything left in the output window.
Debug.Assert(
_inflater.AvailableOutput == 0,
"We should have copied all stuff out!"
);
break;
}
var bytes = await _stream
.ReadAsync(_buffer, 0, _buffer.Length, cancellationToken)
.ConfigureAwait(false);
if (bytes <= 0)
{
break;
}
else if (bytes > _buffer.Length)
{
// The stream is either malicious or poorly implemented and returned a number of
// bytes larger than the buffer supplied to it.
throw new InvalidFormatException("Deflate64: invalid data");
}
_inflater.SetInput(_buffer, 0, bytes);
}
return count - remainingCount;
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
)
{
EnsureNotDisposed();
// InflaterManaged has no Span-based Inflate method, so we need an array:
// use the Memory's backing array when one exists, otherwise rent a temporary from ArrayPool
if (
System.Runtime.InteropServices.MemoryMarshal.TryGetArray<byte>(
buffer,
out var arraySegment
)
)
{
// Fast path: the Memory<byte> is backed by an array
return await ReadAsync(
arraySegment.Array!,
arraySegment.Offset,
arraySegment.Count,
cancellationToken
)
.ConfigureAwait(false);
}
else
{
// Slow path: rent a temporary array
var tempBuffer = System.Buffers.ArrayPool<byte>.Shared.Rent(buffer.Length);
try
{
var bytesRead = await ReadAsync(tempBuffer, 0, buffer.Length, cancellationToken)
.ConfigureAwait(false);
tempBuffer.AsMemory(0, bytesRead).CopyTo(buffer);
return bytesRead;
}
finally
{
System.Buffers.ArrayPool<byte>.Shared.Return(tempBuffer);
}
}
}
#endif
private void ValidateParameters(byte[] array, int offset, int count)
{
if (array is null)
@@ -220,26 +280,6 @@ public sealed class Deflate64Stream : Stream, IStreamStack
private static void ThrowStreamClosedException() =>
throw new ObjectDisposedException(null, "Deflate64: stream has been disposed");
private void EnsureDecompressionMode()
{
if (_mode != CompressionMode.Decompress)
{
ThrowCannotReadFromDeflateManagedStreamException();
}
}
[MethodImpl(MethodImplOptions.NoInlining)]
private static void ThrowCannotReadFromDeflateManagedStreamException() =>
throw new InvalidOperationException("Deflate64: cannot read from this stream");
private void EnsureCompressionMode()
{
if (_mode != CompressionMode.Compress)
{
ThrowCannotWriteToDeflateManagedStreamException();
}
}
[MethodImpl(MethodImplOptions.NoInlining)]
private static void ThrowCannotWriteToDeflateManagedStreamException() =>
throw new InvalidOperationException("Deflate64: cannot write to this stream");
@@ -281,20 +321,17 @@ public sealed class Deflate64Stream : Stream, IStreamStack
#endif
if (disposing)
{
_stream?.Dispose();
_stream.Dispose();
}
}
finally
{
_stream = null;
try
{
_inflater?.Dispose();
_inflater.Dispose();
}
finally
{
_inflater = null;
base.Dispose(disposing);
}
}

Some files were not shown because too many files have changed in this diff