Compare commits

...

87 Commits

Author SHA1 Message Date
copilot-swe-agent[bot]
ea02d31096 Add IsArchiveAsync overloads for Zip and GZip factories
- Added IsArchiveAsync interface method to IFactory
- Implemented async versions of IsZipFile, IsZipMulti, IsGZipFile
- Updated ZipFactory and GZipFactory to override IsArchiveAsync
- Updated ReaderFactory.OpenAsync to use IsArchiveAsync
- Fixed Zip_Reader_Disposal_Test2_Async to use ReaderFactory.OpenAsync
- Fixed TestStream to properly forward ReadAsync calls
- Removed BufferedStream wrapping from AsyncBinaryReader as it uses sync Read
- Added default implementation in Factory base class

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-01-02 18:10:54 +00:00
Adam Hathcock
d04830ba90 Add some OpenAsync 2026-01-02 17:52:18 +00:00
Adam Hathcock
8533b09091 start of implementing zip reading async 2025-12-31 14:53:55 +00:00
Adam Hathcock
44b7955d85 reader tests 2025-12-31 14:43:15 +00:00
Adam Hathcock
038b9f18c6 Merge remote-tracking branch 'origin/master' into copilot/add-buffered-stream-async-read 2025-12-31 14:24:31 +00:00
Adam Hathcock
5667595587 Merge pull request #1091 from adamhathcock/adam/update-deps2
Update dependencies
2025-12-31 14:23:58 +00:00
copilot-swe-agent[bot]
6e0e20ba6e Fix zip64_locator to use ReadUInt32Async instead of ReadUInt16Async
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-30 11:50:41 +00:00
copilot-swe-agent[bot]
ec31cb9987 Fix Zip headers to support both sync and async reading
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-30 11:47:31 +00:00
Adam Hathcock
32d5b61c4a Merge pull request #1093 from adamhathcock/copilot/sub-pr-1091 2025-12-30 11:32:05 +00:00
copilot-swe-agent[bot]
128c9e639f Add back System.Buffers and System.Memory packages
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-30 11:30:12 +00:00
copilot-swe-agent[bot]
5e3f01dc03 Initial plan 2025-12-30 11:26:39 +00:00
copilot-swe-agent[bot]
39a0b4ce78 Use BufferedStream for async reading in AsyncBinaryReader
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-30 11:23:55 +00:00
Adam Hathcock
af719707bf Merge branch 'adam/async-binary-reader' into copilot/add-buffered-stream-async-read 2025-12-30 11:17:16 +00:00
copilot-swe-agent[bot]
8415a19912 Initial plan 2025-12-30 11:15:28 +00:00
Adam Hathcock
1607d2768e Merge branch 'master' into adam/async-binary-reader 2025-12-30 11:13:14 +00:00
Adam Hathcock
c97c05a3a7 Update dependencies 2025-12-30 11:11:40 +00:00
Adam Hathcock
b2beea9c4e Merge pull request #1077 from adamhathcock/copilot/provide-interface-for-entries
Remove ExtractAllEntries restriction for non-SOLID archives
2025-12-23 15:47:52 +00:00
Adam Hathcock
41fbaa1c28 Merge pull request #1085 from adamhathcock/adam/edit-md-files
add some markdown files for planning
2025-12-23 15:38:56 +00:00
Adam Hathcock
d9274cf794 Update tests/SharpCompress.Test/ExtractAllEntriesTests.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-23 15:38:17 +00:00
Adam Hathcock
583b048046 Update USAGE.md
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-23 15:38:00 +00:00
Adam Hathcock
ead5916eae Update USAGE.md
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-23 15:37:28 +00:00
Adam Hathcock
d15ab92da3 add some markdown files for planning 2025-12-23 15:37:04 +00:00
Adam Hathcock
1ab30f2af5 Merge pull request #1084 from adamhathcock/copilot/fix-handled-system-exception
Avoid NotSupportedException overhead in SharpCompressStream for non-seekable streams
2025-12-23 15:15:15 +00:00
Adam Hathcock
4dbe0b91f1 Update src/SharpCompress/IO/SharpCompressStream.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-23 15:06:24 +00:00
Adam Hathcock
a972d3784e Merge pull request #1082 from adamhathcock/dependabot/nuget/JetBrains.Profiler.SelfApi-2.5.15
Bump JetBrains.Profiler.SelfApi from 2.5.14 to 2.5.15
2025-12-23 15:04:17 +00:00
copilot-swe-agent[bot]
6991900eb0 Remove try/catch blocks, just check CanSeek as requested
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-23 15:01:41 +00:00
copilot-swe-agent[bot]
d614beb9eb Add explanatory comments for CanSeek checks and try-catch blocks
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-23 14:41:44 +00:00
copilot-swe-agent[bot]
253a46d458 Fix NotSupportedException in SharpCompressStream by checking CanSeek
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-23 14:38:11 +00:00
copilot-swe-agent[bot]
32b1ec32c6 Initial plan 2025-12-23 14:32:38 +00:00
Adam Hathcock
eb2cba09b2 update usage 2025-12-23 09:34:55 +00:00
Adam Hathcock
e79dceb67e check should be there 2025-12-23 09:31:51 +00:00
Adam Hathcock
87c38d6dab fix ordering and token passing 2025-12-23 09:22:38 +00:00
dependabot[bot]
9e98d9c45c Bump JetBrains.Profiler.SelfApi from 2.5.14 to 2.5.15
---
updated-dependencies:
- dependency-name: JetBrains.Profiler.SelfApi
  dependency-version: 2.5.15
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-22 09:39:01 +00:00
Adam Hathcock
0e9a4b0511 Merge remote-tracking branch 'origin/master' into copilot/provide-interface-for-entries
# Conflicts:
#	src/SharpCompress/Archives/AbstractArchive.cs
2025-12-19 15:41:51 +00:00
Adam Hathcock
eae25aff64 Merge pull request #1076 from adamhathcock/adam/extract-all-test
add extract all test
2025-12-19 14:58:33 +00:00
Adam Hathcock
b8c06ff36e oops 2025-12-19 14:39:37 +00:00
Adam Hathcock
6cf2e054bf fmt 2025-12-19 14:33:24 +00:00
Adam Hathcock
95749234f5 Use proper polyfill for net48 2025-12-19 14:33:06 +00:00
Adam Hathcock
b976961434 Merge fixes 2025-12-19 14:22:09 +00:00
Adam Hathcock
e1aa727513 Merge remote-tracking branch 'origin/master' into adam/extract-all-test
# Conflicts:
#	src/SharpCompress/Archives/IArchiveExtensions.cs
2025-12-19 14:16:29 +00:00
Adam Hathcock
1f71ce1be2 Merge pull request #1080 from adamhathcock/copilot/refactor-write-to-directory-consistency
Standardize extraction API to WriteToDirectory with IProgress support
2025-12-19 14:15:43 +00:00
Adam Hathcock
cf13de6ac1 Merge pull request #1081 from adamhathcock/copilot/sub-pr-1076-again
Fix async LZMA extraction bug for 7Zip archives
2025-12-19 14:14:16 +00:00
Adam Hathcock
c2e01798f8 refactor archive extensions 2025-12-19 14:09:01 +00:00
copilot-swe-agent[bot]
8fc8295a89 Add TODO and explanation for SyncOnlyStream workaround
The SyncOnlyStream wrapper is necessary because the LZMA decoder has bugs in its async implementation that cause state corruption (IndexOutOfRangeException, DataErrorException) even with fresh, non-shared streams.

Without this wrapper, async operations on LZMA streams fail. The proper fix would be to repair the async bugs in LzmaStream.ReadAsync, Decoder.CodeAsync, and OutWindow async operations, but that requires deep changes to the decoder state machine.

Added detailed comments explaining this is a workaround and where the real fix should go.

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-19 13:47:49 +00:00
copilot-swe-agent[bot]
d392991764 Remove CancellationToken from synchronous WriteToDirectory overload
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-19 13:40:22 +00:00
copilot-swe-agent[bot]
e57e87090f Implement option 2: Avoid async LZMA state corruption for 7Zip extraction
- Each 7Zip file now gets a fresh decompression stream instead of reusing shared streams
- Added SyncOnlyStream wrapper to force async operations to use synchronous equivalents
- This avoids LZMA decoder state corruption bugs in async operations
- Performance trade-off: slower async extraction for 7Zip but correctness guaranteed
- Documented behavior and performance implications in FORMATS.md

All 14 ExtractAll tests now pass (both async and sync)

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-19 13:33:04 +00:00
copilot-swe-agent[bot]
c701bbbee3 Run CSharpier formatting and document check-format process
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-19 13:31:35 +00:00
copilot-swe-agent[bot]
2f0eb0bd4b Replace Action<double> with IProgress<ProgressReport> for progress reporting
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-19 13:25:36 +00:00
copilot-swe-agent[bot]
17bde8da8a Merge branch 'master' of https://github.com/adamhathcock/sharpcompress into copilot/refactor-write-to-directory-consistency 2025-12-19 13:10:33 +00:00
Adam Hathcock
99d355e6ca Merge remote-tracking branch 'origin/master' into copilot/sub-pr-1076-again 2025-12-19 13:05:33 +00:00
Adam Hathcock
c790fd21a4 reading a single byte shouldn't be async 2025-12-19 13:05:04 +00:00
Adam Hathcock
bee51af48b Merge pull request #1044 from adamhathcock/copilot/add-progress-reporting
Unified progress reporting for compression and extraction operations
2025-12-19 12:54:55 +00:00
Adam Hathcock
ca743eae22 fix for running net 10 tests 2025-12-19 12:17:08 +00:00
copilot-swe-agent[bot]
93504cf82f Add sync test and attempt to fix async LZMA extraction bug
- Restored original async ExtractAllEntries test with using statement
- Added new ExtractAllEntriesSync test (all tests pass)
- Fixed potential partial read bug in LzmaStream.DecodeChunkHeaderAsync
  - Added ReadFullyAsync helper to ensure complete reads
  - ReadAsync is not guaranteed to return all requested bytes
- Async tests for 7Zip still failing with Data Error
  - Issue appears related to LZMA2 stream state management
  - _needDictReset flag not being cleared correctly in async flow
  - Further investigation needed

Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-19 12:15:54 +00:00
Adam Hathcock
6d3e4e842b Merge remote-tracking branch 'origin/master' into copilot/add-progress-reporting 2025-12-19 12:13:34 +00:00
copilot-swe-agent[bot]
54b64a8c3b Fix misleading variable name: emptyDirectory -> parentDirectory
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-19 12:06:36 +00:00
copilot-swe-agent[bot]
0e59bf39f4 Add test for IArchive.WriteToDirectoryAsync
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-19 12:04:47 +00:00
copilot-swe-agent[bot]
8b95e0a76d Standardize on WriteToDirectory naming and add async support
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-19 11:57:01 +00:00
copilot-swe-agent[bot]
48a2ad7b57 Fix ExtractAll test to use synchronous extraction methods for 7Zip archives
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-19 11:55:00 +00:00
copilot-swe-agent[bot]
cfc6651fff Update documentation to reflect ExtractAllEntries universal compatibility
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-19 11:49:35 +00:00
copilot-swe-agent[bot]
b23827a8db Initial plan 2025-12-19 11:48:49 +00:00
copilot-swe-agent[bot]
3f9986c13c Initial plan 2025-12-19 11:47:53 +00:00
copilot-swe-agent[bot]
224989f19b Remove restriction on ExtractAllEntries and add comprehensive tests
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-19 11:45:21 +00:00
copilot-swe-agent[bot]
aaa97e2ce2 Merge master branch - add ZStandard compression support and TarHeaderWriteFormat
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-19 11:36:44 +00:00
copilot-swe-agent[bot]
1d52618137 Initial plan 2025-12-19 11:32:17 +00:00
Adam Hathcock
40e72ad199 fix AI edit 2025-12-08 11:11:51 +00:00
Adam Hathcock
618b4bbb83 try to tell agents to format 2025-12-08 11:04:08 +00:00
Adam Hathcock
1eaf3e6294 format with csharpier 2025-12-08 11:00:29 +00:00
Adam Hathcock
fd453e946d Update src/SharpCompress/IO/ProgressReportingStream.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-08 10:55:24 +00:00
Adam Hathcock
c294071015 Update src/SharpCompress/Archives/IArchiveEntryExtensions.cs
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-08 10:55:06 +00:00
Adam Hathcock
c2f6055e33 format 2025-12-08 10:26:45 +00:00
copilot-swe-agent[bot]
3396f8fe00 Refactor to use ProgressReportingStream for progress tracking
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-05 11:55:29 +00:00
copilot-swe-agent[bot]
9291f58091 Merge master and add comprehensive tests for archive and reader progress
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-12-05 11:44:17 +00:00
copilot-swe-agent[bot]
85f3b17c42 Merge remote-tracking branch 'origin/master' into copilot/add-progress-reporting 2025-12-05 11:33:39 +00:00
copilot-swe-agent[bot]
14d432e22d Pass progress as parameter to WriteTo/WriteToAsync instead of storing on archive
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-30 13:04:40 +00:00
copilot-swe-agent[bot]
0fdf9c74a8 Address code review: Replace dynamic with IArchiveProgressInfo interface
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-30 12:07:17 +00:00
copilot-swe-agent[bot]
e2df7894f9 Remove IArchiveExtractionListener and add IProgress support to Archive Entry extraction
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-30 12:00:54 +00:00
copilot-swe-agent[bot]
7af029b5de Address code review: properly handle zero-sized entries in progress reporting
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-29 17:48:50 +00:00
copilot-swe-agent[bot]
8fc5ca5a71 Unify progress reporting: remove IExtractionListener and add IProgress support for reading
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-29 17:40:10 +00:00
copilot-swe-agent[bot]
aa0356de9f Changes before error encountered
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-29 15:55:21 +00:00
copilot-swe-agent[bot]
0f374b27cf Address code review: ProgressReportingStream now throws on writes
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-27 19:17:10 +00:00
copilot-swe-agent[bot]
0d487df61b Add IProgress support for compression operations with tests
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-27 19:15:10 +00:00
copilot-swe-agent[bot]
c082d4203b Changes before error encountered
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2025-11-27 16:58:32 +00:00
copilot-swe-agent[bot]
d4380b6bb6 Initial plan 2025-11-27 16:48:09 +00:00
Adam Hathcock
fb76bd82f2 first commit of async reader 2025-11-26 08:09:20 +00:00
Adam Hathcock
3bdaba46a9 fmt 2025-11-25 15:39:43 +00:00
Adam Hathcock
7c3c94ed7f Add ArcReaderAsync tests 2025-11-25 14:44:03 +00:00
92 changed files with 3826 additions and 897 deletions

View File

@@ -3,11 +3,11 @@
"isRoot": true,
"tools": {
"csharpier": {
"version": "1.2.3",
"version": "1.2.4",
"commands": [
"csharpier"
],
"rollForward": false
}
}
}
}

View File

@@ -1,15 +0,0 @@
# Copilot Coding Agent Configuration
This repository includes a minimal opt-in configuration and CI workflow to allow the GitHub Copilot coding agent to open and validate PRs.
- .copilot-agent.yml: opt-in config for automated agents
- .github/agents/copilot-agent.yml: detailed agent policy configuration
- .github/workflows/dotnetcore.yml: CI runs on PRs touching the solution, source, or tests to validate changes
- AGENTS.md: general instructions for Copilot coding agent with project-specific guidelines
Maintainers can adjust the allowed paths or disable the agent by editing or removing .copilot-agent.yml.
Notes:
- The agent can create, modify, and delete files within the allowed paths (src, tests, README.md, AGENTS.md)
- All changes require review before merge
- If build/test paths are different, update the workflow accordingly; this workflow targets SharpCompress.sln and the SharpCompress.Test test project.

25
.github/prompts/plan-async.prompt.md vendored Normal file
View File

@@ -0,0 +1,25 @@
# Plan: Implement Missing Async Functionality in SharpCompress
SharpCompress has async support for low-level stream operations and Reader/Writer APIs, but critical entry points (Archive.Open, factory methods, initialization) remain synchronous. This plan adds async overloads for all user-facing I/O operations and fixes existing async bugs, enabling full end-to-end async workflows.
## Steps
1. **Add async factory methods** to [ArchiveFactory.cs](src/SharpCompress/Factories/ArchiveFactory.cs), [ReaderFactory.cs](src/SharpCompress/Factories/ReaderFactory.cs), and [WriterFactory.cs](src/SharpCompress/Factories/WriterFactory.cs) with `OpenAsync` and `CreateAsync` overloads accepting `CancellationToken`
2. **Implement async Open methods** on concrete archive types ([ZipArchive.cs](src/SharpCompress/Archives/Zip/ZipArchive.cs), [TarArchive.cs](src/SharpCompress/Archives/Tar/TarArchive.cs), [RarArchive.cs](src/SharpCompress/Archives/Rar/RarArchive.cs), [GZipArchive.cs](src/SharpCompress/Archives/GZip/GZipArchive.cs), [SevenZipArchive.cs](src/SharpCompress/Archives/SevenZip/SevenZipArchive.cs)) and reader types ([ZipReader.cs](src/SharpCompress/Readers/Zip/ZipReader.cs), [TarReader.cs](src/SharpCompress/Readers/Tar/TarReader.cs), etc.)
3. **Convert archive initialization logic to async** including header reading, volume loading, and format signature detection across archive constructors and internal initialization methods
4. **Fix LZMA decoder async bugs** in [LzmaStream.cs](src/SharpCompress/Compressors/LZMA/LzmaStream.cs), [Decoder.cs](src/SharpCompress/Compressors/LZMA/Decoder.cs), and [OutWindow.cs](src/SharpCompress/Compressors/LZMA/OutWindow.cs) to enable true async 7Zip support and remove `NonDisposingStream` workaround
5. **Complete Rar async implementation** by converting `UnpackV2017` methods to async in [UnpackV2017.cs](src/SharpCompress/Compressors/Rar/UnpackV2017.cs) and updating Rar20 decompression
6. **Add comprehensive async tests** covering all new async entry points, cancellation scenarios, and concurrent operations across all archive formats in test files
## Further Considerations
1. **Breaking changes** - Should new async methods be added alongside existing sync methods (non-breaking), or should sync methods eventually be deprecated? Recommend additive approach for backward compatibility.
2. **Performance impact** - Header parsing for formats like Zip/Tar is often small; consider whether truly async parsing adds value vs sync parsing wrapped in Task, or make it conditional based on stream type (network vs file).
3. **7Zip complexity** - The LZMA async bug fix (Step 4) may be challenging due to state management in the decoder; consider whether to scope it separately or implement a simpler workaround that maintains correctness.

123
.github/prompts/plan-for-next.prompt.md vendored Normal file
View File

@@ -0,0 +1,123 @@
# Plan: Modernize SharpCompress Public API
Based on comprehensive analysis, the API has several inconsistencies around factory patterns, async support, format capabilities, and options classes. Most improvements can be done incrementally without breaking changes.
## Steps
1. **Standardize factory patterns** by deprecating format-specific static `Open` methods in [Archives/Zip/ZipArchive.cs](src/SharpCompress/Archives/Zip/ZipArchive.cs), [Archives/Tar/TarArchive.cs](src/SharpCompress/Archives/Tar/TarArchive.cs), etc. in favor of centralized [Factories/ArchiveFactory.cs](src/SharpCompress/Factories/ArchiveFactory.cs)
2. **Complete async implementation** in [Writers/Zip/ZipWriter.cs](src/SharpCompress/Writers/Zip/ZipWriter.cs) and other writers that currently use sync-over-async, implementing true async I/O throughout the writer hierarchy
3. **Unify options classes** by making [Common/ExtractionOptions.cs](src/SharpCompress/Common/ExtractionOptions.cs) inherit from `OptionsBase` and adding progress reporting to extraction methods consistently
4. **Clarify GZip semantics** in [Archives/GZip/GZipArchive.cs](src/SharpCompress/Archives/GZip/GZipArchive.cs) by adding XML documentation explaining single-entry limitation and relationship to GZip compression used in Tar.gz
## Further Considerations
1. **Breaking changes roadmap** - Should we plan a major version (2.0) to remove deprecated factory methods, clean up `ArchiveType` enum (remove Arc/Arj or add full support), and consolidate naming patterns?
2. **Progress reporting consistency** - Should `IProgress<ArchiveExtractionProgress<IEntry>>` be added to all extraction extension methods or consolidated into options classes?
## Detailed Analysis
### Factory Pattern Issues
Three different factory patterns exist with overlapping functionality:
1. **Static Factories**: ArchiveFactory, ReaderFactory, WriterFactory
2. **Instance Factories**: IArchiveFactory, IReaderFactory, IWriterFactory
3. **Format-specific static methods**: Each archive class has static `Open` methods
**Example confusion:**
```csharp
// Three ways to open a Zip archive - which is recommended?
var archive1 = ArchiveFactory.Open("file.zip");
var archive2 = ZipArchive.Open("file.zip");
var archive3 = ArchiveFactory.AutoFactory.Open(fileInfo, options);
```
### Async Support Gaps
Base `IWriter` interface has async methods, but writer implementations provide minimal async support. Most writers just call synchronous methods:
```csharp
public virtual async Task WriteAsync(...)
{
// Default implementation calls synchronous version
Write(filename, source, modificationTime);
await Task.CompletedTask.ConfigureAwait(false);
}
```
Real async implementations only in:
- `TarWriter` - Proper async implementation
- Most other writers use sync-over-async
### GZip Archive Special Case
GZip is treated as both a compression format and an archive format, but only supports single-entry archives:
```csharp
protected override GZipArchiveEntry CreateEntryInternal(...)
{
if (Entries.Any())
{
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
}
// ...
}
```
### Options Class Hierarchy
```
OptionsBase (LeaveStreamOpen, ArchiveEncoding)
├─ ReaderOptions (LookForHeader, Password, DisableCheckIncomplete, BufferSize, ExtensionHint, Progress)
├─ WriterOptions (CompressionType, CompressionLevel, Progress)
│ ├─ ZipWriterOptions (ArchiveComment, UseZip64)
│ ├─ TarWriterOptions (FinalizeArchiveOnClose, HeaderFormat)
│ └─ GZipWriterOptions (no additional properties)
└─ ExtractionOptions (standalone - Overwrite, ExtractFullPath, PreserveFileTime, PreserveAttributes)
```
**Issues:**
- `ExtractionOptions` doesn't inherit from `OptionsBase` - no encoding support during extraction
- Progress reporting inconsistency between readers and extraction
- Obsolete properties (`ChecksumIsValid`, `Version`) with unclear migration path
### Implementation Priorities
**High Priority (Non-Breaking):**
1. Add API usage guide (Archive vs Reader, factory recommendations, async best practices)
2. Fix progress reporting consistency
3. Complete async implementation in writers
**Medium Priority (Next Major Version):**
1. Unify factory pattern - deprecate format-specific static `Open` methods
2. Clean up options classes - make `ExtractionOptions` inherit from `OptionsBase`
3. Clarify archive types - remove Arc/Arj from `ArchiveType` enum or add full support
4. Standardize naming across archive types
**Low Priority:**
1. Add BZip2 archive support similar to GZipArchive
2. Complete obsolete property cleanup with migration guide
### Backward Compatibility Strategy
**Safe (Non-Breaking) Changes:**
- Add new methods to interfaces (use default implementations)
- Add new options properties (with defaults)
- Add new factory methods
- Improve async implementations
- Add progress reporting support
**Breaking Changes to Avoid:**
- ❌ Removing format-specific `Open` methods (deprecate instead)
- ❌ Changing `LeaveStreamOpen` default (currently `true`)
- ❌ Removing obsolete properties before major version bump
- ❌ Changing return types or signatures of existing methods
**Deprecation Pattern:**
- Use `[Obsolete]` for one major version
- Use `[EditorBrowsable(EditorBrowsableState.Never)]` in next major version
- Remove in following major version

View File

@@ -28,14 +28,38 @@ SharpCompress is a pure C# compression library supporting multiple archive forma
## Code Formatting
**Copilot agents: You MUST run the `format` task after making code changes to ensure consistency.**
- Use CSharpier for code formatting to ensure consistent style across the project
- CSharpier is configured as a local tool in `.config/dotnet-tools.json`
- Restore tools with: `dotnet tool restore`
- Format files from the project root with: `dotnet csharpier .`
- **Run `dotnet csharpier .` from the project root after making code changes before committing**
- Configure your IDE to format on save using CSharpier for the best experience
### Commands
1. **Restore tools** (first time only):
```bash
dotnet tool restore
```
2. **Check if files are formatted correctly** (doesn't modify files):
```bash
dotnet csharpier check .
```
- Exit code 0: All files are properly formatted
- Exit code 1: Some files need formatting (will show which files and differences)
3. **Format files** (modifies files):
```bash
dotnet csharpier format .
```
- Formats all files in the project to match CSharpier style
- Run from project root directory
4. **Configure your IDE** to format on save using CSharpier for the best experience
### Additional Notes
- The project also uses `.editorconfig` for editor settings (indentation, encoding, etc.)
- Let CSharpier handle code style while `.editorconfig` handles editor behavior
- Always run `dotnet csharpier check .` before committing to verify formatting
## Project Setup and Structure

View File

@@ -1,19 +1,18 @@
<Project>
<ItemGroup>
<PackageVersion Include="Bullseye" Version="6.0.0" />
<PackageVersion Include="Bullseye" Version="6.1.0" />
<PackageVersion Include="AwesomeAssertions" Version="9.3.0" />
<PackageVersion Include="Glob" Version="1.1.9" />
<PackageVersion Include="JetBrains.Profiler.SelfApi" Version="2.5.14" />
<PackageVersion Include="JetBrains.Profiler.SelfApi" Version="2.5.15" />
<PackageVersion Include="Microsoft.Bcl.AsyncInterfaces" Version="10.0.0" />
<PackageVersion Include="Microsoft.NET.Test.Sdk" Version="18.0.1" />
<PackageVersion Include="Mono.Posix.NETStandard" Version="1.0.0" />
<PackageVersion Include="SimpleExec" Version="12.0.0" />
<PackageVersion Include="SimpleExec" Version="12.1.0" />
<PackageVersion Include="System.Text.Encoding.CodePages" Version="10.0.0" />
<PackageVersion Include="System.Buffers" Version="4.6.1" />
<PackageVersion Include="System.Memory" Version="4.6.3" />
<PackageVersion Include="System.Text.Encoding.CodePages" Version="10.0.0" />
<PackageVersion Include="xunit" Version="2.9.3" />
<PackageVersion Include="xunit.runner.visualstudio" Version="3.1.5" />
<PackageVersion Include="ZstdSharp.Port" Version="0.8.6" />
<PackageVersion Include="Microsoft.NET.ILLink.Tasks" Version="10.0.0" />
<PackageVersion Include="Microsoft.SourceLink.GitHub" Version="8.0.0" />
<PackageVersion Include="Microsoft.NETFramework.ReferenceAssemblies" Version="1.0.3" />

View File

@@ -24,7 +24,7 @@
1. SOLID Rars are only supported in the RarReader API.
2. Zip format supports pkware and WinzipAES encryption. However, encrypted LZMA is not supported. Zip64 reading/writing is supported but only with seekable streams as the Zip spec doesn't support Zip64 data in post data descriptors. Deflate64 is only supported for reading. See [Zip Format Notes](#zip-format-notes) for details on multi-volume archives and streaming behavior.
3. The Tar format requires a file size in the header. If no size is specified to the TarWriter and the stream is not seekable, then an exception will be thrown.
4. The 7Zip format doesn't allow for reading as a forward-only stream so 7Zip is only supported through the Archive API
4. The 7Zip format doesn't allow for reading as a forward-only stream so 7Zip is only supported through the Archive API. See [7Zip Format Notes](#7zip-format-notes) for details on async extraction behavior.
5. LZip has no support for extra data like the file name or timestamp. There is a default filename used when looking at the entry Key on the archive.
### Zip Format Notes
@@ -32,6 +32,18 @@
- Multi-volume/split ZIP archives require ZipArchive (seekable streams) as ZipReader cannot seek across volume files.
- ZipReader processes entries from LocalEntry headers (which include directory entries ending with `/`) and intentionally skips DirectoryEntry headers from the central directory, as they are redundant in streaming mode - all entry data comes from LocalEntry headers which ZipReader has already processed.
### 7Zip Format Notes
- **Async Extraction Performance**: When using async extraction methods (e.g., `ExtractAllEntries()` with `MoveToNextEntryAsync()`), each file creates its own decompression stream to avoid state corruption in the LZMA decoder. This is less efficient than synchronous extraction, which can reuse a single decompression stream for multiple files in the same folder.
**Performance Impact**: For archives with many small files in the same compression folder, async extraction will be slower than synchronous extraction because it must:
1. Create a new LZMA decoder for each file
2. Skip through the decompressed data to reach each file's starting position
**Recommendation**: For best performance with 7Zip archives, use synchronous extraction methods (`MoveToNextEntry()` and `WriteEntryToDirectory()`) when possible. Use async methods only when you need to avoid blocking the thread (e.g., in UI applications or async-only contexts).
**Technical Details**: 7Zip archives group files into "folders" (compression units), where all files in a folder share one continuous LZMA-compressed stream. The LZMA decoder maintains internal state (dictionary window, decoder positions) that assumes sequential, non-interruptible processing. Async operations can yield control during awaits, which would corrupt this shared state. To avoid this, async extraction creates a fresh decoder stream for each file.
## Compression Streams
For those who want to directly compress/decompress bits. The single file formats are represented here as well. However, BZip2, LZip and XZ have no metadata (GZip has a little) so using them without something like a Tar file makes little sense.

View File

@@ -87,20 +87,17 @@ memoryStream.Position = 0;
### Extract all files from a rar file to a directory using RarArchive
Note: Extracting a solid rar or 7z file needs to be done in sequential order to get acceptable decompression speed.
It is explicitly recommended to use `ExtractAllEntries` when extracting an entire `IArchive` instead of iterating over all its `Entries`.
Alternatively, use `IArchive.WriteToDirectory`.
`ExtractAllEntries` is primarily intended for solid archives (like solid Rar) or 7Zip archives, where sequential extraction provides the best performance. For general/simple extraction with any supported archive type, use `archive.WriteToDirectory()` instead.
```C#
using (var archive = RarArchive.Open("Test.rar"))
{
using (var reader = archive.ExtractAllEntries())
// Simple extraction with RarArchive; this WriteToDirectory pattern works for all archive types
archive.WriteToDirectory(@"D:\temp", new ExtractionOptions()
{
reader.WriteAllToDirectory(@"D:\temp", new ExtractionOptions()
{
ExtractFullPath = true,
Overwrite = true
});
}
ExtractFullPath = true,
Overwrite = true
});
}
```
@@ -116,6 +113,41 @@ using (var archive = RarArchive.Open("Test.rar"))
}
```
### Extract solid Rar or 7Zip archives with manual progress reporting
`ExtractAllEntries` only works for solid archives (Rar) or 7Zip archives. For optimal performance with these archive types, use this method:
```C#
using (var archive = RarArchive.Open("archive.rar")) // Must be solid Rar or 7Zip
{
if (archive.IsSolid || archive.Type == ArchiveType.SevenZip)
{
// Calculate total size for progress reporting
double totalSize = archive.Entries.Where(e => !e.IsDirectory).Sum(e => e.Size);
long completed = 0;
using (var reader = archive.ExtractAllEntries())
{
while (reader.MoveToNextEntry())
{
if (!reader.Entry.IsDirectory)
{
reader.WriteEntryToDirectory(@"D:\output", new ExtractionOptions()
{
ExtractFullPath = true,
Overwrite = true
});
completed += reader.Entry.Size;
double progress = completed / totalSize;
Console.WriteLine($"Progress: {progress:P}");
}
}
}
}
}
```
### Use ReaderFactory to autodetect archive type and Open the entry stream
```C#
@@ -298,14 +330,12 @@ using (var writer = WriterFactory.Open(stream, ArchiveType.Zip, CompressionType.
```C#
using (var archive = ZipArchive.Open("archive.zip"))
{
using (var reader = archive.ExtractAllEntries())
{
await reader.WriteAllToDirectoryAsync(
@"C:\output",
new ExtractionOptions() { ExtractFullPath = true, Overwrite = true },
cancellationToken
);
}
// Simple async extraction - works for all archive types
await archive.WriteToDirectoryAsync(
@"C:\output",
new ExtractionOptions() { ExtractFullPath = true, Overwrite = true },
cancellationToken
);
}
```

View File

@@ -11,6 +11,7 @@ const string Restore = "restore";
const string Build = "build";
const string Test = "test";
const string Format = "format";
const string CheckFormat = "check-format";
const string Publish = "publish";
Target(
@@ -42,12 +43,20 @@ Target(
Target(
Format,
() =>
{
Run("dotnet", "tool restore");
Run("dotnet", "csharpier format .");
}
);
Target(
CheckFormat,
() =>
{
Run("dotnet", "tool restore");
Run("dotnet", "csharpier check .");
}
);
Target(Restore, [Format], () => Run("dotnet", "restore"));
Target(Restore, [CheckFormat], () => Run("dotnet", "restore"));
Target(
Build,

View File

@@ -4,9 +4,9 @@
"net10.0": {
"Bullseye": {
"type": "Direct",
"requested": "[6.0.0, )",
"resolved": "6.0.0",
"contentHash": "vgwwXfzs7jJrskWH7saHRMgPzziq/e86QZNWY1MnMxd7e+De7E7EX4K3C7yrvaK9y02SJoLxNxcLG/q5qUAghw=="
"requested": "[6.1.0, )",
"resolved": "6.1.0",
"contentHash": "fltnAJDe0BEX5eymXGUq+il2rSUA0pHqUonNDRH2TrvRu8SkU17mYG0IVpdmG2ibtfhdjNrv4CuTCxHOwcozCA=="
},
"Glob": {
"type": "Direct",
@@ -16,9 +16,9 @@
},
"SimpleExec": {
"type": "Direct",
"requested": "[12.0.0, )",
"resolved": "12.0.0",
"contentHash": "ptxlWtxC8vM6Y6e3h9ZTxBBkOWnWrm/Sa1HT+2i1xcXY3Hx2hmKDZP5RShPf8Xr9D+ivlrXNy57ktzyH8kyt+Q=="
"requested": "[12.1.0, )",
"resolved": "12.1.0",
"contentHash": "PcCSAlMcKr5yTd571MgEMoGmoSr+omwziq2crB47lKP740lrmjuBocAUXHj+Q6LR6aUDFyhszot2wbtFJTClkA=="
}
}
}

View File

@@ -8,7 +8,7 @@ using SharpCompress.Readers;
namespace SharpCompress.Archives;
public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtractionListener
public abstract class AbstractArchive<TEntry, TVolume> : IArchive
where TEntry : IArchiveEntry
where TVolume : IVolume
{
@@ -17,11 +17,6 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
private bool _disposed;
private readonly SourceStream? _sourceStream;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionBegin;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionEnd;
public event EventHandler<CompressedBytesReadEventArgs>? CompressedBytesRead;
public event EventHandler<FilePartExtractionBeginEventArgs>? FilePartExtractionBegin;
protected ReaderOptions ReaderOptions { get; }
internal AbstractArchive(ArchiveType type, SourceStream sourceStream)
@@ -43,12 +38,6 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
public ArchiveType Type { get; }
void IArchiveExtractionListener.FireEntryExtractionBegin(IArchiveEntry entry) =>
EntryExtractionBegin?.Invoke(this, new ArchiveExtractionEventArgs<IArchiveEntry>(entry));
void IArchiveExtractionListener.FireEntryExtractionEnd(IArchiveEntry entry) =>
EntryExtractionEnd?.Invoke(this, new ArchiveExtractionEventArgs<IArchiveEntry>(entry));
private static Stream CheckStreams(Stream stream)
{
if (!stream.CanSeek || !stream.CanRead)
@@ -99,38 +88,12 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
}
}
void IArchiveExtractionListener.EnsureEntriesLoaded()
private void EnsureEntriesLoaded()
{
_lazyEntries.EnsureFullyLoaded();
_lazyVolumes.EnsureFullyLoaded();
}
void IExtractionListener.FireCompressedBytesRead(
long currentPartCompressedBytes,
long compressedReadBytes
) =>
CompressedBytesRead?.Invoke(
this,
new CompressedBytesReadEventArgs(
currentFilePartCompressedBytesRead: currentPartCompressedBytes,
compressedBytesRead: compressedReadBytes
)
);
void IExtractionListener.FireFilePartExtractionBegin(
string name,
long size,
long compressedSize
) =>
FilePartExtractionBegin?.Invoke(
this,
new FilePartExtractionBeginEventArgs(
compressedSize: compressedSize,
size: size,
name: name
)
);
/// <summary>
/// Use this method to extract all entries in an archive in order.
/// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
@@ -146,11 +109,11 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
{
if (!IsSolid && Type != ArchiveType.SevenZip)
{
throw new InvalidOperationException(
throw new SharpCompressException(
"ExtractAllEntries can only be used on solid archives or 7Zip archives (which require random access)."
);
}
((IArchiveExtractionListener)this).EnsureEntriesLoaded();
EnsureEntriesLoaded();
return CreateReaderForSolidExtraction();
}
@@ -173,7 +136,7 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
{
get
{
((IArchiveExtractionListener)this).EnsureEntriesLoaded();
EnsureEntriesLoaded();
return Entries.All(x => x.IsComplete);
}
}

View File

@@ -2,6 +2,8 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Factories;
using SharpCompress.IO;
@@ -24,6 +26,27 @@ public static class ArchiveFactory
return FindFactory<IArchiveFactory>(stream).Open(stream, readerOptions);
}
/// <summary>
/// Opens an Archive for random access asynchronously
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
/// <returns></returns>
public static async Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
readerOptions ??= new ReaderOptions();
stream = SharpCompressStream.Create(stream, bufferSize: readerOptions.BufferSize);
var factory = FindFactory<IArchiveFactory>(stream);
return await factory
.OpenAsync(stream, readerOptions, cancellationToken)
.ConfigureAwait(false);
}
public static IWritableArchive Create(ArchiveType type)
{
var factory = Factory
@@ -49,6 +72,22 @@ public static class ArchiveFactory
return Open(new FileInfo(filePath), options);
}
/// <summary>
/// Opens an Archive from a filepath asynchronously.
/// </summary>
/// <param name="filePath"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
public static Task<IArchive> OpenAsync(
string filePath,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
filePath.NotNullOrEmpty(nameof(filePath));
return OpenAsync(new FileInfo(filePath), options, cancellationToken);
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
@@ -61,6 +100,24 @@ public static class ArchiveFactory
return FindFactory<IArchiveFactory>(fileInfo).Open(fileInfo, options);
}
/// <summary>
/// Opens an Archive from a FileInfo object asynchronously.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
options ??= new ReaderOptions { LeaveStreamOpen = false };
var factory = FindFactory<IArchiveFactory>(fileInfo);
return await factory.OpenAsync(fileInfo, options, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Constructor with IEnumerable FileInfo objects, multi and split support.
/// </summary>
@@ -87,6 +144,40 @@ public static class ArchiveFactory
return FindFactory<IMultiArchiveFactory>(fileInfo).Open(filesArray, options);
}
/// <summary>
/// Opens a multi-part archive from files asynchronously.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
IEnumerable<FileInfo> fileInfos,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
fileInfos.NotNull(nameof(fileInfos));
var filesArray = fileInfos.ToArray();
if (filesArray.Length == 0)
{
throw new InvalidOperationException("No files to open");
}
var fileInfo = filesArray[0];
if (filesArray.Length == 1)
{
return await OpenAsync(fileInfo, options, cancellationToken).ConfigureAwait(false);
}
fileInfo.NotNull(nameof(fileInfo));
options ??= new ReaderOptions { LeaveStreamOpen = false };
var factory = FindFactory<IMultiArchiveFactory>(fileInfo);
return await factory
.OpenAsync(filesArray, options, cancellationToken)
.ConfigureAwait(false);
}
/// <summary>
/// Constructor with IEnumerable FileInfo objects, multi and split support.
/// </summary>
@@ -113,6 +204,41 @@ public static class ArchiveFactory
return FindFactory<IMultiArchiveFactory>(firstStream).Open(streamsArray, options);
}
/// <summary>
/// Opens a multi-part archive from streams asynchronously.
/// </summary>
/// <param name="streams"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
IEnumerable<Stream> streams,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
streams.NotNull(nameof(streams));
var streamsArray = streams.ToArray();
if (streamsArray.Length == 0)
{
throw new InvalidOperationException("No streams");
}
var firstStream = streamsArray[0];
if (streamsArray.Length == 1)
{
return await OpenAsync(firstStream, options, cancellationToken).ConfigureAwait(false);
}
firstStream.NotNull(nameof(firstStream));
options ??= new ReaderOptions();
var factory = FindFactory<IMultiArchiveFactory>(firstStream);
return await factory
.OpenAsync(streamsArray, options, cancellationToken)
.ConfigureAwait(false);
}
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
@@ -123,7 +249,7 @@ public static class ArchiveFactory
)
{
using var archive = Open(sourceArchive);
archive.ExtractToDirectory(destinationDirectory, options);
archive.WriteToDirectory(destinationDirectory, options);
}
private static T FindFactory<T>(FileInfo finfo)

View File

@@ -1,6 +1,8 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
@@ -20,11 +22,30 @@ class AutoArchiveFactory : IArchiveFactory
int bufferSize = ReaderOptions.DefaultBufferSize
) => throw new NotSupportedException();
public Task<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
) => throw new NotSupportedException();
public FileInfo? GetFilePart(int index, FileInfo part1) => throw new NotSupportedException();
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
ArchiveFactory.Open(stream, readerOptions);
public Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => ArchiveFactory.OpenAsync(stream, readerOptions, cancellationToken);
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
ArchiveFactory.Open(fileInfo, readerOptions);
public Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => ArchiveFactory.OpenAsync(fileInfo, readerOptions, cancellationToken);
}

View File

@@ -102,6 +102,70 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
);
}
/// <summary>
/// Opens a GZipArchive asynchronously from a stream.
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return await Task.FromResult(Open(stream, readerOptions)).ConfigureAwait(false);
}
/// <summary>
/// Opens a GZipArchive asynchronously from a FileInfo.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return await Task.FromResult(Open(fileInfo, readerOptions)).ConfigureAwait(false);
}
/// <summary>
/// Opens a GZipArchive asynchronously from multiple streams.
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return await Task.FromResult(Open(streams, readerOptions)).ConfigureAwait(false);
}
/// <summary>
/// Opens a GZipArchive asynchronously from multiple FileInfo objects.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions"></param>
/// <param name="cancellationToken"></param>
public static async Task<IArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return await Task.FromResult(Open(fileInfos, readerOptions)).ConfigureAwait(false);
}
public static GZipArchive Create() => new();
/// <summary>
@@ -167,6 +231,28 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
return true;
}
public static async Task<bool> IsGZipFileAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
// read the header on the first read
byte[] header = new byte[10];
// workitem 8501: handle edge case (decompress empty stream)
if (!await stream.ReadFullyAsync(header, cancellationToken).ConfigureAwait(false))
{
return false;
}
if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
{
return false;
}
return true;
}
internal GZipArchive()
: base(ArchiveType.GZip) { }

View File

@@ -7,12 +7,6 @@ namespace SharpCompress.Archives;
public interface IArchive : IDisposable
{
event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionBegin;
event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>> EntryExtractionEnd;
event EventHandler<CompressedBytesReadEventArgs> CompressedBytesRead;
event EventHandler<FilePartExtractionBeginEventArgs> FilePartExtractionBegin;
IEnumerable<IArchiveEntry> Entries { get; }
IEnumerable<IVolume> Volumes { get; }

View File

@@ -1,3 +1,4 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
@@ -8,127 +9,153 @@ namespace SharpCompress.Archives;
public static class IArchiveEntryExtensions
{
public static void WriteTo(this IArchiveEntry archiveEntry, Stream streamToWriteTo)
private const int BufferSize = 81920;
/// <param name="archiveEntry">The archive entry to extract.</param>
extension(IArchiveEntry archiveEntry)
{
if (archiveEntry.IsDirectory)
/// <summary>
/// Extract entry to the specified stream.
/// </summary>
/// <param name="streamToWriteTo">The stream to write the entry content to.</param>
/// <param name="progress">Optional progress reporter for tracking extraction progress.</param>
public void WriteTo(Stream streamToWriteTo, IProgress<ProgressReport>? progress = null)
{
throw new ExtractionException("Entry is a file directory and cannot be extracted.");
if (archiveEntry.IsDirectory)
{
throw new ExtractionException("Entry is a file directory and cannot be extracted.");
}
using var entryStream = archiveEntry.OpenEntryStream();
var sourceStream = WrapWithProgress(entryStream, archiveEntry, progress);
sourceStream.CopyTo(streamToWriteTo, BufferSize);
}
var streamListener = (IArchiveExtractionListener)archiveEntry.Archive;
streamListener.EnsureEntriesLoaded();
streamListener.FireEntryExtractionBegin(archiveEntry);
streamListener.FireFilePartExtractionBegin(
archiveEntry.Key ?? "Key",
archiveEntry.Size,
archiveEntry.CompressedSize
);
var entryStream = archiveEntry.OpenEntryStream();
using (entryStream)
/// <summary>
/// Extract entry to the specified stream asynchronously.
/// </summary>
/// <param name="streamToWriteTo">The stream to write the entry content to.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <param name="progress">Optional progress reporter for tracking extraction progress.</param>
public async Task WriteToAsync(
Stream streamToWriteTo,
IProgress<ProgressReport>? progress = null,
CancellationToken cancellationToken = default
)
{
using Stream s = new ListeningStream(streamListener, entryStream);
s.CopyTo(streamToWriteTo);
if (archiveEntry.IsDirectory)
{
throw new ExtractionException("Entry is a file directory and cannot be extracted.");
}
using var entryStream = await archiveEntry.OpenEntryStreamAsync(cancellationToken);
var sourceStream = WrapWithProgress(entryStream, archiveEntry, progress);
await sourceStream
.CopyToAsync(streamToWriteTo, BufferSize, cancellationToken)
.ConfigureAwait(false);
}
streamListener.FireEntryExtractionEnd(archiveEntry);
}
public static async Task WriteToAsync(
this IArchiveEntry archiveEntry,
Stream streamToWriteTo,
CancellationToken cancellationToken = default
private static Stream WrapWithProgress(
Stream source,
IArchiveEntry entry,
IProgress<ProgressReport>? progress
)
{
if (archiveEntry.IsDirectory)
if (progress is null)
{
throw new ExtractionException("Entry is a file directory and cannot be extracted.");
return source;
}
var streamListener = (IArchiveExtractionListener)archiveEntry.Archive;
streamListener.EnsureEntriesLoaded();
streamListener.FireEntryExtractionBegin(archiveEntry);
streamListener.FireFilePartExtractionBegin(
archiveEntry.Key ?? "Key",
archiveEntry.Size,
archiveEntry.CompressedSize
var entryPath = entry.Key ?? string.Empty;
var totalBytes = GetEntrySizeSafe(entry);
return new ProgressReportingStream(
source,
progress,
entryPath,
totalBytes,
leaveOpen: true
);
var entryStream = archiveEntry.OpenEntryStream();
using (entryStream)
{
using Stream s = new ListeningStream(streamListener, entryStream);
await s.CopyToAsync(streamToWriteTo, 81920, cancellationToken).ConfigureAwait(false);
}
streamListener.FireEntryExtractionEnd(archiveEntry);
}
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteToDirectory(
this IArchiveEntry entry,
string destinationDirectory,
ExtractionOptions? options = null
) =>
ExtractionMethods.WriteEntryToDirectory(
entry,
destinationDirectory,
options,
entry.WriteToFile
);
private static long? GetEntrySizeSafe(IArchiveEntry entry)
{
try
{
var size = entry.Size;
return size >= 0 ? size : null;
}
catch (NotImplementedException)
{
return null;
}
}
/// <summary>
/// Extract to specific directory asynchronously, retaining filename
/// </summary>
public static Task WriteToDirectoryAsync(
this IArchiveEntry entry,
string destinationDirectory,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default
) =>
ExtractionMethods.WriteEntryToDirectoryAsync(
entry,
destinationDirectory,
options,
(x, opt) => entry.WriteToFileAsync(x, opt, cancellationToken),
cancellationToken
);
extension(IArchiveEntry entry)
{
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public void WriteToDirectory(
string destinationDirectory,
ExtractionOptions? options = null
) =>
ExtractionMethods.WriteEntryToDirectory(
entry,
destinationDirectory,
options,
entry.WriteToFile
);
/// <summary>
/// Extract to specific file
/// </summary>
public static void WriteToFile(
this IArchiveEntry entry,
string destinationFileName,
ExtractionOptions? options = null
) =>
ExtractionMethods.WriteEntryToFile(
entry,
destinationFileName,
options,
(x, fm) =>
{
using var fs = File.Open(destinationFileName, fm);
entry.WriteTo(fs);
}
);
/// <summary>
/// Extract to specific directory asynchronously, retaining filename
/// </summary>
public Task WriteToDirectoryAsync(
string destinationDirectory,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default
) =>
ExtractionMethods.WriteEntryToDirectoryAsync(
entry,
destinationDirectory,
options,
entry.WriteToFileAsync,
cancellationToken
);
/// <summary>
/// Extract to specific file asynchronously
/// </summary>
public static Task WriteToFileAsync(
this IArchiveEntry entry,
string destinationFileName,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default
) =>
ExtractionMethods.WriteEntryToFileAsync(
entry,
destinationFileName,
options,
async (x, fm) =>
{
using var fs = File.Open(destinationFileName, fm);
await entry.WriteToAsync(fs, cancellationToken).ConfigureAwait(false);
},
cancellationToken
);
/// <summary>
/// Extract to specific file
/// </summary>
public void WriteToFile(string destinationFileName, ExtractionOptions? options = null) =>
ExtractionMethods.WriteEntryToFile(
entry,
destinationFileName,
options,
(x, fm) =>
{
using var fs = File.Open(destinationFileName, fm);
entry.WriteTo(fs);
}
);
/// <summary>
/// Extract to specific file asynchronously
/// </summary>
public Task WriteToFileAsync(
string destinationFileName,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default
) =>
ExtractionMethods.WriteEntryToFileAsync(
entry,
destinationFileName,
options,
async (x, fm, ct) =>
{
using var fs = File.Open(destinationFileName, fm);
await entry.WriteToAsync(fs, null, ct).ConfigureAwait(false);
},
cancellationToken
);
}
}

View File

@@ -2,6 +2,7 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
@@ -9,69 +10,159 @@ namespace SharpCompress.Archives;
public static class IArchiveExtensions
{
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteToDirectory(
this IArchive archive,
string destinationDirectory,
ExtractionOptions? options = null
)
/// <param name="archive">The archive to extract.</param>
extension(IArchive archive)
{
using var reader = archive.ExtractAllEntries();
reader.WriteAllToDirectory(destinationDirectory, options);
}
public static void ExtractToDirectory(
this IArchive archive,
string destination,
ExtractionOptions? options = null,
Action<double>? progressReport = null,
CancellationToken cancellationToken = default
)
{
// Prepare for progress reporting
var totalBytes = archive.TotalUncompressSize;
var bytesRead = 0L;
// Tracking for created directories.
var seenDirectories = new HashSet<string>();
// Extract
foreach (var entry in archive.Entries)
/// <summary>
/// Extract to specific directory with progress reporting
/// </summary>
/// <param name="destinationDirectory">The folder to extract into.</param>
/// <param name="options">Extraction options.</param>
/// <param name="progress">Optional progress reporter for tracking extraction progress.</param>
public void WriteToDirectory(
string destinationDirectory,
ExtractionOptions? options = null,
IProgress<ProgressReport>? progress = null
)
{
cancellationToken.ThrowIfCancellationRequested();
if (entry.IsDirectory)
// For solid archives (Rar, 7Zip), use the optimized reader-based approach
if (archive.IsSolid || archive.Type == ArchiveType.SevenZip)
{
var dirPath = Path.Combine(destination, entry.Key.NotNull("Entry Key is null"));
if (
Path.GetDirectoryName(dirPath + "/") is { } emptyDirectory
&& seenDirectories.Add(dirPath)
)
{
Directory.CreateDirectory(emptyDirectory);
}
continue;
using var reader = archive.ExtractAllEntries();
reader.WriteAllToDirectory(destinationDirectory, options);
}
// Create each directory if not already created
var path = Path.Combine(destination, entry.Key.NotNull("Entry Key is null"));
if (Path.GetDirectoryName(path) is { } directory)
else
{
if (!Directory.Exists(directory) && !seenDirectories.Contains(directory))
{
Directory.CreateDirectory(directory);
seenDirectories.Add(directory);
}
// For non-solid archives, extract entries directly
archive.WriteToDirectoryInternal(destinationDirectory, options, progress);
}
}
// Write file
entry.WriteToFile(path, options);
private void WriteToDirectoryInternal(
string destinationDirectory,
ExtractionOptions? options,
IProgress<ProgressReport>? progress
)
{
// Prepare for progress reporting
var totalBytes = archive.TotalUncompressSize;
var bytesRead = 0L;
// Update progress
bytesRead += entry.Size;
progressReport?.Invoke(bytesRead / (double)totalBytes);
// Tracking for created directories.
var seenDirectories = new HashSet<string>();
// Extract
foreach (var entry in archive.Entries)
{
if (entry.IsDirectory)
{
var dirPath = Path.Combine(
destinationDirectory,
entry.Key.NotNull("Entry Key is null")
);
if (
Path.GetDirectoryName(dirPath + "/") is { } parentDirectory
&& seenDirectories.Add(dirPath)
)
{
Directory.CreateDirectory(parentDirectory);
}
continue;
}
// Use the entry's WriteToDirectory method which respects ExtractionOptions
entry.WriteToDirectory(destinationDirectory, options);
// Update progress
bytesRead += entry.Size;
progress?.Report(
new ProgressReport(entry.Key ?? string.Empty, bytesRead, totalBytes)
);
}
}
/// <summary>
/// Extract to specific directory asynchronously with progress reporting and cancellation support
/// </summary>
/// <param name="destinationDirectory">The folder to extract into.</param>
/// <param name="options">Extraction options.</param>
/// <param name="progress">Optional progress reporter for tracking extraction progress.</param>
/// <param name="cancellationToken">Optional cancellation token.</param>
public async Task WriteToDirectoryAsync(
string destinationDirectory,
ExtractionOptions? options = null,
IProgress<ProgressReport>? progress = null,
CancellationToken cancellationToken = default
)
{
// For solid archives (Rar, 7Zip), use the optimized reader-based approach
if (archive.IsSolid || archive.Type == ArchiveType.SevenZip)
{
using var reader = archive.ExtractAllEntries();
await reader.WriteAllToDirectoryAsync(
destinationDirectory,
options,
cancellationToken
);
}
else
{
// For non-solid archives, extract entries directly
await archive.WriteToDirectoryAsyncInternal(
destinationDirectory,
options,
progress,
cancellationToken
);
}
}
private async Task WriteToDirectoryAsyncInternal(
string destinationDirectory,
ExtractionOptions? options,
IProgress<ProgressReport>? progress,
CancellationToken cancellationToken
)
{
// Prepare for progress reporting
var totalBytes = archive.TotalUncompressSize;
var bytesRead = 0L;
// Tracking for created directories.
var seenDirectories = new HashSet<string>();
// Extract
foreach (var entry in archive.Entries)
{
cancellationToken.ThrowIfCancellationRequested();
if (entry.IsDirectory)
{
var dirPath = Path.Combine(
destinationDirectory,
entry.Key.NotNull("Entry Key is null")
);
if (
Path.GetDirectoryName(dirPath + "/") is { } parentDirectory
&& seenDirectories.Add(dirPath)
)
{
Directory.CreateDirectory(parentDirectory);
}
continue;
}
// Use the entry's WriteToDirectoryAsync method which respects ExtractionOptions
await entry
.WriteToDirectoryAsync(destinationDirectory, options, cancellationToken)
.ConfigureAwait(false);
// Update progress
bytesRead += entry.Size;
progress?.Report(
new ProgressReport(entry.Key ?? string.Empty, bytesRead, totalBytes)
);
}
}
}
}

View File

@@ -1,10 +0,0 @@
using SharpCompress.Common;
namespace SharpCompress.Archives;
internal interface IArchiveExtractionListener : IExtractionListener
{
void EnsureEntriesLoaded();
void FireEntryExtractionBegin(IArchiveEntry entry);
void FireEntryExtractionEnd(IArchiveEntry entry);
}

View File

@@ -1,4 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Factories;
using SharpCompress.Readers;
@@ -26,10 +28,34 @@ public interface IArchiveFactory : IFactory
/// <param name="readerOptions">reading options.</param>
IArchive Open(Stream stream, ReaderOptions? readerOptions = null);
/// <summary>
/// Opens an Archive for random access asynchronously.
/// </summary>
/// <param name="stream">An open, readable and seekable stream.</param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo">the file to open.</param>
/// <param name="readerOptions">reading options.</param>
IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null);
/// <summary>
/// Opens an Archive from a FileInfo object asynchronously.
/// </summary>
/// <param name="fileInfo">the file to open.</param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
}

View File

@@ -1,5 +1,7 @@
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Factories;
using SharpCompress.Readers;
@@ -27,10 +29,34 @@ public interface IMultiArchiveFactory : IFactory
/// <param name="readerOptions">reading options.</param>
IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null);
/// <summary>
/// Opens a multi-part archive from streams asynchronously.
/// </summary>
/// <param name="streams"></param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task<IArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
/// <summary>
/// Constructor with IEnumerable Stream objects, multi and split support.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions">reading options.</param>
IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null);
/// <summary>
/// Opens a multi-part archive from files asynchronously.
/// </summary>
/// <param name="fileInfos"></param>
/// <param name="readerOptions">reading options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task<IArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
);
}

View File

@@ -2,6 +2,8 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
@@ -181,6 +183,70 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
);
}
/// <summary>
/// Opens a RarArchive asynchronously from a stream.
/// </summary>
/// <param name="stream">Seekable stream containing the archive.</param>
/// <param name="readerOptions">Reading options; defaults are used when null.</param>
/// <param name="cancellationToken">Token checked before the open begins.</param>
/// <returns>A completed task producing the opened archive.</returns>
public static Task<IArchive> OpenAsync(
    Stream stream,
    ReaderOptions? readerOptions = null,
    CancellationToken cancellationToken = default
)
{
    // Open is synchronous; returning a completed task avoids building an
    // async state machine just to await Task.FromResult.
    cancellationToken.ThrowIfCancellationRequested();
    return Task.FromResult<IArchive>(Open(stream, readerOptions));
}

/// <summary>
/// Opens a RarArchive asynchronously from a FileInfo.
/// </summary>
/// <param name="fileInfo">File containing the archive.</param>
/// <param name="readerOptions">Reading options; defaults are used when null.</param>
/// <param name="cancellationToken">Token checked before the open begins.</param>
/// <returns>A completed task producing the opened archive.</returns>
public static Task<IArchive> OpenAsync(
    FileInfo fileInfo,
    ReaderOptions? readerOptions = null,
    CancellationToken cancellationToken = default
)
{
    cancellationToken.ThrowIfCancellationRequested();
    return Task.FromResult<IArchive>(Open(fileInfo, readerOptions));
}

/// <summary>
/// Opens a RarArchive asynchronously from multiple streams (multi-volume).
/// </summary>
/// <param name="streams">Volume streams in order.</param>
/// <param name="readerOptions">Reading options; defaults are used when null.</param>
/// <param name="cancellationToken">Token checked before the open begins.</param>
/// <returns>A completed task producing the opened archive.</returns>
public static Task<IArchive> OpenAsync(
    IReadOnlyList<Stream> streams,
    ReaderOptions? readerOptions = null,
    CancellationToken cancellationToken = default
)
{
    cancellationToken.ThrowIfCancellationRequested();
    return Task.FromResult<IArchive>(Open(streams, readerOptions));
}

/// <summary>
/// Opens a RarArchive asynchronously from multiple FileInfo objects (multi-volume).
/// </summary>
/// <param name="fileInfos">Volume files in order.</param>
/// <param name="readerOptions">Reading options; defaults are used when null.</param>
/// <param name="cancellationToken">Token checked before the open begins.</param>
/// <returns>A completed task producing the opened archive.</returns>
public static Task<IArchive> OpenAsync(
    IReadOnlyList<FileInfo> fileInfos,
    ReaderOptions? readerOptions = null,
    CancellationToken cancellationToken = default
)
{
    cancellationToken.ThrowIfCancellationRequested();
    return Task.FromResult<IArchive>(Open(fileInfos, readerOptions));
}
public static bool IsRarFile(string filePath) => IsRarFile(new FileInfo(filePath));
public static bool IsRarFile(FileInfo fileInfo)

View File

@@ -76,7 +76,7 @@ public class RarArchiveEntry : RarEntry, IArchiveEntry
stream = new RarStream(
archive.UnpackV1.Value,
FileHeader,
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>())
);
}
else
@@ -84,7 +84,7 @@ public class RarArchiveEntry : RarEntry, IArchiveEntry
stream = new RarStream(
archive.UnpackV2017.Value,
FileHeader,
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>())
);
}
@@ -100,7 +100,7 @@ public class RarArchiveEntry : RarEntry, IArchiveEntry
stream = new RarStream(
archive.UnpackV1.Value,
FileHeader,
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>())
);
}
else
@@ -108,7 +108,7 @@ public class RarArchiveEntry : RarEntry, IArchiveEntry
stream = new RarStream(
archive.UnpackV2017.Value,
FileHeader,
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive)
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>())
);
}

View File

@@ -2,6 +2,8 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.SevenZip;
using SharpCompress.Compressors.LZMA.Utilites;
@@ -103,6 +105,70 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
);
}
/// <summary>
/// Opens a SevenZipArchive asynchronously from a stream.
/// </summary>
/// <param name="stream">Seekable stream containing the archive.</param>
/// <param name="readerOptions">Reading options; defaults are used when null.</param>
/// <param name="cancellationToken">Token checked before the open begins.</param>
/// <returns>A completed task producing the opened archive.</returns>
public static Task<IArchive> OpenAsync(
    Stream stream,
    ReaderOptions? readerOptions = null,
    CancellationToken cancellationToken = default
)
{
    // Open is synchronous; returning a completed task avoids building an
    // async state machine just to await Task.FromResult.
    cancellationToken.ThrowIfCancellationRequested();
    return Task.FromResult<IArchive>(Open(stream, readerOptions));
}

/// <summary>
/// Opens a SevenZipArchive asynchronously from a FileInfo.
/// </summary>
/// <param name="fileInfo">File containing the archive.</param>
/// <param name="readerOptions">Reading options; defaults are used when null.</param>
/// <param name="cancellationToken">Token checked before the open begins.</param>
/// <returns>A completed task producing the opened archive.</returns>
public static Task<IArchive> OpenAsync(
    FileInfo fileInfo,
    ReaderOptions? readerOptions = null,
    CancellationToken cancellationToken = default
)
{
    cancellationToken.ThrowIfCancellationRequested();
    return Task.FromResult<IArchive>(Open(fileInfo, readerOptions));
}

/// <summary>
/// Opens a SevenZipArchive asynchronously from multiple streams.
/// </summary>
/// <param name="streams">Volume streams in order.</param>
/// <param name="readerOptions">Reading options; defaults are used when null.</param>
/// <param name="cancellationToken">Token checked before the open begins.</param>
/// <returns>A completed task producing the opened archive.</returns>
public static Task<IArchive> OpenAsync(
    IReadOnlyList<Stream> streams,
    ReaderOptions? readerOptions = null,
    CancellationToken cancellationToken = default
)
{
    cancellationToken.ThrowIfCancellationRequested();
    return Task.FromResult<IArchive>(Open(streams, readerOptions));
}

/// <summary>
/// Opens a SevenZipArchive asynchronously from multiple FileInfo objects.
/// </summary>
/// <param name="fileInfos">Volume files in order.</param>
/// <param name="readerOptions">Reading options; defaults are used when null.</param>
/// <param name="cancellationToken">Token checked before the open begins.</param>
/// <returns>A completed task producing the opened archive.</returns>
public static Task<IArchive> OpenAsync(
    IReadOnlyList<FileInfo> fileInfos,
    ReaderOptions? readerOptions = null,
    CancellationToken cancellationToken = default
)
{
    cancellationToken.ThrowIfCancellationRequested();
    return Task.FromResult<IArchive>(Open(fileInfos, readerOptions));
}
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
@@ -213,9 +279,7 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
private sealed class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
{
private readonly SevenZipArchive _archive;
private CFolder? _currentFolder;
private Stream? _currentStream;
private CFileItem? _currentItem;
private SevenZipEntry? _currentEntry;
internal SevenZipReader(ReaderOptions readerOptions, SevenZipArchive archive)
: base(readerOptions, ArchiveType.SevenZip) => this._archive = archive;
@@ -228,40 +292,135 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
stream.Position = 0;
foreach (var dir in entries.Where(x => x.IsDirectory))
{
_currentEntry = dir;
yield return dir;
}
foreach (
var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder)
)
// For non-directory entries, yield them without creating shared streams
// Each call to GetEntryStream() will create a fresh decompression stream
// to avoid state corruption issues with async operations
foreach (var entry in entries.Where(x => !x.IsDirectory))
{
_currentFolder = group.Key;
if (group.Key is null)
{
_currentStream = Stream.Null;
}
else
{
_currentStream = _archive._database?.GetFolderStream(
stream,
_currentFolder,
new PasswordProvider(Options.Password)
);
}
foreach (var entry in group)
{
_currentItem = entry.FilePart.Header;
yield return entry;
}
_currentEntry = entry;
yield return entry;
}
}
protected override EntryStream GetEntryStream() =>
CreateEntryStream(
new ReadOnlySubStream(
_currentStream.NotNull("currentStream is not null"),
_currentItem?.Size ?? 0
)
);
protected override EntryStream GetEntryStream()
{
    var entry = _currentEntry.NotNull("currentEntry is not null");

    // Directory entries carry no payload.
    if (entry.IsDirectory)
    {
        return CreateEntryStream(Stream.Null);
    }

    // A fresh decompression stream is created per file, so no decoder state is
    // shared between entries. Even so, the LZMA decoder's async implementation
    // has bugs that corrupt state on fresh streams, so the stream is wrapped in
    // SyncOnlyStream, which answers async calls via the sync code path.
    //
    // TODO: Fix the LZMA decoder async bugs (in LzmaStream, Decoder, OutWindow)
    // so this wrapper is no longer necessary.
    var compressed = entry.FilePart.GetCompressedStream();
    return CreateEntryStream(new SyncOnlyStream(compressed));
}
}
/// <summary>
/// WORKAROUND: answers every async call through the synchronous code path.
/// The LZMA decoder's async implementation is buggy and corrupts its state
/// (IndexOutOfRangeException, DataErrorException), so async reads/writes here
/// return already-completed tasks backed by the sync methods.
///
/// The proper fix is to repair the LZMA decoder's async methods
/// (LzmaStream.ReadAsync, Decoder.CodeAsync, OutWindow async operations),
/// but that requires deep changes to the decoder state machine.
/// </summary>
private sealed class SyncOnlyStream : Stream
{
    private readonly Stream _inner;

    public SyncOnlyStream(Stream baseStream)
    {
        _inner = baseStream;
    }

    public override bool CanRead => _inner.CanRead;
    public override bool CanSeek => _inner.CanSeek;
    public override bool CanWrite => _inner.CanWrite;
    public override long Length => _inner.Length;

    public override long Position
    {
        get => _inner.Position;
        set => _inner.Position = value;
    }

    // Synchronous members forward directly to the wrapped stream.
    public override int Read(byte[] buffer, int offset, int count) =>
        _inner.Read(buffer, offset, count);

    public override void Write(byte[] buffer, int offset, int count) =>
        _inner.Write(buffer, offset, count);

    public override long Seek(long offset, SeekOrigin origin) => _inner.Seek(offset, origin);

    public override void SetLength(long value) => _inner.SetLength(value);

    public override void Flush() => _inner.Flush();

    // Async members deliberately complete synchronously (see class remarks).
    public override Task<int> ReadAsync(
        byte[] buffer,
        int offset,
        int count,
        CancellationToken cancellationToken
    )
    {
        cancellationToken.ThrowIfCancellationRequested();
        return Task.FromResult(_inner.Read(buffer, offset, count));
    }

    public override Task WriteAsync(
        byte[] buffer,
        int offset,
        int count,
        CancellationToken cancellationToken
    )
    {
        cancellationToken.ThrowIfCancellationRequested();
        _inner.Write(buffer, offset, count);
        return Task.CompletedTask;
    }

    public override Task FlushAsync(CancellationToken cancellationToken)
    {
        cancellationToken.ThrowIfCancellationRequested();
        _inner.Flush();
        return Task.CompletedTask;
    }

#if !NETFRAMEWORK && !NETSTANDARD2_0
    public override ValueTask<int> ReadAsync(
        Memory<byte> buffer,
        CancellationToken cancellationToken = default
    )
    {
        cancellationToken.ThrowIfCancellationRequested();
        return new ValueTask<int>(_inner.Read(buffer.Span));
    }

    public override ValueTask WriteAsync(
        ReadOnlyMemory<byte> buffer,
        CancellationToken cancellationToken = default
    )
    {
        cancellationToken.ThrowIfCancellationRequested();
        _inner.Write(buffer.Span);
        return ValueTask.CompletedTask;
    }
#endif

    protected override void Dispose(bool disposing)
    {
        if (disposing)
        {
            _inner.Dispose();
        }
        base.Dispose(disposing);
    }
}
private class PasswordProvider : IPasswordProvider

View File

@@ -103,6 +103,70 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
);
}
/// <summary>
/// Opens a TarArchive asynchronously from a stream.
/// </summary>
/// <param name="stream">Stream containing the archive.</param>
/// <param name="readerOptions">Reading options; defaults are used when null.</param>
/// <param name="cancellationToken">Token checked before the open begins.</param>
/// <returns>A completed task producing the opened archive.</returns>
public static Task<IArchive> OpenAsync(
    Stream stream,
    ReaderOptions? readerOptions = null,
    CancellationToken cancellationToken = default
)
{
    // Open is synchronous; returning a completed task avoids building an
    // async state machine just to await Task.FromResult.
    cancellationToken.ThrowIfCancellationRequested();
    return Task.FromResult<IArchive>(Open(stream, readerOptions));
}

/// <summary>
/// Opens a TarArchive asynchronously from a FileInfo.
/// </summary>
/// <param name="fileInfo">File containing the archive.</param>
/// <param name="readerOptions">Reading options; defaults are used when null.</param>
/// <param name="cancellationToken">Token checked before the open begins.</param>
/// <returns>A completed task producing the opened archive.</returns>
public static Task<IArchive> OpenAsync(
    FileInfo fileInfo,
    ReaderOptions? readerOptions = null,
    CancellationToken cancellationToken = default
)
{
    cancellationToken.ThrowIfCancellationRequested();
    return Task.FromResult<IArchive>(Open(fileInfo, readerOptions));
}

/// <summary>
/// Opens a TarArchive asynchronously from multiple streams.
/// </summary>
/// <param name="streams">Volume streams in order.</param>
/// <param name="readerOptions">Reading options; defaults are used when null.</param>
/// <param name="cancellationToken">Token checked before the open begins.</param>
/// <returns>A completed task producing the opened archive.</returns>
public static Task<IArchive> OpenAsync(
    IReadOnlyList<Stream> streams,
    ReaderOptions? readerOptions = null,
    CancellationToken cancellationToken = default
)
{
    cancellationToken.ThrowIfCancellationRequested();
    return Task.FromResult<IArchive>(Open(streams, readerOptions));
}

/// <summary>
/// Opens a TarArchive asynchronously from multiple FileInfo objects.
/// </summary>
/// <param name="fileInfos">Volume files in order.</param>
/// <param name="readerOptions">Reading options; defaults are used when null.</param>
/// <param name="cancellationToken">Token checked before the open begins.</param>
/// <returns>A completed task producing the opened archive.</returns>
public static Task<IArchive> OpenAsync(
    IReadOnlyList<FileInfo> fileInfos,
    ReaderOptions? readerOptions = null,
    CancellationToken cancellationToken = default
)
{
    cancellationToken.ThrowIfCancellationRequested();
    return Task.FromResult<IArchive>(Open(fileInfos, readerOptions));
}
public static bool IsTarFile(string filePath) => IsTarFile(new FileInfo(filePath));
public static bool IsTarFile(FileInfo fileInfo)

View File

@@ -124,6 +124,70 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
);
}
/// <summary>
/// Opens a ZipArchive asynchronously from a stream.
/// </summary>
/// <param name="stream">Stream containing the archive.</param>
/// <param name="readerOptions">Reading options; defaults are used when null.</param>
/// <param name="cancellationToken">Token checked before the open begins.</param>
/// <returns>A completed task producing the opened archive.</returns>
public static Task<IArchive> OpenAsync(
    Stream stream,
    ReaderOptions? readerOptions = null,
    CancellationToken cancellationToken = default
)
{
    // Open is synchronous; returning a completed task avoids building an
    // async state machine just to await Task.FromResult.
    cancellationToken.ThrowIfCancellationRequested();
    return Task.FromResult<IArchive>(Open(stream, readerOptions));
}

/// <summary>
/// Opens a ZipArchive asynchronously from a FileInfo.
/// </summary>
/// <param name="fileInfo">File containing the archive.</param>
/// <param name="readerOptions">Reading options; defaults are used when null.</param>
/// <param name="cancellationToken">Token checked before the open begins.</param>
/// <returns>A completed task producing the opened archive.</returns>
public static Task<IArchive> OpenAsync(
    FileInfo fileInfo,
    ReaderOptions? readerOptions = null,
    CancellationToken cancellationToken = default
)
{
    cancellationToken.ThrowIfCancellationRequested();
    return Task.FromResult<IArchive>(Open(fileInfo, readerOptions));
}

/// <summary>
/// Opens a ZipArchive asynchronously from multiple streams.
/// </summary>
/// <param name="streams">Volume streams in order.</param>
/// <param name="readerOptions">Reading options; defaults are used when null.</param>
/// <param name="cancellationToken">Token checked before the open begins.</param>
/// <returns>A completed task producing the opened archive.</returns>
public static Task<IArchive> OpenAsync(
    IReadOnlyList<Stream> streams,
    ReaderOptions? readerOptions = null,
    CancellationToken cancellationToken = default
)
{
    cancellationToken.ThrowIfCancellationRequested();
    return Task.FromResult<IArchive>(Open(streams, readerOptions));
}

/// <summary>
/// Opens a ZipArchive asynchronously from multiple FileInfo objects.
/// </summary>
/// <param name="fileInfos">Volume files in order.</param>
/// <param name="readerOptions">Reading options; defaults are used when null.</param>
/// <param name="cancellationToken">Token checked before the open begins.</param>
/// <returns>A completed task producing the opened archive.</returns>
public static Task<IArchive> OpenAsync(
    IReadOnlyList<FileInfo> fileInfos,
    ReaderOptions? readerOptions = null,
    CancellationToken cancellationToken = default
)
{
    cancellationToken.ThrowIfCancellationRequested();
    return Task.FromResult<IArchive>(Open(fileInfos, readerOptions));
}
public static bool IsZipFile(
string filePath,
string? password = null,
@@ -199,7 +263,93 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
if (stream.CanSeek) //could be multipart. Test for central directory - might not be z64 safe
{
var z = new SeekableZipHeaderFactory(password, new ArchiveEncoding());
var x = z.ReadSeekableHeader(stream).FirstOrDefault();
var x = z.ReadSeekableHeader(stream, useSync: true).FirstOrDefault();
return x?.ZipHeaderType == ZipHeaderType.DirectoryEntry;
}
else
{
return false;
}
}
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
}
catch (CryptographicException)
{
return true;
}
catch
{
return false;
}
}
/// <summary>
/// Async counterpart of IsZipFile: checks whether <paramref name="stream"/> begins
/// with a recognizable zip header. Returns true on a CryptographicException (the
/// data looks encrypted) and false on any other parse failure.
/// </summary>
/// <param name="stream">Stream positioned at the start of the candidate data.</param>
/// <param name="password">Optional password for encrypted archives.</param>
/// <param name="bufferSize">Buffer size used when wrapping in SharpCompressStream.</param>
/// <param name="cancellationToken">Token checked before the probe begins.</param>
/// <returns>A completed task with the detection result.</returns>
public static Task<bool> IsZipFileAsync(
    Stream stream,
    string? password = null,
    int bufferSize = ReaderOptions.DefaultBufferSize,
    CancellationToken cancellationToken = default
)
{
    // The header probe below is entirely synchronous, so the method was an
    // `async` method with no awaits (compiler warning CS1998). Return completed
    // tasks instead of paying for an unused async state machine.
    cancellationToken.ThrowIfCancellationRequested();
    var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
    try
    {
        if (stream is not SharpCompressStream)
        {
            stream = new SharpCompressStream(stream, bufferSize: bufferSize);
        }
        var header = headerFactory
            .ReadStreamHeader(stream)
            .FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
        if (header is null)
        {
            return Task.FromResult(false);
        }
        return Task.FromResult(Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType));
    }
    catch (CryptographicException)
    {
        // Encrypted content throws here before a header can be parsed; treat as zip.
        return Task.FromResult(true);
    }
    catch
    {
        // Any other failure means the data is not a readable zip.
        return Task.FromResult(false);
    }
}
public static async Task<bool> IsZipMultiAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
}
var header = headerFactory
.ReadStreamHeader(stream)
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
if (header is null)
{
if (stream.CanSeek) //could be multipart. Test for central directory - might not be z64 safe
{
var z = new SeekableZipHeaderFactory(password, new ArchiveEncoding());
ZipHeader? x = null;
await foreach (
var h in z.ReadSeekableHeader(stream).WithCancellation(cancellationToken)
)
{
x = h;
break;
}
return x?.ZipHeaderType == ZipHeaderType.DirectoryEntry;
}
else
@@ -254,7 +404,9 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
protected override IEnumerable<ZipArchiveEntry> LoadEntries(IEnumerable<ZipVolume> volumes)
{
var vols = volumes.ToArray();
foreach (var h in headerFactory.NotNull().ReadSeekableHeader(vols.Last().Stream))
foreach (
var h in headerFactory.NotNull().ReadSeekableHeader(vols.Last().Stream, useSync: true)
)
{
if (h != null)
{

View File

@@ -1,10 +0,0 @@
using System;
namespace SharpCompress.Common;
public class ArchiveExtractionEventArgs<T> : EventArgs
{
internal ArchiveExtractionEventArgs(T entry) => Item = entry;
public T Item { get; }
}

View File

@@ -0,0 +1,117 @@
using System;
using System.Buffers.Binary;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common
{
/// <summary>
/// Minimal little-endian binary reader that performs true asynchronous reads.
/// Used instead of <see cref="System.IO.BinaryReader"/>, whose reads are synchronous.
/// Not thread-safe: a shared scratch buffer is reused across calls.
/// </summary>
public sealed class AsyncBinaryReader : IDisposable
{
    private readonly Stream _stream;
    private readonly bool _leaveOpen;

    // Scratch space for fixed-width primitives; 8 bytes covers the largest (UInt64).
    private readonly byte[] _buffer = new byte[8];
    private bool _disposed;

    /// <summary>
    /// Creates a reader over <paramref name="stream"/>.
    /// </summary>
    /// <param name="stream">Stream to read from. It is used directly rather than wrapped
    /// in a BufferedStream, because BufferedStream reads synchronously internally and
    /// breaks async-only streams; SharpCompressStream provides async-friendly buffering.</param>
    /// <param name="leaveOpen">When true, the stream is not disposed with the reader.</param>
    /// <param name="bufferSize">Unused; retained for call-site compatibility now that
    /// no internal BufferedStream is created.</param>
    public AsyncBinaryReader(Stream stream, bool leaveOpen = false, int bufferSize = 4096)
    {
        _stream = stream ?? throw new ArgumentNullException(nameof(stream));
        _leaveOpen = leaveOpen;
    }

    /// <summary>The underlying stream.</summary>
    public Stream BaseStream => _stream;

    /// <summary>Reads one byte. Throws <see cref="EndOfStreamException"/> at end of stream.</summary>
    public async ValueTask<byte> ReadByteAsync(CancellationToken ct = default)
    {
        await ReadExactAsync(_buffer, 0, 1, ct).ConfigureAwait(false);
        return _buffer[0];
    }

    /// <summary>Reads a little-endian unsigned 16-bit integer.</summary>
    public async ValueTask<ushort> ReadUInt16Async(CancellationToken ct = default)
    {
        await ReadExactAsync(_buffer, 0, 2, ct).ConfigureAwait(false);
        return BinaryPrimitives.ReadUInt16LittleEndian(_buffer);
    }

    /// <summary>Reads a little-endian unsigned 32-bit integer.</summary>
    public async ValueTask<uint> ReadUInt32Async(CancellationToken ct = default)
    {
        await ReadExactAsync(_buffer, 0, 4, ct).ConfigureAwait(false);
        return BinaryPrimitives.ReadUInt32LittleEndian(_buffer);
    }

    /// <summary>Reads a little-endian unsigned 64-bit integer.</summary>
    public async ValueTask<ulong> ReadUInt64Async(CancellationToken ct = default)
    {
        await ReadExactAsync(_buffer, 0, 8, ct).ConfigureAwait(false);
        return BinaryPrimitives.ReadUInt64LittleEndian(_buffer);
    }

    /// <summary>
    /// Reads exactly <paramref name="count"/> bytes into a new array.
    /// Throws <see cref="EndOfStreamException"/> if the stream ends first.
    /// </summary>
    public async ValueTask<byte[]> ReadBytesAsync(int count, CancellationToken ct = default)
    {
        var result = new byte[count];
        await ReadExactAsync(result, 0, count, ct).ConfigureAwait(false);
        return result;
    }

    // Loops until exactly `length` bytes are read; ReadAsync may return short counts.
    private async ValueTask ReadExactAsync(
        byte[] destination,
        int offset,
        int length,
        CancellationToken ct
    )
    {
        var read = 0;
        while (read < length)
        {
            var n = await _stream
                .ReadAsync(destination, offset + read, length - read, ct)
                .ConfigureAwait(false);
            if (n == 0)
            {
                throw new EndOfStreamException();
            }
            read += n;
        }
    }

    public void Dispose()
    {
        if (_disposed)
        {
            return;
        }
        _disposed = true;
        // Dispose the stream only if we own it.
        if (!_leaveOpen)
        {
            _stream.Dispose();
        }
    }

#if NET6_0_OR_GREATER
    public async ValueTask DisposeAsync()
    {
        if (_disposed)
        {
            return;
        }
        _disposed = true;
        // Dispose the stream only if we own it.
        if (!_leaveOpen)
        {
            await _stream.DisposeAsync().ConfigureAwait(false);
        }
    }
#endif
}
}

View File

@@ -1,25 +0,0 @@
using System;
namespace SharpCompress.Common;
public sealed class CompressedBytesReadEventArgs : EventArgs
{
public CompressedBytesReadEventArgs(
long compressedBytesRead,
long currentFilePartCompressedBytesRead
)
{
CompressedBytesRead = compressedBytesRead;
CurrentFilePartCompressedBytesRead = currentFilePartCompressedBytesRead;
}
/// <summary>
/// Compressed bytes read for the current entry
/// </summary>
public long CompressedBytesRead { get; }
/// <summary>
/// Current file part read for Multipart files (e.g. Rar)
/// </summary>
public long CurrentFilePartCompressedBytesRead { get; }
}

View File

@@ -128,7 +128,7 @@ internal static class ExtractionMethods
IEntry entry,
string destinationDirectory,
ExtractionOptions? options,
Func<string, ExtractionOptions?, Task> writeAsync,
Func<string, ExtractionOptions?, CancellationToken, Task> writeAsync,
CancellationToken cancellationToken = default
)
{
@@ -189,7 +189,7 @@ internal static class ExtractionMethods
"Entry is trying to write a file outside of the destination directory."
);
}
await writeAsync(destinationFileName, options).ConfigureAwait(false);
await writeAsync(destinationFileName, options, cancellationToken).ConfigureAwait(false);
}
else if (options.ExtractFullPath && !Directory.Exists(destinationFileName))
{
@@ -201,7 +201,7 @@ internal static class ExtractionMethods
IEntry entry,
string destinationFileName,
ExtractionOptions? options,
Func<string, FileMode, Task> openAndWriteAsync,
Func<string, FileMode, CancellationToken, Task> openAndWriteAsync,
CancellationToken cancellationToken = default
)
{
@@ -225,7 +225,8 @@ internal static class ExtractionMethods
fm = FileMode.CreateNew;
}
await openAndWriteAsync(destinationFileName, fm).ConfigureAwait(false);
await openAndWriteAsync(destinationFileName, fm, cancellationToken)
.ConfigureAwait(false);
entry.PreserveExtractionOptions(destinationFileName, options);
}
}

View File

@@ -1,4 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common;
@@ -14,4 +16,8 @@ public abstract class FilePart
internal abstract Stream? GetCompressedStream();
internal abstract Stream? GetRawStream();
internal bool Skipped { get; set; }
/// <summary>
/// Async variant of GetCompressedStream. The default implementation simply wraps
/// the synchronous call in a completed task; derived parts can override to do
/// genuine asynchronous work.
/// </summary>
/// <param name="cancellationToken">Token checked before the stream is obtained.</param>
internal virtual Task<Stream?> GetCompressedStreamAsync(
    CancellationToken cancellationToken = default
)
{
    // Honor cancellation up-front, consistent with the other async entry points.
    cancellationToken.ThrowIfCancellationRequested();
    return Task.FromResult(GetCompressedStream());
}
}

View File

@@ -1,28 +0,0 @@
using System;
namespace SharpCompress.Common;
public sealed class FilePartExtractionBeginEventArgs : EventArgs
{
public FilePartExtractionBeginEventArgs(string name, long size, long compressedSize)
{
Name = name;
Size = size;
CompressedSize = compressedSize;
}
/// <summary>
/// File name for the part for the current entry
/// </summary>
public string Name { get; }
/// <summary>
/// Uncompressed size of the current entry in the part
/// </summary>
public long Size { get; }
/// <summary>
/// Compressed size of the current entry in the part
/// </summary>
public long CompressedSize { get; }
}

View File

@@ -1,7 +0,0 @@
namespace SharpCompress.Common;
public interface IExtractionListener
{
void FireFilePartExtractionBegin(string name, long size, long compressedSize);
void FireCompressedBytesRead(long currentPartCompressedBytes, long compressedReadBytes);
}

View File

@@ -0,0 +1,43 @@
namespace SharpCompress.Common;
/// <summary>
/// Immutable snapshot of how far a compression or extraction operation has progressed.
/// </summary>
public sealed class ProgressReport
{
    /// <summary>
    /// Initializes a new instance of the <see cref="ProgressReport"/> class.
    /// </summary>
    /// <param name="entryPath">The path of the entry being processed.</param>
    /// <param name="bytesTransferred">Number of bytes transferred so far.</param>
    /// <param name="totalBytes">Total bytes to be transferred, or null if unknown.</param>
    public ProgressReport(string entryPath, long bytesTransferred, long? totalBytes)
    {
        EntryPath = entryPath;
        BytesTransferred = bytesTransferred;
        TotalBytes = totalBytes;
    }

    /// <summary>Path of the entry being processed.</summary>
    public string EntryPath { get; }

    /// <summary>Bytes transferred so far.</summary>
    public long BytesTransferred { get; }

    /// <summary>Total bytes to transfer, or null when the total is not known.</summary>
    public long? TotalBytes { get; }

    /// <summary>
    /// Percentage complete in [0, 100], or null when the total is unknown or zero.
    /// </summary>
    public double? PercentComplete
    {
        get
        {
            if (TotalBytes is long total && total > 0)
            {
                return (double)BytesTransferred / total * 100;
            }
            return null;
        }
    }
}

View File

@@ -1,17 +0,0 @@
using System;
using SharpCompress.Readers;
namespace SharpCompress.Common;
public sealed class ReaderExtractionEventArgs<T> : EventArgs
{
internal ReaderExtractionEventArgs(T entry, ReaderProgress? readerProgress = null)
{
Item = entry;
ReaderProgress = readerProgress;
}
public T Item { get; }
public ReaderProgress? ReaderProgress { get; }
}

View File

@@ -1,4 +1,5 @@
using System.IO;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
@@ -19,6 +20,18 @@ internal class DirectoryEndHeader : ZipHeader
Comment = reader.ReadBytes(CommentLength);
}
/// <summary>
/// Async read of the end-of-central-directory record fields; mirrors the
/// synchronous overload above.
/// </summary>
internal override async ValueTask Read(AsyncBinaryReader reader)
{
    // Library code: ConfigureAwait(false) avoids capturing a sync context,
    // consistent with the other async code in this assembly.
    VolumeNumber = await reader.ReadUInt16Async().ConfigureAwait(false);
    FirstVolumeWithDirectory = await reader.ReadUInt16Async().ConfigureAwait(false);
    TotalNumberOfEntriesInDisk = await reader.ReadUInt16Async().ConfigureAwait(false);
    TotalNumberOfEntries = await reader.ReadUInt16Async().ConfigureAwait(false);
    DirectorySize = await reader.ReadUInt32Async().ConfigureAwait(false);
    DirectoryStartOffsetRelativeToDisk = await reader.ReadUInt32Async().ConfigureAwait(false);
    CommentLength = await reader.ReadUInt16Async().ConfigureAwait(false);
    Comment = await reader.ReadBytesAsync(CommentLength).ConfigureAwait(false);
}
public ushort VolumeNumber { get; private set; }
public ushort FirstVolumeWithDirectory { get; private set; }

View File

@@ -1,5 +1,6 @@
using System.IO;
using System.Linq;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
@@ -31,7 +32,37 @@ internal class DirectoryEntryHeader : ZipFileEntry
var extra = reader.ReadBytes(extraLength);
var comment = reader.ReadBytes(commentLength);
// According to .ZIP File Format Specification
ProcessReadData(name, extra, comment);
}
/// <summary>
/// Async read of a central directory entry; field order mirrors the synchronous
/// overload above per the .ZIP File Format Specification.
/// </summary>
internal override async ValueTask Read(AsyncBinaryReader reader)
{
    // Library code: ConfigureAwait(false) avoids capturing a sync context,
    // consistent with the other async code in this assembly.
    Version = await reader.ReadUInt16Async().ConfigureAwait(false);
    VersionNeededToExtract = await reader.ReadUInt16Async().ConfigureAwait(false);
    Flags = (HeaderFlags)await reader.ReadUInt16Async().ConfigureAwait(false);
    CompressionMethod = (ZipCompressionMethod)await reader.ReadUInt16Async().ConfigureAwait(false);
    OriginalLastModifiedTime = LastModifiedTime = await reader
        .ReadUInt16Async()
        .ConfigureAwait(false);
    OriginalLastModifiedDate = LastModifiedDate = await reader
        .ReadUInt16Async()
        .ConfigureAwait(false);
    Crc = await reader.ReadUInt32Async().ConfigureAwait(false);
    CompressedSize = await reader.ReadUInt32Async().ConfigureAwait(false);
    UncompressedSize = await reader.ReadUInt32Async().ConfigureAwait(false);
    var nameLength = await reader.ReadUInt16Async().ConfigureAwait(false);
    var extraLength = await reader.ReadUInt16Async().ConfigureAwait(false);
    var commentLength = await reader.ReadUInt16Async().ConfigureAwait(false);
    DiskNumberStart = await reader.ReadUInt16Async().ConfigureAwait(false);
    InternalFileAttributes = await reader.ReadUInt16Async().ConfigureAwait(false);
    ExternalFileAttributes = await reader.ReadUInt32Async().ConfigureAwait(false);
    RelativeOffsetOfEntryHeader = await reader.ReadUInt32Async().ConfigureAwait(false);
    var name = await reader.ReadBytesAsync(nameLength).ConfigureAwait(false);
    var extra = await reader.ReadBytesAsync(extraLength).ConfigureAwait(false);
    var comment = await reader.ReadBytesAsync(commentLength).ConfigureAwait(false);

    ProcessReadData(name, extra, comment);
}
private void ProcessReadData(byte[] name, byte[] extra, byte[] comment)
{
//
// For example: https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT
//

View File

@@ -1,4 +1,5 @@
using System.IO;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
@@ -8,4 +9,6 @@ internal class IgnoreHeader : ZipHeader
: base(type) { }
// Headers of this type carry nothing we need to parse; both read paths are
// intentional no-ops.
internal override void Read(BinaryReader reader) { }

internal override ValueTask Read(AsyncBinaryReader reader) => default;
}

View File

@@ -1,13 +1,12 @@
using System.IO;
using System.Linq;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
internal class LocalEntryHeader : ZipFileEntry
internal class LocalEntryHeader(ArchiveEncoding archiveEncoding)
: ZipFileEntry(ZipHeaderType.LocalEntry, archiveEncoding)
{
public LocalEntryHeader(ArchiveEncoding archiveEncoding)
: base(ZipHeaderType.LocalEntry, archiveEncoding) { }
internal override void Read(BinaryReader reader)
{
Version = reader.ReadUInt16();
@@ -23,7 +22,29 @@ internal class LocalEntryHeader : ZipFileEntry
var name = reader.ReadBytes(nameLength);
var extra = reader.ReadBytes(extraLength);
// According to .ZIP File Format Specification
ProcessReadData(name, extra);
}
/// <summary>
/// Async read of a local file entry header; field order mirrors the synchronous
/// overload above per the .ZIP File Format Specification.
/// </summary>
internal override async ValueTask Read(AsyncBinaryReader reader)
{
    // Library code: ConfigureAwait(false) avoids capturing a sync context,
    // consistent with the other async code in this assembly.
    Version = await reader.ReadUInt16Async().ConfigureAwait(false);
    Flags = (HeaderFlags)await reader.ReadUInt16Async().ConfigureAwait(false);
    CompressionMethod = (ZipCompressionMethod)await reader.ReadUInt16Async().ConfigureAwait(false);
    OriginalLastModifiedTime = LastModifiedTime = await reader
        .ReadUInt16Async()
        .ConfigureAwait(false);
    OriginalLastModifiedDate = LastModifiedDate = await reader
        .ReadUInt16Async()
        .ConfigureAwait(false);
    Crc = await reader.ReadUInt32Async().ConfigureAwait(false);
    CompressedSize = await reader.ReadUInt32Async().ConfigureAwait(false);
    UncompressedSize = await reader.ReadUInt32Async().ConfigureAwait(false);
    var nameLength = await reader.ReadUInt16Async().ConfigureAwait(false);
    var extraLength = await reader.ReadUInt16Async().ConfigureAwait(false);
    var name = await reader.ReadBytesAsync(nameLength).ConfigureAwait(false);
    var extra = await reader.ReadBytesAsync(extraLength).ConfigureAwait(false);

    ProcessReadData(name, extra);
}
private void ProcessReadData(byte[] name, byte[] extra)
{
//
// For example: https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT
//

View File

@@ -1,5 +1,6 @@
using System;
using System.IO;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
@@ -9,4 +10,7 @@ internal class SplitHeader : ZipHeader
: base(ZipHeaderType.Split) { }
// Split (spanned) archive markers have no parseable payload here; both read
// paths throw because continuing past a split marker is not supported.
internal override void Read(BinaryReader reader) => throw new NotImplementedException();

internal override ValueTask Read(AsyncBinaryReader reader) =>
    throw new NotImplementedException();
}

View File

@@ -1,4 +1,5 @@
using System.IO;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
@@ -26,6 +27,25 @@ internal class Zip64DirectoryEndHeader : ZipHeader
);
}
/// <summary>
/// Async read of the Zip64 end-of-central-directory record; mirrors the
/// synchronous overload above. The trailing data sector length is derived from
/// the record size minus the fixed header fields.
/// </summary>
internal override async ValueTask Read(AsyncBinaryReader reader)
{
    // Library code: ConfigureAwait(false) avoids capturing a sync context,
    // consistent with the other async code in this assembly.
    SizeOfDirectoryEndRecord = (long)await reader.ReadUInt64Async().ConfigureAwait(false);
    VersionMadeBy = await reader.ReadUInt16Async().ConfigureAwait(false);
    VersionNeededToExtract = await reader.ReadUInt16Async().ConfigureAwait(false);
    VolumeNumber = await reader.ReadUInt32Async().ConfigureAwait(false);
    FirstVolumeWithDirectory = await reader.ReadUInt32Async().ConfigureAwait(false);
    TotalNumberOfEntriesInDisk = (long)await reader.ReadUInt64Async().ConfigureAwait(false);
    TotalNumberOfEntries = (long)await reader.ReadUInt64Async().ConfigureAwait(false);
    DirectorySize = (long)await reader.ReadUInt64Async().ConfigureAwait(false);
    DirectoryStartOffsetRelativeToDisk = (long)
        await reader.ReadUInt64Async().ConfigureAwait(false);
    DataSector = await reader
        .ReadBytesAsync(
            (int)(
                SizeOfDirectoryEndRecord
                - SIZE_OF_FIXED_HEADER_DATA_EXCEPT_SIGNATURE_AND_SIZE_FIELDS
            )
        )
        .ConfigureAwait(false);
}
private const int SIZE_OF_FIXED_HEADER_DATA_EXCEPT_SIGNATURE_AND_SIZE_FIELDS = 44;
public long SizeOfDirectoryEndRecord { get; private set; }

View File

@@ -1,12 +1,10 @@
using System.IO;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
internal class Zip64DirectoryEndLocatorHeader : ZipHeader
internal class Zip64DirectoryEndLocatorHeader() : ZipHeader(ZipHeaderType.Zip64DirectoryEndLocator)
{
public Zip64DirectoryEndLocatorHeader()
: base(ZipHeaderType.Zip64DirectoryEndLocator) { }
internal override void Read(BinaryReader reader)
{
FirstVolumeWithDirectory = reader.ReadUInt32();
@@ -14,6 +12,13 @@ internal class Zip64DirectoryEndLocatorHeader : ZipHeader
TotalNumberOfVolumes = reader.ReadUInt32();
}
/// <summary>
/// Async counterpart of the synchronous Read overload: reads the Zip64
/// end-of-central-directory locator fields (signature already consumed by
/// the caller).
/// </summary>
internal override async ValueTask Read(AsyncBinaryReader reader)
{
FirstVolumeWithDirectory = await reader.ReadUInt32Async();
RelativeOffsetOfTheEndOfDirectoryRecord = (long)await reader.ReadUInt64Async();
TotalNumberOfVolumes = await reader.ReadUInt32Async();
}
public uint FirstVolumeWithDirectory { get; private set; }
public long RelativeOffsetOfTheEndOfDirectoryRecord { get; private set; }

View File

@@ -2,18 +2,14 @@ using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
internal abstract class ZipFileEntry : ZipHeader
internal abstract class ZipFileEntry(ZipHeaderType type, ArchiveEncoding archiveEncoding)
: ZipHeader(type)
{
protected ZipFileEntry(ZipHeaderType type, ArchiveEncoding archiveEncoding)
: base(type)
{
Extra = new List<ExtraData>();
ArchiveEncoding = archiveEncoding;
}
internal bool IsDirectory
{
get
@@ -30,7 +26,7 @@ internal abstract class ZipFileEntry : ZipHeader
internal Stream? PackedStream { get; set; }
internal ArchiveEncoding ArchiveEncoding { get; }
internal ArchiveEncoding ArchiveEncoding { get; } = archiveEncoding;
internal string? Name { get; set; }
@@ -44,7 +40,7 @@ internal abstract class ZipFileEntry : ZipHeader
internal long UncompressedSize { get; set; }
internal List<ExtraData> Extra { get; set; }
internal List<ExtraData> Extra { get; set; } = new();
public string? Password { get; set; }
@@ -63,6 +59,24 @@ internal abstract class ZipFileEntry : ZipHeader
return encryptionData;
}
/// <summary>
/// Asynchronously reads the 12-byte PKWARE traditional ("ZipCrypto")
/// encryption header from the archive stream and builds the decryption
/// state for this entry.
/// </summary>
/// <param name="archiveStream">Stream positioned at the entry's encryption header.</param>
/// <param name="cancellationToken">Token used to cancel the read.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="archiveStream"/> is null.</exception>
internal async Task<PkwareTraditionalEncryptionData> ComposeEncryptionDataAsync(
Stream archiveStream,
CancellationToken cancellationToken = default
)
{
if (archiveStream is null)
{
throw new ArgumentNullException(nameof(archiveStream));
}
// PKWARE traditional encryption prefixes the entry data with a 12-byte
// header used to initialise the cipher keys.
var buffer = new byte[12];
await archiveStream.ReadFullyAsync(buffer, 0, 12, cancellationToken).ConfigureAwait(false);
// NOTE(review): Password is null-forgiven here — assumes callers always set
// it before composing encryption data for an encrypted entry; confirm.
var encryptionData = PkwareTraditionalEncryptionData.ForRead(Password!, this, buffer);
return encryptionData;
}
internal WinzipAesEncryptionData? WinzipAesEncryptionData { get; set; }
/// <summary>

View File

@@ -1,18 +1,14 @@
using System.IO;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip.Headers;
internal abstract class ZipHeader
internal abstract class ZipHeader(ZipHeaderType type)
{
protected ZipHeader(ZipHeaderType type)
{
ZipHeaderType = type;
HasData = true;
}
internal ZipHeaderType ZipHeaderType { get; }
internal ZipHeaderType ZipHeaderType { get; } = type;
internal abstract void Read(BinaryReader reader);
internal abstract ValueTask Read(AsyncBinaryReader reader);
internal bool HasData { get; set; }
internal bool HasData { get; set; } = true;
}

View File

@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
@@ -18,7 +19,74 @@ internal sealed class SeekableZipHeaderFactory : ZipHeaderFactory
internal SeekableZipHeaderFactory(string? password, ArchiveEncoding archiveEncoding)
: base(StreamingMode.Seekable, password, archiveEncoding) { }
internal IEnumerable<ZipHeader> ReadSeekableHeader(Stream stream)
/// <summary>
/// Asynchronously walks the central directory of a seekable zip stream and
/// yields its headers: locates the end-of-central-directory record, follows
/// the Zip64 locator/record when present, then enumerates directory entries
/// until ReadHeader returns null.
/// </summary>
internal async IAsyncEnumerable<ZipHeader> ReadSeekableHeader(Stream stream)
{
var reader = new AsyncBinaryReader(stream);
await SeekBackToHeader(stream, reader);
var eocd_location = stream.Position;
var entry = new DirectoryEndHeader();
await entry.Read(reader);
if (entry.IsZip64)
{
_zip64 = true;
// ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR should be before the EOCD
stream.Seek(eocd_location - ZIP64_EOCD_LENGTH - 4, SeekOrigin.Begin);
uint zip64_locator = await reader.ReadUInt32Async();
if (zip64_locator != ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR)
{
throw new ArchiveException("Failed to locate the Zip64 Directory Locator");
}
var zip64Locator = new Zip64DirectoryEndLocatorHeader();
await zip64Locator.Read(reader);
// The locator points at the real Zip64 EOCD record; validate its signature
// before reading it.
stream.Seek(zip64Locator.RelativeOffsetOfTheEndOfDirectoryRecord, SeekOrigin.Begin);
var zip64Signature = await reader.ReadUInt32Async();
if (zip64Signature != ZIP64_END_OF_CENTRAL_DIRECTORY)
{
throw new ArchiveException("Failed to locate the Zip64 Header");
}
var zip64Entry = new Zip64DirectoryEndHeader();
await zip64Entry.Read(reader);
stream.Seek(zip64Entry.DirectoryStartOffsetRelativeToDisk, SeekOrigin.Begin);
}
else
{
stream.Seek(entry.DirectoryStartOffsetRelativeToDisk, SeekOrigin.Begin);
}
// Re-seek before every header read: consumers may reposition the stream
// between yields, so the directory position is saved and restored each turn.
var position = stream.Position;
while (true)
{
stream.Position = position;
var signature = await reader.ReadUInt32Async();
var nextHeader = await ReadHeader(signature, reader, _zip64);
position = stream.Position;
if (nextHeader is null)
{
yield break;
}
if (nextHeader is DirectoryEntryHeader entryHeader)
{
//entry could be zero bytes so we need to know that.
entryHeader.HasData = entryHeader.CompressedSize != 0;
yield return entryHeader;
}
else if (nextHeader is DirectoryEndHeader endHeader)
{
yield return endHeader;
}
}
}
internal IEnumerable<ZipHeader> ReadSeekableHeader(Stream stream, bool useSync)
{
var reader = new BinaryReader(stream);
@@ -98,6 +166,45 @@ internal sealed class SeekableZipHeaderFactory : ZipHeaderFactory
return true;
}
/// <summary>
/// Seeks the stream back to the end-of-central-directory signature by
/// scanning the tail of the file in reverse.
/// </summary>
/// <exception cref="ArchiveException">File too short or signature not found.</exception>
private static async ValueTask SeekBackToHeader(Stream stream, AsyncBinaryReader reader)
{
// Minimum EOCD length
if (stream.Length < MINIMUM_EOCD_LENGTH)
{
throw new ArchiveException(
"Could not find Zip file Directory at the end of the file. File may be corrupted."
);
}
var len =
stream.Length < MAX_SEARCH_LENGTH_FOR_EOCD
? (int)stream.Length
: MAX_SEARCH_LENGTH_FOR_EOCD;
// We search for the marker in reverse to find the first occurrence from the
// end of the file. The needle is the EOCD signature "PK\x05\x06" byte-reversed,
// because the buffer itself is reversed below.
byte[] needle = { 0x06, 0x05, 0x4b, 0x50 };
stream.Seek(-len, SeekOrigin.End);
var seek = await reader.ReadBytesAsync(len);
// Search in reverse
Array.Reverse(seek);
// don't exclude the minimum eocd region, otherwise you fail to locate the header in empty zip files
var max_search_area = len; // - MINIMUM_EOCD_LENGTH;
for (var pos_from_end = 0; pos_from_end < max_search_area; ++pos_from_end)
{
if (IsMatch(seek, pos_from_end, needle))
{
stream.Seek(-pos_from_end, SeekOrigin.End);
return;
}
}
throw new ArchiveException("Failed to locate the Zip Header");
}
private static void SeekBackToHeader(Stream stream, BinaryReader reader)
{
// Minimum EOCD length

View File

@@ -1,4 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors.Deflate;
using SharpCompress.IO;
@@ -31,6 +33,28 @@ internal sealed class StreamingZipFilePart : ZipFilePart
return _decompressionStream;
}
/// <summary>
/// Async counterpart of GetCompressedStream: builds the decrypt-then-decompress
/// pipeline over the entry's base stream and caches it in _decompressionStream.
/// </summary>
internal override async Task<Stream?> GetCompressedStreamAsync(
CancellationToken cancellationToken = default
)
{
// Zero-byte entries have nothing to decompress.
if (!Header.HasData)
{
return Stream.Null;
}
_decompressionStream = await CreateDecompressionStreamAsync(
await GetCryptoStreamAsync(CreateBaseStream(), cancellationToken)
.ConfigureAwait(false),
Header.CompressionMethod,
cancellationToken
)
.ConfigureAwait(false);
if (LeaveStreamOpen)
{
// Wrap so disposing the returned stream does not close the inner one.
return SharpCompressStream.Create(_decompressionStream, leaveOpen: true);
}
return _decompressionStream;
}
internal BinaryReader FixStreamedFileLocation(ref SharpCompressStream rewindableStream)
{
if (Header.IsDirectory)

View File

@@ -2,6 +2,8 @@ using System;
using System.Buffers.Binary;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors;
using SharpCompress.Compressors.BZip2;
@@ -264,4 +266,220 @@ internal abstract class ZipFilePart : FilePart
}
return plainStream;
}
/// <summary>
/// Async counterpart of GetCompressedStream: builds the decrypt-then-decompress
/// pipeline over the entry's base stream.
/// </summary>
internal override async Task<Stream?> GetCompressedStreamAsync(
CancellationToken cancellationToken = default
)
{
// Zero-byte entries have nothing to decompress.
if (!Header.HasData)
{
return Stream.Null;
}
var decompressionStream = await CreateDecompressionStreamAsync(
await GetCryptoStreamAsync(CreateBaseStream(), cancellationToken)
.ConfigureAwait(false),
Header.CompressionMethod,
cancellationToken
)
.ConfigureAwait(false);
if (LeaveStreamOpen)
{
// Wrap so disposing the returned stream does not close the inner one.
return SharpCompressStream.Create(decompressionStream, leaveOpen: true);
}
return decompressionStream;
}
/// <summary>
/// Async counterpart of GetCryptoStream: bounds the entry's compressed data
/// and, when the entry is encrypted, wraps it in the matching crypto stream.
/// </summary>
/// <exception cref="NotSupportedException">Encrypted entry with unknown size at start.</exception>
/// <exception cref="InvalidOperationException">Unrecognised compression method on an encrypted entry.</exception>
protected async Task<Stream> GetCryptoStreamAsync(
Stream plainStream,
CancellationToken cancellationToken = default
)
{
var isFileEncrypted = FlagUtility.HasFlag(Header.Flags, HeaderFlags.Encrypted);
if (Header.CompressedSize == 0 && isFileEncrypted)
{
throw new NotSupportedException("Cannot encrypt file with unknown size at start.");
}
// When the compressed size is unknown (post-data descriptor) or zip64,
// the stream cannot be length-bounded; otherwise restrict reads to the
// entry's compressed extent.
if (
(
Header.CompressedSize == 0
&& FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor)
) || Header.IsZip64
)
{
plainStream = SharpCompressStream.Create(plainStream, leaveOpen: true); //make sure AES doesn't close
}
else
{
plainStream = new ReadOnlySubStream(plainStream, Header.CompressedSize); //make sure AES doesn't close
}
if (isFileEncrypted)
{
switch (Header.CompressionMethod)
{
case ZipCompressionMethod.None:
case ZipCompressionMethod.Shrink:
case ZipCompressionMethod.Reduce1:
case ZipCompressionMethod.Reduce2:
case ZipCompressionMethod.Reduce3:
case ZipCompressionMethod.Reduce4:
case ZipCompressionMethod.Deflate:
case ZipCompressionMethod.Deflate64:
case ZipCompressionMethod.BZip2:
case ZipCompressionMethod.LZMA:
case ZipCompressionMethod.PPMd:
{
// All these methods use PKWARE traditional encryption; the
// 12-byte key header is consumed here.
return new PkwareTraditionalCryptoStream(
plainStream,
await Header
.ComposeEncryptionDataAsync(plainStream, cancellationToken)
.ConfigureAwait(false),
CryptoMode.Decrypt
);
}
case ZipCompressionMethod.WinzipAes:
{
if (Header.WinzipAesEncryptionData != null)
{
// The trailing 10 bytes are excluded from the data length
// (WinZip AES authentication code at the end of the entry).
return new WinzipAesCryptoStream(
plainStream,
Header.WinzipAesEncryptionData,
Header.CompressedSize - 10
);
}
return plainStream;
}
default:
{
throw new InvalidOperationException("Header.CompressionMethod is invalid");
}
}
}
return plainStream;
}
/// <summary>
/// Async counterpart of CreateDecompressionStream: wraps <paramref name="stream"/>
/// in the decompressor matching <paramref name="method"/>. Async reads are only
/// needed for methods whose properties precede the data (LZMA, PPMd).
/// </summary>
/// <exception cref="NotSupportedException">Unsupported method or unsupported combination.</exception>
/// <exception cref="InvalidFormatException">Malformed WinZip AES extra data.</exception>
protected async Task<Stream> CreateDecompressionStreamAsync(
Stream stream,
ZipCompressionMethod method,
CancellationToken cancellationToken = default
)
{
switch (method)
{
case ZipCompressionMethod.None:
{
// Unknown size means the entry is terminated by a data descriptor.
if (Header.CompressedSize is 0)
{
return new DataDescriptorStream(stream);
}
return stream;
}
case ZipCompressionMethod.Shrink:
{
return new ShrinkStream(
stream,
CompressionMode.Decompress,
Header.CompressedSize,
Header.UncompressedSize
);
}
case ZipCompressionMethod.Reduce1:
{
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 1);
}
case ZipCompressionMethod.Reduce2:
{
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 2);
}
case ZipCompressionMethod.Reduce3:
{
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 3);
}
case ZipCompressionMethod.Reduce4:
{
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 4);
}
case ZipCompressionMethod.Explode:
{
return new ExplodeStream(
stream,
Header.CompressedSize,
Header.UncompressedSize,
Header.Flags
);
}
case ZipCompressionMethod.Deflate:
{
return new DeflateStream(stream, CompressionMode.Decompress);
}
case ZipCompressionMethod.Deflate64:
{
return new Deflate64Stream(stream, CompressionMode.Decompress);
}
case ZipCompressionMethod.BZip2:
{
return new BZip2Stream(stream, CompressionMode.Decompress, false);
}
case ZipCompressionMethod.LZMA:
{
if (FlagUtility.HasFlag(Header.Flags, HeaderFlags.Encrypted))
{
throw new NotSupportedException("LZMA with pkware encryption.");
}
// Zip-LZMA entries start with a 4-byte header: version (2 bytes)
// followed by the LZMA properties size (2 bytes).
var buffer = new byte[4];
await stream.ReadFullyAsync(buffer, 0, 4, cancellationToken).ConfigureAwait(false);
// NOTE(review): version is parsed but otherwise unused; the 4 header
// bytes were already consumed above.
var version = BinaryPrimitives.ReadUInt16LittleEndian(buffer.AsSpan(0, 2));
var propsSize = BinaryPrimitives.ReadUInt16LittleEndian(buffer.AsSpan(2, 2));
var props = new byte[propsSize];
await stream
.ReadFullyAsync(props, 0, propsSize, cancellationToken)
.ConfigureAwait(false);
// Remaining compressed length excludes the header and the props
// just consumed; -1 means "unknown".
return new LzmaStream(
props,
stream,
Header.CompressedSize > 0 ? Header.CompressedSize - 4 - props.Length : -1,
FlagUtility.HasFlag(Header.Flags, HeaderFlags.Bit1)
? -1
: Header.UncompressedSize
);
}
case ZipCompressionMethod.Xz:
{
return new XZStream(stream);
}
case ZipCompressionMethod.ZStandard:
{
return new DecompressionStream(stream);
}
case ZipCompressionMethod.PPMd:
{
// PPMd properties are the first 2 bytes of the entry data.
var props = new byte[2];
await stream.ReadFullyAsync(props, 0, 2, cancellationToken).ConfigureAwait(false);
return new PpmdStream(new PpmdProperties(props), stream, false);
}
case ZipCompressionMethod.WinzipAes:
{
// Validate the extra data even though streaming AES is unsupported,
// so callers get a precise error for malformed archives.
var data = Header.Extra.SingleOrDefault(x => x.Type == ExtraDataType.WinZipAes);
if (data is null)
{
throw new InvalidFormatException("No Winzip AES extra data found.");
}
if (data.Length != 7)
{
throw new InvalidFormatException("Winzip data length is not 7.");
}
throw new NotSupportedException("WinzipAes isn't supported for streaming");
}
default:
{
throw new NotSupportedException("CompressionMethod: " + Header.CompressionMethod);
}
}
}
}

View File

@@ -1,6 +1,7 @@
using System;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
@@ -34,6 +35,82 @@ internal class ZipHeaderFactory
_archiveEncoding = archiveEncoding;
}
/// <summary>
/// Async counterpart of the synchronous ReadHeader: dispatches on the 4-byte
/// signature (already consumed by the caller) and reads the matching header
/// record. Returns null for signatures that yield no header (post-data
/// descriptors, digital signatures, unknown values).
/// </summary>
protected async ValueTask<ZipHeader?> ReadHeader(
uint headerBytes,
AsyncBinaryReader reader,
bool zip64 = false
)
{
switch (headerBytes)
{
case ENTRY_HEADER_BYTES:
{
var entryHeader = new LocalEntryHeader(_archiveEncoding);
await entryHeader.Read(reader);
LoadHeader(entryHeader, reader.BaseStream);
// Remember the last local entry so a following post-data descriptor
// can patch its CRC and sizes.
_lastEntryHeader = entryHeader;
return entryHeader;
}
case DIRECTORY_START_HEADER_BYTES:
{
var entry = new DirectoryEntryHeader(_archiveEncoding);
await entry.Read(reader);
return entry;
}
case POST_DATA_DESCRIPTOR:
{
if (
_lastEntryHeader != null
&& FlagUtility.HasFlag(
_lastEntryHeader.NotNull().Flags,
HeaderFlags.UsePostDataDescriptor
)
)
{
// Sizes in the descriptor are 8 bytes under zip64, 4 otherwise.
_lastEntryHeader.Crc = await reader.ReadUInt32Async();
_lastEntryHeader.CompressedSize = zip64
? (long)await reader.ReadUInt64Async()
: await reader.ReadUInt32Async();
_lastEntryHeader.UncompressedSize = zip64
? (long)await reader.ReadUInt64Async()
: await reader.ReadUInt32Async();
}
else
{
// No entry to patch: skip the descriptor body
// (20 bytes with zip64 sizes, 12 otherwise).
await reader.ReadBytesAsync(zip64 ? 20 : 12);
}
return null;
}
case DIGITAL_SIGNATURE:
return null;
case DIRECTORY_END_HEADER_BYTES:
{
var entry = new DirectoryEndHeader();
await entry.Read(reader);
return entry;
}
case SPLIT_ARCHIVE_HEADER_BYTES:
{
// Marker only — carries no payload to read.
return new SplitHeader();
}
case ZIP64_END_OF_CENTRAL_DIRECTORY:
{
var entry = new Zip64DirectoryEndHeader();
await entry.Read(reader);
return entry;
}
case ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR:
{
var entry = new Zip64DirectoryEndLocatorHeader();
await entry.Read(reader);
return entry;
}
default:
return null;
}
}
protected ZipHeader? ReadHeader(uint headerBytes, BinaryReader reader, bool zip64 = false)
{
switch (headerBytes)

View File

@@ -428,7 +428,9 @@ public class LzmaStream : Stream, IStreamStack
private async Task DecodeChunkHeaderAsync(CancellationToken cancellationToken = default)
{
var controlBuffer = new byte[1];
await _inputStream.ReadAsync(controlBuffer, 0, 1, cancellationToken).ConfigureAwait(false);
await _inputStream
.ReadExactlyAsync(controlBuffer, 0, 1, cancellationToken)
.ConfigureAwait(false);
var control = controlBuffer[0];
_inputPosition++;
@@ -455,11 +457,15 @@ public class LzmaStream : Stream, IStreamStack
_availableBytes = (control & 0x1F) << 16;
var buffer = new byte[2];
await _inputStream.ReadAsync(buffer, 0, 2, cancellationToken).ConfigureAwait(false);
await _inputStream
.ReadExactlyAsync(buffer, 0, 2, cancellationToken)
.ConfigureAwait(false);
_availableBytes += (buffer[0] << 8) + buffer[1] + 1;
_inputPosition += 2;
await _inputStream.ReadAsync(buffer, 0, 2, cancellationToken).ConfigureAwait(false);
await _inputStream
.ReadExactlyAsync(buffer, 0, 2, cancellationToken)
.ConfigureAwait(false);
_rangeDecoderLimit = (buffer[0] << 8) + buffer[1] + 1;
_inputPosition += 2;
@@ -467,7 +473,7 @@ public class LzmaStream : Stream, IStreamStack
{
_needProps = false;
await _inputStream
.ReadAsync(controlBuffer, 0, 1, cancellationToken)
.ReadExactlyAsync(controlBuffer, 0, 1, cancellationToken)
.ConfigureAwait(false);
Properties[0] = controlBuffer[0];
_inputPosition++;
@@ -495,7 +501,9 @@ public class LzmaStream : Stream, IStreamStack
{
_uncompressedChunk = true;
var buffer = new byte[2];
await _inputStream.ReadAsync(buffer, 0, 2, cancellationToken).ConfigureAwait(false);
await _inputStream
.ReadExactlyAsync(buffer, 0, 2, cancellationToken)
.ConfigureAwait(false);
_availableBytes = (buffer[0] << 8) + buffer[1] + 1;
_inputPosition += 2;
}

View File

@@ -37,18 +37,8 @@ internal sealed class MultiVolumeReadOnlyStream : Stream, IStreamStack
private IEnumerator<RarFilePart> filePartEnumerator;
private Stream currentStream;
private readonly IExtractionListener streamListener;
private long currentPartTotalReadBytes;
private long currentEntryTotalReadBytes;
internal MultiVolumeReadOnlyStream(
IEnumerable<RarFilePart> parts,
IExtractionListener streamListener
)
internal MultiVolumeReadOnlyStream(IEnumerable<RarFilePart> parts)
{
this.streamListener = streamListener;
filePartEnumerator = parts.GetEnumerator();
filePartEnumerator.MoveNext();
InitializeNextFilePart();
@@ -81,15 +71,7 @@ internal sealed class MultiVolumeReadOnlyStream : Stream, IStreamStack
currentPosition = 0;
currentStream = filePartEnumerator.Current.GetCompressedStream();
currentPartTotalReadBytes = 0;
CurrentCrc = filePartEnumerator.Current.FileHeader.FileCrc;
streamListener.FireFilePartExtractionBegin(
filePartEnumerator.Current.FilePartName,
filePartEnumerator.Current.FileHeader.CompressedSize,
filePartEnumerator.Current.FileHeader.UncompressedSize
);
}
public override int Read(byte[] buffer, int offset, int count)
@@ -141,12 +123,6 @@ internal sealed class MultiVolumeReadOnlyStream : Stream, IStreamStack
break;
}
}
currentPartTotalReadBytes += totalRead;
currentEntryTotalReadBytes += totalRead;
streamListener.FireCompressedBytesRead(
currentPartTotalReadBytes,
currentEntryTotalReadBytes
);
return totalRead;
}
@@ -206,12 +182,6 @@ internal sealed class MultiVolumeReadOnlyStream : Stream, IStreamStack
break;
}
}
currentPartTotalReadBytes += totalRead;
currentEntryTotalReadBytes += totalRead;
streamListener.FireCompressedBytesRead(
currentPartTotalReadBytes,
currentEntryTotalReadBytes
);
return totalRead;
}
@@ -270,12 +240,6 @@ internal sealed class MultiVolumeReadOnlyStream : Stream, IStreamStack
break;
}
}
currentPartTotalReadBytes += totalRead;
currentEntryTotalReadBytes += totalRead;
streamListener.FireCompressedBytesRead(
currentPartTotalReadBytes,
currentEntryTotalReadBytes
);
return totalRead;
}
#endif

View File

@@ -4,6 +4,7 @@ using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
@@ -42,5 +43,15 @@ namespace SharpCompress.Factories
public IReader OpenReader(Stream stream, ReaderOptions? options) =>
ArcReader.Open(stream, options);
/// <summary>
/// Async wrapper over the synchronous <see cref="OpenReader(Stream, ReaderOptions?)"/>;
/// Arc reading has no true async path, so the result is returned as a
/// completed task. Throws <see cref="OperationCanceledException"/> if the
/// token is already cancelled.
/// </summary>
public Task<IReader> OpenReaderAsync(
    Stream stream,
    ReaderOptions? options,
    CancellationToken cancellationToken = default
)
{
    cancellationToken.ThrowIfCancellationRequested();
    // `await Task.FromResult(...)` was a no-op round trip; return the
    // completed task directly (matches Factory.IsArchiveAsync's pattern).
    return Task.FromResult(OpenReader(stream, options));
}
}
}

View File

@@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Arj.Headers;
@@ -38,5 +39,15 @@ namespace SharpCompress.Factories
public IReader OpenReader(Stream stream, ReaderOptions? options) =>
ArjReader.Open(stream, options);
/// <summary>
/// Async wrapper over the synchronous <see cref="OpenReader(Stream, ReaderOptions?)"/>;
/// Arj reading has no true async path, so the result is returned as a
/// completed task. Throws <see cref="OperationCanceledException"/> if the
/// token is already cancelled.
/// </summary>
public Task<IReader> OpenReaderAsync(
    Stream stream,
    ReaderOptions? options,
    CancellationToken cancellationToken = default
)
{
    cancellationToken.ThrowIfCancellationRequested();
    // `await Task.FromResult(...)` was a no-op round trip; return the
    // completed task directly (matches Factory.IsArchiveAsync's pattern).
    return Task.FromResult(OpenReader(stream, options));
}
}
}

View File

@@ -1,6 +1,8 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -56,6 +58,18 @@ public abstract class Factory : IFactory
int bufferSize = ReaderOptions.DefaultBufferSize
);
/// <inheritdoc/>
// Default implementation: check cancellation, then run the synchronous
// IsArchive and surface its result as a completed task. Formats with a
// true async probe (e.g. Zip, GZip) override this.
public virtual Task<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
return Task.FromResult(IsArchive(stream, password, bufferSize));
}
/// <inheritdoc/>
public virtual FileInfo? GetFilePart(int index, FileInfo part1) => null;

View File

@@ -1,6 +1,8 @@
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.GZip;
using SharpCompress.Archives.Tar;
@@ -46,6 +48,14 @@ public class GZipFactory
int bufferSize = ReaderOptions.DefaultBufferSize
) => GZipArchive.IsGZipFile(stream);
/// <inheritdoc/>
// password and bufferSize are not used for GZip signature detection,
// matching the synchronous IsArchive overload above.
public override Task<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
) => GZipArchive.IsGZipFileAsync(stream, cancellationToken);
#endregion
#region IArchiveFactory
@@ -54,10 +64,24 @@ public class GZipFactory
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
GZipArchive.Open(stream, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => GZipArchive.OpenAsync(stream, readerOptions, cancellationToken);
/// <inheritdoc/>
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
GZipArchive.Open(fileInfo, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => GZipArchive.OpenAsync(fileInfo, readerOptions, cancellationToken);
#endregion
#region IMultiArchiveFactory
@@ -66,10 +90,24 @@ public class GZipFactory
public IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null) =>
GZipArchive.Open(streams, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => GZipArchive.OpenAsync(streams, readerOptions, cancellationToken);
/// <inheritdoc/>
public IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null) =>
GZipArchive.Open(fileInfos, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => GZipArchive.OpenAsync(fileInfos, readerOptions, cancellationToken);
#endregion
#region IReaderFactory
@@ -108,6 +146,17 @@ public class GZipFactory
public IReader OpenReader(Stream stream, ReaderOptions? options) =>
GZipReader.Open(stream, options);
/// <inheritdoc/>
public Task<IReader> OpenReaderAsync(
    Stream stream,
    ReaderOptions? options,
    CancellationToken cancellationToken = default
)
{
    cancellationToken.ThrowIfCancellationRequested();
    // `await Task.FromResult(...)` was a no-op round trip; return the
    // completed task directly (matches Factory.IsArchiveAsync's pattern).
    return Task.FromResult(OpenReader(stream, options));
}
#endregion
#region IWriterFactory
@@ -122,6 +171,17 @@ public class GZipFactory
return new GZipWriter(stream, new GZipWriterOptions(writerOptions));
}
/// <inheritdoc/>
public Task<IWriter> OpenAsync(
    Stream stream,
    WriterOptions writerOptions,
    CancellationToken cancellationToken = default
)
{
    cancellationToken.ThrowIfCancellationRequested();
    // `await Task.FromResult(...)` was a no-op round trip; return the
    // completed task directly (matches Factory.IsArchiveAsync's pattern).
    return Task.FromResult(Open(stream, writerOptions));
}
#endregion
#region IWriteableArchiveFactory

View File

@@ -1,5 +1,7 @@
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Readers;
namespace SharpCompress.Factories;
@@ -42,6 +44,20 @@ public interface IFactory
int bufferSize = ReaderOptions.DefaultBufferSize
);
/// <summary>
/// Returns true if the stream represents an archive of the format defined by this type asynchronously.
/// </summary>
/// <param name="stream">A stream, pointing to the beginning of the archive.</param>
/// <param name="password">optional password</param>
/// <param name="bufferSize">buffer size for reading</param>
/// <param name="cancellationToken">cancellation token</param>
/// <returns>A task that resolves to true when the stream matches this archive format.</returns>
Task<bool> IsArchiveAsync(
Stream stream,
string? password = null,
int bufferSize = ReaderOptions.DefaultBufferSize,
CancellationToken cancellationToken = default
);
/// <summary>
/// From a passed in archive (zip, rar, 7z, 001), return all parts.
/// </summary>

View File

@@ -1,5 +1,7 @@
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Rar;
using SharpCompress.Common;
@@ -47,10 +49,24 @@ public class RarFactory : Factory, IArchiveFactory, IMultiArchiveFactory, IReade
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
RarArchive.Open(stream, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => RarArchive.OpenAsync(stream, readerOptions, cancellationToken);
/// <inheritdoc/>
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
RarArchive.Open(fileInfo, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => RarArchive.OpenAsync(fileInfo, readerOptions, cancellationToken);
#endregion
#region IMultiArchiveFactory
@@ -59,10 +75,24 @@ public class RarFactory : Factory, IArchiveFactory, IMultiArchiveFactory, IReade
public IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null) =>
RarArchive.Open(streams, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => RarArchive.OpenAsync(streams, readerOptions, cancellationToken);
/// <inheritdoc/>
public IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null) =>
RarArchive.Open(fileInfos, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => RarArchive.OpenAsync(fileInfos, readerOptions, cancellationToken);
#endregion
#region IReaderFactory
@@ -71,5 +101,16 @@ public class RarFactory : Factory, IArchiveFactory, IMultiArchiveFactory, IReade
public IReader OpenReader(Stream stream, ReaderOptions? options) =>
RarReader.Open(stream, options);
/// <inheritdoc/>
public Task<IReader> OpenReaderAsync(
    Stream stream,
    ReaderOptions? options,
    CancellationToken cancellationToken = default
)
{
    cancellationToken.ThrowIfCancellationRequested();
    // `await Task.FromResult(...)` was a no-op round trip; return the
    // completed task directly (matches Factory.IsArchiveAsync's pattern).
    return Task.FromResult(OpenReader(stream, options));
}
#endregion
}

View File

@@ -1,5 +1,7 @@
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.SevenZip;
using SharpCompress.Common;
@@ -42,10 +44,24 @@ public class SevenZipFactory : Factory, IArchiveFactory, IMultiArchiveFactory
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
SevenZipArchive.Open(stream, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => SevenZipArchive.OpenAsync(stream, readerOptions, cancellationToken);
/// <inheritdoc/>
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
SevenZipArchive.Open(fileInfo, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => SevenZipArchive.OpenAsync(fileInfo, readerOptions, cancellationToken);
#endregion
#region IMultiArchiveFactory
@@ -54,10 +70,24 @@ public class SevenZipFactory : Factory, IArchiveFactory, IMultiArchiveFactory
public IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null) =>
SevenZipArchive.Open(streams, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => SevenZipArchive.OpenAsync(streams, readerOptions, cancellationToken);
/// <inheritdoc/>
public IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null) =>
SevenZipArchive.Open(fileInfos, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => SevenZipArchive.OpenAsync(fileInfos, readerOptions, cancellationToken);
#endregion
#region reader

View File

@@ -2,6 +2,8 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Tar;
using SharpCompress.Common;
@@ -67,10 +69,24 @@ public class TarFactory
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
TarArchive.Open(stream, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => TarArchive.OpenAsync(stream, readerOptions, cancellationToken);
/// <inheritdoc/>
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
TarArchive.Open(fileInfo, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => TarArchive.OpenAsync(fileInfo, readerOptions, cancellationToken);
#endregion
#region IMultiArchiveFactory
@@ -79,10 +95,24 @@ public class TarFactory
public IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null) =>
TarArchive.Open(streams, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => TarArchive.OpenAsync(streams, readerOptions, cancellationToken);
/// <inheritdoc/>
public IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null) =>
TarArchive.Open(fileInfos, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => TarArchive.OpenAsync(fileInfos, readerOptions, cancellationToken);
#endregion
#region IReaderFactory
@@ -234,6 +264,17 @@ public class TarFactory
public IReader OpenReader(Stream stream, ReaderOptions? options) =>
TarReader.Open(stream, options);
/// <inheritdoc/>
public Task<IReader> OpenReaderAsync(
    Stream stream,
    ReaderOptions? options,
    CancellationToken cancellationToken = default
)
{
    cancellationToken.ThrowIfCancellationRequested();
    // `await Task.FromResult(...)` was a no-op round trip; return the
    // completed task directly (matches Factory.IsArchiveAsync's pattern).
    return Task.FromResult(OpenReader(stream, options));
}
#endregion
#region IWriterFactory
@@ -242,6 +283,17 @@ public class TarFactory
public IWriter Open(Stream stream, WriterOptions writerOptions) =>
new TarWriter(stream, new TarWriterOptions(writerOptions));
/// <inheritdoc/>
public Task<IWriter> OpenAsync(
    Stream stream,
    WriterOptions writerOptions,
    CancellationToken cancellationToken = default
)
{
    // The underlying Open is synchronous; return a completed (or canceled)
    // task directly instead of the `async ... await Task.FromResult(...)`
    // anti-pattern, which allocates a state machine for no benefit.
    if (cancellationToken.IsCancellationRequested)
    {
        return Task.FromCanceled<IWriter>(cancellationToken);
    }
    return Task.FromResult(Open(stream, writerOptions));
}
#endregion
#region IWriteableArchiveFactory

View File

@@ -1,5 +1,7 @@
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
@@ -79,6 +81,49 @@ public class ZipFactory
return false;
}
/// <inheritdoc/>
public override async Task<bool> IsArchiveAsync(
    Stream stream,
    string? password = null,
    int bufferSize = ReaderOptions.DefaultBufferSize,
    CancellationToken cancellationToken = default
)
{
    cancellationToken.ThrowIfCancellationRequested();
    // Remember where probing started so the multipart check can rewind
    // (only meaningful for seekable streams; -1 otherwise).
    var startPosition = stream.CanSeek ? stream.Position : -1;
    // probe for single volume zip
    if (stream is not SharpCompressStream) // wrap to provide buffering before probing
    {
        stream = new SharpCompressStream(stream, bufferSize: bufferSize);
    }
    if (
        await ZipArchive
            .IsZipFileAsync(stream, password, bufferSize, cancellationToken)
            .ConfigureAwait(false) // library code: do not capture the sync context
    )
    {
        return true;
    }
    // probe for a multipart zip
    if (!stream.CanSeek)
    {
        return false;
    }
    stream.Position = startPosition;
    //test the zip (last) file of a multipart zip
    if (
        await ZipArchive
            .IsZipMultiAsync(stream, password, bufferSize, cancellationToken)
            .ConfigureAwait(false)
    )
    {
        return true;
    }
    stream.Position = startPosition;
    return false;
}
/// <inheritdoc/>
public override FileInfo? GetFilePart(int index, FileInfo part1) =>
ZipArchiveVolumeFactory.GetFilePart(index, part1);
@@ -91,10 +136,24 @@ public class ZipFactory
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
ZipArchive.Open(stream, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
Stream stream,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => ZipArchive.OpenAsync(stream, readerOptions, cancellationToken);
/// <inheritdoc/>
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
ZipArchive.Open(fileInfo, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
FileInfo fileInfo,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => ZipArchive.OpenAsync(fileInfo, readerOptions, cancellationToken);
#endregion
#region IMultiArchiveFactory
@@ -103,10 +162,24 @@ public class ZipFactory
public IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null) =>
ZipArchive.Open(streams, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
IReadOnlyList<Stream> streams,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => ZipArchive.OpenAsync(streams, readerOptions, cancellationToken);
/// <inheritdoc/>
public IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null) =>
ZipArchive.Open(fileInfos, readerOptions);
/// <inheritdoc/>
public Task<IArchive> OpenAsync(
IReadOnlyList<FileInfo> fileInfos,
ReaderOptions? readerOptions = null,
CancellationToken cancellationToken = default
) => ZipArchive.OpenAsync(fileInfos, readerOptions, cancellationToken);
#endregion
#region IReaderFactory
@@ -115,6 +188,17 @@ public class ZipFactory
public IReader OpenReader(Stream stream, ReaderOptions? options) =>
ZipReader.Open(stream, options);
/// <inheritdoc/>
public Task<IReader> OpenReaderAsync(
    Stream stream,
    ReaderOptions? options,
    CancellationToken cancellationToken = default
)
{
    // OpenReader is synchronous; skip the async state machine and the
    // pointless `await Task.FromResult(...)` round-trip. A pre-cancelled
    // token yields a canceled task, matching the awaited behaviour of the
    // original implementation.
    if (cancellationToken.IsCancellationRequested)
    {
        return Task.FromCanceled<IReader>(cancellationToken);
    }
    return Task.FromResult(OpenReader(stream, options));
}
#endregion
#region IWriterFactory
@@ -123,6 +207,17 @@ public class ZipFactory
public IWriter Open(Stream stream, WriterOptions writerOptions) =>
new ZipWriter(stream, new ZipWriterOptions(writerOptions));
/// <inheritdoc/>
public Task<IWriter> OpenAsync(
    Stream stream,
    WriterOptions writerOptions,
    CancellationToken cancellationToken = default
)
{
    // The underlying Open is synchronous; return a completed (or canceled)
    // task directly instead of allocating an async state machine for
    // `await Task.FromResult(...)`.
    if (cancellationToken.IsCancellationRequested)
    {
        return Task.FromCanceled<IWriter>(cancellationToken);
    }
    return Task.FromResult(Open(stream, writerOptions));
}
#endregion
#region IWriteableArchiveFactory

View File

@@ -1,97 +0,0 @@
using System.IO;
using SharpCompress.Common;
namespace SharpCompress.IO;
/// <summary>
/// Stream decorator that forwards all operations to a wrapped stream and
/// reports the running count of compressed bytes read to an
/// <see cref="IExtractionListener"/> after every successful read.
/// </summary>
internal class ListeningStream : Stream, IStreamStack
{
#if DEBUG_STREAMS
    long IStreamStack.InstanceId { get; set; }
#endif
    int IStreamStack.DefaultBufferSize { get; set; }

    Stream IStreamStack.BaseStream() => Stream;

    // This wrapper performs no buffering of its own, so the stream-stack
    // buffer accessors are inert no-ops.
    int IStreamStack.BufferSize
    {
        get => 0;
        set { return; }
    }
    int IStreamStack.BufferPosition
    {
        get => 0;
        set { return; }
    }

    void IStreamStack.SetPosition(long position) { }

    // Running total of bytes read through this wrapper; reported verbatim to
    // the listener on every Read/ReadByte.
    private long _currentEntryTotalReadBytes;
    private readonly IExtractionListener _listener;

    /// <summary>
    /// Wraps <paramref name="stream"/> and notifies <paramref name="listener"/>
    /// as compressed bytes are consumed.
    /// </summary>
    public ListeningStream(IExtractionListener listener, Stream stream)
    {
        Stream = stream;
        this._listener = listener;
#if DEBUG_STREAMS
        this.DebugConstruct(typeof(ListeningStream));
#endif
    }

    protected override void Dispose(bool disposing)
    {
#if DEBUG_STREAMS
        this.DebugDispose(typeof(ListeningStream));
#endif
        if (disposing)
        {
            // This wrapper owns the wrapped stream and always disposes it.
            Stream.Dispose();
        }
        base.Dispose(disposing);
    }

    /// <summary>The wrapped stream; all members delegate to it.</summary>
    public Stream Stream { get; }

    public override bool CanRead => Stream.CanRead;
    public override bool CanSeek => Stream.CanSeek;
    public override bool CanWrite => Stream.CanWrite;
    public override void Flush() => Stream.Flush();
    public override long Length => Stream.Length;

    public override long Position
    {
        get => Stream.Position;
        set => Stream.Position = value;
    }

    public override int Read(byte[] buffer, int offset, int count)
    {
        var read = Stream.Read(buffer, offset, count);
        _currentEntryTotalReadBytes += read;
        // NOTE(review): the same running total is passed as both the
        // "current part" and "overall" byte counts — confirm this is the
        // intended behaviour for multi-part archives.
        _listener.FireCompressedBytesRead(_currentEntryTotalReadBytes, _currentEntryTotalReadBytes);
        return read;
    }

    public override int ReadByte()
    {
        var value = Stream.ReadByte();
        if (value == -1)
        {
            // End of stream: nothing was read, so nothing is reported.
            return -1;
        }
        ++_currentEntryTotalReadBytes;
        _listener.FireCompressedBytesRead(_currentEntryTotalReadBytes, _currentEntryTotalReadBytes);
        return value;
    }

    // Note: seeking does not adjust _currentEntryTotalReadBytes; the counter
    // only tracks bytes that passed through Read/ReadByte.
    public override long Seek(long offset, SeekOrigin origin) => Stream.Seek(offset, origin);

    public override void SetLength(long value) => Stream.SetLength(value);

    public override void Write(byte[] buffer, int offset, int count) =>
        Stream.Write(buffer, offset, count);
}

View File

@@ -0,0 +1,160 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.IO;
/// <summary>
/// A stream wrapper that reports progress as data is read from the source.
/// Used to track compression or extraction progress by wrapping the source stream.
/// </summary>
internal sealed class ProgressReportingStream : Stream
{
    private readonly Stream _baseStream;
    private readonly IProgress<ProgressReport> _progress;
    // Identifier attached to every report so consumers can attribute progress
    // to a specific archive entry.
    private readonly string _entryPath;
    // Expected total bytes, or null when the source length is unknown.
    private readonly long? _totalBytes;
    // Cumulative bytes read through this wrapper; included in every report.
    private long _bytesTransferred;
    // When true, disposing this wrapper does not dispose the wrapped stream.
    private readonly bool _leaveOpen;

    /// <summary>
    /// Creates a progress-reporting wrapper around <paramref name="baseStream"/>.
    /// </summary>
    /// <param name="baseStream">The stream whose reads are tracked.</param>
    /// <param name="progress">Sink notified after every successful read.</param>
    /// <param name="entryPath">Entry key/path echoed in each report.</param>
    /// <param name="totalBytes">Total size if known; otherwise null.</param>
    /// <param name="leaveOpen">Keep <paramref name="baseStream"/> open on dispose.</param>
    public ProgressReportingStream(
        Stream baseStream,
        IProgress<ProgressReport> progress,
        string entryPath,
        long? totalBytes,
        bool leaveOpen = false
    )
    {
        _baseStream = baseStream;
        _progress = progress;
        _entryPath = entryPath;
        _totalBytes = totalBytes;
        _leaveOpen = leaveOpen;
    }

    public override bool CanRead => _baseStream.CanRead;
    public override bool CanSeek => _baseStream.CanSeek;
    // Read-only wrapper: writes are rejected (see Write below).
    public override bool CanWrite => false;
    public override long Length => _baseStream.Length;

    public override long Position
    {
        get => _baseStream.Position;
        // Setting Position directly would bypass the byte counting done in
        // the Read overloads, so it is disallowed.
        set =>
            throw new NotSupportedException(
                "Directly setting Position is not supported in ProgressReportingStream to maintain progress tracking integrity."
            );
    }

    public override void Flush() => _baseStream.Flush();

    public override int Read(byte[] buffer, int offset, int count)
    {
        var bytesRead = _baseStream.Read(buffer, offset, count);
        if (bytesRead > 0)
        {
            _bytesTransferred += bytesRead;
            ReportProgress();
        }
        return bytesRead;
    }

#if !NETFRAMEWORK && !NETSTANDARD2_0
    public override int Read(Span<byte> buffer)
    {
        var bytesRead = _baseStream.Read(buffer);
        if (bytesRead > 0)
        {
            _bytesTransferred += bytesRead;
            ReportProgress();
        }
        return bytesRead;
    }
#endif

    public override async Task<int> ReadAsync(
        byte[] buffer,
        int offset,
        int count,
        CancellationToken cancellationToken
    )
    {
        var bytesRead = await _baseStream
            .ReadAsync(buffer, offset, count, cancellationToken)
            .ConfigureAwait(false);
        if (bytesRead > 0)
        {
            _bytesTransferred += bytesRead;
            ReportProgress();
        }
        return bytesRead;
    }

#if !NETFRAMEWORK && !NETSTANDARD2_0
    public override async ValueTask<int> ReadAsync(
        Memory<byte> buffer,
        CancellationToken cancellationToken = default
    )
    {
        var bytesRead = await _baseStream
            .ReadAsync(buffer, cancellationToken)
            .ConfigureAwait(false);
        if (bytesRead > 0)
        {
            _bytesTransferred += bytesRead;
            ReportProgress();
        }
        return bytesRead;
    }
#endif

    public override int ReadByte()
    {
        var value = _baseStream.ReadByte();
        if (value != -1)
        {
            _bytesTransferred++;
            ReportProgress();
        }
        return value;
    }

    // NOTE(review): Seek is forwarded even though Position's setter throws,
    // and seeking does not adjust _bytesTransferred — confirm this asymmetry
    // is intended.
    public override long Seek(long offset, SeekOrigin origin) => _baseStream.Seek(offset, origin);

    // NOTE(review): forwarded despite CanWrite being false — confirm whether
    // this should throw like Write does.
    public override void SetLength(long value) => _baseStream.SetLength(value);

    public override void Write(byte[] buffer, int offset, int count) =>
        throw new NotSupportedException(
            "ProgressReportingStream is designed for read operations to track progress."
        );

    // Pushes the current cumulative count to the progress sink.
    private void ReportProgress()
    {
        _progress.Report(new ProgressReport(_entryPath, _bytesTransferred, _totalBytes));
    }

    protected override void Dispose(bool disposing)
    {
        if (disposing && !_leaveOpen)
        {
            _baseStream.Dispose();
        }
        base.Dispose(disposing);
    }

#if !NETFRAMEWORK && !NETSTANDARD2_0
    public override async ValueTask DisposeAsync()
    {
        if (!_leaveOpen)
        {
            await _baseStream.DisposeAsync().ConfigureAwait(false);
        }
        await base.DisposeAsync().ConfigureAwait(false);
    }
#endif
}

View File

@@ -57,14 +57,8 @@ public class SharpCompressStream : Stream, IStreamStack
{
ValidateBufferState(); // Add here
}
try
{
_internalPosition = Stream.Position;
}
catch
{
_internalPosition = 0;
}
// Check CanSeek before accessing Position to avoid exception overhead on non-seekable streams.
_internalPosition = Stream.CanSeek ? Stream.Position : 0;
}
}
}
@@ -136,14 +130,8 @@ public class SharpCompressStream : Stream, IStreamStack
_readOnly = !Stream.CanSeek;
((IStreamStack)this).SetBuffer(bufferSize, forceBuffer);
try
{
_baseInitialPos = stream.Position;
}
catch
{
_baseInitialPos = 0;
}
// Check CanSeek before accessing Position to avoid exception overhead on non-seekable streams.
_baseInitialPos = Stream.CanSeek ? Stream.Position : 0;
#if DEBUG_STREAMS
this.DebugConstruct(typeof(SharpCompressStream));

View File

@@ -3,6 +3,8 @@
using System;
using System.Buffers;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress;
@@ -41,6 +43,28 @@ internal static class StreamExtensions
ArrayPool<byte>.Shared.Return(temp);
}
}
/// <summary>
/// Asynchronously reads exactly <paramref name="count"/> bytes into
/// <paramref name="buffer"/> starting at <paramref name="offset"/>.
/// </summary>
/// <exception cref="EndOfStreamException">
/// The stream ended before the requested number of bytes was read.
/// </exception>
internal static async Task ReadExactlyAsync(
    this Stream stream,
    byte[] buffer,
    int offset,
    int count,
    CancellationToken cancellationToken
)
{
    for (var filled = 0; filled < count; )
    {
        var bytesRead = await stream
            .ReadAsync(buffer, offset + filled, count - filled, cancellationToken)
            .ConfigureAwait(false);
        if (bytesRead == 0)
        {
            // A zero-byte read means end-of-stream: the request cannot be satisfied.
            throw new EndOfStreamException();
        }
        filled += bytesRead;
    }
}
}
#endif

View File

@@ -5,13 +5,14 @@ using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
namespace SharpCompress.Readers;
/// <summary>
/// A generic push reader that reads unseekable compressed streams.
/// </summary>
public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtractionListener
public abstract class AbstractReader<TEntry, TVolume> : IReader
where TEntry : Entry
where TVolume : Volume
{
@@ -19,11 +20,6 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
private IEnumerator<TEntry>? _entriesForCurrentReadStream;
private bool _wroteCurrentEntry;
public event EventHandler<ReaderExtractionEventArgs<IEntry>>? EntryExtractionProgress;
public event EventHandler<CompressedBytesReadEventArgs>? CompressedBytesRead;
public event EventHandler<FilePartExtractionBeginEventArgs>? FilePartExtractionBegin;
internal AbstractReader(ReaderOptions options, ArchiveType archiveType)
{
ArchiveType = archiveType;
@@ -264,25 +260,58 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
internal void Write(Stream writeStream)
{
var streamListener = this as IReaderExtractionListener;
using Stream s = OpenEntryStream();
s.TransferTo(writeStream, Entry, streamListener);
var sourceStream = WrapWithProgress(s, Entry);
sourceStream.CopyTo(writeStream, 81920);
}
internal async Task WriteAsync(Stream writeStream, CancellationToken cancellationToken)
{
var streamListener = this as IReaderExtractionListener;
#if NETFRAMEWORK || NETSTANDARD2_0
using Stream s = OpenEntryStream();
await s.TransferToAsync(writeStream, Entry, streamListener, cancellationToken)
.ConfigureAwait(false);
using Stream s = await OpenEntryStreamAsync(cancellationToken).ConfigureAwait(false);
var sourceStream = WrapWithProgress(s, Entry);
await sourceStream.CopyToAsync(writeStream, 81920, cancellationToken).ConfigureAwait(false);
#else
await using Stream s = OpenEntryStream();
await s.TransferToAsync(writeStream, Entry, streamListener, cancellationToken)
.ConfigureAwait(false);
await using Stream s = await OpenEntryStreamAsync(cancellationToken).ConfigureAwait(false);
var sourceStream = WrapWithProgress(s, Entry);
await sourceStream.CopyToAsync(writeStream, 81920, cancellationToken).ConfigureAwait(false);
#endif
}
private Stream WrapWithProgress(Stream source, Entry entry)
{
var progress = Options.Progress;
if (progress is null)
{
return source;
}
var entryPath = entry.Key ?? string.Empty;
long? totalBytes = GetEntrySizeSafe(entry);
return new ProgressReportingStream(
source,
progress,
entryPath,
totalBytes,
leaveOpen: true
);
}
private static long? GetEntrySizeSafe(Entry entry)
{
try
{
var size = entry.Size;
// Return the actual size (including 0 for empty entries)
// Negative values indicate unknown size
return size >= 0 ? size : null;
}
catch (NotImplementedException)
{
return null;
}
}
public EntryStream OpenEntryStream()
{
if (_wroteCurrentEntry)
@@ -318,50 +347,18 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
protected virtual EntryStream GetEntryStream() =>
CreateEntryStream(Entry.Parts.First().GetCompressedStream());
protected virtual Task<EntryStream> GetEntryStreamAsync(
protected virtual async Task<EntryStream> GetEntryStreamAsync(
CancellationToken cancellationToken = default
) => Task.FromResult(GetEntryStream());
)
{
var stream = await Entry
.Parts.First()
.GetCompressedStreamAsync(cancellationToken)
.ConfigureAwait(false);
return CreateEntryStream(stream);
}
#endregion
IEntry IReader.Entry => Entry;
void IExtractionListener.FireCompressedBytesRead(
long currentPartCompressedBytes,
long compressedReadBytes
) =>
CompressedBytesRead?.Invoke(
this,
new CompressedBytesReadEventArgs(
currentFilePartCompressedBytesRead: currentPartCompressedBytes,
compressedBytesRead: compressedReadBytes
)
);
void IExtractionListener.FireFilePartExtractionBegin(
string name,
long size,
long compressedSize
) =>
FilePartExtractionBegin?.Invoke(
this,
new FilePartExtractionBeginEventArgs(
compressedSize: compressedSize,
size: size,
name: name
)
);
void IReaderExtractionListener.FireEntryExtractionProgress(
Entry entry,
long bytesTransferred,
int iterations
) =>
EntryExtractionProgress?.Invoke(
this,
new ReaderExtractionEventArgs<IEntry>(
entry,
new ReaderProgress(entry, bytesTransferred, iterations)
)
);
}

View File

@@ -8,11 +8,6 @@ namespace SharpCompress.Readers;
public interface IReader : IDisposable
{
event EventHandler<ReaderExtractionEventArgs<IEntry>> EntryExtractionProgress;
event EventHandler<CompressedBytesReadEventArgs> CompressedBytesRead;
event EventHandler<FilePartExtractionBeginEventArgs> FilePartExtractionBegin;
ArchiveType ArchiveType { get; }
IEntry Entry { get; }

View File

@@ -7,124 +7,121 @@ namespace SharpCompress.Readers;
public static class IReaderExtensions
{
public static void WriteEntryTo(this IReader reader, string filePath)
extension(IReader reader)
{
using Stream stream = File.Open(filePath, FileMode.Create, FileAccess.Write);
reader.WriteEntryTo(stream);
}
public static void WriteEntryTo(this IReader reader, FileInfo filePath)
{
using Stream stream = filePath.Open(FileMode.Create);
reader.WriteEntryTo(stream);
}
/// <summary>
/// Extract all remaining unread entries to specific directory, retaining filename
/// </summary>
public static void WriteAllToDirectory(
this IReader reader,
string destinationDirectory,
ExtractionOptions? options = null
)
{
while (reader.MoveToNextEntry())
public void WriteEntryTo(string filePath)
{
reader.WriteEntryToDirectory(destinationDirectory, options);
using Stream stream = File.Open(filePath, FileMode.Create, FileAccess.Write);
reader.WriteEntryTo(stream);
}
}
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteEntryToDirectory(
this IReader reader,
string destinationDirectory,
ExtractionOptions? options = null
) =>
ExtractionMethods.WriteEntryToDirectory(
reader.Entry,
destinationDirectory,
options,
reader.WriteEntryToFile
);
public void WriteEntryTo(FileInfo filePath)
{
using Stream stream = filePath.Open(FileMode.Create);
reader.WriteEntryTo(stream);
}
/// <summary>
/// Extract to specific file
/// </summary>
public static void WriteEntryToFile(
this IReader reader,
string destinationFileName,
ExtractionOptions? options = null
) =>
ExtractionMethods.WriteEntryToFile(
reader.Entry,
destinationFileName,
options,
(x, fm) =>
/// <summary>
/// Extract all remaining unread entries to specific directory, retaining filename
/// </summary>
public void WriteAllToDirectory(
string destinationDirectory,
ExtractionOptions? options = null
)
{
while (reader.MoveToNextEntry())
{
using var fs = File.Open(destinationFileName, fm);
reader.WriteEntryTo(fs);
reader.WriteEntryToDirectory(destinationDirectory, options);
}
);
}
/// <summary>
/// Extract to specific directory asynchronously, retaining filename
/// </summary>
public static async Task WriteEntryToDirectoryAsync(
this IReader reader,
string destinationDirectory,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default
) =>
await ExtractionMethods
.WriteEntryToDirectoryAsync(
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public void WriteEntryToDirectory(
string destinationDirectory,
ExtractionOptions? options = null
) =>
ExtractionMethods.WriteEntryToDirectory(
reader.Entry,
destinationDirectory,
options,
(fileName, opts) => reader.WriteEntryToFileAsync(fileName, opts, cancellationToken),
cancellationToken
)
.ConfigureAwait(false);
reader.WriteEntryToFile
);
/// <summary>
/// Extract to specific file asynchronously
/// </summary>
public static async Task WriteEntryToFileAsync(
this IReader reader,
string destinationFileName,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default
) =>
await ExtractionMethods
.WriteEntryToFileAsync(
/// <summary>
/// Extract to specific file
/// </summary>
public void WriteEntryToFile(
string destinationFileName,
ExtractionOptions? options = null
) =>
ExtractionMethods.WriteEntryToFile(
reader.Entry,
destinationFileName,
options,
async (x, fm) =>
(x, fm) =>
{
using var fs = File.Open(destinationFileName, fm);
await reader.WriteEntryToAsync(fs, cancellationToken).ConfigureAwait(false);
},
cancellationToken
)
.ConfigureAwait(false);
reader.WriteEntryTo(fs);
}
);
/// <summary>
/// Extract all remaining unread entries to specific directory asynchronously, retaining filename
/// </summary>
public static async Task WriteAllToDirectoryAsync(
this IReader reader,
string destinationDirectory,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default
)
{
while (reader.MoveToNextEntry())
{
await reader
.WriteEntryToDirectoryAsync(destinationDirectory, options, cancellationToken)
/// <summary>
/// Extract to specific directory asynchronously, retaining filename
/// </summary>
public async Task WriteEntryToDirectoryAsync(
string destinationDirectory,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default
) =>
await ExtractionMethods
.WriteEntryToDirectoryAsync(
reader.Entry,
destinationDirectory,
options,
reader.WriteEntryToFileAsync,
cancellationToken
)
.ConfigureAwait(false);
/// <summary>
/// Extract to specific file asynchronously
/// </summary>
public async Task WriteEntryToFileAsync(
string destinationFileName,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default
) =>
await ExtractionMethods
.WriteEntryToFileAsync(
reader.Entry,
destinationFileName,
options,
async (x, fm, ct) =>
{
using var fs = File.Open(destinationFileName, fm);
await reader.WriteEntryToAsync(fs, ct).ConfigureAwait(false);
},
cancellationToken
)
.ConfigureAwait(false);
/// <summary>
/// Extract all remaining unread entries to specific directory asynchronously, retaining filename
/// </summary>
public async Task WriteAllToDirectoryAsync(
string destinationDirectory,
ExtractionOptions? options = null,
CancellationToken cancellationToken = default
)
{
while (await reader.MoveToNextEntryAsync(cancellationToken))
{
await reader
.WriteEntryToDirectoryAsync(destinationDirectory, options, cancellationToken)
.ConfigureAwait(false);
}
}
}
}

View File

@@ -1,8 +0,0 @@
using SharpCompress.Common;
namespace SharpCompress.Readers;
/// <summary>
/// Extends <see cref="IExtractionListener"/> with per-entry extraction
/// progress notifications.
/// </summary>
public interface IReaderExtractionListener : IExtractionListener
{
    /// <summary>
    /// Notifies the listener of extraction progress for <paramref name="entry"/>.
    /// </summary>
    /// <param name="entry">The entry currently being extracted.</param>
    /// <param name="sizeTransferred">Total bytes transferred so far for the entry.</param>
    /// <param name="iterations">Number of read iterations performed so far.</param>
    void FireEntryExtractionProgress(Entry entry, long sizeTransferred, int iterations);
}

View File

@@ -1,4 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Readers;
@@ -11,4 +13,17 @@ public interface IReaderFactory : Factories.IFactory
/// <param name="options"></param>
/// <returns></returns>
IReader OpenReader(Stream stream, ReaderOptions? options);
/// <summary>
/// Opens a Reader asynchronously for Non-seeking usage
/// </summary>
/// <param name="stream"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
/// <returns></returns>
Task<IReader> OpenReaderAsync(
Stream stream,
ReaderOptions? options,
CancellationToken cancellationToken = default
);
}

View File

@@ -108,8 +108,7 @@ public abstract class RarReader : AbstractReader<RarReaderEntry, RarVolume>
}
var stream = new MultiVolumeReadOnlyStream(
CreateFilePartEnumerableForCurrentEntry().Cast<RarFilePart>(),
this
CreateFilePartEnumerableForCurrentEntry().Cast<RarFilePart>()
);
if (Entry.IsRarV3)
{
@@ -136,8 +135,7 @@ public abstract class RarReader : AbstractReader<RarReaderEntry, RarVolume>
}
var stream = new MultiVolumeReadOnlyStream(
CreateFilePartEnumerableForCurrentEntry().Cast<RarFilePart>(),
this
CreateFilePartEnumerableForCurrentEntry().Cast<RarFilePart>()
);
if (Entry.IsRarV3)
{

View File

@@ -1,6 +1,8 @@
using System;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Factories;
using SharpCompress.IO;
@@ -15,12 +17,46 @@ public static class ReaderFactory
return Open(new FileInfo(filePath), options);
}
/// <summary>
/// Opens a Reader from a filepath asynchronously
/// </summary>
/// <param name="filePath"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
/// <returns></returns>
public static Task<IReader> OpenAsync(
string filePath,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
filePath.NotNullOrEmpty(nameof(filePath));
return OpenAsync(new FileInfo(filePath), options, cancellationToken);
}
public static IReader Open(FileInfo fileInfo, ReaderOptions? options = null)
{
options ??= new ReaderOptions { LeaveStreamOpen = false };
return Open(fileInfo.OpenRead(), options);
}
/// <summary>
/// Opens a Reader from a FileInfo asynchronously
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
/// <returns></returns>
public static Task<IReader> OpenAsync(
FileInfo fileInfo,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
{
options ??= new ReaderOptions { LeaveStreamOpen = false };
return OpenAsync(fileInfo.OpenRead(), options, cancellationToken);
}
/// <summary>
/// Opens a Reader for Non-seeking usage
/// </summary>
@@ -73,4 +109,84 @@ public static class ReaderFactory
"Cannot determine compressed stream type. Supported Reader Formats: Arc, Arj, Zip, GZip, BZip2, Tar, Rar, LZip, XZ, ZStandard"
);
}
/// <summary>
/// Opens a Reader for Non-seeking usage asynchronously
/// </summary>
/// <param name="stream"></param>
/// <param name="options"></param>
/// <param name="cancellationToken"></param>
/// <returns></returns>
/// <summary>
/// Opens a Reader for Non-seeking usage asynchronously.
/// </summary>
/// <param name="stream">Stream to probe; wrapped in a buffering stream so formats can be detected.</param>
/// <param name="options">Reader options; defaults to closing the stream with the reader.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>A reader for the detected archive format.</returns>
/// <exception cref="InvalidFormatException">No factory recognised the stream.</exception>
public static async Task<IReader> OpenAsync(
    Stream stream,
    ReaderOptions? options = null,
    CancellationToken cancellationToken = default
)
{
    stream.NotNull(nameof(stream));
    options ??= new ReaderOptions() { LeaveStreamOpen = false };
    var bStream = new SharpCompressStream(stream, bufferSize: options.BufferSize);
    long pos = ((IStreamStack)bStream).GetPosition();
    var factories = Factories.Factory.Factories.OfType<Factories.Factory>();
    Factory? testedFactory = null;
    // Fast path: if the caller hinted the archive extension, probe the
    // matching factory before scanning all of them.
    if (!string.IsNullOrWhiteSpace(options.ExtensionHint))
    {
        testedFactory = factories.FirstOrDefault(a =>
            a.GetSupportedExtensions()
                .Contains(options.ExtensionHint, StringComparer.CurrentCultureIgnoreCase)
        );
        if (testedFactory is IReaderFactory readerFactory)
        {
            ((IStreamStack)bStream).StackSeek(pos);
            if (
                // ConfigureAwait(false) for consistency with the other awaits
                // in this method — library code must not capture the context.
                await testedFactory
                    .IsArchiveAsync(bStream, options.Password, options.BufferSize, cancellationToken)
                    .ConfigureAwait(false)
            )
            {
                ((IStreamStack)bStream).StackSeek(pos);
                return await readerFactory
                    .OpenReaderAsync(bStream, options, cancellationToken)
                    .ConfigureAwait(false);
            }
        }
        ((IStreamStack)bStream).StackSeek(pos);
    }
    // Probe every remaining factory, rewinding the stream between attempts.
    foreach (var factory in factories)
    {
        if (testedFactory == factory)
        {
            continue; // Already tested above
        }
        ((IStreamStack)bStream).StackSeek(pos);
        if (
            factory is IReaderFactory readerFactory
            && await factory
                .IsArchiveAsync(bStream, options.Password, options.BufferSize, cancellationToken)
                .ConfigureAwait(false)
        )
        {
            ((IStreamStack)bStream).StackSeek(pos);
            return await readerFactory
                .OpenReaderAsync(bStream, options, cancellationToken)
                .ConfigureAwait(false);
        }
    }
    throw new InvalidFormatException(
        "Cannot determine compressed stream type. Supported Reader Formats: Arc, Arj, Zip, GZip, BZip2, Tar, Rar, LZip, XZ, ZStandard"
    );
}
}

View File

@@ -1,3 +1,4 @@
using System;
using SharpCompress.Common;
namespace SharpCompress.Readers;
@@ -21,4 +22,10 @@ public class ReaderOptions : OptionsBase
/// Provide a hint for the extension of the archive being read, can speed up finding the correct decoder. Should be without the leading period in the form like: tar.gz or zip
/// </summary>
public string? ExtensionHint { get; set; }
/// <summary>
/// An optional progress reporter for tracking extraction operations.
/// When set, progress updates will be reported as entries are extracted.
/// </summary>
public IProgress<ProgressReport>? Progress { get; set; }
}

View File

@@ -1,21 +0,0 @@
using System;
using SharpCompress.Common;
namespace SharpCompress.Readers;
/// <summary>
/// Snapshot of extraction progress for a single entry.
/// </summary>
public class ReaderProgress
{
    private readonly IEntry _entry;

    /// <summary>Total bytes transferred so far.</summary>
    public long BytesTransferred { get; }

    /// <summary>Number of read iterations performed so far.</summary>
    public int Iterations { get; }

    /// <summary>Percentage read, rounded to the nearest whole number.</summary>
    public int PercentageRead => (int)Math.Round(PercentageReadExact);

    /// <summary>
    /// Exact percentage read. Computed in double precision (the previous
    /// (float) cast lost precision for large entries) and guarded against a
    /// non-positive entry size, which previously produced Infinity/NaN and an
    /// overflowing int cast in <see cref="PercentageRead"/>.
    /// </summary>
    public double PercentageReadExact =>
        _entry.Size > 0 ? (double)BytesTransferred / _entry.Size * 100 : 0;

    public ReaderProgress(IEntry entry, long bytesTransferred, int iterations)
    {
        _entry = entry;
        BytesTransferred = bytesTransferred;
        Iterations = iterations;
    }
}

View File

@@ -6,7 +6,6 @@ using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Readers;
namespace SharpCompress;
@@ -216,34 +215,6 @@ internal static class Utility
}
}
public static long TransferTo(
this Stream source,
Stream destination,
Common.Entry entry,
IReaderExtractionListener readerExtractionListener
)
{
var array = ArrayPool<byte>.Shared.Rent(TEMP_BUFFER_SIZE);
try
{
var iterations = 0;
long total = 0;
int count;
while ((count = source.Read(array, 0, array.Length)) != 0)
{
total += count;
destination.Write(array, 0, count);
iterations++;
readerExtractionListener.FireEntryExtractionProgress(entry, total, iterations);
}
return total;
}
finally
{
ArrayPool<byte>.Shared.Return(array);
}
}
public static async Task<long> TransferToAsync(
this Stream source,
Stream destination,
@@ -290,43 +261,6 @@ internal static class Utility
}
}
public static async Task<long> TransferToAsync(
this Stream source,
Stream destination,
Common.Entry entry,
IReaderExtractionListener readerExtractionListener,
CancellationToken cancellationToken = default
)
{
var array = ArrayPool<byte>.Shared.Rent(TEMP_BUFFER_SIZE);
try
{
var iterations = 0;
long total = 0;
int count;
while (
(
count = await source
.ReadAsync(array, 0, array.Length, cancellationToken)
.ConfigureAwait(false)
) != 0
)
{
total += count;
await destination
.WriteAsync(array, 0, count, cancellationToken)
.ConfigureAwait(false);
iterations++;
readerExtractionListener.FireEntryExtractionProgress(entry, total, iterations);
}
return total;
}
finally
{
ArrayPool<byte>.Shared.Return(array);
}
}
private static bool ReadTransferBlock(Stream source, byte[] array, int maxSize, out int count)
{
var size = maxSize;
@@ -472,6 +406,33 @@ internal static class Utility
return (total >= buffer.Length);
}
/// <summary>
/// Asynchronously reads until <paramref name="count"/> bytes have been placed
/// into <paramref name="buffer"/> at <paramref name="offset"/>, or the stream
/// is exhausted.
/// </summary>
/// <returns>
/// <c>true</c> when the requested number of bytes was read; <c>false</c> when
/// the stream ended first (a short read).
/// </returns>
public static async Task<bool> ReadFullyAsync(
    this Stream stream,
    byte[] buffer,
    int offset,
    int count,
    CancellationToken cancellationToken = default
)
{
    var filled = 0;
    while (filled < count)
    {
        var chunk = await stream
            .ReadAsync(buffer, offset + filled, count - filled, cancellationToken)
            .ConfigureAwait(false);
        if (chunk <= 0)
        {
            // End of stream before the request was satisfied.
            return false;
        }
        filled += chunk;
    }
    return true;
}
public static string TrimNulls(this string source) => source.Replace('\0', ' ').Trim();
/// <summary>

View File

@@ -3,6 +3,7 @@ using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
namespace SharpCompress.Writers;
@@ -22,6 +23,29 @@ public abstract class AbstractWriter(ArchiveType type, WriterOptions writerOptio
protected WriterOptions WriterOptions { get; } = writerOptions;
/// <summary>
/// Wraps the source stream with a progress-reporting stream if progress reporting is enabled.
/// </summary>
/// <param name="source">The source stream to wrap.</param>
/// <param name="entryPath">The path of the entry being written.</param>
/// <returns>A stream that reports progress, or the original stream if progress is not enabled.</returns>
protected Stream WrapWithProgress(Stream source, string entryPath)
{
if (WriterOptions.Progress is null)
{
return source;
}
long? totalBytes = source.CanSeek ? source.Length : null;
return new ProgressReportingStream(
source,
WriterOptions.Progress,
entryPath,
totalBytes,
leaveOpen: true
);
}
public abstract void Write(string filename, Stream source, DateTime? modificationTime);
public virtual async Task WriteAsync(

View File

@@ -47,7 +47,8 @@ public sealed class GZipWriter : AbstractWriter
var stream = (GZipStream)OutputStream;
stream.FileName = filename;
stream.LastModified = modificationTime;
source.CopyTo(stream);
var progressStream = WrapWithProgress(source, filename);
progressStream.CopyTo(stream);
_wroteToStream = true;
}

View File

@@ -1,4 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Factories;
namespace SharpCompress.Writers;
@@ -6,4 +8,10 @@ namespace SharpCompress.Writers;
public interface IWriterFactory : IFactory
{
IWriter Open(Stream stream, WriterOptions writerOptions);
Task<IWriter> OpenAsync(
Stream stream,
WriterOptions writerOptions,
CancellationToken cancellationToken = default
);
}

View File

@@ -129,7 +129,8 @@ public class TarWriter : AbstractWriter
header.Name = NormalizeFilename(filename);
header.Size = realSize;
header.Write(OutputStream);
size = source.TransferTo(OutputStream, realSize);
var progressStream = WrapWithProgress(source, filename);
size = progressStream.TransferTo(OutputStream, realSize);
PadTo512(size.Value);
}
@@ -161,7 +162,8 @@ public class TarWriter : AbstractWriter
header.Name = NormalizeFilename(filename);
header.Size = realSize;
header.Write(OutputStream);
var written = await source
var progressStream = WrapWithProgress(source, filename);
var written = await progressStream
.TransferToAsync(OutputStream, realSize, cancellationToken)
.ConfigureAwait(false);
PadTo512(written);

View File

@@ -1,6 +1,8 @@
using System;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Writers;
@@ -20,4 +22,33 @@ public static class WriterFactory
throw new NotSupportedException("Archive Type does not have a Writer: " + archiveType);
}
/// <summary>
/// Opens a Writer asynchronously for the requested archive type.
/// </summary>
/// <param name="stream">The stream to write to.</param>
/// <param name="archiveType">The archive type.</param>
/// <param name="writerOptions">Writer options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>A task that returns an IWriter.</returns>
public static async Task<IWriter> OpenAsync(
    Stream stream,
    ArchiveType archiveType,
    WriterOptions writerOptions,
    CancellationToken cancellationToken = default
)
{
    // Locate the first registered writer factory that handles this archive type.
    var writerFactory = Factories
        .Factory.Factories.OfType<IWriterFactory>()
        .FirstOrDefault(candidate => candidate.KnownArchiveType == archiveType);
    if (writerFactory is null)
    {
        throw new NotSupportedException("Archive Type does not have a Writer: " + archiveType);
    }
    return await writerFactory
        .OpenAsync(stream, writerOptions, cancellationToken)
        .ConfigureAwait(false);
}
}

View File

@@ -1,3 +1,4 @@
using System;
using SharpCompress.Common;
using D = SharpCompress.Compressors.Deflate;
@@ -36,6 +37,12 @@ public class WriterOptions : OptionsBase
/// </summary>
public int CompressionLevel { get; set; }
/// <summary>
/// An optional progress reporter for tracking compression operations.
/// When set, progress updates will be reported as entries are written.
/// </summary>
public IProgress<ProgressReport>? Progress { get; set; }
public static implicit operator WriterOptions(CompressionType compressionType) =>
new(compressionType);
}

View File

@@ -86,7 +86,8 @@ public class ZipWriter : AbstractWriter
public void Write(string entryPath, Stream source, ZipWriterEntryOptions zipWriterEntryOptions)
{
using var output = WriteToStream(entryPath, zipWriterEntryOptions);
source.CopyTo(output);
var progressStream = WrapWithProgress(source, entryPath);
progressStream.CopyTo(output);
}
public Stream WriteToStream(string entryPath, ZipWriterEntryOptions options)

View File

@@ -4,9 +4,9 @@
"net10.0": {
"JetBrains.Profiler.SelfApi": {
"type": "Direct",
"requested": "[2.5.14, )",
"resolved": "2.5.14",
"contentHash": "9+NcTe49B2M8/MOledSxKZkQKqavFf5xXZw4JL4bVu/KYiw6OOaD6cDQmNGSO18yUP/WoBXsXGKmZ9VOpmyadw==",
"requested": "[2.5.15, )",
"resolved": "2.5.15",
"contentHash": "Uc+GU4Mqwzw6i6C6wPWe2g9UwReUJzrWghCSN9818tSmNI7SJtqKQ1mgfuRylpJam+ED4Cbg2BNJE/LSVpokkg==",
"dependencies": {
"JetBrains.HabitatDetector": "1.4.5",
"JetBrains.Profiler.Api": "1.4.10"
@@ -34,16 +34,7 @@
}
},
"sharpcompress": {
"type": "Project",
"dependencies": {
"ZstdSharp.Port": "[0.8.6, )"
}
},
"ZstdSharp.Port": {
"type": "CentralTransitive",
"requested": "[0.8.6, )",
"resolved": "0.8.6",
"contentHash": "iP4jVLQoQmUjMU88g1WObiNr6YKZGvh4aOXn3yOJsHqZsflwRsxZPcIBvNXgjXO3vQKSLctXGLTpcBPLnWPS8A=="
"type": "Project"
}
}
}

View File

@@ -0,0 +1,24 @@
using System.Threading.Tasks;
using SharpCompress.Common;
using Xunit;
namespace SharpCompress.Test.Arc;
public class ArcReaderAsyncTests : ReaderTests
{
    public ArcReaderAsyncTests()
    {
        // Verification flags consumed by the ReaderTests base: compare extracted
        // files by extension rather than full name, and ignore case — see the
        // base class for exact semantics.
        UseExtensionInsteadOfNameToVerify = true;
        UseCaseInsensitiveToVerify = true;
    }
    // Uncompressed entries must report CompressionType.None.
    [Fact]
    public async Task Arc_Uncompressed_Read_Async() =>
        await ReadAsync("Arc.uncompressed.arc", CompressionType.None);
    [Fact]
    public async Task Arc_Squeezed_Read_Async() => await ReadAsync("Arc.squeezed.arc");
    [Fact]
    public async Task Arc_Crunched_Read_Async() => await ReadAsync("Arc.crunched.arc");
}

View File

@@ -1,12 +1,5 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
using SharpCompress.Readers.Arc;
using Xunit;
namespace SharpCompress.Test.Arc

View File

@@ -261,7 +261,7 @@ public class ArchiveTests : ReaderTests
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
using (var archive = ArchiveFactory.Open(new FileInfo(testArchive), readerOptions))
{
archive.ExtractToDirectory(SCRATCH_FILES_PATH);
archive.WriteToDirectory(SCRATCH_FILES_PATH);
}
VerifyFiles();
}

View File

@@ -8,7 +8,7 @@ using Xunit;
namespace SharpCompress.Test;
public class ExtractAll : TestBase
public class ExtractAllTests : TestBase
{
[Theory]
[InlineData("Zip.deflate.zip")]
@@ -18,27 +18,29 @@ public class ExtractAll : TestBase
[InlineData("7Zip.solid.7z")]
[InlineData("7Zip.nonsolid.7z")]
[InlineData("7Zip.LZMA.7z")]
public async Task ExtractAllEntries(string archivePath)
public async Task ExtractAllEntriesAsync(string archivePath)
{
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, archivePath);
var options = new ExtractionOptions() { ExtractFullPath = true, Overwrite = true };
using var archive = ArchiveFactory.Open(testArchive);
await archive.WriteToDirectoryAsync(SCRATCH_FILES_PATH, options);
}
if (archive.IsSolid || archive.Type == ArchiveType.SevenZip)
{
var reader = archive.ExtractAllEntries();
while (await reader.MoveToNextEntryAsync())
{
if (!reader.Entry.IsDirectory)
{
await reader.WriteEntryToDirectoryAsync(SCRATCH_FILES_PATH, options);
}
}
}
else
{
archive.ExtractToDirectory(SCRATCH_FILES_PATH, options);
}
[Theory]
[InlineData("Zip.deflate.zip")]
[InlineData("Rar5.rar")]
[InlineData("Rar.rar")]
[InlineData("Rar.solid.rar")]
[InlineData("7Zip.solid.7z")]
[InlineData("7Zip.nonsolid.7z")]
[InlineData("7Zip.LZMA.7z")]
public void ExtractAllEntriesSync(string archivePath)
{
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, archivePath);
var options = new ExtractionOptions() { ExtractFullPath = true, Overwrite = true };
using var archive = ArchiveFactory.Open(testArchive);
archive.WriteToDirectory(SCRATCH_FILES_PATH, options);
}
}

View File

@@ -0,0 +1,61 @@
using System.IO;
using System.Linq;
using SharpCompress.Archives;
using SharpCompress.Common;
using SharpCompress.Readers;
using Xunit;
namespace SharpCompress.Test;
/// <summary>
/// Exercises ExtractAllEntries on both solid and non-solid archives, covering
/// progress computation and the current restriction on non-solid archives.
/// </summary>
public class ExtractAllEntriesTests : TestBase
{
    [Fact]
    public void ExtractAllEntries_WithProgressReporting_NonSolidArchive()
    {
        var path = Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.zip");
        using var archive = ArchiveFactory.Open(path);

        // This test expects ExtractAllEntries to be rejected for this archive.
        Assert.Throws<SharpCompressException>(() =>
        {
            using var reader = archive.ExtractAllEntries();
        });
    }

    [Fact]
    public void ExtractAllEntries_WithProgressReporting_SolidArchive()
    {
        var path = Path.Combine(TEST_ARCHIVES_PATH, "Rar.solid.rar");
        using var archive = ArchiveFactory.Open(path);
        Assert.True(archive.IsSolid);

        // Total uncompressed size across all file entries, as user code would compute it.
        double totalSize = archive.Entries.Where(e => !e.IsDirectory).Sum(e => e.Size);
        long completed = 0;
        var reportCount = 0;

        using var reader = archive.ExtractAllEntries();
        while (reader.MoveToNextEntry())
        {
            if (reader.Entry.IsDirectory)
            {
                continue;
            }

            reader.WriteEntryToDirectory(
                SCRATCH_FILES_PATH,
                new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
            );
            completed += reader.Entry.Size;
            var fraction = completed / totalSize;
            reportCount++;
            Assert.True(fraction >= 0 && fraction <= 1.0);
        }

        Assert.True(reportCount > 0);
    }
}

View File

@@ -0,0 +1,70 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Test.Mocks;
/// <summary>
/// A test stream wrapper that forbids synchronous reads while forwarding
/// asynchronous reads to the inner stream. Used to verify that code under test
/// performs reads asynchronously. Writes, seeks and flushes are forwarded
/// synchronously as-is.
/// </summary>
public class AsyncOnlyStream : Stream
{
    private readonly Stream _stream;

    /// <summary>Wraps <paramref name="stream"/>; disposing this instance disposes it.</summary>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="stream"/> is null.</exception>
    public AsyncOnlyStream(Stream stream)
    {
        _stream = stream ?? throw new ArgumentNullException(nameof(stream));
    }

    public override bool CanRead => _stream.CanRead;
    public override bool CanSeek => _stream.CanSeek;
    public override bool CanWrite => _stream.CanWrite;
    public override long Length => _stream.Length;

    public override long Position
    {
        get => _stream.Position;
        set => _stream.Position = value;
    }

    public override void Flush() => _stream.Flush();

    /// <summary>Always throws: synchronous reads are deliberately unsupported.</summary>
    public override int Read(byte[] buffer, int offset, int count) =>
        throw new NotSupportedException("Synchronous Read is not supported");

    public override Task<int> ReadAsync(
        byte[] buffer,
        int offset,
        int count,
        CancellationToken cancellationToken
    ) => _stream.ReadAsync(buffer, offset, count, cancellationToken);

#if !NETFRAMEWORK && !NETSTANDARD2_0
    // Memory-based overload must also be forwarded; the base implementation
    // would otherwise fall back to the array overload of the wrapper.
    public override ValueTask<int> ReadAsync(
        Memory<byte> buffer,
        CancellationToken cancellationToken = default
    ) => _stream.ReadAsync(buffer, cancellationToken);
#endif

    public override long Seek(long offset, SeekOrigin origin) => _stream.Seek(offset, origin);

    public override void SetLength(long value) => _stream.SetLength(value);

    public override void Write(byte[] buffer, int offset, int count) =>
        _stream.Write(buffer, offset, count);

    protected override void Dispose(bool disposing)
    {
        if (disposing)
        {
            _stream.Dispose();
        }
        base.Dispose(disposing);
    }
}

View File

@@ -1,4 +1,7 @@
using System.IO;
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Test.Mocks;
@@ -35,6 +38,20 @@ public class TestStream(Stream stream, bool read, bool write, bool seek) : Strea
public override int Read(byte[] buffer, int offset, int count) =>
stream.Read(buffer, offset, count);
public override Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
) => stream.ReadAsync(buffer, offset, count, cancellationToken);
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
) => stream.ReadAsync(buffer, cancellationToken);
#endif
public override long Seek(long offset, SeekOrigin origin) => stream.Seek(offset, origin);
public override void SetLength(long value) => stream.SetLength(value);

View File

@@ -0,0 +1,605 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.Readers;
using SharpCompress.Writers;
using SharpCompress.Writers.Tar;
using SharpCompress.Writers.Zip;
using Xunit;
namespace SharpCompress.Test;
/// <summary>
/// Captures progress reports synchronously for test assertions.
/// Unlike Progress&lt;T&gt;, reports are recorded immediately with no
/// SynchronizationContext involvement.
/// </summary>
internal sealed class TestProgress<T> : IProgress<T>
{
    private readonly List<T> _captured = new();

    public IReadOnlyList<T> Reports => _captured;

    public void Report(T value)
    {
        _captured.Add(value);
    }
}
/// <summary>
/// End-to-end tests for ProgressReport plumbing: writers (Zip/Tar/GZip),
/// readers, and archive-entry extraction, in both sync and async variants.
/// </summary>
public class ProgressReportTests : TestBase
{
    // Builds a deterministic buffer of the given size filled with a single byte value.
    private static byte[] CreateTestData(int size, byte fillValue)
    {
        var data = new byte[size];
        for (var i = 0; i < size; i++)
        {
            data[i] = fillValue;
        }
        return data;
    }

    [Fact]
    public void Zip_Write_ReportsProgress()
    {
        var progress = new TestProgress<ProgressReport>();
        using var archiveStream = new MemoryStream();
        var options = new ZipWriterOptions(CompressionType.Deflate) { Progress = progress };
        using (var writer = new ZipWriter(archiveStream, options))
        {
            var testData = CreateTestData(10000, (byte)'A');
            using var sourceStream = new MemoryStream(testData);
            writer.Write("test.txt", sourceStream, DateTime.Now);
        }
        Assert.NotEmpty(progress.Reports);
        Assert.All(progress.Reports, p => Assert.Equal("test.txt", p.EntryPath));
        Assert.All(progress.Reports, p => Assert.Equal(10000, p.TotalBytes));
        var lastReport = progress.Reports[progress.Reports.Count - 1];
        Assert.Equal(10000, lastReport.BytesTransferred);
        Assert.Equal(100.0, lastReport.PercentComplete);
    }

    [Fact]
    public void Tar_Write_ReportsProgress()
    {
        var progress = new TestProgress<ProgressReport>();
        using var archiveStream = new MemoryStream();
        var options = new TarWriterOptions(CompressionType.None, true) { Progress = progress };
        using (var writer = new TarWriter(archiveStream, options))
        {
            var testData = CreateTestData(10000, (byte)'A');
            using var sourceStream = new MemoryStream(testData);
            writer.Write("test.txt", sourceStream, DateTime.Now);
        }
        Assert.NotEmpty(progress.Reports);
        Assert.All(progress.Reports, p => Assert.Equal("test.txt", p.EntryPath));
        Assert.All(progress.Reports, p => Assert.Equal(10000, p.TotalBytes));
        var lastReport = progress.Reports[progress.Reports.Count - 1];
        Assert.Equal(10000, lastReport.BytesTransferred);
        Assert.Equal(100.0, lastReport.PercentComplete);
    }

    [Fact]
    public void Zip_Read_ReportsProgress()
    {
        var progress = new TestProgress<ProgressReport>();
        // First create a zip archive
        using var archiveStream = new MemoryStream();
        using (
            var writer = new ZipWriter(archiveStream, new ZipWriterOptions(CompressionType.Deflate))
        )
        {
            var testData = CreateTestData(10000, (byte)'A');
            using var sourceStream = new MemoryStream(testData);
            writer.Write("test.txt", sourceStream, DateTime.Now);
        }
        // Now read it with progress reporting
        archiveStream.Position = 0;
        var readerOptions = new ReaderOptions { Progress = progress };
        using (var reader = ReaderFactory.Open(archiveStream, readerOptions))
        {
            while (reader.MoveToNextEntry())
            {
                if (!reader.Entry.IsDirectory)
                {
                    using var extractedStream = new MemoryStream();
                    reader.WriteEntryTo(extractedStream);
                }
            }
        }
        Assert.NotEmpty(progress.Reports);
        Assert.All(progress.Reports, p => Assert.Equal("test.txt", p.EntryPath));
        var lastReport = progress.Reports[progress.Reports.Count - 1];
        Assert.Equal(10000, lastReport.BytesTransferred);
    }

    [Fact]
    public void ZipArchive_Entry_WriteTo_ReportsProgress()
    {
        var progress = new TestProgress<ProgressReport>();
        // First create a zip archive
        using var archiveStream = new MemoryStream();
        using (
            var writer = new ZipWriter(archiveStream, new ZipWriterOptions(CompressionType.Deflate))
        )
        {
            var testData = CreateTestData(10000, (byte)'A');
            using var sourceStream = new MemoryStream(testData);
            writer.Write("test.txt", sourceStream, DateTime.Now);
        }
        // Now open as archive and extract entry with progress as parameter
        archiveStream.Position = 0;
        using var archive = ZipArchive.Open(archiveStream);
        foreach (var entry in archive.Entries)
        {
            if (!entry.IsDirectory)
            {
                using var extractedStream = new MemoryStream();
                entry.WriteTo(extractedStream, progress);
            }
        }
        Assert.NotEmpty(progress.Reports);
        Assert.All(progress.Reports, p => Assert.Equal("test.txt", p.EntryPath));
        var lastReport = progress.Reports[progress.Reports.Count - 1];
        Assert.Equal(10000, lastReport.BytesTransferred);
    }

    [Fact]
    public async Task ZipArchive_Entry_WriteToAsync_ReportsProgress()
    {
        var progress = new TestProgress<ProgressReport>();
        // First create a zip archive
        using var archiveStream = new MemoryStream();
        using (
            var writer = new ZipWriter(archiveStream, new ZipWriterOptions(CompressionType.Deflate))
        )
        {
            var testData = CreateTestData(10000, (byte)'A');
            using var sourceStream = new MemoryStream(testData);
            writer.Write("test.txt", sourceStream, DateTime.Now);
        }
        // Now open as archive and extract entry async with progress as parameter
        archiveStream.Position = 0;
        using var archive = ZipArchive.Open(archiveStream);
        foreach (var entry in archive.Entries)
        {
            if (!entry.IsDirectory)
            {
                using var extractedStream = new MemoryStream();
                await entry.WriteToAsync(extractedStream, progress, CancellationToken.None);
            }
        }
        Assert.NotEmpty(progress.Reports);
        Assert.All(progress.Reports, p => Assert.Equal("test.txt", p.EntryPath));
        var lastReport = progress.Reports[progress.Reports.Count - 1];
        Assert.Equal(10000, lastReport.BytesTransferred);
    }

    // Writing with no Progress configured must not throw and must still produce output.
    [Fact]
    public void WriterOptions_WithoutProgress_DoesNotThrow()
    {
        using var archiveStream = new MemoryStream();
        var options = new ZipWriterOptions(CompressionType.Deflate);
        Assert.Null(options.Progress);
        using (var writer = new ZipWriter(archiveStream, options))
        {
            var testData = CreateTestData(100, (byte)'A');
            using var sourceStream = new MemoryStream(testData);
            writer.Write("test.txt", sourceStream, DateTime.Now);
        }
        Assert.True(archiveStream.Length > 0);
    }

    // Reading with no Progress configured must not throw.
    [Fact]
    public void ReaderOptions_WithoutProgress_DoesNotThrow()
    {
        // First create a zip archive
        using var archiveStream = new MemoryStream();
        using (
            var writer = new ZipWriter(archiveStream, new ZipWriterOptions(CompressionType.Deflate))
        )
        {
            var testData = CreateTestData(100, (byte)'A');
            using var sourceStream = new MemoryStream(testData);
            writer.Write("test.txt", sourceStream, DateTime.Now);
        }
        // Read without progress
        archiveStream.Position = 0;
        var readerOptions = new ReaderOptions();
        Assert.Null(readerOptions.Progress);
        using (var reader = ReaderFactory.Open(archiveStream, readerOptions))
        {
            while (reader.MoveToNextEntry())
            {
                if (!reader.Entry.IsDirectory)
                {
                    using var extractedStream = new MemoryStream();
                    reader.WriteEntryTo(extractedStream);
                }
            }
        }
    }

    [Fact]
    public void ZipArchive_WithoutProgress_DoesNotThrow()
    {
        // First create a zip archive
        using var archiveStream = new MemoryStream();
        using (
            var writer = new ZipWriter(archiveStream, new ZipWriterOptions(CompressionType.Deflate))
        )
        {
            var testData = CreateTestData(100, (byte)'A');
            using var sourceStream = new MemoryStream(testData);
            writer.Write("test.txt", sourceStream, DateTime.Now);
        }
        // Open archive and extract without progress
        archiveStream.Position = 0;
        using var archive = ZipArchive.Open(archiveStream);
        foreach (var entry in archive.Entries)
        {
            if (!entry.IsDirectory)
            {
                using var extractedStream = new MemoryStream();
                entry.WriteTo(extractedStream);
            }
        }
    }

    [Fact]
    public void ProgressReport_PercentComplete_WithUnknownTotalBytes_ReturnsNull()
    {
        var progress = new ProgressReport("test.txt", 100, null);
        Assert.Null(progress.PercentComplete);
    }

    [Fact]
    public void ProgressReport_PercentComplete_WithZeroTotalBytes_ReturnsNull()
    {
        var progress = new ProgressReport("test.txt", 0, 0);
        Assert.Null(progress.PercentComplete);
    }

    [Fact]
    public void ProgressReport_Properties_AreSetCorrectly()
    {
        var progress = new ProgressReport("path/to/file.txt", 500, 1000);
        Assert.Equal("path/to/file.txt", progress.EntryPath);
        Assert.Equal(500, progress.BytesTransferred);
        Assert.Equal(1000, progress.TotalBytes);
        Assert.Equal(50.0, progress.PercentComplete);
    }

    [Fact]
    public void Tar_Read_ReportsProgress()
    {
        var progress = new TestProgress<ProgressReport>();
        // Create a tar archive first
        using var archiveStream = new MemoryStream();
        using (
            var writer = new TarWriter(
                archiveStream,
                new TarWriterOptions(CompressionType.None, true)
            )
        )
        {
            var testData = CreateTestData(10000, (byte)'B');
            using var sourceStream = new MemoryStream(testData);
            writer.Write("data.bin", sourceStream, DateTime.Now);
        }
        // Now read it with progress reporting
        archiveStream.Position = 0;
        var readerOptions = new ReaderOptions { Progress = progress };
        using (var reader = ReaderFactory.Open(archiveStream, readerOptions))
        {
            while (reader.MoveToNextEntry())
            {
                if (!reader.Entry.IsDirectory)
                {
                    using var extractedStream = new MemoryStream();
                    reader.WriteEntryTo(extractedStream);
                }
            }
        }
        Assert.NotEmpty(progress.Reports);
        Assert.All(progress.Reports, p => Assert.Equal("data.bin", p.EntryPath));
        var lastReport = progress.Reports[progress.Reports.Count - 1];
        Assert.Equal(10000, lastReport.BytesTransferred);
    }

    [Fact]
    public void TarArchive_Entry_WriteTo_ReportsProgress()
    {
        var progress = new TestProgress<ProgressReport>();
        // Create a tar archive first
        using var archiveStream = new MemoryStream();
        using (
            var writer = new TarWriter(
                archiveStream,
                new TarWriterOptions(CompressionType.None, true)
            )
        )
        {
            var testData = CreateTestData(10000, (byte)'C');
            using var sourceStream = new MemoryStream(testData);
            writer.Write("file.dat", sourceStream, DateTime.Now);
        }
        // Now open as archive and extract entry with progress as parameter
        archiveStream.Position = 0;
        using var archive = SharpCompress.Archives.Tar.TarArchive.Open(archiveStream);
        foreach (var entry in archive.Entries)
        {
            if (!entry.IsDirectory)
            {
                using var extractedStream = new MemoryStream();
                entry.WriteTo(extractedStream, progress);
            }
        }
        Assert.NotEmpty(progress.Reports);
        Assert.All(progress.Reports, p => Assert.Equal("file.dat", p.EntryPath));
        var lastReport = progress.Reports[progress.Reports.Count - 1];
        Assert.Equal(10000, lastReport.BytesTransferred);
    }

    [Fact]
    public async Task TarArchive_Entry_WriteToAsync_ReportsProgress()
    {
        var progress = new TestProgress<ProgressReport>();
        // Create a tar archive first
        using var archiveStream = new MemoryStream();
        using (
            var writer = new TarWriter(
                archiveStream,
                new TarWriterOptions(CompressionType.None, true)
            )
        )
        {
            var testData = CreateTestData(10000, (byte)'D');
            using var sourceStream = new MemoryStream(testData);
            writer.Write("async.dat", sourceStream, DateTime.Now);
        }
        // Now open as archive and extract entry async with progress as parameter
        archiveStream.Position = 0;
        using var archive = SharpCompress.Archives.Tar.TarArchive.Open(archiveStream);
        foreach (var entry in archive.Entries)
        {
            if (!entry.IsDirectory)
            {
                using var extractedStream = new MemoryStream();
                await entry.WriteToAsync(extractedStream, progress, CancellationToken.None);
            }
        }
        Assert.NotEmpty(progress.Reports);
        Assert.All(progress.Reports, p => Assert.Equal("async.dat", p.EntryPath));
        var lastReport = progress.Reports[progress.Reports.Count - 1];
        Assert.Equal(10000, lastReport.BytesTransferred);
    }

    [Fact]
    public void Zip_Read_MultipleEntries_ReportsProgress()
    {
        var progress = new TestProgress<ProgressReport>();
        // Create a zip archive with multiple entries
        using var archiveStream = new MemoryStream();
        using (
            var writer = new ZipWriter(archiveStream, new ZipWriterOptions(CompressionType.Deflate))
        )
        {
            var testData1 = CreateTestData(5000, (byte)'A');
            using var sourceStream1 = new MemoryStream(testData1);
            writer.Write("file1.txt", sourceStream1, DateTime.Now);
            var testData2 = CreateTestData(8000, (byte)'B');
            using var sourceStream2 = new MemoryStream(testData2);
            writer.Write("file2.txt", sourceStream2, DateTime.Now);
        }
        // Now read it with progress reporting
        archiveStream.Position = 0;
        var readerOptions = new ReaderOptions { Progress = progress };
        using (var reader = ReaderFactory.Open(archiveStream, readerOptions))
        {
            while (reader.MoveToNextEntry())
            {
                if (!reader.Entry.IsDirectory)
                {
                    using var extractedStream = new MemoryStream();
                    reader.WriteEntryTo(extractedStream);
                }
            }
        }
        Assert.NotEmpty(progress.Reports);
        // Should have reports for both files
        var file1Reports = progress.Reports.Where(p => p.EntryPath == "file1.txt").ToList();
        var file2Reports = progress.Reports.Where(p => p.EntryPath == "file2.txt").ToList();
        Assert.NotEmpty(file1Reports);
        Assert.NotEmpty(file2Reports);
        // Verify final bytes for each file
        Assert.Equal(5000, file1Reports[file1Reports.Count - 1].BytesTransferred);
        Assert.Equal(8000, file2Reports[file2Reports.Count - 1].BytesTransferred);
    }

    [Fact]
    public void ZipArchive_MultipleEntries_WriteTo_ReportsProgress()
    {
        var progress = new TestProgress<ProgressReport>();
        // Create a zip archive with multiple entries
        using var archiveStream = new MemoryStream();
        using (
            var writer = new ZipWriter(archiveStream, new ZipWriterOptions(CompressionType.Deflate))
        )
        {
            var testData1 = CreateTestData(5000, (byte)'A');
            using var sourceStream1 = new MemoryStream(testData1);
            writer.Write("entry1.txt", sourceStream1, DateTime.Now);
            var testData2 = CreateTestData(7000, (byte)'B');
            using var sourceStream2 = new MemoryStream(testData2);
            writer.Write("entry2.txt", sourceStream2, DateTime.Now);
        }
        // Now open as archive and extract entries with progress as parameter
        archiveStream.Position = 0;
        using var archive = ZipArchive.Open(archiveStream);
        foreach (var entry in archive.Entries)
        {
            if (!entry.IsDirectory)
            {
                using var extractedStream = new MemoryStream();
                entry.WriteTo(extractedStream, progress);
            }
        }
        Assert.NotEmpty(progress.Reports);
        // Should have reports for both files
        var entry1Reports = progress.Reports.Where(p => p.EntryPath == "entry1.txt").ToList();
        var entry2Reports = progress.Reports.Where(p => p.EntryPath == "entry2.txt").ToList();
        Assert.NotEmpty(entry1Reports);
        Assert.NotEmpty(entry2Reports);
        // Verify final bytes for each entry
        Assert.Equal(5000, entry1Reports[entry1Reports.Count - 1].BytesTransferred);
        Assert.Equal(7000, entry2Reports[entry2Reports.Count - 1].BytesTransferred);
    }

    [Fact]
    public async Task Zip_ReadAsync_ReportsProgress()
    {
        var progress = new TestProgress<ProgressReport>();
        // Create a zip archive
        using var archiveStream = new MemoryStream();
        using (
            var writer = new ZipWriter(archiveStream, new ZipWriterOptions(CompressionType.Deflate))
        )
        {
            var testData = CreateTestData(10000, (byte)'E');
            using var sourceStream = new MemoryStream(testData);
            writer.Write("async_read.txt", sourceStream, DateTime.Now);
        }
        // Now read it with progress reporting
        archiveStream.Position = 0;
        var readerOptions = new ReaderOptions { Progress = progress };
        using (var reader = ReaderFactory.Open(archiveStream, readerOptions))
        {
            while (reader.MoveToNextEntry())
            {
                if (!reader.Entry.IsDirectory)
                {
                    using var extractedStream = new MemoryStream();
                    await reader.WriteEntryToAsync(extractedStream);
                }
            }
        }
        Assert.NotEmpty(progress.Reports);
        Assert.All(progress.Reports, p => Assert.Equal("async_read.txt", p.EntryPath));
        var lastReport = progress.Reports[progress.Reports.Count - 1];
        Assert.Equal(10000, lastReport.BytesTransferred);
    }

    [Fact]
    public void GZip_Write_ReportsProgress()
    {
        var progress = new TestProgress<ProgressReport>();
        using var archiveStream = new MemoryStream();
        var options = new SharpCompress.Writers.GZip.GZipWriterOptions { Progress = progress };
        using (var writer = new SharpCompress.Writers.GZip.GZipWriter(archiveStream, options))
        {
            var testData = CreateTestData(10000, (byte)'G');
            using var sourceStream = new MemoryStream(testData);
            writer.Write("gzip_test.txt", sourceStream, DateTime.Now);
        }
        Assert.NotEmpty(progress.Reports);
        Assert.All(progress.Reports, p => Assert.Equal("gzip_test.txt", p.EntryPath));
        Assert.All(progress.Reports, p => Assert.Equal(10000, p.TotalBytes));
        var lastReport = progress.Reports[progress.Reports.Count - 1];
        Assert.Equal(10000, lastReport.BytesTransferred);
        Assert.Equal(100.0, lastReport.PercentComplete);
    }

    [Fact]
    public async Task Tar_WriteAsync_ReportsProgress()
    {
        var progress = new TestProgress<ProgressReport>();
        using var archiveStream = new MemoryStream();
        var options = new TarWriterOptions(CompressionType.None, true) { Progress = progress };
        using (var writer = new TarWriter(archiveStream, options))
        {
            var testData = CreateTestData(10000, (byte)'A');
            using var sourceStream = new MemoryStream(testData);
            await writer.WriteAsync("test.txt", sourceStream, DateTime.Now);
        }
        Assert.NotEmpty(progress.Reports);
        Assert.All(progress.Reports, p => Assert.Equal("test.txt", p.EntryPath));
        var lastReport = progress.Reports[progress.Reports.Count - 1];
        Assert.Equal(10000, lastReport.BytesTransferred);
    }
}

View File

@@ -112,7 +112,7 @@ public abstract class ReaderTests : TestBase
protected async Task ReadAsync(
string testArchive,
CompressionType expectedCompression,
CompressionType? expectedCompression = null,
ReaderOptions? options = null,
CancellationToken cancellationToken = default
)
@@ -131,7 +131,7 @@ public abstract class ReaderTests : TestBase
private async Task ReadImplAsync(
string testArchive,
CompressionType expectedCompression,
CompressionType? expectedCompression,
ReaderOptions options,
CancellationToken cancellationToken = default
)
@@ -158,7 +158,7 @@ public abstract class ReaderTests : TestBase
public async Task UseReaderAsync(
IReader reader,
CompressionType expectedCompression,
CompressionType? expectedCompression,
CancellationToken cancellationToken = default
)
{
@@ -166,7 +166,11 @@ public abstract class ReaderTests : TestBase
{
if (!reader.Entry.IsDirectory)
{
Assert.Equal(expectedCompression, reader.Entry.CompressionType);
if (expectedCompression.HasValue)
{
Assert.Equal(expectedCompression, reader.Entry.CompressionType);
}
await reader.WriteEntryToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true },

View File

@@ -91,9 +91,10 @@ public class WriterTests : TestBase
SharpCompressStream.Create(stream, leaveOpen: true),
readerOptions
);
reader.WriteAllToDirectory(
await reader.WriteAllToDirectoryAsync(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true }
new ExtractionOptions { ExtractFullPath = true },
cancellationToken
);
}
VerifyFiles();

View File

@@ -194,4 +194,38 @@ public class ZipArchiveAsyncTests : ArchiveTests
}
VerifyFiles();
}
[Fact]
public async Task Zip_Deflate_Archive_WriteToDirectoryAsync()
{
    // Extracts the whole archive asynchronously, then verifies the output files.
    using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.zip")))
    using (var archive = ZipArchive.Open(stream))
    {
        await archive.WriteToDirectoryAsync(
            SCRATCH_FILES_PATH,
            new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
        );
    }
    VerifyFiles();
}
[Fact]
public async Task Zip_Deflate_Archive_WriteToDirectoryAsync_WithProgress()
{
    // Collect every progress callback so we can assert at least one was raised.
    var progressReports = new System.Collections.Generic.List<ProgressReport>();
    var progress = new Progress<ProgressReport>(report => progressReports.Add(report));
    using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.zip")))
    using (var archive = ZipArchive.Open(stream))
    {
        await archive.WriteToDirectoryAsync(
            SCRATCH_FILES_PATH,
            new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
            progress
        );
    }
    VerifyFiles();
    Assert.True(progressReports.Count > 0, "Progress reports should be generated");
}
}

View File

@@ -1,7 +1,9 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.Zip;
using SharpCompress.Test.Mocks;
@@ -162,9 +164,11 @@ public class ZipReaderAsyncTests : ReaderTests
public async Task Zip_Reader_Disposal_Test2_Async()
{
using var stream = new TestStream(
File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.dd.zip"))
new AsyncOnlyStream(
File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.dd.zip"))
)
);
var reader = ReaderFactory.Open(stream);
var reader = await ReaderFactory.OpenAsync(stream);
while (await reader.MoveToNextEntryAsync())
{
if (!reader.Entry.IsDirectory)
@@ -183,8 +187,8 @@ public class ZipReaderAsyncTests : ReaderTests
await Assert.ThrowsAsync<NotSupportedException>(async () =>
{
using (
Stream stream = File.OpenRead(
Path.Combine(TEST_ARCHIVES_PATH, "Zip.lzma.WinzipAES.zip")
Stream stream = new AsyncOnlyStream(
File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.lzma.WinzipAES.zip"))
)
)
using (var reader = ZipReader.Open(stream, new ReaderOptions { Password = "test" }))
@@ -208,8 +212,8 @@ public class ZipReaderAsyncTests : ReaderTests
public async Task Zip_Deflate_WinzipAES_Read_Async()
{
using (
Stream stream = File.OpenRead(
Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.WinzipAES.zip")
Stream stream = new AsyncOnlyStream(
File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.deflate.WinzipAES.zip"))
)
)
using (var reader = ZipReader.Open(stream, new ReaderOptions { Password = "test" }))
@@ -233,7 +237,11 @@ public class ZipReaderAsyncTests : ReaderTests
public async Task Zip_Deflate_ZipCrypto_Read_Async()
{
var count = 0;
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "zipcrypto.zip")))
using (
Stream stream = new AsyncOnlyStream(
File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "zipcrypto.zip"))
)
)
using (var reader = ZipReader.Open(stream, new ReaderOptions { Password = "test" }))
{
while (await reader.MoveToNextEntryAsync())