Mirror of https://github.com/adamhathcock/sharpcompress.git (synced 2026-02-11 21:22:10 +00:00)

Compare commits: 144 commits (adam/more-… ... copilot/fi…)
SHA1 of the 144 compared commits:

89420d43cf, 0696bf5efc, 3ad39f96da, 649729d520, 31ed7b822e, 8a54f253d5, d0baa16502, 4178c382e1,
775a9bf144, 57cd2f12d0, b2066fc022, 4639748461, 42e118db68, 8b375b9179, e4ad307413, e7609329d7,
64a8a9c1dc, 51aa6d782a, e040e5e449, 3aca691ea0, a0a7da9254, 960920993a, e1df6557d1, 34f4314b86,
bd99c1ab27, 2e364ac0eb, 0dcfbf56c6, 399572d9d6, e3c3b50ac1, e21631526b, cf0ad9b323, 938692ef33,
84c49f152e, 04dd177f19, 2e074e18d4, 4f04122eb8, 4475c2af73, ed52ed1e73, b67b4fd57f, 1ba438d4c7,
77c5fbd739, 756cb7bd9d, a8c06386a3, b3038010d9, ed6c774f08, c37209618d, 0048452efa, c4a28e7cfb,
29197f2142, 2a4081362e, d5cab8172b, 4084b347d4, 5437d9ff8c, 7b746c49cf, 1da178a4be, 2b74807f5e,
38d295b089, 690e1d8f6b, cc6e410be8, 87cab8e16a, e85752ca1d, 2860896640, 8b6d96c9ca, c96acf18dc,
6eec6faff2, 118fbbea64, bbc664ddcc, 2fa8196105, d4f1dafabb, f2483be1da, 7914b7ddaf, 0848a1b940,
ca262920c8, 2541c09973, 2f9b6422c3, c21c7eb5ee, 4e4de625ae, 3fb8387419, 240468f968, 778a930266,
db544211f5, d12b539720, d26db95aff, 7231b7b35c, 73fd2d70ba, c026e02390, 9676d6ccc9, fad7cf3b1d,
17cb952772, 923217ef0e, 0d81d7f243, 2070ab282f, faacff414d, 642b8bddb8, 27f7221902, eb738b44a8,
57c0d00b37, 43276b32b7, 94b275c41b, ae4ae799b9, 9def35f78a, 7e8005a9d8, b93ed79ef3, c7b8021c2e,
3af0e1091c, 1323c96bc8, 94716a5ba9, f67168f479, 7e54f91bfa, 3ab4478275, 8759cf08ff, 8cff7cb551,
1b0ec2410d, 22d15f73f0, 4e0d78d6c8, 63a1927838, 3d745bfa05, ce26d50792, 01e162fcc4, 443f7b8b0c,
08d64ee8a1, eedc7c7a0f, 3198b32008, dff17a95e8, 236ee215b9, 6af612fd54, 361e695380, 8a8784a974,
ddc01527bd, e6ad44def8, df63e152c1, ad7e64ba43, 8737b7a38e, 13199fcfd1, 9b8e3d8530, 0b15b60506,
46e2ea8507, 62b8fc92d1, cb27b117b4, 6112b2d1d9, 0e2f8068a6, 227e70926b, 86e412cf77, 037b6842bf
@@ -3,7 +3,7 @@
"isRoot": true,
"tools": {
"csharpier": {
"version": "1.2.5",
"version": "1.2.6",
"commands": [
"csharpier"
],
@@ -1,7 +0,0 @@
enabled: true
agent:
name: copilot-coding-agent
allow:
- paths: ["src/**/*", "tests/**/*", "README.md", "AGENTS.md"]
actions: ["create", "modify"]
require_review_before_merge: true
17 .github/agents/copilot-agent.yml vendored
@@ -1,17 +0,0 @@
enabled: true
agent:
name: copilot-coding-agent
allow:
- paths: ["src/**/*", "tests/**/*", "README.md", "AGENTS.md"]
actions: ["create", "modify", "delete"]
require_review_before_merge: true
required_approvals: 1
allowed_merge_strategies:
- squash
- merge
auto_merge_on_green: false
run_workflows: true
notes: |
- This manifest expresses the policy for the Copilot coding agent in this repository.
- It does NOT install or authorize the agent; a repository admin must install the Copilot coding agent app and grant the repository the necessary permissions (contents: write, pull_requests: write, checks: write, actions: write/read, issues: write) to allow the agent to act.
- Keep allow paths narrow and prefer require_review_before_merge during initial rollout.
25 .github/prompts/plan-async.prompt.md vendored
@@ -1,25 +0,0 @@
# Plan: Implement Missing Async Functionality in SharpCompress

SharpCompress has async support for low-level stream operations and Reader/Writer APIs, but critical entry points (Archive.Open, factory methods, initialization) remain synchronous. This plan adds async overloads for all user-facing I/O operations and fixes existing async bugs, enabling full end-to-end async workflows.

## Steps

1. **Add async factory methods** to [ArchiveFactory.cs](src/SharpCompress/Factories/ArchiveFactory.cs), [ReaderFactory.cs](src/SharpCompress/Factories/ReaderFactory.cs), and [WriterFactory.cs](src/SharpCompress/Factories/WriterFactory.cs) with `OpenAsync` and `CreateAsync` overloads accepting `CancellationToken`

2. **Implement async Open methods** on concrete archive types ([ZipArchive.cs](src/SharpCompress/Archives/Zip/ZipArchive.cs), [TarArchive.cs](src/SharpCompress/Archives/Tar/TarArchive.cs), [RarArchive.cs](src/SharpCompress/Archives/Rar/RarArchive.cs), [GZipArchive.cs](src/SharpCompress/Archives/GZip/GZipArchive.cs), [SevenZipArchive.cs](src/SharpCompress/Archives/SevenZip/SevenZipArchive.cs)) and reader types ([ZipReader.cs](src/SharpCompress/Readers/Zip/ZipReader.cs), [TarReader.cs](src/SharpCompress/Readers/Tar/TarReader.cs), etc.)

3. **Convert archive initialization logic to async** including header reading, volume loading, and format signature detection across archive constructors and internal initialization methods

4. **Fix LZMA decoder async bugs** in [LzmaStream.cs](src/SharpCompress/Compressors/LZMA/LzmaStream.cs), [Decoder.cs](src/SharpCompress/Compressors/LZMA/Decoder.cs), and [OutWindow.cs](src/SharpCompress/Compressors/LZMA/OutWindow.cs) to enable true async 7Zip support and remove `NonDisposingStream` workaround

5. **Complete Rar async implementation** by converting `UnpackV2017` methods to async in [UnpackV2017.cs](src/SharpCompress/Compressors/Rar/UnpackV2017.cs) and updating Rar20 decompression

6. **Add comprehensive async tests** covering all new async entry points, cancellation scenarios, and concurrent operations across all archive formats in test files

## Further Considerations

1. **Breaking changes** - Should new async methods be added alongside existing sync methods (non-breaking), or should sync methods eventually be deprecated? Recommend additive approach for backward compatibility.

2. **Performance impact** - Header parsing for formats like Zip/Tar is often small; consider whether truly async parsing adds value vs sync parsing wrapped in Task, or make it conditional based on stream type (network vs file).

3. **7Zip complexity** - The LZMA async bug fix (Step 4) may be challenging due to state management in the decoder; consider whether to scope it separately or implement a simpler workaround that maintains correctness.
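For context, a minimal sketch of the kind of `OpenAsync` overload that step 1 of the removed plan describes. The wrapper class name and the `Task.Run` fallback are illustrative assumptions, not the library's actual API; real support would push async all the way into header parsing (step 3).

```csharp
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Readers;

// Hypothetical shape of an async factory entry point (names and fallback are assumptions).
public static class ArchiveFactoryAsyncSketch
{
    public static async Task<IArchive> OpenAsync(
        string filePath,
        ReaderOptions? options = null,
        CancellationToken cancellationToken = default)
    {
        cancellationToken.ThrowIfCancellationRequested();

        // Open the file for asynchronous I/O; a complete implementation would also
        // read archive headers asynchronously inside the concrete archive types.
        var stream = new FileStream(
            filePath,
            FileMode.Open,
            FileAccess.Read,
            FileShare.Read,
            bufferSize: 81920,
            useAsync: true);

        // Placeholder: still funnels into the existing synchronous open.
        return await Task.Run(() => ArchiveFactory.Open(stream, options), cancellationToken)
            .ConfigureAwait(false);
    }
}
```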
123 .github/prompts/plan-for-next.prompt.md vendored
@@ -1,123 +0,0 @@
# Plan: Modernize SharpCompress Public API

Based on comprehensive analysis, the API has several inconsistencies around factory patterns, async support, format capabilities, and options classes. Most improvements can be done incrementally without breaking changes.

## Steps

1. **Standardize factory patterns** by deprecating format-specific static `Open` methods in [Archives/Zip/ZipArchive.cs](src/SharpCompress/Archives/Zip/ZipArchive.cs), [Archives/Tar/TarArchive.cs](src/SharpCompress/Archives/Tar/TarArchive.cs), etc. in favor of centralized [Factories/ArchiveFactory.cs](src/SharpCompress/Factories/ArchiveFactory.cs)

2. **Complete async implementation** in [Writers/Zip/ZipWriter.cs](src/SharpCompress/Writers/Zip/ZipWriter.cs) and other writers that currently use sync-over-async, implementing true async I/O throughout the writer hierarchy

3. **Unify options classes** by making [Common/ExtractionOptions.cs](src/SharpCompress/Common/ExtractionOptions.cs) inherit from `OptionsBase` and adding progress reporting to extraction methods consistently

4. **Clarify GZip semantics** in [Archives/GZip/GZipArchive.cs](src/SharpCompress/Archives/GZip/GZipArchive.cs) by adding XML documentation explaining single-entry limitation and relationship to GZip compression used in Tar.gz

## Further Considerations

1. **Breaking changes roadmap** - Should we plan a major version (2.0) to remove deprecated factory methods, clean up `ArchiveType` enum (remove Arc/Arj or add full support), and consolidate naming patterns?

2. **Progress reporting consistency** - Should `IProgress<ArchiveExtractionProgress<IEntry>>` be added to all extraction extension methods or consolidated into options classes?

## Detailed Analysis

### Factory Pattern Issues

Three different factory patterns exist with overlapping functionality:

1. **Static Factories**: ArchiveFactory, ReaderFactory, WriterFactory
2. **Instance Factories**: IArchiveFactory, IReaderFactory, IWriterFactory
3. **Format-specific static methods**: Each archive class has static `Open` methods

**Example confusion:**

```csharp
// Three ways to open a Zip archive - which is recommended?
var archive1 = ArchiveFactory.Open("file.zip");
var archive2 = ZipArchive.Open("file.zip");
var archive3 = ArchiveFactory.AutoFactory.Open(fileInfo, options);
```

### Async Support Gaps

Base `IWriter` interface has async methods, but writer implementations provide minimal async support. Most writers just call synchronous methods:

```csharp
public virtual async Task WriteAsync(...)
{
// Default implementation calls synchronous version
Write(filename, source, modificationTime);
await Task.CompletedTask.ConfigureAwait(false);
}
```

Real async implementations only in:
- `TarWriter` - Proper async implementation
- Most other writers use sync-over-async
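For contrast with the default shown above, a sketch of what a genuinely asynchronous override could look like. The class and member names are placeholders, not TarWriter's actual implementation.

```csharp
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;

// Placeholder writer: the point is that the entry payload is copied with awaited I/O
// instead of calling the synchronous Write and returning a completed task.
public sealed class TrulyAsyncWriterSketch
{
    private readonly Stream _destination;

    public TrulyAsyncWriterSketch(Stream destination) => _destination = destination;

    public async Task WriteAsync(
        string filename,
        Stream source,
        DateTime? modificationTime,
        CancellationToken cancellationToken = default)
    {
        // A real writer would first emit the format-specific entry header
        // for `filename` and `modificationTime`.
        await source.CopyToAsync(_destination, 81920, cancellationToken).ConfigureAwait(false);
        await _destination.FlushAsync(cancellationToken).ConfigureAwait(false);
    }
}
```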
### GZip Archive Special Case

GZip is treated as both a compression format and an archive format, but only supports single-entry archives:

```csharp
protected override GZipArchiveEntry CreateEntryInternal(...)
{
if (Entries.Any())
{
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
}
// ...
}
```

### Options Class Hierarchy

```
OptionsBase (LeaveStreamOpen, ArchiveEncoding)
├─ ReaderOptions (LookForHeader, Password, DisableCheckIncomplete, BufferSize, ExtensionHint, Progress)
├─ WriterOptions (CompressionType, CompressionLevel, Progress)
│   ├─ ZipWriterOptions (ArchiveComment, UseZip64)
│   ├─ TarWriterOptions (FinalizeArchiveOnClose, HeaderFormat)
│   └─ GZipWriterOptions (no additional properties)
└─ ExtractionOptions (standalone - Overwrite, ExtractFullPath, PreserveFileTime, PreserveAttributes)
```

**Issues:**
- `ExtractionOptions` doesn't inherit from `OptionsBase` - no encoding support during extraction
- Progress reporting inconsistency between readers and extraction
- Obsolete properties (`ChecksumIsValid`, `Version`) with unclear migration path
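To make the first issue above concrete, here is one possible shape of the unification that step 3 proposes. This is a hypothetical sketch, not the current class, and the `OptionsBase` namespace is assumed.

```csharp
using SharpCompress.Common; // OptionsBase namespace assumed

// Hypothetical: if ExtractionOptions derived from OptionsBase it would inherit
// LeaveStreamOpen and ArchiveEncoding instead of standing alone.
public class UnifiedExtractionOptionsSketch : OptionsBase
{
    public bool Overwrite { get; set; }
    public bool ExtractFullPath { get; set; } = true;
    public bool PreserveFileTime { get; set; } = true;
    public bool PreserveAttributes { get; set; }
}
```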
### Implementation Priorities

**High Priority (Non-Breaking):**
1. Add API usage guide (Archive vs Reader, factory recommendations, async best practices)
2. Fix progress reporting consistency
3. Complete async implementation in writers

**Medium Priority (Next Major Version):**
1. Unify factory pattern - deprecate format-specific static `Open` methods
2. Clean up options classes - make `ExtractionOptions` inherit from `OptionsBase`
3. Clarify archive types - remove Arc/Arj from `ArchiveType` enum or add full support
4. Standardize naming across archive types

**Low Priority:**
1. Add BZip2 archive support similar to GZipArchive
2. Complete obsolete property cleanup with migration guide

### Backward Compatibility Strategy

**Safe (Non-Breaking) Changes:**
- Add new methods to interfaces (use default implementations)
- Add new options properties (with defaults)
- Add new factory methods
- Improve async implementations
- Add progress reporting support

**Breaking Changes to Avoid:**
- ❌ Removing format-specific `Open` methods (deprecate instead)
- ❌ Changing `LeaveStreamOpen` default (currently `true`)
- ❌ Removing obsolete properties before major version bump
- ❌ Changing return types or signatures of existing methods

**Deprecation Pattern:**
- Use `[Obsolete]` for one major version
- Use `[EditorBrowsable(EditorBrowsableState.Never)]` in next major version
- Remove in following major version
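A small illustration of the deprecation pattern listed above, using standard .NET attributes; applying it to a format-specific `Open` method is the plan's proposal, not something already done in this change set, and the method names below are made up.

```csharp
using System;
using System.ComponentModel;
using SharpCompress.Archives;

public static class ZipArchiveDeprecationSketch
{
    // Major version N: warn but keep the member working.
    [Obsolete("Use ArchiveFactory.Open(...) instead.")]
    public static IArchive OpenV1(string filePath) => ArchiveFactory.Open(filePath);

    // Major version N+1: additionally hide the member from IntelliSense.
    [Obsolete("Use ArchiveFactory.Open(...) instead.")]
    [EditorBrowsable(EditorBrowsableState.Never)]
    public static IArchive OpenV2(string filePath) => ArchiveFactory.Open(filePath);

    // Major version N+2: the member is removed entirely.
}
```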
50 .github/workflows/performance-benchmarks.yml vendored Normal file
@@ -0,0 +1,50 @@
name: Performance Benchmarks

on:
push:
branches:
- 'master'
- 'release'
pull_request:
branches:
- 'master'
- 'release'
workflow_dispatch:

permissions:
contents: read

jobs:
benchmark:
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v6
with:
fetch-depth: 0

- uses: actions/setup-dotnet@v5
with:
dotnet-version: 10.0.x

- name: Build Performance Project
run: dotnet build tests/SharpCompress.Performance/SharpCompress.Performance.csproj --configuration Release

- name: Run Benchmarks
run: dotnet run --project tests/SharpCompress.Performance/SharpCompress.Performance.csproj --configuration Release --no-build -- --filter "*" --exporters json markdown --artifacts benchmark-results
continue-on-error: true

- name: Display Benchmark Results
if: always()
run: dotnet run --project build/build.csproj -- display-benchmark-results

- name: Compare with Baseline
if: always()
run: dotnet run --project build/build.csproj -- compare-benchmark-results

- name: Upload Benchmark Results
if: always()
uses: actions/upload-artifact@v6
with:
name: benchmark-results
path: benchmark-results/
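For reference, a hypothetical example of the kind of BenchmarkDotNet benchmark this workflow runs; the real tests/SharpCompress.Performance project defines its own benchmarks, and the class name and archive path below are made up. `MemoryDiagnoser` is what produces the Allocated column that the `compare-benchmark-results` target parses later.

```csharp
using System.IO;
using BenchmarkDotNet.Attributes;
using BenchmarkDotNet.Running;
using SharpCompress.Archives;

[MemoryDiagnoser]
public class ZipEnumerationBenchmark
{
    private byte[] _zipBytes = default!;

    [GlobalSetup]
    public void Setup() =>
        _zipBytes = File.ReadAllBytes("tests/TestArchives/Zip/sample.zip"); // hypothetical path

    [Benchmark]
    public int EnumerateEntries()
    {
        using var archive = ArchiveFactory.Open(new MemoryStream(_zipBytes));
        var count = 0;
        foreach (var entry in archive.Entries)
        {
            count++;
        }
        return count;
    }
}

public static class BenchmarkEntryPoint
{
    // BenchmarkSwitcher honors the --filter/--exporters/--artifacts arguments
    // that the workflow step above passes through.
    public static void Main(string[] args) =>
        BenchmarkSwitcher.FromAssembly(typeof(ZipEnumerationBenchmark).Assembly).Run(args);
}
```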
4 .gitignore vendored
@@ -17,6 +17,10 @@ tests/TestArchives/*/Scratch2
tools
.idea/
artifacts/
BenchmarkDotNet.Artifacts/
baseline-artifacts/
profiler-snapshots/

.DS_Store
*.snupkg
benchmark-results/
99 AGENTS.md
@@ -6,21 +6,25 @@ applyTo: '**/*.cs'
# SharpCompress Development

## About SharpCompress
SharpCompress is a pure C# compression library supporting multiple archive formats (Zip, Tar, GZip, BZip2, 7Zip, Rar, LZip, XZ, ZStandard) for .NET Framework 4.62, .NET Standard 2.1, .NET 6.0, and .NET 8.0. The library provides both seekable Archive APIs and forward-only Reader/Writer APIs for streaming scenarios.
SharpCompress is a pure C# compression library supporting multiple archive formats (Zip, Tar, GZip, BZip2, 7Zip, Rar, LZip, XZ, ZStandard). The project currently targets .NET Framework 4.8, .NET Standard 2.0, .NET 8.0, and .NET 10.0. The library provides both seekable Archive APIs and forward-only Reader/Writer APIs for streaming scenarios.

## C# Instructions
- Always use the latest version C#, currently C# 13 features.
- Write clear and concise comments for each function.
- Use language features supported by the current project toolchain (`LangVersion=latest`) and existing codebase patterns.
- Add comments for non-obvious logic and important design decisions; avoid redundant comments.
- Follow the existing code style and patterns in the codebase.

## General Instructions
- **Agents should NEVER commit to git** - Agents should stage files and leave committing to the user. Only create commits when the user explicitly requests them.
- **Do not commit or stage changes unless the user explicitly asks for it.**
- Make only high confidence suggestions when reviewing code changes.
- Write code with good maintainability practices, including comments on why certain design decisions were made.
- Handle edge cases and write clear exception handling.
- For libraries or external dependencies, mention their usage and purpose in comments.
- Preserve backward compatibility when making changes to public APIs.

### Workspace Hygiene
- Do not edit generated or machine-local files unless required for the task (for example: `bin/`, `obj/`, `*.csproj.user`).
- Avoid broad formatting-only diffs in unrelated files.

## Naming Conventions

- Follow PascalCase for component names, method names, and public members.
@@ -64,7 +68,7 @@ SharpCompress is a pure C# compression library supporting multiple archive forma

## Project Setup and Structure

- The project targets multiple frameworks: .NET Framework 4.62, .NET Standard 2.1, .NET 6.0, and .NET 8.0
- The project targets multiple frameworks: .NET Framework 4.8, .NET Standard 2.0, .NET 8.0, and .NET 10.0
- Main library is in `src/SharpCompress/`
- Tests are in `tests/SharpCompress.Test/`
- Performance tests are in `tests/SharpCompress.Performance/`
@@ -89,13 +93,18 @@ src/SharpCompress/
tests/SharpCompress.Test/
├── Zip/, Tar/, Rar/, SevenZip/, GZip/, BZip2/ # Format-specific tests
├── TestBase.cs # Base test class with helper methods
└── TestArchives/ # Test data (not checked into main test project)

tests/
├── SharpCompress.Test/ # Unit/integration tests
├── SharpCompress.Performance/ # Benchmark tests
└── TestArchives/ # Test data archives
```

### Factory Pattern
All format types implement factory interfaces (`IArchiveFactory`, `IReaderFactory`, `IWriterFactory`) for auto-detection:
- `ReaderFactory.Open()` - Auto-detects format by probing stream
- `WriterFactory.Open()` - Creates writer for specified `ArchiveType`
Factory implementations can implement one or more interfaces (`IArchiveFactory`, `IReaderFactory`, `IWriterFactory`) depending on format capabilities:
- `ArchiveFactory.OpenArchive()` - Opens archive API objects from seekable streams/files
- `ReaderFactory.OpenReader()` - Auto-detects and opens forward-only readers
- `WriterFactory.OpenWriter()` - Creates a writer for a specified `ArchiveType`
- Factories located in: `src/SharpCompress/Factories/`
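A quick usage sketch of the three factory entry points listed above, using the method names from this document; the namespaces and exact signatures are assumptions and may differ from the shipped API.

```csharp
using System.IO;
using SharpCompress.Common;
using SharpCompress.Factories; // assumed namespace for the factory classes

class FactoryUsageSketch
{
    static void Demo()
    {
        // Seekable input: Archive API with random access to entries.
        using var archive = ArchiveFactory.OpenArchive("input.zip");

        // Forward-only input: Reader API with format auto-detection.
        using var input = File.OpenRead("input.tar.gz");
        using var reader = ReaderFactory.OpenReader(input);

        // Output: writer for an explicit ArchiveType and compression.
        using var output = File.Create("output.zip");
        using var writer = WriterFactory.OpenWriter(output, ArchiveType.Zip, CompressionType.Deflate);
    }
}
```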
## Nullable Reference Types
@@ -166,71 +175,31 @@ SharpCompress supports multiple archive and compression formats:
- Test stream disposal and `LeaveStreamOpen` behavior.
- Test edge cases: empty archives, large files, corrupted archives, encrypted archives.

### Validation Expectations
- Run targeted tests for the changed area first.
- Run `dotnet csharpier format .` after code edits.
- Run `dotnet csharpier check .` before handing off changes.

### Test Organization
- Base class: `TestBase` - Provides `TEST_ARCHIVES_PATH`, `SCRATCH_FILES_PATH`, temp directory management
- Framework: xUnit with AwesomeAssertions
- Test archives: `tests/TestArchives/` - Use existing archives, don't create new ones unnecessarily
- Match naming style of nearby test files

### Public API Change Checklist
- Preserve existing public method signatures and behavior when possible.
- If a breaking change is unavoidable, document it and provide a migration path.
- Add or update tests that cover backward compatibility expectations.

### Stream Ownership and Position Checklist
- Verify `LeaveStreamOpen` behavior for externally owned streams.
- Validate behavior for both seekable and non-seekable streams.
- Ensure stream position assumptions are explicit and tested.
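A sketch of the kind of test the stream-ownership checklist asks for, written in the xUnit style used by SharpCompress.Test; the archive path is hypothetical and the option shape follows the docs elsewhere in this diff.

```csharp
using System.IO;
using SharpCompress.Archives;
using SharpCompress.Readers;
using Xunit;

public class StreamOwnershipSketchTests
{
    [Fact]
    public void Disposing_Archive_Leaves_External_Stream_Open()
    {
        using var stream = File.OpenRead("tests/TestArchives/sample.zip"); // hypothetical path
        var options = new ReaderOptions { LeaveStreamOpen = true };

        using (var archive = ArchiveFactory.Open(stream, options))
        {
            Assert.NotEmpty(archive.Entries);
        }

        // The archive must not have closed the caller-owned stream.
        Assert.True(stream.CanRead);
    }
}
```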
## Common Pitfalls

1. **Don't mix Archive and Reader APIs** - Archive needs seekable stream, Reader doesn't
2. **Solid archives (Rar, 7Zip)** - Use `ExtractAllEntries()` for best performance, not individual entry extraction
3. **Stream disposal** - Always set `LeaveStreamOpen` explicitly when needed (default is to close)
4. **Tar + non-seekable stream** - Must provide file size or it will throw
6. **Format detection** - Use `ReaderFactory.Open()` for auto-detection, test with actual archive files
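To illustrate pitfall 2 above: for solid Rar/7Zip archives, stream every entry once through `ExtractAllEntries()` instead of extracting entries one by one. The extraction options shown follow the older API surface quoted elsewhere in this diff and may differ after this change set.

```csharp
using SharpCompress.Archives.Rar;
using SharpCompress.Common;
using SharpCompress.Readers;

class SolidArchiveSketch
{
    static void ExtractSolid(string archivePath, string destinationDirectory)
    {
        using var archive = RarArchive.Open(archivePath);

        // One sequential pass over the solid archive instead of per-entry random access.
        using var reader = archive.ExtractAllEntries();
        while (reader.MoveToNextEntry())
        {
            if (!reader.Entry.IsDirectory)
            {
                reader.WriteEntryToDirectory(
                    destinationDirectory,
                    new ExtractionOptions { ExtractFullPath = true, Overwrite = true });
            }
        }
    }
}
```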
### Async Struct-Copy Bug in LZMA RangeCoder

When implementing async methods on mutable `struct` types (like `BitEncoder` and `BitDecoder` in the LZMA RangeCoder), be aware that the async state machine copies the struct when `await` is encountered. This means mutations to struct fields after the `await` point may not persist back to the original struct stored in arrays or fields.

**The Bug:**
```csharp
// BAD: async method on mutable struct
public async ValueTask<uint> DecodeAsync(Decoder decoder, CancellationToken cancellationToken = default)
{
var newBound = (decoder._range >> K_NUM_BIT_MODEL_TOTAL_BITS) * _prob;
if (decoder._code < newBound)
{
decoder._range = newBound;
_prob += (K_BIT_MODEL_TOTAL - _prob) >> K_NUM_MOVE_BITS; // Mutates _prob
await decoder.Normalize2Async(cancellationToken).ConfigureAwait(false); // Struct gets copied here
return 0; // Original _prob update may be lost
}
// ...
}
```

**The Fix:**
Refactor async methods on mutable structs to perform all struct mutations synchronously before any `await`, or use a helper method to separate the await from the struct mutation:

```csharp
// GOOD: struct mutations happen synchronously, await is conditional
public ValueTask<uint> DecodeAsync(Decoder decoder, CancellationToken cancellationToken = default)
{
var newBound = (decoder._range >> K_NUM_BIT_MODEL_TOTAL_BITS) * _prob;
if (decoder._code < newBound)
{
decoder._range = newBound;
_prob += (K_BIT_MODEL_TOTAL - _prob) >> K_NUM_MOVE_BITS; // All mutations complete
return DecodeAsyncHelper(decoder.Normalize2Async(cancellationToken), 0); // Await in helper
}
decoder._range -= newBound;
decoder._code -= newBound;
_prob -= (_prob) >> K_NUM_MOVE_BITS; // All mutations complete
return DecodeAsyncHelper(decoder.Normalize2Async(cancellationToken), 1); // Await in helper
}

private static async ValueTask<uint> DecodeAsyncHelper(ValueTask normalizeTask, uint result)
{
await normalizeTask.ConfigureAwait(false);
return result;
}
```

**Why This Matters:**
In LZMA, the `BitEncoder` and `BitDecoder` structs maintain adaptive probability models in their `_prob` field. When these structs are stored in arrays (e.g., `_models[m]`), the async state machine copy breaks the adaptive model, causing incorrect bit decoding and eventually `DataErrorException` exceptions.

**Related Files:**
- `src/SharpCompress/Compressors/LZMA/RangeCoder/RangeCoderBit.Async.cs` - Fixed
- `src/SharpCompress/Compressors/LZMA/RangeCoder/RangeCoderBitTree.Async.cs` - Uses readonly structs, so this pattern doesn't apply
5. **Format detection** - Use `ReaderFactory.OpenReader()` for auto-detection, test with actual archive files
@@ -1,20 +1,20 @@
<Project>
<ItemGroup>
<PackageVersion Include="BenchmarkDotNet" Version="0.15.8" />
<PackageVersion Include="Bullseye" Version="6.1.0" />
<PackageVersion Include="AwesomeAssertions" Version="9.3.0" />
<PackageVersion Include="Glob" Version="1.1.9" />
<PackageVersion Include="JetBrains.Profiler.SelfApi" Version="2.5.15" />
<PackageVersion Include="JetBrains.Profiler.SelfApi" Version="2.5.16" />
<PackageVersion Include="Microsoft.Bcl.AsyncInterfaces" Version="10.0.0" />
<PackageVersion Include="Microsoft.NET.ILLink.Task" Version="10.0.0" />
<PackageVersion Include="Microsoft.NET.Test.Sdk" Version="18.0.1" />
<PackageVersion Include="Mono.Posix.NETStandard" Version="1.0.0" />
<PackageVersion Include="SimpleExec" Version="13.0.0" />
<PackageVersion Include="System.Text.Encoding.CodePages" Version="10.0.0" />
<PackageVersion Include="System.Buffers" Version="4.6.1" />
<PackageVersion Include="System.Memory" Version="4.6.3" />
<PackageVersion Include="xunit.v3" Version="3.2.1" />
<PackageVersion Include="xunit.v3" Version="3.2.2" />
<PackageVersion Include="xunit.runner.visualstudio" Version="3.1.5" />
<GlobalPackageReference Include="Microsoft.SourceLink.GitHub" Version="8.0.0" />
<GlobalPackageReference Include="Microsoft.SourceLink.GitHub" Version="10.0.102" />
<GlobalPackageReference Include="Microsoft.NETFramework.ReferenceAssemblies" Version="1.0.3" />
<GlobalPackageReference
Include="Microsoft.VisualStudio.Threading.Analyzers"
@@ -26,7 +26,7 @@ RAR is not recommended as it's a proprietary format and the compression is close

7Zip and XZ both are overly complicated. 7Zip does not support streamable formats. XZ has known holes explained here: (http://www.nongnu.org/lzip/xz_inadequate.html) Use Tar/LZip for LZMA compression instead.

ZStandard is an efficient format that works well for streaming with a flexible compression level to tweak the speed/performance trade off you are looking for. We currently only implement decompression for ZStandard but as we leverage the [ZstdSharp](https://github.com/oleg-st/ZstdSharp) library one could likely add compression support without much trouble (PRs are welcome!).
ZStandard is an efficient format that works well for streaming with a flexible compression level to tweak the speed/performance trade off you are looking for.

## A Simple Request
@@ -46,6 +46,8 @@ XZ BCJ filters support contributed by Louis-Michel Bergeron, on behalf of aDolus

7Zip implementation based on: https://code.google.com/p/managed-lzma/

Zstandard implementation from: https://github.com/oleg-st/ZstdSharp

LICENSE
Copyright (c) 2000 - 2011 The Legion Of The Bouncy Castle (http://www.bouncycastle.org)
385 build/Program.cs
@@ -19,6 +19,9 @@ const string Publish = "publish";
const string DetermineVersion = "determine-version";
const string UpdateVersion = "update-version";
const string PushToNuGet = "push-to-nuget";
const string DisplayBenchmarkResults = "display-benchmark-results";
const string CompareBenchmarkResults = "compare-benchmark-results";
const string GenerateBaseline = "generate-baseline";

Target(
Clean,
@@ -210,6 +213,249 @@
}
);

Target(
DisplayBenchmarkResults,
() =>
{
var githubStepSummary = Environment.GetEnvironmentVariable("GITHUB_STEP_SUMMARY");
var resultsDir = "benchmark-results/results";

if (!Directory.Exists(resultsDir))
{
Console.WriteLine("No benchmark results found.");
return;
}

var markdownFiles = Directory
.GetFiles(resultsDir, "*-report-github.md")
.OrderBy(f => f)
.ToList();

if (markdownFiles.Count == 0)
{
Console.WriteLine("No benchmark markdown reports found.");
return;
}

var output = new List<string> { "## Benchmark Results", "" };

foreach (var file in markdownFiles)
{
Console.WriteLine($"Processing {Path.GetFileName(file)}");
var content = File.ReadAllText(file);
output.Add(content);
output.Add("");
}

// Write to GitHub Step Summary if available
if (!string.IsNullOrEmpty(githubStepSummary))
{
File.AppendAllLines(githubStepSummary, output);
Console.WriteLine($"Benchmark results written to GitHub Step Summary");
}
else
{
// Write to console if not in GitHub Actions
foreach (var line in output)
{
Console.WriteLine(line);
}
}
}
);

Target(
CompareBenchmarkResults,
() =>
{
var githubStepSummary = Environment.GetEnvironmentVariable("GITHUB_STEP_SUMMARY");
var baselinePath = "tests/SharpCompress.Performance/baseline-results.md";
var resultsDir = "benchmark-results/results";

var output = new List<string> { "## Comparison with Baseline", "" };

if (!File.Exists(baselinePath))
{
Console.WriteLine("Baseline file not found");
output.Add("⚠️ Baseline file not found. Run `generate-baseline` to create it.");
WriteOutput(output, githubStepSummary);
return;
}

if (!Directory.Exists(resultsDir))
{
Console.WriteLine("No current benchmark results found.");
output.Add("⚠️ No current benchmark results found. Showing baseline only.");
output.Add("");
output.Add("### Baseline Results");
output.AddRange(File.ReadAllLines(baselinePath));
WriteOutput(output, githubStepSummary);
return;
}

var markdownFiles = Directory
.GetFiles(resultsDir, "*-report-github.md")
.OrderBy(f => f)
.ToList();

if (markdownFiles.Count == 0)
{
Console.WriteLine("No current benchmark markdown reports found.");
output.Add("⚠️ No current benchmark results found. Showing baseline only.");
output.Add("");
output.Add("### Baseline Results");
output.AddRange(File.ReadAllLines(baselinePath));
WriteOutput(output, githubStepSummary);
return;
}

Console.WriteLine("Parsing baseline results...");
var baselineMetrics = ParseBenchmarkResults(File.ReadAllText(baselinePath));

Console.WriteLine("Parsing current results...");
var currentText = string.Join("\n", markdownFiles.Select(f => File.ReadAllText(f)));
var currentMetrics = ParseBenchmarkResults(currentText);

Console.WriteLine("Comparing results...");
output.Add("### Performance Comparison");
output.Add("");
output.Add(
"| Benchmark | Baseline Mean | Current Mean | Change | Baseline Memory | Current Memory | Change |"
);
output.Add(
"|-----------|---------------|--------------|--------|-----------------|----------------|--------|"
);

var hasRegressions = false;
var hasImprovements = false;

foreach (var method in currentMetrics.Keys.Union(baselineMetrics.Keys).OrderBy(k => k))
{
var hasCurrent = currentMetrics.TryGetValue(method, out var current);
var hasBaseline = baselineMetrics.TryGetValue(method, out var baseline);

if (!hasCurrent)
{
output.Add(
$"| {method} | {baseline!.Mean} | ❌ Missing | N/A | {baseline.Memory} | N/A | N/A |"
);
continue;
}

if (!hasBaseline)
{
output.Add(
$"| {method} | ❌ New | {current!.Mean} | N/A | N/A | {current.Memory} | N/A |"
);
continue;
}

var timeChange = CalculateChange(baseline!.MeanValue, current!.MeanValue);
var memChange = CalculateChange(baseline.MemoryValue, current.MemoryValue);

var timeIcon =
timeChange > 25 ? "🔴"
: timeChange < -25 ? "🟢"
: "⚪";
var memIcon =
memChange > 25 ? "🔴"
: memChange < -25 ? "🟢"
: "⚪";

if (timeChange > 25 || memChange > 25)
hasRegressions = true;
if (timeChange < -25 || memChange < -25)
hasImprovements = true;

output.Add(
$"| {method} | {baseline.Mean} | {current.Mean} | {timeIcon} {timeChange:+0.0;-0.0;0}% | {baseline.Memory} | {current.Memory} | {memIcon} {memChange:+0.0;-0.0;0}% |"
);
}

output.Add("");
output.Add("**Legend:**");
output.Add("- 🔴 Regression (>25% slower/more memory)");
output.Add("- 🟢 Improvement (>25% faster/less memory)");
output.Add("- ⚪ No significant change");

if (hasRegressions)
{
output.Add("");
output.Add(
"⚠️ **Warning**: Performance regressions detected. Review the changes carefully."
);
}
else if (hasImprovements)
{
output.Add("");
output.Add("✅ Performance improvements detected!");
}
else
{
output.Add("");
output.Add("✅ Performance is stable compared to baseline.");
}

WriteOutput(output, githubStepSummary);
}
);

Target(
GenerateBaseline,
() =>
{
var perfProject = "tests/SharpCompress.Performance/SharpCompress.Performance.csproj";
var baselinePath = "tests/SharpCompress.Performance/baseline-results.md";
var artifactsDir = "baseline-artifacts";

Console.WriteLine("Building performance project...");
Run("dotnet", $"build {perfProject} --configuration Release");

Console.WriteLine("Running benchmarks to generate baseline...");
Run(
"dotnet",
$"run --project {perfProject} --configuration Release --no-build -- --filter \"*\" --exporters markdown --artifacts {artifactsDir}"
);

var resultsDir = Path.Combine(artifactsDir, "results");
if (!Directory.Exists(resultsDir))
{
Console.WriteLine("ERROR: No benchmark results generated.");
return;
}

var markdownFiles = Directory
.GetFiles(resultsDir, "*-report-github.md")
.OrderBy(f => f)
.ToList();

if (markdownFiles.Count == 0)
{
Console.WriteLine("ERROR: No markdown reports found.");
return;
}

Console.WriteLine($"Combining {markdownFiles.Count} benchmark reports...");
var baselineContent = new List<string>();

foreach (var file in markdownFiles)
{
var lines = File.ReadAllLines(file);
baselineContent.AddRange(lines.Select(l => l.Trim()).Where(l => l.StartsWith('|')));
}

File.WriteAllText(baselinePath, string.Join(Environment.NewLine, baselineContent));
Console.WriteLine($"Baseline written to {baselinePath}");

// Clean up artifacts directory
if (Directory.Exists(artifactsDir))
{
Directory.Delete(artifactsDir, true);
Console.WriteLine("Cleaned up artifacts directory.");
}
}
);

Target("default", [Publish], () => Console.WriteLine("Done!"));

await RunTargetsAndExitAsync(args);
@@ -302,3 +548,142 @@ static async Task<string> GetGitOutput(string command, string args)
throw new Exception($"Git command failed: git {command} {args}\n{ex.Message}", ex);
}
}

static void WriteOutput(List<string> output, string? githubStepSummary)
{
if (!string.IsNullOrEmpty(githubStepSummary))
{
File.AppendAllLines(githubStepSummary, output);
Console.WriteLine("Comparison written to GitHub Step Summary");
}
else
{
foreach (var line in output)
{
Console.WriteLine(line);
}
}
}

static Dictionary<string, BenchmarkMetric> ParseBenchmarkResults(string markdown)
{
var metrics = new Dictionary<string, BenchmarkMetric>();
var lines = markdown.Split('\n');

for (int i = 0; i < lines.Length; i++)
{
var line = lines[i].Trim();

// Look for table rows with benchmark data
if (line.StartsWith("|") && line.Contains("'") && i > 0)
{
var parts = line.Split('|', StringSplitOptions.TrimEntries);
if (parts.Length >= 5)
{
var method = parts[1].Replace("'", "'");
var meanStr = parts[2];

// Find Allocated column - it's usually the last column or labeled "Allocated"
string memoryStr = "N/A";
for (int j = parts.Length - 2; j >= 2; j--)
{
if (
parts[j].Contains("KB")
|| parts[j].Contains("MB")
|| parts[j].Contains("GB")
|| parts[j].Contains("B")
)
{
memoryStr = parts[j];
break;
}
}

if (
!method.Equals("Method", StringComparison.OrdinalIgnoreCase)
&& !string.IsNullOrWhiteSpace(method)
)
{
var metric = new BenchmarkMetric
{
Method = method,
Mean = meanStr,
MeanValue = ParseTimeValue(meanStr),
Memory = memoryStr,
MemoryValue = ParseMemoryValue(memoryStr),
};
metrics[method] = metric;
}
}
}
}

return metrics;
}

static double ParseTimeValue(string timeStr)
{
if (string.IsNullOrWhiteSpace(timeStr) || timeStr == "N/A" || timeStr == "NA")
return 0;

// Remove thousands separators and parse
timeStr = timeStr.Replace(",", "").Trim();

var match = Regex.Match(timeStr, @"([\d.]+)\s*(\w+)");
if (!match.Success)
return 0;

var value = double.Parse(match.Groups[1].Value);
var unit = match.Groups[2].Value.ToLower();

// Convert to microseconds for comparison
return unit switch
{
"s" => value * 1_000_000,
"ms" => value * 1_000,
"μs" or "us" => value,
"ns" => value / 1_000,
_ => value,
};
}

static double ParseMemoryValue(string memStr)
{
if (string.IsNullOrWhiteSpace(memStr) || memStr == "N/A" || memStr == "NA")
return 0;

memStr = memStr.Replace(",", "").Trim();

var match = Regex.Match(memStr, @"([\d.]+)\s*(\w+)");
if (!match.Success)
return 0;

var value = double.Parse(match.Groups[1].Value);
var unit = match.Groups[2].Value.ToUpper();

// Convert to KB for comparison
return unit switch
{
"GB" => value * 1_024 * 1_024,
"MB" => value * 1_024,
"KB" => value,
"B" => value / 1_024,
_ => value,
};
}

static double CalculateChange(double baseline, double current)
{
if (baseline == 0)
return 0;
return ((current - baseline) / baseline) * 100;
}

record BenchmarkMetric
{
public string Method { get; init; } = "";
public string Mean { get; init; } = "";
public double MeanValue { get; init; }
public string Memory { get; init; } = "";
public double MemoryValue { get; init; }
}
@@ -25,12 +25,12 @@
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==",
"requested": "[10.0.102, )",
"resolved": "10.0.102",
"contentHash": "Oxq3RCIJSdtpIU4hLqO7XaDe/Ra3HS9Wi8rJl838SAg6Zu1iQjerA0+xXWBgUFYbgknUGCLOU0T+lzMLkvY9Qg==",
"dependencies": {
"Microsoft.Build.Tasks.Git": "8.0.0",
"Microsoft.SourceLink.Common": "8.0.0"
"Microsoft.Build.Tasks.Git": "10.0.102",
"Microsoft.SourceLink.Common": "10.0.102"
}
},
"Microsoft.VisualStudio.Threading.Analyzers": {
@@ -47,8 +47,8 @@
},
"Microsoft.Build.Tasks.Git": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
"resolved": "10.0.102",
"contentHash": "0i81LYX31U6UiXz4NOLbvc++u+/mVDmOt+PskrM/MygpDxkv9THKQyRUmavBpLK6iBV0abNWnn+CQgSRz//Pwg=="
},
"Microsoft.NETFramework.ReferenceAssemblies.net461": {
"type": "Transitive",
@@ -57,8 +57,8 @@
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
"resolved": "10.0.102",
"contentHash": "Mk1IMb9q5tahC2NltxYXFkLBtuBvfBoCQ3pIxYQWfzbCE9o1OB9SsHe0hnNGo7lWgTA/ePbFAJLWu6nLL9K17A=="
}
}
}
123 docs/API.md
@@ -20,14 +20,18 @@ using (var archive = RarArchive.OpenArchive("file.rar"))
using (var archive = SevenZipArchive.OpenArchive("file.7z"))
using (var archive = GZipArchive.OpenArchive("file.gz"))

// With options
var options = new ReaderOptions
// With fluent options (preferred)
var options = ReaderOptions.ForEncryptedArchive("password")
.WithArchiveEncoding(new ArchiveEncoding { Default = Encoding.GetEncoding(932) });
using (var archive = ZipArchive.OpenArchive("encrypted.zip", options))

// Alternative: object initializer
var options2 = new ReaderOptions
{
Password = "password",
LeaveStreamOpen = true,
ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(932) }
};
using (var archive = ZipArchive.OpenArchive("encrypted.zip", options))
```

### Creating Archives
@@ -44,16 +48,21 @@ using (var archive = ZipArchive.CreateArchive())
using (var archive = TarArchive.CreateArchive())
using (var archive = GZipArchive.CreateArchive())

// With options
var options = new WriterOptions(CompressionType.Deflate)
{
CompressionLevel = 9,
LeaveStreamOpen = false
};
// With fluent options (preferred)
var options = WriterOptions.ForZip()
.WithCompressionLevel(9)
.WithLeaveStreamOpen(false);
using (var archive = ZipArchive.CreateArchive())
{
archive.SaveTo("output.zip", options);
}

// Alternative: constructor with object initializer
var options2 = new WriterOptions(CompressionType.Deflate)
{
CompressionLevel = 9,
LeaveStreamOpen = false
};
```

---
@@ -72,16 +81,11 @@ using (var archive = ZipArchive.OpenArchive("file.zip"))
var entry = archive.Entries.FirstOrDefault(e => e.Key == "file.txt");

// Extract all
archive.WriteToDirectory(@"C:\output", new ExtractionOptions
{
ExtractFullPath = true,
Overwrite = true
});
archive.WriteToDirectory(@"C:\output");

// Extract single entry
var entry = archive.Entries.First();
entry.WriteToFile(@"C:\output\file.txt");
entry.WriteToFile(@"C:\output\file.txt", new ExtractionOptions { Overwrite = true });

// Get entry stream
using (var stream = entry.OpenEntryStream())
@@ -95,7 +99,6 @@ using (var asyncArchive = await ZipArchive.OpenAsyncArchive("file.zip"))
{
await asyncArchive.WriteToDirectoryAsync(
@"C:\output",
new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
cancellationToken: cancellationToken
);
}
@@ -187,7 +190,6 @@ using (var reader = await ReaderFactory.OpenAsyncReader(stream))
// Async extraction of all entries
await reader.WriteAllToDirectoryAsync(
@"C:\output",
new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
cancellationToken
);
}
@@ -229,43 +231,91 @@ using (var writer = WriterFactory.OpenWriter(stream, ArchiveType.Zip, Compressio

### ReaderOptions

Use factory presets and fluent helpers for common configurations:

```csharp
var options = new ReaderOptions
{
Password = "password", // For encrypted archives
LeaveStreamOpen = true, // Don't close wrapped stream
ArchiveEncoding = new ArchiveEncoding // Custom character encoding
{
Default = Encoding.GetEncoding(932)
}
};
// External stream with password and custom encoding
var options = ReaderOptions.ForExternalStream()
.WithPassword("password")
.WithArchiveEncoding(new ArchiveEncoding { Default = Encoding.GetEncoding(932) });

using (var archive = ZipArchive.OpenArchive("file.zip", options))
{
// ...
}

// Common presets
var safeOptions = ReaderOptions.SafeExtract; // No overwrite
var flatOptions = ReaderOptions.FlatExtract; // No directory structure

// Factory defaults:
// - file path / FileInfo overloads use LeaveStreamOpen = false
// - stream overloads use LeaveStreamOpen = true
```

Alternative: traditional object initializer:

```csharp
var options = new ReaderOptions
{
Password = "password",
LeaveStreamOpen = true,
ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(932) }
};
```

### WriterOptions

Factory methods provide a clean, discoverable way to create writer options:

```csharp
// Factory methods for common archive types
var zipOptions = WriterOptions.ForZip() // ZIP with Deflate
.WithCompressionLevel(9) // 0-9 for Deflate
.WithLeaveStreamOpen(false); // Close stream when done

var tarOptions = WriterOptions.ForTar(CompressionType.GZip) // TAR with GZip
.WithLeaveStreamOpen(false);

var gzipOptions = WriterOptions.ForGZip() // GZip file
.WithCompressionLevel(6);

archive.SaveTo("output.zip", zipOptions);
```

Alternative: traditional constructor with object initializer:

```csharp
var options = new WriterOptions(CompressionType.Deflate)
{
CompressionLevel = 9, // 0-9 for Deflate
LeaveStreamOpen = true, // Don't close stream
CompressionLevel = 9,
LeaveStreamOpen = true,
};
archive.SaveTo("output.zip", options);
```

### ExtractionOptions
### Extraction behavior

```csharp
var options = new ExtractionOptions
var options = new ReaderOptions
{
ExtractFullPath = true, // Recreate directory structure
Overwrite = true, // Overwrite existing files
PreserveFileTime = true // Keep original timestamps
};
archive.WriteToDirectory(@"C:\output", options);

using (var archive = ZipArchive.OpenArchive("file.zip", options))
{
archive.WriteToDirectory(@"C:\output");
}
```

### Options matrix

```text
ReaderOptions: open-time behavior (password, encoding, stream ownership, extraction defaults)
WriterOptions: write-time behavior (compression type/level, encoding, stream ownership)
ZipWriterEntryOptions: per-entry ZIP overrides (compression, level, timestamps, comments, zip64)
```

---
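A short sketch of the per-entry overrides mentioned in the matrix above: `ZipWriterEntryOptions` lets a single entry deviate from the writer-wide settings. The property and constructor shapes follow the commonly published API and may not match this change set exactly.

```csharp
using System.IO;
using SharpCompress.Common;
using SharpCompress.Writers.Zip;

class PerEntryOptionsSketch
{
    static void Write(Stream output, Stream alreadyCompressedPayload, Stream textFile)
    {
        using var writer = new ZipWriter(output, new ZipWriterOptions(CompressionType.Deflate));

        // Store an already-compressed payload without recompressing it.
        writer.Write(
            "assets/archive-inside.zip",
            alreadyCompressedPayload,
            new ZipWriterEntryOptions { CompressionType = CompressionType.None });

        // Let the small text file use the writer-wide Deflate settings.
        writer.Write("readme.txt", textFile, new ZipWriterEntryOptions());
    }
}
```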
@@ -317,13 +367,9 @@ ArchiveType.ZStandard
try
{
using (var archive = ZipArchive.Open("archive.zip",
new ReaderOptions { Password = "password" }))
ReaderOptions.ForEncryptedArchive("password")))
{
archive.WriteToDirectory(@"C:\output", new ExtractionOptions
{
ExtractFullPath = true,
Overwrite = true
});
archive.WriteToDirectory(@"C:\output");
}
}
catch (PasswordRequiredException)
@@ -348,7 +394,7 @@ var progress = new Progress<ProgressReport>(report =>
Console.WriteLine($"Extracting {report.EntryPath}: {report.PercentComplete}%");
});

var options = new ReaderOptions { Progress = progress };
var options = ReaderOptions.ForOwnedFile().WithProgress(progress);
using (var archive = ZipArchive.OpenArchive("archive.zip", options))
{
archive.WriteToDirectory(@"C:\output");
@@ -367,7 +413,6 @@ try
{
await archive.WriteToDirectoryAsync(
@"C:\output",
new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
cancellationToken: cts.Token
);
}
@@ -90,7 +90,7 @@ Common types, options, and enumerations used across formats.
- `ArchiveType.cs` - Enum for archive formats
- `CompressionType.cs` - Enum for compression methods
- `ArchiveEncoding.cs` - Character encoding configuration
- `ExtractionOptions.cs` - Extraction configuration
- `IExtractionOptions.cs` - Extraction configuration exposed through `ReaderOptions`
- Format-specific headers: `Zip/Headers/`, `Tar/Headers/`, `Rar/Headers/`, etc.

#### `Compressors/` - Compression Algorithms
@@ -215,13 +215,13 @@ using (var compressor = new DeflateStream(nonDisposingStream))
public abstract class AbstractArchive : IArchive
{
// Template methods
public virtual void WriteToDirectory(string destinationDirectory, ExtractionOptions options)
public virtual void WriteToDirectory(string destinationDirectory)
{
// Common extraction logic
foreach (var entry in Entries)
{
// Call subclass method
entry.WriteToFile(destinationPath, options);
entry.WriteToFile(destinationPath);
}
}
@@ -267,8 +267,7 @@ public interface IArchive : IDisposable
{
IEnumerable<IEntry> Entries { get; }

void WriteToDirectory(string destinationDirectory,
ExtractionOptions options = null);
void WriteToDirectory(string destinationDirectory);

IEntry FirstOrDefault(Func<IEntry, bool> predicate);
@@ -287,8 +286,7 @@ public interface IReader : IDisposable

bool MoveToNextEntry();

void WriteEntryToDirectory(string destinationDirectory,
ExtractionOptions options = null);
void WriteEntryToDirectory(string destinationDirectory);

Stream OpenEntryStream();
@@ -327,7 +325,7 @@ public interface IEntry
DateTime? LastModifiedTime { get; }
CompressionType CompressionType { get; }

void WriteToFile(string fullPath, ExtractionOptions options = null);
void WriteToFile(string fullPath);
void WriteToStream(Stream destinationStream);
Stream OpenEntryStream();
@@ -1,129 +0,0 @@
# DataDescriptorStream and RewindableStream Fix

## Summary

Fixed the `Zip_Uncompressed_Read_All` test failure caused by incompatibility between `DataDescriptorStream` seeking requirements and the new `RewindableStream` wrapper used in `StreamingZipHeaderFactory`.

## Problem Description

### Symptom
The test `Zip_Uncompressed_Read_All` was failing with:
```
System.NotSupportedException : Cannot seek outside buffered region.
```

### Root Cause

The issue had two related aspects:

#### 1. Double-Wrapping of RewindableStream

`StreamingZipHeaderFactory.ReadStreamHeader()` was creating a new `RewindableStream` wrapper:
```csharp
var rewindableStream = new RewindableStream(stream);
```

When `ReaderFactory.OpenReader()` already wraps the input stream with `SeekableRewindableStream` (for seekable streams), this resulted in double-wrapping:
```
DataDescriptorStream
-> NonDisposingStream
-> RewindableStream (new, plain) <-- created by ReadStreamHeader
-> SeekableRewindableStream <-- created by ReaderFactory
-> FileStream
```

The inner plain `RewindableStream` lost the seeking capability of `SeekableRewindableStream`.

#### 2. Recording State Interference

Even after fixing the double-wrapping using `RewindableStream.EnsureSeekable()`, there was another issue:

`StreamingZipHeaderFactory.ReadStreamHeader()` contains code to peek ahead when checking for zero-length files with `UsePostDataDescriptor`:

```csharp
rewindableStream.StartRecording();
var nextHeaderBytes = reader.ReadUInt32();
rewindableStream.Rewind(true);
```

This code was interfering with the recording state that `ReaderFactory.OpenReader()` had set up:

1. `ReaderFactory.OpenReader()` calls `bStream.StartRecording()` at position 0
2. Factory detection calls `StreamingZipHeaderFactory.ReadStreamHeader()` via `IsZipFile()`
3. Inside `ReadStreamHeader`, the above code overwrites the recorded position
4. `Rewind(true)` stops recording and seeks to the wrong position
5. When control returns to `Factory.TryOpenReader()`, it calls `stream.Rewind(true)`, but recording is already stopped, so nothing happens
6. The stream position is not at the beginning, causing subsequent reads to fail

## Solution

### Fix 1: Use EnsureSeekable instead of new RewindableStream

Changed `StreamingZipHeaderFactory.ReadStreamHeader()` to use:
```csharp
var rewindableStream = RewindableStream.EnsureSeekable(stream);
```

This method:
- Returns the existing `RewindableStream` if the stream is already one (avoids double-wrapping)
- Creates a `SeekableRewindableStream` if the underlying stream is seekable
- Creates a plain `RewindableStream` only for non-seekable streams

### Fix 2: Use direct position save/restore for SeekableRewindableStream

For the peek-ahead logic, changed the code to check for `SeekableRewindableStream` specifically and use direct position manipulation:

```csharp
if (rewindableStream is SeekableRewindableStream)
{
// Direct position save/restore avoids interfering with caller's recording state
var savedPosition = rewindableStream.Position;
var nextHeaderBytes = reader.ReadUInt32();
rewindableStream.Position = savedPosition;
header.HasData = !IsHeader(nextHeaderBytes);
}
else
{
// Plain RewindableStream was created fresh by EnsureSeekable, safe to use recording
rewindableStream.StartRecording();
var nextHeaderBytes = reader.ReadUInt32();
rewindableStream.Rewind(true);
header.HasData = !IsHeader(nextHeaderBytes);
}
```

This approach:
- For `SeekableRewindableStream` (reused from caller): Uses direct position save/restore to avoid clobbering the caller's recording state
- For plain `RewindableStream` (freshly created): Uses the recording mechanism which is safe since the stream isn't shared

## Files Changed

- `src/SharpCompress/Common/Zip/StreamingZipHeaderFactory.cs`
- `src/SharpCompress/Common/Zip/StreamingZipHeaderFactory.Async.cs`

## Design Notes

### Why not fix RewindableStream.CanSeek?

`RewindableStream.CanSeek` returns `true` even though it can only seek within its buffered region. We considered changing this to `false`, but:
1. It would be a breaking change for existing code that relies on `CanSeek`
2. The `RewindableStream` does provide limited seeking capability (within buffer)
3. Checking for `SeekableRewindableStream` specifically is more precise

### Stream Wrapper Hierarchy

Understanding the stream wrapper hierarchy is crucial:

**For seekable source streams (e.g., FileStream):**
```
SeekableRewindableStream (full seeking via underlying stream)
-> FileStream
```

**For non-seekable source streams (e.g., decompression streams):**
```
RewindableStream (limited seeking via buffer)
-> DecompressionStream
```

`DataDescriptorStream` needs backward seeking to position the stream correctly after finding the data descriptor marker. This is why proper stream wrapper selection matters.
@@ -18,14 +18,9 @@ Most archive formats store filenames and metadata as bytes. SharpCompress must c
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
// Configure encoding before opening archive
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding(932) // cp932 for Japanese
|
||||
}
|
||||
};
|
||||
// Configure encoding using fluent factory method (preferred)
|
||||
var options = ReaderOptions.ForEncoding(
|
||||
new ArchiveEncoding { Default = Encoding.GetEncoding(932) }); // cp932 for Japanese
|
||||
|
||||
using (var archive = ZipArchive.OpenArchive("japanese.zip", options))
|
||||
{
|
||||
@@ -34,6 +29,12 @@ using (var archive = ZipArchive.OpenArchive("japanese.zip", options))
|
||||
Console.WriteLine(entry.Key); // Now shows correct characters
|
||||
}
|
||||
}
|
||||
|
||||
// Alternative: object initializer
|
||||
var options2 = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(932) }
|
||||
};
|
||||
```
|
||||
|
||||
### ArchiveEncoding Properties
|
||||
@@ -47,10 +48,8 @@ using (var archive = ZipArchive.OpenArchive("japanese.zip", options))
|
||||
|
||||
**Archive API:**
|
||||
```csharp
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(932) }
|
||||
};
|
||||
var options = ReaderOptions.ForEncoding(
|
||||
new ArchiveEncoding { Default = Encoding.GetEncoding(932) });
|
||||
using (var archive = ZipArchive.OpenArchive("file.zip", options))
|
||||
{
|
||||
// Use archive with correct encoding
|
||||
@@ -59,10 +58,8 @@ using (var archive = ZipArchive.OpenArchive("file.zip", options))
|
||||
|
||||
**Reader API:**
|
||||
```csharp
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(932) }
|
||||
};
|
||||
var options = ReaderOptions.ForEncoding(
|
||||
new ArchiveEncoding { Default = Encoding.GetEncoding(932) });
|
||||
using (var stream = File.OpenRead("file.zip"))
|
||||
using (var reader = ReaderFactory.OpenReader(stream, options))
|
||||
{
|
||||
@@ -390,11 +387,7 @@ var options = new ReaderOptions
|
||||
|
||||
using (var archive = ZipArchive.OpenArchive("japanese_files.zip", options))
|
||||
{
|
||||
archive.WriteToDirectory(@"C:\output", new ExtractionOptions
|
||||
{
|
||||
ExtractFullPath = true,
|
||||
Overwrite = true
|
||||
});
|
||||
archive.WriteToDirectory(@"C:\output");
|
||||
}
|
||||
// Files extracted with correct Japanese names
|
||||
```
|
||||
|
||||
@@ -213,11 +213,7 @@ using (var archive = RarArchive.OpenArchive("solid.rar"))
|
||||
using (var archive = RarArchive.OpenArchive("solid.rar"))
|
||||
{
|
||||
// Method 1: Use WriteToDirectory (recommended)
|
||||
archive.WriteToDirectory(@"C:\output", new ExtractionOptions
|
||||
{
|
||||
ExtractFullPath = true,
|
||||
Overwrite = true
|
||||
});
|
||||
archive.WriteToDirectory(@"C:\output");
|
||||
|
||||
// Method 2: Use ExtractAllEntries
|
||||
archive.ExtractAllEntries();
|
||||
@@ -337,7 +333,6 @@ using (var archive = ZipArchive.OpenArchive("archive.zip"))
|
||||
{
|
||||
await archive.WriteToDirectoryAsync(
|
||||
@"C:\output",
|
||||
new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
|
||||
cancellationToken
|
||||
);
|
||||
}
|
||||
@@ -355,10 +350,7 @@ Async doesn't improve performance for:
|
||||
// Sync extraction (simpler, same performance on fast I/O)
|
||||
using (var archive = ZipArchive.OpenArchive("archive.zip"))
|
||||
{
|
||||
archive.WriteToDirectory(
|
||||
@"C:\output",
|
||||
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
|
||||
);
|
||||
archive.WriteToDirectory(@"C:\output");
|
||||
}
|
||||
// Simple and fast - no async needed
|
||||
```
|
||||
@@ -377,7 +369,6 @@ try
|
||||
{
|
||||
await archive.WriteToDirectoryAsync(
|
||||
@"C:\output",
|
||||
new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
|
||||
cts.Token
|
||||
);
|
||||
}
|
||||
|
||||
@@ -6,7 +6,7 @@ SharpCompress now provides full async/await support for all I/O operations. All
|
||||
|
||||
**Key Async Methods:**
|
||||
- `reader.WriteEntryToAsync(stream, cancellationToken)` - Extract entry asynchronously
|
||||
- `reader.WriteAllToDirectoryAsync(path, options, cancellationToken)` - Extract all asynchronously
|
||||
- `reader.WriteAllToDirectoryAsync(path, cancellationToken)` - Extract all asynchronously
|
||||
- `writer.WriteAsync(filename, stream, modTime, cancellationToken)` - Write entry asynchronously
|
||||
- `writer.WriteAllAsync(directory, pattern, searchOption, cancellationToken)` - Write directory asynchronously
|
||||
- `entry.OpenEntryStreamAsync(cancellationToken)` - Open entry stream asynchronously
|
||||
@@ -40,6 +40,10 @@ To deal with the "correct" rules as well as the expectations of users, I've deci
|
||||
|
||||
To be explicit though, consider always using the overloads that use `ReaderOptions` or `WriterOptions` and explicitly set `LeaveStreamOpen` the way you want.
|
||||
|
||||
Default behavior in factory APIs:
|
||||
- File path / `FileInfo` overloads set `LeaveStreamOpen = false`.
|
||||
- Caller-provided `Stream` overloads set `LeaveStreamOpen = true`.
|
||||
|
||||
If you use the compression stream classes directly and don't want the wrapped stream to be closed, use `NonDisposingStream` as a wrapper to prevent the inner stream from being disposed. The change in 0.21 simplified a lot, even though the usage is a bit more convoluted.
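For example, a short sketch of being explicit about ownership of a caller-provided stream (the file name is a placeholder):

```C#
using var fileStream = File.OpenRead("archive.zip");

// Be explicit: keep the caller-provided stream open after the reader is disposed.
var options = new ReaderOptions { LeaveStreamOpen = true };

using (var reader = ReaderFactory.OpenReader(fileStream, options))
{
    // read entries...
}

// fileStream is still usable here because LeaveStreamOpen was set to true.
```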
|
||||
|
||||
## Samples
|
||||
@@ -90,14 +94,14 @@ Note: Extracting a solid rar or 7z file needs to be done in sequential order to
|
||||
`ExtractAllEntries` is primarily intended for solid archives (like solid Rar) or 7Zip archives, where sequential extraction provides the best performance. For general/simple extraction with any supported archive type, use `archive.WriteToDirectory()` instead.
|
||||
|
||||
```C#
|
||||
using (var archive = RarArchive.OpenArchive("Test.rar"))
|
||||
// Using fluent factory method for extraction options
|
||||
using (var archive = RarArchive.OpenArchive("Test.rar",
|
||||
ReaderOptions.ForOwnedFile()
|
||||
.WithExtractFullPath(true)
|
||||
.WithOverwrite(true)))
|
||||
{
|
||||
// Simple extraction with RarArchive; this WriteToDirectory pattern works for all archive types
|
||||
archive.WriteToDirectory(@"D:\temp", new ExtractionOptions()
|
||||
{
|
||||
ExtractFullPath = true,
|
||||
Overwrite = true
|
||||
});
|
||||
archive.WriteToDirectory(@"D:\temp");
|
||||
}
|
||||
```
|
||||
|
||||
@@ -126,13 +130,13 @@ var progress = new Progress<ProgressReport>(report =>
|
||||
Console.WriteLine($"Extracting {report.EntryPath}: {report.PercentComplete}%");
|
||||
});
|
||||
|
||||
using (var archive = RarArchive.OpenArchive("archive.rar", new ReaderOptions { Progress = progress })) // Must be solid Rar or 7Zip
|
||||
using (var archive = RarArchive.OpenArchive("archive.rar",
|
||||
ReaderOptions.ForOwnedFile()
|
||||
.WithProgress(progress)
|
||||
.WithExtractFullPath(true)
|
||||
.WithOverwrite(true))) // Must be solid Rar or 7Zip
|
||||
{
|
||||
archive.WriteToDirectory(@"D:\output", new ExtractionOptions()
|
||||
{
|
||||
ExtractFullPath = true,
|
||||
Overwrite = true
|
||||
});
|
||||
archive.WriteToDirectory(@"D:\output");
|
||||
}
|
||||
```
|
||||
|
||||
@@ -147,11 +151,7 @@ using (var reader = ReaderFactory.OpenReader(stream))
|
||||
if (!reader.Entry.IsDirectory)
|
||||
{
|
||||
Console.WriteLine(reader.Entry.Key);
|
||||
reader.WriteEntryToDirectory(@"C:\temp", new ExtractionOptions()
|
||||
{
|
||||
ExtractFullPath = true,
|
||||
Overwrite = true
|
||||
});
|
||||
reader.WriteEntryToDirectory(@"C:\temp");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -180,10 +180,10 @@ using (var reader = ReaderFactory.OpenReader(stream))
|
||||
|
||||
```C#
|
||||
using (Stream stream = File.OpenWrite("C:\\temp.tgz"))
|
||||
using (var writer = WriterFactory.OpenWriter(stream, ArchiveType.Tar, new WriterOptions(CompressionType.GZip)
|
||||
{
|
||||
LeaveOpenStream = true
|
||||
}))
|
||||
using (var writer = WriterFactory.OpenWriter(
|
||||
stream,
|
||||
ArchiveType.Tar,
|
||||
WriterOptions.ForTar(CompressionType.GZip).WithLeaveStreamOpen(true)))
|
||||
{
|
||||
writer.WriteAll("D:\\temp", "*", SearchOption.AllDirectories);
|
||||
}
|
||||
@@ -192,15 +192,15 @@ using (var writer = WriterFactory.OpenWriter(stream, ArchiveType.Tar, new Writer
|
||||
### Extract zip which has non-utf8 encoded filename(cp932)
|
||||
|
||||
```C#
|
||||
var opts = new SharpCompress.Readers.ReaderOptions();
|
||||
var encoding = Encoding.GetEncoding(932);
|
||||
opts.ArchiveEncoding = new SharpCompress.Common.ArchiveEncoding();
|
||||
opts.ArchiveEncoding.CustomDecoder = (data, x, y) =>
|
||||
{
|
||||
return encoding.GetString(data);
|
||||
};
|
||||
var tr = SharpCompress.Archives.Zip.ZipArchive.OpenArchive("test.zip", opts);
|
||||
foreach(var entry in tr.Entries)
|
||||
var opts = new ReaderOptions()
|
||||
.WithArchiveEncoding(new ArchiveEncoding
|
||||
{
|
||||
CustomDecoder = (data, x, y) => encoding.GetString(data)
|
||||
});
|
||||
|
||||
using var archive = ZipArchive.OpenArchive("test.zip", opts);
|
||||
foreach(var entry in archive.Entries)
|
||||
{
|
||||
Console.WriteLine($"{entry.Key}");
|
||||
}
|
||||
@@ -238,11 +238,6 @@ using (var reader = ReaderFactory.OpenReader(stream))
|
||||
{
|
||||
await reader.WriteAllToDirectoryAsync(
|
||||
@"D:\temp",
|
||||
new ExtractionOptions()
|
||||
{
|
||||
ExtractFullPath = true,
|
||||
Overwrite = true
|
||||
},
|
||||
cancellationToken
|
||||
);
|
||||
}
|
||||
@@ -321,7 +316,6 @@ using (var archive = ZipArchive.OpenArchive("archive.zip"))
|
||||
// Simple async extraction - works for all archive types
|
||||
await archive.WriteToDirectoryAsync(
|
||||
@"C:\output",
|
||||
new ExtractionOptions() { ExtractFullPath = true, Overwrite = true },
|
||||
cancellationToken
|
||||
);
|
||||
}
|
||||
|
||||
@@ -21,7 +21,7 @@ public abstract partial class AbstractArchive<TEntry, TVolume>
|
||||
IAsyncEnumerable<TVolume> volumes
|
||||
)
|
||||
{
|
||||
foreach (var item in LoadEntries(await volumes.ToListAsync()))
|
||||
foreach (var item in LoadEntries(await volumes.ToListAsync().ConfigureAwait(false)))
|
||||
{
|
||||
yield return item;
|
||||
}
|
||||
@@ -47,8 +47,8 @@ public abstract partial class AbstractArchive<TEntry, TVolume>
|
||||
|
||||
private async ValueTask EnsureEntriesLoadedAsync()
|
||||
{
|
||||
await _lazyEntriesAsync.EnsureFullyLoaded();
|
||||
await _lazyVolumesAsync.EnsureFullyLoaded();
|
||||
await _lazyEntriesAsync.EnsureFullyLoaded().ConfigureAwait(false);
|
||||
await _lazyVolumesAsync.EnsureFullyLoaded().ConfigureAwait(false);
|
||||
}
|
||||
|
||||
private async IAsyncEnumerable<IArchiveEntry> EntriesAsyncCast()
|
||||
@@ -73,29 +73,31 @@ public abstract partial class AbstractArchive<TEntry, TVolume>
|
||||
|
||||
public async ValueTask<IAsyncReader> ExtractAllEntriesAsync()
|
||||
{
|
||||
if (!await IsSolidAsync() && Type != ArchiveType.SevenZip)
|
||||
if (!await IsSolidAsync().ConfigureAwait(false) && Type != ArchiveType.SevenZip)
|
||||
{
|
||||
throw new SharpCompressException(
|
||||
"ExtractAllEntries can only be used on solid archives or 7Zip archives (which require random access)."
|
||||
);
|
||||
}
|
||||
await EnsureEntriesLoadedAsync();
|
||||
return await CreateReaderForSolidExtractionAsync();
|
||||
await EnsureEntriesLoadedAsync().ConfigureAwait(false);
|
||||
return await CreateReaderForSolidExtractionAsync().ConfigureAwait(false);
|
||||
}
|
||||
|
||||
public virtual ValueTask<bool> IsSolidAsync() => new(false);
|
||||
|
||||
public async ValueTask<bool> IsCompleteAsync()
|
||||
{
|
||||
await EnsureEntriesLoadedAsync();
|
||||
return await EntriesAsync.AllAsync(x => x.IsComplete);
|
||||
await EnsureEntriesLoadedAsync().ConfigureAwait(false);
|
||||
return await EntriesAsync.AllAsync(x => x.IsComplete).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
public async ValueTask<long> TotalSizeAsync() =>
|
||||
await EntriesAsync.AggregateAsync(0L, (total, cf) => total + cf.CompressedSize);
|
||||
await EntriesAsync
|
||||
.AggregateAsync(0L, (total, cf) => total + cf.CompressedSize)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
public async ValueTask<long> TotalUncompressedSizeAsync() =>
|
||||
await EntriesAsync.AggregateAsync(0L, (total, cf) => total + cf.Size);
|
||||
await EntriesAsync.AggregateAsync(0L, (total, cf) => total + cf.Size).ConfigureAwait(false);
|
||||
|
||||
public ValueTask<bool> IsEncryptedAsync() => new(IsEncrypted);
|
||||
|
||||
|
||||
@@ -20,7 +20,7 @@ public abstract partial class AbstractArchive<TEntry, TVolume> : IArchive, IAsyn
|
||||
private readonly LazyAsyncReadOnlyCollection<TVolume> _lazyVolumesAsync;
|
||||
private readonly LazyAsyncReadOnlyCollection<TEntry> _lazyEntriesAsync;
|
||||
|
||||
protected ReaderOptions ReaderOptions { get; }
|
||||
public ReaderOptions ReaderOptions { get; protected set; }
|
||||
|
||||
internal AbstractArchive(ArchiveType type, SourceStream sourceStream)
|
||||
{
|
||||
|
||||
@@ -5,13 +5,14 @@ using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Writers;
|
||||
using SharpCompress.Common.Options;
|
||||
|
||||
namespace SharpCompress.Archives;
|
||||
|
||||
public abstract partial class AbstractWritableArchive<TEntry, TVolume>
|
||||
public abstract partial class AbstractWritableArchive<TEntry, TVolume, TOptions>
|
||||
where TEntry : IArchiveEntry
|
||||
where TVolume : IVolume
|
||||
where TOptions : IWriterOptions
|
||||
{
|
||||
// Async property moved from main file
|
||||
private IAsyncEnumerable<TEntry> OldEntriesAsync =>
|
||||
@@ -38,7 +39,7 @@ public abstract partial class AbstractWritableArchive<TEntry, TVolume>
|
||||
if (!removedEntries.Contains(entry))
|
||||
{
|
||||
removedEntries.Add(entry);
|
||||
await RebuildModifiedCollectionAsync();
|
||||
await RebuildModifiedCollectionAsync().ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -85,7 +86,7 @@ public abstract partial class AbstractWritableArchive<TEntry, TVolume>
|
||||
}
|
||||
var entry = CreateEntry(key, source, size, modified, closeStream);
|
||||
newEntries.Add(entry);
|
||||
await RebuildModifiedCollectionAsync();
|
||||
await RebuildModifiedCollectionAsync().ConfigureAwait(false);
|
||||
return entry;
|
||||
}
|
||||
|
||||
@@ -105,13 +106,13 @@ public abstract partial class AbstractWritableArchive<TEntry, TVolume>
|
||||
}
|
||||
var entry = CreateDirectoryEntry(key, modified);
|
||||
newEntries.Add(entry);
|
||||
await RebuildModifiedCollectionAsync();
|
||||
await RebuildModifiedCollectionAsync().ConfigureAwait(false);
|
||||
return entry;
|
||||
}
|
||||
|
||||
public async ValueTask SaveToAsync(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
TOptions options,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
@@ -120,4 +121,12 @@ public abstract partial class AbstractWritableArchive<TEntry, TVolume>
|
||||
await SaveToAsync(stream, options, OldEntriesAsync, newEntries, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
|
||||
protected abstract ValueTask SaveToAsync(
|
||||
Stream stream,
|
||||
TOptions options,
|
||||
IAsyncEnumerable<TEntry> oldEntries,
|
||||
IEnumerable<TEntry> newEntries,
|
||||
CancellationToken cancellationToken = default
|
||||
);
|
||||
}
|
||||
|
||||
@@ -5,23 +5,25 @@ using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Options;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Writers;
|
||||
|
||||
namespace SharpCompress.Archives;
|
||||
|
||||
public abstract partial class AbstractWritableArchive<TEntry, TVolume>
|
||||
public abstract partial class AbstractWritableArchive<TEntry, TVolume, TOptions>
|
||||
: AbstractArchive<TEntry, TVolume>,
|
||||
IWritableArchive,
|
||||
IWritableAsyncArchive
|
||||
IWritableArchive<TOptions>,
|
||||
IWritableAsyncArchive<TOptions>
|
||||
where TEntry : IArchiveEntry
|
||||
where TVolume : IVolume
|
||||
where TOptions : IWriterOptions
|
||||
{
|
||||
private class RebuildPauseDisposable : IDisposable
|
||||
{
|
||||
private readonly AbstractWritableArchive<TEntry, TVolume> archive;
|
||||
private readonly AbstractWritableArchive<TEntry, TVolume, TOptions> archive;
|
||||
|
||||
public RebuildPauseDisposable(AbstractWritableArchive<TEntry, TVolume> archive)
|
||||
public RebuildPauseDisposable(AbstractWritableArchive<TEntry, TVolume, TOptions> archive)
|
||||
{
|
||||
this.archive = archive;
|
||||
archive.pauseRebuilding = true;
|
||||
@@ -150,13 +152,15 @@ public abstract partial class AbstractWritableArchive<TEntry, TVolume>
|
||||
long size,
|
||||
DateTime? modified,
|
||||
CancellationToken cancellationToken
|
||||
) => await AddEntryAsync(key, source, closeStream, size, modified, cancellationToken);
|
||||
) =>
|
||||
await AddEntryAsync(key, source, closeStream, size, modified, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
async ValueTask<IArchiveEntry> IWritableAsyncArchive.AddDirectoryEntryAsync(
|
||||
string key,
|
||||
DateTime? modified,
|
||||
CancellationToken cancellationToken
|
||||
) => await AddDirectoryEntryAsync(key, modified, cancellationToken);
|
||||
) => await AddDirectoryEntryAsync(key, modified, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
public TEntry AddDirectoryEntry(string key, DateTime? modified = null)
|
||||
{
|
||||
@@ -174,7 +178,7 @@ public abstract partial class AbstractWritableArchive<TEntry, TVolume>
|
||||
return entry;
|
||||
}
|
||||
|
||||
public void SaveTo(Stream stream, WriterOptions options)
|
||||
public void SaveTo(Stream stream, TOptions options)
|
||||
{
|
||||
//reset streams of new entries
|
||||
newEntries.Cast<IWritableArchiveEntry>().ForEach(x => x.Stream.Seek(0, SeekOrigin.Begin));
|
||||
@@ -210,19 +214,11 @@ public abstract partial class AbstractWritableArchive<TEntry, TVolume>
|
||||
|
||||
protected abstract void SaveTo(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
TOptions options,
|
||||
IEnumerable<TEntry> oldEntries,
|
||||
IEnumerable<TEntry> newEntries
|
||||
);
|
||||
|
||||
protected abstract ValueTask SaveToAsync(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
IAsyncEnumerable<TEntry> oldEntries,
|
||||
IEnumerable<TEntry> newEntries,
|
||||
CancellationToken cancellationToken = default
|
||||
);
|
||||
|
||||
public override void Dispose()
|
||||
{
|
||||
base.Dispose();
|
||||
|
||||
@@ -19,9 +19,9 @@ public static partial class ArchiveFactory
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
readerOptions ??= new ReaderOptions();
|
||||
stream = SharpCompressStream.Create(stream, bufferSize: readerOptions.BufferSize);
|
||||
var factory = await FindFactoryAsync<IArchiveFactory>(stream, cancellationToken);
|
||||
readerOptions ??= ReaderOptions.ForExternalStream;
|
||||
var factory = await FindFactoryAsync<IArchiveFactory>(stream, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
return factory.OpenAsyncArchive(stream, readerOptions);
|
||||
}
|
||||
|
||||
@@ -41,9 +41,10 @@ public static partial class ArchiveFactory
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
options ??= new ReaderOptions { LeaveStreamOpen = false };
|
||||
options ??= ReaderOptions.ForOwnedFile;
|
||||
|
||||
var factory = await FindFactoryAsync<IArchiveFactory>(fileInfo, cancellationToken);
|
||||
var factory = await FindFactoryAsync<IArchiveFactory>(fileInfo, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
return factory.OpenAsyncArchive(fileInfo, options);
|
||||
}
|
||||
|
||||
@@ -63,14 +64,16 @@ public static partial class ArchiveFactory
|
||||
var fileInfo = filesArray[0];
|
||||
if (filesArray.Length == 1)
|
||||
{
|
||||
return await OpenAsyncArchive(fileInfo, options, cancellationToken);
|
||||
return await OpenAsyncArchive(fileInfo, options, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
|
||||
fileInfo.NotNull(nameof(fileInfo));
|
||||
options ??= new ReaderOptions { LeaveStreamOpen = false };
|
||||
options ??= ReaderOptions.ForOwnedFile;
|
||||
|
||||
var factory = await FindFactoryAsync<IMultiArchiveFactory>(fileInfo, cancellationToken);
|
||||
return factory.OpenAsyncArchive(filesArray, options, cancellationToken);
|
||||
var factory = await FindFactoryAsync<IMultiArchiveFactory>(fileInfo, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
return factory.OpenAsyncArchive(filesArray, options);
|
||||
}
|
||||
|
||||
public static async ValueTask<IAsyncArchive> OpenAsyncArchive(
|
||||
@@ -90,13 +93,15 @@ public static partial class ArchiveFactory
|
||||
var firstStream = streamsArray[0];
|
||||
if (streamsArray.Length == 1)
|
||||
{
|
||||
return await OpenAsyncArchive(firstStream, options, cancellationToken);
|
||||
return await OpenAsyncArchive(firstStream, options, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
|
||||
firstStream.NotNull(nameof(firstStream));
|
||||
options ??= new ReaderOptions();
|
||||
options ??= ReaderOptions.ForExternalStream;
|
||||
|
||||
var factory = await FindFactoryAsync<IMultiArchiveFactory>(firstStream, cancellationToken);
|
||||
var factory = await FindFactoryAsync<IMultiArchiveFactory>(firstStream, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
return factory.OpenAsyncArchive(streamsArray, options);
|
||||
}
|
||||
|
||||
@@ -118,7 +123,7 @@ public static partial class ArchiveFactory
|
||||
{
|
||||
finfo.NotNull(nameof(finfo));
|
||||
using Stream stream = finfo.OpenRead();
|
||||
return await FindFactoryAsync<T>(stream, cancellationToken);
|
||||
return await FindFactoryAsync<T>(stream, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
private static async ValueTask<T> FindFactoryAsync<T>(
|
||||
@@ -141,7 +146,11 @@ public static partial class ArchiveFactory
|
||||
{
|
||||
stream.Seek(startPosition, SeekOrigin.Begin);
|
||||
|
||||
if (await factory.IsArchiveAsync(stream, cancellationToken: cancellationToken))
|
||||
if (
|
||||
await factory
|
||||
.IsArchiveAsync(stream, cancellationToken: cancellationToken)
|
||||
.ConfigureAwait(false)
|
||||
)
|
||||
{
|
||||
stream.Seek(startPosition, SeekOrigin.Begin);
|
||||
|
||||
|
||||
@@ -5,6 +5,7 @@ using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Options;
|
||||
using SharpCompress.Factories;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Readers;
|
||||
@@ -15,23 +16,23 @@ public static partial class ArchiveFactory
|
||||
{
|
||||
public static IArchive OpenArchive(Stream stream, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
readerOptions ??= new ReaderOptions();
|
||||
stream = SharpCompressStream.Create(stream, bufferSize: readerOptions.BufferSize);
|
||||
readerOptions ??= ReaderOptions.ForExternalStream;
|
||||
return FindFactory<IArchiveFactory>(stream).OpenArchive(stream, readerOptions);
|
||||
}
|
||||
|
||||
public static IWritableArchive CreateArchive(ArchiveType type)
|
||||
public static IWritableArchive<TOptions> CreateArchive<TOptions>()
|
||||
where TOptions : IWriterOptions
|
||||
{
|
||||
var factory = Factory
|
||||
.Factories.OfType<IWriteableArchiveFactory>()
|
||||
.FirstOrDefault(item => item.KnownArchiveType == type);
|
||||
.Factories.OfType<IWriteableArchiveFactory<TOptions>>()
|
||||
.FirstOrDefault();
|
||||
|
||||
if (factory != null)
|
||||
{
|
||||
return factory.CreateArchive();
|
||||
}
|
||||
|
||||
throw new NotSupportedException("Cannot create Archives of type: " + type);
|
||||
throw new NotSupportedException("Cannot create Archives of type: " + typeof(TOptions));
|
||||
}
|
||||
|
||||
public static IArchive OpenArchive(string filePath, ReaderOptions? options = null)
|
||||
@@ -42,7 +43,7 @@ public static partial class ArchiveFactory
|
||||
|
||||
public static IArchive OpenArchive(FileInfo fileInfo, ReaderOptions? options = null)
|
||||
{
|
||||
options ??= new ReaderOptions { LeaveStreamOpen = false };
|
||||
options ??= ReaderOptions.ForOwnedFile;
|
||||
|
||||
return FindFactory<IArchiveFactory>(fileInfo).OpenArchive(fileInfo, options);
|
||||
}
|
||||
@@ -66,7 +67,7 @@ public static partial class ArchiveFactory
|
||||
}
|
||||
|
||||
fileInfo.NotNull(nameof(fileInfo));
|
||||
options ??= new ReaderOptions { LeaveStreamOpen = false };
|
||||
options ??= ReaderOptions.ForOwnedFile;
|
||||
|
||||
return FindFactory<IMultiArchiveFactory>(fileInfo).OpenArchive(filesArray, options);
|
||||
}
|
||||
@@ -87,7 +88,7 @@ public static partial class ArchiveFactory
|
||||
}
|
||||
|
||||
firstStream.NotNull(nameof(firstStream));
|
||||
options ??= new ReaderOptions();
|
||||
options ??= ReaderOptions.ForExternalStream;
|
||||
|
||||
return FindFactory<IMultiArchiveFactory>(firstStream).OpenArchive(streamsArray, options);
|
||||
}
|
||||
@@ -95,11 +96,11 @@ public static partial class ArchiveFactory
|
||||
public static void WriteToDirectory(
|
||||
string sourceArchive,
|
||||
string destinationDirectory,
|
||||
ExtractionOptions? options = null
|
||||
ReaderOptions? options = null
|
||||
)
|
||||
{
|
||||
using var archive = OpenArchive(sourceArchive);
|
||||
archive.WriteToDirectory(destinationDirectory, options);
|
||||
using var archive = OpenArchive(sourceArchive, options);
|
||||
archive.WriteToDirectory(destinationDirectory);
|
||||
}
|
||||
|
||||
public static T FindFactory<T>(string path)
|
||||
|
||||
@@ -5,7 +5,7 @@ using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.GZip;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Common.Options;
|
||||
using SharpCompress.Readers;
|
||||
using SharpCompress.Readers.GZip;
|
||||
using SharpCompress.Writers;
|
||||
@@ -24,13 +24,13 @@ public partial class GZipArchive
|
||||
)
|
||||
{
|
||||
using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
|
||||
await SaveToAsync(stream, new WriterOptions(CompressionType.GZip), cancellationToken)
|
||||
await SaveToAsync(stream, new GZipWriterOptions(CompressionType.GZip), cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
|
||||
protected override async ValueTask SaveToAsync(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
GZipWriterOptions options,
|
||||
IAsyncEnumerable<GZipArchiveEntry> oldEntries,
|
||||
IEnumerable<GZipArchiveEntry> newEntries,
|
||||
CancellationToken cancellationToken = default
|
||||
@@ -40,14 +40,19 @@ public partial class GZipArchive
|
||||
{
|
||||
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
|
||||
}
|
||||
using var writer = new GZipWriter(stream, new GZipWriterOptions(options));
|
||||
await using var writer = new GZipWriter(
|
||||
stream,
|
||||
options as GZipWriterOptions ?? new GZipWriterOptions(options)
|
||||
);
|
||||
await foreach (
|
||||
var entry in oldEntries.WithCancellation(cancellationToken).ConfigureAwait(false)
|
||||
)
|
||||
{
|
||||
if (!entry.IsDirectory)
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
using var entryStream = await entry
|
||||
.OpenEntryStreamAsync(cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
await writer
|
||||
.WriteAsync(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
@@ -59,7 +64,9 @@ public partial class GZipArchive
|
||||
}
|
||||
foreach (var entry in newEntries.Where(x => !x.IsDirectory))
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
using var entryStream = await entry
|
||||
.OpenEntryStreamAsync(cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
await writer
|
||||
.WriteAsync(entry.Key.NotNull("Entry Key is null"), entryStream, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
@@ -77,10 +84,13 @@ public partial class GZipArchive
|
||||
IAsyncEnumerable<GZipVolume> volumes
|
||||
)
|
||||
{
|
||||
var stream = (await volumes.SingleAsync()).Stream;
|
||||
var stream = (await volumes.SingleAsync().ConfigureAwait(false)).Stream;
|
||||
yield return new GZipArchiveEntry(
|
||||
this,
|
||||
await GZipFilePart.CreateAsync(stream, ReaderOptions.ArchiveEncoding)
|
||||
await GZipFilePart
|
||||
.CreateAsync(stream, ReaderOptions.ArchiveEncoding)
|
||||
.ConfigureAwait(false),
|
||||
ReaderOptions
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,43 +5,41 @@ using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.GZip;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Readers;
|
||||
using SharpCompress.Readers.GZip;
|
||||
using SharpCompress.Writers;
|
||||
using SharpCompress.Writers.GZip;
|
||||
|
||||
namespace SharpCompress.Archives.GZip;
|
||||
|
||||
public partial class GZipArchive
|
||||
#if NET8_0_OR_GREATER
|
||||
: IWritableArchiveOpenable,
|
||||
IMultiArchiveOpenable<IWritableArchive, IWritableAsyncArchive>
|
||||
: IWritableArchiveOpenable<GZipWriterOptions>,
|
||||
IMultiArchiveOpenable<
|
||||
IWritableArchive<GZipWriterOptions>,
|
||||
IWritableAsyncArchive<GZipWriterOptions>
|
||||
>
|
||||
#endif
|
||||
{
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
public static IWritableAsyncArchive<GZipWriterOptions> OpenAsyncArchive(
|
||||
string path,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
path.NotNullOrEmpty(nameof(path));
|
||||
return (IWritableAsyncArchive)OpenArchive(
|
||||
new FileInfo(path),
|
||||
readerOptions ?? new ReaderOptions()
|
||||
);
|
||||
return (IWritableAsyncArchive<GZipWriterOptions>)
|
||||
OpenArchive(new FileInfo(path), readerOptions ?? new ReaderOptions());
|
||||
}
|
||||
|
||||
public static IWritableArchive OpenArchive(string filePath, ReaderOptions? readerOptions = null)
|
||||
public static IWritableArchive<GZipWriterOptions> OpenArchive(
|
||||
string filePath,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
filePath.NotNullOrEmpty(nameof(filePath));
|
||||
return OpenArchive(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
|
||||
}
|
||||
|
||||
public static IWritableArchive OpenArchive(
|
||||
public static IWritableArchive<GZipWriterOptions> OpenArchive(
|
||||
FileInfo fileInfo,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
@@ -56,7 +54,7 @@ public partial class GZipArchive
|
||||
);
|
||||
}
|
||||
|
||||
public static IWritableArchive OpenArchive(
|
||||
public static IWritableArchive<GZipWriterOptions> OpenArchive(
|
||||
IEnumerable<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
@@ -72,7 +70,7 @@ public partial class GZipArchive
|
||||
);
|
||||
}
|
||||
|
||||
public static IWritableArchive OpenArchive(
|
||||
public static IWritableArchive<GZipWriterOptions> OpenArchive(
|
||||
IEnumerable<Stream> streams,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
@@ -88,7 +86,10 @@ public partial class GZipArchive
|
||||
);
|
||||
}
|
||||
|
||||
public static IWritableArchive OpenArchive(Stream stream, ReaderOptions? readerOptions = null)
|
||||
public static IWritableArchive<GZipWriterOptions> OpenArchive(
|
||||
Stream stream,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
stream.NotNull(nameof(stream));
|
||||
|
||||
@@ -102,49 +103,30 @@ public partial class GZipArchive
|
||||
);
|
||||
}
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
public static IWritableAsyncArchive<GZipWriterOptions> OpenAsyncArchive(
|
||||
Stream stream,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(stream, readerOptions);
|
||||
}
|
||||
ReaderOptions? readerOptions = null
|
||||
) => (IWritableAsyncArchive<GZipWriterOptions>)OpenArchive(stream, readerOptions);
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
public static IWritableAsyncArchive<GZipWriterOptions> OpenAsyncArchive(
|
||||
FileInfo fileInfo,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(fileInfo, readerOptions);
|
||||
}
|
||||
ReaderOptions? readerOptions = null
|
||||
) => (IWritableAsyncArchive<GZipWriterOptions>)OpenArchive(fileInfo, readerOptions);
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
public static IWritableAsyncArchive<GZipWriterOptions> OpenAsyncArchive(
|
||||
IReadOnlyList<Stream> streams,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(streams, readerOptions);
|
||||
}
|
||||
ReaderOptions? readerOptions = null
|
||||
) => (IWritableAsyncArchive<GZipWriterOptions>)OpenArchive(streams, readerOptions);
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
public static IWritableAsyncArchive<GZipWriterOptions> OpenAsyncArchive(
|
||||
IReadOnlyList<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(fileInfos, readerOptions);
|
||||
}
|
||||
ReaderOptions? readerOptions = null
|
||||
) => (IWritableAsyncArchive<GZipWriterOptions>)OpenArchive(fileInfos, readerOptions);
|
||||
|
||||
public static IWritableArchive CreateArchive() => new GZipArchive();
|
||||
public static IWritableArchive<GZipWriterOptions> CreateArchive() => new GZipArchive();
|
||||
|
||||
public static IWritableAsyncArchive CreateAsyncArchive() => new GZipArchive();
|
||||
public static IWritableAsyncArchive<GZipWriterOptions> CreateAsyncArchive() =>
|
||||
new GZipArchive();
|
||||
|
||||
public static bool IsGZipFile(string filePath) => IsGZipFile(new FileInfo(filePath));
|
||||
|
||||
|
||||
@@ -2,10 +2,9 @@ using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.GZip;
|
||||
using SharpCompress.Common.Options;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Readers;
|
||||
using SharpCompress.Readers.GZip;
|
||||
@@ -14,7 +13,8 @@ using SharpCompress.Writers.GZip;
|
||||
|
||||
namespace SharpCompress.Archives.GZip;
|
||||
|
||||
public partial class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
|
||||
public partial class GZipArchive
|
||||
: AbstractWritableArchive<GZipArchiveEntry, GZipVolume, GZipWriterOptions>
|
||||
{
|
||||
private GZipArchive(SourceStream sourceStream)
|
||||
: base(ArchiveType.GZip, sourceStream) { }
|
||||
@@ -33,7 +33,7 @@ public partial class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZi
|
||||
public void SaveTo(FileInfo fileInfo)
|
||||
{
|
||||
using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
|
||||
SaveTo(stream, new WriterOptions(CompressionType.GZip));
|
||||
SaveTo(stream, new GZipWriterOptions(CompressionType.GZip));
|
||||
}
|
||||
|
||||
protected override GZipArchiveEntry CreateEntryInternal(
|
||||
@@ -58,7 +58,7 @@ public partial class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZi
|
||||
|
||||
protected override void SaveTo(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
GZipWriterOptions options,
|
||||
IEnumerable<GZipArchiveEntry> oldEntries,
|
||||
IEnumerable<GZipArchiveEntry> newEntries
|
||||
)
|
||||
@@ -67,7 +67,10 @@ public partial class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZi
|
||||
{
|
||||
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
|
||||
}
|
||||
using var writer = new GZipWriter(stream, new GZipWriterOptions(options));
|
||||
using var writer = new GZipWriter(
|
||||
stream,
|
||||
options as GZipWriterOptions ?? new GZipWriterOptions(options)
|
||||
);
|
||||
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
@@ -84,7 +87,8 @@ public partial class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZi
|
||||
var stream = volumes.Single().Stream;
|
||||
yield return new GZipArchiveEntry(
|
||||
this,
|
||||
GZipFilePart.Create(stream, ReaderOptions.ArchiveEncoding)
|
||||
GZipFilePart.Create(stream, ReaderOptions.ArchiveEncoding),
|
||||
ReaderOptions
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -1,15 +1,16 @@
|
||||
using System.IO;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.GZip;
|
||||
using SharpCompress.Common.Options;
|
||||
|
||||
namespace SharpCompress.Archives.GZip;
|
||||
|
||||
public class GZipArchiveEntry : GZipEntry, IArchiveEntry
|
||||
{
|
||||
internal GZipArchiveEntry(GZipArchive archive, GZipFilePart? part)
|
||||
: base(part) => Archive = archive;
|
||||
internal GZipArchiveEntry(GZipArchive archive, GZipFilePart? part, IReaderOptions readerOptions)
|
||||
: base(part, readerOptions) => Archive = archive;
|
||||
|
||||
public virtual Stream OpenEntryStream()
|
||||
{
|
||||
|
||||
@@ -19,7 +19,7 @@ internal sealed class GZipWritableArchiveEntry : GZipArchiveEntry, IWritableArch
|
||||
DateTime? lastModified,
|
||||
bool closeStream
|
||||
)
|
||||
: base(archive, null)
|
||||
: base(archive, null, archive.ReaderOptions)
|
||||
{
|
||||
this.stream = stream;
|
||||
Key = path;
|
||||
@@ -58,7 +58,7 @@ internal sealed class GZipWritableArchiveEntry : GZipArchiveEntry, IWritableArch
|
||||
{
|
||||
//ensure new stream is at the start, this could be reset
|
||||
stream.Seek(0, SeekOrigin.Begin);
|
||||
return SharpCompressStream.Create(stream, leaveOpen: true);
|
||||
return SharpCompressStream.CreateNonDisposing(stream);
|
||||
}
|
||||
|
||||
internal override void Close()
|
||||
|
||||
@@ -12,6 +12,11 @@ public interface IArchive : IDisposable
|
||||
|
||||
ArchiveType Type { get; }
|
||||
|
||||
/// <summary>
|
||||
/// The options used when opening this archive, including extraction behavior settings.
|
||||
/// </summary>
|
||||
ReaderOptions ReaderOptions { get; }
|
||||
|
||||
/// <summary>
|
||||
/// Use this method to extract all entries in an archive in order.
|
||||
/// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
|
||||
|
||||
@@ -46,7 +46,15 @@ public static class IArchiveEntryExtensions
|
||||
throw new ExtractionException("Entry is a file directory and cannot be extracted.");
|
||||
}
|
||||
|
||||
using var entryStream = await archiveEntry.OpenEntryStreamAsync(cancellationToken);
|
||||
#if LEGACY_DOTNET
|
||||
using var entryStream = await archiveEntry
|
||||
.OpenEntryStreamAsync(cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
#else
|
||||
await using var entryStream = await archiveEntry
|
||||
.OpenEntryStreamAsync(cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
#endif
|
||||
var sourceStream = WrapWithProgress(entryStream, archiveEntry, progress);
|
||||
await sourceStream
|
||||
.CopyToAsync(streamToWriteTo, Constants.BufferSize, cancellationToken)
|
||||
@@ -94,15 +102,11 @@ public static class IArchiveEntryExtensions
|
||||
/// <summary>
|
||||
/// Extract to specific directory, retaining filename
|
||||
/// </summary>
|
||||
public void WriteToDirectory(
|
||||
string destinationDirectory,
|
||||
ExtractionOptions? options = null
|
||||
) =>
|
||||
public void WriteToDirectory(string destinationDirectory) =>
|
||||
ExtractionMethods.WriteEntryToDirectory(
|
||||
entry,
|
||||
destinationDirectory,
|
||||
options,
|
||||
entry.WriteToFile
|
||||
(path) => entry.WriteToFile(path)
|
||||
);
|
||||
|
||||
/// <summary>
|
||||
@@ -110,15 +114,14 @@ public static class IArchiveEntryExtensions
|
||||
/// </summary>
|
||||
public async ValueTask WriteToDirectoryAsync(
|
||||
string destinationDirectory,
|
||||
ExtractionOptions? options = null,
|
||||
CancellationToken cancellationToken = default
|
||||
) =>
|
||||
await ExtractionMethods
|
||||
.WriteEntryToDirectoryAsync(
|
||||
entry,
|
||||
destinationDirectory,
|
||||
options,
|
||||
entry.WriteToFileAsync,
|
||||
async (path, ct) =>
|
||||
await entry.WriteToFileAsync(path, ct).ConfigureAwait(false),
|
||||
cancellationToken
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
@@ -126,11 +129,10 @@ public static class IArchiveEntryExtensions
|
||||
/// <summary>
|
||||
/// Extract to specific file
|
||||
/// </summary>
|
||||
public void WriteToFile(string destinationFileName, ExtractionOptions? options = null) =>
|
||||
public void WriteToFile(string destinationFileName) =>
|
||||
ExtractionMethods.WriteEntryToFile(
|
||||
entry,
|
||||
destinationFileName,
|
||||
options,
|
||||
(x, fm) =>
|
||||
{
|
||||
using var fs = File.Open(destinationFileName, fm);
|
||||
@@ -143,14 +145,12 @@ public static class IArchiveEntryExtensions
|
||||
/// </summary>
|
||||
public async ValueTask WriteToFileAsync(
|
||||
string destinationFileName,
|
||||
ExtractionOptions? options = null,
|
||||
CancellationToken cancellationToken = default
|
||||
) =>
|
||||
await ExtractionMethods
|
||||
.WriteEntryToFileAsync(
|
||||
entry,
|
||||
destinationFileName,
|
||||
options,
|
||||
async (x, fm, ct) =>
|
||||
{
|
||||
using var fs = File.Open(destinationFileName, fm);
|
||||
|
||||
@@ -14,28 +14,25 @@ public static class IArchiveExtensions
|
||||
/// Extract to specific directory with progress reporting
|
||||
/// </summary>
|
||||
/// <param name="destinationDirectory">The folder to extract into.</param>
|
||||
/// <param name="options">Extraction options.</param>
|
||||
/// <param name="progress">Optional progress reporter for tracking extraction progress.</param>
|
||||
public void WriteToDirectory(
|
||||
string destinationDirectory,
|
||||
ExtractionOptions? options = null,
|
||||
IProgress<ProgressReport>? progress = null
|
||||
)
|
||||
{
|
||||
if (archive.IsSolid || archive.Type == ArchiveType.SevenZip)
|
||||
{
|
||||
using var reader = archive.ExtractAllEntries();
|
||||
reader.WriteAllToDirectory(destinationDirectory, options);
|
||||
reader.WriteAllToDirectory(destinationDirectory);
|
||||
}
|
||||
else
|
||||
{
|
||||
archive.WriteToDirectoryInternal(destinationDirectory, options, progress);
|
||||
archive.WriteToDirectoryInternal(destinationDirectory, progress);
|
||||
}
|
||||
}
|
||||
|
||||
private void WriteToDirectoryInternal(
|
||||
string destinationDirectory,
|
||||
ExtractionOptions? options,
|
||||
IProgress<ProgressReport>? progress
|
||||
)
|
||||
{
|
||||
@@ -61,7 +58,7 @@ public static class IArchiveExtensions
|
||||
continue;
|
||||
}
|
||||
|
||||
entry.WriteToDirectory(destinationDirectory, options);
|
||||
entry.WriteToDirectory(destinationDirectory);
|
||||
|
||||
bytesRead += entry.Size;
|
||||
progress?.Report(
|
||||
|
||||
@@ -20,20 +20,17 @@ public interface IArchiveOpenable<TSync, TASync>
|
||||
|
||||
public static abstract TASync OpenAsyncArchive(
|
||||
string path,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
ReaderOptions? readerOptions = null
|
||||
);
|
||||
|
||||
public static abstract TASync OpenAsyncArchive(
|
||||
Stream stream,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
ReaderOptions? readerOptions = null
|
||||
);
|
||||
|
||||
public static abstract TASync OpenAsyncArchive(
|
||||
FileInfo fileInfo,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
ReaderOptions? readerOptions = null
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -17,44 +17,45 @@ public static class IAsyncArchiveExtensions
|
||||
/// </summary>
|
||||
/// <param name="archive">The archive to extract.</param>
|
||||
/// <param name="destinationDirectory">The folder to extract into.</param>
|
||||
/// <param name="options">Extraction options.</param>
|
||||
/// <param name="progress">Optional progress reporter for tracking extraction progress.</param>
|
||||
/// <param name="cancellationToken">Optional cancellation token.</param>
|
||||
public async ValueTask WriteToDirectoryAsync(
|
||||
string destinationDirectory,
|
||||
ExtractionOptions? options = null,
|
||||
IProgress<ProgressReport>? progress = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
if (await archive.IsSolidAsync() || archive.Type == ArchiveType.SevenZip)
|
||||
if (
|
||||
await archive.IsSolidAsync().ConfigureAwait(false)
|
||||
|| archive.Type == ArchiveType.SevenZip
|
||||
)
|
||||
{
|
||||
await using var reader = await archive.ExtractAllEntriesAsync();
|
||||
await reader.WriteAllToDirectoryAsync(
|
||||
destinationDirectory,
|
||||
options,
|
||||
cancellationToken
|
||||
);
|
||||
await using var reader = await archive
|
||||
.ExtractAllEntriesAsync()
|
||||
.ConfigureAwait(false);
|
||||
await reader
|
||||
.WriteAllToDirectoryAsync(destinationDirectory, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
else
|
||||
{
|
||||
await archive.WriteToDirectoryAsyncInternal(
|
||||
destinationDirectory,
|
||||
options,
|
||||
progress,
|
||||
cancellationToken
|
||||
);
|
||||
await archive
|
||||
.WriteToDirectoryAsyncInternal(
|
||||
destinationDirectory,
|
||||
progress,
|
||||
cancellationToken
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
|
||||
private async ValueTask WriteToDirectoryAsyncInternal(
|
||||
string destinationDirectory,
|
||||
ExtractionOptions? options,
|
||||
IProgress<ProgressReport>? progress,
|
||||
CancellationToken cancellationToken
|
||||
)
|
||||
{
|
||||
var totalBytes = await archive.TotalUncompressedSizeAsync();
|
||||
var totalBytes = await archive.TotalUncompressedSizeAsync().ConfigureAwait(false);
|
||||
var bytesRead = 0L;
|
||||
var seenDirectories = new HashSet<string>();
|
||||
|
||||
@@ -79,7 +80,7 @@ public static class IAsyncArchiveExtensions
|
||||
}
|
||||
|
||||
await entry
|
||||
.WriteToDirectoryAsync(destinationDirectory, options, cancellationToken)
|
||||
.WriteToDirectoryAsync(destinationDirectory, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
bytesRead += entry.Size;
|
||||
|
||||
@@ -50,10 +50,8 @@ public interface IMultiArchiveFactory : IFactory
|
||||
/// </summary>
|
||||
/// <param name="fileInfos"></param>
|
||||
/// <param name="readerOptions">reading options.</param>
|
||||
/// <param name="cancellationToken">Cancellation token.</param>
|
||||
IAsyncArchive OpenAsyncArchive(
|
||||
IReadOnlyList<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
ReaderOptions? readerOptions = null
|
||||
);
|
||||
}
|
||||
|
||||
@@ -22,14 +22,12 @@ public interface IMultiArchiveOpenable<TSync, TASync>
|
||||
|
||||
public static abstract TASync OpenAsyncArchive(
|
||||
IReadOnlyList<Stream> streams,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
ReaderOptions? readerOptions = null
|
||||
);
|
||||
|
||||
public static abstract TASync OpenAsyncArchive(
|
||||
IReadOnlyList<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
ReaderOptions? readerOptions = null
|
||||
);
|
||||
}
|
||||
#endif
|
||||
|
||||
@@ -2,6 +2,7 @@ using System;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.Options;
|
||||
using SharpCompress.Writers;
|
||||
|
||||
namespace SharpCompress.Archives;
|
||||
@@ -27,28 +28,23 @@ public interface IWritableArchive : IArchive, IWritableArchiveCommon
|
||||
|
||||
IArchiveEntry AddDirectoryEntry(string key, DateTime? modified = null);
|
||||
|
||||
/// <summary>
|
||||
/// Saves the archive to the specified stream using the given writer options.
|
||||
/// </summary>
|
||||
void SaveTo(Stream stream, WriterOptions options);
|
||||
|
||||
/// <summary>
|
||||
/// Removes the specified entry from the archive.
|
||||
/// </summary>
|
||||
void RemoveEntry(IArchiveEntry entry);
|
||||
}
|
||||
|
||||
public interface IWritableAsyncArchive : IAsyncArchive, IWritableArchiveCommon
|
||||
public interface IWritableArchive<TOptions> : IWritableArchive
|
||||
where TOptions : IWriterOptions
|
||||
{
|
||||
/// <summary>
|
||||
/// Asynchronously saves the archive to the specified stream using the given writer options.
|
||||
/// Saves the archive to the specified stream using the given writer options.
|
||||
/// </summary>
|
||||
ValueTask SaveToAsync(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
CancellationToken cancellationToken = default
|
||||
);
|
||||
void SaveTo(Stream stream, TOptions options);
|
||||
}
|
||||
|
||||
public interface IWritableAsyncArchive : IAsyncArchive, IWritableArchiveCommon
|
||||
{
|
||||
/// <summary>
|
||||
/// Asynchronously adds an entry to the archive with the specified key, source stream, and options.
|
||||
/// </summary>
|
||||
@@ -75,3 +71,16 @@ public interface IWritableAsyncArchive : IAsyncArchive, IWritableArchiveCommon
|
||||
/// </summary>
|
||||
ValueTask RemoveEntryAsync(IArchiveEntry entry);
|
||||
}
|
||||
|
||||
public interface IWritableAsyncArchive<TOptions> : IWritableAsyncArchive
|
||||
where TOptions : IWriterOptions
|
||||
{
|
||||
/// <summary>
|
||||
/// Asynchronously saves the archive to the specified stream using the given writer options.
|
||||
/// </summary>
|
||||
ValueTask SaveToAsync(
|
||||
Stream stream,
|
||||
TOptions options,
|
||||
CancellationToken cancellationToken = default
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Options;
|
||||
using SharpCompress.Writers;
|
||||
|
||||
namespace SharpCompress.Archives;
|
||||
@@ -57,14 +58,23 @@ public static class IWritableArchiveExtensions
|
||||
fileInfo.LastWriteTime
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
public void SaveTo(string filePath, WriterOptions? options = null) =>
|
||||
writableArchive.SaveTo(new FileInfo(filePath), options ?? new(CompressionType.Deflate));
|
||||
public static void SaveTo<TOptions>(
|
||||
this IWritableArchive<TOptions> writableArchive,
|
||||
string filePath,
|
||||
TOptions options
|
||||
)
|
||||
where TOptions : IWriterOptions => writableArchive.SaveTo(new FileInfo(filePath), options);
|
||||
|
||||
public void SaveTo(FileInfo fileInfo, WriterOptions? options = null)
|
||||
{
|
||||
using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
|
||||
writableArchive.SaveTo(stream, options ?? new(CompressionType.Deflate));
|
||||
}
|
||||
public static void SaveTo<TOptions>(
|
||||
this IWritableArchive<TOptions> writableArchive,
|
||||
FileInfo fileInfo,
|
||||
TOptions options
|
||||
)
|
||||
where TOptions : IWriterOptions
|
||||
{
|
||||
using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
|
||||
writableArchive.SaveTo(stream, options);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,10 +1,13 @@
|
||||
using SharpCompress.Common.Options;
|
||||
|
||||
#if NET8_0_OR_GREATER
|
||||
namespace SharpCompress.Archives;
|
||||
|
||||
public interface IWritableArchiveOpenable
|
||||
: IArchiveOpenable<IWritableArchive, IWritableAsyncArchive>
|
||||
public interface IWritableArchiveOpenable<TOptions>
|
||||
: IArchiveOpenable<IWritableArchive<TOptions>, IWritableAsyncArchive<TOptions>>
|
||||
where TOptions : IWriterOptions
|
||||
{
|
||||
public static abstract IWritableArchive CreateArchive();
|
||||
public static abstract IWritableAsyncArchive CreateAsyncArchive();
|
||||
public static abstract IWritableArchive<TOptions> CreateArchive();
|
||||
public static abstract IWritableAsyncArchive<TOptions> CreateAsyncArchive();
|
||||
}
|
||||
#endif
|
||||
|
||||
@@ -3,6 +3,7 @@ using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Options;
|
||||
using SharpCompress.Writers;
|
||||
|
||||
namespace SharpCompress.Archives;
|
||||
@@ -24,13 +25,15 @@ public static class IWritableAsyncArchiveExtensions
|
||||
)
|
||||
{
|
||||
var fileInfo = new FileInfo(path);
|
||||
await writableArchive.AddEntryAsync(
|
||||
path.Substring(filePath.Length),
|
||||
fileInfo.OpenRead(),
|
||||
true,
|
||||
fileInfo.Length,
|
||||
fileInfo.LastWriteTime
|
||||
);
|
||||
await writableArchive
|
||||
.AddEntryAsync(
|
||||
path.Substring(filePath.Length),
|
||||
fileInfo.OpenRead(),
|
||||
true,
|
||||
fileInfo.Length,
|
||||
fileInfo.LastWriteTime
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -59,28 +62,26 @@ public static class IWritableAsyncArchiveExtensions
|
||||
fileInfo.LastWriteTime
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
public ValueTask SaveToAsync(
|
||||
string filePath,
|
||||
WriterOptions? options = null,
|
||||
CancellationToken cancellationToken = default
|
||||
) =>
|
||||
writableArchive.SaveToAsync(
|
||||
new FileInfo(filePath),
|
||||
options ?? new(CompressionType.Deflate),
|
||||
cancellationToken
|
||||
);
|
||||
public static ValueTask SaveToAsync<TOptions>(
|
||||
this IWritableAsyncArchive<TOptions> writableArchive,
|
||||
string filePath,
|
||||
TOptions options,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
where TOptions : IWriterOptions =>
|
||||
writableArchive.SaveToAsync(new FileInfo(filePath), options, cancellationToken);
|
||||
|
||||
public async ValueTask SaveToAsync(
|
||||
FileInfo fileInfo,
|
||||
WriterOptions? options = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
|
||||
await writableArchive
|
||||
.SaveToAsync(stream, options ?? new(CompressionType.Deflate), cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
public static async ValueTask SaveToAsync<TOptions>(
|
||||
this IWritableAsyncArchive<TOptions> writableArchive,
|
||||
FileInfo fileInfo,
|
||||
TOptions options,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
where TOptions : IWriterOptions
|
||||
{
|
||||
using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
|
||||
await writableArchive.SaveToAsync(stream, options, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
using SharpCompress.Common.Options;
|
||||
|
||||
namespace SharpCompress.Archives;
|
||||
|
||||
/// <summary>
|
||||
@@ -10,11 +12,12 @@ namespace SharpCompress.Archives;
|
||||
/// <item><see cref="Factories.ZipFactory"/></item>
|
||||
/// <item><see cref="Factories.GZipFactory"/></item>
|
||||
/// </list>
|
||||
public interface IWriteableArchiveFactory : Factories.IFactory
|
||||
public interface IWriteableArchiveFactory<TOptions> : Factories.IFactory
|
||||
where TOptions : IWriterOptions
|
||||
{
|
||||
/// <summary>
|
||||
/// Creates a new, empty archive, ready to be written.
|
||||
/// </summary>
|
||||
/// <returns></returns>
|
||||
IWritableArchive CreateArchive();
|
||||
IWritableArchive<TOptions> CreateArchive();
|
||||
}
|
||||
|
||||
@@ -24,8 +24,7 @@ internal class FileInfoRarArchiveVolume : RarVolume
|
||||
private static ReaderOptions FixOptions(ReaderOptions options)
|
||||
{
|
||||
//make sure we're closing streams with fileinfo
|
||||
options.LeaveStreamOpen = false;
|
||||
return options;
|
||||
return options with { LeaveStreamOpen = false };
|
||||
}
|
||||
|
||||
internal ReadOnlyCollection<RarFilePart> FileParts { get; }
|
||||
|
||||
@@ -25,13 +25,13 @@ public partial class RarArchive
|
||||
}
|
||||
|
||||
_disposed = true;
|
||||
await base.DisposeAsync();
|
||||
await base.DisposeAsync().ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
|
||||
protected override async ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync()
|
||||
{
|
||||
if (await this.IsMultipartVolumeAsync())
|
||||
if (await this.IsMultipartVolumeAsync().ConfigureAwait(false))
|
||||
{
|
||||
var streams = await VolumesAsync
|
||||
.Select(volume =>
|
||||
@@ -39,15 +39,18 @@ public partial class RarArchive
|
||||
volume.Stream.Position = 0;
|
||||
return volume.Stream;
|
||||
})
|
||||
.ToListAsync();
|
||||
.ToListAsync()
|
||||
.ConfigureAwait(false);
|
||||
return (RarReader)RarReader.OpenReader(streams, ReaderOptions);
|
||||
}
|
||||
|
||||
var stream = (await VolumesAsync.FirstAsync()).Stream;
|
||||
var stream = (await VolumesAsync.FirstAsync().ConfigureAwait(false)).Stream;
|
||||
stream.Position = 0;
|
||||
return (RarReader)RarReader.OpenReader(stream, ReaderOptions);
|
||||
}
|
||||
|
||||
public override async ValueTask<bool> IsSolidAsync() =>
|
||||
await (await VolumesAsync.CastAsync<RarVolume>().FirstAsync()).IsSolidArchiveAsync();
|
||||
await (await VolumesAsync.CastAsync<RarVolume>().FirstAsync().ConfigureAwait(false))
|
||||
.IsSolidArchiveAsync()
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
|
||||
@@ -25,12 +25,16 @@ public static class RarArchiveExtensions
|
||||
/// RarArchive is the first volume of a multi-part archive. If MultipartVolume is true and IsFirstVolume is false then the first volume file must be missing.
|
||||
/// </summary>
|
||||
public async ValueTask<bool> IsFirstVolumeAsync() =>
|
||||
(await archive.VolumesAsync.CastAsync<RarVolume>().FirstAsync()).IsFirstVolume;
|
||||
(
|
||||
await archive.VolumesAsync.CastAsync<RarVolume>().FirstAsync().ConfigureAwait(false)
|
||||
).IsFirstVolume;
|
||||
|
||||
/// <summary>
|
||||
/// RarArchive is part of a multi-part archive.
|
||||
/// </summary>
|
||||
public async ValueTask<bool> IsMultipartVolumeAsync() =>
|
||||
(await archive.VolumesAsync.CastAsync<RarVolume>().FirstAsync()).IsMultiVolume;
|
||||
(
|
||||
await archive.VolumesAsync.CastAsync<RarVolume>().FirstAsync().ConfigureAwait(false)
|
||||
).IsMultiVolume;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -22,11 +22,9 @@ public partial class RarArchive
|
||||
{
|
||||
public static IRarAsyncArchive OpenAsyncArchive(
|
||||
string path,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
path.NotNullOrEmpty(nameof(path));
|
||||
return (IRarAsyncArchive)OpenArchive(new FileInfo(path), readerOptions);
|
||||
}
|
||||
@@ -102,41 +100,33 @@ public partial class RarArchive
|
||||
|
||||
public static IRarAsyncArchive OpenAsyncArchive(
|
||||
Stream stream,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IRarAsyncArchive)OpenArchive(stream, readerOptions);
|
||||
}
|
||||
|
||||
public static IRarAsyncArchive OpenAsyncArchive(
|
||||
FileInfo fileInfo,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IRarAsyncArchive)OpenArchive(fileInfo, readerOptions);
|
||||
}
|
||||
|
||||
public static IRarAsyncArchive OpenAsyncArchive(
|
||||
IReadOnlyList<Stream> streams,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IRarAsyncArchive)OpenArchive(streams, readerOptions);
|
||||
}
|
||||
|
||||
public static IRarAsyncArchive OpenAsyncArchive(
|
||||
IReadOnlyList<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IRarAsyncArchive)OpenArchive(fileInfos, readerOptions);
|
||||
}
|
||||
|
||||
|
||||
@@ -21,9 +21,9 @@ public partial class RarArchiveEntry
|
||||
stream = new RarStream(
|
||||
archive.UnpackV1.Value,
|
||||
FileHeader,
|
||||
await MultiVolumeReadOnlyAsyncStream.Create(
|
||||
Parts.ToAsyncEnumerable().CastAsync<RarFilePart>()
|
||||
)
|
||||
await MultiVolumeReadOnlyAsyncStream
|
||||
.Create(Parts.ToAsyncEnumerable().CastAsync<RarFilePart>())
|
||||
.ConfigureAwait(false)
|
||||
);
|
||||
}
|
||||
else
|
||||
@@ -31,13 +31,13 @@ public partial class RarArchiveEntry
|
||||
stream = new RarStream(
|
||||
archive.UnpackV2017.Value,
|
||||
FileHeader,
|
||||
await MultiVolumeReadOnlyAsyncStream.Create(
|
||||
Parts.ToAsyncEnumerable().CastAsync<RarFilePart>()
|
||||
)
|
||||
await MultiVolumeReadOnlyAsyncStream
|
||||
.Create(Parts.ToAsyncEnumerable().CastAsync<RarFilePart>())
|
||||
.ConfigureAwait(false)
|
||||
);
|
||||
}
|
||||
|
||||
await stream.InitializeAsync(cancellationToken);
|
||||
await stream.InitializeAsync(cancellationToken).ConfigureAwait(false);
|
||||
return stream;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -23,6 +23,7 @@ public partial class RarArchiveEntry : RarEntry, IArchiveEntry
|
||||
IEnumerable<RarFilePart> parts,
|
||||
ReaderOptions readerOptions
|
||||
)
|
||||
: base(readerOptions)
|
||||
{
|
||||
this.parts = parts.ToList();
|
||||
this.archive = archive;
|
||||
|
||||
@@ -21,15 +21,12 @@ public partial class SevenZipArchive
|
||||
{
|
||||
stream.Position = 0;
|
||||
var reader = new ArchiveReader();
|
||||
await reader.OpenAsync(
|
||||
stream,
|
||||
lookForHeader: ReaderOptions.LookForHeader,
|
||||
cancellationToken
|
||||
);
|
||||
_database = await reader.ReadDatabaseAsync(
|
||||
new PasswordProvider(ReaderOptions.Password),
|
||||
cancellationToken
|
||||
);
|
||||
await reader
|
||||
.OpenAsync(stream, lookForHeader: ReaderOptions.LookForHeader, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
_database = await reader
|
||||
.ReadDatabaseAsync(new PasswordProvider(ReaderOptions.Password), cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -37,8 +34,8 @@ public partial class SevenZipArchive
|
||||
IAsyncEnumerable<SevenZipVolume> volumes
|
||||
)
|
||||
{
|
||||
var stream = (await volumes.SingleAsync()).Stream;
|
||||
await LoadFactoryAsync(stream);
|
||||
var stream = (await volumes.SingleAsync().ConfigureAwait(false)).Stream;
|
||||
await LoadFactoryAsync(stream).ConfigureAwait(false);
|
||||
if (_database is null)
|
||||
{
|
||||
yield break;
|
||||
@@ -49,7 +46,8 @@ public partial class SevenZipArchive
|
||||
var file = _database._files[i];
|
||||
entries[i] = new SevenZipArchiveEntry(
|
||||
this,
|
||||
new SevenZipFilePart(stream, _database, i, file, ReaderOptions.ArchiveEncoding)
|
||||
new SevenZipFilePart(stream, _database, i, file, ReaderOptions.ArchiveEncoding),
|
||||
ReaderOptions
|
||||
);
|
||||
}
|
||||
foreach (var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder))
|
||||
|
||||
@@ -16,13 +16,8 @@ public partial class SevenZipArchive
|
||||
IMultiArchiveOpenable<IArchive, IAsyncArchive>
|
||||
#endif
|
||||
{
|
||||
public static IAsyncArchive OpenAsyncArchive(
|
||||
string path,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
public static IAsyncArchive OpenAsyncArchive(string path, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
path.NotNullOrEmpty("path");
|
||||
return (IAsyncArchive)OpenArchive(new FileInfo(path), readerOptions ?? new ReaderOptions());
|
||||
}
|
||||
@@ -91,43 +86,32 @@ public partial class SevenZipArchive
|
||||
);
|
||||
}
|
||||
|
||||
public static IAsyncArchive OpenAsyncArchive(
|
||||
Stream stream,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
public static IAsyncArchive OpenAsyncArchive(Stream stream, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IAsyncArchive)OpenArchive(stream, readerOptions);
|
||||
}
|
||||
|
||||
public static IAsyncArchive OpenAsyncArchive(
|
||||
FileInfo fileInfo,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IAsyncArchive)OpenArchive(fileInfo, readerOptions);
|
||||
}
|
||||
|
||||
public static IAsyncArchive OpenAsyncArchive(
|
||||
IReadOnlyList<Stream> streams,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IAsyncArchive)OpenArchive(streams, readerOptions);
|
||||
}
|
||||
|
||||
public static IAsyncArchive OpenAsyncArchive(
|
||||
IReadOnlyList<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IAsyncArchive)OpenArchive(fileInfos, readerOptions);
|
||||
}
|
||||
|
||||
@@ -163,7 +147,7 @@ public partial class SevenZipArchive
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
try
|
||||
{
|
||||
return await SignatureMatchAsync(stream, cancellationToken);
|
||||
return await SignatureMatchAsync(stream, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
catch
|
||||
{
|
||||
|
||||
@@ -55,7 +55,8 @@ public partial class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, Sev
|
||||
i,
|
||||
file,
|
||||
ReaderOptions.ArchiveEncoding
|
||||
)
|
||||
),
|
||||
ReaderOptions
|
||||
);
|
||||
}
|
||||
foreach (
|
||||
@@ -181,15 +182,15 @@ public partial class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, Sev
|
||||
);
|
||||
}
|
||||
|
||||
// Wrap with SyncOnlyStream to work around LZMA async bugs
|
||||
// Return a ReadOnlySubStream that reads from the shared folder stream
|
||||
return CreateEntryStream(
|
||||
new SyncOnlyStream(
|
||||
new ReadOnlySubStream(_currentFolderStream, entry.Size, leaveOpen: true)
|
||||
)
|
||||
new ReadOnlySubStream(_currentFolderStream, entry.Size, leaveOpen: true)
|
||||
);
|
||||
}
|
||||
|
||||
protected override ValueTask<EntryStream> GetEntryStreamAsync(
|
||||
CancellationToken cancellationToken = default
|
||||
) => new(GetEntryStream());
|
||||
|
||||
public override void Dispose()
|
||||
{
|
||||
_currentFolderStream?.Dispose();
|
||||
|
||||
@@ -1,20 +1,28 @@
|
||||
using System.IO;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.Options;
|
||||
using SharpCompress.Common.SevenZip;
|
||||
|
||||
namespace SharpCompress.Archives.SevenZip;
|
||||
|
||||
public class SevenZipArchiveEntry : SevenZipEntry, IArchiveEntry
|
||||
{
|
||||
internal SevenZipArchiveEntry(SevenZipArchive archive, SevenZipFilePart part)
|
||||
: base(part) => Archive = archive;
|
||||
internal SevenZipArchiveEntry(
|
||||
SevenZipArchive archive,
|
||||
SevenZipFilePart part,
|
||||
IReaderOptions readerOptions
|
||||
)
|
||||
: base(part, readerOptions) => Archive = archive;
|
||||
|
||||
public Stream OpenEntryStream() => FilePart.GetCompressedStream();
|
||||
|
||||
public async ValueTask<Stream> OpenEntryStreamAsync(
|
||||
CancellationToken cancellationToken = default
|
||||
) => (await FilePart.GetCompressedStreamAsync(cancellationToken)).NotNull();
|
||||
) =>
|
||||
(
|
||||
await FilePart.GetCompressedStreamAsync(cancellationToken).ConfigureAwait(false)
|
||||
).NotNull();
|
||||
|
||||
public IArchive Archive { get; }
|
||||
|
||||
|
||||
@@ -4,6 +4,7 @@ using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Options;
|
||||
using SharpCompress.Common.Tar;
|
||||
using SharpCompress.Common.Tar.Headers;
|
||||
using SharpCompress.IO;
|
||||
@@ -18,13 +19,16 @@ public partial class TarArchive
|
||||
{
|
||||
protected override async ValueTask SaveToAsync(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
TarWriterOptions options,
|
||||
IAsyncEnumerable<TarArchiveEntry> oldEntries,
|
||||
IEnumerable<TarArchiveEntry> newEntries,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
using var writer = new TarWriter(stream, new TarWriterOptions(options));
|
||||
using var writer = new TarWriter(
|
||||
stream,
|
||||
options as TarWriterOptions ?? new TarWriterOptions(options)
|
||||
);
|
||||
await foreach (
|
||||
var entry in oldEntries.WithCancellation(cancellationToken).ConfigureAwait(false)
|
||||
)
|
||||
@@ -92,7 +96,7 @@ public partial class TarArchive
|
||||
IAsyncEnumerable<TarVolume> volumes
|
||||
)
|
||||
{
|
||||
var stream = (await volumes.SingleAsync()).Stream;
|
||||
var stream = (await volumes.SingleAsync().ConfigureAwait(false)).Stream;
|
||||
if (stream.CanSeek)
|
||||
{
|
||||
stream.Position = 0;
|
||||
@@ -123,7 +127,8 @@ public partial class TarArchive
|
||||
var entry = new TarArchiveEntry(
|
||||
this,
|
||||
new TarFilePart(previousHeader, stream),
|
||||
CompressionType.None
|
||||
CompressionType.None,
|
||||
ReaderOptions
|
||||
);
|
||||
|
||||
var oldStreamPos = stream.Position;
|
||||
@@ -131,7 +136,7 @@ public partial class TarArchive
|
||||
using (var entryStream = entry.OpenEntryStream())
|
||||
{
|
||||
using var memoryStream = new MemoryStream();
|
||||
await entryStream.CopyToAsync(memoryStream);
|
||||
await entryStream.CopyToAsync(memoryStream).ConfigureAwait(false);
|
||||
memoryStream.Position = 0;
|
||||
var bytes = memoryStream.ToArray();
|
||||
|
||||
@@ -147,7 +152,8 @@ public partial class TarArchive
|
||||
yield return new TarArchiveEntry(
|
||||
this,
|
||||
new TarFilePart(header, stream),
|
||||
CompressionType.None
|
||||
CompressionType.None,
|
||||
ReaderOptions
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,22 +9,29 @@ using SharpCompress.Common;
|
||||
using SharpCompress.Common.Tar.Headers;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Readers;
|
||||
using SharpCompress.Writers.Tar;
|
||||
|
||||
namespace SharpCompress.Archives.Tar;
|
||||
|
||||
public partial class TarArchive
|
||||
#if NET8_0_OR_GREATER
|
||||
: IWritableArchiveOpenable,
|
||||
IMultiArchiveOpenable<IWritableArchive, IWritableAsyncArchive>
|
||||
: IWritableArchiveOpenable<TarWriterOptions>,
|
||||
IMultiArchiveOpenable<
|
||||
IWritableArchive<TarWriterOptions>,
|
||||
IWritableAsyncArchive<TarWriterOptions>
|
||||
>
|
||||
#endif
|
||||
{
|
||||
public static IWritableArchive OpenArchive(string filePath, ReaderOptions? readerOptions = null)
|
||||
public static IWritableArchive<TarWriterOptions> OpenArchive(
|
||||
string filePath,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
filePath.NotNullOrEmpty(nameof(filePath));
|
||||
return OpenArchive(new FileInfo(filePath), readerOptions);
|
||||
}
|
||||
|
||||
public static IWritableArchive OpenArchive(
|
||||
public static IWritableArchive<TarWriterOptions> OpenArchive(
|
||||
FileInfo fileInfo,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
@@ -39,7 +46,7 @@ public partial class TarArchive
|
||||
);
|
||||
}
|
||||
|
||||
public static IWritableArchive OpenArchive(
|
||||
public static IWritableArchive<TarWriterOptions> OpenArchive(
|
||||
IEnumerable<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
@@ -55,7 +62,7 @@ public partial class TarArchive
|
||||
);
|
||||
}
|
||||
|
||||
public static IWritableArchive OpenArchive(
|
||||
public static IWritableArchive<TarWriterOptions> OpenArchive(
|
||||
IEnumerable<Stream> streams,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
@@ -71,7 +78,10 @@ public partial class TarArchive
|
||||
);
|
||||
}
|
||||
|
||||
public static IWritableArchive OpenArchive(Stream stream, ReaderOptions? readerOptions = null)
|
||||
public static IWritableArchive<TarWriterOptions> OpenArchive(
|
||||
Stream stream,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
stream.NotNull(nameof(stream));
|
||||
|
||||
@@ -85,55 +95,30 @@ public partial class TarArchive
|
||||
);
|
||||
}
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
public static IWritableAsyncArchive<TarWriterOptions> OpenAsyncArchive(
|
||||
Stream stream,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(stream, readerOptions);
|
||||
}
|
||||
ReaderOptions? readerOptions = null
|
||||
) => (IWritableAsyncArchive<TarWriterOptions>)OpenArchive(stream, readerOptions);
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
public static IWritableAsyncArchive<TarWriterOptions> OpenAsyncArchive(
|
||||
string path,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(new FileInfo(path), readerOptions);
|
||||
}
|
||||
ReaderOptions? readerOptions = null
|
||||
) => (IWritableAsyncArchive<TarWriterOptions>)OpenArchive(new FileInfo(path), readerOptions);
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
public static IWritableAsyncArchive<TarWriterOptions> OpenAsyncArchive(
|
||||
FileInfo fileInfo,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(fileInfo, readerOptions);
|
||||
}
|
||||
ReaderOptions? readerOptions = null
|
||||
) => (IWritableAsyncArchive<TarWriterOptions>)OpenArchive(fileInfo, readerOptions);
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
public static IWritableAsyncArchive<TarWriterOptions> OpenAsyncArchive(
|
||||
IReadOnlyList<Stream> streams,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(streams, readerOptions);
|
||||
}
|
||||
ReaderOptions? readerOptions = null
|
||||
) => (IWritableAsyncArchive<TarWriterOptions>)OpenArchive(streams, readerOptions);
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
public static IWritableAsyncArchive<TarWriterOptions> OpenAsyncArchive(
|
||||
IReadOnlyList<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(fileInfos, readerOptions);
|
||||
}
|
||||
ReaderOptions? readerOptions = null
|
||||
) => (IWritableAsyncArchive<TarWriterOptions>)OpenArchive(fileInfos, readerOptions);
|
||||
|
||||
public static bool IsTarFile(string filePath) => IsTarFile(new FileInfo(filePath));
|
||||
|
||||
@@ -176,8 +161,12 @@ public partial class TarArchive
|
||||
try
|
||||
{
|
||||
var tarHeader = new TarHeader(new ArchiveEncoding());
|
||||
var reader = new AsyncBinaryReader(stream, false);
|
||||
var readSucceeded = await tarHeader.ReadAsync(reader);
|
||||
#if NET8_0_OR_GREATER
|
||||
await using var reader = new AsyncBinaryReader(stream, leaveOpen: true);
|
||||
#else
|
||||
using var reader = new AsyncBinaryReader(stream, leaveOpen: true);
|
||||
#endif
|
||||
var readSucceeded = await tarHeader.ReadAsync(reader).ConfigureAwait(false);
|
||||
var isEmptyArchive =
|
||||
tarHeader.Name?.Length == 0
|
||||
&& tarHeader.Size == 0
|
||||
@@ -192,7 +181,7 @@ public partial class TarArchive
|
||||
}
|
||||
}
|
||||
|
||||
public static IWritableArchive CreateArchive() => new TarArchive();
|
||||
public static IWritableArchive<TarWriterOptions> CreateArchive() => new TarArchive();
|
||||
|
||||
public static IWritableAsyncArchive CreateAsyncArchive() => new TarArchive();
|
||||
public static IWritableAsyncArchive<TarWriterOptions> CreateAsyncArchive() => new TarArchive();
|
||||
}
|
||||
|
||||
@@ -5,6 +5,7 @@ using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Options;
|
||||
using SharpCompress.Common.Tar;
|
||||
using SharpCompress.Common.Tar.Headers;
|
||||
using SharpCompress.IO;
|
||||
@@ -15,7 +16,8 @@ using SharpCompress.Writers.Tar;
|
||||
|
||||
namespace SharpCompress.Archives.Tar;
|
||||
|
||||
public partial class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
|
||||
public partial class TarArchive
|
||||
: AbstractWritableArchive<TarArchiveEntry, TarVolume, TarWriterOptions>
|
||||
{
|
||||
protected override IEnumerable<TarVolume> LoadVolumes(SourceStream sourceStream)
|
||||
{
|
||||
@@ -58,7 +60,8 @@ public partial class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVo
|
||||
var entry = new TarArchiveEntry(
|
||||
this,
|
||||
new TarFilePart(previousHeader, stream),
|
||||
CompressionType.None
|
||||
CompressionType.None,
|
||||
ReaderOptions
|
||||
);
|
||||
|
||||
var oldStreamPos = stream.Position;
|
||||
@@ -80,7 +83,8 @@ public partial class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVo
|
||||
yield return new TarArchiveEntry(
|
||||
this,
|
||||
new TarFilePart(header, stream),
|
||||
CompressionType.None
|
||||
CompressionType.None,
|
||||
ReaderOptions
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -115,12 +119,15 @@ public partial class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVo
|
||||
|
||||
protected override void SaveTo(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
TarWriterOptions options,
|
||||
IEnumerable<TarArchiveEntry> oldEntries,
|
||||
IEnumerable<TarArchiveEntry> newEntries
|
||||
)
|
||||
{
|
||||
using var writer = new TarWriter(stream, new TarWriterOptions(options));
|
||||
using var writer = new TarWriter(
|
||||
stream,
|
||||
options as TarWriterOptions ?? new TarWriterOptions(options)
|
||||
);
|
||||
foreach (var entry in oldEntries.Concat(newEntries))
|
||||
{
|
||||
if (entry.IsDirectory)
|
||||
|
||||
@@ -1,21 +1,31 @@
|
||||
using System.IO;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Options;
|
||||
using SharpCompress.Common.Tar;
|
||||
|
||||
namespace SharpCompress.Archives.Tar;
|
||||
|
||||
public class TarArchiveEntry : TarEntry, IArchiveEntry
|
||||
{
|
||||
internal TarArchiveEntry(TarArchive archive, TarFilePart? part, CompressionType compressionType)
|
||||
: base(part, compressionType) => Archive = archive;
|
||||
internal TarArchiveEntry(
|
||||
TarArchive archive,
|
||||
TarFilePart? part,
|
||||
CompressionType compressionType,
|
||||
IReaderOptions readerOptions
|
||||
)
|
||||
: base(part, compressionType, readerOptions) => Archive = archive;
|
||||
|
||||
public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream().NotNull();
|
||||
|
||||
public ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default) =>
|
||||
new(OpenEntryStream());
|
||||
public async ValueTask<Stream> OpenEntryStreamAsync(
|
||||
CancellationToken cancellationToken = default
|
||||
) =>
|
||||
(
|
||||
await Parts.Single().GetCompressedStreamAsync(cancellationToken).ConfigureAwait(false)
|
||||
).NotNull();
|
||||
|
||||
#region IArchiveEntry Members
|
||||
|
||||
|
||||
@@ -21,7 +21,7 @@ internal sealed class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiv
|
||||
DateTime? lastModified,
|
||||
bool closeStream
|
||||
)
|
||||
: base(archive, null, compressionType)
|
||||
: base(archive, null, compressionType, archive.ReaderOptions)
|
||||
{
|
||||
this.stream = stream;
|
||||
Key = path;
|
||||
@@ -36,7 +36,7 @@ internal sealed class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiv
|
||||
string directoryPath,
|
||||
DateTime? lastModified
|
||||
)
|
||||
: base(archive, null, CompressionType.None)
|
||||
: base(archive, null, CompressionType.None, archive.ReaderOptions)
|
||||
{
|
||||
stream = null;
|
||||
Key = directoryPath;
|
||||
@@ -79,7 +79,7 @@ internal sealed class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiv
|
||||
}
|
||||
//ensure new stream is at the start, this could be reset
|
||||
stream.Seek(0, SeekOrigin.Begin);
|
||||
return SharpCompressStream.Create(stream, leaveOpen: true);
|
||||
return SharpCompressStream.CreateNonDisposing(stream);
|
||||
}
|
||||
|
||||
internal override void Close()
|
||||
|
||||
@@ -5,6 +5,7 @@ using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Options;
|
||||
using SharpCompress.Common.Zip;
|
||||
using SharpCompress.Common.Zip.Headers;
|
||||
using SharpCompress.IO;
|
||||
@@ -20,7 +21,7 @@ public partial class ZipArchive
|
||||
IAsyncEnumerable<ZipVolume> volumes
|
||||
)
|
||||
{
|
||||
var vols = await volumes.ToListAsync();
|
||||
var vols = await volumes.ToListAsync().ConfigureAwait(false);
|
||||
var volsArray = vols.ToArray();
|
||||
|
||||
await foreach (
|
||||
@@ -54,7 +55,8 @@ public partial class ZipArchive
|
||||
|
||||
yield return new ZipArchiveEntry(
|
||||
this,
|
||||
new SeekableZipFilePart(headerFactory.NotNull(), deh, s)
|
||||
new SeekableZipFilePart(headerFactory.NotNull(), deh, s),
|
||||
ReaderOptions
|
||||
);
|
||||
}
|
||||
break;
|
||||
@@ -71,13 +73,16 @@ public partial class ZipArchive
|
||||
|
||||
protected override async ValueTask SaveToAsync(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
ZipWriterOptions options,
|
||||
IAsyncEnumerable<ZipArchiveEntry> oldEntries,
|
||||
IEnumerable<ZipArchiveEntry> newEntries,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
using var writer = new ZipWriter(stream, new ZipWriterOptions(options));
|
||||
using var writer = new ZipWriter(
|
||||
stream,
|
||||
options as ZipWriterOptions ?? new ZipWriterOptions(options)
|
||||
);
|
||||
await foreach (
|
||||
var entry in oldEntries.WithCancellation(cancellationToken).ConfigureAwait(false)
|
||||
)
|
||||
|
||||
@@ -9,22 +9,29 @@ using SharpCompress.Common.Zip;
|
||||
using SharpCompress.Common.Zip.Headers;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Readers;
|
||||
using SharpCompress.Writers.Zip;
|
||||
|
||||
namespace SharpCompress.Archives.Zip;
|
||||
|
||||
public partial class ZipArchive
|
||||
#if NET8_0_OR_GREATER
|
||||
: IWritableArchiveOpenable,
|
||||
IMultiArchiveOpenable<IWritableArchive, IWritableAsyncArchive>
|
||||
: IWritableArchiveOpenable<ZipWriterOptions>,
|
||||
IMultiArchiveOpenable<
|
||||
IWritableArchive<ZipWriterOptions>,
|
||||
IWritableAsyncArchive<ZipWriterOptions>
|
||||
>
|
||||
#endif
|
||||
{
|
||||
public static IWritableArchive OpenArchive(string filePath, ReaderOptions? readerOptions = null)
|
||||
public static IWritableArchive<ZipWriterOptions> OpenArchive(
|
||||
string filePath,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
filePath.NotNullOrEmpty(nameof(filePath));
|
||||
return OpenArchive(new FileInfo(filePath), readerOptions);
|
||||
}
|
||||
|
||||
public static IWritableArchive OpenArchive(
|
||||
public static IWritableArchive<ZipWriterOptions> OpenArchive(
|
||||
FileInfo fileInfo,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
@@ -39,7 +46,7 @@ public partial class ZipArchive
|
||||
);
|
||||
}
|
||||
|
||||
public static IWritableArchive OpenArchive(
|
||||
public static IWritableArchive<ZipWriterOptions> OpenArchive(
|
||||
IEnumerable<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
@@ -55,7 +62,7 @@ public partial class ZipArchive
|
||||
);
|
||||
}
|
||||
|
||||
public static IWritableArchive OpenArchive(
|
||||
public static IWritableArchive<ZipWriterOptions> OpenArchive(
|
||||
IEnumerable<Stream> streams,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
@@ -71,7 +78,10 @@ public partial class ZipArchive
|
||||
);
|
||||
}
|
||||
|
||||
public static IWritableArchive OpenArchive(Stream stream, ReaderOptions? readerOptions = null)
|
||||
public static IWritableArchive<ZipWriterOptions> OpenArchive(
|
||||
Stream stream,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
stream.NotNull(nameof(stream));
|
||||
|
||||
@@ -85,55 +95,30 @@ public partial class ZipArchive
|
||||
);
|
||||
}
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
public static IWritableAsyncArchive<ZipWriterOptions> OpenAsyncArchive(
|
||||
string path,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(path, readerOptions);
|
||||
}
|
||||
ReaderOptions? readerOptions = null
|
||||
) => (IWritableAsyncArchive<ZipWriterOptions>)OpenArchive(path, readerOptions);
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
public static IWritableAsyncArchive<ZipWriterOptions> OpenAsyncArchive(
|
||||
Stream stream,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(stream, readerOptions);
|
||||
}
|
||||
ReaderOptions? readerOptions = null
|
||||
) => (IWritableAsyncArchive<ZipWriterOptions>)OpenArchive(stream, readerOptions);
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
public static IWritableAsyncArchive<ZipWriterOptions> OpenAsyncArchive(
|
||||
FileInfo fileInfo,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(fileInfo, readerOptions);
|
||||
}
|
||||
ReaderOptions? readerOptions = null
|
||||
) => (IWritableAsyncArchive<ZipWriterOptions>)OpenArchive(fileInfo, readerOptions);
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
public static IWritableAsyncArchive<ZipWriterOptions> OpenAsyncArchive(
|
||||
IReadOnlyList<Stream> streams,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(streams, readerOptions);
|
||||
}
|
||||
ReaderOptions? readerOptions = null
|
||||
) => (IWritableAsyncArchive<ZipWriterOptions>)OpenArchive(streams, readerOptions);
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
public static IWritableAsyncArchive<ZipWriterOptions> OpenAsyncArchive(
|
||||
IReadOnlyList<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(fileInfos, readerOptions);
|
||||
}
|
||||
ReaderOptions? readerOptions = null
|
||||
) => (IWritableAsyncArchive<ZipWriterOptions>)OpenArchive(fileInfos, readerOptions);
|
||||
|
||||
public static bool IsZipFile(string filePath, string? password = null) =>
|
||||
IsZipFile(new FileInfo(filePath), password);
|
||||
@@ -177,11 +162,6 @@ public partial class ZipArchive
|
||||
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
|
||||
try
|
||||
{
|
||||
if (stream is not SharpCompressStream)
|
||||
{
|
||||
stream = new SharpCompressStream(stream, bufferSize: Constants.BufferSize);
|
||||
}
|
||||
|
||||
var header = headerFactory
|
||||
.ReadStreamHeader(stream)
|
||||
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
|
||||
@@ -220,15 +200,11 @@ public partial class ZipArchive
|
||||
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
|
||||
try
|
||||
{
|
||||
if (stream is not SharpCompressStream)
|
||||
{
|
||||
stream = new SharpCompressStream(stream, bufferSize: Constants.BufferSize);
|
||||
}
|
||||
|
||||
var header = await headerFactory
|
||||
.ReadStreamHeaderAsync(stream)
|
||||
.Where(x => x.ZipHeaderType != ZipHeaderType.Split)
|
||||
.FirstOrDefaultAsync(cancellationToken);
|
||||
.FirstOrDefaultAsync(cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
if (header is null)
|
||||
{
|
||||
return false;
|
||||
@@ -245,9 +221,9 @@ public partial class ZipArchive
|
||||
}
|
||||
}
|
||||
|
||||
public static IWritableArchive CreateArchive() => new ZipArchive();
|
||||
public static IWritableArchive<ZipWriterOptions> CreateArchive() => new ZipArchive();
|
||||
|
||||
public static IWritableAsyncArchive CreateAsyncArchive() => new ZipArchive();
|
||||
public static IWritableAsyncArchive<ZipWriterOptions> CreateAsyncArchive() => new ZipArchive();
|
||||
|
||||
public static async ValueTask<bool> IsZipMultiAsync(
|
||||
Stream stream,
|
||||
@@ -259,11 +235,6 @@ public partial class ZipArchive
|
||||
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
|
||||
try
|
||||
{
|
||||
if (stream is not SharpCompressStream)
|
||||
{
|
||||
stream = new SharpCompressStream(stream, bufferSize: Constants.BufferSize);
|
||||
}
|
||||
|
||||
var header = headerFactory
|
||||
.ReadStreamHeader(stream)
|
||||
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
|
||||
@@ -276,6 +247,7 @@ public partial class ZipArchive
|
||||
await foreach (
|
||||
var h in z.ReadSeekableHeaderAsync(stream)
|
||||
.WithCancellation(cancellationToken)
|
||||
.ConfigureAwait(false)
|
||||
)
|
||||
{
|
||||
x = h;
|
||||
|
||||
@@ -5,6 +5,7 @@ using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Options;
|
||||
using SharpCompress.Common.Zip;
|
||||
using SharpCompress.Common.Zip.Headers;
|
||||
using SharpCompress.Compressors.Deflate;
|
||||
@@ -16,7 +17,8 @@ using SharpCompress.Writers.Zip;
|
||||
|
||||
namespace SharpCompress.Archives.Zip;
|
||||
|
||||
public partial class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
public partial class ZipArchive
|
||||
: AbstractWritableArchive<ZipArchiveEntry, ZipVolume, ZipWriterOptions>
|
||||
{
|
||||
private readonly SeekableZipHeaderFactory? headerFactory;
|
||||
|
||||
@@ -35,7 +37,7 @@ public partial class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVo
|
||||
protected override IEnumerable<ZipVolume> LoadVolumes(SourceStream stream)
|
||||
{
|
||||
stream.LoadAllParts();
|
||||
stream.Position = 0;
|
||||
//stream.Position = 0;
|
||||
|
||||
var streams = stream.Streams.ToList();
|
||||
var idx = 0;
|
||||
@@ -94,7 +96,8 @@ public partial class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVo
|
||||
|
||||
yield return new ZipArchiveEntry(
|
||||
this,
|
||||
new SeekableZipFilePart(headerFactory.NotNull(), deh, s)
|
||||
new SeekableZipFilePart(headerFactory.NotNull(), deh, s),
|
||||
ReaderOptions
|
||||
);
|
||||
}
|
||||
break;
|
||||
@@ -109,16 +112,20 @@ public partial class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVo
|
||||
}
|
||||
}
|
||||
|
||||
public void SaveTo(Stream stream) => SaveTo(stream, new WriterOptions(CompressionType.Deflate));
|
||||
public void SaveTo(Stream stream) =>
|
||||
SaveTo(stream, new ZipWriterOptions(CompressionType.Deflate));
|
||||
|
||||
protected override void SaveTo(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
ZipWriterOptions options,
|
||||
IEnumerable<ZipArchiveEntry> oldEntries,
|
||||
IEnumerable<ZipArchiveEntry> newEntries
|
||||
)
|
||||
{
|
||||
using var writer = new ZipWriter(stream, new ZipWriterOptions(options));
|
||||
using var writer = new ZipWriter(
|
||||
stream,
|
||||
options as ZipWriterOptions ?? new ZipWriterOptions(options)
|
||||
);
|
||||
foreach (var entry in oldEntries.Concat(newEntries))
|
||||
{
|
||||
if (entry.IsDirectory)
|
||||
@@ -156,7 +163,7 @@ public partial class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVo
|
||||
protected override IReader CreateReaderForSolidExtraction()
|
||||
{
|
||||
var stream = Volumes.Single().Stream;
|
||||
((IStreamStack)stream).StackSeek(0);
|
||||
//stream.Position = 0;
|
||||
return ZipReader.OpenReader(stream, ReaderOptions, Entries);
|
||||
}
|
||||
|
||||
|
||||
@@ -15,7 +15,9 @@ public partial class ZipArchiveEntry
|
||||
var part = Parts.Single();
|
||||
if (part is SeekableZipFilePart seekablePart)
|
||||
{
|
||||
return (await seekablePart.GetCompressedStreamAsync(cancellationToken)).NotNull();
|
||||
return (
|
||||
await seekablePart.GetCompressedStreamAsync(cancellationToken).ConfigureAwait(false)
|
||||
).NotNull();
|
||||
}
|
||||
return OpenEntryStream();
|
||||
}
|
||||
|
||||
@@ -1,15 +1,20 @@
|
||||
using System.IO;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.Options;
|
||||
using SharpCompress.Common.Zip;
|
||||
|
||||
namespace SharpCompress.Archives.Zip;
|
||||
|
||||
public partial class ZipArchiveEntry : ZipEntry, IArchiveEntry
|
||||
{
|
||||
internal ZipArchiveEntry(ZipArchive archive, SeekableZipFilePart? part)
|
||||
: base(part) => Archive = archive;
|
||||
internal ZipArchiveEntry(
|
||||
ZipArchive archive,
|
||||
SeekableZipFilePart? part,
|
||||
IReaderOptions readerOptions
|
||||
)
|
||||
: base(part, readerOptions) => Archive = archive;
|
||||
|
||||
public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream().NotNull();
|
||||
|
||||
|
||||
@@ -21,7 +21,7 @@ internal class ZipWritableArchiveEntry : ZipArchiveEntry, IWritableArchiveEntry
|
||||
DateTime? lastModified,
|
||||
bool closeStream
|
||||
)
|
||||
: base(archive, null)
|
||||
: base(archive, null, archive.ReaderOptions)
|
||||
{
|
||||
this.stream = stream;
|
||||
Key = path;
|
||||
@@ -36,7 +36,7 @@ internal class ZipWritableArchiveEntry : ZipArchiveEntry, IWritableArchiveEntry
|
||||
string directoryPath,
|
||||
DateTime? lastModified
|
||||
)
|
||||
: base(archive, null)
|
||||
: base(archive, null, archive.ReaderOptions)
|
||||
{
|
||||
stream = null;
|
||||
Key = directoryPath;
|
||||
@@ -80,7 +80,7 @@ internal class ZipWritableArchiveEntry : ZipArchiveEntry, IWritableArchiveEntry
|
||||
}
|
||||
//ensure new stream is at the start, this could be reset
|
||||
stream.Seek(0, SeekOrigin.Begin);
|
||||
return SharpCompressStream.Create(stream, leaveOpen: true);
|
||||
return SharpCompressStream.CreateNonDisposing(stream);
|
||||
}
|
||||
|
||||
internal override void Close()
|
||||
|
||||
@@ -4,62 +4,61 @@ using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace SharpCompress.Common.Ace
|
||||
namespace SharpCompress.Common.Ace;
|
||||
|
||||
public class AceCrc
|
||||
{
|
||||
public class AceCrc
|
||||
// CRC-32 lookup table (standard polynomial 0xEDB88320, reflected)
|
||||
private static readonly uint[] Crc32Table = GenerateTable();
|
||||
|
||||
private static uint[] GenerateTable()
|
||||
{
|
||||
// CRC-32 lookup table (standard polynomial 0xEDB88320, reflected)
|
||||
private static readonly uint[] Crc32Table = GenerateTable();
|
||||
var table = new uint[256];
|
||||
|
||||
private static uint[] GenerateTable()
|
||||
for (int i = 0; i < 256; i++)
|
||||
{
|
||||
var table = new uint[256];
|
||||
uint crc = (uint)i;
|
||||
|
||||
for (int i = 0; i < 256; i++)
|
||||
for (int j = 0; j < 8; j++)
|
||||
{
|
||||
uint crc = (uint)i;
|
||||
|
||||
for (int j = 0; j < 8; j++)
|
||||
if ((crc & 1) != 0)
|
||||
{
|
||||
if ((crc & 1) != 0)
|
||||
{
|
||||
crc = (crc >> 1) ^ 0xEDB88320u;
|
||||
}
|
||||
else
|
||||
{
|
||||
crc >>= 1;
|
||||
}
|
||||
crc = (crc >> 1) ^ 0xEDB88320u;
|
||||
}
|
||||
else
|
||||
{
|
||||
crc >>= 1;
|
||||
}
|
||||
|
||||
table[i] = crc;
|
||||
}
|
||||
|
||||
return table;
|
||||
table[i] = crc;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Calculate ACE CRC-32 checksum.
|
||||
/// ACE CRC-32 uses standard CRC-32 polynomial (0xEDB88320, reflected)
|
||||
/// with init=0xFFFFFFFF but NO final XOR.
|
||||
/// </summary>
|
||||
public static uint AceCrc32(ReadOnlySpan<byte> data)
|
||||
return table;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Calculate ACE CRC-32 checksum.
|
||||
/// ACE CRC-32 uses standard CRC-32 polynomial (0xEDB88320, reflected)
|
||||
/// with init=0xFFFFFFFF but NO final XOR.
|
||||
/// </summary>
|
||||
public static uint AceCrc32(ReadOnlySpan<byte> data)
|
||||
{
|
||||
uint crc = 0xFFFFFFFFu;
|
||||
|
||||
foreach (byte b in data)
|
||||
{
|
||||
uint crc = 0xFFFFFFFFu;
|
||||
|
||||
foreach (byte b in data)
|
||||
{
|
||||
crc = (crc >> 8) ^ Crc32Table[(crc ^ b) & 0xFF];
|
||||
}
|
||||
|
||||
return crc; // No final XOR for ACE
|
||||
crc = (crc >> 8) ^ Crc32Table[(crc ^ b) & 0xFF];
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// ACE CRC-16 is the lower 16 bits of the ACE CRC-32.
|
||||
/// </summary>
|
||||
public static ushort AceCrc16(ReadOnlySpan<byte> data)
|
||||
{
|
||||
return (ushort)(AceCrc32(data) & 0xFFFF);
|
||||
}
|
||||
return crc; // No final XOR for ACE
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// ACE CRC-16 is the lower 16 bits of the ACE CRC-32.
|
||||
/// </summary>
|
||||
public static ushort AceCrc16(ReadOnlySpan<byte> data)
|
||||
{
|
||||
return (ushort)(AceCrc32(data) & 0xFFFF);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,64 +5,65 @@ using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.Ace.Headers;
|
||||
using SharpCompress.Common.Options;
|
||||
|
||||
namespace SharpCompress.Common.Ace
|
||||
namespace SharpCompress.Common.Ace;
|
||||
|
||||
public class AceEntry : Entry
|
||||
{
|
||||
public class AceEntry : Entry
|
||||
private readonly AceFilePart _filePart;
|
||||
|
||||
internal AceEntry(AceFilePart filePart, IReaderOptions readerOptions)
|
||||
: base(readerOptions)
|
||||
{
|
||||
private readonly AceFilePart _filePart;
|
||||
|
||||
internal AceEntry(AceFilePart filePart)
|
||||
{
|
||||
_filePart = filePart;
|
||||
}
|
||||
|
||||
public override long Crc
|
||||
{
|
||||
get
|
||||
{
|
||||
if (_filePart == null)
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
return _filePart.Header.Crc32;
|
||||
}
|
||||
}
|
||||
|
||||
public override string? Key => _filePart?.Header.Filename;
|
||||
|
||||
public override string? LinkTarget => null;
|
||||
|
||||
public override long CompressedSize => _filePart?.Header.PackedSize ?? 0;
|
||||
|
||||
public override CompressionType CompressionType
|
||||
{
|
||||
get
|
||||
{
|
||||
if (_filePart.Header.CompressionType == Headers.CompressionType.Stored)
|
||||
{
|
||||
return CompressionType.None;
|
||||
}
|
||||
return CompressionType.AceLZ77;
|
||||
}
|
||||
}
|
||||
|
||||
public override long Size => _filePart?.Header.OriginalSize ?? 0;
|
||||
|
||||
public override DateTime? LastModifiedTime => _filePart.Header.DateTime;
|
||||
|
||||
public override DateTime? CreatedTime => null;
|
||||
|
||||
public override DateTime? LastAccessedTime => null;
|
||||
|
||||
public override DateTime? ArchivedTime => null;
|
||||
|
||||
public override bool IsEncrypted => _filePart.Header.IsFileEncrypted;
|
||||
|
||||
public override bool IsDirectory => _filePart.Header.IsDirectory;
|
||||
|
||||
public override bool IsSplitAfter => false;
|
||||
|
||||
internal override IEnumerable<FilePart> Parts => _filePart.Empty();
|
||||
_filePart = filePart;
|
||||
}
|
||||
|
||||
public override long Crc
|
||||
{
|
||||
get
|
||||
{
|
||||
if (_filePart == null)
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
return _filePart.Header.Crc32;
|
||||
}
|
||||
}
|
||||
|
||||
public override string? Key => _filePart?.Header.Filename;
|
||||
|
||||
public override string? LinkTarget => null;
|
||||
|
||||
public override long CompressedSize => _filePart?.Header.PackedSize ?? 0;
|
||||
|
||||
public override CompressionType CompressionType
|
||||
{
|
||||
get
|
||||
{
|
||||
if (_filePart.Header.CompressionType == Headers.CompressionType.Stored)
|
||||
{
|
||||
return CompressionType.None;
|
||||
}
|
||||
return CompressionType.AceLZ77;
|
||||
}
|
||||
}
|
||||
|
||||
public override long Size => _filePart?.Header.OriginalSize ?? 0;
|
||||
|
||||
public override DateTime? LastModifiedTime => _filePart.Header.DateTime;
|
||||
|
||||
public override DateTime? CreatedTime => null;
|
||||
|
||||
public override DateTime? LastAccessedTime => null;
|
||||
|
||||
public override DateTime? ArchivedTime => null;
|
||||
|
||||
public override bool IsEncrypted => _filePart.Header.IsFileEncrypted;
|
||||
|
||||
public override bool IsDirectory => _filePart.Header.IsDirectory;
|
||||
|
||||
public override bool IsSplitAfter => false;
|
||||
|
||||
internal override IEnumerable<FilePart> Parts => _filePart.Empty();
|
||||
}
|
||||
|
||||
@@ -7,46 +7,45 @@ using System.Threading.Tasks;
|
||||
using SharpCompress.Common.Ace.Headers;
|
||||
using SharpCompress.IO;
|
||||
|
||||
namespace SharpCompress.Common.Ace
|
||||
namespace SharpCompress.Common.Ace;
|
||||
|
||||
public class AceFilePart : FilePart
|
||||
{
|
||||
public class AceFilePart : FilePart
|
||||
private readonly Stream _stream;
|
||||
internal AceFileHeader Header { get; set; }
|
||||
|
||||
internal AceFilePart(AceFileHeader localAceHeader, Stream seekableStream)
|
||||
: base(localAceHeader.ArchiveEncoding)
|
||||
{
|
||||
private readonly Stream _stream;
|
||||
internal AceFileHeader Header { get; set; }
|
||||
|
||||
internal AceFilePart(AceFileHeader localAceHeader, Stream seekableStream)
|
||||
: base(localAceHeader.ArchiveEncoding)
|
||||
{
|
||||
_stream = seekableStream;
|
||||
Header = localAceHeader;
|
||||
}
|
||||
|
||||
internal override string? FilePartName => Header.Filename;
|
||||
|
||||
internal override Stream GetCompressedStream()
|
||||
{
|
||||
if (_stream != null)
|
||||
{
|
||||
Stream compressedStream;
|
||||
switch (Header.CompressionType)
|
||||
{
|
||||
case Headers.CompressionType.Stored:
|
||||
compressedStream = new ReadOnlySubStream(
|
||||
_stream,
|
||||
Header.DataStartPosition,
|
||||
Header.PackedSize
|
||||
);
|
||||
break;
|
||||
default:
|
||||
throw new NotSupportedException(
|
||||
"CompressionMethod: " + Header.CompressionQuality
|
||||
);
|
||||
}
|
||||
return compressedStream;
|
||||
}
|
||||
return _stream.NotNull();
|
||||
}
|
||||
|
||||
internal override Stream? GetRawStream() => _stream;
|
||||
_stream = seekableStream;
|
||||
Header = localAceHeader;
|
||||
}
|
||||
|
||||
internal override string? FilePartName => Header.Filename;
|
||||
|
||||
internal override Stream GetCompressedStream()
|
||||
{
|
||||
if (_stream != null)
|
||||
{
|
||||
Stream compressedStream;
|
||||
switch (Header.CompressionType)
|
||||
{
|
||||
case Headers.CompressionType.Stored:
|
||||
compressedStream = new ReadOnlySubStream(
|
||||
_stream,
|
||||
Header.DataStartPosition,
|
||||
Header.PackedSize
|
||||
);
|
||||
break;
|
||||
default:
|
||||
throw new NotSupportedException(
|
||||
"CompressionMethod: " + Header.CompressionQuality
|
||||
);
|
||||
}
|
||||
return compressedStream;
|
||||
}
|
||||
return _stream.NotNull();
|
||||
}
|
||||
|
||||
internal override Stream? GetRawStream() => _stream;
|
||||
}
|
||||
|
||||
@@ -7,29 +7,28 @@ using System.Threading.Tasks;
|
||||
using SharpCompress.Common.Arj;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Common.Ace
|
||||
namespace SharpCompress.Common.Ace;
|
||||
|
||||
public class AceVolume : Volume
|
||||
{
|
||||
public class AceVolume : Volume
|
||||
public AceVolume(Stream stream, ReaderOptions readerOptions, int index = 0)
|
||||
: base(stream, readerOptions, index) { }
|
||||
|
||||
public override bool IsFirstVolume
|
||||
{
|
||||
public AceVolume(Stream stream, ReaderOptions readerOptions, int index = 0)
|
||||
: base(stream, readerOptions, index) { }
|
||||
get { return true; }
|
||||
}
|
||||
|
||||
public override bool IsFirstVolume
|
||||
{
|
||||
get { return true; }
|
||||
}
|
||||
/// <summary>
|
||||
/// ArjArchive is part of a multi-part archive.
|
||||
/// </summary>
|
||||
public override bool IsMultiVolume
|
||||
{
|
||||
get { return false; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// ArjArchive is part of a multi-part archive.
|
||||
/// </summary>
|
||||
public override bool IsMultiVolume
|
||||
{
|
||||
get { return false; }
|
||||
}
|
||||
|
||||
internal IEnumerable<AceFilePart> GetVolumeFileParts()
|
||||
{
|
||||
return new List<AceFilePart>();
|
||||
}
|
||||
internal IEnumerable<AceFilePart> GetVolumeFileParts()
|
||||
{
|
||||
return new List<AceFilePart>();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,7 +18,7 @@ public sealed partial class AceFileHeader
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
var headerData = await ReadHeaderAsync(stream, cancellationToken);
|
||||
var headerData = await ReadHeaderAsync(stream, cancellationToken).ConfigureAwait(false);
|
||||
if (headerData.Length == 0)
|
||||
{
|
||||
return null;
|
||||
|
||||
@@ -7,169 +7,168 @@ using System.Threading.Tasks;
|
||||
using System.Xml.Linq;
|
||||
using SharpCompress.Common.Arc;
|
||||
|
||||
namespace SharpCompress.Common.Ace.Headers
|
||||
namespace SharpCompress.Common.Ace.Headers;
|
||||
|
||||
/// <summary>
|
||||
/// ACE file entry header
|
||||
/// </summary>
|
||||
public sealed partial class AceFileHeader : AceHeader
|
||||
{
|
||||
public long DataStartPosition { get; private set; }
|
||||
public long PackedSize { get; set; }
|
||||
public long OriginalSize { get; set; }
|
||||
public DateTime DateTime { get; set; }
|
||||
public int Attributes { get; set; }
|
||||
public uint Crc32 { get; set; }
|
||||
public CompressionType CompressionType { get; set; }
|
||||
public CompressionQuality CompressionQuality { get; set; }
|
||||
public ushort Parameters { get; set; }
|
||||
public string Filename { get; set; } = string.Empty;
|
||||
public List<byte> Comment { get; set; } = new();
|
||||
|
||||
/// <summary>
|
||||
/// ACE file entry header
|
||||
/// File data offset in the archive
|
||||
/// </summary>
|
||||
public sealed partial class AceFileHeader : AceHeader
|
||||
public ulong DataOffset { get; set; }
|
||||
|
||||
public bool IsDirectory => (Attributes & 0x10) != 0;
|
||||
|
||||
public bool IsContinuedFromPrev =>
|
||||
(HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.CONTINUED_PREV) != 0;
|
||||
|
||||
public bool IsContinuedToNext =>
|
||||
(HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.CONTINUED_NEXT) != 0;
|
||||
|
||||
public int DictionarySize
|
||||
{
|
||||
public long DataStartPosition { get; private set; }
|
||||
public long PackedSize { get; set; }
|
||||
public long OriginalSize { get; set; }
|
||||
public DateTime DateTime { get; set; }
|
||||
public int Attributes { get; set; }
|
||||
public uint Crc32 { get; set; }
|
||||
public CompressionType CompressionType { get; set; }
|
||||
public CompressionQuality CompressionQuality { get; set; }
|
||||
public ushort Parameters { get; set; }
|
||||
public string Filename { get; set; } = string.Empty;
|
||||
public List<byte> Comment { get; set; } = new();
|
||||
|
||||
/// <summary>
|
||||
/// File data offset in the archive
|
||||
/// </summary>
|
||||
    public ulong DataOffset { get; set; }

    public bool IsDirectory => (Attributes & 0x10) != 0;

    public bool IsContinuedFromPrev =>
        (HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.CONTINUED_PREV) != 0;

    public bool IsContinuedToNext =>
        (HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.CONTINUED_NEXT) != 0;

    public int DictionarySize
    {
        get
        {
            int bits = Parameters & 0x0F;
            return bits < 10 ? 1024 : 1 << bits;
        }
    }

    public AceFileHeader(IArchiveEncoding archiveEncoding)
        : base(archiveEncoding, AceHeaderType.FILE) { }

    /// <summary>
    /// Reads the next file entry header from the stream.
    /// Returns null if no more entries or end of archive.
    /// Supports both ACE 1.0 and ACE 2.0 formats.
    /// </summary>
    public override AceHeader? Read(Stream stream)
    {
        var headerData = ReadHeader(stream);
        if (headerData.Length == 0)
        {
            return null;
        }
        int offset = 0;

        // Header type (1 byte)
        HeaderType = headerData[offset++];

        // Skip recovery record headers (ACE 2.0 feature)
        if (HeaderType == (byte)SharpCompress.Common.Ace.Headers.AceHeaderType.RECOVERY32)
        {
            // Skip to next header
            return null;
        }

        if (HeaderType != (byte)SharpCompress.Common.Ace.Headers.AceHeaderType.FILE)
        {
            // Unknown header type - skip
            return null;
        }

        // Header flags (2 bytes)
        HeaderFlags = BitConverter.ToUInt16(headerData, offset);
        offset += 2;

        // Packed size (4 bytes)
        PackedSize = BitConverter.ToUInt32(headerData, offset);
        offset += 4;

        // Original size (4 bytes)
        OriginalSize = BitConverter.ToUInt32(headerData, offset);
        offset += 4;

        // File date/time in DOS format (4 bytes)
        var dosDateTime = BitConverter.ToUInt32(headerData, offset);
        DateTime = ConvertDosDateTime(dosDateTime);
        offset += 4;

        // File attributes (4 bytes)
        Attributes = (int)BitConverter.ToUInt32(headerData, offset);
        offset += 4;

        // CRC32 (4 bytes)
        Crc32 = BitConverter.ToUInt32(headerData, offset);
        offset += 4;

        // Compression type (1 byte)
        byte compressionType = headerData[offset++];
        CompressionType = GetCompressionType(compressionType);

        // Compression quality/parameter (1 byte)
        byte compressionQuality = headerData[offset++];
        CompressionQuality = GetCompressionQuality(compressionQuality);

        // Parameters (2 bytes)
        Parameters = BitConverter.ToUInt16(headerData, offset);
        offset += 2;

        // Reserved (2 bytes) - skip
        offset += 2;

        // Filename length (2 bytes)
        var filenameLength = BitConverter.ToUInt16(headerData, offset);
        offset += 2;

        // Filename
        if (offset + filenameLength <= headerData.Length)
        {
            Filename = ArchiveEncoding.Decode(headerData, offset, filenameLength);
            offset += filenameLength;
        }

        // Handle comment if present
        if ((HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.COMMENT) != 0)
        {
            // Comment length (2 bytes)
            if (offset + 2 <= headerData.Length)
            {
                ushort commentLength = BitConverter.ToUInt16(headerData, offset);
                offset += 2 + commentLength; // Skip comment
            }
        }

        // Store the data start position
        DataStartPosition = stream.Position;

        return this;
    }

    // ReadAsync moved to AceFileHeader.Async.cs

    public CompressionType GetCompressionType(byte value) =>
        value switch
        {
            0 => CompressionType.Stored,
            1 => CompressionType.Lz77,
            2 => CompressionType.Blocked,
            _ => CompressionType.Unknown,
        };

    public CompressionQuality GetCompressionQuality(byte value) =>
        value switch
        {
            0 => CompressionQuality.None,
            1 => CompressionQuality.Fastest,
            2 => CompressionQuality.Fast,
            3 => CompressionQuality.Normal,
            4 => CompressionQuality.Good,
            5 => CompressionQuality.Best,
            _ => CompressionQuality.Unknown,
        };
}
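A quick standalone illustration of the DictionarySize getter shown above: the low nibble of Parameters selects the dictionary size, with anything below 2^10 clamped to 1024. The wrapper class and sample values below are mine, for illustration only, and are not part of the diff.

```csharp
using System;

// Illustrative only: mirrors the DictionarySize getter in AceFileHeader above.
static class AceDictionarySizeDemo
{
    static int DictionarySize(ushort parameters)
    {
        int bits = parameters & 0x0F;        // only the low 4 bits matter
        return bits < 10 ? 1024 : 1 << bits; // clamp small exponents to 1 KiB
    }

    static void Main()
    {
        Console.WriteLine(DictionarySize(0x0005)); // 1024  (5 < 10, clamped)
        Console.WriteLine(DictionarySize(0x000C)); // 4096  (1 << 12)
        Console.WriteLine(DictionarySize(0xABCF)); // 32768 (high bits ignored)
    }
}
```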
@@ -19,7 +19,9 @@ public abstract partial class AceHeader
    {
        // Read header CRC (2 bytes) and header size (2 bytes)
        var headerBytes = new byte[4];
-        if (await stream.ReadAsync(headerBytes, 0, 4, cancellationToken) != 4)
+        if (
+            !await stream.ReadFullyAsync(headerBytes, 0, 4, cancellationToken).ConfigureAwait(false)
+        )
        {
            return Array.Empty<byte>();
        }
@@ -33,7 +35,11 @@ public abstract partial class AceHeader

        // Read the header data
        var body = new byte[HeaderSize];
-        if (await stream.ReadAsync(body, 0, HeaderSize, cancellationToken) != HeaderSize)
+        if (
+            !await stream
+                .ReadFullyAsync(body, 0, HeaderSize, cancellationToken)
+                .ConfigureAwait(false)
+        )
        {
            return Array.Empty<byte>();
        }
@@ -59,7 +65,7 @@ public abstract partial class AceHeader
    )
    {
        var bytes = new byte[14];
-        if (await stream.ReadAsync(bytes, 0, 14, cancellationToken) != 14)
+        if (!await stream.ReadFullyAsync(bytes, 0, 14, cancellationToken).ConfigureAwait(false))
        {
            return false;
        }
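The hunks above swap single ReadAsync calls for a read-until-full helper. As a hedged sketch of why that matters (the actual SharpCompress extension may differ in name and shape): Stream.ReadAsync may legally return fewer bytes than requested, so header parsing has to loop until the buffer is filled or the stream ends.

```csharp
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;

// Hypothetical stand-in for the ReadFullyAsync extension referenced in the diff.
static class StreamReadExtensions
{
    public static async ValueTask<bool> ReadFullyAsync(
        this Stream stream,
        byte[] buffer,
        int offset,
        int count,
        CancellationToken cancellationToken = default
    )
    {
        int total = 0;
        while (total < count)
        {
            int read = await stream
                .ReadAsync(buffer, offset + total, count - total, cancellationToken)
                .ConfigureAwait(false);
            if (read == 0)
            {
                return false; // end of stream before the buffer was filled
            }
            total += read;
        }
        return true;
    }
}
```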
@@ -5,153 +5,152 @@ using System.Threading.Tasks;
using SharpCompress.Common.Arj.Headers;
using SharpCompress.Crypto;

-namespace SharpCompress.Common.Ace.Headers
+namespace SharpCompress.Common.Ace.Headers;

/// <summary>
/// Header type constants
/// </summary>
public enum AceHeaderType
{
    MAIN = 0,
    FILE = 1,
    RECOVERY32 = 2,
    RECOVERY64A = 3,
    RECOVERY64B = 4,
}

public abstract partial class AceHeader
{
    // ACE signature: bytes at offset 7 should be "**ACE**"
    private static readonly byte[] AceSignature =
    [
        (byte)'*',
        (byte)'*',
        (byte)'A',
        (byte)'C',
        (byte)'E',
        (byte)'*',
        (byte)'*',
    ];

    public AceHeader(IArchiveEncoding archiveEncoding, AceHeaderType type)
    {
        AceHeaderType = type;
        ArchiveEncoding = archiveEncoding;
    }

    public IArchiveEncoding ArchiveEncoding { get; }
    public AceHeaderType AceHeaderType { get; }

    public ushort HeaderFlags { get; set; }
    public ushort HeaderCrc { get; set; }
    public ushort HeaderSize { get; set; }
    public byte HeaderType { get; set; }

    public bool IsFileEncrypted =>
        (HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.FILE_ENCRYPTED) != 0;
    public bool Is64Bit =>
        (HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.MEMORY_64BIT) != 0;

    public bool IsSolid =>
        (HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.SOLID_MAIN) != 0;

    public bool IsMultiVolume =>
        (HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.MULTIVOLUME) != 0;

    public abstract AceHeader? Read(Stream reader);

    // Async methods moved to AceHeader.Async.cs

    public byte[] ReadHeader(Stream stream)
    {
        // Read header CRC (2 bytes) and header size (2 bytes)
        var headerBytes = new byte[4];
-        if (stream.Read(headerBytes, 0, 4) != 4)
+        if (!stream.ReadFully(headerBytes))
        {
            return Array.Empty<byte>();
        }

        HeaderCrc = BitConverter.ToUInt16(headerBytes, 0); // CRC for validation
        HeaderSize = BitConverter.ToUInt16(headerBytes, 2);
        if (HeaderSize == 0)
        {
            return Array.Empty<byte>();
        }

        // Read the header data
        var body = new byte[HeaderSize];
-        if (stream.Read(body, 0, HeaderSize) != HeaderSize)
+        if (!stream.ReadFully(body))
        {
            return Array.Empty<byte>();
        }

        // Verify crc
        var checksum = AceCrc.AceCrc16(body);
        if (checksum != HeaderCrc)
        {
            throw new InvalidDataException("Header checksum is invalid");
        }
        return body;
    }

    public static bool IsArchive(Stream stream)
    {
        // ACE files have a specific signature
        // First two bytes are typically 0x60 0xEA (signature bytes)
        // At offset 7, there should be "**ACE**" (7 bytes)
        var bytes = new byte[14];
        if (stream.Read(bytes, 0, 14) != 14)
        {
            return false;
        }

        // Check for "**ACE**" at offset 7
        return CheckMagicBytes(bytes, 7);
    }

    protected static bool CheckMagicBytes(byte[] headerBytes, int offset)
    {
        // Check for "**ACE**" at specified offset
        for (int i = 0; i < AceSignature.Length; i++)
        {
            if (headerBytes[offset + i] != AceSignature[i])
            {
                return false;
            }
        }
        return true;
    }

    protected DateTime ConvertDosDateTime(uint dosDateTime)
    {
        try
        {
            int second = (int)(dosDateTime & 0x1F) * 2;
            int minute = (int)((dosDateTime >> 5) & 0x3F);
            int hour = (int)((dosDateTime >> 11) & 0x1F);
            int day = (int)((dosDateTime >> 16) & 0x1F);
            int month = (int)((dosDateTime >> 21) & 0x0F);
            int year = (int)((dosDateTime >> 25) & 0x7F) + 1980;

            if (
                day < 1
                || day > 31
                || month < 1
                || month > 12
                || hour > 23
                || minute > 59
                || second > 59
            )
            {
                return DateTime.MinValue;
            }

            return new DateTime(year, month, day, hour, minute, second);
        }
        catch
        {
            return DateTime.MinValue;
        }
    }
}
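For the ConvertDosDateTime bit layout above, a small worked example may help. The helper class and sample value below are illustrative and not part of the diff: a 32-bit DOS timestamp packs seconds/2, minutes, and hours into the low word and day, month, and year-1980 into the high word.

```csharp
using System;

static class DosDateTimeDemo
{
    // Same bit unpacking as ConvertDosDateTime in the hunk above (range validation omitted).
    static DateTime FromDos(uint dos)
    {
        int second = (int)(dos & 0x1F) * 2;
        int minute = (int)((dos >> 5) & 0x3F);
        int hour = (int)((dos >> 11) & 0x1F);
        int day = (int)((dos >> 16) & 0x1F);
        int month = (int)((dos >> 21) & 0x0F);
        int year = (int)((dos >> 25) & 0x7F) + 1980;
        return new DateTime(year, month, day, hour, minute, second);
    }

    static void Main()
    {
        // 2024-06-15 12:30:40 packed by hand: (44<<25)|(6<<21)|(15<<16)|(12<<11)|(30<<5)|20
        uint dos = 0x58CF63D4;
        Console.WriteLine(FromDos(dos)); // 2024-06-15 12:30:40 (formatting is culture-dependent)
    }
}
```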
@@ -19,7 +19,7 @@ public sealed partial class AceMainHeader
        CancellationToken cancellationToken = default
    )
    {
-        var headerData = await ReadHeaderAsync(stream, cancellationToken);
+        var headerData = await ReadHeaderAsync(stream, cancellationToken).ConfigureAwait(false);
        if (headerData.Length == 0)
        {
            return null;
@@ -8,94 +8,93 @@ using SharpCompress.Common.Ace.Headers;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Crypto;

-namespace SharpCompress.Common.Ace.Headers
+namespace SharpCompress.Common.Ace.Headers;

/// <summary>
/// ACE main archive header
/// </summary>
public sealed partial class AceMainHeader : AceHeader
{
    public byte ExtractVersion { get; set; }
    public byte CreatorVersion { get; set; }
    public HostOS HostOS { get; set; }
    public byte VolumeNumber { get; set; }
    public DateTime DateTime { get; set; }
    public string Advert { get; set; } = string.Empty;
    public List<byte> Comment { get; set; } = new();
    public byte AceVersion { get; private set; }

    public AceMainHeader(IArchiveEncoding archiveEncoding)
        : base(archiveEncoding, AceHeaderType.MAIN) { }

    /// <summary>
    /// Reads the main archive header from the stream.
    /// Returns header if this is a valid ACE archive.
    /// Supports both ACE 1.0 and ACE 2.0 formats.
    /// </summary>
    public override AceHeader? Read(Stream stream)
    {
        var headerData = ReadHeader(stream);
        if (headerData.Length == 0)
        {
            return null;
        }
        int offset = 0;

        // Header type should be 0 for main header
        if (headerData[offset++] != HeaderType)
        {
            return null;
        }

        // Header flags (2 bytes)
        HeaderFlags = BitConverter.ToUInt16(headerData, offset);
        offset += 2;

        // Skip signature "**ACE**" (7 bytes)
        if (!CheckMagicBytes(headerData, offset))
        {
            throw new InvalidDataException("Invalid ACE archive signature.");
        }
        offset += 7;

        // ACE version (1 byte) - 10 for ACE 1.0, 20 for ACE 2.0
        AceVersion = headerData[offset++];
        ExtractVersion = headerData[offset++];

        // Host OS (1 byte)
        if (offset < headerData.Length)
        {
            var hostOsByte = headerData[offset++];
            HostOS = hostOsByte <= 11 ? (HostOS)hostOsByte : HostOS.Unknown;
        }
        // Volume number (1 byte)
        VolumeNumber = headerData[offset++];

        // Creation date/time (4 bytes)
        var dosDateTime = BitConverter.ToUInt32(headerData, offset);
        DateTime = ConvertDosDateTime(dosDateTime);
        offset += 4;

        // Reserved fields (8 bytes)
        if (offset + 8 <= headerData.Length)
        {
            offset += 8;
        }

        // Skip additional fields based on flags
        // Handle comment if present
        if ((HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.COMMENT) != 0)
        {
            if (offset + 2 <= headerData.Length)
            {
                ushort commentLength = BitConverter.ToUInt16(headerData, offset);
                offset += 2 + commentLength;
            }
        }

        return this;
    }

    // ReadAsync moved to AceMainHeader.Async.cs
}
@@ -1,16 +1,15 @@
-namespace SharpCompress.Common.Ace.Headers
+namespace SharpCompress.Common.Ace.Headers;

/// <summary>
/// Compression quality
/// </summary>
public enum CompressionQuality
{
    None,
    Fastest,
    Fast,
    Normal,
    Good,
    Best,
    Unknown,
}
@@ -1,13 +1,12 @@
-namespace SharpCompress.Common.Ace.Headers
+namespace SharpCompress.Common.Ace.Headers;

/// <summary>
/// Compression types
/// </summary>
public enum CompressionType
{
    Stored,
    Lz77,
    Blocked,
    Unknown,
}
@@ -1,33 +1,32 @@
-namespace SharpCompress.Common.Ace.Headers
+namespace SharpCompress.Common.Ace.Headers;

/// <summary>
/// Header flags (main + file, overlapping meanings)
/// </summary>
public static class HeaderFlags
{
    // Shared / low bits
    public const ushort ADDSIZE = 0x0001; // extra size field present
    public const ushort COMMENT = 0x0002; // comment present
    public const ushort MEMORY_64BIT = 0x0004;
    public const ushort AV_STRING = 0x0008; // AV string present
    public const ushort SOLID = 0x0010; // solid file
    public const ushort LOCKED = 0x0020;
    public const ushort PROTECTED = 0x0040;

    // Main header specific
    public const ushort V20FORMAT = 0x0100;
    public const ushort SFX = 0x0200;
    public const ushort LIMITSFXJR = 0x0400;
    public const ushort MULTIVOLUME = 0x0800;
    public const ushort ADVERT = 0x1000;
    public const ushort RECOVERY = 0x2000;
    public const ushort LOCKED_MAIN = 0x4000;
    public const ushort SOLID_MAIN = 0x8000;

    // File header specific (same bits, different meaning)
    public const ushort NTSECURITY = 0x0400;
    public const ushort CONTINUED_PREV = 0x1000;
    public const ushort CONTINUED_NEXT = 0x2000;
    public const ushort FILE_ENCRYPTED = 0x4000; // file encrypted (file header)
}
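Several properties earlier in this diff (IsSolid, IsMultiVolume, IsFileEncrypted, IsContinuedFromPrev) are just masks against these constants. A tiny hedged sketch of how the overlapping main/file meanings play out; the flag value below is invented for illustration and the constants are repeated locally so the snippet is self-contained.

```csharp
using System;

static class AceHeaderFlagsDemo
{
    // Subset of the HeaderFlags constants above.
    const ushort COMMENT = 0x0002;
    const ushort MULTIVOLUME = 0x0800;    // main-header meaning of bit 11
    const ushort CONTINUED_PREV = 0x1000; // file-header meaning of bit 12
    const ushort SOLID_MAIN = 0x8000;

    static void Main()
    {
        ushort mainFlags = 0x8802; // COMMENT | MULTIVOLUME | SOLID_MAIN
        Console.WriteLine((mainFlags & COMMENT) != 0);        // True
        Console.WriteLine((mainFlags & MULTIVOLUME) != 0);    // True
        Console.WriteLine((mainFlags & CONTINUED_PREV) != 0); // False
        Console.WriteLine((mainFlags & SOLID_MAIN) != 0);     // True
    }
}
```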
@@ -1,22 +1,21 @@
-namespace SharpCompress.Common.Ace.Headers
+namespace SharpCompress.Common.Ace.Headers;

/// <summary>
/// Host OS type
/// </summary>
public enum HostOS
{
    MsDos = 0,
    Os2,
    Windows,
    Unix,
    MacOs,
    WinNt,
    Primos,
    AppleGs,
    Atari,
    Vax,
    Amiga,
    Next,
    Unknown,
}
@@ -5,56 +5,57 @@ using System.Linq;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common.GZip;
using SharpCompress.Common.Options;
using SharpCompress.Common.Tar;

-namespace SharpCompress.Common.Arc
+namespace SharpCompress.Common.Arc;

public class ArcEntry : Entry
{
    private readonly ArcFilePart? _filePart;

-    internal ArcEntry(ArcFilePart? filePart)
+    internal ArcEntry(ArcFilePart? filePart, IReaderOptions readerOptions)
+        : base(readerOptions)
    {
        _filePart = filePart;
    }

    public override long Crc
    {
        get
        {
            if (_filePart == null)
            {
                return 0;
            }
            return _filePart.Header.Crc16;
        }
    }

    public override string? Key => _filePart?.Header.Name;

    public override string? LinkTarget => null;

    public override long CompressedSize => _filePart?.Header.CompressedSize ?? 0;

    public override CompressionType CompressionType =>
        _filePart?.Header.CompressionMethod ?? CompressionType.Unknown;

    public override long Size => throw new NotImplementedException();

    public override DateTime? LastModifiedTime => null;

    public override DateTime? CreatedTime => null;

    public override DateTime? LastAccessedTime => null;

    public override DateTime? ArchivedTime => null;

    public override bool IsEncrypted => false;

    public override bool IsDirectory => false;

    public override bool IsSplitAfter => false;

    internal override IEnumerable<FilePart> Parts => _filePart.Empty();
}
@@ -2,75 +2,94 @@ using System;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;

-namespace SharpCompress.Common.Arc
+namespace SharpCompress.Common.Arc;

public class ArcEntryHeader
{
    public IArchiveEncoding ArchiveEncoding { get; }
    public CompressionType CompressionMethod { get; private set; }
    public string? Name { get; private set; }
    public long CompressedSize { get; private set; }
    public DateTime DateTime { get; private set; }
    public int Crc16 { get; private set; }
    public long OriginalSize { get; private set; }
    public long DataStartPosition { get; private set; }

    public ArcEntryHeader(IArchiveEncoding archiveEncoding)
    {
        this.ArchiveEncoding = archiveEncoding;
    }

    public ArcEntryHeader? ReadHeader(Stream stream)
    {
        byte[] headerBytes = new byte[29];
        if (stream.Read(headerBytes, 0, headerBytes.Length) != headerBytes.Length)
        {
            return null;
        }
        DataStartPosition = stream.Position;
        return LoadFrom(headerBytes);
    }

+    public async ValueTask<ArcEntryHeader?> ReadHeaderAsync(
+        Stream stream,
+        CancellationToken cancellationToken = default
+    )
+    {
+        byte[] headerBytes = new byte[29];
+        if (
+            await stream
+                .ReadAsync(headerBytes, 0, headerBytes.Length, cancellationToken)
+                .ConfigureAwait(false) != headerBytes.Length
+        )
+        {
+            return null;
+        }
+        DataStartPosition = stream.Position;
+        return LoadFrom(headerBytes);
+    }
+
    public ArcEntryHeader LoadFrom(byte[] headerBytes)
    {
        CompressionMethod = GetCompressionType(headerBytes[1]);

        // Read name
        int nameEnd = Array.IndexOf(headerBytes, (byte)0, 1); // Find null terminator
        Name = Encoding.UTF8.GetString(headerBytes, 2, nameEnd > 0 ? nameEnd - 2 : 12);

        int offset = 15;
        CompressedSize = BitConverter.ToUInt32(headerBytes, offset);
        offset += 4;
        uint rawDateTime = BitConverter.ToUInt32(headerBytes, offset);
        DateTime = ConvertToDateTime(rawDateTime);
        offset += 4;
        Crc16 = BitConverter.ToUInt16(headerBytes, offset);
        offset += 2;
        OriginalSize = BitConverter.ToUInt32(headerBytes, offset);
        return this;
    }

    private CompressionType GetCompressionType(byte value)
    {
        return value switch
        {
            1 or 2 => CompressionType.None,
            3 => CompressionType.Packed,
            4 => CompressionType.Squeezed,
            5 or 6 or 7 or 8 => CompressionType.Crunched,
            9 => CompressionType.Squashed,
            10 => CompressionType.Crushed,
            11 => CompressionType.Distilled,
            _ => CompressionType.Unknown,
        };
    }

    public static DateTime ConvertToDateTime(long rawDateTime)
    {
        // Convert Unix timestamp to DateTime (UTC)
        return DateTimeOffset.FromUnixTimeSeconds(rawDateTime).UtcDateTime;
    }
}
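LoadFrom above walks a fixed 29-byte ARC entry header. Below is a hedged, standalone sketch of the same little-endian field offsets; the demo class and the fabricated buffer are mine and not part of the diff, and the name bytes are skipped for brevity.

```csharp
using System;

static class ArcHeaderLayoutDemo
{
    // Walks the same little-endian offsets LoadFrom uses (name bytes at 2..14 omitted).
    static void Describe(byte[] h)
    {
        if (h.Length < 29)
        {
            throw new ArgumentException("ARC entry headers are 29 bytes long", nameof(h));
        }
        byte method = h[1]; // compression method byte, mapped by GetCompressionType
        uint compressedSize = BitConverter.ToUInt32(h, 15);
        uint rawDateTime = BitConverter.ToUInt32(h, 19);
        ushort crc16 = BitConverter.ToUInt16(h, 23);
        uint originalSize = BitConverter.ToUInt32(h, 25);
        Console.WriteLine(
            $"method={method} packed={compressedSize} raw-time={rawDateTime} crc=0x{crc16:X4} size={originalSize}"
        );
    }

    static void Main()
    {
        var header = new byte[29]; // fabricated buffer, not a real archive
        header[1] = 9; // "Squashed" per the GetCompressionType switch above
        BitConverter.GetBytes(1234u).CopyTo(header, 15);
        BitConverter.GetBytes((ushort)0xBEEF).CopyTo(header, 23);
        BitConverter.GetBytes(5678u).CopyTo(header, 25);
        Describe(header);
    }
}
```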
56  src/SharpCompress/Common/Arc/ArcFilePart.Async.cs  Normal file
@@ -0,0 +1,56 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.Lzw;
using SharpCompress.Compressors.RLE90;
using SharpCompress.Compressors.Squeezed;
using SharpCompress.IO;

namespace SharpCompress.Common.Arc;

public partial class ArcFilePart
{
    internal override async ValueTask<Stream?> GetCompressedStreamAsync(
        CancellationToken cancellationToken = default
    )
    {
        if (_stream != null)
        {
            Stream compressedStream;
            switch (Header.CompressionMethod)
            {
                case CompressionType.None:
                    compressedStream = new ReadOnlySubStream(
                        _stream,
                        Header.DataStartPosition,
                        Header.CompressedSize
                    );
                    break;
                case CompressionType.Packed:
                    compressedStream = new RunLength90Stream(_stream, (int)Header.CompressedSize);
                    break;
                case CompressionType.Squeezed:
                    compressedStream = await SqueezeStream
                        .CreateAsync(_stream, (int)Header.CompressedSize, cancellationToken)
                        .ConfigureAwait(false);
                    break;
                case CompressionType.Crunched:
                    if (Header.OriginalSize > 128 * 1024)
                    {
                        throw new NotSupportedException(
                            "CompressionMethod: " + Header.CompressionMethod + " with size > 128KB"
                        );
                    }
                    compressedStream = new ArcLzwStream(_stream, (int)Header.CompressedSize, true);
                    break;
                default:
                    throw new NotSupportedException(
                        "CompressionMethod: " + Header.CompressionMethod
                    );
            }
            return compressedStream;
        }
        return _stream;
    }
}
@@ -13,71 +13,61 @@ using SharpCompress.Compressors.RLE90;
using SharpCompress.Compressors.Squeezed;
using SharpCompress.IO;

-namespace SharpCompress.Common.Arc
+namespace SharpCompress.Common.Arc;

-public class ArcFilePart : FilePart
+public partial class ArcFilePart : FilePart
{
    private readonly Stream? _stream;

    internal ArcFilePart(ArcEntryHeader localArcHeader, Stream? seekableStream)
        : base(localArcHeader.ArchiveEncoding)
    {
        _stream = seekableStream;
        Header = localArcHeader;
    }

    internal ArcEntryHeader Header { get; set; }

    internal override string? FilePartName => Header.Name;

    internal override Stream GetCompressedStream()
    {
        if (_stream != null)
        {
            Stream compressedStream;
            switch (Header.CompressionMethod)
            {
                case CompressionType.None:
                    compressedStream = new ReadOnlySubStream(
                        _stream,
                        Header.DataStartPosition,
                        Header.CompressedSize
                    );
                    break;
                case CompressionType.Packed:
-                    compressedStream = new RunLength90Stream(
-                        _stream,
-                        (int)Header.CompressedSize
-                    );
+                    compressedStream = new RunLength90Stream(_stream, (int)Header.CompressedSize);
                    break;
                case CompressionType.Squeezed:
-                    compressedStream = new SqueezeStream(_stream, (int)Header.CompressedSize);
+                    compressedStream = SqueezeStream.Create(_stream, (int)Header.CompressedSize);
                    break;
                case CompressionType.Crunched:
                    if (Header.OriginalSize > 128 * 1024)
                    {
                        throw new NotSupportedException(
-                            "CompressionMethod: "
-                                + Header.CompressionMethod
-                                + " with size > 128KB"
+                            "CompressionMethod: " + Header.CompressionMethod + " with size > 128KB"
                        );
                    }
-                    compressedStream = new ArcLzwStream(
-                        _stream,
-                        (int)Header.CompressedSize,
-                        true
-                    );
+                    compressedStream = new ArcLzwStream(_stream, (int)Header.CompressedSize, true);
                    break;
                default:
                    throw new NotSupportedException(
                        "CompressionMethod: " + Header.CompressionMethod
                    );
            }
            return compressedStream;
        }
        return _stream.NotNull();
    }

    internal override Stream? GetRawStream() => _stream;
}
@@ -6,11 +6,10 @@ using System.Text;
using System.Threading.Tasks;
using SharpCompress.Readers;

-namespace SharpCompress.Common.Arc
+namespace SharpCompress.Common.Arc;

public class ArcVolume : Volume
{
    public ArcVolume(Stream stream, ReaderOptions readerOptions, int index = 0)
        : base(stream, readerOptions, index) { }
}
@@ -10,4 +10,5 @@ public enum ArchiveType
    Arc,
    Arj,
    Ace,
    Lzw,
}
@@ -5,54 +5,55 @@ using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common.Arc;
using SharpCompress.Common.Arj.Headers;
using SharpCompress.Common.Options;

-namespace SharpCompress.Common.Arj
+namespace SharpCompress.Common.Arj;

public class ArjEntry : Entry
{
    private readonly ArjFilePart _filePart;

-    internal ArjEntry(ArjFilePart filePart)
+    internal ArjEntry(ArjFilePart filePart, IReaderOptions readerOptions)
+        : base(readerOptions)
    {
        _filePart = filePart;
    }

    public override long Crc => _filePart.Header.OriginalCrc32;

    public override string? Key => _filePart?.Header.Name;

    public override string? LinkTarget => null;

    public override long CompressedSize => _filePart?.Header.CompressedSize ?? 0;

    public override CompressionType CompressionType
    {
        get
        {
            if (_filePart.Header.CompressionMethod == CompressionMethod.Stored)
            {
                return CompressionType.None;
            }
            return CompressionType.ArjLZ77;
        }
    }

    public override long Size => _filePart?.Header.OriginalSize ?? 0;

    public override DateTime? LastModifiedTime => _filePart.Header.DateTimeModified.DateTime;

-    public override DateTime? CreatedTime => _filePart.Header.DateTimeCreated.DateTime;
+    public override DateTime? CreatedTime => _filePart.Header.DateTimeCreated?.DateTime;

-    public override DateTime? LastAccessedTime => _filePart.Header.DateTimeAccessed.DateTime;
+    public override DateTime? LastAccessedTime => _filePart.Header.DateTimeAccessed?.DateTime;

    public override DateTime? ArchivedTime => null;

    public override bool IsEncrypted => false;

    public override bool IsDirectory => _filePart.Header.FileType == FileType.Directory;

    public override bool IsSplitAfter => false;

    internal override IEnumerable<FilePart> Parts => _filePart.Empty();
}
@@ -8,65 +8,62 @@ using SharpCompress.Common.Arj.Headers;
using SharpCompress.Compressors.Arj;
using SharpCompress.IO;

-namespace SharpCompress.Common.Arj
+namespace SharpCompress.Common.Arj;

public class ArjFilePart : FilePart
{
    private readonly Stream _stream;
    internal ArjLocalHeader Header { get; set; }

    internal ArjFilePart(ArjLocalHeader localArjHeader, Stream seekableStream)
        : base(localArjHeader.ArchiveEncoding)
    {
        _stream = seekableStream;
        Header = localArjHeader;
    }

    internal override string? FilePartName => Header.Name;

    internal override Stream GetCompressedStream()
    {
        if (_stream != null)
        {
            Stream compressedStream;
            switch (Header.CompressionMethod)
            {
                case CompressionMethod.Stored:
                    compressedStream = new ReadOnlySubStream(
                        _stream,
                        Header.DataStartPosition,
                        Header.CompressedSize
                    );
                    break;
                case CompressionMethod.CompressedMost:
                case CompressionMethod.Compressed:
                case CompressionMethod.CompressedFaster:
                    if (Header.OriginalSize > 128 * 1024)
                    {
                        throw new NotSupportedException(
-                            "CompressionMethod: "
-                                + Header.CompressionMethod
-                                + " with size > 128KB"
+                            "CompressionMethod: " + Header.CompressionMethod + " with size > 128KB"
                        );
                    }
                    compressedStream = new LhaStream<Lh7DecoderCfg>(
                        _stream,
                        (int)Header.OriginalSize
                    );
                    break;
                case CompressionMethod.CompressedFastest:
                    compressedStream = new LHDecoderStream(_stream, (int)Header.OriginalSize);
                    break;
                default:
                    throw new NotSupportedException(
                        "CompressionMethod: " + Header.CompressionMethod
                    );
            }
            return compressedStream;
        }
        return _stream.NotNull();
    }

    internal override Stream GetRawStream() => _stream;
}
@@ -8,29 +8,28 @@ using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Readers;

-namespace SharpCompress.Common.Arj
+namespace SharpCompress.Common.Arj;

public class ArjVolume : Volume
{
    public ArjVolume(Stream stream, ReaderOptions readerOptions, int index = 0)
        : base(stream, readerOptions, index) { }

    public override bool IsFirstVolume
    {
        get { return true; }
    }

    /// <summary>
    /// ArjArchive is part of a multi-part archive.
    /// </summary>
    public override bool IsMultiVolume
    {
        get { return false; }
    }

    internal IEnumerable<ArjFilePart> GetVolumeFileParts()
    {
        return new List<ArjFilePart>();
    }
}
@@ -21,7 +21,7 @@ public abstract partial class ArjHeader
    {
        // check for magic bytes
        var magic = new byte[2];
-        if (await stream.ReadAsync(magic, 0, 2, cancellationToken) != 2)
+        if (await stream.ReadAsync(magic, 0, 2, cancellationToken).ConfigureAwait(false) != 2)
        {
            return Array.Empty<byte>();
        }
@@ -33,7 +33,7 @@ public abstract partial class ArjHeader

        // read header_size
        byte[] headerBytes = new byte[2];
-        await stream.ReadAsync(headerBytes, 0, 2, cancellationToken);
+        await stream.ReadAsync(headerBytes, 0, 2, cancellationToken).ConfigureAwait(false);
        var headerSize = (ushort)(headerBytes[0] | headerBytes[1] << 8);
        if (headerSize < 1)
        {
@@ -41,14 +41,16 @@ public abstract partial class ArjHeader
        }

        var body = new byte[headerSize];
-        var read = await stream.ReadAsync(body, 0, headerSize, cancellationToken);
+        var read = await stream
+            .ReadAsync(body, 0, headerSize, cancellationToken)
+            .ConfigureAwait(false);
        if (read < headerSize)
        {
            return Array.Empty<byte>();
        }

        byte[] crc = new byte[4];
-        read = await stream.ReadAsync(crc, 0, 4, cancellationToken);
+        await stream.ReadFullyAsync(crc, 0, 4, cancellationToken).ConfigureAwait(false);
        var checksum = Crc32Stream.Compute(body);
        // Compute the hash value
        if (checksum != BitConverter.ToUInt32(crc, 0))
@@ -68,7 +70,9 @@ public abstract partial class ArjHeader

        while (true)
        {
-            int bytesRead = await reader.ReadAsync(buffer, 0, 2, cancellationToken);
+            int bytesRead = await reader
+                .ReadAsync(buffer, 0, 2, cancellationToken)
+                .ConfigureAwait(false);
            if (bytesRead < 2)
            {
                throw new EndOfStreamException(
@@ -83,7 +87,9 @@ public abstract partial class ArjHeader
            }

            byte[] header = new byte[extHeaderSize];
-            bytesRead = await reader.ReadAsync(header, 0, extHeaderSize, cancellationToken);
+            bytesRead = await reader
+                .ReadAsync(header, 0, extHeaderSize, cancellationToken)
+                .ConfigureAwait(false);
            if (bytesRead < extHeaderSize)
            {
                throw new EndOfStreamException(
@@ -92,7 +98,9 @@ public abstract partial class ArjHeader
            }

            byte[] crcextended = new byte[4];
-            bytesRead = await reader.ReadAsync(crcextended, 0, 4, cancellationToken);
+            bytesRead = await reader
+                .ReadAsync(crcextended, 0, 4, cancellationToken)
+                .ConfigureAwait(false);
            if (bytesRead < 4)
            {
                throw new EndOfStreamException(
@@ -122,7 +130,7 @@ public abstract partial class ArjHeader
    )
    {
        var bytes = new byte[2];
-        if (await stream.ReadAsync(bytes, 0, 2, cancellationToken) != 2)
+        if (await stream.ReadAsync(bytes, 0, 2, cancellationToken).ConfigureAwait(false) != 2)
        {
            return false;
        }
@@ -8,156 +8,153 @@ using System.Threading.Tasks;
|
||||
using SharpCompress.Common.Zip.Headers;
|
||||
using SharpCompress.Crypto;
|
||||
|
||||
namespace SharpCompress.Common.Arj.Headers
|
||||
namespace SharpCompress.Common.Arj.Headers;
|
||||
|
||||
public enum ArjHeaderType
|
||||
{
|
||||
public enum ArjHeaderType
|
||||
MainHeader,
|
||||
LocalHeader,
|
||||
}
|
||||
|
||||
public abstract partial class ArjHeader
|
||||
{
|
||||
private const int FIRST_HDR_SIZE = 34;
|
||||
private const ushort ARJ_MAGIC = 0xEA60;
|
||||
|
||||
public ArjHeader(ArjHeaderType type)
|
||||
{
|
||||
MainHeader,
|
||||
LocalHeader,
|
||||
ArjHeaderType = type;
|
||||
}
|
||||
|
||||
public abstract partial class ArjHeader
|
||||
{
|
||||
private const int FIRST_HDR_SIZE = 34;
|
||||
private const ushort ARJ_MAGIC = 0xEA60;
|
||||
public ArjHeaderType ArjHeaderType { get; }
|
||||
public byte Flags { get; set; }
|
||||
public FileType FileType { get; set; }
|
||||
|
||||
public ArjHeader(ArjHeaderType type)
|
||||
public abstract ArjHeader? Read(Stream reader);
|
||||
|
||||
// Async methods moved to ArjHeader.Async.cs
|
||||
|
||||
public byte[] ReadHeader(Stream stream)
|
||||
{
|
||||
// check for magic bytes
|
||||
var magic = new byte[2];
|
||||
if (stream.Read(magic) != 2)
|
||||
{
|
||||
ArjHeaderType = type;
|
||||
return Array.Empty<byte>();
|
||||
}
|
||||
|
||||
public ArjHeaderType ArjHeaderType { get; }
|
||||
public byte Flags { get; set; }
|
||||
public FileType FileType { get; set; }
|
||||
|
||||
public abstract ArjHeader? Read(Stream reader);
|
||||
|
||||
// Async methods moved to ArjHeader.Async.cs
|
||||
|
||||
public byte[] ReadHeader(Stream stream)
|
||||
if (!CheckMagicBytes(magic))
|
||||
{
|
||||
// check for magic bytes
|
||||
var magic = new byte[2];
|
||||
if (stream.Read(magic) != 2)
|
||||
throw new InvalidDataException("Not an ARJ file (wrong magic bytes)");
|
||||
}
|
        // read header_size
        byte[] headerBytes = new byte[2];
        stream.Read(headerBytes, 0, 2);
        var headerSize = (ushort)(headerBytes[0] | headerBytes[1] << 8);
        if (headerSize < 1)
        {
            return Array.Empty<byte>();
        }

        var body = new byte[headerSize];
        var read = stream.Read(body, 0, headerSize);
        if (read < headerSize)
        {
            return Array.Empty<byte>();
        }

        byte[] crc = new byte[4];
        read = stream.Read(crc, 0, 4);

        // Compute the hash value
        var checksum = Crc32Stream.Compute(body);
        if (checksum != BitConverter.ToUInt32(crc, 0))
        {
            throw new InvalidDataException("Header checksum is invalid");
        }
        return body;
    }

    // ReadHeaderAsync moved to ArjHeader.Async.cs

    protected List<byte[]> ReadExtendedHeaders(Stream reader)
    {
        List<byte[]> extendedHeader = new List<byte[]>();
        byte[] buffer = new byte[2];

        while (true)
        {
            int bytesRead = reader.Read(buffer, 0, 2);
            if (bytesRead < 2)
            {
                throw new EndOfStreamException(
                    "Unexpected end of stream while reading extended header size."
                );
            }

            var extHeaderSize = (ushort)(buffer[0] | (buffer[1] << 8));
            if (extHeaderSize == 0)
            {
                return extendedHeader;
            }

            byte[] header = new byte[extHeaderSize];
            bytesRead = reader.Read(header, 0, extHeaderSize);
            if (bytesRead < extHeaderSize)
            {
                throw new EndOfStreamException(
                    "Unexpected end of stream while reading extended header data."
                );
            }

            byte[] crc = new byte[4];
            bytesRead = reader.Read(crc, 0, 4);
            if (bytesRead < 4)
            {
                throw new EndOfStreamException(
                    "Unexpected end of stream while reading extended header CRC."
                );
            }

            var checksum = Crc32Stream.Compute(header);
            if (checksum != BitConverter.ToUInt32(crc, 0))
            {
                throw new InvalidDataException("Extended header checksum is invalid");
            }

            extendedHeader.Add(header);
        }
    }

    // Flag helpers
    public bool IsGabled => (Flags & 0x01) != 0;
    public bool IsAnsiPage => (Flags & 0x02) != 0;
    public bool IsVolume => (Flags & 0x04) != 0;
    public bool IsArjProtected => (Flags & 0x08) != 0;
    public bool IsPathSym => (Flags & 0x10) != 0;
    public bool IsBackup => (Flags & 0x20) != 0;
    public bool IsSecured => (Flags & 0x40) != 0;
    public bool IsAltName => (Flags & 0x80) != 0;

    public static FileType FileTypeFromByte(byte value)
    {
        return Enum.IsDefined(typeof(FileType), value) ? (FileType)value : Headers.FileType.Unknown;
    }

    public static bool IsArchive(Stream stream)
    {
        var bytes = new byte[2];
        if (stream.Read(bytes, 0, 2) != 2)
        {
            return false;
        }

        return CheckMagicBytes(bytes);
    }

    protected static bool CheckMagicBytes(byte[] headerBytes)
    {
        var magicValue = (ushort)(headerBytes[0] | headerBytes[1] << 8);
        return magicValue == ARJ_MAGIC;
    }
}
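IsArchive and CheckMagicBytes above boil down to a two-byte, little-endian signature test. As an illustrative aside (not code from this diff), the same check can be done standalone against a file; the 0xEA60 value mirrors the ARJ_MAGIC constant declared in ArjMainHeader further down, and LooksLikeArj is a hypothetical helper name.

    using System;
    using System.IO;

    // Sketch only: the same magic-byte test performed by IsArchive/CheckMagicBytes above.
    static bool LooksLikeArj(string path)
    {
        using var fs = File.OpenRead(path);
        Span<byte> magic = stackalloc byte[2];
        if (fs.Read(magic) != 2)
        {
            return false;
        }
        // ARJ archives start with 0xEA60, stored little-endian (0x60 0xEA on disk).
        return (ushort)(magic[0] | magic[1] << 8) == 0xEA60;
    }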
@@ -11,10 +11,10 @@ public partial class ArjLocalHeader
         CancellationToken cancellationToken = default
     )
     {
-        var body = await ReadHeaderAsync(stream, cancellationToken);
+        var body = await ReadHeaderAsync(stream, cancellationToken).ConfigureAwait(false);
         if (body.Length > 0)
         {
-            await ReadExtendedHeadersAsync(stream, cancellationToken);
+            await ReadExtendedHeadersAsync(stream, cancellationToken).ConfigureAwait(false);
             var header = LoadFrom(body);
             header.DataStartPosition = stream.Position;
             return header;
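The only change in this hunk (and in the matching ArjMainHeader.Async.cs hunk below) is the addition of .ConfigureAwait(false) to each await. In library code this stops continuations from being posted back to the caller's SynchronizationContext. The general shape, with DoIoAsync standing in for any awaited call, is:

    // Library-side awaits: don't resume on the captured context.
    await DoIoAsync(cancellationToken).ConfigureAwait(false);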
@@ -7,158 +7,154 @@ using System.Text;
using System.Threading;
using System.Threading.Tasks;

namespace SharpCompress.Common.Arj.Headers;

public partial class ArjLocalHeader : ArjHeader
{
    public ArchiveEncoding ArchiveEncoding { get; }
    public long DataStartPosition { get; protected set; }

    public byte ArchiverVersionNumber { get; set; }
    public byte MinVersionToExtract { get; set; }
    public HostOS HostOS { get; set; }
    public CompressionMethod CompressionMethod { get; set; }
    public DosDateTime DateTimeModified { get; set; } = new DosDateTime(0);
    public long CompressedSize { get; set; }
    public long OriginalSize { get; set; }
    public long OriginalCrc32 { get; set; }
    public int FileSpecPosition { get; set; }
    public int FileAccessMode { get; set; }
    public byte FirstChapter { get; set; }
    public byte LastChapter { get; set; }
    public long ExtendedFilePosition { get; set; }
    public DosDateTime? DateTimeAccessed { get; set; }
    public DosDateTime? DateTimeCreated { get; set; }
    public long OriginalSizeEvenForVolumes { get; set; }
    public string Name { get; set; } = string.Empty;
    public string Comment { get; set; } = string.Empty;

    private const byte StdHdrSize = 30;
    private const byte R9HdrSize = 46;

    public ArjLocalHeader(ArchiveEncoding archiveEncoding)
        : base(ArjHeaderType.LocalHeader)
    {
        ArchiveEncoding =
            archiveEncoding ?? throw new ArgumentNullException(nameof(archiveEncoding));
    }

    public override ArjHeader? Read(Stream stream)
    {
        var body = ReadHeader(stream);
        if (body.Length > 0)
        {
            ReadExtendedHeaders(stream);
            var header = LoadFrom(body);
            header.DataStartPosition = stream.Position;
            return header;
        }
        return null;
    }

    // ReadAsync moved to ArjLocalHeader.Async.cs

    public ArjLocalHeader LoadFrom(byte[] headerBytes)
    {
        int offset = 0;

        int ReadInt16()
        {
            if (offset + 1 >= headerBytes.Length)
            {
                throw new EndOfStreamException();
            }
            var v = headerBytes[offset] & 0xFF | (headerBytes[offset + 1] & 0xFF) << 8;
            offset += 2;
            return v;
        }
        long ReadInt32()
        {
            if (offset + 3 >= headerBytes.Length)
            {
                throw new EndOfStreamException();
            }
            long v =
                headerBytes[offset] & 0xFF
                | (headerBytes[offset + 1] & 0xFF) << 8
                | (headerBytes[offset + 2] & 0xFF) << 16
                | (headerBytes[offset + 3] & 0xFF) << 24;
            offset += 4;
            return v;
        }

        byte headerSize = headerBytes[offset++];
        ArchiverVersionNumber = headerBytes[offset++];
        MinVersionToExtract = headerBytes[offset++];
        HostOS hostOS = (HostOS)headerBytes[offset++];
        Flags = headerBytes[offset++];
        CompressionMethod = CompressionMethodFromByte(headerBytes[offset++]);
        FileType = FileTypeFromByte(headerBytes[offset++]);

        offset++; // Skip 1 byte

        var rawTimestamp = ReadInt32();
        DateTimeModified = rawTimestamp != 0 ? new DosDateTime(rawTimestamp) : new DosDateTime(0);

        CompressedSize = ReadInt32();
        OriginalSize = ReadInt32();
        OriginalCrc32 = ReadInt32();
        FileSpecPosition = ReadInt16();
        FileAccessMode = ReadInt16();

        FirstChapter = headerBytes[offset++];
        LastChapter = headerBytes[offset++];

        ExtendedFilePosition = 0;
        OriginalSizeEvenForVolumes = 0;

        if (headerSize > StdHdrSize)
        {
            ExtendedFilePosition = ReadInt32();

            if (headerSize >= R9HdrSize)
            {
                rawTimestamp = ReadInt32();
                DateTimeAccessed = rawTimestamp != 0 ? new DosDateTime(rawTimestamp) : null;
                rawTimestamp = ReadInt32();
                DateTimeCreated = rawTimestamp != 0 ? new DosDateTime(rawTimestamp) : null;
                OriginalSizeEvenForVolumes = ReadInt32();
            }
        }

        Name = Encoding.ASCII.GetString(
            headerBytes,
            offset,
            Array.IndexOf(headerBytes, (byte)0, offset) - offset
        );
        offset += Name.Length + 1;

        Comment = Encoding.ASCII.GetString(
            headerBytes,
            offset,
            Array.IndexOf(headerBytes, (byte)0, offset) - offset
        );
        offset += Comment.Length + 1;

        return this;
    }

    public static CompressionMethod CompressionMethodFromByte(byte value)
    {
        return value switch
        {
            0 => CompressionMethod.Stored,
            1 => CompressionMethod.CompressedMost,
            2 => CompressionMethod.Compressed,
            3 => CompressionMethod.CompressedFaster,
            4 => CompressionMethod.CompressedFastest,
            8 => CompressionMethod.NoDataNoCrc,
            9 => CompressionMethod.NoData,
            _ => CompressionMethod.Unknown,
        };
    }
}
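The LoadFrom methods in this file and in ArjMainHeader.cs below assemble 16- and 32-bit header fields from raw bytes with explicit little-endian shifts. Purely as an illustration (not code from this diff), the same reads can be expressed with BinaryPrimitives, which the library already uses elsewhere (see the removed AsyncBinaryReader further down); headerBytes and offset mirror the locals above.

    using System;
    using System.Buffers.Binary;

    // Illustrative equivalents of the manual bit-shifting in LoadFrom.
    static ushort ReadUInt16LE(byte[] headerBytes, int offset) =>
        BinaryPrimitives.ReadUInt16LittleEndian(headerBytes.AsSpan(offset, 2));

    static uint ReadUInt32LE(byte[] headerBytes, int offset) =>
        BinaryPrimitives.ReadUInt32LittleEndian(headerBytes.AsSpan(offset, 4));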
@@ -11,8 +11,8 @@ public partial class ArjMainHeader
         CancellationToken cancellationToken = default
     )
     {
-        var body = await ReadHeaderAsync(stream, cancellationToken);
-        await ReadExtendedHeadersAsync(stream, cancellationToken);
+        var body = await ReadHeaderAsync(stream, cancellationToken).ConfigureAwait(false);
+        await ReadExtendedHeadersAsync(stream, cancellationToken).ConfigureAwait(false);
         return LoadFrom(body);
     }
 }
@@ -6,137 +6,136 @@ using System.Threading.Tasks;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Crypto;

namespace SharpCompress.Common.Arj.Headers;

public partial class ArjMainHeader : ArjHeader
{
    private const int FIRST_HDR_SIZE = 34;
    private const ushort ARJ_MAGIC = 0xEA60;

    public ArchiveEncoding ArchiveEncoding { get; }

    public int ArchiverVersionNumber { get; private set; }
    public int MinVersionToExtract { get; private set; }
    public HostOS HostOs { get; private set; }
    public int SecurityVersion { get; private set; }
    public DosDateTime CreationDateTime { get; private set; } = new DosDateTime(0);
    public long CompressedSize { get; private set; }
    public long ArchiveSize { get; private set; }
    public long SecurityEnvelope { get; private set; }
    public int FileSpecPosition { get; private set; }
    public int SecurityEnvelopeLength { get; private set; }
    public int EncryptionVersion { get; private set; }
    public int LastChapter { get; private set; }

    public int ArjProtectionFactor { get; private set; }
    public int Flags2 { get; private set; }
    public string Name { get; private set; } = string.Empty;
    public string Comment { get; private set; } = string.Empty;

    public ArjMainHeader(ArchiveEncoding archiveEncoding)
        : base(ArjHeaderType.MainHeader)
    {
        ArchiveEncoding =
            archiveEncoding ?? throw new ArgumentNullException(nameof(archiveEncoding));
    }

    public override ArjHeader? Read(Stream stream)
    {
        var body = ReadHeader(stream);
        ReadExtendedHeaders(stream);
        return LoadFrom(body);
    }

    // ReadAsync moved to ArjMainHeader.Async.cs

    public ArjMainHeader LoadFrom(byte[] headerBytes)
    {
        var offset = 1;

        byte ReadByte()
        {
            if (offset >= headerBytes.Length)
            {
                throw new EndOfStreamException();
            }
            return (byte)(headerBytes[offset++] & 0xFF);
        }

        int ReadInt16()
        {
            if (offset + 1 >= headerBytes.Length)
            {
                throw new EndOfStreamException();
            }
            var v = headerBytes[offset] & 0xFF | (headerBytes[offset + 1] & 0xFF) << 8;
            offset += 2;
            return v;
        }

        long ReadInt32()
        {
            if (offset + 3 >= headerBytes.Length)
            {
                throw new EndOfStreamException();
            }
            long v =
                headerBytes[offset] & 0xFF
                | (headerBytes[offset + 1] & 0xFF) << 8
                | (headerBytes[offset + 2] & 0xFF) << 16
                | (headerBytes[offset + 3] & 0xFF) << 24;
            offset += 4;
            return v;
        }

        string ReadNullTerminatedString(byte[] x, int startIndex)
        {
            var result = new StringBuilder();
            int i = startIndex;

            while (i < x.Length && x[i] != 0)
            {
                result.Append((char)x[i]);
                i++;
            }

            // Skip the null terminator
            i++;
            if (i < x.Length)
            {
                byte[] remainder = new byte[x.Length - i];
                Array.Copy(x, i, remainder, 0, remainder.Length);
                x = remainder;
            }

            return result.ToString();
        }

        ArchiverVersionNumber = ReadByte();
        MinVersionToExtract = ReadByte();

        var hostOsByte = ReadByte();
        HostOs = hostOsByte <= 11 ? (HostOS)hostOsByte : HostOS.Unknown;

        Flags = ReadByte();
        SecurityVersion = ReadByte();
        FileType = FileTypeFromByte(ReadByte());

        offset++; // skip reserved

        CreationDateTime = new DosDateTime((int)ReadInt32());
        CompressedSize = ReadInt32();
        ArchiveSize = ReadInt32();

        SecurityEnvelope = ReadInt32();
        FileSpecPosition = ReadInt16();
        SecurityEnvelopeLength = ReadInt16();

        EncryptionVersion = ReadByte();
        LastChapter = ReadByte();

        Name = ReadNullTerminatedString(headerBytes, offset);
        Comment = ReadNullTerminatedString(headerBytes, offset + 1 + Name.Length);

        return this;
    }
}
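A rough usage sketch for the header classes above, assuming a stream positioned at the start of an ARJ archive and assuming ArchiveEncoding's default constructor; real callers normally go through SharpCompress's reader/archive factories rather than these types directly, and the file name here is only a placeholder.

    using System;
    using System.IO;
    using SharpCompress.Common;
    using SharpCompress.Common.Arj.Headers;

    using var stream = File.OpenRead("archive.arj"); // placeholder path
    var main = new ArjMainHeader(new ArchiveEncoding()).Read(stream) as ArjMainHeader;
    if (main is not null)
    {
        Console.WriteLine($"{main.Name} created {main.CreationDateTime} on {main.HostOs}");
    }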
@@ -4,17 +4,16 @@ using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace SharpCompress.Common.Arj.Headers;

public enum CompressionMethod
{
    Stored = 0,
    CompressedMost = 1,
    Compressed = 2,
    CompressedFaster = 3,
    CompressedFastest = 4,
    NoDataNoCrc = 8,
    NoData = 9,
    Unknown,
}
@@ -1,37 +1,36 @@
using System;

namespace SharpCompress.Common.Arj.Headers;

public class DosDateTime
{
    public DateTime DateTime { get; }

    public DosDateTime(long dosValue)
    {
        // Ensure only the lower 32 bits are used
        int value = unchecked((int)(dosValue & 0xFFFFFFFF));

        var date = (value >> 16) & 0xFFFF;
        var time = value & 0xFFFF;

        var day = date & 0x1F;
        var month = (date >> 5) & 0x0F;
        var year = ((date >> 9) & 0x7F) + 1980;

        var second = (time & 0x1F) * 2;
        var minute = (time >> 5) & 0x3F;
        var hour = (time >> 11) & 0x1F;

        try
        {
            DateTime = new DateTime(year, month, day, hour, minute, second);
        }
        catch
        {
            DateTime = DateTime.MinValue;
        }
    }

    public override string ToString() => DateTime.ToString("yyyy-MM-dd HH:mm:ss");
}
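The constructor above unpacks the classic MS-DOS packed date/time: the date word sits in the upper 16 bits (5 bits day, 4 bits month, 7 bits years since 1980) and the time word in the lower 16 (5 bits two-second units, 6 bits minutes, 5 bits hours). A worked example using the class as defined above; the input value is made up for illustration.

    // 0x58CF63CA -> date word 0x58CF (day 15, month 6, year 44 + 1980),
    //               time word 0x63CA (hour 12, minute 30, seconds field 10 * 2 = 20).
    var stamp = new DosDateTime(0x58CF63CA);
    Console.WriteLine(stamp); // prints "2024-06-15 12:30:20"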
@@ -1,13 +1,12 @@
namespace SharpCompress.Common.Arj.Headers;

public enum FileType : byte
{
    Binary = 0,
    Text7Bit = 1,
    CommentHeader = 2,
    Directory = 3,
    VolumeLabel = 4,
    ChapterLabel = 5,
    Unknown = 255,
}
@@ -1,19 +1,18 @@
namespace SharpCompress.Common.Arj.Headers;

public enum HostOS
{
    MsDos = 0,
    PrimOS = 1,
    Unix = 2,
    Amiga = 3,
    MacOs = 4,
    OS2 = 5,
    AppleGS = 6,
    AtariST = 7,
    NeXT = 8,
    VaxVMS = 9,
    Win95 = 10,
    Win32 = 11,
    Unknown = 255,
}
@@ -1,108 +0,0 @@
using System;
using System.Buffers.Binary;
using System.IO;
using System.Threading;
using System.Threading.Tasks;

namespace SharpCompress.Common
{
    public sealed class AsyncBinaryReader : IDisposable
    {
        private readonly Stream _stream;
        private readonly Stream _originalStream;
        private readonly bool _leaveOpen;
        private readonly byte[] _buffer = new byte[8];
        private bool _disposed;

        public AsyncBinaryReader(Stream stream, bool leaveOpen = false, int bufferSize = 4096)
        {
            if (!stream.CanRead)
            {
                throw new ArgumentException("Stream must be readable.");
            }

            _originalStream = stream ?? throw new ArgumentNullException(nameof(stream));
            _leaveOpen = leaveOpen;

            // Use the stream directly without wrapping in BufferedStream
            // BufferedStream uses synchronous Read internally which doesn't work with async-only streams
            // SharpCompress uses SharpCompressStream for buffering which supports true async reads
            _stream = stream;
        }

        public Stream BaseStream => _stream;

        public async ValueTask<byte> ReadByteAsync(CancellationToken ct = default)
        {
            await _stream.ReadExactAsync(_buffer, 0, 1, ct).ConfigureAwait(false);
            return _buffer[0];
        }

        public async ValueTask<ushort> ReadUInt16Async(CancellationToken ct = default)
        {
            await _stream.ReadExactAsync(_buffer, 0, 2, ct).ConfigureAwait(false);
            return BinaryPrimitives.ReadUInt16LittleEndian(_buffer);
        }

        public async ValueTask<uint> ReadUInt32Async(CancellationToken ct = default)
        {
            await _stream.ReadExactAsync(_buffer, 0, 4, ct).ConfigureAwait(false);
            return BinaryPrimitives.ReadUInt32LittleEndian(_buffer);
        }

        public async ValueTask<ulong> ReadUInt64Async(CancellationToken ct = default)
        {
            await _stream.ReadExactAsync(_buffer, 0, 8, ct).ConfigureAwait(false);
            return BinaryPrimitives.ReadUInt64LittleEndian(_buffer);
        }

        public async ValueTask ReadBytesAsync(
            byte[] bytes,
            int offset,
            int count,
            CancellationToken ct = default
        )
        {
            await _stream.ReadExactAsync(bytes, offset, count, ct).ConfigureAwait(false);
        }

        public async ValueTask SkipAsync(int count, CancellationToken ct = default)
        {
            await _stream.SkipAsync(count, ct).ConfigureAwait(false);
        }

        public void Dispose()
        {
            if (_disposed)
            {
                return;
            }

            _disposed = true;

            // Dispose the original stream if we own it
            if (!_leaveOpen)
            {
                _originalStream.Dispose();
            }
        }

#if NET8_0_OR_GREATER
        public async ValueTask DisposeAsync()
        {
            if (_disposed)
            {
                return;
            }

            _disposed = true;

            // Dispose the original stream if we own it
            if (!_leaveOpen)
            {
                await _originalStream.DisposeAsync().ConfigureAwait(false);
            }
        }
#endif
    }
}
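The deleted AsyncBinaryReader read straight from the supplied stream because, per its constructor comment, wrapping in BufferedStream would route async reads through synchronous Read calls. Its ReadExactAsync extension is internal to SharpCompress, but the intended behaviour is the usual read-until-filled loop sketched below (on .NET 7+, Stream.ReadExactlyAsync provides the same thing); the method name here is hypothetical.

    using System.IO;
    using System.Threading;
    using System.Threading.Tasks;

    // Sketch of a "read exactly count bytes" loop over Stream.ReadAsync.
    static async ValueTask ReadExactSketchAsync(
        Stream stream,
        byte[] buffer,
        int offset,
        int count,
        CancellationToken ct = default
    )
    {
        var total = 0;
        while (total < count)
        {
            var read = await stream
                .ReadAsync(buffer, offset + total, count - total, ct)
                .ConfigureAwait(false);
            if (read == 0)
            {
                throw new EndOfStreamException();
            }
            total += read;
        }
    }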
@@ -8,5 +8,34 @@ public static class Constants
     /// </summary>
     public static int BufferSize { get; set; } = 81920;
 
+    /// <summary>
+    /// The default size for rewindable buffers in SharpCompressStream.
+    /// Used for format detection on non-seekable streams.
+    /// </summary>
+    /// <remarks>
+    /// <para>
+    /// When opening archives from non-seekable streams (network streams, pipes,
+    /// compressed streams), SharpCompress uses a ring buffer to enable format
+    /// auto-detection. This buffer allows the library to try multiple decoders
+    /// by rewinding and re-reading the same data.
+    /// </para>
+    /// <para>
+    /// <b>Default:</b> 81920 bytes (81KB) - sufficient for typical format detection.
+    /// </para>
+    /// <para>
+    /// <b>Typical usage:</b> 500-1000 bytes for most archives
+    /// </para>
+    /// <para>
+    /// <b>Can be overridden per-stream via ReaderOptions.RewindableBufferSize.</b>
+    /// </para>
+    /// <para>
+    /// <b>Increase if:</b>
+    /// <list type="bullet">
+    /// <item>Handling self-extracting archives (may need 512KB+)</item>
+    /// <item>Format detection fails with buffer overflow errors</item>
+    /// <item>Using custom formats with large headers</item>
+    /// </list>
+    /// </para>
+    /// </remarks>
+    public static int RewindableBufferSize { get; set; } = 81920;
 }
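If the defaults above are too small (the remarks call out self-extracting archives), the buffer can be raised globally on Constants or per call via ReaderOptions.RewindableBufferSize as referenced in the doc comment. A minimal sketch, with nonSeekableStream standing in for any forward-only stream:

    using SharpCompress.Readers;

    // Constants is the class shown in the hunk above.
    Constants.RewindableBufferSize = 512 * 1024; // global default, e.g. for SFX archives

    // ...or per stream:
    var options = new ReaderOptions { RewindableBufferSize = 512 * 1024 };
    using var reader = ReaderFactory.Open(nonSeekableStream, options);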
@@ -1,6 +1,7 @@
 using System;
 using System.Collections.Generic;
 using System.Linq;
+using SharpCompress.Common.Options;
 
 namespace SharpCompress.Common;
 
@@ -87,4 +88,14 @@ public abstract class Entry : IEntry
     /// Entry file attribute.
     /// </summary>
     public virtual int? Attrib => throw new NotImplementedException();
+
+    /// <summary>
+    /// The options used when opening this entry's source (reader or archive).
+    /// </summary>
+    public IReaderOptions Options { get; protected set; }
+
+    protected Entry(IReaderOptions readerOptions)
+    {
+        Options = readerOptions;
+    }
 }
@@ -42,9 +42,6 @@ public partial class EntryStream
                 await lzmaStream.FlushAsync().ConfigureAwait(false);
             }
         }
-#if DEBUG_STREAMS
-        this.DebugDispose(typeof(EntryStream));
-#endif
         await base.DisposeAsync().ConfigureAwait(false);
         await _stream.DisposeAsync().ConfigureAwait(false);
     }
@@ -8,28 +8,8 @@ using SharpCompress.Readers;
 
 namespace SharpCompress.Common;
 
-public partial class EntryStream : Stream, IStreamStack
+public partial class EntryStream : Stream
 {
-#if DEBUG_STREAMS
-    long IStreamStack.InstanceId { get; set; }
-#endif
-    int IStreamStack.DefaultBufferSize { get; set; }
-
-    Stream IStreamStack.BaseStream() => _stream;
-
-    int IStreamStack.BufferSize
-    {
-        get => 0;
-        set { }
-    }
-    int IStreamStack.BufferPosition
-    {
-        get => 0;
-        set { }
-    }
-
-    void IStreamStack.SetPosition(long position) { }
-
     private readonly IReader _reader;
     private readonly Stream _stream;
     private bool _completed;
@@ -39,9 +19,6 @@ public partial class EntryStream : Stream, IStreamStack
     {
         _reader = reader;
         _stream = stream;
-#if DEBUG_STREAMS
-        this.DebugConstruct(typeof(EntryStream));
-#endif
     }
 
     /// <summary>
@@ -62,24 +39,34 @@ public partial class EntryStream : Stream, IStreamStack
         _isDisposed = true;
         if (!(_completed || _reader.Cancelled))
         {
-            SkipEntry();
+            if (Utility.UseSyncOverAsyncDispose())
+            {
+                SkipEntryAsync().GetAwaiter().GetResult();
+            }
+            else
+            {
+                SkipEntry();
+            }
         }
 
         //Need a safe standard approach to this - it's okay for compression to overreads. Handling needs to be standardised
         if (_stream is IStreamStack ss)
         {
-            if (ss.BaseStream() is SharpCompress.Compressors.Deflate.DeflateStream deflateStream)
+            if (
+                ss.GetStream<SharpCompress.Compressors.Deflate.DeflateStream>()
+                is SharpCompress.Compressors.Deflate.DeflateStream deflateStream
+            )
             {
                 deflateStream.Flush(); //Deflate over reads. Knock it back
             }
-            else if (ss.BaseStream() is SharpCompress.Compressors.LZMA.LzmaStream lzmaStream)
+            else if (
+                ss.GetStream<SharpCompress.Compressors.LZMA.LzmaStream>()
+                is SharpCompress.Compressors.LZMA.LzmaStream lzmaStream
+            )
             {
                 lzmaStream.Flush(); //Lzma over reads. Knock it back
             }
         }
-#if DEBUG_STREAMS
-        this.DebugDispose(typeof(EntryStream));
-#endif
         base.Dispose(disposing);
         _stream.Dispose();
     }
Some files were not shown because too many files have changed in this diff.