Compare commits

...

69 Commits

Author SHA1 Message Date
Adam Hathcock
73fd2d70ba Merge pull request #1184 from adamhathcock/adam/cleanup
Some clean up post-async merging
2026-02-04 16:47:59 +00:00
Adam Hathcock
c026e02390 remove some redundant casts 2026-02-04 16:47:27 +00:00
Adam Hathcock
9676d6ccc9 fix readonly 2026-02-04 16:14:48 +00:00
Adam Hathcock
fad7cf3b1d fix usages of AsyncBinaryReader 2026-02-04 16:07:06 +00:00
Adam Hathcock
17cb952772 use DIspose on BinaryReader 2026-02-04 15:59:22 +00:00
Adam Hathcock
923217ef0e remove memorystream 2026-02-04 15:45:38 +00:00
Adam Hathcock
0d81d7f243 Merge remote-tracking branch 'origin/master' into adam/cleanup 2026-02-04 15:05:12 +00:00
Adam Hathcock
2070ab282f Merge pull request #1183 from adamhathcock/adam/release-merge
release to master merge
2026-02-04 15:01:18 +00:00
Adam Hathcock
faacff414d changed checks to be less than or equal to zero 2026-02-04 14:57:17 +00:00
Adam Hathcock
642b8bddb8 fixed merge 2026-02-04 14:41:55 +00:00
Adam Hathcock
27f7221902 Merge remote-tracking branch 'origin/release' into adam/release-merge
# Conflicts:
#	Directory.Packages.props
#	build/packages.lock.json
#	src/SharpCompress/Common/Zip/WinzipAesEncryptionData.cs
#	src/SharpCompress/SharpCompress.csproj
#	src/SharpCompress/packages.lock.json
#	tests/SharpCompress.Performance/packages.lock.json
#	tests/SharpCompress.Test/SharpCompress.Test.csproj
#	tests/SharpCompress.Test/packages.lock.json
2026-02-04 14:35:40 +00:00
Adam Hathcock
eb738b44a8 clean up naming 2026-02-04 14:32:50 +00:00
Adam Hathcock
57c0d00b37 rename Rewindable to SharpCompressStream 2026-02-04 14:12:04 +00:00
Adam Hathcock
43276b32b7 better sync over async 2026-02-04 13:25:05 +00:00
Adam Hathcock
94b275c41b Merge branch 'master' into adam/cleanup
# Conflicts:
#	src/SharpCompress/Common/EntryStream.cs
2026-02-04 12:55:46 +00:00
Adam Hathcock
ae4ae799b9 merge NonDisposingStream into RewindableStream 2026-02-04 12:40:39 +00:00
Adam Hathcock
9def35f78a Merge pull request #1174 from adamhathcock/adam/merge-release-to-master
merge release to master
2026-02-04 12:37:59 +00:00
Adam Hathcock
7e8005a9d8 fmt 2026-02-04 11:27:03 +00:00
Adam Hathcock
b93ed79ef3 another sync over async 2026-02-04 11:26:47 +00:00
Adam Hathcock
c7b8021c2e remove extra debug 2026-02-04 10:08:50 +00:00
Adam Hathcock
3af0e1091c remove the rar flag 2026-02-04 09:46:19 +00:00
Adam Hathcock
1323c96bc8 remove more scoped namespaces 2026-02-04 09:25:48 +00:00
Adam Hathcock
94716a5ba9 add sync over async dispose 2026-02-04 09:20:43 +00:00
Adam Hathcock
f67168f479 try to fix test 2026-02-04 08:40:43 +00:00
Adam Hathcock
7e54f91bfa fmt 2026-02-04 08:31:58 +00:00
Adam Hathcock
3ab4478275 use ringbuffer 2026-02-04 08:30:02 +00:00
Adam Hathcock
8759cf08ff tests all pass 2026-02-04 08:20:05 +00:00
Adam Hathcock
8cff7cb551 rewinding works more? 2026-02-03 17:21:29 +00:00
Adam Hathcock
1b0ec2410d fix deflate rewinding? 2026-02-03 17:07:39 +00:00
Adam Hathcock
22d15f73f0 Merge pull request #1181 from adamhathcock/adam/add-aot
Add AOT to props and clean up in release
2026-02-03 16:55:59 +00:00
Adam Hathcock
4e0d78d6c8 update desc 2026-02-03 16:41:19 +00:00
Adam Hathcock
63a1927838 Merge pull request #1182 from adamhathcock/copilot/sub-pr-1181
[WIP] WIP address feedback on AOT props and cleanup
2026-02-03 16:32:02 +00:00
copilot-swe-agent[bot]
3d745bfa05 Fix invalid TFM: change netstandard20 to netstandard2.0
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-02-03 16:30:47 +00:00
copilot-swe-agent[bot]
ce26d50792 Initial plan 2026-02-03 16:28:28 +00:00
Adam Hathcock
01e162fcc4 properly add netstandard 2 support 2026-02-03 16:17:58 +00:00
Adam Hathcock
443f7b8b0c Add AOT to props and clean up in release 2026-02-03 16:13:15 +00:00
Adam Hathcock
08d64ee8a1 format 2026-02-03 14:13:57 +00:00
Adam Hathcock
eedc7c7a0f Arj async passes with header fix 2026-02-03 09:29:12 +00:00
Adam Hathcock
3198b32008 Arc is now async 2026-02-03 09:03:33 +00:00
Adam Hathcock
dff17a95e8 new fix for RewindableStream with tests 2026-02-03 08:56:35 +00:00
Adam Hathcock
236ee215b9 fix async parts of arc 2026-02-03 08:14:25 +00:00
Adam Hathcock
6af612fd54 fix ace async 2026-02-02 17:26:37 +00:00
Adam Hathcock
361e695380 non-async ace works 2026-02-02 14:38:16 +00:00
Adam Hathcock
8a8784a974 async xz and tar completed 2026-02-02 13:28:13 +00:00
Adam Hathcock
ddc01527bd async filtering for lzma and others 2026-02-02 12:58:13 +00:00
Adam Hathcock
e6ad44def8 more async streams 2026-02-02 12:39:36 +00:00
Adam Hathcock
df63e152c1 Merge pull request #1178 from adamhathcock/copilot/fix-infinite-loop-rar-archive-again
Fix infinite loop in SourceStream.Seek for malformed archives
2026-02-02 10:57:10 +00:00
copilot-swe-agent[bot]
ad7e64ba43 Fix test to use correct RarArchive API - all RAR tests passing
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-02-02 09:33:37 +00:00
copilot-swe-agent[bot]
8737b7a38e Apply infinite loop fix to SourceStream.cs and add test case
Co-authored-by: adamhathcock <527620+adamhathcock@users.noreply.github.com>
2026-02-02 09:31:38 +00:00
copilot-swe-agent[bot]
13199fcfd1 Initial plan 2026-02-02 09:28:50 +00:00
Adam Hathcock
9b8e3d8530 remove some tests 2026-02-02 08:52:21 +00:00
Adam Hathcock
0b15b60506 remove some extra markdown 2026-02-01 13:19:21 +00:00
Adam Hathcock
46e2ea8507 more async fixes 2026-02-01 09:38:46 +00:00
Adam Hathcock
62b8fc92d1 not sure I like this fix 2026-02-01 08:47:17 +00:00
Adam Hathcock
cb27b117b4 remove IStreamStack from non specialized streams 2026-01-31 19:03:49 +00:00
Adam Hathcock
6112b2d1d9 recording isn't great here but it works better 2026-01-31 17:42:32 +00:00
Adam Hathcock
0e2f8068a6 async crypto 2026-01-31 15:56:44 +00:00
Adam Hathcock
227e70926b fmt 2026-01-31 15:53:52 +00:00
Adam Hathcock
86e412cf77 more fixes? 2026-01-31 15:44:09 +00:00
Adam Hathcock
037b6842bf remove SharpCompressStream 2026-01-31 15:29:34 +00:00
Adam Hathcock
895dd02830 another fix 2026-01-31 14:20:01 +00:00
Adam Hathcock
7112dba345 some shrink fixes 2026-01-31 13:56:58 +00:00
Adam Hathcock
0767292bb0 ReduceStream is async 2026-01-31 13:19:10 +00:00
Adam Hathcock
b40e1a002a Merge remote-tracking branch 'origin/adam/data-descriptor-fix' into adam/more-explode-async 2026-01-31 11:18:42 +00:00
Adam Hathcock
c096164486 add shrink stream async 2026-01-31 11:18:16 +00:00
Adam Hathcock
b48e938c98 finish PPMD? 2026-01-30 13:46:30 +00:00
Adam Hathcock
4ed1f89866 more ppmd async 2026-01-30 13:19:24 +00:00
Adam Hathcock
525bcea989 ppmd create 2026-01-30 12:37:21 +00:00
Adam Hathcock
6c3f7c86da lzma works with zip 2026-01-30 12:28:01 +00:00
250 changed files with 11358 additions and 9958 deletions

View File

@@ -1,7 +0,0 @@
enabled: true
agent:
name: copilot-coding-agent
allow:
- paths: ["src/**/*", "tests/**/*", "README.md", "AGENTS.md"]
actions: ["create", "modify"]
require_review_before_merge: true

View File

@@ -1,17 +0,0 @@
enabled: true
agent:
name: copilot-coding-agent
allow:
- paths: ["src/**/*", "tests/**/*", "README.md", "AGENTS.md"]
actions: ["create", "modify", "delete"]
require_review_before_merge: true
required_approvals: 1
allowed_merge_strategies:
- squash
- merge
auto_merge_on_green: false
run_workflows: true
notes: |
- This manifest expresses the policy for the Copilot coding agent in this repository.
- It does NOT install or authorize the agent; a repository admin must install the Copilot coding agent app and grant the repository the necessary permissions (contents: write, pull_requests: write, checks: write, actions: write/read, issues: write) to allow the agent to act.
- Keep allow paths narrow and prefer require_review_before_merge during initial rollout.

View File

@@ -1,25 +0,0 @@
# Plan: Implement Missing Async Functionality in SharpCompress
SharpCompress has async support for low-level stream operations and Reader/Writer APIs, but critical entry points (Archive.Open, factory methods, initialization) remain synchronous. This plan adds async overloads for all user-facing I/O operations and fixes existing async bugs, enabling full end-to-end async workflows.
## Steps
1. **Add async factory methods** to [ArchiveFactory.cs](src/SharpCompress/Factories/ArchiveFactory.cs), [ReaderFactory.cs](src/SharpCompress/Factories/ReaderFactory.cs), and [WriterFactory.cs](src/SharpCompress/Factories/WriterFactory.cs) with `OpenAsync` and `CreateAsync` overloads accepting `CancellationToken` (see the sketch after this list)
2. **Implement async Open methods** on concrete archive types ([ZipArchive.cs](src/SharpCompress/Archives/Zip/ZipArchive.cs), [TarArchive.cs](src/SharpCompress/Archives/Tar/TarArchive.cs), [RarArchive.cs](src/SharpCompress/Archives/Rar/RarArchive.cs), [GZipArchive.cs](src/SharpCompress/Archives/GZip/GZipArchive.cs), [SevenZipArchive.cs](src/SharpCompress/Archives/SevenZip/SevenZipArchive.cs)) and reader types ([ZipReader.cs](src/SharpCompress/Readers/Zip/ZipReader.cs), [TarReader.cs](src/SharpCompress/Readers/Tar/TarReader.cs), etc.)
3. **Convert archive initialization logic to async** including header reading, volume loading, and format signature detection across archive constructors and internal initialization methods
4. **Fix LZMA decoder async bugs** in [LzmaStream.cs](src/SharpCompress/Compressors/LZMA/LzmaStream.cs), [Decoder.cs](src/SharpCompress/Compressors/LZMA/Decoder.cs), and [OutWindow.cs](src/SharpCompress/Compressors/LZMA/OutWindow.cs) to enable true async 7Zip support and remove `NonDisposingStream` workaround
5. **Complete Rar async implementation** by converting `UnpackV2017` methods to async in [UnpackV2017.cs](src/SharpCompress/Compressors/Rar/UnpackV2017.cs) and updating Rar20 decompression
6. **Add comprehensive async tests** covering all new async entry points, cancellation scenarios, and concurrent operations across all archive formats in test files
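A minimal sketch of the Step 1 overload, assuming the `FindFactoryAsync`/`OpenAsyncArchive` helpers that appear in the `ArchiveFactory` diff further down in this compare; names and signatures are illustrative, not final API:
```csharp
// Hedged sketch only: helper names are assumptions drawn from the
// ArchiveFactory diff shown later in this compare.
public static async ValueTask<IArchive> OpenArchiveAsync(
    Stream stream,
    ReaderOptions? readerOptions = null,
    CancellationToken cancellationToken = default
)
{
    readerOptions ??= new ReaderOptions();
    // Probe the stream once, then delegate to the matching format factory.
    var factory = await FindFactoryAsync<IArchiveFactory>(stream, cancellationToken);
    return factory.OpenAsyncArchive(stream, readerOptions);
}
```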
## Further Considerations
1. **Breaking changes** - Should new async methods be added alongside existing sync methods (non-breaking), or should sync methods eventually be deprecated? Recommend additive approach for backward compatibility.
2. **Performance impact** - Headers for formats like Zip/Tar are often small; consider whether truly async parsing adds value over sync parsing wrapped in a `Task`, or make the behavior conditional on stream type (network vs file).
3. **7Zip complexity** - The LZMA async bug fix (Step 4) may be challenging due to state management in the decoder; consider whether to scope it separately or implement a simpler workaround that maintains correctness.

View File

@@ -1,123 +0,0 @@
# Plan: Modernize SharpCompress Public API
Based on comprehensive analysis, the API has several inconsistencies around factory patterns, async support, format capabilities, and options classes. Most improvements can be done incrementally without breaking changes.
## Steps
1. **Standardize factory patterns** by deprecating format-specific static `Open` methods in [Archives/Zip/ZipArchive.cs](src/SharpCompress/Archives/Zip/ZipArchive.cs), [Archives/Tar/TarArchive.cs](src/SharpCompress/Archives/Tar/TarArchive.cs), etc. in favor of centralized [Factories/ArchiveFactory.cs](src/SharpCompress/Factories/ArchiveFactory.cs)
2. **Complete async implementation** in [Writers/Zip/ZipWriter.cs](src/SharpCompress/Writers/Zip/ZipWriter.cs) and other writers that currently use sync-over-async, implementing true async I/O throughout the writer hierarchy
3. **Unify options classes** by making [Common/ExtractionOptions.cs](src/SharpCompress/Common/ExtractionOptions.cs) inherit from `OptionsBase` and adding progress reporting to extraction methods consistently
4. **Clarify GZip semantics** in [Archives/GZip/GZipArchive.cs](src/SharpCompress/Archives/GZip/GZipArchive.cs) by adding XML documentation explaining single-entry limitation and relationship to GZip compression used in Tar.gz
## Further Considerations
1. **Breaking changes roadmap** - Should we plan a major version (2.0) to remove deprecated factory methods, clean up `ArchiveType` enum (remove Arc/Arj or add full support), and consolidate naming patterns?
2. **Progress reporting consistency** - Should `IProgress<ArchiveExtractionProgress<IEntry>>` be added to all extraction extension methods or consolidated into options classes?
## Detailed Analysis
### Factory Pattern Issues
Three different factory patterns exist with overlapping functionality:
1. **Static Factories**: ArchiveFactory, ReaderFactory, WriterFactory
2. **Instance Factories**: IArchiveFactory, IReaderFactory, IWriterFactory
3. **Format-specific static methods**: Each archive class has static `Open` methods
**Example confusion:**
```csharp
// Three ways to open a Zip archive - which is recommended?
var archive1 = ArchiveFactory.Open("file.zip");
var archive2 = ZipArchive.Open("file.zip");
var archive3 = ArchiveFactory.AutoFactory.Open(fileInfo, options);
```
### Async Support Gaps
Base `IWriter` interface has async methods, but writer implementations provide minimal async support. Most writers just call synchronous methods:
```csharp
public virtual async Task WriteAsync(...)
{
// Default implementation calls synchronous version
Write(filename, source, modificationTime);
await Task.CompletedTask.ConfigureAwait(false);
}
```
Real async implementations are rare:
- `TarWriter` - proper async implementation
- Most other writers - sync-over-async
### GZip Archive Special Case
GZip is treated as both a compression format and an archive format, but only supports single-entry archives:
```csharp
protected override GZipArchiveEntry CreateEntryInternal(...)
{
if (Entries.Any())
{
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
}
// ...
}
```
### Options Class Hierarchy
```
OptionsBase (LeaveStreamOpen, ArchiveEncoding)
├─ ReaderOptions (LookForHeader, Password, DisableCheckIncomplete, BufferSize, ExtensionHint, Progress)
├─ WriterOptions (CompressionType, CompressionLevel, Progress)
│ ├─ ZipWriterOptions (ArchiveComment, UseZip64)
│ ├─ TarWriterOptions (FinalizeArchiveOnClose, HeaderFormat)
│ └─ GZipWriterOptions (no additional properties)
└─ ExtractionOptions (standalone - Overwrite, ExtractFullPath, PreserveFileTime, PreserveAttributes)
```
**Issues:**
- `ExtractionOptions` doesn't inherit from `OptionsBase` - no encoding support during extraction
- Progress reporting inconsistency between readers and extraction
- Obsolete properties (`ChecksumIsValid`, `Version`) with unclear migration path
### Implementation Priorities
**High Priority (Non-Breaking):**
1. Add API usage guide (Archive vs Reader, factory recommendations, async best practices)
2. Fix progress reporting consistency
3. Complete async implementation in writers
**Medium Priority (Next Major Version):**
1. Unify factory pattern - deprecate format-specific static `Open` methods
2. Clean up options classes - make `ExtractionOptions` inherit from `OptionsBase`
3. Clarify archive types - remove Arc/Arj from `ArchiveType` enum or add full support
4. Standardize naming across archive types
**Low Priority:**
1. Add BZip2 archive support similar to GZipArchive
2. Complete obsolete property cleanup with migration guide
### Backward Compatibility Strategy
**Safe (Non-Breaking) Changes:**
- Add new methods to interfaces (use default implementations)
- Add new options properties (with defaults)
- Add new factory methods
- Improve async implementations
- Add progress reporting support
**Breaking Changes to Avoid:**
- ❌ Removing format-specific `Open` methods (deprecate instead)
- ❌ Changing `LeaveStreamOpen` default (currently `true`)
- ❌ Removing obsolete properties before major version bump
- ❌ Changing return types or signatures of existing methods
**Deprecation Pattern** (illustrated below):
- Use `[Obsolete]` for one major version
- Use `[EditorBrowsable(EditorBrowsableState.Never)]` in next major version
- Remove in following major version
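A self-contained illustration of the three-step pattern (type names and message text are hypothetical, not the project's actual wording):
```csharp
using System;
using System.ComponentModel;

public static class LegacyOpenExample
{
    // Major version N: mark the format-specific entry point obsolete.
    [Obsolete("Use ArchiveFactory.Open instead.")]
    public static void Open() => OpenViaFactory();

    // Major version N+1: keep the warning and hide it from IntelliSense.
    [Obsolete("Use ArchiveFactory.Open instead.")]
    [EditorBrowsable(EditorBrowsableState.Never)]
    public static void OpenHidden() => OpenViaFactory();

    // Major version N+2: the member is removed entirely.
    private static void OpenViaFactory() { /* forward to the central factory */ }
}
```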

View File

@@ -3,18 +3,17 @@
<PackageVersion Include="Bullseye" Version="6.1.0" />
<PackageVersion Include="AwesomeAssertions" Version="9.3.0" />
<PackageVersion Include="Glob" Version="1.1.9" />
<PackageVersion Include="JetBrains.Profiler.SelfApi" Version="2.5.15" />
<PackageVersion Include="JetBrains.Profiler.SelfApi" Version="2.5.16" />
<PackageVersion Include="Microsoft.Bcl.AsyncInterfaces" Version="10.0.0" />
<PackageVersion Include="Microsoft.NET.ILLink.Task" Version="10.0.0" />
<PackageVersion Include="Microsoft.NET.Test.Sdk" Version="18.0.1" />
<PackageVersion Include="Mono.Posix.NETStandard" Version="1.0.0" />
<PackageVersion Include="SimpleExec" Version="13.0.0" />
<PackageVersion Include="System.Text.Encoding.CodePages" Version="10.0.0" />
<PackageVersion Include="System.Buffers" Version="4.6.1" />
<PackageVersion Include="System.Memory" Version="4.6.3" />
<PackageVersion Include="xunit.v3" Version="3.2.1" />
<PackageVersion Include="xunit.v3" Version="3.2.2" />
<PackageVersion Include="xunit.runner.visualstudio" Version="3.1.5" />
<GlobalPackageReference Include="Microsoft.SourceLink.GitHub" Version="8.0.0" />
<GlobalPackageReference Include="Microsoft.SourceLink.GitHub" Version="10.0.102" />
<GlobalPackageReference Include="Microsoft.NETFramework.ReferenceAssemblies" Version="1.0.3" />
<GlobalPackageReference
Include="Microsoft.VisualStudio.Threading.Analyzers"

View File

@@ -25,12 +25,12 @@
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==",
"requested": "[10.0.102, )",
"resolved": "10.0.102",
"contentHash": "Oxq3RCIJSdtpIU4hLqO7XaDe/Ra3HS9Wi8rJl838SAg6Zu1iQjerA0+xXWBgUFYbgknUGCLOU0T+lzMLkvY9Qg==",
"dependencies": {
"Microsoft.Build.Tasks.Git": "8.0.0",
"Microsoft.SourceLink.Common": "8.0.0"
"Microsoft.Build.Tasks.Git": "10.0.102",
"Microsoft.SourceLink.Common": "10.0.102"
}
},
"Microsoft.VisualStudio.Threading.Analyzers": {
@@ -47,8 +47,8 @@
},
"Microsoft.Build.Tasks.Git": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
"resolved": "10.0.102",
"contentHash": "0i81LYX31U6UiXz4NOLbvc++u+/mVDmOt+PskrM/MygpDxkv9THKQyRUmavBpLK6iBV0abNWnn+CQgSRz//Pwg=="
},
"Microsoft.NETFramework.ReferenceAssemblies.net461": {
"type": "Transitive",
@@ -57,8 +57,8 @@
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
"resolved": "10.0.102",
"contentHash": "Mk1IMb9q5tahC2NltxYXFkLBtuBvfBoCQ3pIxYQWfzbCE9o1OB9SsHe0hnNGo7lWgTA/ePbFAJLWu6nLL9K17A=="
}
}
}

View File

@@ -1,129 +0,0 @@
# DataDescriptorStream and RewindableStream Fix
## Summary
Fixed the `Zip_Uncompressed_Read_All` test failure caused by incompatibility between `DataDescriptorStream` seeking requirements and the new `RewindableStream` wrapper used in `StreamingZipHeaderFactory`.
## Problem Description
### Symptom
The test `Zip_Uncompressed_Read_All` was failing with:
```
System.NotSupportedException : Cannot seek outside buffered region.
```
### Root Cause
The issue had two related aspects:
#### 1. Double-Wrapping of RewindableStream
`StreamingZipHeaderFactory.ReadStreamHeader()` was creating a new `RewindableStream` wrapper:
```csharp
var rewindableStream = new RewindableStream(stream);
```
Because `ReaderFactory.OpenReader()` already wraps the input stream with `SeekableRewindableStream` (for seekable streams), this resulted in double-wrapping:
```
DataDescriptorStream
-> NonDisposingStream
-> RewindableStream (new, plain) <-- created by ReadStreamHeader
-> SeekableRewindableStream <-- created by ReaderFactory
-> FileStream
```
The inner plain `RewindableStream` lost the seeking capability of `SeekableRewindableStream`.
#### 2. Recording State Interference
Even after fixing the double-wrapping using `RewindableStream.EnsureSeekable()`, there was another issue:
`StreamingZipHeaderFactory.ReadStreamHeader()` contains code to peek ahead when checking for zero-length files with `UsePostDataDescriptor`:
```csharp
rewindableStream.StartRecording();
var nextHeaderBytes = reader.ReadUInt32();
rewindableStream.Rewind(true);
```
This code was interfering with the recording state that `ReaderFactory.OpenReader()` had set up:
1. `ReaderFactory.OpenReader()` calls `bStream.StartRecording()` at position 0
2. Factory detection calls `StreamingZipHeaderFactory.ReadStreamHeader()` via `IsZipFile()`
3. Inside `ReadStreamHeader`, the above code overwrites the recorded position
4. `Rewind(true)` stops recording and seeks to the wrong position
5. When control returns to `Factory.TryOpenReader()`, it calls `stream.Rewind(true)`, but recording is already stopped, so nothing happens
6. The stream position is not at the beginning, causing subsequent reads to fail
## Solution
### Fix 1: Use EnsureSeekable instead of new RewindableStream
Changed `StreamingZipHeaderFactory.ReadStreamHeader()` to use:
```csharp
var rewindableStream = RewindableStream.EnsureSeekable(stream);
```
This method (sketched below):
- Returns the existing `RewindableStream` if the stream is already one (avoids double-wrapping)
- Creates a `SeekableRewindableStream` if the underlying stream is seekable
- Creates a plain `RewindableStream` only for non-seekable streams
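A hedged sketch of that three-way selection (the real `EnsureSeekable` implementation may differ; the constructor shapes are assumed):
```csharp
// Sketch of the selection logic described above, not the actual code.
public static RewindableStream EnsureSeekable(Stream stream) =>
    stream switch
    {
        RewindableStream rewindable => rewindable, // already wrapped: avoid double-wrapping
        { CanSeek: true } => new SeekableRewindableStream(stream), // full seeking via underlying stream
        _ => new RewindableStream(stream), // buffer-backed, limited seeking
    };
```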
### Fix 2: Use direct position save/restore for SeekableRewindableStream
For the peek-ahead logic, changed the code to check for `SeekableRewindableStream` specifically and use direct position manipulation:
```csharp
if (rewindableStream is SeekableRewindableStream)
{
// Direct position save/restore avoids interfering with caller's recording state
var savedPosition = rewindableStream.Position;
var nextHeaderBytes = reader.ReadUInt32();
rewindableStream.Position = savedPosition;
header.HasData = !IsHeader(nextHeaderBytes);
}
else
{
// Plain RewindableStream was created fresh by EnsureSeekable, safe to use recording
rewindableStream.StartRecording();
var nextHeaderBytes = reader.ReadUInt32();
rewindableStream.Rewind(true);
header.HasData = !IsHeader(nextHeaderBytes);
}
```
This approach:
- For `SeekableRewindableStream` (reused from caller): Uses direct position save/restore to avoid clobbering the caller's recording state
- For plain `RewindableStream` (freshly created): Uses the recording mechanism which is safe since the stream isn't shared
## Files Changed
- `src/SharpCompress/Common/Zip/StreamingZipHeaderFactory.cs`
- `src/SharpCompress/Common/Zip/StreamingZipHeaderFactory.Async.cs`
## Design Notes
### Why not fix RewindableStream.CanSeek?
`RewindableStream.CanSeek` returns `true` even though it can only seek within its buffered region. We considered changing this to `false`, but:
1. It would be a breaking change for existing code that relies on `CanSeek`
2. The `RewindableStream` does provide limited seeking capability (within buffer)
3. Checking for `SeekableRewindableStream` specifically is more precise
### Stream Wrapper Hierarchy
Understanding the stream wrapper hierarchy is crucial:
**For seekable source streams (e.g., FileStream):**
```
SeekableRewindableStream (full seeking via underlying stream)
-> FileStream
```
**For non-seekable source streams (e.g., decompression streams):**
```
RewindableStream (limited seeking via buffer)
-> DecompressionStream
```
`DataDescriptorStream` needs backward seeking to position the stream correctly after finding the data descriptor marker. This is why proper stream wrapper selection matters.

View File

@@ -20,7 +20,6 @@ public static partial class ArchiveFactory
)
{
readerOptions ??= new ReaderOptions();
stream = SharpCompressStream.Create(stream, bufferSize: readerOptions.BufferSize);
var factory = await FindFactoryAsync<IArchiveFactory>(stream, cancellationToken);
return factory.OpenAsyncArchive(stream, readerOptions);
}

View File

@@ -16,7 +16,6 @@ public static partial class ArchiveFactory
public static IArchive OpenArchive(Stream stream, ReaderOptions? readerOptions = null)
{
readerOptions ??= new ReaderOptions();
stream = SharpCompressStream.Create(stream, bufferSize: readerOptions.BufferSize);
return FindFactory<IArchiveFactory>(stream).OpenArchive(stream, readerOptions);
}

View File

@@ -58,7 +58,7 @@ internal sealed class GZipWritableArchiveEntry : GZipArchiveEntry, IWritableArch
{
//ensure new stream is at the start, this could be reset
stream.Seek(0, SeekOrigin.Begin);
return SharpCompressStream.Create(stream, leaveOpen: true);
return SharpCompressStream.CreateNonDisposing(stream);
}
internal override void Close()

View File

@@ -46,7 +46,13 @@ public static class IArchiveEntryExtensions
throw new ExtractionException("Entry is a file directory and cannot be extracted.");
}
#if LEGACY_DOTNET
using var entryStream = await archiveEntry.OpenEntryStreamAsync(cancellationToken);
#else
await using var entryStream = await archiveEntry.OpenEntryStreamAsync(
cancellationToken
);
#endif
var sourceStream = WrapWithProgress(entryStream, archiveEntry, progress);
await sourceStream
.CopyToAsync(streamToWriteTo, Constants.BufferSize, cancellationToken)

View File

@@ -176,7 +176,11 @@ public partial class TarArchive
try
{
var tarHeader = new TarHeader(new ArchiveEncoding());
var reader = new AsyncBinaryReader(stream, false);
#if NET8_0_OR_GREATER
await using var reader = new AsyncBinaryReader(stream, leaveOpen: true);
#else
using var reader = new AsyncBinaryReader(stream, leaveOpen: true);
#endif
var readSucceeded = await tarHeader.ReadAsync(reader);
var isEmptyArchive =
tarHeader.Name?.Length == 0

View File

@@ -14,8 +14,9 @@ public class TarArchiveEntry : TarEntry, IArchiveEntry
public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream().NotNull();
public ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default) =>
new(OpenEntryStream());
public async ValueTask<Stream> OpenEntryStreamAsync(
CancellationToken cancellationToken = default
) => (await Parts.Single().GetCompressedStreamAsync(cancellationToken)).NotNull();
#region IArchiveEntry Members

View File

@@ -79,7 +79,7 @@ internal sealed class TarWritableArchiveEntry : TarArchiveEntry, IWritableArchiv
}
//ensure new stream is at the start, this could be reset
stream.Seek(0, SeekOrigin.Begin);
return SharpCompressStream.Create(stream, leaveOpen: true);
return SharpCompressStream.CreateNonDisposing(stream);
}
internal override void Close()

View File

@@ -177,11 +177,6 @@ public partial class ZipArchive
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: Constants.BufferSize);
}
var header = headerFactory
.ReadStreamHeader(stream)
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
@@ -220,11 +215,6 @@ public partial class ZipArchive
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: Constants.BufferSize);
}
var header = await headerFactory
.ReadStreamHeaderAsync(stream)
.Where(x => x.ZipHeaderType != ZipHeaderType.Split)
@@ -259,11 +249,6 @@ public partial class ZipArchive
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
try
{
if (stream is not SharpCompressStream)
{
stream = new SharpCompressStream(stream, bufferSize: Constants.BufferSize);
}
var header = headerFactory
.ReadStreamHeader(stream)
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);

View File

@@ -35,7 +35,7 @@ public partial class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVo
protected override IEnumerable<ZipVolume> LoadVolumes(SourceStream stream)
{
stream.LoadAllParts();
stream.Position = 0;
//stream.Position = 0;
var streams = stream.Streams.ToList();
var idx = 0;
@@ -156,7 +156,7 @@ public partial class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVo
protected override IReader CreateReaderForSolidExtraction()
{
var stream = Volumes.Single().Stream;
((IStreamStack)stream).StackSeek(0);
//stream.Position = 0;
return ZipReader.OpenReader(stream, ReaderOptions, Entries);
}

View File

@@ -80,7 +80,7 @@ internal class ZipWritableArchiveEntry : ZipArchiveEntry, IWritableArchiveEntry
}
//ensure new stream is at the start, this could be reset
stream.Seek(0, SeekOrigin.Begin);
return SharpCompressStream.Create(stream, leaveOpen: true);
return SharpCompressStream.CreateNonDisposing(stream);
}
internal override void Close()

View File

@@ -4,62 +4,61 @@ using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace SharpCompress.Common.Ace
namespace SharpCompress.Common.Ace;
public class AceCrc
{
    // CRC-32 lookup table (standard polynomial 0xEDB88320, reflected)
    private static readonly uint[] Crc32Table = GenerateTable();
    private static uint[] GenerateTable()
    {
        var table = new uint[256];
        for (int i = 0; i < 256; i++)
        {
            uint crc = (uint)i;
            for (int j = 0; j < 8; j++)
            {
                if ((crc & 1) != 0)
                {
                    crc = (crc >> 1) ^ 0xEDB88320u;
                }
                else
                {
                    crc >>= 1;
                }
            }
            table[i] = crc;
        }
        return table;
    }
    /// <summary>
    /// Calculate ACE CRC-32 checksum.
    /// ACE CRC-32 uses standard CRC-32 polynomial (0xEDB88320, reflected)
    /// with init=0xFFFFFFFF but NO final XOR.
    /// </summary>
    public static uint AceCrc32(ReadOnlySpan<byte> data)
    {
        uint crc = 0xFFFFFFFFu;
        foreach (byte b in data)
        {
            crc = (crc >> 8) ^ Crc32Table[(crc ^ b) & 0xFF];
        }
        return crc; // No final XOR for ACE
    }
    /// <summary>
    /// ACE CRC-16 is the lower 16 bits of the ACE CRC-32.
    /// </summary>
    public static ushort AceCrc16(ReadOnlySpan<byte> data)
    {
        return (ushort)(AceCrc32(data) & 0xFFFF);
    }
}

View File

@@ -6,63 +6,62 @@ using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common.Ace.Headers;
namespace SharpCompress.Common.Ace
namespace SharpCompress.Common.Ace;
public class AceEntry : Entry
{
    private readonly AceFilePart _filePart;
    internal AceEntry(AceFilePart filePart)
    {
        _filePart = filePart;
    }
    public override long Crc
    {
        get
        {
            if (_filePart == null)
            {
                return 0;
            }
            return _filePart.Header.Crc32;
        }
    }
    public override string? Key => _filePart?.Header.Filename;
    public override string? LinkTarget => null;
    public override long CompressedSize => _filePart?.Header.PackedSize ?? 0;
    public override CompressionType CompressionType
    {
        get
        {
            if (_filePart.Header.CompressionType == Headers.CompressionType.Stored)
            {
                return CompressionType.None;
            }
            return CompressionType.AceLZ77;
        }
    }
    public override long Size => _filePart?.Header.OriginalSize ?? 0;
    public override DateTime? LastModifiedTime => _filePart.Header.DateTime;
    public override DateTime? CreatedTime => null;
    public override DateTime? LastAccessedTime => null;
    public override DateTime? ArchivedTime => null;
    public override bool IsEncrypted => _filePart.Header.IsFileEncrypted;
    public override bool IsDirectory => _filePart.Header.IsDirectory;
    public override bool IsSplitAfter => false;
    internal override IEnumerable<FilePart> Parts => _filePart.Empty();
}

View File

@@ -7,46 +7,45 @@ using System.Threading.Tasks;
using SharpCompress.Common.Ace.Headers;
using SharpCompress.IO;
namespace SharpCompress.Common.Ace
namespace SharpCompress.Common.Ace;
public class AceFilePart : FilePart
{
    private readonly Stream _stream;
    internal AceFileHeader Header { get; set; }
    internal AceFilePart(AceFileHeader localAceHeader, Stream seekableStream)
        : base(localAceHeader.ArchiveEncoding)
    {
        _stream = seekableStream;
        Header = localAceHeader;
    }
    internal override string? FilePartName => Header.Filename;
    internal override Stream GetCompressedStream()
    {
        if (_stream != null)
        {
            Stream compressedStream;
            switch (Header.CompressionType)
            {
                case Headers.CompressionType.Stored:
                    compressedStream = new ReadOnlySubStream(
                        _stream,
                        Header.DataStartPosition,
                        Header.PackedSize
                    );
                    break;
                default:
                    throw new NotSupportedException(
                        "CompressionMethod: " + Header.CompressionQuality
                    );
            }
            return compressedStream;
        }
        return _stream.NotNull();
    }
    internal override Stream? GetRawStream() => _stream;
}

View File

@@ -7,29 +7,28 @@ using System.Threading.Tasks;
using SharpCompress.Common.Arj;
using SharpCompress.Readers;
namespace SharpCompress.Common.Ace
namespace SharpCompress.Common.Ace;
public class AceVolume : Volume
{
    public AceVolume(Stream stream, ReaderOptions readerOptions, int index = 0)
        : base(stream, readerOptions, index) { }
    public override bool IsFirstVolume
    {
        get { return true; }
    }
    /// <summary>
    /// ArjArchive is part of a multi-part archive.
    /// </summary>
    public override bool IsMultiVolume
    {
        get { return false; }
    }
    internal IEnumerable<AceFilePart> GetVolumeFileParts()
    {
        return new List<AceFilePart>();
    }
}

View File

@@ -7,169 +7,168 @@ using System.Threading.Tasks;
using System.Xml.Linq;
using SharpCompress.Common.Arc;
namespace SharpCompress.Common.Ace.Headers
namespace SharpCompress.Common.Ace.Headers;
/// <summary>
/// ACE file entry header
/// </summary>
public sealed partial class AceFileHeader : AceHeader
{
    public long DataStartPosition { get; private set; }
    public long PackedSize { get; set; }
    public long OriginalSize { get; set; }
    public DateTime DateTime { get; set; }
    public int Attributes { get; set; }
    public uint Crc32 { get; set; }
    public CompressionType CompressionType { get; set; }
    public CompressionQuality CompressionQuality { get; set; }
    public ushort Parameters { get; set; }
    public string Filename { get; set; } = string.Empty;
    public List<byte> Comment { get; set; } = new();
    /// <summary>
    /// File data offset in the archive
    /// </summary>
    public ulong DataOffset { get; set; }
    public bool IsDirectory => (Attributes & 0x10) != 0;
    public bool IsContinuedFromPrev =>
        (HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.CONTINUED_PREV) != 0;
    public bool IsContinuedToNext =>
        (HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.CONTINUED_NEXT) != 0;
    public int DictionarySize
    {
        get
        {
            int bits = Parameters & 0x0F;
            return bits < 10 ? 1024 : 1 << bits;
        }
    }
    public AceFileHeader(IArchiveEncoding archiveEncoding)
        : base(archiveEncoding, AceHeaderType.FILE) { }
    /// <summary>
    /// Reads the next file entry header from the stream.
    /// Returns null if no more entries or end of archive.
    /// Supports both ACE 1.0 and ACE 2.0 formats.
    /// </summary>
    public override AceHeader? Read(Stream stream)
    {
        var headerData = ReadHeader(stream);
        if (headerData.Length == 0)
        {
            return null;
        }
        int offset = 0;
        // Header type (1 byte)
        HeaderType = headerData[offset++];
        // Skip recovery record headers (ACE 2.0 feature)
        if (HeaderType == (byte)SharpCompress.Common.Ace.Headers.AceHeaderType.RECOVERY32)
        {
            // Skip to next header
            return null;
        }
        if (HeaderType != (byte)SharpCompress.Common.Ace.Headers.AceHeaderType.FILE)
        {
            // Unknown header type - skip
            return null;
        }
        // Header flags (2 bytes)
        HeaderFlags = BitConverter.ToUInt16(headerData, offset);
        offset += 2;
        // Packed size (4 bytes)
        PackedSize = BitConverter.ToUInt32(headerData, offset);
        offset += 4;
        // Original size (4 bytes)
        OriginalSize = BitConverter.ToUInt32(headerData, offset);
        offset += 4;
        // File date/time in DOS format (4 bytes)
        var dosDateTime = BitConverter.ToUInt32(headerData, offset);
        DateTime = ConvertDosDateTime(dosDateTime);
        offset += 4;
        // File attributes (4 bytes)
        Attributes = (int)BitConverter.ToUInt32(headerData, offset);
        offset += 4;
        // CRC32 (4 bytes)
        Crc32 = BitConverter.ToUInt32(headerData, offset);
        offset += 4;
        // Compression type (1 byte)
        byte compressionType = headerData[offset++];
        CompressionType = GetCompressionType(compressionType);
        // Compression quality/parameter (1 byte)
        byte compressionQuality = headerData[offset++];
        CompressionQuality = GetCompressionQuality(compressionQuality);
        // Parameters (2 bytes)
        Parameters = BitConverter.ToUInt16(headerData, offset);
        offset += 2;
        // Reserved (2 bytes) - skip
        offset += 2;
        // Filename length (2 bytes)
        var filenameLength = BitConverter.ToUInt16(headerData, offset);
        offset += 2;
        // Filename
        if (offset + filenameLength <= headerData.Length)
        {
            Filename = ArchiveEncoding.Decode(headerData, offset, filenameLength);
            offset += filenameLength;
        }
        // Handle comment if present
        if ((HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.COMMENT) != 0)
        {
            // Comment length (2 bytes)
            if (offset + 2 <= headerData.Length)
            {
                ushort commentLength = BitConverter.ToUInt16(headerData, offset);
                offset += 2 + commentLength; // Skip comment
            }
        }
        // Store the data start position
        DataStartPosition = stream.Position;
        return this;
    }
    // ReadAsync moved to AceFileHeader.Async.cs
    public CompressionType GetCompressionType(byte value) =>
        value switch
        {
            0 => CompressionType.Stored,
            1 => CompressionType.Lz77,
            2 => CompressionType.Blocked,
            _ => CompressionType.Unknown,
        };
    public CompressionQuality GetCompressionQuality(byte value) =>
        value switch
        {
            0 => CompressionQuality.None,
            1 => CompressionQuality.Fastest,
            2 => CompressionQuality.Fast,
            3 => CompressionQuality.Normal,
            4 => CompressionQuality.Good,
            5 => CompressionQuality.Best,
            _ => CompressionQuality.Unknown,
        };
}

View File

@@ -19,7 +19,7 @@ public abstract partial class AceHeader
{
// Read header CRC (2 bytes) and header size (2 bytes)
var headerBytes = new byte[4];
if (await stream.ReadAsync(headerBytes, 0, 4, cancellationToken) != 4)
if (!await stream.ReadFullyAsync(headerBytes, 0, 4, cancellationToken))
{
return Array.Empty<byte>();
}
@@ -33,7 +33,7 @@ public abstract partial class AceHeader
// Read the header data
var body = new byte[HeaderSize];
if (await stream.ReadAsync(body, 0, HeaderSize, cancellationToken) != HeaderSize)
if (!await stream.ReadFullyAsync(body, 0, HeaderSize, cancellationToken))
{
return Array.Empty<byte>();
}
@@ -59,7 +59,7 @@ public abstract partial class AceHeader
)
{
var bytes = new byte[14];
if (await stream.ReadAsync(bytes, 0, 14, cancellationToken) != 14)
if (!await stream.ReadFullyAsync(bytes, 0, 14, cancellationToken))
{
return false;
}
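The `ReadFullyAsync` swap above guards against `Stream.ReadAsync` returning fewer bytes than requested on a single call. A minimal sketch of such a helper, assuming an extension-method shape (the real SharpCompress utility may differ in signature and details):
```csharp
using System.IO;
using System.Threading;
using System.Threading.Tasks;

internal static class StreamReadExtensions
{
    // Hedged sketch: loop until the requested count is read, or report
    // failure if EOF arrives first.
    public static async ValueTask<bool> ReadFullyAsync(
        this Stream stream,
        byte[] buffer,
        int offset,
        int count,
        CancellationToken cancellationToken
    )
    {
        var total = 0;
        while (total < count)
        {
            var read = await stream
                .ReadAsync(buffer, offset + total, count - total, cancellationToken)
                .ConfigureAwait(false);
            if (read == 0)
            {
                return false; // EOF before the full count arrived
            }
            total += read;
        }
        return true;
    }
}
```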

View File

@@ -5,153 +5,152 @@ using System.Threading.Tasks;
using SharpCompress.Common.Arj.Headers;
using SharpCompress.Crypto;
namespace SharpCompress.Common.Ace.Headers
namespace SharpCompress.Common.Ace.Headers;
/// <summary>
/// Header type constants
/// </summary>
public enum AceHeaderType
{
    MAIN = 0,
    FILE = 1,
    RECOVERY32 = 2,
    RECOVERY64A = 3,
    RECOVERY64B = 4,
}
public abstract partial class AceHeader
{
    // ACE signature: bytes at offset 7 should be "**ACE**"
    private static readonly byte[] AceSignature =
    [
        (byte)'*',
        (byte)'*',
        (byte)'A',
        (byte)'C',
        (byte)'E',
        (byte)'*',
        (byte)'*',
    ];
    public AceHeader(IArchiveEncoding archiveEncoding, AceHeaderType type)
    {
        AceHeaderType = type;
        ArchiveEncoding = archiveEncoding;
    }
    public IArchiveEncoding ArchiveEncoding { get; }
    public AceHeaderType AceHeaderType { get; }
    public ushort HeaderFlags { get; set; }
    public ushort HeaderCrc { get; set; }
    public ushort HeaderSize { get; set; }
    public byte HeaderType { get; set; }
    public bool IsFileEncrypted =>
        (HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.FILE_ENCRYPTED) != 0;
    public bool Is64Bit =>
        (HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.MEMORY_64BIT) != 0;
    public bool IsSolid =>
        (HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.SOLID_MAIN) != 0;
    public bool IsMultiVolume =>
        (HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.MULTIVOLUME) != 0;
    public abstract AceHeader? Read(Stream reader);
    // Async methods moved to AceHeader.Async.cs
    public byte[] ReadHeader(Stream stream)
    {
        // Read header CRC (2 bytes) and header size (2 bytes)
        var headerBytes = new byte[4];
        if (stream.Read(headerBytes, 0, 4) != 4)
        if (!stream.ReadFully(headerBytes))
        {
            return Array.Empty<byte>();
        }
        HeaderCrc = BitConverter.ToUInt16(headerBytes, 0); // CRC for validation
        HeaderSize = BitConverter.ToUInt16(headerBytes, 2);
        if (HeaderSize == 0)
        {
            return Array.Empty<byte>();
        }
        // Read the header data
        var body = new byte[HeaderSize];
        if (stream.Read(body, 0, HeaderSize) != HeaderSize)
        if (!stream.ReadFully(body))
        {
            return Array.Empty<byte>();
        }
        // Verify crc
        var checksum = AceCrc.AceCrc16(body);
        if (checksum != HeaderCrc)
        {
            throw new InvalidDataException("Header checksum is invalid");
        }
        return body;
    }
    public static bool IsArchive(Stream stream)
    {
        // ACE files have a specific signature
        // First two bytes are typically 0x60 0xEA (signature bytes)
        // At offset 7, there should be "**ACE**" (7 bytes)
        var bytes = new byte[14];
        if (stream.Read(bytes, 0, 14) != 14)
        {
            return false;
        }
        // Check for "**ACE**" at offset 7
        return CheckMagicBytes(bytes, 7);
    }
    protected static bool CheckMagicBytes(byte[] headerBytes, int offset)
    {
        // Check for "**ACE**" at specified offset
        for (int i = 0; i < AceSignature.Length; i++)
        {
            if (headerBytes[offset + i] != AceSignature[i])
            {
                return false;
            }
        }
        return true;
    }
    protected DateTime ConvertDosDateTime(uint dosDateTime)
    {
        try
        {
            int second = (int)(dosDateTime & 0x1F) * 2;
            int minute = (int)((dosDateTime >> 5) & 0x3F);
            int hour = (int)((dosDateTime >> 11) & 0x1F);
            int day = (int)((dosDateTime >> 16) & 0x1F);
            int month = (int)((dosDateTime >> 21) & 0x0F);
            int year = (int)((dosDateTime >> 25) & 0x7F) + 1980;
            if (
                day < 1
                || day > 31
                || month < 1
                || month > 12
                || hour > 23
                || minute > 59
                || second > 59
            )
            {
                return DateTime.MinValue;
            }
            return new DateTime(year, month, day, hour, minute, second);
        }
        catch
        {
            return DateTime.MinValue;
        }
    }
}

View File

@@ -8,94 +8,93 @@ using SharpCompress.Common.Ace.Headers;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Crypto;
namespace SharpCompress.Common.Ace.Headers
namespace SharpCompress.Common.Ace.Headers;
/// <summary>
/// ACE main archive header
/// </summary>
public sealed partial class AceMainHeader : AceHeader
{
    public byte ExtractVersion { get; set; }
    public byte CreatorVersion { get; set; }
    public HostOS HostOS { get; set; }
    public byte VolumeNumber { get; set; }
    public DateTime DateTime { get; set; }
    public string Advert { get; set; } = string.Empty;
    public List<byte> Comment { get; set; } = new();
    public byte AceVersion { get; private set; }
    public AceMainHeader(IArchiveEncoding archiveEncoding)
        : base(archiveEncoding, AceHeaderType.MAIN) { }
    /// <summary>
    /// Reads the main archive header from the stream.
    /// Returns header if this is a valid ACE archive.
    /// Supports both ACE 1.0 and ACE 2.0 formats.
    /// </summary>
    public override AceHeader? Read(Stream stream)
    {
        var headerData = ReadHeader(stream);
        if (headerData.Length == 0)
        {
            return null;
        }
        int offset = 0;
        // Header type should be 0 for main header
        if (headerData[offset++] != HeaderType)
        {
            return null;
        }
        // Header flags (2 bytes)
        HeaderFlags = BitConverter.ToUInt16(headerData, offset);
        offset += 2;
        // Skip signature "**ACE**" (7 bytes)
        if (!CheckMagicBytes(headerData, offset))
        {
            throw new InvalidDataException("Invalid ACE archive signature.");
        }
        offset += 7;
        // ACE version (1 byte) - 10 for ACE 1.0, 20 for ACE 2.0
        AceVersion = headerData[offset++];
        ExtractVersion = headerData[offset++];
        // Host OS (1 byte)
        if (offset < headerData.Length)
        {
            var hostOsByte = headerData[offset++];
            HostOS = hostOsByte <= 11 ? (HostOS)hostOsByte : HostOS.Unknown;
        }
        // Volume number (1 byte)
        VolumeNumber = headerData[offset++];
        // Creation date/time (4 bytes)
        var dosDateTime = BitConverter.ToUInt32(headerData, offset);
        DateTime = ConvertDosDateTime(dosDateTime);
        offset += 4;
        // Reserved fields (8 bytes)
        if (offset + 8 <= headerData.Length)
        {
            offset += 8;
        }
        // Skip additional fields based on flags
        // Handle comment if present
        if ((HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.COMMENT) != 0)
        {
            if (offset + 2 <= headerData.Length)
            {
                ushort commentLength = BitConverter.ToUInt16(headerData, offset);
                offset += 2 + commentLength;
            }
        }
        return this;
    }
    // ReadAsync moved to AceMainHeader.Async.cs
}

View File

@@ -1,16 +1,15 @@
namespace SharpCompress.Common.Ace.Headers
namespace SharpCompress.Common.Ace.Headers;
/// <summary>
/// Compression quality
/// </summary>
public enum CompressionQuality
{
    None,
    Fastest,
    Fast,
    Normal,
    Good,
    Best,
    Unknown,
}

View File

@@ -1,13 +1,12 @@
namespace SharpCompress.Common.Ace.Headers
namespace SharpCompress.Common.Ace.Headers;
/// <summary>
/// Compression types
/// </summary>
public enum CompressionType
{
    Stored,
    Lz77,
    Blocked,
    Unknown,
}

View File

@@ -1,33 +1,32 @@
namespace SharpCompress.Common.Ace.Headers;

/// <summary>
/// Header flags (main + file, overlapping meanings)
/// </summary>
public static class HeaderFlags
{
    // Shared / low bits
    public const ushort ADDSIZE = 0x0001; // extra size field present
    public const ushort COMMENT = 0x0002; // comment present
    public const ushort MEMORY_64BIT = 0x0004;
    public const ushort AV_STRING = 0x0008; // AV string present
    public const ushort SOLID = 0x0010; // solid file
    public const ushort LOCKED = 0x0020;
    public const ushort PROTECTED = 0x0040;

    // Main header specific
    public const ushort V20FORMAT = 0x0100;
    public const ushort SFX = 0x0200;
    public const ushort LIMITSFXJR = 0x0400;
    public const ushort MULTIVOLUME = 0x0800;
    public const ushort ADVERT = 0x1000;
    public const ushort RECOVERY = 0x2000;
    public const ushort LOCKED_MAIN = 0x4000;
    public const ushort SOLID_MAIN = 0x8000;

    // File header specific (same bits, different meaning)
    public const ushort NTSECURITY = 0x0400;
    public const ushort CONTINUED_PREV = 0x1000;
    public const ushort CONTINUED_NEXT = 0x2000;
    public const ushort FILE_ENCRYPTED = 0x4000; // file encrypted (file header)
}
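
Because the same bit positions carry different meanings in main and file headers, flag tests must pair the constant with the kind of header being read. A minimal sketch using the constants above (the local values are hypothetical, for illustration only):

ushort mainFlags = 0x0802; // COMMENT | MULTIVOLUME, hypothetical sample value
bool hasComment = (mainFlags & HeaderFlags.COMMENT) != 0; // true
bool isMultiVolume = (mainFlags & HeaderFlags.MULTIVOLUME) != 0; // true, valid for main headers only
// Note: 0x0400 means LIMITSFXJR when read from a main header but NTSECURITY in a file header.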

View File

@@ -1,22 +1,21 @@
namespace SharpCompress.Common.Ace.Headers;

/// <summary>
/// Host OS type
/// </summary>
public enum HostOS
{
    MsDos = 0,
    Os2,
    Windows,
    Unix,
    MacOs,
    WinNt,
    Primos,
    AppleGs,
    Atari,
    Vax,
    Amiga,
    Next,
    Unknown,
}

View File

@@ -7,54 +7,53 @@ using System.Threading.Tasks;
using SharpCompress.Common.GZip;
using SharpCompress.Common.Tar;
namespace SharpCompress.Common.Arc;

public class ArcEntry : Entry
{
    private readonly ArcFilePart? _filePart;

    internal ArcEntry(ArcFilePart? filePart)
    {
        _filePart = filePart;
    }

    public override long Crc
    {
        get
        {
            if (_filePart == null)
            {
                return 0;
            }
            return _filePart.Header.Crc16;
        }
    }

    public override string? Key => _filePart?.Header.Name;
    public override string? LinkTarget => null;
    public override long CompressedSize => _filePart?.Header.CompressedSize ?? 0;
    public override CompressionType CompressionType =>
        _filePart?.Header.CompressionMethod ?? CompressionType.Unknown;
    public override long Size => throw new NotImplementedException();
    public override DateTime? LastModifiedTime => null;
    public override DateTime? CreatedTime => null;
    public override DateTime? LastAccessedTime => null;
    public override DateTime? ArchivedTime => null;
    public override bool IsEncrypted => false;
    public override bool IsDirectory => false;
    public override bool IsSplitAfter => false;

    internal override IEnumerable<FilePart> Parts => _filePart.Empty();
}

View File

@@ -2,75 +2,93 @@ using System;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Arc;

public class ArcEntryHeader
{
    public IArchiveEncoding ArchiveEncoding { get; }
    public CompressionType CompressionMethod { get; private set; }
    public string? Name { get; private set; }
    public long CompressedSize { get; private set; }
    public DateTime DateTime { get; private set; }
    public int Crc16 { get; private set; }
    public long OriginalSize { get; private set; }
    public long DataStartPosition { get; private set; }

    public ArcEntryHeader(IArchiveEncoding archiveEncoding)
    {
        this.ArchiveEncoding = archiveEncoding;
    }

    public ArcEntryHeader? ReadHeader(Stream stream)
    {
        byte[] headerBytes = new byte[29];
        if (stream.Read(headerBytes, 0, headerBytes.Length) != headerBytes.Length)
        {
            return null;
        }
        DataStartPosition = stream.Position;
        return LoadFrom(headerBytes);
    }

    public async ValueTask<ArcEntryHeader?> ReadHeaderAsync(
        Stream stream,
        CancellationToken cancellationToken = default
    )
    {
        byte[] headerBytes = new byte[29];
        if (
            await stream.ReadAsync(headerBytes, 0, headerBytes.Length, cancellationToken)
            != headerBytes.Length
        )
        {
            return null;
        }
        DataStartPosition = stream.Position;
        return LoadFrom(headerBytes);
    }

    public ArcEntryHeader LoadFrom(byte[] headerBytes)
    {
        CompressionMethod = GetCompressionType(headerBytes[1]);
        // Read name
        int nameEnd = Array.IndexOf(headerBytes, (byte)0, 1); // Find null terminator
        Name = Encoding.UTF8.GetString(headerBytes, 2, nameEnd > 0 ? nameEnd - 2 : 12);
        int offset = 15;
        CompressedSize = BitConverter.ToUInt32(headerBytes, offset);
        offset += 4;
        uint rawDateTime = BitConverter.ToUInt32(headerBytes, offset);
        DateTime = ConvertToDateTime(rawDateTime);
        offset += 4;
        Crc16 = BitConverter.ToUInt16(headerBytes, offset);
        offset += 2;
        OriginalSize = BitConverter.ToUInt32(headerBytes, offset);
        return this;
    }

    private CompressionType GetCompressionType(byte value)
    {
        return value switch
        {
            1 or 2 => CompressionType.None,
            3 => CompressionType.Packed,
            4 => CompressionType.Squeezed,
            5 or 6 or 7 or 8 => CompressionType.Crunched,
            9 => CompressionType.Squashed,
            10 => CompressionType.Crushed,
            11 => CompressionType.Distilled,
            _ => CompressionType.Unknown,
        };
    }

    public static DateTime ConvertToDateTime(long rawDateTime)
    {
        // Convert Unix timestamp to DateTime (UTC)
        return DateTimeOffset.FromUnixTimeSeconds(rawDateTime).UtcDateTime;
    }
}
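
The 29-byte entry header that LoadFrom decodes, recapped as a comment sketch; the offsets follow the reads in the code above (byte 0 is not consumed by LoadFrom):

// Offset  Size  Field (as consumed by LoadFrom above)
// 1       1     compression method byte (mapped via GetCompressionType)
// 2       13    file name, null-terminated
// 15      4     compressed size (little-endian uint)
// 19      4     raw date/time (decoded by ConvertToDateTime)
// 23      2     CRC-16
// 25      4     original (uncompressed) size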

View File

@@ -0,0 +1,58 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.Lzw;
using SharpCompress.Compressors.RLE90;
using SharpCompress.Compressors.Squeezed;
using SharpCompress.IO;
namespace SharpCompress.Common.Arc;
public partial class ArcFilePart
{
internal override async ValueTask<Stream?> GetCompressedStreamAsync(
CancellationToken cancellationToken = default
)
{
if (_stream != null)
{
Stream compressedStream;
switch (Header.CompressionMethod)
{
case CompressionType.None:
compressedStream = new ReadOnlySubStream(
_stream,
Header.DataStartPosition,
Header.CompressedSize
);
break;
case CompressionType.Packed:
compressedStream = new RunLength90Stream(_stream, (int)Header.CompressedSize);
break;
case CompressionType.Squeezed:
compressedStream = await SqueezeStream.CreateAsync(
_stream,
(int)Header.CompressedSize,
cancellationToken
);
break;
case CompressionType.Crunched:
if (Header.OriginalSize > 128 * 1024)
{
throw new NotSupportedException(
"CompressionMethod: " + Header.CompressionMethod + " with size > 128KB"
);
}
compressedStream = new ArcLzwStream(_stream, (int)Header.CompressedSize, true);
break;
default:
throw new NotSupportedException(
"CompressionMethod: " + Header.CompressionMethod
);
}
return compressedStream;
}
return _stream;
}
}

View File

@@ -13,71 +13,61 @@ using SharpCompress.Compressors.RLE90;
using SharpCompress.Compressors.Squeezed;
using SharpCompress.IO;
namespace SharpCompress.Common.Arc;

public partial class ArcFilePart : FilePart
{
    private readonly Stream? _stream;

    internal ArcFilePart(ArcEntryHeader localArcHeader, Stream? seekableStream)
        : base(localArcHeader.ArchiveEncoding)
    {
        _stream = seekableStream;
        Header = localArcHeader;
    }

    internal ArcEntryHeader Header { get; set; }

    internal override string? FilePartName => Header.Name;

    internal override Stream GetCompressedStream()
    {
        if (_stream != null)
        {
            Stream compressedStream;
            switch (Header.CompressionMethod)
            {
                case CompressionType.None:
                    compressedStream = new ReadOnlySubStream(
                        _stream,
                        Header.DataStartPosition,
                        Header.CompressedSize
                    );
                    break;
                case CompressionType.Packed:
                    compressedStream = new RunLength90Stream(_stream, (int)Header.CompressedSize);
                    break;
                case CompressionType.Squeezed:
                    compressedStream = SqueezeStream.Create(_stream, (int)Header.CompressedSize);
                    break;
                case CompressionType.Crunched:
                    if (Header.OriginalSize > 128 * 1024)
                    {
                        throw new NotSupportedException(
                            "CompressionMethod: " + Header.CompressionMethod + " with size > 128KB"
                        );
                    }
                    compressedStream = new ArcLzwStream(_stream, (int)Header.CompressedSize, true);
                    break;
                default:
                    throw new NotSupportedException(
                        "CompressionMethod: " + Header.CompressionMethod
                    );
            }
            return compressedStream;
        }
        return _stream.NotNull();
    }

    internal override Stream? GetRawStream() => _stream;
}

View File

@@ -6,11 +6,10 @@ using System.Text;
using System.Threading.Tasks;
using SharpCompress.Readers;
namespace SharpCompress.Common.Arc;

public class ArcVolume : Volume
{
    public ArcVolume(Stream stream, ReaderOptions readerOptions, int index = 0)
        : base(stream, readerOptions, index) { }
}

View File

@@ -6,53 +6,52 @@ using System.Threading.Tasks;
using SharpCompress.Common.Arc;
using SharpCompress.Common.Arj.Headers;
namespace SharpCompress.Common.Arj;

public class ArjEntry : Entry
{
    private readonly ArjFilePart _filePart;

    internal ArjEntry(ArjFilePart filePart)
    {
        _filePart = filePart;
    }

    public override long Crc => _filePart.Header.OriginalCrc32;
    public override string? Key => _filePart?.Header.Name;
    public override string? LinkTarget => null;
    public override long CompressedSize => _filePart?.Header.CompressedSize ?? 0;

    public override CompressionType CompressionType
    {
        get
        {
            if (_filePart.Header.CompressionMethod == CompressionMethod.Stored)
            {
                return CompressionType.None;
            }
            return CompressionType.ArjLZ77;
        }
    }

    public override long Size => _filePart?.Header.OriginalSize ?? 0;
    public override DateTime? LastModifiedTime => _filePart.Header.DateTimeModified.DateTime;
    public override DateTime? CreatedTime => _filePart.Header.DateTimeCreated.DateTime;
    public override DateTime? LastAccessedTime => _filePart.Header.DateTimeAccessed.DateTime;
    public override DateTime? ArchivedTime => null;
    public override bool IsEncrypted => false;
    public override bool IsDirectory => _filePart.Header.FileType == FileType.Directory;
    public override bool IsSplitAfter => false;

    internal override IEnumerable<FilePart> Parts => _filePart.Empty();
}

View File

@@ -8,65 +8,62 @@ using SharpCompress.Common.Arj.Headers;
using SharpCompress.Compressors.Arj;
using SharpCompress.IO;
namespace SharpCompress.Common.Arj;

public class ArjFilePart : FilePart
{
    private readonly Stream _stream;

    internal ArjLocalHeader Header { get; set; }

    internal ArjFilePart(ArjLocalHeader localArjHeader, Stream seekableStream)
        : base(localArjHeader.ArchiveEncoding)
    {
        _stream = seekableStream;
        Header = localArjHeader;
    }

    internal override string? FilePartName => Header.Name;

    internal override Stream GetCompressedStream()
    {
        if (_stream != null)
        {
            Stream compressedStream;
            switch (Header.CompressionMethod)
            {
                case CompressionMethod.Stored:
                    compressedStream = new ReadOnlySubStream(
                        _stream,
                        Header.DataStartPosition,
                        Header.CompressedSize
                    );
                    break;
                case CompressionMethod.CompressedMost:
                case CompressionMethod.Compressed:
                case CompressionMethod.CompressedFaster:
                    if (Header.OriginalSize > 128 * 1024)
                    {
                        throw new NotSupportedException(
                            "CompressionMethod: " + Header.CompressionMethod + " with size > 128KB"
                        );
                    }
                    compressedStream = new LhaStream<Lh7DecoderCfg>(
                        _stream,
                        (int)Header.OriginalSize
                    );
                    break;
                case CompressionMethod.CompressedFastest:
                    compressedStream = new LHDecoderStream(_stream, (int)Header.OriginalSize);
                    break;
                default:
                    throw new NotSupportedException(
                        "CompressionMethod: " + Header.CompressionMethod
                    );
            }
            return compressedStream;
        }
        return _stream.NotNull();
    }

    internal override Stream GetRawStream() => _stream;
}

View File

@@ -8,29 +8,28 @@ using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Readers;
namespace SharpCompress.Common.Arj;

public class ArjVolume : Volume
{
    public ArjVolume(Stream stream, ReaderOptions readerOptions, int index = 0)
        : base(stream, readerOptions, index) { }

    public override bool IsFirstVolume
    {
        get { return true; }
    }

    /// <summary>
    /// Whether this ArjArchive is part of a multi-part archive.
    /// </summary>
    public override bool IsMultiVolume
    {
        get { return false; }
    }

    internal IEnumerable<ArjFilePart> GetVolumeFileParts()
    {
        return new List<ArjFilePart>();
    }
}

View File

@@ -8,156 +8,153 @@ using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Crypto;
namespace SharpCompress.Common.Arj.Headers;

public enum ArjHeaderType
{
    MainHeader,
    LocalHeader,
}

public abstract partial class ArjHeader
{
    private const int FIRST_HDR_SIZE = 34;
    private const ushort ARJ_MAGIC = 0xEA60;

    public ArjHeader(ArjHeaderType type)
    {
        ArjHeaderType = type;
    }

    public ArjHeaderType ArjHeaderType { get; }
    public byte Flags { get; set; }
    public FileType FileType { get; set; }

    public abstract ArjHeader? Read(Stream reader);

    // Async methods moved to ArjHeader.Async.cs

    public byte[] ReadHeader(Stream stream)
    {
        // check for magic bytes
        var magic = new byte[2];
        if (stream.Read(magic) != 2)
        {
            return Array.Empty<byte>();
        }
        if (!CheckMagicBytes(magic))
        {
            throw new InvalidDataException("Not an ARJ file (wrong magic bytes)");
        }
        // read header_size
        byte[] headerBytes = new byte[2];
        stream.Read(headerBytes, 0, 2);
        var headerSize = (ushort)(headerBytes[0] | headerBytes[1] << 8);
        if (headerSize < 1)
        {
            return Array.Empty<byte>();
        }
        var body = new byte[headerSize];
        var read = stream.Read(body, 0, headerSize);
        if (read < headerSize)
        {
            return Array.Empty<byte>();
        }
        byte[] crc = new byte[4];
        read = stream.Read(crc, 0, 4);
        // Compute the hash value
        var checksum = Crc32Stream.Compute(body);
        if (checksum != BitConverter.ToUInt32(crc, 0))
        {
            throw new InvalidDataException("Header checksum is invalid");
        }
        return body;
    }

    // ReadHeaderAsync moved to ArjHeader.Async.cs

    protected List<byte[]> ReadExtendedHeaders(Stream reader)
    {
        List<byte[]> extendedHeader = new List<byte[]>();
        byte[] buffer = new byte[2];
        while (true)
        {
            int bytesRead = reader.Read(buffer, 0, 2);
            if (bytesRead < 2)
            {
                throw new EndOfStreamException(
                    "Unexpected end of stream while reading extended header size."
                );
            }
            var extHeaderSize = (ushort)(buffer[0] | (buffer[1] << 8));
            if (extHeaderSize == 0)
            {
                return extendedHeader;
            }
            byte[] header = new byte[extHeaderSize];
            bytesRead = reader.Read(header, 0, extHeaderSize);
            if (bytesRead < extHeaderSize)
            {
                throw new EndOfStreamException(
                    "Unexpected end of stream while reading extended header data."
                );
            }
            byte[] crc = new byte[4];
            bytesRead = reader.Read(crc, 0, 4);
            if (bytesRead < 4)
            {
                throw new EndOfStreamException(
                    "Unexpected end of stream while reading extended header CRC."
                );
            }
            var checksum = Crc32Stream.Compute(header);
            if (checksum != BitConverter.ToUInt32(crc, 0))
            {
                throw new InvalidDataException("Extended header checksum is invalid");
            }
            extendedHeader.Add(header);
        }
    }

    // Flag helpers
    public bool IsGabled => (Flags & 0x01) != 0;
    public bool IsAnsiPage => (Flags & 0x02) != 0;
    public bool IsVolume => (Flags & 0x04) != 0;
    public bool IsArjProtected => (Flags & 0x08) != 0;
    public bool IsPathSym => (Flags & 0x10) != 0;
    public bool IsBackup => (Flags & 0x20) != 0;
    public bool IsSecured => (Flags & 0x40) != 0;
    public bool IsAltName => (Flags & 0x80) != 0;

    public static FileType FileTypeFromByte(byte value)
    {
        return Enum.IsDefined(typeof(FileType), value) ? (FileType)value : Headers.FileType.Unknown;
    }

    public static bool IsArchive(Stream stream)
    {
        var bytes = new byte[2];
        if (stream.Read(bytes, 0, 2) != 2)
        {
            return false;
        }
        return CheckMagicBytes(bytes);
    }

    protected static bool CheckMagicBytes(byte[] headerBytes)
    {
        var magicValue = (ushort)(headerBytes[0] | headerBytes[1] << 8);
        return magicValue == ARJ_MAGIC;
    }
}
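
A minimal sniffing sketch built on the static helper above; the file path is hypothetical, and the stream must be positioned at the start of the data since IsArchive consumes the first two bytes:

using var fs = File.OpenRead("example.arj"); // hypothetical path
bool isArj = ArjHeader.IsArchive(fs); // true when bytes 0-1 are 0x60 0xEA (0xEA60 little-endian)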

View File

@@ -7,158 +7,156 @@ using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Arj.Headers;

public partial class ArjLocalHeader : ArjHeader
{
    public ArchiveEncoding ArchiveEncoding { get; }
    public long DataStartPosition { get; protected set; }
    public byte ArchiverVersionNumber { get; set; }
    public byte MinVersionToExtract { get; set; }
    public HostOS HostOS { get; set; }
    public CompressionMethod CompressionMethod { get; set; }
    public DosDateTime DateTimeModified { get; set; } = new DosDateTime(0);
    public long CompressedSize { get; set; }
    public long OriginalSize { get; set; }
    public long OriginalCrc32 { get; set; }
    public int FileSpecPosition { get; set; }
    public int FileAccessMode { get; set; }
    public byte FirstChapter { get; set; }
    public byte LastChapter { get; set; }
    public long ExtendedFilePosition { get; set; }
    public DosDateTime DateTimeAccessed { get; set; } = new DosDateTime(0);
    public DosDateTime DateTimeCreated { get; set; } = new DosDateTime(0);
    public long OriginalSizeEvenForVolumes { get; set; }
    public string Name { get; set; } = string.Empty;
    public string Comment { get; set; } = string.Empty;

    private const byte StdHdrSize = 30;
    private const byte R9HdrSize = 46;

    public ArjLocalHeader(ArchiveEncoding archiveEncoding)
        : base(ArjHeaderType.LocalHeader)
    {
        ArchiveEncoding =
            archiveEncoding ?? throw new ArgumentNullException(nameof(archiveEncoding));
    }

    public override ArjHeader? Read(Stream stream)
    {
        var body = ReadHeader(stream);
        if (body.Length > 0)
        {
            ReadExtendedHeaders(stream);
            var header = LoadFrom(body);
            header.DataStartPosition = stream.Position;
            return header;
        }
        return null;
    }

    // ReadAsync moved to ArjLocalHeader.Async.cs

    public ArjLocalHeader LoadFrom(byte[] headerBytes)
    {
        int offset = 0;

        int ReadInt16()
        {
            if (offset + 1 >= headerBytes.Length)
            {
                throw new EndOfStreamException();
            }
            var v = headerBytes[offset] & 0xFF | (headerBytes[offset + 1] & 0xFF) << 8;
            offset += 2;
            return v;
        }

        long ReadInt32()
        {
            if (offset + 3 >= headerBytes.Length)
            {
                throw new EndOfStreamException();
            }
            long v =
                headerBytes[offset] & 0xFF
                | (headerBytes[offset + 1] & 0xFF) << 8
                | (headerBytes[offset + 2] & 0xFF) << 16
                | (headerBytes[offset + 3] & 0xFF) << 24;
            offset += 4;
            return v;
        }

        byte headerSize = headerBytes[offset++];
        ArchiverVersionNumber = headerBytes[offset++];
        MinVersionToExtract = headerBytes[offset++];
        HostOS hostOS = (HostOS)headerBytes[offset++];
        Flags = headerBytes[offset++];
        CompressionMethod = CompressionMethodFromByte(headerBytes[offset++]);
        FileType = FileTypeFromByte(headerBytes[offset++]);
        offset++; // Skip 1 byte
        var rawTimestamp = ReadInt32();
        DateTimeModified = rawTimestamp != 0 ? new DosDateTime(rawTimestamp) : new DosDateTime(0);
        CompressedSize = ReadInt32();
        OriginalSize = ReadInt32();
        OriginalCrc32 = ReadInt32();
        FileSpecPosition = ReadInt16();
        FileAccessMode = ReadInt16();
        FirstChapter = headerBytes[offset++];
        LastChapter = headerBytes[offset++];
        ExtendedFilePosition = 0;
        OriginalSizeEvenForVolumes = 0;
        if (headerSize > StdHdrSize)
        {
            ExtendedFilePosition = ReadInt32();
            if (headerSize >= R9HdrSize)
            {
                rawTimestamp = ReadInt32();
                DateTimeAccessed =
                    rawTimestamp != 0 ? new DosDateTime(rawTimestamp) : new DosDateTime(0);
                rawTimestamp = ReadInt32();
                DateTimeCreated =
                    rawTimestamp != 0 ? new DosDateTime(rawTimestamp) : new DosDateTime(0);
                OriginalSizeEvenForVolumes = ReadInt32();
            }
        }
        Name = Encoding.ASCII.GetString(
            headerBytes,
            offset,
            Array.IndexOf(headerBytes, (byte)0, offset) - offset
        );
        offset += Name.Length + 1;
        Comment = Encoding.ASCII.GetString(
            headerBytes,
            offset,
            Array.IndexOf(headerBytes, (byte)0, offset) - offset
        );
        offset += Comment.Length + 1;
        return this;
    }

    public static CompressionMethod CompressionMethodFromByte(byte value)
    {
        return value switch
        {
            0 => CompressionMethod.Stored,
            1 => CompressionMethod.CompressedMost,
            2 => CompressionMethod.Compressed,
            3 => CompressionMethod.CompressedFaster,
            4 => CompressionMethod.CompressedFastest,
            8 => CompressionMethod.NoDataNoCrc,
            9 => CompressionMethod.NoData,
            _ => CompressionMethod.Unknown,
        };
    }
}

View File

@@ -6,137 +6,136 @@ using System.Threading.Tasks;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Crypto;
namespace SharpCompress.Common.Arj.Headers;

public partial class ArjMainHeader : ArjHeader
{
    private const int FIRST_HDR_SIZE = 34;
    private const ushort ARJ_MAGIC = 0xEA60;

    public ArchiveEncoding ArchiveEncoding { get; }
    public int ArchiverVersionNumber { get; private set; }
    public int MinVersionToExtract { get; private set; }
    public HostOS HostOs { get; private set; }
    public int SecurityVersion { get; private set; }
    public DosDateTime CreationDateTime { get; private set; } = new DosDateTime(0);
    public long CompressedSize { get; private set; }
    public long ArchiveSize { get; private set; }
    public long SecurityEnvelope { get; private set; }
    public int FileSpecPosition { get; private set; }
    public int SecurityEnvelopeLength { get; private set; }
    public int EncryptionVersion { get; private set; }
    public int LastChapter { get; private set; }
    public int ArjProtectionFactor { get; private set; }
    public int Flags2 { get; private set; }
    public string Name { get; private set; } = string.Empty;
    public string Comment { get; private set; } = string.Empty;

    public ArjMainHeader(ArchiveEncoding archiveEncoding)
        : base(ArjHeaderType.MainHeader)
    {
        ArchiveEncoding =
            archiveEncoding ?? throw new ArgumentNullException(nameof(archiveEncoding));
    }

    public override ArjHeader? Read(Stream stream)
    {
        var body = ReadHeader(stream);
        ReadExtendedHeaders(stream);
        return LoadFrom(body);
    }

    // ReadAsync moved to ArjMainHeader.Async.cs

    public ArjMainHeader LoadFrom(byte[] headerBytes)
    {
        var offset = 1;

        byte ReadByte()
        {
            if (offset >= headerBytes.Length)
            {
                throw new EndOfStreamException();
            }
            return (byte)(headerBytes[offset++] & 0xFF);
        }

        int ReadInt16()
        {
            if (offset + 1 >= headerBytes.Length)
            {
                throw new EndOfStreamException();
            }
            var v = headerBytes[offset] & 0xFF | (headerBytes[offset + 1] & 0xFF) << 8;
            offset += 2;
            return v;
        }

        long ReadInt32()
        {
            if (offset + 3 >= headerBytes.Length)
            {
                throw new EndOfStreamException();
            }
            long v =
                headerBytes[offset] & 0xFF
                | (headerBytes[offset + 1] & 0xFF) << 8
                | (headerBytes[offset + 2] & 0xFF) << 16
                | (headerBytes[offset + 3] & 0xFF) << 24;
            offset += 4;
            return v;
        }

        string ReadNullTerminatedString(byte[] x, int startIndex)
        {
            var result = new StringBuilder();
            int i = startIndex;
            while (i < x.Length && x[i] != 0)
            {
                result.Append((char)x[i]);
                i++;
            }
            // Skip the null terminator
            i++;
            if (i < x.Length)
            {
                byte[] remainder = new byte[x.Length - i];
                Array.Copy(x, i, remainder, 0, remainder.Length);
                x = remainder;
            }
            return result.ToString();
        }

        ArchiverVersionNumber = ReadByte();
        MinVersionToExtract = ReadByte();
        var hostOsByte = ReadByte();
        HostOs = hostOsByte <= 11 ? (HostOS)hostOsByte : HostOS.Unknown;
        Flags = ReadByte();
        SecurityVersion = ReadByte();
        FileType = FileTypeFromByte(ReadByte());
        offset++; // skip reserved
        CreationDateTime = new DosDateTime((int)ReadInt32());
        CompressedSize = ReadInt32();
        ArchiveSize = ReadInt32();
        SecurityEnvelope = ReadInt32();
        FileSpecPosition = ReadInt16();
        SecurityEnvelopeLength = ReadInt16();
        EncryptionVersion = ReadByte();
        LastChapter = ReadByte();
        Name = ReadNullTerminatedString(headerBytes, offset);
        Comment = ReadNullTerminatedString(headerBytes, offset + 1 + Name.Length);
        return this;
    }
}

View File

@@ -4,17 +4,16 @@ using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace SharpCompress.Common.Arj.Headers;

public enum CompressionMethod
{
    Stored = 0,
    CompressedMost = 1,
    Compressed = 2,
    CompressedFaster = 3,
    CompressedFastest = 4,
    NoDataNoCrc = 8,
    NoData = 9,
    Unknown,
}

View File

@@ -1,37 +1,36 @@
using System;
namespace SharpCompress.Common.Arj.Headers;

public class DosDateTime
{
    public DateTime DateTime { get; }

    public DosDateTime(long dosValue)
    {
        // Ensure only the lower 32 bits are used
        int value = unchecked((int)(dosValue & 0xFFFFFFFF));
        var date = (value >> 16) & 0xFFFF;
        var time = value & 0xFFFF;
        var day = date & 0x1F;
        var month = (date >> 5) & 0x0F;
        var year = ((date >> 9) & 0x7F) + 1980;
        var second = (time & 0x1F) * 2;
        var minute = (time >> 5) & 0x3F;
        var hour = (time >> 11) & 0x1F;
        try
        {
            DateTime = new DateTime(year, month, day, hour, minute, second);
        }
        catch
        {
            DateTime = DateTime.MinValue;
        }
    }

    public override string ToString() => DateTime.ToString("yyyy-MM-dd HH:mm:ss");
}
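
A worked decoding of the bit layout above: the high word packs year-since-1980/month/day as 7/4/5 bits, and the low word packs hour/minute/two-second-units as 5/6/5 bits. The sample value is arbitrary:

// 0x16FA63D5: date word 0x16FA -> year 1980 + 11 = 1991, month 7, day 26;
//             time word 0x63D5 -> hour 12, minute 30, second 21 * 2 = 42.
var stamp = new DosDateTime(0x16FA63D5);
Console.WriteLine(stamp); // 1991-07-26 12:30:42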

View File

@@ -1,13 +1,12 @@
namespace SharpCompress.Common.Arj.Headers;

public enum FileType : byte
{
    Binary = 0,
    Text7Bit = 1,
    CommentHeader = 2,
    Directory = 3,
    VolumeLabel = 4,
    ChapterLabel = 5,
    Unknown = 255,
}

View File

@@ -1,19 +1,18 @@
namespace SharpCompress.Common.Arj.Headers;

public enum HostOS
{
    MsDos = 0,
    PrimOS = 1,
    Unix = 2,
    Amiga = 3,
    MacOs = 4,
    OS2 = 5,
    AppleGS = 6,
    AtariST = 7,
    NeXT = 8,
    VaxVMS = 9,
    Win95 = 10,
    Win32 = 11,
    Unknown = 255,
}

View File

@@ -1,108 +0,0 @@
using System;
using System.Buffers.Binary;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common
{
public sealed class AsyncBinaryReader : IDisposable
{
private readonly Stream _stream;
private readonly Stream _originalStream;
private readonly bool _leaveOpen;
private readonly byte[] _buffer = new byte[8];
private bool _disposed;
public AsyncBinaryReader(Stream stream, bool leaveOpen = false, int bufferSize = 4096)
{
if (!stream.CanRead)
{
throw new ArgumentException("Stream must be readable.");
}
_originalStream = stream ?? throw new ArgumentNullException(nameof(stream));
_leaveOpen = leaveOpen;
// Use the stream directly without wrapping in BufferedStream
// BufferedStream uses synchronous Read internally which doesn't work with async-only streams
// SharpCompress uses SharpCompressStream for buffering which supports true async reads
_stream = stream;
}
public Stream BaseStream => _stream;
public async ValueTask<byte> ReadByteAsync(CancellationToken ct = default)
{
await _stream.ReadExactAsync(_buffer, 0, 1, ct).ConfigureAwait(false);
return _buffer[0];
}
public async ValueTask<ushort> ReadUInt16Async(CancellationToken ct = default)
{
await _stream.ReadExactAsync(_buffer, 0, 2, ct).ConfigureAwait(false);
return BinaryPrimitives.ReadUInt16LittleEndian(_buffer);
}
public async ValueTask<uint> ReadUInt32Async(CancellationToken ct = default)
{
await _stream.ReadExactAsync(_buffer, 0, 4, ct).ConfigureAwait(false);
return BinaryPrimitives.ReadUInt32LittleEndian(_buffer);
}
public async ValueTask<ulong> ReadUInt64Async(CancellationToken ct = default)
{
await _stream.ReadExactAsync(_buffer, 0, 8, ct).ConfigureAwait(false);
return BinaryPrimitives.ReadUInt64LittleEndian(_buffer);
}
public async ValueTask ReadBytesAsync(
byte[] bytes,
int offset,
int count,
CancellationToken ct = default
)
{
await _stream.ReadExactAsync(bytes, offset, count, ct).ConfigureAwait(false);
}
public async ValueTask SkipAsync(int count, CancellationToken ct = default)
{
await _stream.SkipAsync(count, ct).ConfigureAwait(false);
}
public void Dispose()
{
if (_disposed)
{
return;
}
_disposed = true;
// Dispose the original stream if we own it
if (!_leaveOpen)
{
_originalStream.Dispose();
}
}
#if NET8_0_OR_GREATER
public async ValueTask DisposeAsync()
{
if (_disposed)
{
return;
}
_disposed = true;
// Dispose the original stream if we own it
if (!_leaveOpen)
{
await _originalStream.DisposeAsync().ConfigureAwait(false);
}
}
#endif
}
}

View File

@@ -8,5 +8,34 @@ public static class Constants
/// </summary>
public static int BufferSize { get; set; } = 81920;
/// <summary>
/// The default size for rewindable buffers in SharpCompressStream.
/// Used for format detection on non-seekable streams.
/// </summary>
/// <remarks>
/// <para>
/// When opening archives from non-seekable streams (network streams, pipes,
/// compressed streams), SharpCompress uses a ring buffer to enable format
/// auto-detection. This buffer allows the library to try multiple decoders
/// by rewinding and re-reading the same data.
/// </para>
/// <para>
/// <b>Default:</b> 81920 bytes (80 KB) - sufficient for typical format detection.
/// </para>
/// <para>
/// <b>Typical consumption:</b> format detection reads only 500-1000 bytes for most archives.
/// </para>
/// <para>
/// <b>Can be overridden per-stream via ReaderOptions.RewindableBufferSize.</b>
/// </para>
/// <para>
/// <b>Increase if:</b>
/// <list type="bullet">
/// <item>Handling self-extracting archives (may need 512KB+)</item>
/// <item>Format detection fails with buffer overflow errors</item>
/// <item>Using custom formats with large headers</item>
/// </list>
/// </para>
/// </remarks>
public static int RewindableBufferSize { get; set; } = 81920;
}
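
A minimal tuning sketch, assuming the global setter above and the per-stream ReaderOptions.RewindableBufferSize override named in the remarks (the input stream is hypothetical):

// Raise the global default before opening archives from non-seekable sources,
// e.g. large self-extracting archives as suggested above.
Constants.RewindableBufferSize = 512 * 1024;

// Or override for a single reader:
var options = new ReaderOptions { RewindableBufferSize = 512 * 1024 };
using var reader = ReaderFactory.Open(networkStream, options);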

View File

@@ -42,9 +42,6 @@ public partial class EntryStream
await lzmaStream.FlushAsync().ConfigureAwait(false);
}
}
#if DEBUG_STREAMS
this.DebugDispose(typeof(EntryStream));
#endif
await base.DisposeAsync().ConfigureAwait(false);
await _stream.DisposeAsync().ConfigureAwait(false);
}

View File

@@ -8,28 +8,8 @@ using SharpCompress.Readers;
namespace SharpCompress.Common;
public partial class EntryStream : Stream
{
    private readonly IReader _reader;
    private readonly Stream _stream;
    private bool _completed;
@@ -39,9 +19,6 @@ public partial class EntryStream : Stream, IStreamStack
    {
        _reader = reader;
        _stream = stream;
    }
/// <summary>
@@ -62,24 +39,34 @@ public partial class EntryStream : Stream, IStreamStack
    _isDisposed = true;
    if (!(_completed || _reader.Cancelled))
    {
        if (Utility.UseSyncOverAsyncDispose())
        {
            SkipEntryAsync().GetAwaiter().GetResult();
        }
        else
        {
            SkipEntry();
        }
    }
    //Need a safe standard approach to this - it's okay for compression to overread. Handling needs to be standardised
    if (_stream is IStreamStack ss)
    {
        if (
            ss.GetStream<SharpCompress.Compressors.Deflate.DeflateStream>()
            is SharpCompress.Compressors.Deflate.DeflateStream deflateStream
        )
        {
            deflateStream.Flush(); //Deflate over reads. Knock it back
        }
        else if (
            ss.GetStream<SharpCompress.Compressors.LZMA.LzmaStream>()
            is SharpCompress.Compressors.LZMA.LzmaStream lzmaStream
        )
        {
            lzmaStream.Flush(); //Lzma over reads. Knock it back
        }
    }
    base.Dispose(disposing);
    _stream.Dispose();
}

View File

@@ -4,6 +4,7 @@ using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
namespace SharpCompress.Common.Rar;

View File

@@ -6,13 +6,7 @@ using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Rar;
using SharpCompress.IO;
#if !Rar2017_64bit
using size_t = System.UInt32;
#else
using nint = System.Int64;
using nuint = System.UInt64;
using size_t = System.UInt64;
#endif
namespace SharpCompress.Common.Rar.Headers;

View File

@@ -6,13 +6,7 @@ using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Rar;
using SharpCompress.IO;
#if !Rar2017_64bit
using size_t = System.UInt32;
#else
using nint = System.Int64;
using nuint = System.UInt64;
using size_t = System.UInt64;
#endif
namespace SharpCompress.Common.Rar.Headers;

View File

@@ -1,6 +1,9 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Crypto;
namespace SharpCompress.Common.Rar;
@@ -55,6 +58,83 @@ internal sealed class RarCryptoWrapper : Stream
return count;
}
public override Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
) => ReadAndDecryptAsync(buffer, offset, count, cancellationToken);
private async Task<int> ReadAndDecryptAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
cancellationToken.ThrowIfCancellationRequested();
var queueSize = _data.Count;
var sizeToRead = count - queueSize;
if (sizeToRead > 0)
{
var alignedSize = sizeToRead + ((~sizeToRead + 1) & 0xf);
byte[] cipherText = new byte[16];
try
{
for (var i = 0; i < alignedSize / 16; i++)
{
await _actualStream
.ReadExactAsync(cipherText, 0, 16, cancellationToken)
.ConfigureAwait(false);
var readBytes = _rijndael.ProcessBlock(cipherText);
foreach (var readByte in readBytes)
{
_data.Enqueue(readByte);
}
}
}
catch (EndOfStreamException e)
{
throw new InvalidFormatException("Unexpected end of encrypted stream", e);
}
}
var bytesToReturn = Math.Min(count, _data.Count);
for (var i = 0; i < bytesToReturn; i++)
{
buffer[offset + i] = _data.Dequeue();
}
return bytesToReturn;
}
#if NETCOREAPP2_1_OR_GREATER || NETSTANDARD2_1_OR_GREATER
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
)
{
cancellationToken.ThrowIfCancellationRequested();
var array = System.Buffers.ArrayPool<byte>.Shared.Rent(buffer.Length);
try
{
var bytesRead = await ReadAndDecryptAsync(array, 0, buffer.Length, cancellationToken)
.ConfigureAwait(false);
new ReadOnlySpan<byte>(array, 0, bytesRead).CopyTo(buffer.Span);
return bytesRead;
}
finally
{
System.Buffers.ArrayPool<byte>.Shared.Return(array);
}
}
#endif
public override void Write(byte[] buffer, int offset, int count) =>
throw new NotSupportedException();
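
The alignment expression in ReadAndDecryptAsync above, `sizeToRead + ((~sizeToRead + 1) & 0xf)`, rounds the requested size up to the next multiple of the 16-byte AES block; `(~n + 1) & 0xF` is the two's-complement form of `(16 - n % 16) % 16`. Two quick checks:

// sizeToRead = 20: (~20 + 1) & 0xF = 12, so alignedSize = 32 (two full AES blocks).
// sizeToRead = 16: (~16 + 1) & 0xF = 0, so alignedSize = 16 (already block-aligned).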

View File

@@ -116,7 +116,9 @@ public abstract class RarVolume : Volume
    if (fh.FileName == "CMT")
    {
        var buffer = new byte[fh.CompressedSize];
        await fh
            .PackedStream.NotNull()
            .ReadFullyAsync(buffer, cancellationToken);
        Comment = Encoding.UTF8.GetString(buffer, 0, buffer.Length - 1);
    }
}

View File

@@ -6,6 +6,7 @@ using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Common.Tar.Headers;

View File

@@ -1,4 +1,6 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Tar.Headers;
namespace SharpCompress.Common.Tar;
@@ -23,10 +25,28 @@ internal sealed class TarFilePart : FilePart
    if (_seekableStream != null)
    {
        _seekableStream.Position = Header.DataStartPosition ?? 0;
        return new TarReadOnlySubStream(_seekableStream, Header.Size, false);
    }
    return Header.PackedStream.NotNull();
}

internal override ValueTask<Stream?> GetCompressedStreamAsync(
    CancellationToken cancellationToken = default
)
{
    if (_seekableStream != null)
    {
        var useSyncOverAsync = false;
#if LEGACY_DOTNET
        useSyncOverAsync = true;
#endif
        _seekableStream.Position = Header.DataStartPosition ?? 0;
        return new ValueTask<Stream?>(
            new TarReadOnlySubStream(_seekableStream, Header.Size, useSyncOverAsync)
        );
    }
    return new ValueTask<Stream?>(Header.PackedStream.NotNull());
}

internal override Stream? GetRawStream() => null;
}

View File

@@ -13,12 +13,17 @@ internal static partial class TarHeaderFactory
IArchiveEncoding archiveEncoding
)
{
#if NET8_0_OR_GREATER
    await using var reader = new AsyncBinaryReader(stream, leaveOpen: true);
#else
    using var reader = new AsyncBinaryReader(stream, leaveOpen: true);
#endif
    while (true)
    {
        TarHeader? header = null;
        try
        {
            header = new TarHeader(archiveEncoding);
            if (!await header.ReadAsync(reader))
            {
@@ -36,7 +41,15 @@ internal static partial class TarHeaderFactory
                break;
            case StreamingMode.Streaming:
            {
                var useSyncOverAsync = false;
#if LEGACY_DOTNET
                useSyncOverAsync = true;
#endif
                header.PackedStream = new TarReadOnlySubStream(
                    stream,
                    header.Size,
                    useSyncOverAsync
                );
            }
            break;
            default:

View File

@@ -37,7 +37,11 @@ internal static partial class TarHeaderFactory
            break;
        case StreamingMode.Streaming:
        {
            header.PackedStream = new TarReadOnlySubStream(
                stream,
                header.Size,
                false
            );
        }
        break;
        default:

View File

@@ -1,40 +1,21 @@
using System;
using System.IO;
using SharpCompress.IO;
namespace SharpCompress.Common.Tar;
internal class TarReadOnlySubStream : SharpCompressStream, IStreamStack
internal class TarReadOnlySubStream : Stream
{
#if DEBUG_STREAMS
long IStreamStack.InstanceId { get; set; }
#endif
Stream IStreamStack.BaseStream() => base.Stream;
int IStreamStack.BufferSize
{
get => 0;
set { }
}
int IStreamStack.BufferPosition
{
get => 0;
set { }
}
void IStreamStack.SetPosition(long position) { }
private readonly Stream _stream;
private readonly bool _useSyncOverAsyncDispose;
private bool _isDisposed;
private long _amountRead;
public TarReadOnlySubStream(Stream stream, long bytesToRead)
: base(stream, leaveOpen: true, throwOnDispose: false)
public TarReadOnlySubStream(Stream stream, long bytesToRead, bool useSyncOverAsyncDispose)
{
_stream = stream;
_useSyncOverAsyncDispose = useSyncOverAsyncDispose;
BytesLeftToRead = bytesToRead;
#if DEBUG_STREAMS
this.DebugConstruct(typeof(TarReadOnlySubStream));
#endif
}
protected override void Dispose(bool disposing)
@@ -45,13 +26,10 @@ internal class TarReadOnlySubStream : SharpCompressStream, IStreamStack
}
_isDisposed = true;
#if DEBUG_STREAMS
this.DebugDispose(typeof(TarReadOnlySubStream));
#endif
if (disposing)
{
// Ensure we read all remaining blocks for this entry.
Stream.Skip(BytesLeftToRead);
_stream.Skip(BytesLeftToRead);
_amountRead += BytesLeftToRead;
// If the last block wasn't a full 512 bytes, skip the remaining padding bytes.
@@ -59,11 +37,16 @@ internal class TarReadOnlySubStream : SharpCompressStream, IStreamStack
if (bytesInLastBlock != 0)
{
Stream.Skip(512 - bytesInLastBlock);
if (Utility.UseSyncOverAsyncDispose())
{
_stream.SkipAsync(512 - bytesInLastBlock).GetAwaiter().GetResult();
}
else
{
_stream.Skip(512 - bytesInLastBlock);
}
}
}
base.Dispose(disposing);
}
#if !LEGACY_DOTNET
@@ -75,11 +58,8 @@ internal class TarReadOnlySubStream : SharpCompressStream, IStreamStack
}
_isDisposed = true;
#if DEBUG_STREAMS
this.DebugDispose(typeof(TarReadOnlySubStream));
#endif
// Ensure we read all remaining blocks for this entry.
await Stream.SkipAsync(BytesLeftToRead).ConfigureAwait(false);
await _stream.SkipAsync(BytesLeftToRead).ConfigureAwait(false);
_amountRead += BytesLeftToRead;
// If the last block wasn't a full 512 bytes, skip the remaining padding bytes.
@@ -87,11 +67,9 @@ internal class TarReadOnlySubStream : SharpCompressStream, IStreamStack
if (bytesInLastBlock != 0)
{
await Stream.SkipAsync(512 - bytesInLastBlock).ConfigureAwait(false);
await _stream.SkipAsync(512 - bytesInLastBlock).ConfigureAwait(false);
}
// Call base Dispose instead of base DisposeAsync to avoid double disposal
base.Dispose(true);
GC.SuppressFinalize(this);
}
#endif
@@ -124,7 +102,7 @@ internal class TarReadOnlySubStream : SharpCompressStream, IStreamStack
{
count = (int)BytesLeftToRead;
}
var read = Stream.Read(buffer, offset, count);
var read = _stream.Read(buffer, offset, count);
if (read > 0)
{
BytesLeftToRead -= read;
@@ -139,7 +117,7 @@ internal class TarReadOnlySubStream : SharpCompressStream, IStreamStack
{
return -1;
}
var value = Stream.ReadByte();
var value = _stream.ReadByte();
if (value != -1)
{
--BytesLeftToRead;
@@ -159,7 +137,7 @@ internal class TarReadOnlySubStream : SharpCompressStream, IStreamStack
{
count = (int)BytesLeftToRead;
}
var read = await Stream
var read = await _stream
.ReadAsync(buffer, offset, count, cancellationToken)
.ConfigureAwait(false);
if (read > 0)
@@ -180,7 +158,7 @@ internal class TarReadOnlySubStream : SharpCompressStream, IStreamStack
{
buffer = buffer.Slice(0, (int)BytesLeftToRead);
}
var read = await Stream.ReadAsync(buffer, cancellationToken).ConfigureAwait(false);
var read = await _stream.ReadAsync(buffer, cancellationToken).ConfigureAwait(false);
if (read > 0)
{
BytesLeftToRead -= read;
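
The skip-on-dispose logic above falls out of tar's fixed 512-byte block size: after the entry's payload, the reader must consume the padding up to the next block boundary. A small illustrative helper (not library code) showing the arithmetic:

static long TarPadding(long entrySize)
{
    // Entries occupy whole 512-byte blocks; a partial final block is
    // padded up to the boundary.
    var remainder = entrySize % 512;
    return remainder == 0 ? 0 : 512 - remainder;
}
// Example: a 700-byte entry occupies 700 + TarPadding(700) == 1024 bytes.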

View File

@@ -16,14 +16,15 @@ public abstract partial class Volume : IVolume, IAsyncDisposable
Index = index;
ReaderOptions = readerOptions ?? new ReaderOptions();
_baseStream = stream;
// Only rewind if it's a buffered SharpCompressStream (not passthrough)
if (stream is SharpCompressStream ss && !ss.IsPassthrough)
{
ss.Rewind();
}
if (ReaderOptions.LeaveStreamOpen)
{
stream = new NonDisposingStream(stream);
}
if (stream is IStreamStack ss)
{
ss.SetBuffer(ReaderOptions.BufferSize, true);
stream = SharpCompressStream.CreateNonDisposing(stream);
}
_actualStream = stream;

View File

@@ -1,5 +1,6 @@
using System.IO;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Common.Zip.Headers;

View File

@@ -2,6 +2,7 @@ using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.IO;
namespace SharpCompress.Common.Zip.Headers;

View File

@@ -1,5 +1,6 @@
using System.IO;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Common.Zip.Headers;

View File

@@ -1,6 +1,7 @@
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Common.Zip.Headers;

View File

@@ -1,6 +1,7 @@
using System;
using System.IO;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Common.Zip.Headers;

View File

@@ -1,5 +1,6 @@
using System.IO;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Common.Zip.Headers;

View File

@@ -1,5 +1,6 @@
using System.IO;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Common.Zip.Headers;

View File

@@ -1,5 +1,6 @@
using System.IO;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Common.Zip.Headers;

View File

@@ -0,0 +1,132 @@
using System;
using System.Buffers;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip;
internal partial class PkwareTraditionalCryptoStream
{
public override async Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
if (_mode == CryptoMode.Encrypt)
{
throw new NotSupportedException("This stream does not encrypt via Read()");
}
if (buffer is null)
{
throw new ArgumentNullException(nameof(buffer));
}
var temp = new byte[count];
var readBytes = await _stream
.ReadAsync(temp, 0, count, cancellationToken)
.ConfigureAwait(false);
var decrypted = _encryptor.Decrypt(temp, readBytes);
Buffer.BlockCopy(decrypted, 0, buffer, offset, readBytes);
return readBytes;
}
#if !LEGACY_DOTNET
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
)
{
if (_mode == CryptoMode.Encrypt)
{
throw new NotSupportedException("This stream does not encrypt via Read()");
}
byte[] temp = ArrayPool<byte>.Shared.Rent(buffer.Length);
try
{
int readBytes = await _stream
.ReadAsync(temp.AsMemory(0, buffer.Length), cancellationToken)
.ConfigureAwait(false);
var decrypted = _encryptor.Decrypt(temp, readBytes);
decrypted.AsMemory(0, readBytes).CopyTo(buffer);
return readBytes;
}
finally
{
ArrayPool<byte>.Shared.Return(temp);
}
}
#endif
public override async Task WriteAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
if (_mode == CryptoMode.Decrypt)
{
throw new NotSupportedException("This stream does not Decrypt via Write()");
}
if (count == 0)
{
return;
}
byte[] plaintext;
if (offset != 0)
{
plaintext = new byte[count];
Buffer.BlockCopy(buffer, offset, plaintext, 0, count);
}
else
{
plaintext = buffer;
}
var encrypted = _encryptor.Encrypt(plaintext, count);
await _stream
.WriteAsync(encrypted, 0, encrypted.Length, cancellationToken)
.ConfigureAwait(false);
}
#if !LEGACY_DOTNET
public override async ValueTask WriteAsync(
ReadOnlyMemory<byte> buffer,
CancellationToken cancellationToken = default
)
{
if (_mode == CryptoMode.Decrypt)
{
throw new NotSupportedException("This stream does not Decrypt via Write()");
}
if (buffer.Length == 0)
{
return;
}
// Always copy to a fresh array: the original overlap check compared the
// buffer's span against itself (true for any non-empty input), so the
// copy branch was the only one ever taken.
byte[] plaintext = buffer.ToArray();
var encrypted = _encryptor.Encrypt(plaintext, buffer.Length);
await _stream
.WriteAsync(encrypted.AsMemory(0, encrypted.Length), cancellationToken)
.ConfigureAwait(false);
}
#endif
}
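
Both async read overloads above follow the same rent/try/finally discipline around ArrayPool, so a decode failure cannot leak the scratch buffer. A self-contained sketch of that shape (PooledRead is illustrative, not a library type):

using System;
using System.Buffers;
using System.IO;

internal static class PooledRead
{
    // Rent a scratch buffer, use only the first `destination.Length` bytes
    // (Rent may return a larger array), and always return it to the pool.
    public static int ReadWithPool(Stream source, Span<byte> destination)
    {
        byte[] temp = ArrayPool<byte>.Shared.Rent(destination.Length);
        try
        {
            int read = source.Read(temp, 0, destination.Length);
            temp.AsSpan(0, read).CopyTo(destination);
            return read;
        }
        finally
        {
            ArrayPool<byte>.Shared.Return(temp);
        }
    }
}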

View File

@@ -1,6 +1,5 @@
using System;
using System.IO;
using SharpCompress.IO;
namespace SharpCompress.Common.Zip;
@@ -10,28 +9,8 @@ internal enum CryptoMode
Decrypt,
}
internal class PkwareTraditionalCryptoStream : Stream, IStreamStack
internal partial class PkwareTraditionalCryptoStream : Stream
{
#if DEBUG_STREAMS
long IStreamStack.InstanceId { get; set; }
#endif
int IStreamStack.DefaultBufferSize { get; set; }
Stream IStreamStack.BaseStream() => _stream;
int IStreamStack.BufferSize
{
get => 0;
set { return; }
}
int IStreamStack.BufferPosition
{
get => 0;
set { return; }
}
void IStreamStack.SetPosition(long position) { }
private readonly PkwareTraditionalEncryptionData _encryptor;
private readonly CryptoMode _mode;
private readonly Stream _stream;
@@ -46,10 +25,6 @@ internal class PkwareTraditionalCryptoStream : Stream, IStreamStack
_encryptor = encryptor;
_stream = stream;
_mode = mode;
#if DEBUG_STREAMS
this.DebugConstruct(typeof(PkwareTraditionalCryptoStream));
#endif
}
public override bool CanRead => (_mode == CryptoMode.Decrypt);
@@ -125,9 +100,6 @@ internal class PkwareTraditionalCryptoStream : Stream, IStreamStack
return;
}
_isDisposed = true;
#if DEBUG_STREAMS
this.DebugDispose(typeof(PkwareTraditionalCryptoStream));
#endif
base.Dispose(disposing);
_stream.Dispose();
}

View File

@@ -4,6 +4,7 @@ using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
namespace SharpCompress.Common.Zip;
@@ -11,7 +12,11 @@ internal sealed partial class SeekableZipHeaderFactory
{
internal async IAsyncEnumerable<ZipHeader> ReadSeekableHeaderAsync(Stream stream)
{
var reader = new AsyncBinaryReader(stream);
#if NET8_0_OR_GREATER
await using var reader = new AsyncBinaryReader(stream, leaveOpen: true);
#else
using var reader = new AsyncBinaryReader(stream, leaveOpen: true);
#endif
await SeekBackToHeaderAsync(stream, reader);
@@ -127,7 +132,11 @@ internal sealed partial class SeekableZipHeaderFactory
)
{
stream.Seek(directoryEntryHeader.RelativeOffsetOfEntryHeader, SeekOrigin.Begin);
var reader = new AsyncBinaryReader(stream);
#if NET8_0_OR_GREATER
await using var reader = new AsyncBinaryReader(stream, leaveOpen: true);
#else
using var reader = new AsyncBinaryReader(stream, leaveOpen: true);
#endif
var signature = await reader.ReadUInt32Async();
if (await ReadHeader(signature, reader, _zip64) is not LocalEntryHeader localEntryHeader)
{

View File

@@ -24,7 +24,7 @@ internal sealed partial class StreamingZipFilePart
.ConfigureAwait(false);
if (LeaveStreamOpen)
{
return SharpCompressStream.Create(_decompressionStream, leaveOpen: true);
return SharpCompressStream.CreateNonDisposing(_decompressionStream);
}
return _decompressionStream;
}

View File

@@ -26,16 +26,16 @@ internal sealed partial class StreamingZipFilePart : ZipFilePart
);
if (LeaveStreamOpen)
{
return new NonDisposingStream(_decompressionStream);
return SharpCompressStream.CreateNonDisposing(_decompressionStream);
}
return _decompressionStream;
}
internal BinaryReader FixStreamedFileLocation(ref SharpCompressStream rewindableStream)
internal BinaryReader FixStreamedFileLocation(ref Stream stream)
{
if (Header.IsDirectory)
{
return new BinaryReader(rewindableStream);
return new BinaryReader(stream, System.Text.Encoding.Default, leaveOpen: true);
}
if (Header.HasData && !Skipped)
@@ -49,12 +49,12 @@ internal sealed partial class StreamingZipFilePart : ZipFilePart
if (_decompressionStream is DeflateStream deflateStream)
{
((IStreamStack)rewindableStream).StackSeek(0);
stream.Position = 0;
}
Skipped = true;
}
var reader = new BinaryReader(rewindableStream);
var reader = new BinaryReader(stream, System.Text.Encoding.Default, leaveOpen: true);
_decompressionStream = null;
return reader;
}
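
The BinaryReader changes above all add leaveOpen: true, since the single-argument constructor disposes the underlying stream together with the reader. A small sketch of the safe shape:

using System.IO;
using System.Text;

static uint ReadSignatureLeaveOpen(Stream shared)
{
    // leaveOpen: true keeps `shared` usable after the reader is disposed.
    using var reader = new BinaryReader(shared, Encoding.Default, leaveOpen: true);
    return reader.ReadUInt32();
}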

View File

@@ -60,7 +60,7 @@ internal sealed partial class StreamingZipHeaderFactory
private sealed class StreamHeaderAsyncEnumerator : IAsyncEnumerator<ZipHeader>, IDisposable
{
private readonly StreamingZipHeaderFactory _headerFactory;
private readonly RewindableStream _rewindableStream;
private readonly SharpCompressStream _sharpCompressStream;
private readonly AsyncBinaryReader _reader;
private readonly CancellationToken _cancellationToken;
private bool _completed;
@@ -72,10 +72,10 @@ internal sealed partial class StreamingZipHeaderFactory
)
{
_headerFactory = headerFactory;
// Use EnsureSeekable to avoid double-wrapping if stream is already a RewindableStream,
// Use EnsureSeekable to avoid double-wrapping if stream is already a SharpCompressStream,
// and to preserve seekability for DataDescriptorStream which needs to seek backward
_rewindableStream = RewindableStream.EnsureSeekable(stream);
_reader = new AsyncBinaryReader(_rewindableStream, leaveOpen: true);
_sharpCompressStream = SharpCompressStream.EnsureSeekable(stream);
_reader = new AsyncBinaryReader(_sharpCompressStream, leaveOpen: true);
_cancellationToken = cancellationToken;
}
@@ -110,7 +110,9 @@ internal sealed partial class StreamingZipHeaderFactory
continue;
}
var pos = _rewindableStream.CanSeek ? (long?)_rewindableStream.Position : null;
var pos = _sharpCompressStream.CanSeek
? (long?)_sharpCompressStream.Position
: null;
var crc = await _reader
.ReadUInt32Async(_cancellationToken)
@@ -178,7 +180,9 @@ internal sealed partial class StreamingZipHeaderFactory
continue;
}
var pos = _rewindableStream.CanSeek ? (long?)_rewindableStream.Position : null;
var pos = _sharpCompressStream.CanSeek
? (long?)_sharpCompressStream.Position
: null;
headerBytes = await _reader
.ReadUInt32Async(_cancellationToken)
@@ -236,8 +240,13 @@ internal sealed partial class StreamingZipHeaderFactory
{
lastEntryHeader.DataStartPosition = pos - lastEntryHeader.CompressedSize;
// For SeekableSharpCompressStream, seek back to just after the local header signature.
// Plain SharpCompressStream cannot seek to arbitrary positions, so we skip this.
// 4 = First 4 bytes of the entry header (i.e. 50 4B 03 04)
_rewindableStream.Position = pos.Value + 4;
if (_sharpCompressStream is SeekableSharpCompressStream)
{
_sharpCompressStream.Position = pos.Value + 4;
}
}
}
else
@@ -284,28 +293,14 @@ internal sealed partial class StreamingZipHeaderFactory
else if (localHeader.Flags.HasFlag(HeaderFlags.UsePostDataDescriptor))
{
// Peek ahead to check if next data is a header or file data.
// For SeekableRewindableStream, use direct position save/restore to avoid
// interfering with any recording state set by the caller (e.g., ReaderFactory).
// Plain RewindableStream can use StartRecording/Rewind safely since it was
// created fresh by EnsureSeekable and isn't shared with the caller.
if (_rewindableStream is SeekableRewindableStream)
{
var savedPosition = _rewindableStream.Position;
var nextHeaderBytes = await _reader
.ReadUInt32Async(_cancellationToken)
.ConfigureAwait(false);
_rewindableStream.Position = savedPosition;
header.HasData = !IsHeader(nextHeaderBytes);
}
else
{
_rewindableStream.StartRecording();
var nextHeaderBytes = await _reader
.ReadUInt32Async(_cancellationToken)
.ConfigureAwait(false);
_rewindableStream.Rewind(true);
header.HasData = !IsHeader(nextHeaderBytes);
}
// Use the IStreamStack.Rewind mechanism to give back the peeked bytes.
var nextHeaderBytes = await _reader
.ReadUInt32Async(_cancellationToken)
.ConfigureAwait(false);
_sharpCompressStream.Rewind(sizeof(uint));
// Check if next data is PostDataDescriptor, streamed file with 0 length
header.HasData = !IsHeader(nextHeaderBytes);
}
else // We are not streaming and compressed size is 0, we have no data
{
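
The peek above reads the next four-byte signature and immediately gives it back with Rewind(sizeof(uint)). A hedged sketch of the same peek against a plain seekable stream; the library's Rewind mechanism exists precisely so the equivalent also works on buffered, non-seekable input:

using System;
using System.IO;

static uint PeekSignature(Stream seekable)
{
    var start = seekable.Position;
    Span<byte> four = stackalloc byte[4];
    var total = 0;
    while (total < 4)
    {
        var n = seekable.Read(four.Slice(total));
        if (n == 0)
        {
            break; // truncated input; caller decides how to handle it
        }
        total += n;
    }
    seekable.Position = start; // "rewind": give the peeked bytes back
    return (uint)(four[0] | (four[1] << 8) | (four[2] << 16) | (four[3] << 24));
}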

View File

@@ -2,15 +2,12 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
namespace SharpCompress.Common.Zip;
internal sealed partial class StreamingZipHeaderFactory : ZipHeaderFactory
internal partial class StreamingZipHeaderFactory : ZipHeaderFactory
{
private IEnumerable<ZipEntry>? _entries;
@@ -23,185 +20,192 @@ internal sealed partial class StreamingZipHeaderFactory : ZipHeaderFactory
internal IEnumerable<ZipHeader> ReadStreamHeader(Stream stream)
{
// Use EnsureSeekable to avoid double-wrapping if stream is already a RewindableStream,
// Use EnsureSeekable to avoid double-wrapping if stream is already a SharpCompressStream,
// and to preserve seekability for DataDescriptorStream which needs to seek backward
var rewindableStream = RewindableStream.EnsureSeekable(stream);
while (true)
var sharpCompressStream = SharpCompressStream.EnsureSeekable(stream);
var reader = new BinaryReader(
sharpCompressStream,
System.Text.Encoding.Default,
leaveOpen: true
);
try
{
var reader = new BinaryReader(rewindableStream);
uint headerBytes = 0;
if (
_lastEntryHeader != null
&& FlagUtility.HasFlag(_lastEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor)
)
while (true)
{
if (_lastEntryHeader.Part is null)
uint headerBytes = 0;
if (
_lastEntryHeader != null
&& FlagUtility.HasFlag(
_lastEntryHeader.Flags,
HeaderFlags.UsePostDataDescriptor
)
)
{
continue;
}
// removed requirement for FixStreamedFileLocation()
var pos = rewindableStream.CanSeek ? (long?)rewindableStream.Position : null;
var crc = reader.ReadUInt32();
if (crc == POST_DATA_DESCRIPTOR)
{
crc = reader.ReadUInt32();
}
_lastEntryHeader.Crc = crc;
//attempt 32bit read
ulong compSize = reader.ReadUInt32();
ulong uncompSize = reader.ReadUInt32();
headerBytes = reader.ReadUInt32();
//check for zip64 sentinel or unexpected header
bool isSentinel = compSize == 0xFFFFFFFF || uncompSize == 0xFFFFFFFF;
bool isHeader = headerBytes == 0x04034b50 || headerBytes == 0x02014b50;
if (!isHeader && !isSentinel)
{
//reshuffle into 64-bit values
compSize = (uncompSize << 32) | compSize;
uncompSize = ((ulong)headerBytes << 32) | reader.ReadUInt32();
headerBytes = reader.ReadUInt32();
}
else if (isSentinel)
{
//standards-compliant zip64 descriptor
compSize = reader.ReadUInt64();
uncompSize = reader.ReadUInt64();
}
_lastEntryHeader.CompressedSize = (long)compSize;
_lastEntryHeader.UncompressedSize = (long)uncompSize;
if (pos.HasValue)
{
_lastEntryHeader.DataStartPosition = pos - _lastEntryHeader.CompressedSize;
}
}
else if (_lastEntryHeader != null && _lastEntryHeader.IsZip64)
{
if (_lastEntryHeader.Part is null)
{
continue;
}
//reader = ((StreamingZipFilePart)_lastEntryHeader.Part).FixStreamedFileLocation(
// ref rewindableStream
//);
var pos = rewindableStream.CanSeek ? (long?)rewindableStream.Position : null;
headerBytes = reader.ReadUInt32();
var version = reader.ReadUInt16();
var flags = (HeaderFlags)reader.ReadUInt16();
var compressionMethod = (ZipCompressionMethod)reader.ReadUInt16();
var lastModifiedDate = reader.ReadUInt16();
var lastModifiedTime = reader.ReadUInt16();
var crc = reader.ReadUInt32();
if (crc == POST_DATA_DESCRIPTOR)
{
crc = reader.ReadUInt32();
}
_lastEntryHeader.Crc = crc;
// The DataDescriptor can be either 64bit or 32bit
var compressed_size = reader.ReadUInt32();
var uncompressed_size = reader.ReadUInt32();
// Check if we have header or 64bit DataDescriptor
var test_header = !(headerBytes == 0x04034b50 || headerBytes == 0x02014b50);
var test_64bit = ((long)uncompressed_size << 32) | compressed_size;
if (test_64bit == _lastEntryHeader.CompressedSize && test_header)
{
_lastEntryHeader.UncompressedSize =
((long)reader.ReadUInt32() << 32) | headerBytes;
headerBytes = reader.ReadUInt32();
}
else
{
_lastEntryHeader.UncompressedSize = uncompressed_size;
}
if (pos.HasValue)
{
_lastEntryHeader.DataStartPosition = pos - _lastEntryHeader.CompressedSize;
// 4 = First 4 bytes of the entry header (i.e. 50 4B 03 04)
rewindableStream.Position = pos.Value + 4;
}
}
else
{
headerBytes = reader.ReadUInt32();
}
_lastEntryHeader = null;
var header = ReadHeader(headerBytes, reader);
if (header is null)
{
yield break;
}
//entry could be zero bytes so we need to know that.
if (header.ZipHeaderType == ZipHeaderType.LocalEntry)
{
var local_header = ((LocalEntryHeader)header);
var dir_header = _entries?.FirstOrDefault(entry =>
entry.Key == local_header.Name
&& local_header.CompressedSize == 0
&& local_header.UncompressedSize == 0
&& local_header.Crc == 0
&& local_header.IsDirectory == false
);
if (dir_header != null)
{
local_header.UncompressedSize = dir_header.Size;
local_header.CompressedSize = dir_header.CompressedSize;
local_header.Crc = (uint)dir_header.Crc;
}
// If we have CompressedSize, there is data to be read
if (local_header.CompressedSize > 0)
{
header.HasData = true;
} // Check if zip is streaming ( Length is 0 and is declared in PostDataDescriptor )
else if (local_header.Flags.HasFlag(HeaderFlags.UsePostDataDescriptor))
{
// Peek ahead to check if next data is a header or file data.
// For SeekableRewindableStream, use direct position save/restore to avoid
// interfering with any recording state set by the caller (e.g., ReaderFactory).
// Plain RewindableStream can use StartRecording/Rewind safely since it was
// created fresh by EnsureSeekable and isn't shared with the caller.
if (rewindableStream is SeekableRewindableStream)
if (_lastEntryHeader.Part is null)
{
var savedPosition = rewindableStream.Position;
var nextHeaderBytes = reader.ReadUInt32();
rewindableStream.Position = savedPosition;
header.HasData = !IsHeader(nextHeaderBytes);
continue;
}
// removed requirement for FixStreamedFileLocation()
var pos = sharpCompressStream.CanSeek
? (long?)sharpCompressStream.Position
: null;
var crc = reader.ReadUInt32();
if (crc == POST_DATA_DESCRIPTOR)
{
crc = reader.ReadUInt32();
}
_lastEntryHeader.Crc = crc;
//attempt 32bit read
ulong compSize = reader.ReadUInt32();
ulong uncompSize = reader.ReadUInt32();
headerBytes = reader.ReadUInt32();
//check for zip64 sentinel or unexpected header
bool isSentinel = compSize == 0xFFFFFFFF || uncompSize == 0xFFFFFFFF;
bool isHeader = headerBytes == 0x04034b50 || headerBytes == 0x02014b50;
if (!isHeader && !isSentinel)
{
//reshuffle into 64-bit values
compSize = (uncompSize << 32) | compSize;
uncompSize = ((ulong)headerBytes << 32) | reader.ReadUInt32();
headerBytes = reader.ReadUInt32();
}
else if (isSentinel)
{
//standards-compliant zip64 descriptor
compSize = reader.ReadUInt64();
uncompSize = reader.ReadUInt64();
}
_lastEntryHeader.CompressedSize = (long)compSize;
_lastEntryHeader.UncompressedSize = (long)uncompSize;
if (pos.HasValue)
{
_lastEntryHeader.DataStartPosition = pos - _lastEntryHeader.CompressedSize;
}
}
else if (_lastEntryHeader != null && _lastEntryHeader.IsZip64)
{
if (_lastEntryHeader.Part is null)
{
continue;
}
//reader = ((StreamingZipFilePart)_lastEntryHeader.Part).FixStreamedFileLocation(
// ref sharpCompressStream
//);
var pos = sharpCompressStream.CanSeek
? (long?)sharpCompressStream.Position
: null;
headerBytes = reader.ReadUInt32();
var version = reader.ReadUInt16();
var flags = (HeaderFlags)reader.ReadUInt16();
var compressionMethod = (ZipCompressionMethod)reader.ReadUInt16();
var lastModifiedDate = reader.ReadUInt16();
var lastModifiedTime = reader.ReadUInt16();
var crc = reader.ReadUInt32();
if (crc == POST_DATA_DESCRIPTOR)
{
crc = reader.ReadUInt32();
}
_lastEntryHeader.Crc = crc;
// The DataDescriptor can be either 64bit or 32bit
var compressed_size = reader.ReadUInt32();
var uncompressed_size = reader.ReadUInt32();
// Check if we have header or 64bit DataDescriptor
var test_header = !(headerBytes == 0x04034b50 || headerBytes == 0x02014b50);
var test_64bit = ((long)uncompressed_size << 32) | compressed_size;
if (test_64bit == _lastEntryHeader.CompressedSize && test_header)
{
_lastEntryHeader.UncompressedSize =
((long)reader.ReadUInt32() << 32) | headerBytes;
headerBytes = reader.ReadUInt32();
}
else
{
rewindableStream.StartRecording();
var nextHeaderBytes = reader.ReadUInt32();
rewindableStream.Rewind(true);
header.HasData = !IsHeader(nextHeaderBytes);
_lastEntryHeader.UncompressedSize = uncompressed_size;
}
if (pos.HasValue)
{
_lastEntryHeader.DataStartPosition = pos - _lastEntryHeader.CompressedSize;
// 4 = First 4 bytes of the entry header (i.e. 50 4B 03 04)
sharpCompressStream.Position = pos.Value + 4;
}
}
else // We are not streaming and compressed size is 0, we have no data
else
{
header.HasData = false;
headerBytes = reader.ReadUInt32();
}
_lastEntryHeader = null;
var header = ReadHeader(headerBytes, reader);
if (header is null)
{
yield break;
}
//entry could be zero bytes so we need to know that.
if (header.ZipHeaderType == ZipHeaderType.LocalEntry)
{
var local_header = ((LocalEntryHeader)header);
var dir_header = _entries?.FirstOrDefault(entry =>
entry.Key == local_header.Name
&& local_header.CompressedSize == 0
&& local_header.UncompressedSize == 0
&& local_header.Crc == 0
&& local_header.IsDirectory == false
);
if (dir_header != null)
{
local_header.UncompressedSize = dir_header.Size;
local_header.CompressedSize = dir_header.CompressedSize;
local_header.Crc = (uint)dir_header.Crc;
}
// If we have CompressedSize, there is data to be read
if (local_header.CompressedSize > 0)
{
header.HasData = true;
} // Check if zip is streaming ( Length is 0 and is declared in PostDataDescriptor )
else if (local_header.Flags.HasFlag(HeaderFlags.UsePostDataDescriptor))
{
// Peek ahead to check if next data is a header or file data.
// Use the IStreamStack.Rewind mechanism to give back the peeked bytes.
var nextHeaderBytes = reader.ReadUInt32();
sharpCompressStream.Rewind(sizeof(uint));
// Check if next data is PostDataDescriptor, streamed file with 0 length
header.HasData = !IsHeader(nextHeaderBytes);
}
else // We are not streaming and compressed size is 0, we have no data
{
header.HasData = false;
}
}
yield return header;
}
yield return header;
}
finally
{
reader.Dispose();
}
}
}
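
The descriptor parsing above handles a zip quirk: the post-data descriptor may carry 32-bit or 64-bit sizes, and not every writer uses the 0xFFFFFFFF sentinel. The classification reduces to the check sketched here (illustrative helper; the signatures tested are the local and central header magics):

static bool LooksLikeZip64Descriptor(uint compLow, uint uncompLow, uint nextDword)
{
    // A standards-compliant zip64 descriptor flags itself with sentinels;
    // otherwise, if the dword where the next header signature should sit
    // is neither a local (0x04034b50) nor a central (0x02014b50) header,
    // the two size fields must have been the low halves of 64-bit values.
    var isSentinel = compLow == 0xFFFFFFFF || uncompLow == 0xFFFFFFFF;
    var nextIsHeader = nextDword == 0x04034b50 || nextDword == 0x02014b50;
    return isSentinel || !nextIsHeader;
}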

View File

@@ -0,0 +1,139 @@
using System;
using System.Buffers;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip;
internal partial class WinzipAesCryptoStream
{
#if !LEGACY_DOTNET
public override async ValueTask DisposeAsync()
{
if (_isDisposed)
{
return;
}
_isDisposed = true;
// Read out last 10 auth bytes asynchronously
byte[] authBytes = ArrayPool<byte>.Shared.Rent(10);
try
{
await _stream.ReadFullyAsync(authBytes, 0, 10).ConfigureAwait(false);
}
finally
{
ArrayPool<byte>.Shared.Return(authBytes);
await _stream.DisposeAsync().ConfigureAwait(false);
}
}
#endif
public override async Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
if (_totalBytesLeftToRead == 0)
{
return 0;
}
var bytesToRead = count;
if (count > _totalBytesLeftToRead)
{
bytesToRead = (int)_totalBytesLeftToRead;
}
var read = await _stream
.ReadAsync(buffer, offset, bytesToRead, cancellationToken)
.ConfigureAwait(false);
_totalBytesLeftToRead -= read;
ReadTransformBlocks(buffer, offset, read);
return read;
}
#if !LEGACY_DOTNET
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
)
{
if (_totalBytesLeftToRead == 0)
{
return 0;
}
var bytesToRead = buffer.Length;
if (buffer.Length > _totalBytesLeftToRead)
{
bytesToRead = (int)_totalBytesLeftToRead;
}
var read = await _stream
.ReadAsync(buffer.Slice(0, bytesToRead), cancellationToken)
.ConfigureAwait(false);
_totalBytesLeftToRead -= read;
ReadTransformBlocks(buffer.Span, read);
return read;
}
private void ReadTransformBlocks(Span<byte> buffer, int count)
{
var posn = 0;
var last = count;
while (posn < buffer.Length && posn < last)
{
var n = ReadTransformOneBlock(buffer, posn, last);
posn += n;
}
}
private int ReadTransformOneBlock(Span<byte> buffer, int offset, int last)
{
if (_isFinalBlock)
{
throw new InvalidOperationException();
}
var bytesRemaining = last - offset;
var bytesToRead =
(bytesRemaining > BLOCK_SIZE_IN_BYTES) ? BLOCK_SIZE_IN_BYTES : bytesRemaining;
// update the counter
System.Buffers.Binary.BinaryPrimitives.WriteInt32LittleEndian(_counter, _nonce++);
// Determine if this is the final block
if ((bytesToRead == bytesRemaining) && (_totalBytesLeftToRead == 0))
{
_counterOut = _transform.TransformFinalBlock(_counter, 0, BLOCK_SIZE_IN_BYTES);
_isFinalBlock = true;
}
else
{
_transform.TransformBlock(
_counter,
0, // offset
BLOCK_SIZE_IN_BYTES,
_counterOut,
0
); // offset
}
XorInPlace(buffer, offset, bytesToRead);
return bytesToRead;
}
private void XorInPlace(Span<byte> buffer, int offset, int count)
{
for (var i = 0; i < count; i++)
{
buffer[offset + i] = (byte)(_counterOut[i] ^ buffer[offset + i]);
}
}
#endif
}
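
ReadTransformOneBlock above implements AES in CTR mode: the cipher encrypts a little-endian counter block to produce a keystream, which is XORed over the data, and encryption and decryption are the same operation. A condensed sketch under that reading (not the library's exact member layout):

using System;
using System.Security.Cryptography;

internal static class CtrSketch
{
    // Encrypt the counter block, XOR the keystream with the data in place,
    // and bump the counter once per 16-byte block.
    public static void XorKeystream(ICryptoTransform aesEcb, byte[] data, ref int nonce)
    {
        var counter = new byte[16];
        var keystream = new byte[16];
        for (var pos = 0; pos < data.Length; pos += 16)
        {
            System.Buffers.Binary.BinaryPrimitives.WriteInt32LittleEndian(counter, nonce++);
            aesEcb.TransformBlock(counter, 0, 16, keystream, 0);
            var blockLen = Math.Min(16, data.Length - pos);
            for (var i = 0; i < blockLen; i++)
            {
                data[pos + i] ^= keystream[i];
            }
        }
    }
}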

View File

@@ -1,33 +1,14 @@
using System;
using System.Buffers;
using System.Buffers.Binary;
using System.IO;
using System.Security.Cryptography;
using SharpCompress.IO;
using System.Threading.Tasks;
namespace SharpCompress.Common.Zip;
internal class WinzipAesCryptoStream : Stream, IStreamStack
internal partial class WinzipAesCryptoStream : Stream
{
#if DEBUG_STREAMS
long IStreamStack.InstanceId { get; set; }
#endif
int IStreamStack.DefaultBufferSize { get; set; }
Stream IStreamStack.BaseStream() => _stream;
int IStreamStack.BufferSize
{
get => 0;
set { }
}
int IStreamStack.BufferPosition
{
get => 0;
set { }
}
void IStreamStack.SetPosition(long position) { }
private const int BLOCK_SIZE_IN_BYTES = 16;
private readonly SymmetricAlgorithm _cipher;
private readonly byte[] _counter = new byte[BLOCK_SIZE_IN_BYTES];
@@ -48,10 +29,6 @@ internal class WinzipAesCryptoStream : Stream, IStreamStack
_stream = stream;
_totalBytesLeftToRead = length;
#if DEBUG_STREAMS
this.DebugConstruct(typeof(WinzipAesCryptoStream));
#endif
_cipher = CreateCipher(winzipAesEncryptionData);
var iv = new byte[BLOCK_SIZE_IN_BYTES];
@@ -89,18 +66,36 @@ internal class WinzipAesCryptoStream : Stream, IStreamStack
return;
}
_isDisposed = true;
#if DEBUG_STREAMS
this.DebugDispose(typeof(WinzipAesCryptoStream));
#endif
if (disposing)
{
//read out last 10 auth bytes
Span<byte> ten = stackalloc byte[10];
_stream.ReadFully(ten);
// Read out last 10 auth bytes - catch exceptions for async-only streams
if (Utility.UseSyncOverAsyncDispose())
{
var ten = ArrayPool<byte>.Shared.Rent(10);
try
{
_stream.ReadFullyAsync(ten, 0, 10).GetAwaiter().GetResult();
}
finally
{
ArrayPool<byte>.Shared.Return(ten);
}
}
else
{
Span<byte> ten = stackalloc byte[10];
_stream.ReadFully(ten);
}
_stream.Dispose();
}
}
private async Task ReadAuthBytesAsync()
{
byte[] authBytes = new byte[10];
await _stream.ReadFullyAsync(authBytes, 0, 10).ConfigureAwait(false);
}
public override void Flush() { }
public override int Read(byte[] buffer, int offset, int count)

View File

@@ -39,7 +39,7 @@ internal abstract partial class ZipFilePart
.ConfigureAwait(false);
if (LeaveStreamOpen)
{
return SharpCompressStream.Create(decompressionStream, leaveOpen: true);
return SharpCompressStream.CreateNonDisposing(decompressionStream);
}
return decompressionStream;
}
@@ -63,7 +63,7 @@ internal abstract partial class ZipFilePart
) || Header.IsZip64
)
{
plainStream = SharpCompressStream.Create(plainStream, leaveOpen: true); //make sure AES doesn't close
plainStream = SharpCompressStream.CreateNonDisposing(plainStream); //make sure AES doesn't close
}
else
{
@@ -136,28 +136,55 @@ internal abstract partial class ZipFilePart
}
case ZipCompressionMethod.Shrink:
{
return new ShrinkStream(
stream,
CompressionMode.Decompress,
Header.CompressedSize,
Header.UncompressedSize
);
return await ShrinkStream
.CreateAsync(
stream,
CompressionMode.Decompress,
Header.CompressedSize,
Header.UncompressedSize,
cancellationToken
)
.ConfigureAwait(false);
}
case ZipCompressionMethod.Reduce1:
{
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 1);
return await ReduceStream.CreateAsync(
stream,
Header.CompressedSize,
Header.UncompressedSize,
1,
cancellationToken
);
}
case ZipCompressionMethod.Reduce2:
{
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 2);
return await ReduceStream.CreateAsync(
stream,
Header.CompressedSize,
Header.UncompressedSize,
2,
cancellationToken
);
}
case ZipCompressionMethod.Reduce3:
{
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 3);
return await ReduceStream.CreateAsync(
stream,
Header.CompressedSize,
Header.UncompressedSize,
3,
cancellationToken
);
}
case ZipCompressionMethod.Reduce4:
{
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 4);
return await ReduceStream.CreateAsync(
stream,
Header.CompressedSize,
Header.UncompressedSize,
4,
cancellationToken
);
}
case ZipCompressionMethod.Explode:
{
@@ -201,7 +228,7 @@ internal abstract partial class ZipFilePart
await stream
.ReadFullyAsync(props, 0, propsSize, cancellationToken)
.ConfigureAwait(false);
return LzmaStream.Create(
return await LzmaStream.CreateAsync(
props,
stream,
Header.CompressedSize > 0 ? Header.CompressedSize - 4 - props.Length : -1,
@@ -222,7 +249,9 @@ internal abstract partial class ZipFilePart
{
var props = new byte[2];
await stream.ReadFullyAsync(props, 0, 2, cancellationToken).ConfigureAwait(false);
return new PpmdStream(new PpmdProperties(props), stream, false);
return await PpmdStream
.CreateAsync(new PpmdProperties(props), stream, false, cancellationToken)
.ConfigureAwait(false);
}
case ZipCompressionMethod.WinzipAes:
{
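
The Shrink/Reduce/Ppmd changes above replace constructors with static CreateAsync factories, because a constructor cannot await its priming reads. A self-contained sketch of the shape, with a hypothetical DemoDecoderStream standing in for the library types:

using System.IO;
using System.Threading;
using System.Threading.Tasks;

internal sealed class DemoDecoderStream
{
    private readonly Stream _stream;
    private readonly byte[] _props;

    private DemoDecoderStream(Stream stream, byte[] props)
    {
        _stream = stream;
        _props = props;
    }

    // All awaiting happens here, before the instance exists.
    public static async ValueTask<DemoDecoderStream> CreateAsync(
        Stream stream,
        CancellationToken cancellationToken = default
    )
    {
        var props = new byte[2];
        var total = 0;
        while (total < props.Length)
        {
            var n = await stream
                .ReadAsync(props, total, props.Length - total, cancellationToken)
                .ConfigureAwait(false);
            if (n == 0)
            {
                throw new EndOfStreamException();
            }
            total += n;
        }
        return new DemoDecoderStream(stream, props);
    }
}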

View File

@@ -45,7 +45,7 @@ internal abstract partial class ZipFilePart : FilePart
);
if (LeaveStreamOpen)
{
return SharpCompressStream.Create(decompressionStream, leaveOpen: true);
return SharpCompressStream.CreateNonDisposing(decompressionStream);
}
return decompressionStream;
}
@@ -88,19 +88,39 @@ internal abstract partial class ZipFilePart : FilePart
}
case ZipCompressionMethod.Reduce1:
{
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 1);
return ReduceStream.Create(
stream,
Header.CompressedSize,
Header.UncompressedSize,
1
);
}
case ZipCompressionMethod.Reduce2:
{
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 2);
return ReduceStream.Create(
stream,
Header.CompressedSize,
Header.UncompressedSize,
2
);
}
case ZipCompressionMethod.Reduce3:
{
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 3);
return ReduceStream.Create(
stream,
Header.CompressedSize,
Header.UncompressedSize,
3
);
}
case ZipCompressionMethod.Reduce4:
{
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 4);
return ReduceStream.Create(
stream,
Header.CompressedSize,
Header.UncompressedSize,
4
);
}
case ZipCompressionMethod.Explode:
{
@@ -130,18 +150,26 @@ internal abstract partial class ZipFilePart : FilePart
{
throw new NotSupportedException("LZMA with pkware encryption.");
}
var reader = new BinaryReader(stream);
reader.ReadUInt16(); //LZMA version
var props = new byte[reader.ReadUInt16()];
reader.Read(props, 0, props.Length);
return LzmaStream.Create(
props,
stream,
Header.CompressedSize > 0 ? Header.CompressedSize - 4 - props.Length : -1,
FlagUtility.HasFlag(Header.Flags, HeaderFlags.Bit1)
? -1
: Header.UncompressedSize
);
using (
var reader = new BinaryReader(
stream,
System.Text.Encoding.Default,
leaveOpen: true
)
)
{
reader.ReadUInt16(); //LZMA version
var props = new byte[reader.ReadUInt16()];
reader.Read(props, 0, props.Length);
return LzmaStream.Create(
props,
stream,
Header.CompressedSize > 0 ? Header.CompressedSize - 4 - props.Length : -1,
FlagUtility.HasFlag(Header.Flags, HeaderFlags.Bit1)
? -1
: Header.UncompressedSize
);
}
}
case ZipCompressionMethod.Xz:
{
@@ -155,7 +183,7 @@ internal abstract partial class ZipFilePart : FilePart
{
Span<byte> props = stackalloc byte[2];
stream.ReadFully(props);
return new PpmdStream(new PpmdProperties(props), stream, false);
return PpmdStream.Create(new PpmdProperties(props), stream, false);
}
case ZipCompressionMethod.WinzipAes:
{
@@ -213,7 +241,7 @@ internal abstract partial class ZipFilePart : FilePart
) || Header.IsZip64
)
{
plainStream = new NonDisposingStream(plainStream); //make sure AES doesn't close
plainStream = SharpCompressStream.CreateNonDisposing(plainStream); //make sure AES doesn't close
}
else
{
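
The LZMA branch above parses zip's LZMA preamble: a 2-byte version, a 2-byte property length, then the properties themselves, leaving CompressedSize minus those 4 preamble bytes and the properties for the LZMA payload. A condensed sketch of just the parse:

using System.IO;

static byte[] ReadLzmaProps(Stream stream, long compressedSize, out long remaining)
{
    using var reader = new BinaryReader(stream, System.Text.Encoding.Default, leaveOpen: true);
    reader.ReadUInt16(); // LZMA version, unused here
    var props = new byte[reader.ReadUInt16()];
    reader.Read(props, 0, props.Length);
    remaining = compressedSize > 0 ? compressedSize - 4 - props.Length : -1;
    return props;
}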

View File

@@ -30,35 +30,14 @@ using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Compressors.ADC;
/// <summary>
/// Provides a forward readable only stream that decompresses ADC data
/// </summary>
public sealed partial class ADCStream : Stream, IStreamStack
public sealed partial class ADCStream : Stream
{
#if DEBUG_STREAMS
long IStreamStack.InstanceId { get; set; }
#endif
int IStreamStack.DefaultBufferSize { get; set; }
Stream IStreamStack.BaseStream() => _stream;
int IStreamStack.BufferSize
{
get => 0;
set { }
}
int IStreamStack.BufferPosition
{
get => 0;
set { }
}
void IStreamStack.SetPosition(long position) { }
/// <summary>
/// This stream holds the compressed data
/// </summary>
@@ -97,9 +76,6 @@ public sealed partial class ADCStream : Stream, IStreamStack
}
_stream = stream;
#if DEBUG_STREAMS
this.DebugConstruct(typeof(ADCStream));
#endif
}
public override bool CanRead => _stream.CanRead;
@@ -125,9 +101,6 @@ public sealed partial class ADCStream : Stream, IStreamStack
return;
}
_isDisposed = true;
#if DEBUG_STREAMS
this.DebugDispose(typeof(ADCStream));
#endif
base.Dispose(disposing);
}

View File

@@ -0,0 +1,75 @@
using System;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.RLE90;
public partial class ArcLzwStream
{
public override async Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
if (_processed)
{
return 0;
}
_processed = true;
var data = new byte[_compressedSize];
int totalRead = 0;
while (totalRead < _compressedSize)
{
int read = await _stream
.ReadAsync(data, totalRead, _compressedSize - totalRead, cancellationToken)
.ConfigureAwait(false);
if (read == 0)
{
break;
}
totalRead += read;
}
var decoded = Decompress(data, _useCrunched);
var result = decoded.Count();
if (_useCrunched)
{
    var unpacked = RLE.UnpackRLE(decoded.ToArray());
    unpacked.CopyTo(buffer, offset); // honor the caller's offset
    result = unpacked.Count;
}
else
{
    decoded.CopyTo(buffer, offset); // honor the caller's offset
}
return result;
}
#if !LEGACY_DOTNET
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
)
{
if (buffer.IsEmpty)
{
return 0;
}
byte[] array = System.Buffers.ArrayPool<byte>.Shared.Rent(buffer.Length);
try
{
int read = await ReadAsync(array, 0, buffer.Length, cancellationToken)
.ConfigureAwait(false);
array.AsSpan(0, read).CopyTo(buffer.Span);
return read;
}
finally
{
System.Buffers.ArrayPool<byte>.Shared.Return(array);
}
}
#endif
}

View File

@@ -4,30 +4,9 @@ using System.IO;
using System.Linq;
using SharpCompress.Compressors.RLE90;
using SharpCompress.Compressors.Squeezed;
using SharpCompress.IO;
public partial class ArcLzwStream : Stream, IStreamStack
public partial class ArcLzwStream : Stream
{
#if DEBUG_STREAMS
long IStreamStack.InstanceId { get; set; }
#endif
int IStreamStack.DefaultBufferSize { get; set; }
Stream IStreamStack.BaseStream() => _stream;
int IStreamStack.BufferSize
{
get => 0;
set { }
}
int IStreamStack.BufferPosition
{
get => 0;
set { }
}
void IStreamStack.SetPosition(long position) { }
private Stream _stream;
private bool _processed;
private bool _useCrunched;
@@ -54,9 +33,6 @@ public partial class ArcLzwStream : Stream, IStreamStack
public ArcLzwStream(Stream stream, int compressedSize, bool useCrunched = true)
{
_stream = stream;
#if DEBUG_STREAMS
this.DebugConstruct(typeof(ArcLzwStream));
#endif
_useCrunched = useCrunched;
_compressedSize = compressedSize;
@@ -223,9 +199,6 @@ public partial class ArcLzwStream : Stream, IStreamStack
protected override void Dispose(bool disposing)
{
#if DEBUG_STREAMS
this.DebugDispose(typeof(ArcLzwStream));
#endif
base.Dispose(disposing);
}
}

View File

@@ -1,72 +1,68 @@
using System;
using System.IO;
namespace SharpCompress.Compressors.Arj;

[CLSCompliant(true)]
public partial class BitReader
{
    private readonly Stream _input;
    private int _bitBuffer; // currently buffered bits
    private int _bitCount; // number of bits in buffer

    public BitReader(Stream input)
    {
        _input = input ?? throw new ArgumentNullException(nameof(input));
        _bitBuffer = 0;
        _bitCount = 0;
    }

    /// <summary>
    /// Reads a single bit from the stream. Returns 0 or 1.
    /// </summary>
    public int ReadBit()
    {
        if (_bitCount == 0)
        {
            int nextByte = _input.ReadByte();
            if (nextByte < 0)
            {
                throw new EndOfStreamException("No more data available in BitReader.");
            }
            _bitBuffer = nextByte;
            _bitCount = 8;
        }
        int bit = (_bitBuffer >> (_bitCount - 1)) & 1;
        _bitCount--;
        return bit;
    }

    /// <summary>
    /// Reads n bits (up to 32) from the stream.
    /// </summary>
    public int ReadBits(int count)
    {
        if (count < 0 || count > 32)
        {
            throw new ArgumentOutOfRangeException(nameof(count), "Count must be between 0 and 32.");
        }
        int result = 0;
        for (int i = 0; i < count; i++)
        {
            result = (result << 1) | ReadBit();
        }
        return result;
    }

    /// <summary>
    /// Resets any buffered bits.
    /// </summary>
    public void AlignToByte()
    {
        _bitCount = 0;
        _bitBuffer = 0;
    }
}
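
ReadBit consumes bits most-significant first, and ReadBits folds them left to right. A short worked example against the class above:

using System.IO;
using SharpCompress.Compressors.Arj;

// Input byte 0b1011_0001 yields bits 1,0,1,1,0,0,0,1 in that order,
// so the first ReadBits(3) call returns 0b101 == 5.
var reader = new BitReader(new MemoryStream(new byte[] { 0b1011_0001 }));
var first3 = reader.ReadBits(3); // 5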

View File

@@ -2,42 +2,41 @@ using System;
using System.Collections;
using System.Collections.Generic;
namespace SharpCompress.Compressors.Arj;

/// <summary>
/// Iterator that reads & pushes values back into the ring buffer.
/// </summary>
public class HistoryIterator : IEnumerator<byte>
{
    private int _index;
    private readonly IRingBuffer _ring;

    public HistoryIterator(IRingBuffer ring, int startIndex)
    {
        _ring = ring;
        _index = startIndex;
    }

    public bool MoveNext()
    {
        Current = _ring[_index];
        _index = unchecked(_index + 1);
        // Push value back into the ring buffer
        _ring.Push(Current);
        return true; // iterator is infinite
    }

    public void Reset()
    {
        throw new NotSupportedException();
    }

    public byte Current { get; private set; }

    object IEnumerator.Current => Current;

    public void Dispose() { }
}

View File

@@ -0,0 +1,38 @@
using System;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Compressors.Arj;
public sealed partial class HuffTree
{
public async ValueTask<int> ReadEntryAsync(
BitReader reader,
CancellationToken cancellationToken
)
{
if (_tree.Count == 0)
{
throw new InvalidOperationException("Tree not initialized");
}
TreeEntry node = _tree[0];
while (true)
{
if (node.Type == NodeType.Leaf)
{
return node.LeafValue;
}
int bit = await reader.ReadBitAsync(cancellationToken).ConfigureAwait(false);
int index = node.BranchIndex + bit;
if (index >= _tree.Count)
{
throw new InvalidOperationException("Invalid branch index during read");
}
node = _tree[index];
}
}
}

View File

@@ -3,216 +3,212 @@ using System.Collections.Generic;
using System.IO;
using System.Text;
namespace SharpCompress.Compressors.Arj;

[CLSCompliant(true)]
public enum NodeType
{
    Leaf,
    Branch,
}

[CLSCompliant(true)]
public sealed class TreeEntry
{
    public readonly NodeType Type;
    public readonly int LeafValue;
    public readonly int BranchIndex;

    public const int MAX_INDEX = 4096;

    private TreeEntry(NodeType type, int leafValue, int branchIndex)
    {
        Type = type;
        LeafValue = leafValue;
        BranchIndex = branchIndex;
    }

    public static TreeEntry Leaf(int value)
    {
        return new TreeEntry(NodeType.Leaf, value, -1);
    }

    public static TreeEntry Branch(int index)
    {
        if (index >= MAX_INDEX)
        {
            throw new ArgumentOutOfRangeException(nameof(index), "Branch index exceeds MAX_INDEX");
        }
        return new TreeEntry(NodeType.Branch, 0, index);
    }
}
[CLSCompliant(true)]
public sealed partial class HuffTree
{
private readonly List<TreeEntry> _tree;
public HuffTree(int capacity = 0)
{
_tree = new List<TreeEntry>(capacity);
}
public void SetSingle(int value)
{
_tree.Clear();
_tree.Add(TreeEntry.Leaf(value));
}
public void BuildTree(byte[] lengths, int count)
{
if (lengths == null)
{
throw new ArgumentNullException(nameof(lengths));
}
if (count < 0 || count > lengths.Length)
{
throw new ArgumentOutOfRangeException(nameof(count));
}
if (count > TreeEntry.MAX_INDEX / 2)
{
throw new ArgumentException(
$"Count exceeds maximum allowed: {TreeEntry.MAX_INDEX / 2}"
);
}
byte[] slice = new byte[count];
Array.Copy(lengths, slice, count);
BuildTree(slice);
}
public void BuildTree(byte[] valueLengths)
{
if (valueLengths == null)
{
throw new ArgumentNullException(nameof(valueLengths));
}
if (valueLengths.Length > TreeEntry.MAX_INDEX / 2)
{
throw new InvalidOperationException("Too many code lengths");
}
_tree.Clear();
int maxAllocated = 1; // start with a single (root) node
for (byte currentLen = 1; ; currentLen++)
{
// add missing branches up to current limit
int maxLimit = maxAllocated;
for (int i = _tree.Count; i < maxLimit; i++)
{
// TreeEntry.Branch may throw if index too large
try
{
_tree.Add(TreeEntry.Branch(maxAllocated));
}
catch (ArgumentOutOfRangeException e)
{
_tree.Clear();
throw new InvalidOperationException("Branch index exceeds limit", e);
}
// each branch node allocates two children
maxAllocated += 2;
}
// fill tree with leaves found in the lengths table at the current length
bool moreLeaves = false;
for (int value = 0; value < valueLengths.Length; value++)
{
byte len = valueLengths[value];
if (len == currentLen)
{
_tree.Add(TreeEntry.Leaf(value));
}
else if (len > currentLen)
{
moreLeaves = true; // there are more leaves to process
}
}
// sanity check (too many leaves)
if (_tree.Count > maxAllocated)
{
throw new InvalidOperationException("Too many leaves");
}
// stop when no longer finding longer codes
if (!moreLeaves)
{
break;
}
}
// ensure tree is complete
if (_tree.Count != maxAllocated)
{
throw new InvalidOperationException(
$"Missing some leaves: tree count = {_tree.Count}, expected = {maxAllocated}"
);
}
}
public int ReadEntry(BitReader reader)
{
if (_tree.Count == 0)
{
throw new InvalidOperationException("Tree not initialized");
}
TreeEntry node = _tree[0];
while (true)
{
if (node.Type == NodeType.Leaf)
{
return node.LeafValue;
}
int bit = reader.ReadBit();
int index = node.BranchIndex + bit;
if (index >= _tree.Count)
{
throw new InvalidOperationException("Invalid branch index during read");
}
node = _tree[index];
}
}
public override string ToString()
{
var result = new StringBuilder();
void FormatStep(int index, string prefix)
{
var node = _tree[index];
if (node.Type == NodeType.Leaf)
{
result.AppendLine($"{prefix} -> {node.LeafValue}");
}
else
{
FormatStep(node.BranchIndex, prefix + "0");
FormatStep(node.BranchIndex + 1, prefix + "1");
}
}
if (_tree.Count > 0)
{
FormatStep(0, "");
}
return result.ToString();
}
}
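
BuildTree consumes canonical code lengths breadth-first, allocating two children per branch and checking completeness. As a worked example, lengths {1, 2, 2} produce the codes 0, 10, 11, and decoding then walks one branch per bit:

using System.IO;
using SharpCompress.Compressors.Arj;

var tree = new HuffTree();
tree.BuildTree(new byte[] { 1, 2, 2 }); // symbol 0 -> "0", 1 -> "10", 2 -> "11"

// Bits 0,10,11 packed MSB-first into one byte: 0b0_10_11_000.
var bits = new BitReader(new MemoryStream(new byte[] { 0b0_10_11_000 }));
var s0 = tree.ReadEntry(bits); // 0
var s1 = tree.ReadEntry(bits); // 1
var s2 = tree.ReadEntry(bits); // 2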

View File

@@ -1,9 +1,8 @@
namespace SharpCompress.Compressors.Arj;

public interface ILhaDecoderConfig
{
    int HistoryBits { get; }
    int OffsetBits { get; }
    RingBuffer RingBuffer { get; }
}

View File

@@ -1,17 +1,16 @@
namespace SharpCompress.Compressors.Arj;

public interface IRingBuffer
{
    int BufferSize { get; }

    int Cursor { get; }
    void SetCursor(int pos);

    void Push(byte value);

    HistoryIterator IterFromOffset(int offset);
    HistoryIterator IterFromPos(int pos);

    byte this[int index] { get; }
}

View File

@@ -1,205 +1,182 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.IO;
namespace SharpCompress.Compressors.Arj
namespace SharpCompress.Compressors.Arj;
[CLSCompliant(true)]
public sealed partial class LHDecoderStream : Stream
{
[CLSCompliant(true)]
public sealed partial class LHDecoderStream : Stream, IStreamStack
private readonly BitReader _bitReader;
private readonly Stream _stream;
// Buffer containing *all* bytes decoded so far.
private readonly List<byte> _buffer = new();
private long _readPosition;
private readonly int _originalSize;
private bool _finishedDecoding;
private bool _disposed;
private const int THRESHOLD = 3;
public LHDecoderStream(Stream compressedStream, int originalSize)
{
#if DEBUG_STREAMS
long IStreamStack.InstanceId { get; set; }
#endif
int IStreamStack.DefaultBufferSize { get; set; }
Stream IStreamStack.BaseStream() => _stream;
int IStreamStack.BufferSize
_stream = compressedStream ?? throw new ArgumentNullException(nameof(compressedStream));
if (!compressedStream.CanRead)
{
get => 0;
set { }
}
int IStreamStack.BufferPosition
{
get => 0;
set { }
throw new ArgumentException(
"compressedStream must be readable.",
nameof(compressedStream)
);
}
void IStreamStack.SetPosition(long position) { }
        _bitReader = new BitReader(compressedStream);
        _originalSize = originalSize;
        _readPosition = 0;
        _finishedDecoding = (originalSize == 0);
    }

    private readonly BitReader _bitReader;
    private readonly Stream _stream;

    public Stream BaseStream => _stream;

    // Buffer containing *all* bytes decoded so far.
    private readonly List<byte> _buffer = new();

    public override bool CanRead => true;
    public override bool CanSeek => false;
    public override bool CanWrite => false;

    private long _readPosition;
    private readonly int _originalSize;
    private bool _finishedDecoding;
    private bool _disposed;

    public override long Length => _originalSize;

    private const int THRESHOLD = 3;

    public override long Position
    {
        get => _readPosition;
        set => throw new NotSupportedException();
    }

    /// <summary>
    /// Decodes a single element (literal or back-reference) and appends it to _buffer.
    /// Returns true if data was added, or false if all input has already been decoded.
    /// </summary>
    private bool DecodeNext()
    {
        if (_buffer.Count >= _originalSize)
        {
            _finishedDecoding = true;
            return false;
        }
        int len = DecodeVal(0, 7);
        if (len == 0)
        {
            byte nextChar = (byte)_bitReader.ReadBits(8);
            _buffer.Add(nextChar);
        }
        else
        {
            int repCount = len + THRESHOLD - 1;
            int backPtr = DecodeVal(9, 13);
            if (backPtr >= _buffer.Count)
            {
                throw new InvalidDataException("Invalid back_ptr in LH stream");
            }
            int srcIndex = _buffer.Count - 1 - backPtr;
            for (int j = 0; j < repCount && _buffer.Count < _originalSize; j++)
            {
                byte b = _buffer[srcIndex];
                _buffer.Add(b);
                srcIndex++;
                // srcIndex may grow; it's allowed (source region can overlap destination)
            }
        }
        if (_buffer.Count >= _originalSize)
        {
            _finishedDecoding = true;
        }
        return true;
    }

    private int DecodeVal(int from, int to)
    {
        int add = 0;
        int bit = from;
        while (bit < to && _bitReader.ReadBits(1) == 1)
        {
            add |= 1 << bit;
            bit++;
        }
        int res = bit > 0 ? _bitReader.ReadBits(bit) : 0;
        return res + add;
    }

    /// <summary>
    /// Reads decompressed bytes into buffer[offset..offset+count].
    /// The method decodes additional data on demand when needed.
    /// </summary>
    public override int Read(byte[] buffer, int offset, int count)
    {
        if (_disposed)
        {
            throw new ObjectDisposedException(nameof(LHDecoderStream));
        }
        if (buffer == null)
        {
            throw new ArgumentNullException(nameof(buffer));
        }
        if (offset < 0 || count < 0 || offset + count > buffer.Length)
        {
            throw new ArgumentOutOfRangeException("offset/count");
        }
        if (_readPosition >= _originalSize)
        {
            return 0; // EOF
        }
        int totalRead = 0;
        while (totalRead < count && _readPosition < _originalSize)
        {
            if (_readPosition >= _buffer.Count)
            {
                bool had = DecodeNext();
                if (!had)
                {
                    break;
                }
            }
            int available = _buffer.Count - (int)_readPosition;
            if (available <= 0)
            {
                if (!_finishedDecoding)
                {
                    continue;
                }
                break;
            }
            int toCopy = Math.Min(available, count - totalRead);
            _buffer.CopyTo((int)_readPosition, buffer, offset + totalRead, toCopy);
            _readPosition += toCopy;
            totalRead += toCopy;
        }
        return totalRead;
    }

    public override void Flush() => throw new NotSupportedException();

    public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();

    public override void SetLength(long value) => throw new NotSupportedException();

    public override void Write(byte[] buffer, int offset, int count) =>
        throw new NotSupportedException();
}
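For orientation, here is a minimal consumption sketch for the class above. It is illustrative only: the source stream, file name, and the 1024-byte unpacked size are assumptions rather than values from this diff; only LHDecoderStream and its Read contract come from the file itself.

    using System.IO;

    // Assumed inputs: an LH-compressed payload and its unpacked size
    // as recorded in the archive entry header.
    using Stream compressed = File.OpenRead("payload.bin");
    const int originalSize = 1024;

    using var decoder = new LHDecoderStream(compressed, originalSize);
    var output = new byte[originalSize];
    int total = 0;
    while (total < originalSize)
    {
        // Read may return fewer bytes than requested; loop until EOF.
        int n = decoder.Read(output, total, originalSize - total);
        if (n == 0)
        {
            break;
        }
        total += n;
    }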

View File

@@ -1,9 +1,8 @@
-namespace SharpCompress.Compressors.Arj
-{
-    public class Lh5DecoderCfg : ILhaDecoderConfig
-    {
-        public int HistoryBits => 14;
-        public int OffsetBits => 4;
-        public RingBuffer RingBuffer { get; } = new RingBuffer(1 << 14);
-    }
-}
+namespace SharpCompress.Compressors.Arj;
+
+public class Lh5DecoderCfg : ILhaDecoderConfig
+{
+    public int HistoryBits => 14;
+    public int OffsetBits => 4;
+    public RingBuffer RingBuffer { get; } = new RingBuffer(1 << 14);
+}

View File

@@ -1,9 +1,8 @@
-namespace SharpCompress.Compressors.Arj
-{
-    public class Lh7DecoderCfg : ILhaDecoderConfig
-    {
-        public int HistoryBits => 17;
-        public int OffsetBits => 5;
-        public RingBuffer RingBuffer { get; } = new RingBuffer(1 << 17);
-    }
-}
+namespace SharpCompress.Compressors.Arj;
+
+public class Lh7DecoderCfg : ILhaDecoderConfig
+{
+    public int HistoryBits => 17;
+    public int OffsetBits => 5;
+    public RingBuffer RingBuffer { get; } = new RingBuffer(1 << 17);
+}
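Both configs exist only to parameterize the shared decoder: LhaStream<C> (later in this diff) instantiates C through its new() constraint and takes the history window from RingBuffer. A hedged sketch of selecting a variant follows; the input stream and unpacked size are placeholders, and OpenEntryStream is a hypothetical helper.

    // lh5 uses a 1 << 14 (16 KiB) history window, lh7 a 1 << 17 (128 KiB) one;
    // the decoding logic itself is shared via the type parameter.
    Stream input = OpenEntryStream();   // hypothetical source
    int unpackedSize = 4096;            // hypothetical header value

    using var lh5 = new LhaStream<Lh5DecoderCfg>(input, unpackedSize);
    // or: using var lh7 = new LhaStream<Lh7DecoderCfg>(input, unpackedSize);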

View File

@@ -0,0 +1,401 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace SharpCompress.Compressors.Arj;
public sealed partial class LhaStream<C>
{
public override async Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
if (buffer is null)
{
throw new ArgumentNullException(nameof(buffer));
}
if (offset < 0 || count < 0 || (offset + count) > buffer.Length)
{
throw new ArgumentOutOfRangeException();
}
if (_producedBytes >= _originalSize)
{
return 0; // EOF
}
if (count == 0)
{
return 0;
}
int bytesRead = await FillBufferAsync(buffer, cancellationToken).ConfigureAwait(false);
return bytesRead;
}
#if !LEGACY_DOTNET
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
)
{
if (_producedBytes >= _originalSize)
{
return 0; // EOF
}
if (buffer.Length == 0)
{
return 0;
}
int bytesRead = await FillBufferAsync(buffer, cancellationToken).ConfigureAwait(false);
return bytesRead;
}
#endif
private async ValueTask<byte> ReadCodeLengthAsync(CancellationToken cancellationToken)
{
// Track the length in an int: with a byte counter, len++ wraps at 255
// and the overflow guard below could never fire.
int len = await _bitReader.ReadBitsAsync(3, cancellationToken).ConfigureAwait(false);
if (len == 7)
{
while (await _bitReader.ReadBitAsync(cancellationToken).ConfigureAwait(false) != 0)
{
len++;
if (len > 255)
{
throw new InvalidOperationException("Code length overflow");
}
}
}
return (byte)len;
}
private async ValueTask<int> ReadCodeSkipAsync(
int skipRange,
CancellationToken cancellationToken
)
{
int bits;
int increment;
switch (skipRange)
{
case 0:
return 1;
case 1:
bits = 4;
increment = 3; // 3..=18
break;
default:
bits = 9;
increment = 20; // 20..=531
break;
}
int skip = await _bitReader.ReadBitsAsync(bits, cancellationToken).ConfigureAwait(false);
return skip + increment;
}
private async ValueTask ReadTempTreeAsync(CancellationToken cancellationToken)
{
byte[] codeLengths = new byte[NUM_TEMP_CODELEN];
// number of codes to read (5 bits)
int numCodes = await _bitReader.ReadBitsAsync(5, cancellationToken).ConfigureAwait(false);
// single code only
if (numCodes == 0)
{
int code = await _bitReader.ReadBitsAsync(5, cancellationToken).ConfigureAwait(false);
_offsetTree.SetSingle((byte)code);
return;
}
if (numCodes > NUM_TEMP_CODELEN)
{
throw new Exception("temporary codelen table has invalid size");
}
// read actual lengths
int count = Math.Min(3, numCodes);
for (int i = 0; i < count; i++)
{
codeLengths[i] = (byte)
await ReadCodeLengthAsync(cancellationToken).ConfigureAwait(false);
}
// 2-bit skip value follows
int skip = await _bitReader.ReadBitsAsync(2, cancellationToken).ConfigureAwait(false);
if (3 + skip > numCodes)
{
throw new Exception("temporary codelen table has invalid size");
}
for (int i = 3 + skip; i < numCodes; i++)
{
codeLengths[i] = (byte)
await ReadCodeLengthAsync(cancellationToken).ConfigureAwait(false);
}
_offsetTree.BuildTree(codeLengths, numCodes);
}
private async ValueTask ReadCommandTreeAsync(CancellationToken cancellationToken)
{
byte[] codeLengths = new byte[NUM_COMMANDS];
// number of codes to read (9 bits)
int numCodes = await _bitReader.ReadBitsAsync(9, cancellationToken).ConfigureAwait(false);
// single code only
if (numCodes == 0)
{
int code = await _bitReader.ReadBitsAsync(9, cancellationToken).ConfigureAwait(false);
_commandTree.SetSingle((ushort)code);
return;
}
if (numCodes > NUM_COMMANDS)
{
throw new Exception("commands codelen table has invalid size");
}
int index = 0;
while (index < numCodes)
{
for (int n = 0; n < numCodes - index; n++)
{
int code = await _offsetTree
.ReadEntryAsync(_bitReader, cancellationToken)
.ConfigureAwait(false);
if (code >= 0 && code <= 2) // skip range
{
int skipCount = await ReadCodeSkipAsync(code, cancellationToken)
.ConfigureAwait(false);
index += n + skipCount;
goto outerLoop;
}
else
{
codeLengths[index + n] = (byte)(code - 2);
}
}
break;
outerLoop:
;
}
_commandTree.BuildTree(codeLengths, numCodes);
}
private async ValueTask ReadOffsetTreeAsync(CancellationToken cancellationToken)
{
int numCodes = await _bitReader
.ReadBitsAsync(_config.OffsetBits, cancellationToken)
.ConfigureAwait(false);
if (numCodes == 0)
{
int code = await _bitReader
.ReadBitsAsync(_config.OffsetBits, cancellationToken)
.ConfigureAwait(false);
_offsetTree.SetSingle(code);
return;
}
if (numCodes > _config.HistoryBits)
{
throw new InvalidDataException("Offset code table too large");
}
byte[] codeLengths = new byte[NUM_TEMP_CODELEN];
for (int i = 0; i < numCodes; i++)
{
codeLengths[i] = (byte)
await ReadCodeLengthAsync(cancellationToken).ConfigureAwait(false);
}
_offsetTree.BuildTree(codeLengths, numCodes);
}
private async ValueTask BeginNewBlockAsync(CancellationToken cancellationToken)
{
await ReadTempTreeAsync(cancellationToken).ConfigureAwait(false);
await ReadCommandTreeAsync(cancellationToken).ConfigureAwait(false);
await ReadOffsetTreeAsync(cancellationToken).ConfigureAwait(false);
}
private ValueTask<int> ReadCommandAsync(CancellationToken cancellationToken) =>
_commandTree.ReadEntryAsync(_bitReader, cancellationToken);
private async ValueTask<int> ReadOffsetAsync(CancellationToken cancellationToken)
{
int bits = await _offsetTree
.ReadEntryAsync(_bitReader, cancellationToken)
.ConfigureAwait(false);
if (bits <= 1)
{
return bits;
}
int res = await _bitReader.ReadBitsAsync(bits - 1, cancellationToken).ConfigureAwait(false);
return res | (1 << (bits - 1));
}
public async ValueTask<int> FillBufferAsync(byte[] buffer, CancellationToken cancellationToken)
{
int bufLen = buffer.Length;
int bufIndex = 0;
// stop when we reached original size
if (_producedBytes >= _originalSize)
{
return 0;
}
// calculate limit, so that we don't go over the original size
int remaining = (int)Math.Min(bufLen, _originalSize - _producedBytes);
while (bufIndex < remaining)
{
if (_copyProgress.HasValue)
{
var (offset, count) = _copyProgress.Value;
int copied = CopyFromHistory(
buffer,
bufIndex,
offset,
(int)Math.Min(count, remaining - bufIndex)
);
bufIndex += copied;
_copyProgress = null;
}
if (_remainingCommands == 0)
{
_remainingCommands = await _bitReader
.ReadBitsAsync(16, cancellationToken)
.ConfigureAwait(false);
if (bufIndex + _remainingCommands > remaining)
{
break;
}
await BeginNewBlockAsync(cancellationToken).ConfigureAwait(false);
}
_remainingCommands--;
int command = await ReadCommandAsync(cancellationToken).ConfigureAwait(false);
if (command >= 0 && command <= 0xFF)
{
byte value = (byte)command;
buffer[bufIndex++] = value;
_ringBuffer.Push(value);
}
else
{
int count = command - 0x100 + 3;
int offset = await ReadOffsetAsync(cancellationToken).ConfigureAwait(false);
int copyCount = (int)Math.Min(count, remaining - bufIndex);
bufIndex += CopyFromHistory(buffer, bufIndex, offset, copyCount);
}
}
_producedBytes += bufIndex;
return bufIndex;
}
#if !LEGACY_DOTNET
public async ValueTask<int> FillBufferAsync(
Memory<byte> buffer,
CancellationToken cancellationToken
)
{
int bufLen = buffer.Length;
int bufIndex = 0;
// stop when we reached original size
if (_producedBytes >= _originalSize)
{
return 0;
}
// calculate limit, so that we don't go over the original size
int remaining = (int)Math.Min(bufLen, _originalSize - _producedBytes);
while (bufIndex < remaining)
{
if (_copyProgress.HasValue)
{
var (offset, count) = _copyProgress.Value;
int copied = CopyFromHistory(
buffer.Span,
bufIndex,
offset,
(int)Math.Min(count, remaining - bufIndex)
);
bufIndex += copied;
_copyProgress = null;
}
if (_remainingCommands == 0)
{
_remainingCommands = await _bitReader
.ReadBitsAsync(16, cancellationToken)
.ConfigureAwait(false);
if (bufIndex + _remainingCommands > remaining)
{
break;
}
await BeginNewBlockAsync(cancellationToken).ConfigureAwait(false);
}
_remainingCommands--;
int command = await ReadCommandAsync(cancellationToken).ConfigureAwait(false);
if (command >= 0 && command <= 0xFF)
{
byte value = (byte)command;
buffer.Span[bufIndex++] = value;
_ringBuffer.Push(value);
}
else
{
int count = command - 0x100 + 3;
int offset = await ReadOffsetAsync(cancellationToken).ConfigureAwait(false);
int copyCount = (int)Math.Min(count, remaining - bufIndex);
bufIndex += CopyFromHistory(buffer.Span, bufIndex, offset, copyCount);
}
}
_producedBytes += bufIndex;
return bufIndex;
}
private int CopyFromHistory(Span<byte> target, int targetIndex, int offset, int count)
{
var historyIter = _ringBuffer.IterFromOffset(offset);
int copied = 0;
while (copied < count && historyIter.MoveNext() && (targetIndex + copied) < target.Length)
{
target[targetIndex + copied] = historyIter.Current;
copied++;
}
if (copied < count)
{
_copyProgress = (offset, count - copied);
}
return copied;
}
#endif
}
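A note on ReadOffsetAsync (and its synchronous twin in the next file): the Huffman entry is not the offset itself but its significant bit count; the top bit is implicit and the remaining bits - 1 come raw from the stream. A worked example with made-up values:

    // Suppose the offset tree decodes bits = 5 and the next 4 raw bits
    // from the stream are 0b1010 (decimal 10).
    int bits = 5;
    int raw = 0b1010;
    int offset = raw | (1 << (bits - 1)); // 10 | 16 == 26
    // Entries of 0 or 1 are returned directly, which is why the
    // method short-circuits when bits <= 1.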

View File

@@ -2,362 +2,337 @@ using System;
using System.Data;
using System.IO;
using System.Linq;
using SharpCompress.IO;
namespace SharpCompress.Compressors.Arj;

[CLSCompliant(true)]
public sealed partial class LhaStream<C> : Stream
    where C : ILhaDecoderConfig, new()
{
    private readonly BitReader _bitReader;
    private readonly Stream _stream;
    private readonly HuffTree _commandTree;
    private readonly HuffTree _offsetTree;
    private int _remainingCommands;
    private (int offset, int count)? _copyProgress;
    private readonly RingBuffer _ringBuffer;
    private readonly C _config = new C();
    private const int NUM_COMMANDS = 510;
    private const int NUM_TEMP_CODELEN = 20;
    private readonly int _originalSize;
    private int _producedBytes = 0;

    public LhaStream(Stream compressedStream, int originalSize)
    {
        _stream = compressedStream ?? throw new ArgumentNullException(nameof(compressedStream));
        _bitReader = new BitReader(compressedStream);
        _ringBuffer = _config.RingBuffer;
        _commandTree = new HuffTree(NUM_COMMANDS * 2);
        _offsetTree = new HuffTree(NUM_TEMP_CODELEN * 2);
        _remainingCommands = 0;
        _copyProgress = null;
        _originalSize = originalSize;
    }

    public override bool CanRead => true;
    public override bool CanSeek => false;
    public override bool CanWrite => false;
    public override long Length => throw new NotSupportedException();
    public override long Position
    {
        get => throw new NotSupportedException();
        set => throw new NotSupportedException();
    }

    public override void Flush() { }

    public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();

    public override void SetLength(long value) => throw new NotSupportedException();

    public override void Write(byte[] buffer, int offset, int count) =>
        throw new NotSupportedException();

    public override int Read(byte[] buffer, int offset, int count)
    {
        if (buffer == null)
        {
            throw new ArgumentNullException(nameof(buffer));
        }
        if (offset < 0 || count < 0 || (offset + count) > buffer.Length)
        {
            throw new ArgumentOutOfRangeException();
        }
        if (_producedBytes >= _originalSize)
        {
            return 0; // EOF
        }
        if (count == 0)
        {
            return 0;
        }
        int bytesRead = FillBuffer(buffer);
        return bytesRead;
    }

    private byte ReadCodeLength()
    {
        // Track the length in an int: with a byte counter, len++ wraps at 255
        // and the overflow guard below could never fire.
        int len = _bitReader.ReadBits(3);
        if (len == 7)
        {
            while (_bitReader.ReadBit() != 0)
            {
                len++;
                if (len > 255)
                {
                    throw new InvalidOperationException("Code length overflow");
                }
            }
        }
        return (byte)len;
    }

    private int ReadCodeSkip(int skipRange)
    {
        int bits;
        int increment;
        switch (skipRange)
        {
            case 0:
                return 1;
            case 1:
                bits = 4;
                increment = 3; // 3..=18
                break;
            default:
                bits = 9;
                increment = 20; // 20..=531
                break;
        }
        int skip = _bitReader.ReadBits(bits);
        return skip + increment;
    }

    private void ReadTempTree()
    {
        byte[] codeLengths = new byte[NUM_TEMP_CODELEN];
        // number of codes to read (5 bits)
        int numCodes = _bitReader.ReadBits(5);
        // single code only
        if (numCodes == 0)
        {
            int code = _bitReader.ReadBits(5);
            _offsetTree.SetSingle((byte)code);
            return;
        }
        if (numCodes > NUM_TEMP_CODELEN)
        {
            throw new Exception("temporary codelen table has invalid size");
        }
        // read actual lengths
        int count = Math.Min(3, numCodes);
        for (int i = 0; i < count; i++)
        {
            codeLengths[i] = (byte)ReadCodeLength();
        }
        // 2-bit skip value follows
        int skip = _bitReader.ReadBits(2);
        if (3 + skip > numCodes)
        {
            throw new Exception("temporary codelen table has invalid size");
        }
        for (int i = 3 + skip; i < numCodes; i++)
        {
            codeLengths[i] = (byte)ReadCodeLength();
        }
        _offsetTree.BuildTree(codeLengths, numCodes);
    }

    private void ReadCommandTree()
    {
        byte[] codeLengths = new byte[NUM_COMMANDS];
        // number of codes to read (9 bits)
        int numCodes = _bitReader.ReadBits(9);
        // single code only
        if (numCodes == 0)
        {
            int code = _bitReader.ReadBits(9);
            _commandTree.SetSingle((ushort)code);
            return;
        }
        if (numCodes > NUM_COMMANDS)
        {
            throw new Exception("commands codelen table has invalid size");
        }
        int index = 0;
        while (index < numCodes)
        {
            for (int n = 0; n < numCodes - index; n++)
            {
                int code = _offsetTree.ReadEntry(_bitReader);
                if (code >= 0 && code <= 2) // skip range
                {
                    int skipCount = ReadCodeSkip(code);
                    index += n + skipCount;
                    goto outerLoop;
                }
                else
                {
                    codeLengths[index + n] = (byte)(code - 2);
                }
            }
            break;
            outerLoop:
            ;
        }
        _commandTree.BuildTree(codeLengths, numCodes);
    }

    private void ReadOffsetTree()
    {
        int numCodes = _bitReader.ReadBits(_config.OffsetBits);
        if (numCodes == 0)
        {
            int code = _bitReader.ReadBits(_config.OffsetBits);
            _offsetTree.SetSingle(code);
            return;
        }
        if (numCodes > _config.HistoryBits)
        {
            throw new InvalidDataException("Offset code table too large");
        }
        byte[] codeLengths = new byte[NUM_TEMP_CODELEN];
        for (int i = 0; i < numCodes; i++)
        {
            codeLengths[i] = (byte)ReadCodeLength();
        }
        _offsetTree.BuildTree(codeLengths, numCodes);
    }

    private void BeginNewBlock()
    {
        ReadTempTree();
        ReadCommandTree();
        ReadOffsetTree();
    }

    private int ReadCommand() => _commandTree.ReadEntry(_bitReader);

    private int ReadOffset()
    {
        int bits = _offsetTree.ReadEntry(_bitReader);
        if (bits <= 1)
        {
            return bits;
        }
        int res = _bitReader.ReadBits(bits - 1);
        return res | (1 << (bits - 1));
    }

    private int CopyFromHistory(byte[] target, int targetIndex, int offset, int count)
    {
        var historyIter = _ringBuffer.IterFromOffset(offset);
        int copied = 0;
        while (copied < count && historyIter.MoveNext() && (targetIndex + copied) < target.Length)
        {
            target[targetIndex + copied] = historyIter.Current;
            copied++;
        }
        if (copied < count)
        {
            _copyProgress = (offset, count - copied);
        }
        return copied;
    }

    public int FillBuffer(byte[] buffer)
    {
        int bufLen = buffer.Length;
        int bufIndex = 0;
        // stop when we reached original size
        if (_producedBytes >= _originalSize)
        {
            return 0;
        }
        // calculate limit, so that we don't go over the original size
        int remaining = (int)Math.Min(bufLen, _originalSize - _producedBytes);
        while (bufIndex < remaining)
        {
            if (_copyProgress.HasValue)
            {
                var (offset, count) = _copyProgress.Value;
                int copied = CopyFromHistory(
                    buffer,
                    bufIndex,
                    offset,
                    (int)Math.Min(count, remaining - bufIndex)
                );
                bufIndex += copied;
                _copyProgress = null;
            }
            if (_remainingCommands == 0)
            {
                _remainingCommands = _bitReader.ReadBits(16);
                if (bufIndex + _remainingCommands > remaining)
                {
                    break;
                }
                BeginNewBlock();
            }
            _remainingCommands--;
            int command = ReadCommand();
            if (command >= 0 && command <= 0xFF)
            {
                byte value = (byte)command;
                buffer[bufIndex++] = value;
                _ringBuffer.Push(value);
            }
            else
            {
                int count = command - 0x100 + 3;
                int offset = ReadOffset();
                int copyCount = (int)Math.Min(count, remaining - bufIndex);
                bufIndex += CopyFromHistory(buffer, bufIndex, offset, copyCount);
            }
        }
        _producedBytes += bufIndex;
        return bufIndex;
    }
}
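To make the FillBuffer command split concrete: decoded values up to 0xFF are literal bytes, while values of 0x100 and above encode a copy length with a 3-byte minimum match, paired with a separately decoded history offset. A standalone sketch with assumed values:

    // Hypothetical decoder output, mirroring the branch in FillBuffer:
    int command = 0x105;
    if (command <= 0xFF)
    {
        // 0x00..0xFF: emit the literal and push it into the ring buffer.
        byte literal = (byte)command;
    }
    else
    {
        // 0x100..: copy (command - 0x100 + 3) bytes from history,
        // here 0x105 -> 8 bytes.
        int copyLength = command - 0x100 + 3;
    }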

View File

@@ -2,66 +2,65 @@ using System;
using System.Collections;
using System.Collections.Generic;
namespace SharpCompress.Compressors.Arj;

/// <summary>
/// A fixed-size ring buffer where N must be a power of two.
/// </summary>
public class RingBuffer : IRingBuffer
{
    private readonly byte[] _buffer;
    private int _cursor;
    public int BufferSize { get; }
    public int Cursor => _cursor;
    private readonly int _mask;

    public RingBuffer(int size)
    {
        if ((size & (size - 1)) != 0)
        {
            throw new ArgumentException("RingArrayBuffer size must be a power of two");
        }
        BufferSize = size;
        _buffer = new byte[size];
        _cursor = 0;
        _mask = size - 1;
        // Fill with spaces
        for (int i = 0; i < size; i++)
        {
            _buffer[i] = (byte)' ';
        }
    }

    public void SetCursor(int pos)
    {
        _cursor = pos & _mask;
    }

    public void Push(byte value)
    {
        int index = _cursor;
        _buffer[index & _mask] = value;
        _cursor = (index + 1) & _mask;
    }

    public byte this[int index] => _buffer[index & _mask];

    public HistoryIterator IterFromOffset(int offset)
    {
        int masked = (offset & _mask) + 1;
        int startIndex = _cursor + BufferSize - masked;
        return new HistoryIterator(this, startIndex);
    }

    public HistoryIterator IterFromPos(int pos)
    {
        int startIndex = pos & _mask;
        return new HistoryIterator(this, startIndex);
    }
}
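The power-of-two requirement is what lets every index operation above be a single & _mask instead of a modulo, and why cursors may legally run past the end of the array. A small standalone illustration:

    int size = 1 << 14;      // e.g. the lh5 window
    int mask = size - 1;     // 0x3FFF
    int cursor = 16380;
    for (int i = 0; i < 8; i++)
    {
        // Wraps cleanly: 16380, 16381, 16382, 16383, 0, 1, 2, 3
        int slot = (cursor + i) & mask;
    }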

View File

@@ -23,11 +23,7 @@ public sealed partial class BZip2Stream
CancellationToken cancellationToken = default
)
{
-var bZip2Stream = new BZip2Stream();
-bZip2Stream.leaveOpen = leaveOpen;
-#if DEBUG_STREAMS
-bZip2Stream.DebugConstruct(typeof(BZip2Stream));
-#endif
+var bZip2Stream = new BZip2Stream(leaveOpen);
bZip2Stream.Mode = compressionMode;
if (bZip2Stream.Mode == CompressionMode.Compress)
{
@@ -38,6 +34,7 @@ public sealed partial class BZip2Stream
bZip2Stream.stream = await CBZip2InputStream.CreateAsync(
stream,
decompressConcatenated,
+leaveOpen,
cancellationToken
);
}

View File

@@ -1,38 +1,21 @@
using System;
using System.IO;
+using System.Text;
using System.Threading;
using System.Threading.Tasks;
-using SharpCompress.IO;
namespace SharpCompress.Compressors.BZip2;
-public sealed partial class BZip2Stream : Stream, IStreamStack
+public sealed partial class BZip2Stream : Stream
{
-#if DEBUG_STREAMS
-long IStreamStack.InstanceId { get; set; }
-#endif
-int IStreamStack.DefaultBufferSize { get; set; }
-Stream IStreamStack.BaseStream() => stream;
-int IStreamStack.BufferSize
-{
-get => 0;
-set { }
-}
-int IStreamStack.BufferPosition
-{
-get => 0;
-set { }
-}
-void IStreamStack.SetPosition(long position) { }
private Stream stream = default!;
private bool isDisposed;
-private bool leaveOpen;
+private readonly bool leaveOpen;
-private BZip2Stream() { }
+private BZip2Stream(bool leaveOpen)
+{
+this.leaveOpen = leaveOpen;
+}
/// <summary>
/// Create a BZip2Stream
@@ -47,11 +30,7 @@ public sealed partial class BZip2Stream : Stream, IStreamStack
bool leaveOpen = false
)
{
-var bZip2Stream = new BZip2Stream();
-bZip2Stream.leaveOpen = leaveOpen;
-#if DEBUG_STREAMS
-bZip2Stream.DebugConstruct(typeof(BZip2Stream));
-#endif
+var bZip2Stream = new BZip2Stream(leaveOpen);
bZip2Stream.Mode = compressionMode;
if (bZip2Stream.Mode == CompressionMode.Compress)
{
@@ -78,9 +57,6 @@ public sealed partial class BZip2Stream : Stream, IStreamStack
return;
}
isDisposed = true;
-#if DEBUG_STREAMS
-this.DebugDispose(typeof(BZip2Stream));
-#endif
if (disposing)
{
stream.Dispose();
@@ -132,7 +108,7 @@ public sealed partial class BZip2Stream : Stream, IStreamStack
/// <returns></returns>
public static bool IsBZip2(Stream stream)
{
-var br = new BinaryReader(stream);
+using var br = new BinaryReader(stream, Encoding.Default, leaveOpen: true);
var chars = br.ReadBytes(2);
if (chars.Length < 2 || chars[0] != 'B' || chars[1] != 'Z')
{
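The IsBZip2 change is worth a gloss: the old reader was never disposed, and disposing a plain new BinaryReader(stream) would have closed the caller's stream. The leaveOpen: true overload lets the reader be disposed while the stream stays usable. A standalone illustration with made-up bytes:

    using System.IO;
    using System.Text;

    var ms = new MemoryStream(new byte[] { (byte)'B', (byte)'Z', (byte)'h' });
    using (var br = new BinaryReader(ms, Encoding.Default, leaveOpen: true))
    {
        var magic = br.ReadBytes(2); // "BZ"
    }
    // ms is still open here; rewind before handing it to the decoder.
    ms.Position = 0;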

View File

@@ -857,11 +857,11 @@ internal partial class CBZip2InputStream
public static async ValueTask<CBZip2InputStream> CreateAsync(
Stream zStream,
bool decompressConcatenated,
+bool leaveOpen = false,
CancellationToken cancellationToken = default
)
{
-var cbZip2InputStream = new CBZip2InputStream();
-cbZip2InputStream.decompressConcatenated = decompressConcatenated;
+var cbZip2InputStream = new CBZip2InputStream(decompressConcatenated, leaveOpen);
cbZip2InputStream.ll8 = null;
cbZip2InputStream.tt = null;
await cbZip2InputStream.BsSetStreamAsync(zStream, cancellationToken);

View File

@@ -5,7 +5,6 @@ using System.Buffers;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
-using SharpCompress.IO;
/*
 * Copyright 2001,2004-2005 The Apache Software Foundation
@@ -41,28 +40,8 @@ namespace SharpCompress.Compressors.BZip2;
 * start of the BZIP2 stream to make it compatible with other PGP programs.
 */
-internal partial class CBZip2InputStream : Stream, IStreamStack
+internal partial class CBZip2InputStream : Stream
{
-#if DEBUG_STREAMS
-long IStreamStack.InstanceId { get; set; }
-#endif
-int IStreamStack.DefaultBufferSize { get; set; }
-Stream IStreamStack.BaseStream() => bsStream;
-int IStreamStack.BufferSize
-{
-get => 0;
-set { }
-}
-int IStreamStack.BufferPosition
-{
-get => 0;
-set { }
-}
-void IStreamStack.SetPosition(long position) { }
private static void Cadvise()
{
//System.out.Println("CRC Error");
@@ -168,8 +147,8 @@ internal partial class CBZip2InputStream : Stream, IStreamStack
storedCombinedCRC;
private int computedBlockCRC,
computedCombinedCRC;
-private bool decompressConcatenated;
-private bool leaveOpen;
+private readonly bool decompressConcatenated;
+private readonly bool leaveOpen;
private int i2,
count,
@@ -183,7 +162,11 @@ internal partial class CBZip2InputStream : Stream, IStreamStack
private char z;
private bool isDisposed;
-private CBZip2InputStream() { }
+private CBZip2InputStream(bool decompressConcatenated, bool leaveOpen)
+{
+this.decompressConcatenated = decompressConcatenated;
+this.leaveOpen = leaveOpen;
+}
public static CBZip2InputStream Create(
Stream zStream,
@@ -191,9 +174,7 @@ internal partial class CBZip2InputStream : Stream, IStreamStack
bool leaveOpen
)
{
-var cbZip2InputStream = new CBZip2InputStream();
-cbZip2InputStream.decompressConcatenated = decompressConcatenated;
-cbZip2InputStream.leaveOpen = leaveOpen;
+var cbZip2InputStream = new CBZip2InputStream(decompressConcatenated, leaveOpen);
cbZip2InputStream.ll8 = null;
cbZip2InputStream.tt = null;
cbZip2InputStream.BsSetStream(zStream);
@@ -210,9 +191,6 @@ internal partial class CBZip2InputStream : Stream, IStreamStack
return;
}
isDisposed = true;
-#if DEBUG_STREAMS
-this.DebugDispose(typeof(CBZip2InputStream));
-#endif
base.Dispose(disposing);
bsStream?.Dispose();
}
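The constructor change above is the same pattern applied across this diff: move post-construction flag assignment into a constructor so the fields can be readonly and the compiler rejects later mutation. In miniature:

    internal sealed class Example
    {
        private readonly bool _decompressConcatenated;
        private readonly bool _leaveOpen;

        public Example(bool decompressConcatenated, bool leaveOpen)
        {
            // readonly fields may only be assigned here or at declaration.
            _decompressConcatenated = decompressConcatenated;
            _leaveOpen = leaveOpen;
        }
    }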

View File

@@ -2,8 +2,6 @@ using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
-using SharpCompress.IO;
/*
 * Copyright 2001,2004-2005 The Apache Software Foundation
 *
@@ -41,28 +39,8 @@ namespace SharpCompress.Compressors.BZip2;
 * start of the BZIP2 stream to make it compatible with other PGP programs.
 */
-internal sealed class CBZip2OutputStream : Stream, IStreamStack
+internal sealed class CBZip2OutputStream : Stream
{
-#if DEBUG_STREAMS
-long IStreamStack.InstanceId { get; set; }
-#endif
-int IStreamStack.DefaultBufferSize { get; set; }
-Stream IStreamStack.BaseStream() => bsStream;
-int IStreamStack.BufferSize
-{
-get => 0;
-set { }
-}
-int IStreamStack.BufferPosition
-{
-get => 0;
-set { }
-}
-void IStreamStack.SetPosition(long position) { }
private const int SETMASK = (1 << 21);
private const int CLEARMASK = (~SETMASK);
private const int GREATER_ICOST = 15;
@@ -359,10 +337,6 @@ internal sealed class CBZip2OutputStream : Stream, IStreamStack
BsSetStream(inStream);
-#if DEBUG_STREAMS
-this.DebugConstruct(typeof(CBZip2OutputStream));
-#endif
workFactor = 50;
if (inBlockSize > 9)
{
@@ -479,9 +453,6 @@ internal sealed class CBZip2OutputStream : Stream, IStreamStack
Finish();
disposed = true;
-#if DEBUG_STREAMS
-this.DebugDispose(typeof(CBZip2OutputStream));
-#endif
Dispose();
if (!leaveOpen)
{

View File

@@ -35,26 +35,6 @@ namespace SharpCompress.Compressors.Deflate;
public partial class DeflateStream : Stream, IStreamStack
{
-#if DEBUG_STREAMS
-long IStreamStack.InstanceId { get; set; }
-#endif
-int IStreamStack.DefaultBufferSize { get; set; }
-Stream IStreamStack.BaseStream() => _baseStream;
-int IStreamStack.BufferSize
-{
-get => 0;
-set { }
-}
-int IStreamStack.BufferPosition
-{
-get => 0;
-set { }
-}
-void IStreamStack.SetPosition(long position) { }
private readonly ZlibBaseStream _baseStream;
private bool _disposed;
private readonly bool _leaveOpen;
@@ -84,10 +64,6 @@ public partial class DeflateStream : Stream, IStreamStack
leaveOpen,
forceEncoding
);
-#if DEBUG_STREAMS
-this.DebugConstruct(typeof(DeflateStream));
-#endif
}
#region Zlib properties
@@ -274,9 +250,6 @@ public partial class DeflateStream : Stream, IStreamStack
{
if (!_disposed)
{
-#if DEBUG_STREAMS
-this.DebugDispose(typeof(DeflateStream));
-#endif
if (disposing && !_leaveOpen)
{
_baseStream?.Dispose();
@@ -290,6 +263,8 @@ public partial class DeflateStream : Stream, IStreamStack
}
}
+Stream IStreamStack.BaseStream() => _baseStream;
/// <summary>
/// Flush the stream.
/// </summary>
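Unlike the other streams in this diff, DeflateStream keeps IStreamStack but, in these hunks, retains only the explicitly implemented BaseStream() member, which is invisible on the class's public surface. A minimal illustration of how explicit interface members are reached; the instance and factory are hypothetical:

    DeflateStream deflate = CreateDeflateStream(); // hypothetical factory
    // deflate.BaseStream();                       // would not compile
    Stream inner = ((IStreamStack)deflate).BaseStream();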

View File

@@ -32,32 +32,11 @@ using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
-using SharpCompress.IO;
namespace SharpCompress.Compressors.Deflate;
-public partial class GZipStream : Stream, IStreamStack
+public partial class GZipStream : Stream
{
-#if DEBUG_STREAMS
-long IStreamStack.InstanceId { get; set; }
-#endif
-int IStreamStack.DefaultBufferSize { get; set; }
-Stream IStreamStack.BaseStream() => BaseStream;
-int IStreamStack.BufferSize
-{
-get => 0;
-set { }
-}
-int IStreamStack.BufferPosition
-{
-get => 0;
-set { }
-}
-void IStreamStack.SetPosition(long position) { }
internal static readonly DateTime UNIX_EPOCH = new(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
private string? _comment;
@@ -85,9 +64,6 @@ public partial class GZipStream : Stream, IStreamStack
)
{
BaseStream = new ZlibBaseStream(stream, mode, level, ZlibStreamFlavor.GZIP, encoding);
-#if DEBUG_STREAMS
-this.DebugConstruct(typeof(GZipStream));
-#endif
_encoding = encoding;
}
@@ -236,9 +212,6 @@ public partial class GZipStream : Stream, IStreamStack
Crc32 = BaseStream.Crc32;
}
_disposed = true;
-#if DEBUG_STREAMS
-this.DebugDispose(typeof(GZipStream));
-#endif
}
}
finally

Some files were not shown because too many files have changed in this diff Show More