Mirror of https://github.com/adamhathcock/sharpcompress.git, synced 2026-02-08 13:34:57 +00:00

Compare commits: copilot/fi ... copilot/fi (78 commits)
| Author | SHA1 | Date |
|---|---|---|
| | f59b14a278 | |
| | 3870cc8d34 | |
| | 242e442a8c | |
| | d95d1e928b | |
| | 2a3086a0d7 | |
| | 41c3cc1a18 | |
| | 1b1df86a11 | |
| | e0660e7775 | |
| | 99a6c4de88 | |
| | ffa765bd97 | |
| | b1696524b3 | |
| | 6a37c55085 | |
| | 9c1c6fff9f | |
| | db8c6f4bcb | |
| | ff17ecda7d | |
| | 692058677c | |
| | 1e90d69912 | |
| | 64a1cc68e1 | |
| | 20353f35ff | |
| | e44a43d2b1 | |
| | 8997f00b9b | |
| | c5da416764 | |
| | 840e58fc03 | |
| | 7f911c5219 | |
| | a887390c23 | |
| | f4dddcec8e | |
| | 0d9d82d7e6 | |
| | d34a47c148 | |
| | 5aa216bd21 | |
| | 8af47548fe | |
| | 131bd2b7b8 | |
| | 1993673a22 | |
| | 30e036f9ec | |
| | 095c871174 | |
| | 6d73c5b295 | |
| | cc4d28193c | |
| | 9433e06b93 | |
| | a92aaa51d5 | |
| | d41908adeb | |
| | 81ca15b567 | |
| | b81d0fd730 | |
| | 3a1bb187e8 | |
| | 3fee14a070 | |
| | 5bf789ac65 | |
| | be06049db3 | |
| | a0435f6a60 | |
| | 2321e2c90b | |
| | 97e98d8629 | |
| | d96e7362d2 | |
| | 7dd46fe5ed | |
| | 04c044cb2b | |
| | cc10a12fbc | |
| | 8b0a1c699f | |
| | 15ca7c9807 | |
| | 2b4da7e39b | |
| | 31f81f38af | |
| | 72cf77b7c7 | |
| | 0fe48c647e | |
| | 7b06652bff | |
| | 434ce05416 | |
| | 0698031ed4 | |
| | 51237a34eb | |
| | b8264a8131 | |
| | cad923018e | |
| | db94b49941 | |
| | 72d15d9cbf | |
| | e0186eadc0 | |
| | 4cfa5b04af | |
| | f2c54b1f8b | |
| | d7d0bc6582 | |
| | dd9dc2500b | |
| | 4efb109da8 | |
| | 4c61628078 | |
| | fa1d7af22f | |
| | a771ba3bc0 | |
| | 7dd0da5fd7 | |
| | da87e45534 | |
| | 2ffaef5563 | |
@@ -3,7 +3,7 @@
     "isRoot": true,
     "tools": {
       "csharpier": {
-        "version": "1.1.2",
+        "version": "1.2.1",
         "commands": [
           "csharpier"
         ],
.github/workflows/dotnetcore.yml (vendored, 4 changes)
@@ -14,10 +14,10 @@ jobs:
         os: [windows-latest, ubuntu-latest]

     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
      - uses: actions/setup-dotnet@v5
        with:
-          dotnet-version: 8.0.x
+          dotnet-version: 10.0.x
      - run: dotnet run --project build/build.csproj
      - uses: actions/upload-artifact@v5
        with:
.gitignore (vendored, 4 changes)
@@ -11,11 +11,11 @@ TestResults/
 packages/*/
 project.lock.json
-tests/TestArchives/Scratch
+tests/TestArchives/*/Scratch
+tests/TestArchives/*/Scratch2
 .vs
 tools
 .vscode
 .idea/

 .DS_Store
 *.snupkg
-/tests/TestArchives/6d23a38c-f064-4ef1-ad89-b942396f53b9/Scratch
.vscode/extensions.json (vendored, new file, 9 lines added)
@@ -0,0 +1,9 @@
+{
+  "recommendations": [
+    "ms-dotnettools.csdevkit",
+    "ms-dotnettools.csharp",
+    "ms-dotnettools.vscode-dotnet-runtime",
+    "csharpier.csharpier-vscode",
+    "formulahendry.dotnet-test-explorer"
+  ]
+}
.vscode/launch.json (vendored, new file, 97 lines added)
@@ -0,0 +1,97 @@
|
||||
{
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Debug Tests (net10.0)",
|
||||
"type": "coreclr",
|
||||
"request": "launch",
|
||||
"preLaunchTask": "build",
|
||||
"program": "dotnet",
|
||||
"args": [
|
||||
"test",
|
||||
"${workspaceFolder}/tests/SharpCompress.Test/SharpCompress.Test.csproj",
|
||||
"-f",
|
||||
"net10.0",
|
||||
"--no-build",
|
||||
"--verbosity=normal"
|
||||
],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"console": "internalConsole",
|
||||
"stopAtEntry": false
|
||||
},
|
||||
{
|
||||
"name": "Debug Specific Test (net10.0)",
|
||||
"type": "coreclr",
|
||||
"request": "launch",
|
||||
"preLaunchTask": "build",
|
||||
"program": "dotnet",
|
||||
"args": [
|
||||
"test",
|
||||
"${workspaceFolder}/tests/SharpCompress.Test/SharpCompress.Test.csproj",
|
||||
"-f",
|
||||
"net10.0",
|
||||
"--no-build",
|
||||
"--filter",
|
||||
"FullyQualifiedName~${input:testName}"
|
||||
],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"console": "internalConsole",
|
||||
"stopAtEntry": false
|
||||
},
|
||||
{
|
||||
"name": "Debug Performance Tests",
|
||||
"type": "coreclr",
|
||||
"request": "launch",
|
||||
"preLaunchTask": "build",
|
||||
"program": "dotnet",
|
||||
"args": [
|
||||
"run",
|
||||
"--project",
|
||||
"${workspaceFolder}/tests/SharpCompress.Performance/SharpCompress.Performance.csproj",
|
||||
"--no-build"
|
||||
],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"console": "internalConsole",
|
||||
"stopAtEntry": false
|
||||
},
|
||||
{
|
||||
"name": "Debug Build Script",
|
||||
"type": "coreclr",
|
||||
"request": "launch",
|
||||
"program": "dotnet",
|
||||
"args": [
|
||||
"run",
|
||||
"--project",
|
||||
"${workspaceFolder}/build/build.csproj",
|
||||
"--",
|
||||
"${input:buildTarget}"
|
||||
],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"console": "internalConsole",
|
||||
"stopAtEntry": false
|
||||
}
|
||||
],
|
||||
"inputs": [
|
||||
{
|
||||
"id": "testName",
|
||||
"type": "promptString",
|
||||
"description": "Enter test name or pattern (e.g., TestMethodName or ClassName)",
|
||||
"default": ""
|
||||
},
|
||||
{
|
||||
"id": "buildTarget",
|
||||
"type": "pickString",
|
||||
"description": "Select build target",
|
||||
"options": [
|
||||
"clean",
|
||||
"restore",
|
||||
"build",
|
||||
"test",
|
||||
"format",
|
||||
"publish",
|
||||
"default"
|
||||
],
|
||||
"default": "build"
|
||||
}
|
||||
]
|
||||
}
|
||||
.vscode/settings.json (vendored, new file, 29 lines added)
@@ -0,0 +1,29 @@
+{
+  "dotnet.defaultSolution": "SharpCompress.sln",
+  "files.exclude": {
+    "**/bin": true,
+    "**/obj": true
+  },
+  "files.watcherExclude": {
+    "**/bin/**": true,
+    "**/obj/**": true,
+    "**/artifacts/**": true
+  },
+  "search.exclude": {
+    "**/bin": true,
+    "**/obj": true,
+    "**/artifacts": true
+  },
+  "editor.formatOnSave": false,
+  "[csharp]": {
+    "editor.defaultFormatter": "csharpier.csharpier-vscode",
+    "editor.formatOnSave": true,
+    "editor.codeActionsOnSave": {
+      "source.fixAll": "explicit"
+    }
+  },
+  "csharpier.enableDebugLogs": false,
+  "omnisharp.enableRoslynAnalyzers": true,
+  "omnisharp.enableEditorConfigSupport": true,
+  "dotnet-test-explorer.testProjectPath": "tests/**/*.csproj"
+}
.vscode/tasks.json (vendored, new file, 178 lines added)
@@ -0,0 +1,178 @@
|
||||
{
|
||||
"version": "2.0.0",
|
||||
"tasks": [
|
||||
{
|
||||
"label": "build",
|
||||
"command": "dotnet",
|
||||
"type": "process",
|
||||
"args": [
|
||||
"build",
|
||||
"${workspaceFolder}/SharpCompress.sln",
|
||||
"/property:GenerateFullPaths=true",
|
||||
"/consoleloggerparameters:NoSummary;ForceNoAlign"
|
||||
],
|
||||
"problemMatcher": "$msCompile",
|
||||
"group": {
|
||||
"kind": "build",
|
||||
"isDefault": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "build-release",
|
||||
"command": "dotnet",
|
||||
"type": "process",
|
||||
"args": [
|
||||
"build",
|
||||
"${workspaceFolder}/SharpCompress.sln",
|
||||
"-c",
|
||||
"Release",
|
||||
"/property:GenerateFullPaths=true",
|
||||
"/consoleloggerparameters:NoSummary;ForceNoAlign"
|
||||
],
|
||||
"problemMatcher": "$msCompile",
|
||||
"group": "build"
|
||||
},
|
||||
{
|
||||
"label": "build-library",
|
||||
"command": "dotnet",
|
||||
"type": "process",
|
||||
"args": [
|
||||
"build",
|
||||
"${workspaceFolder}/src/SharpCompress/SharpCompress.csproj",
|
||||
"/property:GenerateFullPaths=true",
|
||||
"/consoleloggerparameters:NoSummary;ForceNoAlign"
|
||||
],
|
||||
"problemMatcher": "$msCompile",
|
||||
"group": "build"
|
||||
},
|
||||
{
|
||||
"label": "restore",
|
||||
"command": "dotnet",
|
||||
"type": "process",
|
||||
"args": [
|
||||
"restore",
|
||||
"${workspaceFolder}/SharpCompress.sln"
|
||||
],
|
||||
"problemMatcher": "$msCompile"
|
||||
},
|
||||
{
|
||||
"label": "clean",
|
||||
"command": "dotnet",
|
||||
"type": "process",
|
||||
"args": [
|
||||
"clean",
|
||||
"${workspaceFolder}/SharpCompress.sln"
|
||||
],
|
||||
"problemMatcher": "$msCompile"
|
||||
},
|
||||
{
|
||||
"label": "test",
|
||||
"command": "dotnet",
|
||||
"type": "process",
|
||||
"args": [
|
||||
"test",
|
||||
"${workspaceFolder}/tests/SharpCompress.Test/SharpCompress.Test.csproj",
|
||||
"--no-build",
|
||||
"--verbosity=normal"
|
||||
],
|
||||
"problemMatcher": "$msCompile",
|
||||
"group": {
|
||||
"kind": "test",
|
||||
"isDefault": true
|
||||
},
|
||||
"dependsOn": "build"
|
||||
},
|
||||
{
|
||||
"label": "test-net10",
|
||||
"command": "dotnet",
|
||||
"type": "process",
|
||||
"args": [
|
||||
"test",
|
||||
"${workspaceFolder}/tests/SharpCompress.Test/SharpCompress.Test.csproj",
|
||||
"-f",
|
||||
"net10.0",
|
||||
"--no-build",
|
||||
"--verbosity=normal"
|
||||
],
|
||||
"problemMatcher": "$msCompile",
|
||||
"group": "test",
|
||||
"dependsOn": "build"
|
||||
},
|
||||
{
|
||||
"label": "test-net48",
|
||||
"command": "dotnet",
|
||||
"type": "process",
|
||||
"args": [
|
||||
"test",
|
||||
"${workspaceFolder}/tests/SharpCompress.Test/SharpCompress.Test.csproj",
|
||||
"-f",
|
||||
"net48",
|
||||
"--no-build",
|
||||
"--verbosity=normal"
|
||||
],
|
||||
"problemMatcher": "$msCompile",
|
||||
"group": "test",
|
||||
"dependsOn": "build"
|
||||
},
|
||||
{
|
||||
"label": "format",
|
||||
"command": "dotnet",
|
||||
"type": "process",
|
||||
"args": [
|
||||
"csharpier",
|
||||
"."
|
||||
],
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "format-check",
|
||||
"command": "dotnet",
|
||||
"type": "process",
|
||||
"args": [
|
||||
"csharpier",
|
||||
"check",
|
||||
"."
|
||||
],
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "run-build-script",
|
||||
"command": "dotnet",
|
||||
"type": "process",
|
||||
"args": [
|
||||
"run",
|
||||
"--project",
|
||||
"${workspaceFolder}/build/build.csproj"
|
||||
],
|
||||
"problemMatcher": "$msCompile"
|
||||
},
|
||||
{
|
||||
"label": "pack",
|
||||
"command": "dotnet",
|
||||
"type": "process",
|
||||
"args": [
|
||||
"pack",
|
||||
"${workspaceFolder}/src/SharpCompress/SharpCompress.csproj",
|
||||
"-c",
|
||||
"Release",
|
||||
"-o",
|
||||
"${workspaceFolder}/artifacts/"
|
||||
],
|
||||
"problemMatcher": "$msCompile",
|
||||
"dependsOn": "build-release"
|
||||
},
|
||||
{
|
||||
"label": "performance-tests",
|
||||
"command": "dotnet",
|
||||
"type": "process",
|
||||
"args": [
|
||||
"run",
|
||||
"--project",
|
||||
"${workspaceFolder}/tests/SharpCompress.Performance/SharpCompress.Performance.csproj",
|
||||
"-c",
|
||||
"Release"
|
||||
],
|
||||
"problemMatcher": "$msCompile"
|
||||
}
|
||||
]
|
||||
}
|
||||
AGENTS.md (39 changes)
@@ -49,6 +49,30 @@ SharpCompress is a pure C# compression library supporting multiple archive forma
 - Use `dotnet test` to run tests
 - Solution file: `SharpCompress.sln`

+### Directory Structure
+```
+src/SharpCompress/
+├── Archives/ # IArchive implementations (Zip, Tar, Rar, 7Zip, GZip)
+├── Readers/ # IReader implementations (forward-only)
+├── Writers/ # IWriter implementations (forward-only)
+├── Compressors/ # Low-level compression streams (BZip2, Deflate, LZMA, etc.)
+├── Factories/ # Format detection and factory pattern
+├── Common/ # Shared types (ArchiveType, Entry, Options)
+├── Crypto/ # Encryption implementations
+└── IO/ # Stream utilities and wrappers
+
+tests/SharpCompress.Test/
+├── Zip/, Tar/, Rar/, SevenZip/, GZip/, BZip2/ # Format-specific tests
+├── TestBase.cs # Base test class with helper methods
+└── TestArchives/ # Test data (not checked into main test project)
+```
+
+### Factory Pattern
+All format types implement factory interfaces (`IArchiveFactory`, `IReaderFactory`, `IWriterFactory`) for auto-detection:
+- `ReaderFactory.Open()` - Auto-detects format by probing stream
+- `WriterFactory.Open()` - Creates writer for specified `ArchiveType`
+- Factories located in: `src/SharpCompress/Factories/`
+
 ## Nullable Reference Types

 - Declare variables non-nullable, and check for `null` at entry points.
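The factory-pattern notes in the hunk above can be illustrated with a short sketch. This is an editorial example rather than part of the diff; it assumes the public `ReaderFactory`/`WriterFactory` APIs the bullets describe, and the file paths are placeholders.

```csharp
using System.IO;
using SharpCompress.Common;
using SharpCompress.Readers;
using SharpCompress.Writers;

// Forward-only extraction with format auto-detection (the factory probes the stream).
using (Stream input = File.OpenRead("input.archive"))
using (var reader = ReaderFactory.Open(input))
{
    while (reader.MoveToNextEntry())
    {
        if (!reader.Entry.IsDirectory)
        {
            reader.WriteEntryToDirectory(
                "output",
                new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
            );
        }
    }
}

// Writing requires an explicit ArchiveType (and compression) up front.
using (Stream output = File.OpenWrite("output.zip"))
using (var writer = WriterFactory.Open(output, ArchiveType.Zip, new WriterOptions(CompressionType.Deflate)))
{
    writer.Write("data.bin", "data.bin"); // entry key, then source file path
}
```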
@@ -116,3 +140,18 @@ SharpCompress supports multiple archive and compression formats:
 - Use test archives from `tests/TestArchives` directory for consistency.
 - Test stream disposal and `LeaveStreamOpen` behavior.
 - Test edge cases: empty archives, large files, corrupted archives, encrypted archives.
+
+### Test Organization
+- Base class: `TestBase` - Provides `TEST_ARCHIVES_PATH`, `SCRATCH_FILES_PATH`, temp directory management
+- Framework: xUnit with AwesomeAssertions
+- Test archives: `tests/TestArchives/` - Use existing archives, don't create new ones unnecessarily
+- Match naming style of nearby test files
+
+## Common Pitfalls
+
+1. **Don't mix Archive and Reader APIs** - Archive needs seekable stream, Reader doesn't
+2. **Solid archives (Rar, 7Zip)** - Use `ExtractAllEntries()` for best performance, not individual entry extraction
+3. **Stream disposal** - Always set `LeaveStreamOpen` explicitly when needed (default is to close)
+4. **Tar + non-seekable stream** - Must provide file size or it will throw
+5. **Multi-framework differences** - Some features differ between .NET Framework and modern .NET (e.g., Mono.Posix)
+6. **Format detection** - Use `ReaderFactory.Open()` for auto-detection, test with actual archive files
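A minimal sketch of the "solid archives" recommendation from the pitfalls list above (editorial illustration, not part of the diff; the archive path is a placeholder):

```csharp
using SharpCompress.Archives.Rar;
using SharpCompress.Common;
using SharpCompress.Readers;

using (var archive = RarArchive.Open("solid.rar"))
{
    // One forward pass over the whole archive; for solid Rar/7Zip this avoids
    // re-decompressing the solid block once per individual entry.
    using (var reader = archive.ExtractAllEntries())
    {
        while (reader.MoveToNextEntry())
        {
            if (!reader.Entry.IsDirectory)
            {
                reader.WriteEntryToDirectory(
                    "output",
                    new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
                );
            }
        }
    }
}
```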
@@ -1,20 +1,20 @@
 <Project>
   <ItemGroup>
     <PackageVersion Include="Bullseye" Version="6.0.0" />
-    <PackageVersion Include="AwesomeAssertions" Version="9.2.1" />
+    <PackageVersion Include="AwesomeAssertions" Version="9.3.0" />
     <PackageVersion Include="Glob" Version="1.1.9" />
     <PackageVersion Include="JetBrains.Profiler.SelfApi" Version="2.5.14" />
-    <PackageVersion Include="Microsoft.Bcl.AsyncInterfaces" Version="8.0.0" />
-    <PackageVersion Include="Microsoft.NET.Test.Sdk" Version="18.0.0" />
+    <PackageVersion Include="Microsoft.Bcl.AsyncInterfaces" Version="10.0.0" />
+    <PackageVersion Include="Microsoft.NET.Test.Sdk" Version="18.0.1" />
     <PackageVersion Include="Mono.Posix.NETStandard" Version="1.0.0" />
     <PackageVersion Include="SimpleExec" Version="12.0.0" />
     <PackageVersion Include="System.Buffers" Version="4.6.1" />
     <PackageVersion Include="System.Memory" Version="4.6.3" />
-    <PackageVersion Include="System.Text.Encoding.CodePages" Version="8.0.0" />
+    <PackageVersion Include="System.Text.Encoding.CodePages" Version="10.0.0" />
     <PackageVersion Include="xunit" Version="2.9.3" />
     <PackageVersion Include="xunit.runner.visualstudio" Version="3.1.5" />
     <PackageVersion Include="ZstdSharp.Port" Version="0.8.6" />
-    <PackageVersion Include="Microsoft.NET.ILLink.Tasks" Version="8.0.21" />
+    <PackageVersion Include="Microsoft.NET.ILLink.Tasks" Version="10.0.0" />
     <PackageVersion Include="Microsoft.SourceLink.GitHub" Version="8.0.0" />
     <PackageVersion Include="Microsoft.NETFramework.ReferenceAssemblies" Version="1.0.3" />
   </ItemGroup>
@@ -22,11 +22,16 @@
 | 7Zip (4) | LZMA, LZMA2, BZip2, PPMd, BCJ, BCJ2, Deflate | Decompress | SevenZipArchive | N/A | N/A |

 1. SOLID Rars are only supported in the RarReader API.
-2. Zip format supports pkware and WinzipAES encryption. However, encrypted LZMA is not supported. Zip64 reading/writing is supported but only with seekable streams as the Zip spec doesn't support Zip64 data in post data descriptors. Deflate64 is only supported for reading.
+2. Zip format supports pkware and WinzipAES encryption. However, encrypted LZMA is not supported. Zip64 reading/writing is supported but only with seekable streams as the Zip spec doesn't support Zip64 data in post data descriptors. Deflate64 is only supported for reading. See [Zip Format Notes](#zip-format-notes) for details on multi-volume archives and streaming behavior.
 3. The Tar format requires a file size in the header. If no size is specified to the TarWriter and the stream is not seekable, then an exception will be thrown.
 4. The 7Zip format doesn't allow for reading as a forward-only stream so 7Zip is only supported through the Archive API
 5. LZip has no support for extra data like the file name or timestamp. There is a default filename used when looking at the entry Key on the archive.

+### Zip Format Notes
+
+- Multi-volume/split ZIP archives require ZipArchive (seekable streams) as ZipReader cannot seek across volume files.
+- ZipReader processes entries from LocalEntry headers (which include directory entries ending with `/`) and intentionally skips DirectoryEntry headers from the central directory, as they are redundant in streaming mode - all entry data comes from LocalEntry headers which ZipReader has already processed.
+
 ## Compression Streams

 For those who want to directly compress/decompress bits. The single file formats are represented here as well. However, BZip2, LZip and XZ have no metadata (GZip has a little) so using them without something like a Tar file makes little sense.
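As a hedged illustration of the "Compression Streams" paragraph above (not part of the diff; file names are placeholders), wrapping a raw stream in one of the compressor stream types looks roughly like this:

```csharp
using System.IO;
using SharpCompress.Compressors;
using SharpCompress.Compressors.BZip2;

// Decompress a bare .bz2 payload; pair it with a TarReader if the payload is a tarball.
using (Stream compressed = File.OpenRead("payload.bz2"))
using (var bzip2 = new BZip2Stream(compressed, CompressionMode.Decompress, decompressConcatenated: false))
using (Stream destination = File.Create("payload.bin"))
{
    bzip2.CopyTo(destination);
}
```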
@@ -1,6 +1,6 @@
 # SharpCompress

-SharpCompress is a compression library in pure C# for .NET Framework 4.62, .NET Standard 2.1, .NET 6.0 and NET 8.0 that can unrar, un7zip, unzip, untar unbzip2, ungzip, unlzip, unzstd with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip are implemented.
+SharpCompress is a compression library in pure C# for .NET Framework 4.8, .NET 8.0 and .NET 10.0 that can unrar, un7zip, unzip, untar unbzip2, ungzip, unlzip, unzstd, unarc and unarj with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip are implemented.

 The major feature is support for non-seekable streams so large files can be processed on the fly (i.e. download stream).

@@ -1,7 +1,7 @@
 <Project Sdk="Microsoft.NET.Sdk">
   <PropertyGroup>
     <OutputType>Exe</OutputType>
-    <TargetFramework>net8.0</TargetFramework>
+    <TargetFramework>net10.0</TargetFramework>
   </PropertyGroup>
   <ItemGroup>
     <PackageReference Include="Bullseye" />
@@ -1,7 +1,7 @@
 {
   "version": 2,
   "dependencies": {
-    "net8.0": {
+    "net10.0": {
       "Bullseye": {
         "type": "Direct",
         "requested": "[6.0.0, )",
@@ -1,6 +1,6 @@
 {
   "sdk": {
-    "version": "8.0.100",
+    "version": "10.0.100",
     "rollForward": "latestFeature"
   }
 }
@@ -161,6 +161,11 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
     /// </summary>
     public virtual bool IsSolid => false;

+    /// <summary>
+    /// Archive is ENCRYPTED (this means the Archive has password-protected files).
+    /// </summary>
+    public virtual bool IsEncrypted => false;
+
     /// <summary>
     /// The archive can find all the parts of the archive needed to fully extract the archive. This forces the parsing of the entire archive.
     /// </summary>
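The new `IsEncrypted` property added above can be consumed roughly as follows (editorial sketch; the password and path are placeholders):

```csharp
using SharpCompress.Archives;
using SharpCompress.Readers;

using (var archive = ArchiveFactory.Open("maybe-protected.zip", new ReaderOptions { Password = "secret" }))
{
    if (archive.IsEncrypted)
    {
        // Entries are password protected; extraction fails without the right password.
    }
}
```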
@@ -3,6 +3,10 @@ using System.Collections.Generic;
 using System.IO;
 using System.Linq;
 using SharpCompress.Common;
+using SharpCompress.Common.Tar.Headers;
+using SharpCompress.Compressors;
+using SharpCompress.Compressors.BZip2;
+using SharpCompress.Compressors.LZMA;
 using SharpCompress.Factories;
 using SharpCompress.IO;
 using SharpCompress.Readers;
@@ -131,10 +135,10 @@ public static class ArchiveFactory
     {
         finfo.NotNull(nameof(finfo));
         using Stream stream = finfo.OpenRead();
-        return FindFactory<T>(stream);
+        return FindFactory<T>(stream, finfo.Name);
     }

-    private static T FindFactory<T>(Stream stream)
+    private static T FindFactory<T>(Stream stream, string? fileName = null)
         where T : IFactory
     {
         stream.NotNull(nameof(stream));
@@ -159,6 +163,16 @@ public static class ArchiveFactory
             }
         }

+        stream.Seek(startPosition, SeekOrigin.Begin);
+
+        // Check if this is a compressed tar file (tar.bz2, tar.lz, etc.)
+        // These formats are supported by ReaderFactory but not by ArchiveFactory
+        var compressedTarMessage = TryGetCompressedTarMessage(stream, fileName);
+        if (compressedTarMessage != null)
+        {
+            throw new InvalidOperationException(compressedTarMessage);
+        }
+
         var extensions = string.Join(", ", factories.Select(item => item.Name));

         throw new InvalidOperationException(
@@ -248,4 +262,111 @@ public static class ArchiveFactory
     }

     public static IArchiveFactory AutoFactory { get; } = new AutoArchiveFactory();
+
+    /// <summary>
+    /// Checks if the stream is a compressed tar file (tar.bz2, tar.lz, etc.) that should use ReaderFactory instead.
+    /// Returns an error message if detected, null otherwise.
+    /// </summary>
+    private static string? TryGetCompressedTarMessage(Stream stream, string? fileName)
+    {
+        var startPosition = stream.Position;
+        try
+        {
+            // Check if it's a BZip2 file
+            if (BZip2Stream.IsBZip2(stream))
+            {
+                stream.Seek(startPosition, SeekOrigin.Begin);
+
+                // Try to decompress and check if it contains a tar archive
+                using var decompressed = new BZip2Stream(stream, CompressionMode.Decompress, true);
+                if (IsTarStream(decompressed))
+                {
+                    return "This appears to be a tar.bz2 archive. The Archive API requires seekable streams, but decompression streams are not seekable. "
+                        + "Please use ReaderFactory.Open() instead for forward-only extraction, "
+                        + "or decompress the file first and then open the resulting tar file with ArchiveFactory.Open().";
+                }
+                return null;
+            }
+
+            stream.Seek(startPosition, SeekOrigin.Begin);
+
+            // Check if it's an LZip file
+            if (LZipStream.IsLZipFile(stream))
+            {
+                stream.Seek(startPosition, SeekOrigin.Begin);
+
+                // Try to decompress and check if it contains a tar archive
+                using var decompressed = new LZipStream(stream, CompressionMode.Decompress);
+                if (IsTarStream(decompressed))
+                {
+                    return "This appears to be a tar.lz archive. The Archive API requires seekable streams, but decompression streams are not seekable. "
+                        + "Please use ReaderFactory.Open() instead for forward-only extraction, "
+                        + "or decompress the file first and then open the resulting tar file with ArchiveFactory.Open().";
+                }
+                return null;
+            }
+
+            // Check file extension as a fallback for other compressed tar formats
+            if (fileName != null)
+            {
+                var lowerFileName = fileName.ToLowerInvariant();
+                if (
+                    lowerFileName.EndsWith(".tar.bz2")
+                    || lowerFileName.EndsWith(".tbz")
+                    || lowerFileName.EndsWith(".tbz2")
+                    || lowerFileName.EndsWith(".tb2")
+                    || lowerFileName.EndsWith(".tz2")
+                    || lowerFileName.EndsWith(".tar.lz")
+                    || lowerFileName.EndsWith(".tar.xz")
+                    || lowerFileName.EndsWith(".txz")
+                    || lowerFileName.EndsWith(".tar.zst")
+                    || lowerFileName.EndsWith(".tar.zstd")
+                    || lowerFileName.EndsWith(".tzst")
+                    || lowerFileName.EndsWith(".tzstd")
+                    || lowerFileName.EndsWith(".tar.z")
+                    || lowerFileName.EndsWith(".tz")
+                    || lowerFileName.EndsWith(".taz")
+                )
+                {
+                    return $"The file '{fileName}' appears to be a compressed tar archive. The Archive API requires seekable streams, but decompression streams are not seekable. "
+                        + "Please use ReaderFactory.Open() instead for forward-only extraction, "
+                        + "or decompress the file first and then open the resulting tar file with ArchiveFactory.Open().";
+                }
+            }
+
+            return null;
+        }
+        catch
+        {
+            // If we can't determine, just return null and let the normal error handling proceed
+            return null;
+        }
+        finally
+        {
+            try
+            {
+                stream.Seek(startPosition, SeekOrigin.Begin);
+            }
+            catch
+            {
+                // Ignore seek failures
+            }
+        }
+    }
+
+    /// <summary>
+    /// Checks if a stream contains a tar archive by trying to read a tar header.
+    /// </summary>
+    private static bool IsTarStream(Stream stream)
+    {
+        try
+        {
+            var tarHeader = new TarHeader(new ArchiveEncoding());
+            return tarHeader.Read(new BinaryReader(stream));
+        }
+        catch
+        {
+            return false;
+        }
+    }
 }
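The error message added above steers callers toward the Reader API for compressed tarballs. A hedged sketch of that recommended path (not part of the diff; the file name is a placeholder):

```csharp
using System.IO;
using SharpCompress.Common;
using SharpCompress.Readers;

// tar.bz2 / tar.lz cannot be opened with ArchiveFactory because the decompression
// stream is not seekable; ReaderFactory handles it as a forward-only stream.
using (Stream stream = File.OpenRead("backup.tar.bz2"))
using (var reader = ReaderFactory.Open(stream))
{
    while (reader.MoveToNextEntry())
    {
        if (!reader.Entry.IsDirectory)
        {
            reader.WriteEntryToDirectory(
                "restore",
                new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
            );
        }
    }
}
```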
@@ -128,6 +128,7 @@ public static class IArchiveEntryExtensions
             {
                 using var fs = File.Open(destinationFileName, fm);
+                await entry.WriteToAsync(fs, cancellationToken).ConfigureAwait(false);
             }
         },
         cancellationToken
     );
 }
@@ -84,6 +84,8 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>

     public override bool IsSolid => Volumes.First().IsSolidArchive;

+    public override bool IsEncrypted => Entries.First(x => !x.IsDirectory).IsEncrypted;
+
     public virtual int MinVersion => Volumes.First().MinVersion;
     public virtual int MaxVersion => Volumes.First().MaxVersion;

@@ -205,6 +205,8 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
             .GroupBy(x => x.FilePart.Folder)
             .Any(folder => folder.Count() > 1);

+    public override bool IsEncrypted => Entries.First(x => !x.IsDirectory).IsEncrypted;
+
     public override long TotalSize =>
         _database?._packSizes.Aggregate(0L, (total, packSize) => total + packSize) ?? 0;

@@ -57,7 +57,7 @@ namespace SharpCompress.Common.Arc
             return value switch
             {
                 1 or 2 => CompressionType.None,
-                3 => CompressionType.RLE90,
+                3 => CompressionType.Packed,
                 4 => CompressionType.Squeezed,
                 5 or 6 or 7 or 8 => CompressionType.Crunched,
                 9 => CompressionType.Squashed,
@@ -44,7 +44,7 @@ namespace SharpCompress.Common.Arc
                         Header.CompressedSize
                     );
                     break;
-                case CompressionType.RLE90:
+                case CompressionType.Packed:
                     compressedStream = new RunLength90Stream(
                         _stream,
                         (int)Header.CompressedSize
@@ -54,6 +54,14 @@
                     compressedStream = new SqueezeStream(_stream, (int)Header.CompressedSize);
                     break;
                 case CompressionType.Crunched:
+                    if (Header.OriginalSize > 128 * 1024)
+                    {
+                        throw new NotSupportedException(
+                            "CompressionMethod: "
+                                + Header.CompressionMethod
+                                + " with size > 128KB"
+                        );
+                    }
                     compressedStream = new ArcLzwStream(
                         _stream,
                         (int)Header.CompressedSize,
@@ -38,6 +38,22 @@ namespace SharpCompress.Common.Arj
                         Header.CompressedSize
                     );
                     break;
+                case CompressionMethod.CompressedMost:
+                case CompressionMethod.Compressed:
+                case CompressionMethod.CompressedFaster:
+                    if (Header.OriginalSize > 128 * 1024)
+                    {
+                        throw new NotSupportedException(
+                            "CompressionMethod: "
+                                + Header.CompressionMethod
+                                + " with size > 128KB"
+                        );
+                    }
+                    compressedStream = new LhaStream<Lh7DecoderCfg>(
+                        _stream,
+                        (int)Header.OriginalSize
+                    );
+                    break;
                 case CompressionMethod.CompressedFastest:
                     compressedStream = new LHDecoderStream(_stream, (int)Header.OriginalSize);
                     break;
@@ -23,7 +23,7 @@ public enum CompressionType
     Reduce4,
     Explode,
     Squeezed,
-    RLE90,
+    Packed,
     Crunched,
     Squashed,
     Crushed,
@@ -25,6 +25,10 @@ internal sealed class TarHeader

     internal const int BLOCK_SIZE = 512;

+    // Maximum size for long name/link headers to prevent memory exhaustion attacks
+    // This is generous enough for most real-world scenarios (32KB)
+    private const int MAX_LONG_NAME_SIZE = 32768;
+
     internal void Write(Stream output)
     {
         var buffer = new byte[BLOCK_SIZE];
@@ -186,6 +190,15 @@ internal sealed class TarHeader
     private string ReadLongName(BinaryReader reader, byte[] buffer)
     {
         var size = ReadSize(buffer);
+
+        // Validate size to prevent memory exhaustion from malformed headers
+        if (size < 0 || size > MAX_LONG_NAME_SIZE)
+        {
+            throw new InvalidFormatException(
+                $"Long name size {size} is invalid or exceeds maximum allowed size of {MAX_LONG_NAME_SIZE} bytes"
+            );
+        }
+
         var nameLength = (int)size;
         var nameBytes = reader.ReadBytes(nameLength);
         var remainingBytesToRead = BLOCK_SIZE - (nameLength % BLOCK_SIZE);
@@ -1,6 +1,7 @@
 using System;
 using System.Buffers.Binary;
 using System.Security.Cryptography;
+using System.Text;

 namespace SharpCompress.Common.Zip;

@@ -19,8 +20,24 @@ internal class WinzipAesEncryptionData
     {
         _keySize = keySize;

-#if NETFRAMEWORK || NETSTANDARD2_0
+#if NETFRAMEWORK
         var rfc2898 = new Rfc2898DeriveBytes(password, salt, RFC2898_ITERATIONS);
+        KeyBytes = rfc2898.GetBytes(KeySizeInBytes);
+        IvBytes = rfc2898.GetBytes(KeySizeInBytes);
+        var generatedVerifyValue = rfc2898.GetBytes(2);
+#elif NET10_0_OR_GREATER
+        var derivedKeySize = (KeySizeInBytes * 2) + 2;
+        var passwordBytes = Encoding.UTF8.GetBytes(password);
+        var derivedKey = Rfc2898DeriveBytes.Pbkdf2(
+            passwordBytes,
+            salt,
+            RFC2898_ITERATIONS,
+            HashAlgorithmName.SHA1,
+            derivedKeySize
+        );
+        KeyBytes = derivedKey.AsSpan(0, KeySizeInBytes).ToArray();
+        IvBytes = derivedKey.AsSpan(KeySizeInBytes, KeySizeInBytes).ToArray();
+        var generatedVerifyValue = derivedKey.AsSpan((KeySizeInBytes * 2), 2).ToArray();
 #else
         var rfc2898 = new Rfc2898DeriveBytes(
             password,
@@ -28,11 +45,10 @@ internal class WinzipAesEncryptionData
             RFC2898_ITERATIONS,
             HashAlgorithmName.SHA1
         );
-#endif
-
-        KeyBytes = rfc2898.GetBytes(KeySizeInBytes); // 16 or 24 or 32 ???
+        KeyBytes = rfc2898.GetBytes(KeySizeInBytes);
         IvBytes = rfc2898.GetBytes(KeySizeInBytes);
         var generatedVerifyValue = rfc2898.GetBytes(2);
+#endif

         var verify = BinaryPrimitives.ReadInt16LittleEndian(passwordVerifyValue);
         var generated = BinaryPrimitives.ReadInt16LittleEndian(generatedVerifyValue);
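For context on the change above: on modern .NET the static `Rfc2898DeriveBytes.Pbkdf2` overload produces the whole key/IV/verifier block in one call, which is what the `NET10_0_OR_GREATER` branch relies on. A standalone sketch (editorial, with made-up salt and password values):

```csharp
using System;
using System.Security.Cryptography;
using System.Text;

byte[] salt = RandomNumberGenerator.GetBytes(16);
byte[] password = Encoding.UTF8.GetBytes("secret");
const int keySizeInBytes = 32; // AES-256
const int iterations = 1000;   // iteration count used by the WinZip AES scheme

// Derive key + IV + 2-byte password verifier from a single PBKDF2 output buffer,
// mirroring the slicing done in the diff.
byte[] derived = Rfc2898DeriveBytes.Pbkdf2(
    password,
    salt,
    iterations,
    HashAlgorithmName.SHA1,
    (keySizeInBytes * 2) + 2
);
byte[] key = derived.AsSpan(0, keySizeInBytes).ToArray();
byte[] iv = derived.AsSpan(keySizeInBytes, keySizeInBytes).ToArray();
byte[] verifier = derived.AsSpan(keySizeInBytes * 2, 2).ToArray();
```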
@@ -4,56 +4,68 @@ using System.IO;
|
||||
namespace SharpCompress.Compressors.Arj
|
||||
{
|
||||
[CLSCompliant(true)]
|
||||
public sealed class BitReader
|
||||
public class BitReader
|
||||
{
|
||||
private readonly Stream _stream;
|
||||
private int _bitBuffer;
|
||||
private int _bitsRemaining;
|
||||
private bool _disposed;
|
||||
private readonly Stream _input;
|
||||
private int _bitBuffer; // currently buffered bits
|
||||
private int _bitCount; // number of bits in buffer
|
||||
|
||||
public BitReader(Stream input)
|
||||
{
|
||||
_stream = input ?? throw new ArgumentNullException(nameof(input));
|
||||
if (!input.CanRead)
|
||||
throw new ArgumentException("Stream must be readable.", nameof(input));
|
||||
_input = input ?? throw new ArgumentNullException(nameof(input));
|
||||
_bitBuffer = 0;
|
||||
_bitCount = 0;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Reads a single bit from the stream. Returns 0 or 1.
|
||||
/// </summary>
|
||||
public int ReadBit()
|
||||
{
|
||||
if (_bitCount == 0)
|
||||
{
|
||||
int nextByte = _input.ReadByte();
|
||||
if (nextByte < 0)
|
||||
{
|
||||
throw new EndOfStreamException("No more data available in BitReader.");
|
||||
}
|
||||
|
||||
_bitBuffer = nextByte;
|
||||
_bitCount = 8;
|
||||
}
|
||||
|
||||
int bit = (_bitBuffer >> (_bitCount - 1)) & 1;
|
||||
_bitCount--;
|
||||
return bit;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Reads n bits (up to 32) from the stream.
|
||||
/// </summary>
|
||||
public int ReadBits(int count)
|
||||
{
|
||||
if (_disposed)
|
||||
throw new ObjectDisposedException(nameof(BitReader));
|
||||
|
||||
if (count <= 0 || count > 32)
|
||||
if (count < 0 || count > 32)
|
||||
{
|
||||
throw new ArgumentOutOfRangeException(
|
||||
nameof(count),
|
||||
"Bit count must be between 1 and 32."
|
||||
"Count must be between 0 and 32."
|
||||
);
|
||||
}
|
||||
|
||||
int result = 0;
|
||||
for (int i = 0; i < count; i++)
|
||||
{
|
||||
if (_bitsRemaining == 0)
|
||||
{
|
||||
int nextByte = _stream.ReadByte();
|
||||
if (nextByte == -1)
|
||||
throw new EndOfStreamException();
|
||||
|
||||
_bitBuffer = nextByte;
|
||||
_bitsRemaining = 8;
|
||||
}
|
||||
|
||||
// hoogste bit eerst
|
||||
result = (result << 1) | ((_bitBuffer >> 7) & 1);
|
||||
_bitBuffer <<= 1;
|
||||
_bitsRemaining--;
|
||||
result = (result << 1) | ReadBit();
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Resets any buffered bits.
|
||||
/// </summary>
|
||||
public void AlignToByte()
|
||||
{
|
||||
_bitsRemaining = 0;
|
||||
_bitCount = 0;
|
||||
_bitBuffer = 0;
|
||||
}
|
||||
}
|
||||
|
||||
src/SharpCompress/Compressors/Arj/HistoryIterator.cs (new file, 43 lines added)
@@ -0,0 +1,43 @@
+using System;
+using System.Collections;
+using System.Collections.Generic;
+
+namespace SharpCompress.Compressors.Arj
+{
+    /// <summary>
+    /// Iterator that reads & pushes values back into the ring buffer.
+    /// </summary>
+    public class HistoryIterator : IEnumerator<byte>
+    {
+        private int _index;
+        private readonly IRingBuffer _ring;
+
+        public HistoryIterator(IRingBuffer ring, int startIndex)
+        {
+            _ring = ring;
+            _index = startIndex;
+        }
+
+        public bool MoveNext()
+        {
+            Current = _ring[_index];
+            _index = unchecked(_index + 1);
+
+            // Push value back into the ring buffer
+            _ring.Push(Current);
+
+            return true; // iterator is infinite
+        }
+
+        public void Reset()
+        {
+            throw new NotSupportedException();
+        }
+
+        public byte Current { get; private set; }
+
+        object IEnumerator.Current => Current;
+
+        public void Dispose() { }
+    }
+}
src/SharpCompress/Compressors/Arj/HuffmanTree.cs (new file, 218 lines added)
@@ -0,0 +1,218 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
|
||||
namespace SharpCompress.Compressors.Arj
|
||||
{
|
||||
[CLSCompliant(true)]
|
||||
public enum NodeType
|
||||
{
|
||||
Leaf,
|
||||
Branch,
|
||||
}
|
||||
|
||||
[CLSCompliant(true)]
|
||||
public sealed class TreeEntry
|
||||
{
|
||||
public readonly NodeType Type;
|
||||
public readonly int LeafValue;
|
||||
public readonly int BranchIndex;
|
||||
|
||||
public const int MAX_INDEX = 4096;
|
||||
|
||||
private TreeEntry(NodeType type, int leafValue, int branchIndex)
|
||||
{
|
||||
Type = type;
|
||||
LeafValue = leafValue;
|
||||
BranchIndex = branchIndex;
|
||||
}
|
||||
|
||||
public static TreeEntry Leaf(int value)
|
||||
{
|
||||
return new TreeEntry(NodeType.Leaf, value, -1);
|
||||
}
|
||||
|
||||
public static TreeEntry Branch(int index)
|
||||
{
|
||||
if (index >= MAX_INDEX)
|
||||
{
|
||||
throw new ArgumentOutOfRangeException(
|
||||
nameof(index),
|
||||
"Branch index exceeds MAX_INDEX"
|
||||
);
|
||||
}
|
||||
return new TreeEntry(NodeType.Branch, 0, index);
|
||||
}
|
||||
}
|
||||
|
||||
[CLSCompliant(true)]
|
||||
public sealed class HuffTree
|
||||
{
|
||||
private readonly List<TreeEntry> _tree;
|
||||
|
||||
public HuffTree(int capacity = 0)
|
||||
{
|
||||
_tree = new List<TreeEntry>(capacity);
|
||||
}
|
||||
|
||||
public void SetSingle(int value)
|
||||
{
|
||||
_tree.Clear();
|
||||
_tree.Add(TreeEntry.Leaf(value));
|
||||
}
|
||||
|
||||
public void BuildTree(byte[] lengths, int count)
|
||||
{
|
||||
if (lengths == null)
|
||||
{
|
||||
throw new ArgumentNullException(nameof(lengths));
|
||||
}
|
||||
|
||||
if (count < 0 || count > lengths.Length)
|
||||
{
|
||||
throw new ArgumentOutOfRangeException(nameof(count));
|
||||
}
|
||||
|
||||
if (count > TreeEntry.MAX_INDEX / 2)
|
||||
{
|
||||
throw new ArgumentException(
|
||||
$"Count exceeds maximum allowed: {TreeEntry.MAX_INDEX / 2}"
|
||||
);
|
||||
}
|
||||
byte[] slice = new byte[count];
|
||||
Array.Copy(lengths, slice, count);
|
||||
|
||||
BuildTree(slice);
|
||||
}
|
||||
|
||||
public void BuildTree(byte[] valueLengths)
|
||||
{
|
||||
if (valueLengths == null)
|
||||
{
|
||||
throw new ArgumentNullException(nameof(valueLengths));
|
||||
}
|
||||
|
||||
if (valueLengths.Length > TreeEntry.MAX_INDEX / 2)
|
||||
{
|
||||
throw new InvalidOperationException("Too many code lengths");
|
||||
}
|
||||
|
||||
_tree.Clear();
|
||||
|
||||
int maxAllocated = 1; // start with a single (root) node
|
||||
|
||||
for (byte currentLen = 1; ; currentLen++)
|
||||
{
|
||||
// add missing branches up to current limit
|
||||
int maxLimit = maxAllocated;
|
||||
|
||||
for (int i = _tree.Count; i < maxLimit; i++)
|
||||
{
|
||||
// TreeEntry.Branch may throw if index too large
|
||||
try
|
||||
{
|
||||
_tree.Add(TreeEntry.Branch(maxAllocated));
|
||||
}
|
||||
catch (ArgumentOutOfRangeException e)
|
||||
{
|
||||
_tree.Clear();
|
||||
throw new InvalidOperationException("Branch index exceeds limit", e);
|
||||
}
|
||||
|
||||
// each branch node allocates two children
|
||||
maxAllocated += 2;
|
||||
}
|
||||
|
||||
// fill tree with leaves found in the lengths table at the current length
|
||||
bool moreLeaves = false;
|
||||
|
||||
for (int value = 0; value < valueLengths.Length; value++)
|
||||
{
|
||||
byte len = valueLengths[value];
|
||||
if (len == currentLen)
|
||||
{
|
||||
_tree.Add(TreeEntry.Leaf(value));
|
||||
}
|
||||
else if (len > currentLen)
|
||||
{
|
||||
moreLeaves = true; // there are more leaves to process
|
||||
}
|
||||
}
|
||||
|
||||
// sanity check (too many leaves)
|
||||
if (_tree.Count > maxAllocated)
|
||||
{
|
||||
throw new InvalidOperationException("Too many leaves");
|
||||
}
|
||||
|
||||
// stop when no longer finding longer codes
|
||||
if (!moreLeaves)
|
||||
{
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// ensure tree is complete
|
||||
if (_tree.Count != maxAllocated)
|
||||
{
|
||||
throw new InvalidOperationException(
|
||||
$"Missing some leaves: tree count = {_tree.Count}, expected = {maxAllocated}"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
public int ReadEntry(BitReader reader)
|
||||
{
|
||||
if (_tree.Count == 0)
|
||||
{
|
||||
throw new InvalidOperationException("Tree not initialized");
|
||||
}
|
||||
|
||||
TreeEntry node = _tree[0];
|
||||
while (true)
|
||||
{
|
||||
if (node.Type == NodeType.Leaf)
|
||||
{
|
||||
return node.LeafValue;
|
||||
}
|
||||
|
||||
int bit = reader.ReadBit();
|
||||
int index = node.BranchIndex + bit;
|
||||
|
||||
if (index >= _tree.Count)
|
||||
{
|
||||
throw new InvalidOperationException("Invalid branch index during read");
|
||||
}
|
||||
|
||||
node = _tree[index];
|
||||
}
|
||||
}
|
||||
|
||||
public override string ToString()
|
||||
{
|
||||
var result = new StringBuilder();
|
||||
|
||||
void FormatStep(int index, string prefix)
|
||||
{
|
||||
var node = _tree[index];
|
||||
if (node.Type == NodeType.Leaf)
|
||||
{
|
||||
result.AppendLine($"{prefix} -> {node.LeafValue}");
|
||||
}
|
||||
else
|
||||
{
|
||||
FormatStep(node.BranchIndex, prefix + "0");
|
||||
FormatStep(node.BranchIndex + 1, prefix + "1");
|
||||
}
|
||||
}
|
||||
|
||||
if (_tree.Count > 0)
|
||||
{
|
||||
FormatStep(0, "");
|
||||
}
|
||||
|
||||
return result.ToString();
|
||||
}
|
||||
}
|
||||
}
|
||||
src/SharpCompress/Compressors/Arj/ILhaDecoderConfig.cs (new file, 9 lines added)
@@ -0,0 +1,9 @@
+namespace SharpCompress.Compressors.Arj
+{
+    public interface ILhaDecoderConfig
+    {
+        int HistoryBits { get; }
+        int OffsetBits { get; }
+        RingBuffer RingBuffer { get; }
+    }
+}
src/SharpCompress/Compressors/Arj/IRingBuffer.cs (new file, 17 lines added)
@@ -0,0 +1,17 @@
+namespace SharpCompress.Compressors.Arj
+{
+    public interface IRingBuffer
+    {
+        int BufferSize { get; }
+
+        int Cursor { get; }
+        void SetCursor(int pos);
+
+        void Push(byte value);
+
+        HistoryIterator IterFromOffset(int offset);
+        HistoryIterator IterFromPos(int pos);
+
+        byte this[int index] { get; }
+    }
+}
src/SharpCompress/Compressors/Arj/Lh5DecoderCfg.cs (new file, 9 lines added)
@@ -0,0 +1,9 @@
+namespace SharpCompress.Compressors.Arj
+{
+    public class Lh5DecoderCfg : ILhaDecoderConfig
+    {
+        public int HistoryBits => 14;
+        public int OffsetBits => 4;
+        public RingBuffer RingBuffer { get; } = new RingBuffer(1 << 14);
+    }
+}
src/SharpCompress/Compressors/Arj/Lh7DecoderCfg.cs (new file, 9 lines added)
@@ -0,0 +1,9 @@
+namespace SharpCompress.Compressors.Arj
+{
+    public class Lh7DecoderCfg : ILhaDecoderConfig
+    {
+        public int HistoryBits => 17;
+        public int OffsetBits => 5;
+        public RingBuffer RingBuffer { get; } = new RingBuffer(1 << 17);
+    }
+}
src/SharpCompress/Compressors/Arj/LhaStream.cs (new file, 363 lines added)
@@ -0,0 +1,363 @@
|
||||
using System;
|
||||
using System.Data;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using SharpCompress.IO;
|
||||
|
||||
namespace SharpCompress.Compressors.Arj
|
||||
{
|
||||
[CLSCompliant(true)]
|
||||
public sealed class LhaStream<C> : Stream, IStreamStack
|
||||
where C : ILhaDecoderConfig, new()
|
||||
{
|
||||
private readonly BitReader _bitReader;
|
||||
private readonly Stream _stream;
|
||||
|
||||
private readonly HuffTree _commandTree;
|
||||
private readonly HuffTree _offsetTree;
|
||||
private int _remainingCommands;
|
||||
private (int offset, int count)? _copyProgress;
|
||||
private readonly RingBuffer _ringBuffer;
|
||||
private readonly C _config = new C();
|
||||
|
||||
private const int NUM_COMMANDS = 510;
|
||||
private const int NUM_TEMP_CODELEN = 20;
|
||||
|
||||
private readonly int _originalSize;
|
||||
private int _producedBytes = 0;
|
||||
|
||||
#if DEBUG_STREAMS
|
||||
long IStreamStack.InstanceId { get; set; }
|
||||
#endif
|
||||
int IStreamStack.DefaultBufferSize { get; set; }
|
||||
|
||||
Stream IStreamStack.BaseStream() => _stream;
|
||||
|
||||
int IStreamStack.BufferSize
|
||||
{
|
||||
get => 0;
|
||||
set { }
|
||||
}
|
||||
int IStreamStack.BufferPosition
|
||||
{
|
||||
get => 0;
|
||||
set { }
|
||||
}
|
||||
|
||||
void IStreamStack.SetPosition(long position) { }
|
||||
|
||||
public LhaStream(Stream compressedStream, int originalSize)
|
||||
{
|
||||
_stream = compressedStream ?? throw new ArgumentNullException(nameof(compressedStream));
|
||||
_bitReader = new BitReader(compressedStream);
|
||||
_ringBuffer = _config.RingBuffer;
|
||||
_commandTree = new HuffTree(NUM_COMMANDS * 2);
|
||||
_offsetTree = new HuffTree(NUM_TEMP_CODELEN * 2);
|
||||
_remainingCommands = 0;
|
||||
_copyProgress = null;
|
||||
_originalSize = originalSize;
|
||||
}
|
||||
|
||||
public override bool CanRead => true;
|
||||
public override bool CanSeek => false;
|
||||
public override bool CanWrite => false;
|
||||
public override long Length => throw new NotSupportedException();
|
||||
public override long Position
|
||||
{
|
||||
get => throw new NotSupportedException();
|
||||
set => throw new NotSupportedException();
|
||||
}
|
||||
|
||||
public override void Flush() { }
|
||||
|
||||
public override long Seek(long offset, SeekOrigin origin) =>
|
||||
throw new NotSupportedException();
|
||||
|
||||
public override void SetLength(long value) => throw new NotSupportedException();
|
||||
|
||||
public override void Write(byte[] buffer, int offset, int count) =>
|
||||
throw new NotSupportedException();
|
||||
|
||||
public override int Read(byte[] buffer, int offset, int count)
|
||||
{
|
||||
if (buffer == null)
|
||||
{
|
||||
throw new ArgumentNullException(nameof(buffer));
|
||||
}
|
||||
if (offset < 0 || count < 0 || (offset + count) > buffer.Length)
|
||||
{
|
||||
throw new ArgumentOutOfRangeException();
|
||||
}
|
||||
|
||||
if (_producedBytes >= _originalSize)
|
||||
{
|
||||
return 0; // EOF
|
||||
}
|
||||
if (count == 0)
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
|
||||
int bytesRead = FillBuffer(buffer);
|
||||
return bytesRead;
|
||||
}
|
||||
|
||||
private byte ReadCodeLength()
|
||||
{
|
||||
byte len = (byte)_bitReader.ReadBits(3);
|
||||
if (len == 7)
|
||||
{
|
||||
while (_bitReader.ReadBit() != 0)
|
||||
{
|
||||
len++;
|
||||
if (len > 255)
|
||||
{
|
||||
throw new InvalidOperationException("Code length overflow");
|
||||
}
|
||||
}
|
||||
}
|
||||
return len;
|
||||
}
|
||||
|
||||
private int ReadCodeSkip(int skipRange)
|
||||
{
|
||||
int bits;
|
||||
int increment;
|
||||
|
||||
switch (skipRange)
|
||||
{
|
||||
case 0:
|
||||
return 1;
|
||||
case 1:
|
||||
bits = 4;
|
||||
increment = 3; // 3..=18
|
||||
break;
|
||||
default:
|
||||
bits = 9;
|
||||
increment = 20; // 20..=531
|
||||
break;
|
||||
}
|
||||
|
||||
int skip = _bitReader.ReadBits(bits);
|
||||
return skip + increment;
|
||||
}
|
||||
|
||||
private void ReadTempTree()
|
||||
{
|
||||
byte[] codeLengths = new byte[NUM_TEMP_CODELEN];
|
||||
|
||||
// number of codes to read (5 bits)
|
||||
int numCodes = _bitReader.ReadBits(5);
|
||||
|
||||
// single code only
|
||||
if (numCodes == 0)
|
||||
{
|
||||
int code = _bitReader.ReadBits(5);
|
||||
_offsetTree.SetSingle((byte)code);
|
||||
return;
|
||||
}
|
||||
|
||||
if (numCodes > NUM_TEMP_CODELEN)
|
||||
{
|
||||
throw new Exception("temporary codelen table has invalid size");
|
||||
}
|
||||
|
||||
// read actual lengths
|
||||
int count = Math.Min(3, numCodes);
|
||||
for (int i = 0; i < count; i++)
|
||||
{
|
||||
codeLengths[i] = (byte)ReadCodeLength();
|
||||
}
|
||||
|
||||
// 2-bit skip value follows
|
||||
int skip = _bitReader.ReadBits(2);
|
||||
|
||||
if (3 + skip > numCodes)
|
||||
{
|
||||
throw new Exception("temporary codelen table has invalid size");
|
||||
}
|
||||
|
||||
for (int i = 3 + skip; i < numCodes; i++)
|
||||
{
|
||||
codeLengths[i] = (byte)ReadCodeLength();
|
||||
}
|
||||
|
||||
_offsetTree.BuildTree(codeLengths, numCodes);
|
||||
}
|
||||
|
||||
private void ReadCommandTree()
|
||||
{
|
||||
byte[] codeLengths = new byte[NUM_COMMANDS];
|
||||
|
||||
// number of codes to read (9 bits)
|
||||
int numCodes = _bitReader.ReadBits(9);
|
||||
|
||||
// single code only
|
||||
if (numCodes == 0)
|
||||
{
|
||||
int code = _bitReader.ReadBits(9);
|
||||
_commandTree.SetSingle((ushort)code);
|
||||
return;
|
||||
}
|
||||
|
||||
if (numCodes > NUM_COMMANDS)
|
||||
{
|
||||
throw new Exception("commands codelen table has invalid size");
|
||||
}
|
||||
|
||||
int index = 0;
|
||||
while (index < numCodes)
|
||||
{
|
||||
for (int n = 0; n < numCodes - index; n++)
|
||||
{
|
||||
int code = _offsetTree.ReadEntry(_bitReader);
|
||||
|
||||
if (code >= 0 && code <= 2) // skip range
|
||||
{
|
||||
int skipCount = ReadCodeSkip(code);
|
||||
index += n + skipCount;
|
||||
goto outerLoop;
|
||||
}
|
||||
else
|
||||
{
|
||||
codeLengths[index + n] = (byte)(code - 2);
|
||||
}
|
||||
}
|
||||
break;
|
||||
|
||||
outerLoop:
|
||||
;
|
||||
}
|
||||
|
||||
_commandTree.BuildTree(codeLengths, numCodes);
|
||||
}
|
||||
|
||||
private void ReadOffsetTree()
|
||||
{
|
||||
int numCodes = _bitReader.ReadBits(_config.OffsetBits);
|
||||
if (numCodes == 0)
|
||||
{
|
||||
int code = _bitReader.ReadBits(_config.OffsetBits);
|
||||
_offsetTree.SetSingle(code);
|
||||
return;
|
||||
}
|
||||
|
||||
if (numCodes > _config.HistoryBits)
|
||||
{
|
||||
throw new InvalidDataException("Offset code table too large");
|
||||
}
|
||||
|
||||
byte[] codeLengths = new byte[NUM_TEMP_CODELEN];
|
||||
for (int i = 0; i < numCodes; i++)
|
||||
{
|
||||
codeLengths[i] = (byte)ReadCodeLength();
|
||||
}
|
||||
|
||||
_offsetTree.BuildTree(codeLengths, numCodes);
|
||||
}
|
||||
|
||||
private void BeginNewBlock()
|
||||
{
|
||||
ReadTempTree();
|
||||
ReadCommandTree();
|
||||
ReadOffsetTree();
|
||||
}
|
||||
|
||||
private int ReadCommand() => _commandTree.ReadEntry(_bitReader);
|
||||
|
||||
private int ReadOffset()
|
||||
{
|
||||
int bits = _offsetTree.ReadEntry(_bitReader);
|
||||
if (bits <= 1)
|
||||
{
|
||||
return bits;
|
||||
}
|
||||
|
||||
int res = _bitReader.ReadBits(bits - 1);
|
||||
return res | (1 << (bits - 1));
|
||||
}
|
||||
|
||||
private int CopyFromHistory(byte[] target, int targetIndex, int offset, int count)
|
||||
{
|
||||
var historyIter = _ringBuffer.IterFromOffset(offset);
|
||||
int copied = 0;
|
||||
|
||||
while (
|
||||
copied < count && historyIter.MoveNext() && (targetIndex + copied) < target.Length
|
||||
)
|
||||
{
|
||||
target[targetIndex + copied] = historyIter.Current;
|
||||
copied++;
|
||||
}
|
||||
|
||||
if (copied < count)
|
||||
{
|
||||
_copyProgress = (offset, count - copied);
|
||||
}
|
||||
|
||||
return copied;
|
||||
}
|
||||
|
||||
public int FillBuffer(byte[] buffer)
|
||||
{
|
||||
int bufLen = buffer.Length;
|
||||
int bufIndex = 0;
|
||||
|
||||
// stop when we reached original size
|
||||
if (_producedBytes >= _originalSize)
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
|
||||
// calculate limit, so that we don't go over the original size
|
||||
int remaining = (int)Math.Min(bufLen, _originalSize - _producedBytes);
|
||||
|
||||
while (bufIndex < remaining)
|
||||
{
|
||||
if (_copyProgress.HasValue)
|
||||
{
|
||||
var (offset, count) = _copyProgress.Value;
|
||||
int copied = CopyFromHistory(
|
||||
buffer,
|
||||
bufIndex,
|
||||
offset,
|
||||
(int)Math.Min(count, remaining - bufIndex)
|
||||
);
|
||||
bufIndex += copied;
|
||||
_copyProgress = null;
|
||||
}
|
||||
|
||||
if (_remainingCommands == 0)
|
||||
{
|
||||
_remainingCommands = _bitReader.ReadBits(16);
|
||||
if (bufIndex + _remainingCommands > remaining)
|
||||
{
|
||||
break;
|
||||
}
|
||||
BeginNewBlock();
|
||||
}
|
||||
|
||||
_remainingCommands--;
|
||||
|
||||
int command = ReadCommand();
|
||||
|
||||
if (command >= 0 && command <= 0xFF)
|
||||
{
|
||||
byte value = (byte)command;
|
||||
buffer[bufIndex++] = value;
|
||||
_ringBuffer.Push(value);
|
||||
}
|
||||
else
|
||||
{
|
||||
int count = command - 0x100 + 3;
|
||||
int offset = ReadOffset();
|
||||
int copyCount = (int)Math.Min(count, remaining - bufIndex);
|
||||
bufIndex += CopyFromHistory(buffer, bufIndex, offset, copyCount);
|
||||
}
|
||||
}
|
||||
|
||||
_producedBytes += bufIndex;
|
||||
return bufIndex;
|
||||
}
|
||||
}
|
||||
}
|
||||
src/SharpCompress/Compressors/Arj/RingBuffer.cs (new file, 67 lines added)
@@ -0,0 +1,67 @@
|
||||
using System;
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
|
||||
namespace SharpCompress.Compressors.Arj
|
||||
{
|
||||
/// <summary>
|
||||
/// A fixed-size ring buffer where N must be a power of two.
|
||||
/// </summary>
|
||||
public class RingBuffer : IRingBuffer
|
||||
{
|
||||
private readonly byte[] _buffer;
|
||||
private int _cursor;
|
||||
|
||||
public int BufferSize { get; }
|
||||
|
||||
public int Cursor => _cursor;
|
||||
|
||||
private readonly int _mask;
|
||||
|
||||
public RingBuffer(int size)
|
||||
{
|
||||
if ((size & (size - 1)) != 0)
|
||||
{
|
||||
throw new ArgumentException("RingArrayBuffer size must be a power of two");
|
||||
}
|
||||
|
||||
BufferSize = size;
|
||||
_buffer = new byte[size];
|
||||
_cursor = 0;
|
||||
_mask = size - 1;
|
||||
|
||||
// Fill with spaces
|
||||
for (int i = 0; i < size; i++)
|
||||
{
|
||||
_buffer[i] = (byte)' ';
|
||||
}
|
||||
}
|
||||
|
||||
public void SetCursor(int pos)
|
||||
{
|
||||
_cursor = pos & _mask;
|
||||
}
|
||||
|
||||
public void Push(byte value)
|
||||
{
|
||||
int index = _cursor;
|
||||
_buffer[index & _mask] = value;
|
||||
_cursor = (index + 1) & _mask;
|
||||
}
|
||||
|
||||
public byte this[int index] => _buffer[index & _mask];
|
||||
|
||||
public HistoryIterator IterFromOffset(int offset)
|
||||
{
|
||||
int masked = (offset & _mask) + 1;
|
||||
int startIndex = _cursor + BufferSize - masked;
|
||||
return new HistoryIterator(this, startIndex);
|
||||
}
|
||||
|
||||
public HistoryIterator IterFromPos(int pos)
|
||||
{
|
||||
int startIndex = pos & _mask;
|
||||
return new HistoryIterator(this, startIndex);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -544,6 +544,12 @@ internal sealed class CBZip2OutputStream : Stream, IStreamStack

     private void EndBlock()
     {
+        // Skip block processing for empty input (no data written)
+        if (last < 0)
+        {
+            return;
+        }
+
         blockCRC = mCrc.GetFinalCRC();
         combinedCRC = (combinedCRC << 1) | (int)(((uint)combinedCRC) >> 31);
         combinedCRC ^= blockCRC;
@@ -27,7 +27,7 @@ internal sealed partial class Unpack : BitInput, IRarUnpack
         if (!disposed)
        {
             base.Dispose();
-            if (!externalWindow)
+            if (!externalWindow && window is not null)
             {
                 ArrayPool<byte>.Shared.Return(window);
                 window = null;
@@ -1,6 +1,8 @@
|
||||
using System;
|
||||
using System.Buffers.Binary;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace SharpCompress.Compressors.Xz;
|
||||
|
||||
@@ -30,6 +32,28 @@ public static class BinaryUtils
|
||||
internal static uint ReadLittleEndianUInt32(this Stream stream) =>
|
||||
unchecked((uint)ReadLittleEndianInt32(stream));
|
||||
|
||||
public static async Task<int> ReadLittleEndianInt32Async(
|
||||
this Stream stream,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
var bytes = new byte[4];
|
||||
var read = await stream.ReadFullyAsync(bytes, cancellationToken).ConfigureAwait(false);
|
||||
if (!read)
|
||||
{
|
||||
throw new EndOfStreamException();
|
||||
}
|
||||
return BinaryPrimitives.ReadInt32LittleEndian(bytes);
|
||||
}
|
||||
|
||||
internal static async Task<uint> ReadLittleEndianUInt32Async(
|
||||
this Stream stream,
|
||||
CancellationToken cancellationToken = default
|
||||
) =>
|
||||
unchecked(
|
||||
(uint)await ReadLittleEndianInt32Async(stream, cancellationToken).ConfigureAwait(false)
|
||||
);
|
||||
|
||||
internal static byte[] ToBigEndianBytes(this uint uint32)
|
||||
{
|
||||
var result = BitConverter.GetBytes(uint32);
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
|
||||
namespace SharpCompress.Compressors.Xz;
|
||||
@@ -39,4 +41,75 @@ internal static class MultiByteIntegers
|
||||
}
|
||||
return Output;
|
||||
}
|
||||
|
||||
public static async Task<ulong> ReadXZIntegerAsync(
|
||||
this BinaryReader reader,
|
||||
CancellationToken cancellationToken = default,
|
||||
int MaxBytes = 9
|
||||
)
|
||||
{
|
||||
if (MaxBytes <= 0)
|
||||
{
|
||||
throw new ArgumentOutOfRangeException(nameof(MaxBytes));
|
||||
}
|
||||
|
||||
if (MaxBytes > 9)
|
||||
{
|
||||
MaxBytes = 9;
|
||||
}
|
||||
|
||||
var LastByte = await ReadByteAsync(reader, cancellationToken).ConfigureAwait(false);
|
||||
var Output = (ulong)LastByte & 0x7F;
|
||||
|
||||
var i = 0;
|
||||
while ((LastByte & 0x80) != 0)
|
||||
{
|
||||
if (++i >= MaxBytes)
|
||||
{
|
||||
throw new InvalidFormatException();
|
||||
}
|
||||
|
||||
LastByte = await ReadByteAsync(reader, cancellationToken).ConfigureAwait(false);
|
||||
if (LastByte == 0)
|
||||
{
|
||||
throw new InvalidFormatException();
|
||||
}
|
||||
|
||||
Output |= ((ulong)(LastByte & 0x7F)) << (i * 7);
|
||||
}
|
||||
return Output;
|
||||
}
|
||||
|
||||
public static async Task<byte> ReadByteAsync(
|
||||
this BinaryReader reader,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
var buffer = new byte[1];
|
||||
var bytesRead = await reader
|
||||
.BaseStream.ReadAsync(buffer, 0, 1, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
if (bytesRead != 1)
|
||||
{
|
||||
throw new EndOfStreamException();
|
||||
}
|
||||
return buffer[0];
|
||||
}
|
||||
|
||||
public static async Task<byte[]> ReadBytesAsync(
|
||||
this BinaryReader reader,
|
||||
int count,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
var buffer = new byte[count];
|
||||
var bytesRead = await reader
|
||||
.BaseStream.ReadAsync(buffer, 0, count, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
if (bytesRead != count)
|
||||
{
|
||||
throw new EndOfStreamException();
|
||||
}
|
||||
return buffer;
|
||||
}
|
||||
}
|
||||
|
||||
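ReadXZIntegerAsync decodes xz's variable-length integers: each byte carries 7 payload bits, the high bit signals continuation, and at most 9 bytes are accepted. A synchronous sketch of the same decoding over an in-memory span (the helper name and exception type are assumptions, not the library API):

using System;
using System.IO;

static ulong DecodeXZInteger(ReadOnlySpan<byte> data)
{
    var value = (ulong)(data[0] & 0x7F);
    var i = 0;
    while ((data[i] & 0x80) != 0)
    {
        if (++i >= 9 || data[i] == 0)
        {
            throw new InvalidDataException("Corrupt multibyte integer");
        }
        value |= ((ulong)(data[i] & 0x7F)) << (i * 7);
    }
    return value;
}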
@@ -4,6 +4,8 @@ using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Compressors.Xz.Filters;
|
||||
|
||||
@@ -72,6 +74,49 @@ public sealed class XZBlock : XZReadOnlyStream
|
||||
return bytesRead;
|
||||
}
|
||||
|
||||
public override async Task<int> ReadAsync(
|
||||
byte[] buffer,
|
||||
int offset,
|
||||
int count,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
var bytesRead = 0;
|
||||
if (!HeaderIsLoaded)
|
||||
{
|
||||
await LoadHeaderAsync(cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
if (!_streamConnected)
|
||||
{
|
||||
ConnectStream();
|
||||
}
|
||||
|
||||
if (!_endOfStream)
|
||||
{
|
||||
bytesRead = await _decomStream
|
||||
.ReadAsync(buffer, offset, count, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
|
||||
if (bytesRead != count)
|
||||
{
|
||||
_endOfStream = true;
|
||||
}
|
||||
|
||||
if (_endOfStream && !_paddingSkipped)
|
||||
{
|
||||
await SkipPaddingAsync(cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
if (_endOfStream && !_crcChecked)
|
||||
{
|
||||
await CheckCrcAsync(cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
return bytesRead;
|
||||
}
|
||||
|
||||
private void SkipPadding()
|
||||
{
|
||||
var bytes = (BaseStream.Position - _startPosition) % 4;
|
||||
@@ -87,6 +132,23 @@ public sealed class XZBlock : XZReadOnlyStream
|
||||
_paddingSkipped = true;
|
||||
}
|
||||
|
||||
private async Task SkipPaddingAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
var bytes = (BaseStream.Position - _startPosition) % 4;
|
||||
if (bytes > 0)
|
||||
{
|
||||
var paddingBytes = new byte[4 - bytes];
|
||||
await BaseStream
|
||||
.ReadAsync(paddingBytes, 0, paddingBytes.Length, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
if (paddingBytes.Any(b => b != 0))
|
||||
{
|
||||
throw new InvalidFormatException("Padding bytes were non-null");
|
||||
}
|
||||
}
|
||||
_paddingSkipped = true;
|
||||
}
|
||||
|
||||
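Both SkipPadding variants advance the reader to the next 4-byte boundary required by the xz format, and every padding byte must be zero. The arithmetic in isolation (values illustrative):

long position = 13;                                      // bytes consumed since the block started
long pad = position % 4 == 0 ? 0 : 4 - (position % 4);   // 3 padding bytes here, all required to be zero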
private void CheckCrc()
|
||||
{
|
||||
var crc = new byte[_checkSize];
|
||||
@@ -96,6 +158,15 @@ public sealed class XZBlock : XZReadOnlyStream
|
||||
_crcChecked = true;
|
||||
}
|
||||
|
||||
private async Task CheckCrcAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
var crc = new byte[_checkSize];
|
||||
await BaseStream.ReadAsync(crc, 0, _checkSize, cancellationToken).ConfigureAwait(false);
|
||||
// Actually do a check (and read in the bytes
|
||||
// into the function throughout the stream read).
|
||||
_crcChecked = true;
|
||||
}
|
||||
|
||||
private void ConnectStream()
|
||||
{
|
||||
_decomStream = BaseStream;
|
||||
@@ -123,6 +194,21 @@ public sealed class XZBlock : XZReadOnlyStream
|
||||
HeaderIsLoaded = true;
|
||||
}
|
||||
|
||||
private async Task LoadHeaderAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
await ReadHeaderSizeAsync(cancellationToken).ConfigureAwait(false);
|
||||
var headerCache = await CacheHeaderAsync(cancellationToken).ConfigureAwait(false);
|
||||
|
||||
using (var cache = new MemoryStream(headerCache))
|
||||
using (var cachedReader = new BinaryReader(cache))
|
||||
{
|
||||
cachedReader.BaseStream.Position = 1; // skip the header size byte
|
||||
ReadBlockFlags(cachedReader);
|
||||
ReadFilters(cachedReader);
|
||||
}
|
||||
HeaderIsLoaded = true;
|
||||
}
|
||||
|
||||
private void ReadHeaderSize()
|
||||
{
|
||||
_blockHeaderSizeByte = (byte)BaseStream.ReadByte();
|
||||
@@ -132,6 +218,17 @@ public sealed class XZBlock : XZReadOnlyStream
|
||||
}
|
||||
}
|
||||
|
||||
private async Task ReadHeaderSizeAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
var buffer = new byte[1];
|
||||
await BaseStream.ReadAsync(buffer, 0, 1, cancellationToken).ConfigureAwait(false);
|
||||
_blockHeaderSizeByte = buffer[0];
|
||||
if (_blockHeaderSizeByte == 0)
|
||||
{
|
||||
throw new XZIndexMarkerReachedException();
|
||||
}
|
||||
}
|
||||
|
||||
private byte[] CacheHeader()
|
||||
{
|
||||
var blockHeaderWithoutCrc = new byte[BlockHeaderSize - 4];
|
||||
@@ -139,7 +236,7 @@ public sealed class XZBlock : XZReadOnlyStream
|
||||
var read = BaseStream.Read(blockHeaderWithoutCrc, 1, BlockHeaderSize - 5);
|
||||
if (read != BlockHeaderSize - 5)
|
||||
{
|
||||
throw new EndOfStreamException("Reached end of stream unexectedly");
|
||||
throw new EndOfStreamException("Reached end of stream unexpectedly");
|
||||
}
|
||||
|
||||
var crc = BaseStream.ReadLittleEndianUInt32();
|
||||
@@ -152,6 +249,30 @@ public sealed class XZBlock : XZReadOnlyStream
|
||||
return blockHeaderWithoutCrc;
|
||||
}
|
||||
|
||||
private async Task<byte[]> CacheHeaderAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
var blockHeaderWithoutCrc = new byte[BlockHeaderSize - 4];
|
||||
blockHeaderWithoutCrc[0] = _blockHeaderSizeByte;
|
||||
var read = await BaseStream
|
||||
.ReadAsync(blockHeaderWithoutCrc, 1, BlockHeaderSize - 5, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
if (read != BlockHeaderSize - 5)
|
||||
{
|
||||
throw new EndOfStreamException("Reached end of stream unexpectedly");
|
||||
}
|
||||
|
||||
var crc = await BaseStream
|
||||
.ReadLittleEndianUInt32Async(cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
var calcCrc = Crc32.Compute(blockHeaderWithoutCrc);
|
||||
if (crc != calcCrc)
|
||||
{
|
||||
throw new InvalidFormatException("Block header corrupt");
|
||||
}
|
||||
|
||||
return blockHeaderWithoutCrc;
|
||||
}
|
||||
|
||||
private void ReadBlockFlags(BinaryReader reader)
|
||||
{
|
||||
var blockFlags = reader.ReadByte();
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.IO;
|
||||
|
||||
@@ -27,6 +29,16 @@ public class XZFooter
|
||||
return footer;
|
||||
}
|
||||
|
||||
public static async Task<XZFooter> FromStreamAsync(
|
||||
Stream stream,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
var footer = new XZFooter(new BinaryReader(stream, Encoding.UTF8, true));
|
||||
await footer.ProcessAsync(cancellationToken).ConfigureAwait(false);
|
||||
return footer;
|
||||
}
|
||||
|
||||
public void Process()
|
||||
{
|
||||
var crc = _reader.ReadLittleEndianUInt32();
|
||||
@@ -49,4 +61,29 @@ public class XZFooter
|
||||
throw new InvalidFormatException("Magic footer missing");
|
||||
}
|
||||
}
|
||||
|
||||
public async Task ProcessAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
var crc = await _reader
|
||||
.BaseStream.ReadLittleEndianUInt32Async(cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
var footerBytes = await _reader.ReadBytesAsync(6, cancellationToken).ConfigureAwait(false);
|
||||
var myCrc = Crc32.Compute(footerBytes);
|
||||
if (crc != myCrc)
|
||||
{
|
||||
throw new InvalidFormatException("Footer corrupt");
|
||||
}
|
||||
|
||||
using (var stream = new MemoryStream(footerBytes))
|
||||
using (var reader = new BinaryReader(stream))
|
||||
{
|
||||
BackwardSize = (reader.ReadLittleEndianUInt32() + 1) * 4;
|
||||
StreamFlags = reader.ReadBytes(2);
|
||||
}
|
||||
var magBy = await _reader.ReadBytesAsync(2, cancellationToken).ConfigureAwait(false);
|
||||
if (!magBy.AsSpan().SequenceEqual(_magicBytes))
|
||||
{
|
||||
throw new InvalidFormatException("Magic footer missing");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.IO;
|
||||
|
||||
@@ -23,12 +25,28 @@ public class XZHeader
|
||||
return header;
|
||||
}
|
||||
|
||||
public static async Task<XZHeader> FromStreamAsync(
|
||||
Stream stream,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
var header = new XZHeader(new BinaryReader(stream, Encoding.UTF8, true));
|
||||
await header.ProcessAsync(cancellationToken).ConfigureAwait(false);
|
||||
return header;
|
||||
}
|
||||
|
||||
public void Process()
|
||||
{
|
||||
CheckMagicBytes(_reader.ReadBytes(6));
|
||||
ProcessStreamFlags();
|
||||
}
|
||||
|
||||
public async Task ProcessAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
CheckMagicBytes(await _reader.ReadBytesAsync(6, cancellationToken).ConfigureAwait(false));
|
||||
await ProcessStreamFlagsAsync(cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
private void ProcessStreamFlags()
|
||||
{
|
||||
var streamFlags = _reader.ReadBytes(2);
|
||||
@@ -47,6 +65,26 @@ public class XZHeader
|
||||
}
|
||||
}
|
||||
|
||||
private async Task ProcessStreamFlagsAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
var streamFlags = await _reader.ReadBytesAsync(2, cancellationToken).ConfigureAwait(false);
|
||||
var crc = await _reader
|
||||
.BaseStream.ReadLittleEndianUInt32Async(cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
var calcCrc = Crc32.Compute(streamFlags);
|
||||
if (crc != calcCrc)
|
||||
{
|
||||
throw new InvalidFormatException("Stream header corrupt");
|
||||
}
|
||||
|
||||
BlockCheckType = (CheckType)(streamFlags[1] & 0x0F);
|
||||
var futureUse = (byte)(streamFlags[1] & 0xF0);
|
||||
if (futureUse != 0 || streamFlags[0] != 0)
|
||||
{
|
||||
throw new InvalidFormatException("Unknown XZ Stream Version");
|
||||
}
|
||||
}
|
||||
|
||||
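The stream flags are two bytes followed by their CRC32: byte 0 must be zero, the low nibble of byte 1 selects the block check type, and the high nibble is reserved. Decoding a raw pair in isolation (a sketch; 0x01 is the CRC32 check type in the xz format, and the exception type here is a stand-in):

using System.IO;

byte[] streamFlags = { 0x00, 0x01 };
var blockCheckType = streamFlags[1] & 0x0F;   // 0x01 -> CRC32
var reserved = streamFlags[1] & 0xF0;
if (reserved != 0 || streamFlags[0] != 0)
{
    throw new InvalidDataException("Unknown XZ stream version");
}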
private void CheckMagicBytes(byte[] header)
|
||||
{
|
||||
if (!header.SequenceEqual(MagicHeader))
|
||||
|
||||
@@ -3,6 +3,8 @@ using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.IO;
|
||||
|
||||
@@ -39,6 +41,20 @@ public class XZIndex
|
||||
return index;
|
||||
}
|
||||
|
||||
public static async Task<XZIndex> FromStreamAsync(
|
||||
Stream stream,
|
||||
bool indexMarkerAlreadyVerified,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
var index = new XZIndex(
|
||||
new BinaryReader(stream, Encoding.UTF8, true),
|
||||
indexMarkerAlreadyVerified
|
||||
);
|
||||
await index.ProcessAsync(cancellationToken).ConfigureAwait(false);
|
||||
return index;
|
||||
}
|
||||
|
||||
public void Process()
|
||||
{
|
||||
if (!_indexMarkerAlreadyVerified)
|
||||
@@ -55,6 +71,26 @@ public class XZIndex
|
||||
VerifyCrc32();
|
||||
}
|
||||
|
||||
public async Task ProcessAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (!_indexMarkerAlreadyVerified)
|
||||
{
|
||||
await VerifyIndexMarkerAsync(cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
NumberOfRecords = await _reader.ReadXZIntegerAsync(cancellationToken).ConfigureAwait(false);
|
||||
for (ulong i = 0; i < NumberOfRecords; i++)
|
||||
{
|
||||
Records.Add(
|
||||
await XZIndexRecord
|
||||
.FromBinaryReaderAsync(_reader, cancellationToken)
|
||||
.ConfigureAwait(false)
|
||||
);
|
||||
}
|
||||
await SkipPaddingAsync(cancellationToken).ConfigureAwait(false);
|
||||
await VerifyCrc32Async(cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
private void VerifyIndexMarker()
|
||||
{
|
||||
var marker = _reader.ReadByte();
|
||||
@@ -64,6 +100,15 @@ public class XZIndex
|
||||
}
|
||||
}
|
||||
|
||||
private async Task VerifyIndexMarkerAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
var marker = await _reader.ReadByteAsync(cancellationToken).ConfigureAwait(false);
|
||||
if (marker != 0)
|
||||
{
|
||||
throw new InvalidFormatException("Not an index block");
|
||||
}
|
||||
}
|
||||
|
||||
private void SkipPadding()
|
||||
{
|
||||
var bytes = (int)(_reader.BaseStream.Position - StreamStartPosition) % 4;
|
||||
@@ -77,9 +122,32 @@ public class XZIndex
|
||||
}
|
||||
}
|
||||
|
||||
private async Task SkipPaddingAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
var bytes = (int)(_reader.BaseStream.Position - StreamStartPosition) % 4;
|
||||
if (bytes > 0)
|
||||
{
|
||||
var paddingBytes = await _reader
|
||||
.ReadBytesAsync(4 - bytes, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
if (paddingBytes.Any(b => b != 0))
|
||||
{
|
||||
throw new InvalidFormatException("Padding bytes were non-null");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void VerifyCrc32()
|
||||
{
|
||||
var crc = _reader.ReadLittleEndianUInt32();
|
||||
// TODO verify this matches
|
||||
}
|
||||
|
||||
private async Task VerifyCrc32Async(CancellationToken cancellationToken = default)
|
||||
{
|
||||
var crc = await _reader
|
||||
.BaseStream.ReadLittleEndianUInt32Async(cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
// TODO verify this matches
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace SharpCompress.Compressors.Xz;
|
||||
|
||||
@@ -18,4 +20,16 @@ public class XZIndexRecord
|
||||
record.UncompressedSize = br.ReadXZInteger();
|
||||
return record;
|
||||
}
|
||||
|
||||
public static async Task<XZIndexRecord> FromBinaryReaderAsync(
|
||||
BinaryReader br,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
var record = new XZIndexRecord();
|
||||
record.UnpaddedSize = await br.ReadXZIntegerAsync(cancellationToken).ConfigureAwait(false);
|
||||
record.UncompressedSize = await br.ReadXZIntegerAsync(cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
return record;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,6 +2,8 @@
|
||||
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.IO;
|
||||
|
||||
@@ -104,6 +106,35 @@ public sealed class XZStream : XZReadOnlyStream, IStreamStack
|
||||
return bytesRead;
|
||||
}
|
||||
|
||||
public override async Task<int> ReadAsync(
|
||||
byte[] buffer,
|
||||
int offset,
|
||||
int count,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
var bytesRead = 0;
|
||||
if (_endOfStream)
|
||||
{
|
||||
return bytesRead;
|
||||
}
|
||||
|
||||
if (!HeaderIsRead)
|
||||
{
|
||||
await ReadHeaderAsync(cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
bytesRead = await ReadBlocksAsync(buffer, offset, count, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
if (bytesRead < count)
|
||||
{
|
||||
_endOfStream = true;
|
||||
await ReadIndexAsync(cancellationToken).ConfigureAwait(false);
|
||||
await ReadFooterAsync(cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
return bytesRead;
|
||||
}
|
||||
|
||||
private void ReadHeader()
|
||||
{
|
||||
Header = XZHeader.FromStream(BaseStream);
|
||||
@@ -111,12 +142,31 @@ public sealed class XZStream : XZReadOnlyStream, IStreamStack
|
||||
HeaderIsRead = true;
|
||||
}
|
||||
|
||||
private async Task ReadHeaderAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
Header = await XZHeader
|
||||
.FromStreamAsync(BaseStream, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
AssertBlockCheckTypeIsSupported();
|
||||
HeaderIsRead = true;
|
||||
}
|
||||
|
||||
private void ReadIndex() => Index = XZIndex.FromStream(BaseStream, true);
|
||||
|
||||
// TODO veryfy Index
|
||||
private async Task ReadIndexAsync(CancellationToken cancellationToken = default) =>
|
||||
Index = await XZIndex
|
||||
.FromStreamAsync(BaseStream, true, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
// TODO verify Index
|
||||
private void ReadFooter() => Footer = XZFooter.FromStream(BaseStream);
|
||||
|
||||
// TODO verify footer
|
||||
private async Task ReadFooterAsync(CancellationToken cancellationToken = default) =>
|
||||
Footer = await XZFooter
|
||||
.FromStreamAsync(BaseStream, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
private int ReadBlocks(byte[] buffer, int offset, int count)
|
||||
{
|
||||
var bytesRead = 0;
|
||||
@@ -152,6 +202,48 @@ public sealed class XZStream : XZReadOnlyStream, IStreamStack
|
||||
return bytesRead;
|
||||
}
|
||||
|
||||
private async Task<int> ReadBlocksAsync(
|
||||
byte[] buffer,
|
||||
int offset,
|
||||
int count,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
var bytesRead = 0;
|
||||
if (_currentBlock is null)
|
||||
{
|
||||
NextBlock();
|
||||
}
|
||||
|
||||
for (; ; )
|
||||
{
|
||||
try
|
||||
{
|
||||
if (bytesRead >= count)
|
||||
{
|
||||
break;
|
||||
}
|
||||
|
||||
var remaining = count - bytesRead;
|
||||
var newOffset = offset + bytesRead;
|
||||
var justRead = await _currentBlock
|
||||
.ReadAsync(buffer, newOffset, remaining, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
if (justRead < remaining)
|
||||
{
|
||||
NextBlock();
|
||||
}
|
||||
|
||||
bytesRead += justRead;
|
||||
}
|
||||
catch (XZIndexMarkerReachedException)
|
||||
{
|
||||
break;
|
||||
}
|
||||
}
|
||||
return bytesRead;
|
||||
}
|
||||
|
||||
private void NextBlock() =>
|
||||
_currentBlock = new XZBlock(BaseStream, Header.BlockCheckType, Header.BlockCheckSize);
|
||||
}
|
||||
|
||||
@@ -57,19 +57,238 @@ public class TarFactory
|
||||
Stream stream,
|
||||
string? password = null,
|
||||
int bufferSize = ReaderOptions.DefaultBufferSize
|
||||
) => TarArchive.IsTarFile(stream);
|
||||
)
|
||||
{
|
||||
if (!stream.CanSeek)
|
||||
{
|
||||
return TarArchive.IsTarFile(stream); // For non-seekable streams, just check if it's a tar file
|
||||
}
|
||||
|
||||
var startPosition = stream.Position;
|
||||
|
||||
// First check if it's a regular tar file
|
||||
if (TarArchive.IsTarFile(stream))
|
||||
{
|
||||
stream.Seek(startPosition, SeekOrigin.Begin); // Seek back for consistency
|
||||
return true;
|
||||
}
|
||||
|
||||
// Seek back after the tar file check
|
||||
stream.Seek(startPosition, SeekOrigin.Begin);
|
||||
|
||||
if (compressionOptions == null)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
// Try each compression option to see if it contains a tar file
|
||||
foreach (var testOption in compressionOptions)
|
||||
{
|
||||
if (testOption.Type == CompressionType.None)
|
||||
{
|
||||
continue; // Skip uncompressed
|
||||
}
|
||||
|
||||
stream.Seek(startPosition, SeekOrigin.Begin);
|
||||
|
||||
try
|
||||
{
|
||||
if (testOption.CanHandle(stream))
|
||||
{
|
||||
stream.Seek(startPosition, SeekOrigin.Begin);
|
||||
|
||||
// Try to decompress and check if it contains a tar archive
|
||||
// For compression formats that don't support leaveOpen, we need to save/restore position
|
||||
var positionBeforeDecompress = stream.Position;
|
||||
Stream? decompressedStream = null;
|
||||
bool streamWasClosed = false;
|
||||
|
||||
try
|
||||
{
|
||||
decompressedStream = testOption.Type switch
|
||||
{
|
||||
CompressionType.BZip2 => new BZip2Stream(stream, CompressionMode.Decompress, true),
|
||||
_ => testOption.CreateStream(stream) // For other types, may close the stream
|
||||
};
|
||||
|
||||
if (TarArchive.IsTarFile(decompressedStream))
|
||||
{
|
||||
return true;
|
||||
}
|
||||
}
|
||||
catch (ObjectDisposedException)
|
||||
{
|
||||
streamWasClosed = true;
|
||||
throw; // Stream was closed, can't continue
|
||||
}
|
||||
finally
|
||||
{
|
||||
decompressedStream?.Dispose();
|
||||
|
||||
if (!streamWasClosed && stream.CanSeek)
|
||||
{
|
||||
try
|
||||
{
|
||||
stream.Seek(positionBeforeDecompress, SeekOrigin.Begin);
|
||||
}
|
||||
catch
|
||||
{
|
||||
// If seek fails, the stream might have been closed
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Seek back to start after decompression attempt
|
||||
stream.Seek(startPosition, SeekOrigin.Begin);
|
||||
}
|
||||
}
|
||||
catch
|
||||
{
|
||||
// If decompression fails, it's not this format - continue to next option
|
||||
try
|
||||
{
|
||||
stream.Seek(startPosition, SeekOrigin.Begin);
|
||||
}
|
||||
catch
|
||||
{
|
||||
// Ignore seek failures
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
finally
|
||||
{
|
||||
try
|
||||
{
|
||||
stream.Seek(startPosition, SeekOrigin.Begin);
|
||||
}
|
||||
catch
|
||||
{
|
||||
// Ignore seek failures
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region IArchiveFactory
|
||||
|
||||
/// <inheritdoc/>
|
||||
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
|
||||
TarArchive.Open(stream, readerOptions);
|
||||
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
readerOptions ??= new ReaderOptions();
|
||||
|
||||
// Try to detect and handle compressed tar formats
|
||||
if (stream.CanSeek)
|
||||
{
|
||||
var startPosition = stream.Position;
|
||||
|
||||
// Try each compression option to see if we can decompress it
|
||||
foreach (var testOption in compressionOptions)
|
||||
{
|
||||
if (testOption.Type == CompressionType.None)
|
||||
{
|
||||
continue; // Skip uncompressed
|
||||
}
|
||||
|
||||
stream.Seek(startPosition, SeekOrigin.Begin);
|
||||
|
||||
if (testOption.CanHandle(stream))
|
||||
{
|
||||
stream.Seek(startPosition, SeekOrigin.Begin);
|
||||
|
||||
// Decompress the entire stream into a seekable MemoryStream
|
||||
using var decompressedStream = testOption.CreateStream(stream);
|
||||
var memoryStream = new MemoryStream();
|
||||
decompressedStream.CopyTo(memoryStream);
|
||||
memoryStream.Position = 0;
|
||||
|
||||
// Verify it's actually a tar file
|
||||
if (TarArchive.IsTarFile(memoryStream))
|
||||
{
|
||||
memoryStream.Position = 0;
|
||||
// Return a TarArchive from the decompressed memory stream
|
||||
// The TarArchive will own the MemoryStream and dispose it when disposed
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
LeaveStreamOpen = false, // Ensure the MemoryStream is disposed with the archive
|
||||
ArchiveEncoding = readerOptions?.ArchiveEncoding ?? new ArchiveEncoding()
|
||||
};
|
||||
return TarArchive.Open(memoryStream, options);
|
||||
}
|
||||
|
||||
memoryStream.Dispose();
|
||||
}
|
||||
}
|
||||
|
||||
stream.Seek(startPosition, SeekOrigin.Begin);
|
||||
}
|
||||
|
||||
// Fall back to normal tar archive opening
|
||||
return TarArchive.Open(stream, readerOptions);
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
|
||||
TarArchive.Open(fileInfo, readerOptions);
|
||||
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
readerOptions ??= new ReaderOptions();
|
||||
|
||||
// Try to detect and handle compressed tar formats by file extension and content
|
||||
using var fileStream = fileInfo.OpenRead();
|
||||
|
||||
// Try each compression option
|
||||
foreach (var testOption in compressionOptions)
|
||||
{
|
||||
if (testOption.Type == CompressionType.None)
|
||||
{
|
||||
continue; // Skip uncompressed
|
||||
}
|
||||
|
||||
// Check if file extension matches
|
||||
var fileName = fileInfo.Name.ToLowerInvariant();
|
||||
if (testOption.KnownExtensions.Any(ext => fileName.EndsWith(ext)))
|
||||
{
|
||||
fileStream.Position = 0;
|
||||
|
||||
// Verify it's the right compression format
|
||||
if (testOption.CanHandle(fileStream))
|
||||
{
|
||||
fileStream.Position = 0;
|
||||
|
||||
// Decompress the entire file into a seekable MemoryStream
|
||||
using var decompressedStream = testOption.CreateStream(fileStream);
|
||||
var memoryStream = new MemoryStream();
|
||||
decompressedStream.CopyTo(memoryStream);
|
||||
memoryStream.Position = 0;
|
||||
|
||||
// Verify it's actually a tar file
|
||||
if (TarArchive.IsTarFile(memoryStream))
|
||||
{
|
||||
memoryStream.Position = 0;
|
||||
// Return a TarArchive from the decompressed memory stream
|
||||
// The TarArchive will own the MemoryStream and dispose it when disposed
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
LeaveStreamOpen = false, // Ensure the MemoryStream is disposed with the archive
|
||||
ArchiveEncoding = readerOptions?.ArchiveEncoding ?? new ArchiveEncoding()
|
||||
};
|
||||
return TarArchive.Open(memoryStream, options);
|
||||
}
|
||||
|
||||
memoryStream.Dispose();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// fileStream will be closed by the using statement
|
||||
|
||||
// Fall back to normal tar archive opening
|
||||
return TarArchive.Open(fileInfo, readerOptions);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
|
||||
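Both Open overloads above take the same approach: decompress the whole payload into a MemoryStream so TarArchive gets the seekable stream it needs, at the cost of holding the decompressed tar in memory. The buffering pattern in isolation, using the BCL GZipStream and a hypothetical input file:

using System.IO;
using System.IO.Compression;

using var compressed = File.OpenRead("example.tar.gz");   // hypothetical input file
using var gzip = new GZipStream(compressed, CompressionMode.Decompress);
var buffered = new MemoryStream();
gzip.CopyTo(buffered);
buffered.Position = 0;                                     // now fully seekable for the tar layer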
@@ -75,6 +75,14 @@ public class ZipReader : AbstractReader<ZipEntry, ZipVolume>
                    );
                }
                break;
            case ZipHeaderType.DirectoryEntry:
                // DirectoryEntry headers in the central directory are intentionally skipped.
                // In streaming mode, we can only read forward, and DirectoryEntry headers
                // reference LocalEntry headers that have already been processed. The file
                // data comes from LocalEntry headers, not DirectoryEntry headers.
                // For multi-volume ZIPs where file data spans multiple files, use ZipArchive
                // instead, which requires seekable streams.
                break;
            case ZipHeaderType.DirectoryEnd:
            {
                yield break;
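Because the streaming reader only moves forward, the central-directory entries are informational here; when seeking is possible, the archive API is the better fit, as the comment suggests. A typical switch to the seekable API (the path is hypothetical):

using System;
using SharpCompress.Archives.Zip;

using var archive = ZipArchive.Open("multi-volume.zip");   // requires a seekable stream or file
foreach (var entry in archive.Entries)
{
    Console.WriteLine($"{entry.Key}: {entry.Size} bytes");
}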
@@ -2,11 +2,11 @@
|
||||
<PropertyGroup>
|
||||
<AssemblyTitle>SharpCompress - Pure C# Decompression/Compression</AssemblyTitle>
|
||||
<NeutralLanguage>en-US</NeutralLanguage>
|
||||
<VersionPrefix>0.41.0</VersionPrefix>
|
||||
<AssemblyVersion>0.41.0</AssemblyVersion>
|
||||
<FileVersion>0.41.0</FileVersion>
|
||||
<VersionPrefix>0.42.0</VersionPrefix>
|
||||
<AssemblyVersion>0.42.0</AssemblyVersion>
|
||||
<FileVersion>0.42.0</FileVersion>
|
||||
<Authors>Adam Hathcock</Authors>
|
||||
<TargetFrameworks>net48;net481;netstandard2.0;net6.0;net8.0</TargetFrameworks>
|
||||
<TargetFrameworks>net48;net8.0;net10.0</TargetFrameworks>
|
||||
<AssemblyName>SharpCompress</AssemblyName>
|
||||
<AssemblyOriginatorKeyFile>../../SharpCompress.snk</AssemblyOriginatorKeyFile>
|
||||
<SignAssembly>true</SignAssembly>
|
||||
@@ -17,7 +17,7 @@
|
||||
<Copyright>Copyright (c) 2025 Adam Hathcock</Copyright>
|
||||
<GenerateAssemblyTitleAttribute>false</GenerateAssemblyTitleAttribute>
|
||||
<GenerateAssemblyProductAttribute>false</GenerateAssemblyProductAttribute>
|
||||
<Description>SharpCompress is a compression library for NET Standard 2.0/NET 4.8/NET 4.8.1/NET 6.0/NET 8.0 that can unrar, decompress 7zip, decompress xz, zip/unzip, tar/untar lzip/unlzip, bzip2/unbzip2 and gzip/ungzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip is implemented.</Description>
|
||||
<Description>SharpCompress is a compression library for NET 4.8/NET 8.0/NET 10.0 that can unrar, decompress 7zip, decompress xz, zip/unzip, tar/untar lzip/unlzip, bzip2/unbzip2 and gzip/ungzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip is implemented.</Description>
|
||||
<PublishRepositoryUrl>true</PublishRepositoryUrl>
|
||||
<IncludeSymbols>true</IncludeSymbols>
|
||||
<DebugType>embedded</DebugType>
|
||||
@@ -28,31 +28,29 @@
|
||||
<EmbedUntrackedSources>true</EmbedUntrackedSources>
|
||||
<AllowedOutputExtensionsInPackageBuildOutputFolder>$(AllowedOutputExtensionsInPackageBuildOutputFolder);.pdb</AllowedOutputExtensionsInPackageBuildOutputFolder>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition=" '$(TargetFramework)' == 'net8.0' ">
|
||||
<PropertyGroup Condition=" '$(TargetFramework)' == 'net8.0' Or '$(TargetFramework)' == 'net10.0' ">
|
||||
<IsTrimmable>true</IsTrimmable>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(TargetFramework)|$(Platform)'=='Debug|net8.0|AnyCPU'">
|
||||
<DefineConstants>$(DefineConstants);DEBUG_STREAMS</DefineConstants>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(TargetFramework)|$(Platform)'=='Debug|net10.0|AnyCPU'">
|
||||
<DefineConstants>$(DefineConstants);DEBUG_STREAMS</DefineConstants>
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<PackageReference Include="ZstdSharp.Port" />
|
||||
<PackageReference Include="Microsoft.SourceLink.GitHub" PrivateAssets="All" />
|
||||
</ItemGroup>
|
||||
<ItemGroup Condition=" '$(TargetFramework)' == 'net8.0'">
|
||||
<ItemGroup Condition=" '$(TargetFramework)' == 'net8.0' Or '$(TargetFramework)' == 'net10.0' ">
|
||||
<PackageReference Include="Microsoft.NET.ILLink.Tasks" PrivateAssets="All" />
|
||||
</ItemGroup>
|
||||
<ItemGroup Condition=" '$(TargetFramework)' == 'net48' Or '$(TargetFramework)' == 'net481' ">
|
||||
<ItemGroup Condition=" '$(TargetFramework)' == 'net48' ">
|
||||
<PackageReference Include="Microsoft.Bcl.AsyncInterfaces" />
|
||||
<PackageReference Include="System.Text.Encoding.CodePages" />
|
||||
<PackageReference Include="System.Buffers" />
|
||||
<PackageReference Include="System.Memory" />
|
||||
<PackageReference Include="Microsoft.NETFramework.ReferenceAssemblies" PrivateAssets="All" />
|
||||
</ItemGroup>
|
||||
<ItemGroup Condition=" '$(TargetFramework)' == 'netstandard2.0' ">
|
||||
<PackageReference Include="Microsoft.Bcl.AsyncInterfaces" />
|
||||
<PackageReference Include="System.Text.Encoding.CodePages" />
|
||||
<PackageReference Include="System.Memory" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<None Include="..\..\README.md" Pack="true" PackagePath="\" />
|
||||
</ItemGroup>
|
||||
|
||||
@@ -447,6 +447,31 @@ internal static class Utility
    }
#endif

    public static async Task<bool> ReadFullyAsync(
        this Stream stream,
        byte[] buffer,
        CancellationToken cancellationToken = default
    )
    {
        var total = 0;
        int read;
        while (
            (
                read = await stream
                    .ReadAsync(buffer, total, buffer.Length - total, cancellationToken)
                    .ConfigureAwait(false)
            ) > 0
        )
        {
            total += read;
            if (total >= buffer.Length)
            {
                return true;
            }
        }
        return (total >= buffer.Length);
    }

    public static string TrimNulls(this string source) => source.Replace('\0', ' ').Trim();

    /// <summary>
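ReadFullyAsync keeps calling ReadAsync until the buffer is full or the stream ends, returning false on a short read instead of throwing. A typical call site, assuming `stream` and `cancellationToken` are already in scope:

var header = new byte[4];
if (!await stream.ReadFullyAsync(header, cancellationToken).ConfigureAwait(false))
{
    throw new EndOfStreamException();
}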
@@ -48,7 +48,29 @@ internal class ZipCentralDirectoryEntry
        var decompressedvalue = zip64 ? uint.MaxValue : (uint)Decompressed;
        var headeroffsetvalue = zip64 ? uint.MaxValue : (uint)HeaderOffset;
        var extralength = zip64 ? (2 + 2 + 8 + 8 + 8 + 4) : 0;
        var version = (byte)(zip64 ? 45 : 20); // Version 20 required for deflate/encryption

        // Determine version needed to extract:
        // - Version 63 for LZMA, PPMd, BZip2, ZStandard (advanced compression methods)
        // - Version 45 for Zip64 extensions (when Zip64HeaderOffset != 0 or actual sizes require it)
        // - Version 20 for standard Deflate/None compression
        byte version;
        if (
            compression == ZipCompressionMethod.LZMA
            || compression == ZipCompressionMethod.PPMd
            || compression == ZipCompressionMethod.BZip2
            || compression == ZipCompressionMethod.ZStandard
        )
        {
            version = 63;
        }
        else if (zip64 || Zip64HeaderOffset != 0)
        {
            version = 45;
        }
        else
        {
            version = 20;
        }

        var flags = Equals(archiveEncoding.GetEncoding(), Encoding.UTF8)
            ? HeaderFlags.Efs
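The new branch picks the highest "version needed to extract" the entry requires: 63 for the newer compression methods, 45 once Zip64 fields are emitted, and 20 otherwise. The same rule as a stand-alone expression (a sketch over the values used above, not the library's method):

static byte VersionNeededToExtract(ZipCompressionMethod method, bool zip64) =>
    method is ZipCompressionMethod.LZMA
        or ZipCompressionMethod.PPMd
        or ZipCompressionMethod.BZip2
        or ZipCompressionMethod.ZStandard
        ? (byte)63
        : zip64
            ? (byte)45
            : (byte)20;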
@@ -4,11 +4,11 @@
|
||||
".NETFramework,Version=v4.8": {
|
||||
"Microsoft.Bcl.AsyncInterfaces": {
|
||||
"type": "Direct",
|
||||
"requested": "[8.0.0, )",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "3WA9q9yVqJp222P3x1wYIGDAkpjAku0TMUaaQV22g6L67AI0LdOIrVS7Ht2vJfLHGSPVuqN94vIr15qn+HEkHw==",
|
||||
"requested": "[10.0.0, )",
|
||||
"resolved": "10.0.0",
|
||||
"contentHash": "vFuwSLj9QJBbNR0NeNO4YVASUbokxs+i/xbuu8B+Fs4FAZg5QaFa6eGrMaRqTzzNI5tAb97T7BhSxtLckFyiRA==",
|
||||
"dependencies": {
|
||||
"System.Threading.Tasks.Extensions": "4.5.4"
|
||||
"System.Threading.Tasks.Extensions": "4.6.3"
|
||||
}
|
||||
},
|
||||
"Microsoft.NETFramework.ReferenceAssemblies": {
|
||||
@@ -49,12 +49,13 @@
|
||||
},
|
||||
"System.Text.Encoding.CodePages": {
|
||||
"type": "Direct",
|
||||
"requested": "[8.0.0, )",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "OZIsVplFGaVY90G2SbpgU7EnCoOO5pw1t4ic21dBF3/1omrJFpAGoNAVpPyMVOC90/hvgkGG3VFqR13YgZMQfg==",
|
||||
"requested": "[10.0.0, )",
|
||||
"resolved": "10.0.0",
|
||||
"contentHash": "QLP54mIATaBpjGlsZIxga38VPk1G9js0Kw651B+bvrXi2kSgGZYrxJSpM3whhTZCBK4HEBHX3fzfDQMw7CXHGQ==",
|
||||
"dependencies": {
|
||||
"System.Memory": "4.5.5",
|
||||
"System.Runtime.CompilerServices.Unsafe": "6.0.0"
|
||||
"System.Memory": "4.6.3",
|
||||
"System.Runtime.CompilerServices.Unsafe": "6.1.2",
|
||||
"System.ValueTuple": "4.6.1"
|
||||
}
|
||||
},
|
||||
"ZstdSharp.Port": {
|
||||
@@ -95,216 +96,25 @@
|
||||
},
|
||||
"System.Threading.Tasks.Extensions": {
|
||||
"type": "Transitive",
|
||||
"resolved": "4.5.4",
|
||||
"contentHash": "zteT+G8xuGu6mS+mzDzYXbzS7rd3K6Fjb9RiZlYlJPam2/hU7JCBZBVEcywNuR+oZ1ncTvc/cq0faRr3P01OVg==",
|
||||
"dependencies": {
|
||||
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
|
||||
}
|
||||
}
|
||||
},
|
||||
".NETFramework,Version=v4.8.1": {
|
||||
"Microsoft.Bcl.AsyncInterfaces": {
|
||||
"type": "Direct",
|
||||
"requested": "[8.0.0, )",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "3WA9q9yVqJp222P3x1wYIGDAkpjAku0TMUaaQV22g6L67AI0LdOIrVS7Ht2vJfLHGSPVuqN94vIr15qn+HEkHw==",
|
||||
"dependencies": {
|
||||
"System.Threading.Tasks.Extensions": "4.5.4"
|
||||
}
|
||||
},
|
||||
"Microsoft.NETFramework.ReferenceAssemblies": {
|
||||
"type": "Direct",
|
||||
"requested": "[1.0.3, )",
|
||||
"resolved": "1.0.3",
|
||||
"contentHash": "vUc9Npcs14QsyOD01tnv/m8sQUnGTGOw1BCmKcv77LBJY7OxhJ+zJF7UD/sCL3lYNFuqmQEVlkfS4Quif6FyYg==",
|
||||
"dependencies": {
|
||||
"Microsoft.NETFramework.ReferenceAssemblies.net481": "1.0.3"
|
||||
}
|
||||
},
|
||||
"Microsoft.SourceLink.GitHub": {
|
||||
"type": "Direct",
|
||||
"requested": "[8.0.0, )",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==",
|
||||
"dependencies": {
|
||||
"Microsoft.Build.Tasks.Git": "8.0.0",
|
||||
"Microsoft.SourceLink.Common": "8.0.0"
|
||||
}
|
||||
},
|
||||
"System.Buffers": {
|
||||
"type": "Direct",
|
||||
"requested": "[4.6.1, )",
|
||||
"resolved": "4.6.1",
|
||||
"contentHash": "N8GXpmiLMtljq7gwvyS+1QvKT/W2J8sNAvx+HVg4NGmsG/H+2k/y9QI23auLJRterrzCiDH+IWAw4V/GPwsMlw=="
|
||||
},
|
||||
"System.Memory": {
|
||||
"type": "Direct",
|
||||
"requested": "[4.6.3, )",
|
||||
"resolved": "4.6.3",
|
||||
"contentHash": "qdcDOgnFZY40+Q9876JUHnlHu7bosOHX8XISRoH94fwk6hgaeQGSgfZd8srWRZNt5bV9ZW2TljcegDNxsf+96A==",
|
||||
"contentHash": "7sCiwilJLYbTZELaKnc7RecBBXWXA+xMLQWZKWawBxYjp6DBlSE3v9/UcvKBvr1vv2tTOhipiogM8rRmxlhrVA==",
|
||||
"dependencies": {
|
||||
"System.Buffers": "4.6.1",
|
||||
"System.Numerics.Vectors": "4.6.1",
|
||||
"System.Runtime.CompilerServices.Unsafe": "6.1.2"
|
||||
}
|
||||
},
|
||||
"System.Text.Encoding.CodePages": {
|
||||
"type": "Direct",
|
||||
"requested": "[8.0.0, )",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "OZIsVplFGaVY90G2SbpgU7EnCoOO5pw1t4ic21dBF3/1omrJFpAGoNAVpPyMVOC90/hvgkGG3VFqR13YgZMQfg==",
|
||||
"dependencies": {
|
||||
"System.Memory": "4.5.5",
|
||||
"System.Runtime.CompilerServices.Unsafe": "6.0.0"
|
||||
}
|
||||
},
|
||||
"ZstdSharp.Port": {
|
||||
"type": "Direct",
|
||||
"requested": "[0.8.6, )",
|
||||
"resolved": "0.8.6",
|
||||
"contentHash": "iP4jVLQoQmUjMU88g1WObiNr6YKZGvh4aOXn3yOJsHqZsflwRsxZPcIBvNXgjXO3vQKSLctXGLTpcBPLnWPS8A==",
|
||||
"dependencies": {
|
||||
"Microsoft.Bcl.AsyncInterfaces": "5.0.0",
|
||||
"System.Memory": "4.5.5",
|
||||
"System.Runtime.CompilerServices.Unsafe": "6.0.0"
|
||||
}
|
||||
},
|
||||
"Microsoft.Build.Tasks.Git": {
|
||||
"type": "Transitive",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
|
||||
},
|
||||
"Microsoft.NETFramework.ReferenceAssemblies.net481": {
|
||||
"type": "Transitive",
|
||||
"resolved": "1.0.3",
|
||||
"contentHash": "Vv/20vgHS7VglVOVh8J3Iz/MA+VYKVRp9f7r2qiKBMuzviTOmocG70yq0Q8T5OTmCONkEAIJwETD1zhEfLkAXQ=="
|
||||
},
|
||||
"Microsoft.SourceLink.Common": {
|
||||
"type": "Transitive",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
|
||||
},
|
||||
"System.Numerics.Vectors": {
|
||||
"System.ValueTuple": {
|
||||
"type": "Transitive",
|
||||
"resolved": "4.6.1",
|
||||
"contentHash": "sQxefTnhagrhoq2ReR0D/6K0zJcr9Hrd6kikeXsA1I8kOCboTavcUC4r7TSfpKFeE163uMuxZcyfO1mGO3EN8Q=="
|
||||
},
|
||||
"System.Runtime.CompilerServices.Unsafe": {
|
||||
"type": "Transitive",
|
||||
"resolved": "6.1.2",
|
||||
"contentHash": "2hBr6zdbIBTDE3EhK7NSVNdX58uTK6iHW/P/Axmm9sl1xoGSLqDvMtpecn226TNwHByFokYwJmt/aQQNlO5CRw=="
|
||||
},
|
||||
"System.Threading.Tasks.Extensions": {
|
||||
"type": "Transitive",
|
||||
"resolved": "4.5.4",
|
||||
"contentHash": "zteT+G8xuGu6mS+mzDzYXbzS7rd3K6Fjb9RiZlYlJPam2/hU7JCBZBVEcywNuR+oZ1ncTvc/cq0faRr3P01OVg==",
|
||||
"dependencies": {
|
||||
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
|
||||
}
|
||||
"contentHash": "+RJT4qaekpZ7DDLhf+LTjq+E48jieKiY9ulJ+BoxKmZblIJfIJT8Ufcaa/clQqnYvWs8jugfGSMu8ylS0caG0w=="
|
||||
}
|
||||
},
|
||||
".NETStandard,Version=v2.0": {
|
||||
"Microsoft.Bcl.AsyncInterfaces": {
|
||||
"net10.0": {
|
||||
"Microsoft.NET.ILLink.Tasks": {
|
||||
"type": "Direct",
|
||||
"requested": "[8.0.0, )",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "3WA9q9yVqJp222P3x1wYIGDAkpjAku0TMUaaQV22g6L67AI0LdOIrVS7Ht2vJfLHGSPVuqN94vIr15qn+HEkHw==",
|
||||
"dependencies": {
|
||||
"System.Threading.Tasks.Extensions": "4.5.4"
|
||||
}
|
||||
"requested": "[10.0.0, )",
|
||||
"resolved": "10.0.0",
|
||||
"contentHash": "kICGrGYEzCNI3wPzfEXcwNHgTvlvVn9yJDhSdRK+oZQy4jvYH529u7O0xf5ocQKzOMjfS07+3z9PKRIjrFMJDA=="
|
||||
},
|
||||
"Microsoft.SourceLink.GitHub": {
|
||||
"type": "Direct",
|
||||
"requested": "[8.0.0, )",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==",
|
||||
"dependencies": {
|
||||
"Microsoft.Build.Tasks.Git": "8.0.0",
|
||||
"Microsoft.SourceLink.Common": "8.0.0"
|
||||
}
|
||||
},
|
||||
"NETStandard.Library": {
|
||||
"type": "Direct",
|
||||
"requested": "[2.0.3, )",
|
||||
"resolved": "2.0.3",
|
||||
"contentHash": "st47PosZSHrjECdjeIzZQbzivYBJFv6P2nv4cj2ypdI204DO+vZ7l5raGMiX4eXMJ53RfOIg+/s4DHVZ54Nu2A==",
|
||||
"dependencies": {
|
||||
"Microsoft.NETCore.Platforms": "1.1.0"
|
||||
}
|
||||
},
|
||||
"System.Memory": {
|
||||
"type": "Direct",
|
||||
"requested": "[4.6.3, )",
|
||||
"resolved": "4.6.3",
|
||||
"contentHash": "qdcDOgnFZY40+Q9876JUHnlHu7bosOHX8XISRoH94fwk6hgaeQGSgfZd8srWRZNt5bV9ZW2TljcegDNxsf+96A==",
|
||||
"dependencies": {
|
||||
"System.Buffers": "4.6.1",
|
||||
"System.Numerics.Vectors": "4.6.1",
|
||||
"System.Runtime.CompilerServices.Unsafe": "6.1.2"
|
||||
}
|
||||
},
|
||||
"System.Text.Encoding.CodePages": {
|
||||
"type": "Direct",
|
||||
"requested": "[8.0.0, )",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "OZIsVplFGaVY90G2SbpgU7EnCoOO5pw1t4ic21dBF3/1omrJFpAGoNAVpPyMVOC90/hvgkGG3VFqR13YgZMQfg==",
|
||||
"dependencies": {
|
||||
"System.Memory": "4.5.5",
|
||||
"System.Runtime.CompilerServices.Unsafe": "6.0.0"
|
||||
}
|
||||
},
|
||||
"ZstdSharp.Port": {
|
||||
"type": "Direct",
|
||||
"requested": "[0.8.6, )",
|
||||
"resolved": "0.8.6",
|
||||
"contentHash": "iP4jVLQoQmUjMU88g1WObiNr6YKZGvh4aOXn3yOJsHqZsflwRsxZPcIBvNXgjXO3vQKSLctXGLTpcBPLnWPS8A==",
|
||||
"dependencies": {
|
||||
"Microsoft.Bcl.AsyncInterfaces": "5.0.0",
|
||||
"System.Memory": "4.5.5",
|
||||
"System.Runtime.CompilerServices.Unsafe": "6.0.0"
|
||||
}
|
||||
},
|
||||
"Microsoft.Build.Tasks.Git": {
|
||||
"type": "Transitive",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
|
||||
},
|
||||
"Microsoft.NETCore.Platforms": {
|
||||
"type": "Transitive",
|
||||
"resolved": "1.1.0",
|
||||
"contentHash": "kz0PEW2lhqygehI/d6XsPCQzD7ff7gUJaVGPVETX611eadGsA3A877GdSlU0LRVMCTH/+P3o2iDTak+S08V2+A=="
|
||||
},
|
||||
"Microsoft.SourceLink.Common": {
|
||||
"type": "Transitive",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
|
||||
},
|
||||
"System.Numerics.Vectors": {
|
||||
"type": "Transitive",
|
||||
"resolved": "4.6.1",
|
||||
"contentHash": "sQxefTnhagrhoq2ReR0D/6K0zJcr9Hrd6kikeXsA1I8kOCboTavcUC4r7TSfpKFeE163uMuxZcyfO1mGO3EN8Q=="
|
||||
},
|
||||
"System.Runtime.CompilerServices.Unsafe": {
|
||||
"type": "Transitive",
|
||||
"resolved": "6.1.2",
|
||||
"contentHash": "2hBr6zdbIBTDE3EhK7NSVNdX58uTK6iHW/P/Axmm9sl1xoGSLqDvMtpecn226TNwHByFokYwJmt/aQQNlO5CRw=="
|
||||
},
|
||||
"System.Threading.Tasks.Extensions": {
|
||||
"type": "Transitive",
|
||||
"resolved": "4.5.4",
|
||||
"contentHash": "zteT+G8xuGu6mS+mzDzYXbzS7rd3K6Fjb9RiZlYlJPam2/hU7JCBZBVEcywNuR+oZ1ncTvc/cq0faRr3P01OVg==",
|
||||
"dependencies": {
|
||||
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
|
||||
}
|
||||
},
|
||||
"System.Buffers": {
|
||||
"type": "CentralTransitive",
|
||||
"requested": "[4.6.1, )",
|
||||
"resolved": "4.6.1",
|
||||
"contentHash": "N8GXpmiLMtljq7gwvyS+1QvKT/W2J8sNAvx+HVg4NGmsG/H+2k/y9QI23auLJRterrzCiDH+IWAw4V/GPwsMlw=="
|
||||
}
|
||||
},
|
||||
"net6.0": {
|
||||
"Microsoft.SourceLink.GitHub": {
|
||||
"type": "Direct",
|
||||
"requested": "[8.0.0, )",
|
||||
@@ -335,9 +145,9 @@
|
||||
"net8.0": {
|
||||
"Microsoft.NET.ILLink.Tasks": {
|
||||
"type": "Direct",
|
||||
"requested": "[8.0.21, )",
|
||||
"resolved": "8.0.21",
|
||||
"contentHash": "s8H5PZQs50OcNkaB6Si54+v3GWM7vzs6vxFRMlD3aXsbM+aPCtod62gmK0BYWou9diGzmo56j8cIf/PziijDqQ=="
|
||||
"requested": "[10.0.0, )",
|
||||
"resolved": "10.0.0",
|
||||
"contentHash": "kICGrGYEzCNI3wPzfEXcwNHgTvlvVn9yJDhSdRK+oZQy4jvYH529u7O0xf5ocQKzOMjfS07+3z9PKRIjrFMJDA=="
|
||||
},
|
||||
"Microsoft.SourceLink.GitHub": {
|
||||
"type": "Direct",
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
<PropertyGroup>
|
||||
<OutputType>Exe</OutputType>
|
||||
<TargetFramework>net8.0</TargetFramework>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<PackageReference Include="JetBrains.Profiler.SelfApi" />
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"version": 2,
|
||||
"dependencies": {
|
||||
"net8.0": {
|
||||
"net10.0": {
|
||||
"JetBrains.Profiler.SelfApi": {
|
||||
"type": "Direct",
|
||||
"requested": "[2.5.14, )",
|
||||
|
||||
@@ -27,5 +27,22 @@ namespace SharpCompress.Test.Arc
|
||||
|
||||
[Fact]
|
||||
public void Arc_Crunched_Read() => Read("Arc.crunched.arc");
|
||||
|
||||
[Theory]
|
||||
[InlineData("Arc.crunched.largefile.arc", CompressionType.Crunched)]
|
||||
public void Arc_LargeFile_ShouldThrow(string fileName, CompressionType compressionType)
|
||||
{
|
||||
var exception = Assert.Throws<NotSupportedException>(() =>
|
||||
ReadForBufferBoundaryCheck(fileName, compressionType)
|
||||
);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData("Arc.uncompressed.largefile.arc", CompressionType.None)]
|
||||
[InlineData("Arc.squeezed.largefile.arc", CompressionType.Squeezed)]
|
||||
public void Arc_LargeFileTest_Read(string fileName, CompressionType compressionType)
|
||||
{
|
||||
ReadForBufferBoundaryCheck(fileName, compressionType);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
63 tests/SharpCompress.Test/ArchiveFactoryCompressedTarTests.cs Normal file
@@ -0,0 +1,63 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using SharpCompress.Archives;
|
||||
using Xunit;
|
||||
|
||||
namespace SharpCompress.Test;
|
||||
|
||||
public class ArchiveFactoryCompressedTarTests : TestBase
|
||||
{
|
||||
[Fact]
|
||||
public void ArchiveFactory_Open_TarBz2_ThrowsHelpfulException()
|
||||
{
|
||||
var testFile = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.bz2");
|
||||
var exception = Assert.Throws<InvalidOperationException>(() =>
|
||||
{
|
||||
using var archive = ArchiveFactory.Open(testFile);
|
||||
});
|
||||
|
||||
Assert.Contains("tar.bz2", exception.Message);
|
||||
Assert.Contains("ReaderFactory", exception.Message);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ArchiveFactory_Open_TarLz_ThrowsHelpfulException()
|
||||
{
|
||||
var testFile = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.lz");
|
||||
var exception = Assert.Throws<InvalidOperationException>(() =>
|
||||
{
|
||||
using var archive = ArchiveFactory.Open(testFile);
|
||||
});
|
||||
|
||||
Assert.Contains("tar.lz", exception.Message);
|
||||
Assert.Contains("ReaderFactory", exception.Message);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ArchiveFactory_Open_TarBz2Stream_ThrowsHelpfulException()
|
||||
{
|
||||
var testFile = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.bz2");
|
||||
using var stream = File.OpenRead(testFile);
|
||||
var exception = Assert.Throws<InvalidOperationException>(() =>
|
||||
{
|
||||
using var archive = ArchiveFactory.Open(stream);
|
||||
});
|
||||
|
||||
Assert.Contains("tar.bz2", exception.Message);
|
||||
Assert.Contains("ReaderFactory", exception.Message);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ArchiveFactory_Open_TarLzStream_ThrowsHelpfulException()
|
||||
{
|
||||
var testFile = Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.lz");
|
||||
using var stream = File.OpenRead(testFile);
|
||||
var exception = Assert.Throws<InvalidOperationException>(() =>
|
||||
{
|
||||
using var archive = ArchiveFactory.Open(stream);
|
||||
});
|
||||
|
||||
Assert.Contains("tar.lz", exception.Message);
|
||||
Assert.Contains("ReaderFactory", exception.Message);
|
||||
}
|
||||
}
|
||||
@@ -8,6 +8,7 @@ using SharpCompress.Common;
|
||||
using SharpCompress.Readers;
|
||||
using SharpCompress.Readers.Arj;
|
||||
using Xunit;
|
||||
using Xunit.Sdk;
|
||||
|
||||
namespace SharpCompress.Test.Arj
|
||||
{
|
||||
@@ -22,6 +23,15 @@ namespace SharpCompress.Test.Arj
|
||||
[Fact]
|
||||
public void Arj_Uncompressed_Read() => Read("Arj.store.arj", CompressionType.None);
|
||||
|
||||
[Fact]
|
||||
public void Arj_Method1_Read() => Read("Arj.method1.arj");
|
||||
|
||||
[Fact]
|
||||
public void Arj_Method2_Read() => Read("Arj.method2.arj");
|
||||
|
||||
[Fact]
|
||||
public void Arj_Method3_Read() => Read("Arj.method3.arj");
|
||||
|
||||
[Fact]
|
||||
public void Arj_Method4_Read() => Read("Arj.method4.arj");
|
||||
|
||||
@@ -35,19 +45,36 @@ namespace SharpCompress.Test.Arj
|
||||
public void Arj_Multi_Reader()
|
||||
{
|
||||
var exception = Assert.Throws<MultiVolumeExtractionException>(() =>
|
||||
DoArj_Multi_Reader(
|
||||
[
|
||||
"Arj.store.split.arj",
|
||||
"Arj.store.split.a01",
|
||||
"Arj.store.split.a02",
|
||||
"Arj.store.split.a03",
|
||||
"Arj.store.split.a04",
|
||||
"Arj.store.split.a05",
|
||||
]
|
||||
)
|
||||
DoArj_Multi_Reader([
|
||||
"Arj.store.split.arj",
|
||||
"Arj.store.split.a01",
|
||||
"Arj.store.split.a02",
|
||||
"Arj.store.split.a03",
|
||||
"Arj.store.split.a04",
|
||||
"Arj.store.split.a05",
|
||||
])
|
||||
);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData("Arj.method1.largefile.arj", CompressionType.ArjLZ77)]
|
||||
[InlineData("Arj.method2.largefile.arj", CompressionType.ArjLZ77)]
|
||||
[InlineData("Arj.method3.largefile.arj", CompressionType.ArjLZ77)]
|
||||
public void Arj_LargeFile_ShouldThrow(string fileName, CompressionType compressionType)
|
||||
{
|
||||
var exception = Assert.Throws<NotSupportedException>(() =>
|
||||
ReadForBufferBoundaryCheck(fileName, compressionType)
|
||||
);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData("Arj.store.largefile.arj", CompressionType.None)]
|
||||
[InlineData("Arj.method4.largefile.arj", CompressionType.ArjLZ77)]
|
||||
public void Arj_LargeFileTest_Read(string fileName, CompressionType compressionType)
|
||||
{
|
||||
ReadForBufferBoundaryCheck(fileName, compressionType);
|
||||
}
|
||||
|
||||
private void DoArj_Multi_Reader(string[] archives)
|
||||
{
|
||||
using (
|
||||
|
||||
@@ -633,4 +633,13 @@ public class RarArchiveTests : ArchiveTests
|
||||
"Rar5.encrypted_filesOnly.rar",
|
||||
"Failure jpg exe Empty тест.txt jpg\\test.jpg exe\\test.exe"
|
||||
);
|
||||
|
||||
[Fact]
|
||||
public void Rar_TestEncryptedDetection()
|
||||
{
|
||||
using var passwordProtectedFilesArchive = RarArchive.Open(
|
||||
Path.Combine(TEST_ARCHIVES_PATH, "Rar.encrypted_filesOnly.rar")
|
||||
);
|
||||
Assert.True(passwordProtectedFilesArchive.IsEncrypted);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,29 +15,25 @@ public class RarReaderAsyncTests : ReaderTests
|
||||
{
|
||||
[Fact]
|
||||
public async Task Rar_Multi_Reader_Async() =>
|
||||
await DoRar_Multi_Reader_Async(
|
||||
[
|
||||
"Rar.multi.part01.rar",
|
||||
"Rar.multi.part02.rar",
|
||||
"Rar.multi.part03.rar",
|
||||
"Rar.multi.part04.rar",
|
||||
"Rar.multi.part05.rar",
|
||||
"Rar.multi.part06.rar",
|
||||
]
|
||||
);
|
||||
await DoRar_Multi_Reader_Async([
|
||||
"Rar.multi.part01.rar",
|
||||
"Rar.multi.part02.rar",
|
||||
"Rar.multi.part03.rar",
|
||||
"Rar.multi.part04.rar",
|
||||
"Rar.multi.part05.rar",
|
||||
"Rar.multi.part06.rar",
|
||||
]);
|
||||
|
||||
[Fact]
|
||||
public async Task Rar5_Multi_Reader_Async() =>
|
||||
await DoRar_Multi_Reader_Async(
|
||||
[
|
||||
"Rar5.multi.part01.rar",
|
||||
"Rar5.multi.part02.rar",
|
||||
"Rar5.multi.part03.rar",
|
||||
"Rar5.multi.part04.rar",
|
||||
"Rar5.multi.part05.rar",
|
||||
"Rar5.multi.part06.rar",
|
||||
]
|
||||
);
|
||||
await DoRar_Multi_Reader_Async([
|
||||
"Rar5.multi.part01.rar",
|
||||
"Rar5.multi.part02.rar",
|
||||
"Rar5.multi.part03.rar",
|
||||
"Rar5.multi.part04.rar",
|
||||
"Rar5.multi.part05.rar",
|
||||
"Rar5.multi.part06.rar",
|
||||
]);
|
||||
|
||||
private async Task DoRar_Multi_Reader_Async(string[] archives)
|
||||
{
|
||||
@@ -95,29 +91,25 @@ public class RarReaderAsyncTests : ReaderTests
|
||||
|
||||
[Fact]
|
||||
public async Task Rar_Multi_Reader_Delete_Files_Async() =>
|
||||
await DoRar_Multi_Reader_Delete_Files_Async(
|
||||
[
|
||||
"Rar.multi.part01.rar",
|
||||
"Rar.multi.part02.rar",
|
||||
"Rar.multi.part03.rar",
|
||||
"Rar.multi.part04.rar",
|
||||
"Rar.multi.part05.rar",
|
||||
"Rar.multi.part06.rar",
|
||||
]
|
||||
);
|
||||
await DoRar_Multi_Reader_Delete_Files_Async([
|
||||
"Rar.multi.part01.rar",
|
||||
"Rar.multi.part02.rar",
|
||||
"Rar.multi.part03.rar",
|
||||
"Rar.multi.part04.rar",
|
||||
"Rar.multi.part05.rar",
|
||||
"Rar.multi.part06.rar",
|
||||
]);
|
||||
|
||||
[Fact]
|
||||
public async Task Rar5_Multi_Reader_Delete_Files_Async() =>
|
||||
await DoRar_Multi_Reader_Delete_Files_Async(
|
||||
[
|
||||
"Rar5.multi.part01.rar",
|
||||
"Rar5.multi.part02.rar",
|
||||
"Rar5.multi.part03.rar",
|
||||
"Rar5.multi.part04.rar",
|
||||
"Rar5.multi.part05.rar",
|
||||
"Rar5.multi.part06.rar",
|
||||
]
|
||||
);
|
||||
await DoRar_Multi_Reader_Delete_Files_Async([
|
||||
"Rar5.multi.part01.rar",
|
||||
"Rar5.multi.part02.rar",
|
||||
"Rar5.multi.part03.rar",
|
||||
"Rar5.multi.part04.rar",
|
||||
"Rar5.multi.part05.rar",
|
||||
"Rar5.multi.part06.rar",
|
||||
]);
|
||||
|
||||
private async Task DoRar_Multi_Reader_Delete_Files_Async(string[] archives)
|
||||
{
|
||||
|
||||
@@ -14,29 +14,25 @@ public class RarReaderTests : ReaderTests
|
{
[Fact]
public void Rar_Multi_Reader() =>
DoRar_Multi_Reader(
[
"Rar.multi.part01.rar",
"Rar.multi.part02.rar",
"Rar.multi.part03.rar",
"Rar.multi.part04.rar",
"Rar.multi.part05.rar",
"Rar.multi.part06.rar",
]
);
DoRar_Multi_Reader([
"Rar.multi.part01.rar",
"Rar.multi.part02.rar",
"Rar.multi.part03.rar",
"Rar.multi.part04.rar",
"Rar.multi.part05.rar",
"Rar.multi.part06.rar",
]);

[Fact]
public void Rar5_Multi_Reader() =>
DoRar_Multi_Reader(
[
"Rar5.multi.part01.rar",
"Rar5.multi.part02.rar",
"Rar5.multi.part03.rar",
"Rar5.multi.part04.rar",
"Rar5.multi.part05.rar",
"Rar5.multi.part06.rar",
]
);
DoRar_Multi_Reader([
"Rar5.multi.part01.rar",
"Rar5.multi.part02.rar",
"Rar5.multi.part03.rar",
"Rar5.multi.part04.rar",
"Rar5.multi.part05.rar",
"Rar5.multi.part06.rar",
]);

private void DoRar_Multi_Reader(string[] archives)
{
@@ -61,16 +57,14 @@ public class RarReaderTests : ReaderTests

[Fact]
public void Rar_Multi_Reader_Encrypted() =>
DoRar_Multi_Reader_Encrypted(
[
"Rar.EncryptedParts.part01.rar",
"Rar.EncryptedParts.part02.rar",
"Rar.EncryptedParts.part03.rar",
"Rar.EncryptedParts.part04.rar",
"Rar.EncryptedParts.part05.rar",
"Rar.EncryptedParts.part06.rar",
]
);
DoRar_Multi_Reader_Encrypted([
"Rar.EncryptedParts.part01.rar",
"Rar.EncryptedParts.part02.rar",
"Rar.EncryptedParts.part03.rar",
"Rar.EncryptedParts.part04.rar",
"Rar.EncryptedParts.part05.rar",
"Rar.EncryptedParts.part06.rar",
]);

private void DoRar_Multi_Reader_Encrypted(string[] archives) =>
Assert.Throws<InvalidFormatException>(() =>
@@ -97,29 +91,25 @@ public class RarReaderTests : ReaderTests

[Fact]
public void Rar_Multi_Reader_Delete_Files() =>
DoRar_Multi_Reader_Delete_Files(
[
"Rar.multi.part01.rar",
"Rar.multi.part02.rar",
"Rar.multi.part03.rar",
"Rar.multi.part04.rar",
"Rar.multi.part05.rar",
"Rar.multi.part06.rar",
]
);
DoRar_Multi_Reader_Delete_Files([
"Rar.multi.part01.rar",
"Rar.multi.part02.rar",
"Rar.multi.part03.rar",
"Rar.multi.part04.rar",
"Rar.multi.part05.rar",
"Rar.multi.part06.rar",
]);

[Fact]
public void Rar5_Multi_Reader_Delete_Files() =>
DoRar_Multi_Reader_Delete_Files(
[
"Rar5.multi.part01.rar",
"Rar5.multi.part02.rar",
"Rar5.multi.part03.rar",
"Rar5.multi.part04.rar",
"Rar5.multi.part05.rar",
"Rar5.multi.part06.rar",
]
);
DoRar_Multi_Reader_Delete_Files([
"Rar5.multi.part01.rar",
"Rar5.multi.part02.rar",
"Rar5.multi.part03.rar",
"Rar5.multi.part04.rar",
"Rar5.multi.part05.rar",
"Rar5.multi.part06.rar",
]);

private void DoRar_Multi_Reader_Delete_Files(string[] archives)
{
@@ -407,16 +397,14 @@ public class RarReaderTests : ReaderTests
Path.Combine("exe", "test.exe"),
}
);
using var reader = RarReader.Open(
[
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part01.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part02.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part03.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part04.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part05.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part06.rar"),
]
);
using var reader = RarReader.Open([
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part01.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part02.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part03.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part04.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part05.rar"),
Path.Combine(TEST_ARCHIVES_PATH, "Rar.multi.part06.rar"),
]);
while (reader.MoveToNextEntry())
{
Assert.Equal(expectedOrder.Pop(), reader.Entry.Key);
@@ -176,6 +176,27 @@ public abstract class ReaderTests : TestBase
}
}

protected void ReadForBufferBoundaryCheck(string fileName, CompressionType compressionType)
{
using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, fileName));
using var reader = ReaderFactory.Open(stream, new ReaderOptions { LookForHeader = true });

while (reader.MoveToNextEntry())
{
Assert.Equal(compressionType, reader.Entry.CompressionType);

reader.WriteEntryToDirectory(
SCRATCH_FILES_PATH,
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
);
}

CompareFilesByPath(
Path.Combine(SCRATCH_FILES_PATH, "alice29.txt"),
Path.Combine(MISC_TEST_FILES_PATH, "alice29.txt")
);
}

protected void Iterate(
string testArchive,
string fileOrder,
@@ -224,6 +224,15 @@ public class SevenZipArchiveTests : ArchiveTests
);
}

[Fact]
public void SevenZipArchive_TestEncryptedDetection()
{
using var passwordProtectedFilesArchive = SevenZipArchive.Open(
Path.Combine(TEST_ARCHIVES_PATH, "7Zip.encryptedFiles.7z")
);
Assert.True(passwordProtectedFilesArchive.IsEncrypted);
}

[Fact]
public void SevenZipArchive_TestSolidDetection()
{
@@ -1,12 +1,12 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>net8.0;net48</TargetFrameworks>
<TargetFrameworks>net10.0;net48</TargetFrameworks>
<AssemblyName>SharpCompress.Test</AssemblyName>
<PackageId>SharpCompress.Test</PackageId>
<AssemblyOriginatorKeyFile>SharpCompress.Test.snk</AssemblyOriginatorKeyFile>
<SignAssembly>true</SignAssembly>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(TargetFramework)|$(Platform)'=='Debug|net8.0|AnyCPU'">
<PropertyGroup Condition="'$(Configuration)|$(TargetFramework)|$(Platform)'=='Debug|net10.0|AnyCPU'">
<DefineConstants>$(DefineConstants);DEBUG_STREAMS</DefineConstants>
</PropertyGroup>
<PropertyGroup Condition="$([System.Runtime.InteropServices.RuntimeInformation]::IsOSPlatform($([System.Runtime.InteropServices.OSPlatform]::Windows)))">
@@ -254,4 +254,58 @@ public class TarReaderTests : ReaderTests
}
}
#endif

[Fact]
public void Tar_Malformed_LongName_Excessive_Size()
{
// Create a malformed TAR header with an excessively large LongName size
// This simulates what happens during auto-detection of compressed files
var buffer = new byte[512];

// Set up a basic TAR header structure
// Name field (offset 0, 100 bytes) - set to "././@LongLink" which is typical for LongName
var nameBytes = System.Text.Encoding.ASCII.GetBytes("././@LongLink");
Array.Copy(nameBytes, 0, buffer, 0, nameBytes.Length);

// Set entry type to LongName (offset 156)
buffer[156] = (byte)'L'; // EntryType.LongName

// Set an excessively large size (offset 124, 12 bytes, octal format)
// This simulates a corrupted/misinterpreted size field
// Using "77777777777" (octal) = 8589934591 bytes (~8GB)
var sizeBytes = System.Text.Encoding.ASCII.GetBytes("77777777777 ");
Array.Copy(sizeBytes, 0, buffer, 124, sizeBytes.Length);

// Calculate and set checksum (offset 148, 8 bytes)
// Set checksum field to spaces first
for (var i = 148; i < 156; i++)
{
buffer[i] = (byte)' ';
}

// Calculate checksum
var checksum = 0;
foreach (var b in buffer)
{
checksum += b;
}

var checksumStr = Convert.ToString(checksum, 8).PadLeft(6, '0') + "\0 ";
var checksumBytes = System.Text.Encoding.ASCII.GetBytes(checksumStr);
Array.Copy(checksumBytes, 0, buffer, 148, checksumBytes.Length);

// Create a stream with this malformed header
using var stream = new MemoryStream();
stream.Write(buffer, 0, buffer.Length);
stream.Position = 0;

// Attempt to read this malformed archive
// The InvalidFormatException from the validation gets caught and converted to IncompleteArchiveException
// The important thing is it doesn't cause OutOfMemoryException
Assert.Throws<IncompleteArchiveException>(() =>
{
using var reader = TarReader.Open(stream);
reader.MoveToNextEntry();
});
}
}
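The test above builds a classic ustar header by hand: the name field starts at offset 0, the type flag byte sits at offset 156, the size field is 12 bytes of octal ASCII at offset 124, and the checksum at offset 148 is computed with the checksum bytes treated as spaces. As a minimal illustration of the failure mode it guards against, the hypothetical helper below (not SharpCompress's actual parser; the cap value is an assumption for the sketch) reads the octal size and rejects implausible values before anything is allocated for the LongName data.

using System;
using System.Text;

static class TarSizeField
{
    // Hypothetical sketch: parse the 12-byte octal size field at offset 124
    // and reject implausibly large values before allocating a buffer for it.
    public static long ReadLongNameSize(byte[] header, long maxReasonable = 1024 * 1024)
    {
        var text = Encoding.ASCII.GetString(header, 124, 12).Trim('\0', ' ');
        var size = Convert.ToInt64(text, 8); // TAR stores sizes as octal ASCII
        if (size < 0 || size > maxReasonable)
        {
            throw new InvalidOperationException($"Suspicious LongName size: {size}");
        }
        return size;
    }
}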
125
tests/SharpCompress.Test/Xz/XZBlockAsyncTests.cs
Normal file
@@ -0,0 +1,125 @@
using System.IO;
using System.Text;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Compressors.Xz;
using Xunit;

namespace SharpCompress.Test.Xz;

public class XzBlockAsyncTests : XzTestsBase
{
protected override void Rewind(Stream stream) => stream.Position = 12;

protected override void RewindIndexed(Stream stream) => stream.Position = 12;

private static async Task<byte[]> ReadBytesAsync(XZBlock block, int bytesToRead)
{
var buffer = new byte[bytesToRead];
var read = await block.ReadAsync(buffer, 0, bytesToRead).ConfigureAwait(false);
if (read != bytesToRead)
{
throw new EndOfStreamException();
}

return buffer;
}

[Fact]
public async Task OnFindIndexBlockThrowAsync()
{
var bytes = new byte[] { 0 };
using Stream indexBlockStream = new MemoryStream(bytes);
var xzBlock = new XZBlock(indexBlockStream, CheckType.CRC64, 8);
await Assert.ThrowsAsync<XZIndexMarkerReachedException>(async () =>
{
await ReadBytesAsync(xzBlock, 1).ConfigureAwait(false);
});
}

[Fact]
public async Task CrcIncorrectThrowsAsync()
{
var bytes = (byte[])Compressed.Clone();
bytes[20]++;
using Stream badCrcStream = new MemoryStream(bytes);
Rewind(badCrcStream);
var xzBlock = new XZBlock(badCrcStream, CheckType.CRC64, 8);
var ex = await Assert.ThrowsAsync<InvalidFormatException>(async () =>
{
await ReadBytesAsync(xzBlock, 1).ConfigureAwait(false);
});
Assert.Equal("Block header corrupt", ex.Message);
}

[Fact]
public async Task CanReadMAsync()
{
var xzBlock = new XZBlock(CompressedStream, CheckType.CRC64, 8);
Assert.Equal(
Encoding.ASCII.GetBytes("M"),
await ReadBytesAsync(xzBlock, 1).ConfigureAwait(false)
);
}

[Fact]
public async Task CanReadMaryAsync()
{
var xzBlock = new XZBlock(CompressedStream, CheckType.CRC64, 8);
Assert.Equal(
Encoding.ASCII.GetBytes("M"),
await ReadBytesAsync(xzBlock, 1).ConfigureAwait(false)
);
Assert.Equal(
Encoding.ASCII.GetBytes("a"),
await ReadBytesAsync(xzBlock, 1).ConfigureAwait(false)
);
Assert.Equal(
Encoding.ASCII.GetBytes("ry"),
await ReadBytesAsync(xzBlock, 2).ConfigureAwait(false)
);
}

[Fact]
public async Task CanReadPoemWithStreamReaderAsync()
{
var xzBlock = new XZBlock(CompressedStream, CheckType.CRC64, 8);
var sr = new StreamReader(xzBlock);
Assert.Equal(await sr.ReadToEndAsync().ConfigureAwait(false), Original);
}

[Fact]
public async Task NoopWhenNoPaddingAsync()
{
// CompressedStream's only block has no padding.
var xzBlock = new XZBlock(CompressedStream, CheckType.CRC64, 8);
var sr = new StreamReader(xzBlock);
await sr.ReadToEndAsync().ConfigureAwait(false);
Assert.Equal(0L, CompressedStream.Position % 4L);
}

[Fact]
public async Task SkipsPaddingWhenPresentAsync()
{
// CompressedIndexedStream's first block has 1-byte padding.
var xzBlock = new XZBlock(CompressedIndexedStream, CheckType.CRC64, 8);
var sr = new StreamReader(xzBlock);
await sr.ReadToEndAsync().ConfigureAwait(false);
Assert.Equal(0L, CompressedIndexedStream.Position % 4L);
}

[Fact]
public async Task HandlesPaddingInUnalignedBlockAsync()
{
var compressedUnaligned = new byte[Compressed.Length + 1];
Compressed.CopyTo(compressedUnaligned, 1);
var compressedUnalignedStream = new MemoryStream(compressedUnaligned);
compressedUnalignedStream.Position = 13;

// Compressed's only block has no padding.
var xzBlock = new XZBlock(compressedUnalignedStream, CheckType.CRC64, 8);
var sr = new StreamReader(xzBlock);
await sr.ReadToEndAsync().ConfigureAwait(false);
Assert.Equal(1L, compressedUnalignedStream.Position % 4L);
}
}
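The padding assertions in these block tests (Position % 4L) come from the XZ container rule that block and index records are padded with null bytes up to a four-byte boundary. A one-line restatement of that arithmetic, written independently of SharpCompress internals:

static class XzAlignment
{
    // Padding bytes needed to reach the next 4-byte boundary, per the XZ
    // container format (block and index records are 4-byte aligned).
    public static int PaddingFor(long unpaddedSize) => (int)((4 - (unpaddedSize % 4)) % 4);
}

// Examples: PaddingFor(13) == 3, PaddingFor(16) == 0.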
83
tests/SharpCompress.Test/Xz/XZHeaderAsyncTests.cs
Normal file
@@ -0,0 +1,83 @@
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Compressors.Xz;
using Xunit;

namespace SharpCompress.Test.Xz;

public class XzHeaderAsyncTests : XzTestsBase
{
[Fact]
public async Task ChecksMagicNumberAsync()
{
var bytes = (byte[])Compressed.Clone();
bytes[3]++;
using Stream badMagicNumberStream = new MemoryStream(bytes);
var br = new BinaryReader(badMagicNumberStream);
var header = new XZHeader(br);
var ex = await Assert.ThrowsAsync<InvalidFormatException>(async () =>
{
await header.ProcessAsync().ConfigureAwait(false);
});
Assert.Equal("Invalid XZ Stream", ex.Message);
}

[Fact]
public async Task CorruptHeaderThrowsAsync()
{
var bytes = (byte[])Compressed.Clone();
bytes[8]++;
using Stream badCrcStream = new MemoryStream(bytes);
var br = new BinaryReader(badCrcStream);
var header = new XZHeader(br);
var ex = await Assert.ThrowsAsync<InvalidFormatException>(async () =>
{
await header.ProcessAsync().ConfigureAwait(false);
});
Assert.Equal("Stream header corrupt", ex.Message);
}

[Fact]
public async Task BadVersionIfCrcOkButStreamFlagUnknownAsync()
{
var bytes = (byte[])Compressed.Clone();
byte[] streamFlags = [0x00, 0xF4];
var crc = Crc32.Compute(streamFlags).ToLittleEndianBytes();
streamFlags.CopyTo(bytes, 6);
crc.CopyTo(bytes, 8);
using Stream badFlagStream = new MemoryStream(bytes);
var br = new BinaryReader(badFlagStream);
var header = new XZHeader(br);
var ex = await Assert.ThrowsAsync<InvalidFormatException>(async () =>
{
await header.ProcessAsync().ConfigureAwait(false);
});
Assert.Equal("Unknown XZ Stream Version", ex.Message);
}

[Fact]
public async Task ProcessesBlockCheckTypeAsync()
{
var br = new BinaryReader(CompressedStream);
var header = new XZHeader(br);
await header.ProcessAsync().ConfigureAwait(false);
Assert.Equal(CheckType.CRC64, header.BlockCheckType);
}

[Fact]
public async Task CanCalculateBlockCheckSizeAsync()
{
var br = new BinaryReader(CompressedStream);
var header = new XZHeader(br);
await header.ProcessAsync().ConfigureAwait(false);
Assert.Equal(8, header.BlockCheckSize);
}

[Fact]
public async Task ProcessesStreamHeaderFromFactoryAsync()
{
var header = await XZHeader.FromStreamAsync(CompressedStream).ConfigureAwait(false);
Assert.Equal(CheckType.CRC64, header.BlockCheckType);
}
}
97
tests/SharpCompress.Test/Xz/XZIndexAsyncTests.cs
Normal file
@@ -0,0 +1,97 @@
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Compressors.Xz;
using Xunit;

namespace SharpCompress.Test.Xz;

public class XzIndexAsyncTests : XzTestsBase
{
protected override void RewindEmpty(Stream stream) => stream.Position = 12;

protected override void Rewind(Stream stream) => stream.Position = 356;

protected override void RewindIndexed(Stream stream) => stream.Position = 612;

[Fact]
public void RecordsStreamStartOnInit()
{
using Stream badStream = new MemoryStream([1, 2, 3, 4, 5]);
var br = new BinaryReader(badStream);
var index = new XZIndex(br, false);
Assert.Equal(0, index.StreamStartPosition);
}

[Fact]
public async Task ThrowsIfHasNoIndexMarkerAsync()
{
using Stream badStream = new MemoryStream([1, 2, 3, 4, 5]);
var br = new BinaryReader(badStream);
var index = new XZIndex(br, false);
await Assert.ThrowsAsync<InvalidFormatException>(async () =>
await index.ProcessAsync().ConfigureAwait(false)
);
}

[Fact]
public async Task ReadsNoRecordAsync()
{
var br = new BinaryReader(CompressedEmptyStream);
var index = new XZIndex(br, false);
await index.ProcessAsync().ConfigureAwait(false);
Assert.Equal((ulong)0, index.NumberOfRecords);
}

[Fact]
public async Task ReadsOneRecordAsync()
{
var br = new BinaryReader(CompressedStream);
var index = new XZIndex(br, false);
await index.ProcessAsync().ConfigureAwait(false);
Assert.Equal((ulong)1, index.NumberOfRecords);
}

[Fact]
public async Task ReadsMultipleRecordsAsync()
{
var br = new BinaryReader(CompressedIndexedStream);
var index = new XZIndex(br, false);
await index.ProcessAsync().ConfigureAwait(false);
Assert.Equal((ulong)2, index.NumberOfRecords);
}

[Fact]
public async Task ReadsFirstRecordAsync()
{
var br = new BinaryReader(CompressedStream);
var index = new XZIndex(br, false);
await index.ProcessAsync().ConfigureAwait(false);
Assert.Equal((ulong)OriginalBytes.Length, index.Records[0].UncompressedSize);
}

[Fact]
public async Task SkipsPaddingAsync()
{
// Index with 3-byte padding.
using Stream badStream = new MemoryStream([
0x00,
0x01,
0x10,
0x80,
0x01,
0x00,
0x00,
0x00,
0xB1,
0x01,
0xD9,
0xC9,
0xFF,
]);
var br = new BinaryReader(badStream);
var index = new XZIndex(br, false);
await index.ProcessAsync().ConfigureAwait(false);
Assert.Equal(0L, badStream.Position % 4L);
}
}
@@ -71,9 +71,21 @@ public class XzIndexTests : XzTestsBase
public void SkipsPadding()
{
// Index with 3-byte padding.
using Stream badStream = new MemoryStream(
[0x00, 0x01, 0x10, 0x80, 0x01, 0x00, 0x00, 0x00, 0xB1, 0x01, 0xD9, 0xC9, 0xFF]
);
using Stream badStream = new MemoryStream([
0x00,
0x01,
0x10,
0x80,
0x01,
0x00,
0x00,
0x00,
0xB1,
0x01,
0xD9,
0xC9,
0xFF,
]);
var br = new BinaryReader(badStream);
var index = new XZIndex(br, false);
index.Process();
36
tests/SharpCompress.Test/Xz/XZStreamAsyncTests.cs
Normal file
@@ -0,0 +1,36 @@
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Compressors.Xz;
using Xunit;

namespace SharpCompress.Test.Xz;

public class XzStreamAsyncTests : XzTestsBase
{
[Fact]
public async Task CanReadEmptyStreamAsync()
{
var xz = new XZStream(CompressedEmptyStream);
using var sr = new StreamReader(xz);
var uncompressed = await sr.ReadToEndAsync().ConfigureAwait(false);
Assert.Equal(OriginalEmpty, uncompressed);
}

[Fact]
public async Task CanReadStreamAsync()
{
var xz = new XZStream(CompressedStream);
using var sr = new StreamReader(xz);
var uncompressed = await sr.ReadToEndAsync().ConfigureAwait(false);
Assert.Equal(Original, uncompressed);
}

[Fact]
public async Task CanReadIndexedStreamAsync()
{
var xz = new XZStream(CompressedIndexedStream);
using var sr = new StreamReader(xz);
var uncompressed = await sr.ReadToEndAsync().ConfigureAwait(false);
Assert.Equal(OriginalIndexed, uncompressed);
}
}
441
tests/SharpCompress.Test/Zip/Zip64VersionConsistencyTests.cs
Normal file
@@ -0,0 +1,441 @@
using System;
using System.Buffers.Binary;
using System.IO;
using SharpCompress.Archives;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.Writers;
using SharpCompress.Writers.Zip;
using Xunit;

namespace SharpCompress.Test.Zip;

/// <summary>
/// Tests for verifying version consistency between Local File Header (LFH)
/// and Central Directory File Header (CDFH) when using Zip64.
/// </summary>
public class Zip64VersionConsistencyTests : WriterTests
{
public Zip64VersionConsistencyTests()
: base(ArchiveType.Zip) { }

[Fact]
public void Zip64_Small_File_With_UseZip64_Should_Have_Matching_Versions()
{
// Create a zip with UseZip64=true but with a small file
var filename = Path.Combine(SCRATCH2_FILES_PATH, "zip64_version_test.zip");

if (File.Exists(filename))
{
File.Delete(filename);
}

// Create archive with UseZip64=true
WriterOptions writerOptions = new ZipWriterOptions(CompressionType.Deflate)
{
LeaveStreamOpen = false,
UseZip64 = true,
};

ZipArchive zipArchive = ZipArchive.Create();
zipArchive.AddEntry("empty", new MemoryStream());
zipArchive.SaveTo(filename, writerOptions);

// Now read the raw bytes to verify version consistency
using var fs = File.OpenRead(filename);
using var br = new BinaryReader(fs);

// Read Local File Header
var lfhSignature = br.ReadUInt32();
Assert.Equal(0x04034b50u, lfhSignature); // Local file header signature

var lfhVersion = br.ReadUInt16();

// Skip to Central Directory
// Find Central Directory by searching from the end
fs.Seek(-22, SeekOrigin.End); // Min EOCD size
var eocdSignature = br.ReadUInt32();

if (eocdSignature != 0x06054b50u)
{
// Might have Zip64 EOCD, search backwards
fs.Seek(-100, SeekOrigin.End);
var buffer = new byte[100];
fs.Read(buffer, 0, 100);

// Find EOCD signature
for (int i = buffer.Length - 4; i >= 0; i--)
{
if (BinaryPrimitives.ReadUInt32LittleEndian(buffer.AsSpan(i)) == 0x06054b50u)
{
fs.Seek(-100 + i, SeekOrigin.End);
break;
}
}
}

// Read EOCD
fs.Seek(-22, SeekOrigin.End);
br.ReadUInt32(); // EOCD signature
br.ReadUInt16(); // disk number
br.ReadUInt16(); // disk with central dir
br.ReadUInt16(); // entries on this disk
br.ReadUInt16(); // total entries
br.ReadUInt32(); // central directory size (unused)
var cdOffset = br.ReadUInt32();

// If Zip64, need to read from Zip64 EOCD
if (cdOffset == 0xFFFFFFFF)
{
// Find Zip64 EOCD Locator
fs.Seek(-22 - 20, SeekOrigin.End);
var z64eocdlSig = br.ReadUInt32();
if (z64eocdlSig == 0x07064b50u)
{
br.ReadUInt32(); // disk number
var z64eocdOffset = br.ReadUInt64();
br.ReadUInt32(); // total disks

// Read Zip64 EOCD
fs.Seek((long)z64eocdOffset, SeekOrigin.Begin);
br.ReadUInt32(); // signature
br.ReadUInt64(); // size of EOCD64
br.ReadUInt16(); // version made by
br.ReadUInt16(); // version needed
br.ReadUInt32(); // disk number
br.ReadUInt32(); // disk with CD
br.ReadUInt64(); // entries on disk
br.ReadUInt64(); // total entries
br.ReadUInt64(); // CD size
cdOffset = (uint)br.ReadUInt64(); // CD offset
}
}

// Read Central Directory Header
fs.Seek(cdOffset, SeekOrigin.Begin);
var cdhSignature = br.ReadUInt32();
Assert.Equal(0x02014b50u, cdhSignature); // Central directory header signature

br.ReadUInt16(); // version made by
var cdhVersionNeeded = br.ReadUInt16();

// The versions should match when UseZip64 is true
Assert.Equal(lfhVersion, cdhVersionNeeded);
}

[Fact]
public void Zip64_Small_File_Without_UseZip64_Should_Have_Version_20()
{
// Create a zip without UseZip64
var filename = Path.Combine(SCRATCH2_FILES_PATH, "no_zip64_version_test.zip");

if (File.Exists(filename))
{
File.Delete(filename);
}

// Create archive without UseZip64
WriterOptions writerOptions = new ZipWriterOptions(CompressionType.Deflate)
{
LeaveStreamOpen = false,
UseZip64 = false,
};

ZipArchive zipArchive = ZipArchive.Create();
zipArchive.AddEntry("empty", new MemoryStream());
zipArchive.SaveTo(filename, writerOptions);

// Read the raw bytes
using var fs = File.OpenRead(filename);
using var br = new BinaryReader(fs);

// Read Local File Header version
var lfhSignature = br.ReadUInt32();
Assert.Equal(0x04034b50u, lfhSignature);
var lfhVersion = br.ReadUInt16();

// Read Central Directory Header version
fs.Seek(-22, SeekOrigin.End);
br.ReadUInt32(); // EOCD signature
br.ReadUInt16(); // disk number
br.ReadUInt16(); // disk with central dir
br.ReadUInt16(); // entries on this disk
br.ReadUInt16(); // total entries
br.ReadUInt32(); // CD size
var cdOffset = br.ReadUInt32();

fs.Seek(cdOffset, SeekOrigin.Begin);
var cdhSignature = br.ReadUInt32();
Assert.Equal(0x02014b50u, cdhSignature);
br.ReadUInt16(); // version made by
var cdhVersionNeeded = br.ReadUInt16();

// Both should be version 20 (or less)
Assert.True(lfhVersion <= 20);
Assert.Equal(lfhVersion, cdhVersionNeeded);
}

[Fact]
public void LZMA_Compression_Should_Use_Version_63()
{
// Create a zip with LZMA compression
var filename = Path.Combine(SCRATCH2_FILES_PATH, "lzma_version_test.zip");

if (File.Exists(filename))
{
File.Delete(filename);
}

WriterOptions writerOptions = new ZipWriterOptions(CompressionType.LZMA)
{
LeaveStreamOpen = false,
UseZip64 = false,
};

ZipArchive zipArchive = ZipArchive.Create();
var data = new byte[100];
new Random(42).NextBytes(data);
zipArchive.AddEntry("test.bin", new MemoryStream(data));
zipArchive.SaveTo(filename, writerOptions);

// Read the raw bytes
using var fs = File.OpenRead(filename);
using var br = new BinaryReader(fs);

// Read Local File Header version
var lfhSignature = br.ReadUInt32();
Assert.Equal(0x04034b50u, lfhSignature);
var lfhVersion = br.ReadUInt16();

// Read Central Directory Header version
fs.Seek(-22, SeekOrigin.End);
br.ReadUInt32(); // EOCD signature
br.ReadUInt16(); // disk number
br.ReadUInt16(); // disk with central dir
br.ReadUInt16(); // entries on this disk
br.ReadUInt16(); // total entries
br.ReadUInt32(); // CD size
var cdOffset = br.ReadUInt32();

fs.Seek(cdOffset, SeekOrigin.Begin);
var cdhSignature = br.ReadUInt32();
Assert.Equal(0x02014b50u, cdhSignature);
br.ReadUInt16(); // version made by
var cdhVersionNeeded = br.ReadUInt16();

// Both should be version 63 for LZMA
Assert.Equal(63, lfhVersion);
Assert.Equal(lfhVersion, cdhVersionNeeded);
}

[Fact]
public void PPMd_Compression_Should_Use_Version_63()
{
// Create a zip with PPMd compression
var filename = Path.Combine(SCRATCH2_FILES_PATH, "ppmd_version_test.zip");

if (File.Exists(filename))
{
File.Delete(filename);
}

WriterOptions writerOptions = new ZipWriterOptions(CompressionType.PPMd)
{
LeaveStreamOpen = false,
UseZip64 = false,
};

ZipArchive zipArchive = ZipArchive.Create();
var data = new byte[100];
new Random(42).NextBytes(data);
zipArchive.AddEntry("test.bin", new MemoryStream(data));
zipArchive.SaveTo(filename, writerOptions);

// Read the raw bytes
using var fs = File.OpenRead(filename);
using var br = new BinaryReader(fs);

// Read Local File Header version
var lfhSignature = br.ReadUInt32();
Assert.Equal(0x04034b50u, lfhSignature);
var lfhVersion = br.ReadUInt16();

// Read Central Directory Header version
fs.Seek(-22, SeekOrigin.End);
br.ReadUInt32(); // EOCD signature
br.ReadUInt16(); // disk number
br.ReadUInt16(); // disk with central dir
br.ReadUInt16(); // entries on this disk
br.ReadUInt16(); // total entries
br.ReadUInt32(); // CD size
var cdOffset = br.ReadUInt32();

fs.Seek(cdOffset, SeekOrigin.Begin);
var cdhSignature = br.ReadUInt32();
Assert.Equal(0x02014b50u, cdhSignature);
br.ReadUInt16(); // version made by
var cdhVersionNeeded = br.ReadUInt16();

// Both should be version 63 for PPMd
Assert.Equal(63, lfhVersion);
Assert.Equal(lfhVersion, cdhVersionNeeded);
}

[Fact]
public void Zip64_Multiple_Small_Files_With_UseZip64_Should_Have_Matching_Versions()
{
// Create a zip with UseZip64=true but with multiple small files
var filename = Path.Combine(SCRATCH2_FILES_PATH, "zip64_version_multiple_test.zip");

if (File.Exists(filename))
{
File.Delete(filename);
}

WriterOptions writerOptions = new ZipWriterOptions(CompressionType.Deflate)
{
LeaveStreamOpen = false,
UseZip64 = true,
};

ZipArchive zipArchive = ZipArchive.Create();
for (int i = 0; i < 5; i++)
{
var data = new byte[100];
new Random(i).NextBytes(data);
zipArchive.AddEntry($"file{i}.bin", new MemoryStream(data));
}
zipArchive.SaveTo(filename, writerOptions);

// Verify that all entries have matching versions
using var fs = File.OpenRead(filename);
using var br = new BinaryReader(fs);

// Read all LFH versions
var lfhVersions = new System.Collections.Generic.List<ushort>();
while (true)
{
var sig = br.ReadUInt32();
if (sig == 0x04034b50u) // LFH signature
{
var version = br.ReadUInt16();
lfhVersions.Add(version);

// Skip rest of LFH
br.ReadUInt16(); // flags
br.ReadUInt16(); // compression
br.ReadUInt32(); // mod time
br.ReadUInt32(); // crc
br.ReadUInt32(); // compressed size
br.ReadUInt32(); // uncompressed size
var fnLen = br.ReadUInt16();
var extraLen = br.ReadUInt16();
fs.Seek(fnLen + extraLen, SeekOrigin.Current);

// Skip compressed data by reading compressed size from extra field if zip64
// For simplicity in this test, we'll just find the next signature
var found = false;

while (fs.Position < fs.Length - 4)
{
var b = br.ReadByte();
if (b == 0x50)
{
var nextBytes = br.ReadBytes(3);
if (
(nextBytes[0] == 0x4b && nextBytes[1] == 0x03 && nextBytes[2] == 0x04)
|| // LFH
(nextBytes[0] == 0x4b && nextBytes[1] == 0x01 && nextBytes[2] == 0x02)
) // CDH
{
fs.Seek(-4, SeekOrigin.Current);
found = true;
break;
}
}
}

if (!found)
{
break;
}
}
else if (sig == 0x02014b50u) // CDH signature
{
break; // Reached central directory
}
else
{
break; // Unknown signature
}
}

// Find Central Directory
fs.Seek(-22, SeekOrigin.End);
br.ReadUInt32(); // EOCD signature
br.ReadUInt16(); // disk number
br.ReadUInt16(); // disk with central dir
br.ReadUInt16(); // entries on this disk
var totalEntries = br.ReadUInt16();
br.ReadUInt32(); // CD size
var cdOffset = br.ReadUInt32();

// Check if we need Zip64 EOCD
if (cdOffset == 0xFFFFFFFF)
{
fs.Seek(-22 - 20, SeekOrigin.End);
var z64eocdlSig = br.ReadUInt32();
if (z64eocdlSig == 0x07064b50u)
{
br.ReadUInt32(); // disk number
var z64eocdOffset = br.ReadUInt64();
fs.Seek((long)z64eocdOffset, SeekOrigin.Begin);
br.ReadUInt32(); // signature
br.ReadUInt64(); // size
br.ReadUInt16(); // version made by
br.ReadUInt16(); // version needed
br.ReadUInt32(); // disk number
br.ReadUInt32(); // disk with CD
br.ReadUInt64(); // entries on disk
totalEntries = (ushort)br.ReadUInt64(); // total entries
br.ReadUInt64(); // CD size
cdOffset = (uint)br.ReadUInt64(); // CD offset
}
}

// Read CDH versions
fs.Seek(cdOffset, SeekOrigin.Begin);
var cdhVersions = new System.Collections.Generic.List<ushort>();
for (int i = 0; i < totalEntries; i++)
{
var sig = br.ReadUInt32();
Assert.Equal(0x02014b50u, sig);
br.ReadUInt16(); // version made by
var version = br.ReadUInt16();
cdhVersions.Add(version);

// Skip rest of CDH
br.ReadUInt16(); // flags
br.ReadUInt16(); // compression
br.ReadUInt32(); // mod time
br.ReadUInt32(); // crc
br.ReadUInt32(); // compressed size
br.ReadUInt32(); // uncompressed size
var fnLen = br.ReadUInt16();
var extraLen = br.ReadUInt16();
var commentLen = br.ReadUInt16();
br.ReadUInt16(); // disk number start
br.ReadUInt16(); // internal attributes
br.ReadUInt32(); // external attributes
br.ReadUInt32(); // LFH offset
fs.Seek(fnLen + extraLen + commentLen, SeekOrigin.Current);
}

// Verify all versions match
Assert.Equal(lfhVersions.Count, cdhVersions.Count);
for (int i = 0; i < lfhVersions.Count; i++)
{
Assert.Equal(lfhVersions[i], cdhVersions[i]);
}
}
}
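For reference, the raw reads in these tests follow the standard ZIP layout: a local file header starts with signature 0x04034b50 followed by the two-byte "version needed to extract", the end-of-central-directory record is 22 bytes (when there is no archive comment) with the central-directory offset 16 bytes in, and each central-directory header carries the version needed two bytes after the version made by. The sketch below condenses that walk into one hypothetical helper; it assumes a single-volume archive with no comment and no Zip64 EOCD, unlike the more defensive code in the tests.

using System.IO;

static class ZipVersionProbe
{
    // Minimal sketch: compare "version needed to extract" between the first
    // local file header and the first central directory header.
    public static (ushort lfh, ushort cdfh) ReadFirstEntryVersions(string path)
    {
        using var fs = File.OpenRead(path);
        using var br = new BinaryReader(fs);

        if (br.ReadUInt32() != 0x04034b50u)
            throw new InvalidDataException("Not a local file header");
        var lfhVersion = br.ReadUInt16();

        fs.Seek(-22, SeekOrigin.End); // EOCD position when the comment is empty
        if (br.ReadUInt32() != 0x06054b50u)
            throw new InvalidDataException("EOCD not at the expected position");
        fs.Seek(12, SeekOrigin.Current); // skip disk numbers, entry counts, CD size
        var cdOffset = br.ReadUInt32();

        fs.Seek(cdOffset, SeekOrigin.Begin);
        if (br.ReadUInt32() != 0x02014b50u)
            throw new InvalidDataException("Not a central directory header");
        br.ReadUInt16(); // version made by
        var cdfhVersion = br.ReadUInt16();
        return (lfhVersion, cdfhVersion);
    }
}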
@@ -1,5 +1,7 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Archives;
using SharpCompress.Common;
using SharpCompress.IO;
@@ -397,4 +399,41 @@ public class ZipReaderTests : ReaderTests
Assert.Equal("second.txt", reader.Entry.Key);
Assert.Equal(197, reader.Entry.Size);
}

[Fact]
public void ZipReader_Returns_Same_Entries_As_ZipArchive()
{
// Verifies that ZipReader and ZipArchive return the same entries
// for standard single-volume ZIP files. ZipReader processes LocalEntry
// headers sequentially, while ZipArchive uses DirectoryEntry headers
// from the central directory and seeks to LocalEntry headers for data.
var testFiles = new[] { "Zip.none.zip", "Zip.deflate.zip", "Zip.none.issue86.zip" };

foreach (var testFile in testFiles)
{
var path = Path.Combine(TEST_ARCHIVES_PATH, testFile);

var readerKeys = new List<string>();
using (var stream = File.OpenRead(path))
using (var reader = ZipReader.Open(stream))
{
while (reader.MoveToNextEntry())
{
readerKeys.Add(reader.Entry.Key!);
}
}

var archiveKeys = new List<string>();
using (var archive = Archives.Zip.ZipArchive.Open(path))
{
foreach (var entry in archive.Entries)
{
archiveKeys.Add(entry.Key!);
}
}

Assert.Equal(archiveKeys.Count, readerKeys.Count);
Assert.Equal(archiveKeys.OrderBy(k => k), readerKeys.OrderBy(k => k));
}
}
}
@@ -1,5 +1,7 @@
using System.IO;
using System.Text;
using SharpCompress.Common;
using SharpCompress.Writers;
using Xunit;

namespace SharpCompress.Test.Zip;
@@ -9,6 +11,42 @@ public class ZipWriterTests : WriterTests
public ZipWriterTests()
: base(ArchiveType.Zip) { }

[Fact]
public void Zip_BZip2_Write_EmptyFile()
{
// Test that writing an empty file with BZip2 compression doesn't throw DivideByZeroException
using var memoryStream = new MemoryStream();
var options = new WriterOptions(CompressionType.BZip2)
{
ArchiveEncoding = new ArchiveEncoding { Default = new UTF8Encoding(false) },
};

using (var writer = WriterFactory.Open(memoryStream, ArchiveType.Zip, options))
{
writer.Write("test-folder/zero-byte-file.txt", Stream.Null);
}

Assert.True(memoryStream.Length > 0);
}

[Fact]
public void Zip_BZip2_Write_EmptyFolder()
{
// Test that writing an empty folder entry with BZip2 compression doesn't throw DivideByZeroException
using var memoryStream = new MemoryStream();
var options = new WriterOptions(CompressionType.BZip2)
{
ArchiveEncoding = new ArchiveEncoding { Default = new UTF8Encoding(false) },
};

using (var writer = WriterFactory.Open(memoryStream, ArchiveType.Zip, options))
{
writer.Write("test-empty-folder/", Stream.Null);
}

Assert.True(memoryStream.Length > 0);
}

[Fact]
public void Zip_Deflate_Write() =>
Write(
@@ -4,20 +4,20 @@
".NETFramework,Version=v4.8": {
"AwesomeAssertions": {
"type": "Direct",
"requested": "[9.2.1, )",
"resolved": "9.2.1",
"contentHash": "lbwhyQNXxxEGx4oCbFqNfFy2DsywsvNhN6qoOjY4wwhMgI2L9+YrxjyF/M0io99yrvWV1Cjj12LP2QGcC43Uhw==",
"requested": "[9.3.0, )",
"resolved": "9.3.0",
"contentHash": "8lGLYap2ec2gNLgjf2xKZaKLpQ7j36oJvrYzBVVpNAumqnxRdevqqhEF66qxE92f8y2+zsbQ061DeHG61ZhzaQ==",
"dependencies": {
"System.Threading.Tasks.Extensions": "4.5.4"
}
},
"Microsoft.NET.Test.Sdk": {
"type": "Direct",
"requested": "[18.0.0, )",
"resolved": "18.0.0",
"contentHash": "bvxj2Asb7nT+tqOFFerrhQeEjUYLwx0Poi0Rznu63WbqN+A4uDn1t5NWXfAOOQsF6lpmK6N2v+Vvgso7KWZS7g==",
"requested": "[18.0.1, )",
"resolved": "18.0.1",
"contentHash": "WNpu6vI2rA0pXY4r7NKxCN16XRWl5uHu6qjuyVLoDo6oYEggIQefrMjkRuibQHm/NslIUNCcKftvoWAN80MSAg==",
"dependencies": {
"Microsoft.CodeCoverage": "18.0.0"
"Microsoft.CodeCoverage": "18.0.1"
}
},
"Microsoft.NETFramework.ReferenceAssemblies": {
@@ -29,6 +29,12 @@
"Microsoft.NETFramework.ReferenceAssemblies.net48": "1.0.3"
}
},
"Mono.Posix.NETStandard": {
"type": "Direct",
"requested": "[1.0.0, )",
"resolved": "1.0.0",
"contentHash": "vSN/L1uaVwKsiLa95bYu2SGkF0iY3xMblTfxc8alSziPuVfJpj3geVqHGAA75J7cZkMuKpFVikz82Lo6y6LLdA=="
},
"xunit": {
"type": "Direct",
"requested": "[2.9.3, )",
@@ -51,8 +57,8 @@
},
"Microsoft.CodeCoverage": {
"type": "Transitive",
"resolved": "18.0.0",
"contentHash": "DFPhMrsIofgJ1DDU3ModqqRArDm15/bNl4ecmcuBspZkZ4ONYnCC0R8U27WzK7cYv6r8l6Q/fRmvg7cb+I/dJA=="
"resolved": "18.0.1",
"contentHash": "O+utSr97NAJowIQT/OVp3Lh9QgW/wALVTP4RG1m2AfFP4IyJmJz0ZBmFJUsRQiAPgq6IRC0t8AAzsiPIsaUDEA=="
},
"Microsoft.NETFramework.ReferenceAssemblies.net48": {
"type": "Transitive",
@@ -92,12 +98,17 @@
},
"System.Threading.Tasks.Extensions": {
"type": "Transitive",
"resolved": "4.5.4",
"contentHash": "zteT+G8xuGu6mS+mzDzYXbzS7rd3K6Fjb9RiZlYlJPam2/hU7JCBZBVEcywNuR+oZ1ncTvc/cq0faRr3P01OVg==",
"resolved": "4.6.3",
"contentHash": "7sCiwilJLYbTZELaKnc7RecBBXWXA+xMLQWZKWawBxYjp6DBlSE3v9/UcvKBvr1vv2tTOhipiogM8rRmxlhrVA==",
"dependencies": {
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
"System.Runtime.CompilerServices.Unsafe": "6.1.2"
}
},
"System.ValueTuple": {
"type": "Transitive",
"resolved": "4.6.1",
"contentHash": "+RJT4qaekpZ7DDLhf+LTjq+E48jieKiY9ulJ+BoxKmZblIJfIJT8Ufcaa/clQqnYvWs8jugfGSMu8ylS0caG0w=="
},
"xunit.abstractions": {
"type": "Transitive",
"resolved": "2.0.3",
@@ -141,20 +152,20 @@
"sharpcompress": {
"type": "Project",
"dependencies": {
"Microsoft.Bcl.AsyncInterfaces": "[8.0.0, )",
"Microsoft.Bcl.AsyncInterfaces": "[10.0.0, )",
"System.Buffers": "[4.6.1, )",
"System.Memory": "[4.6.3, )",
"System.Text.Encoding.CodePages": "[8.0.0, )",
"System.Text.Encoding.CodePages": "[10.0.0, )",
"ZstdSharp.Port": "[0.8.6, )"
}
},
"Microsoft.Bcl.AsyncInterfaces": {
"type": "CentralTransitive",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "3WA9q9yVqJp222P3x1wYIGDAkpjAku0TMUaaQV22g6L67AI0LdOIrVS7Ht2vJfLHGSPVuqN94vIr15qn+HEkHw==",
"requested": "[10.0.0, )",
"resolved": "10.0.0",
"contentHash": "vFuwSLj9QJBbNR0NeNO4YVASUbokxs+i/xbuu8B+Fs4FAZg5QaFa6eGrMaRqTzzNI5tAb97T7BhSxtLckFyiRA==",
"dependencies": {
"System.Threading.Tasks.Extensions": "4.5.4"
"System.Threading.Tasks.Extensions": "4.6.3"
}
},
"System.Buffers": {
@@ -176,12 +187,13 @@
},
"System.Text.Encoding.CodePages": {
"type": "CentralTransitive",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "OZIsVplFGaVY90G2SbpgU7EnCoOO5pw1t4ic21dBF3/1omrJFpAGoNAVpPyMVOC90/hvgkGG3VFqR13YgZMQfg==",
"requested": "[10.0.0, )",
"resolved": "10.0.0",
"contentHash": "QLP54mIATaBpjGlsZIxga38VPk1G9js0Kw651B+bvrXi2kSgGZYrxJSpM3whhTZCBK4HEBHX3fzfDQMw7CXHGQ==",
"dependencies": {
"System.Memory": "4.5.5",
"System.Runtime.CompilerServices.Unsafe": "6.0.0"
"System.Memory": "4.6.3",
"System.Runtime.CompilerServices.Unsafe": "6.1.2",
"System.ValueTuple": "4.6.1"
}
},
"ZstdSharp.Port": {
@@ -196,21 +208,21 @@
}
}
},
"net8.0": {
"net10.0": {
"AwesomeAssertions": {
"type": "Direct",
"requested": "[9.2.1, )",
"resolved": "9.2.1",
"contentHash": "lbwhyQNXxxEGx4oCbFqNfFy2DsywsvNhN6qoOjY4wwhMgI2L9+YrxjyF/M0io99yrvWV1Cjj12LP2QGcC43Uhw=="
"requested": "[9.3.0, )",
"resolved": "9.3.0",
"contentHash": "8lGLYap2ec2gNLgjf2xKZaKLpQ7j36oJvrYzBVVpNAumqnxRdevqqhEF66qxE92f8y2+zsbQ061DeHG61ZhzaQ=="
},
"Microsoft.NET.Test.Sdk": {
"type": "Direct",
"requested": "[18.0.0, )",
"resolved": "18.0.0",
"contentHash": "bvxj2Asb7nT+tqOFFerrhQeEjUYLwx0Poi0Rznu63WbqN+A4uDn1t5NWXfAOOQsF6lpmK6N2v+Vvgso7KWZS7g==",
"requested": "[18.0.1, )",
"resolved": "18.0.1",
"contentHash": "WNpu6vI2rA0pXY4r7NKxCN16XRWl5uHu6qjuyVLoDo6oYEggIQefrMjkRuibQHm/NslIUNCcKftvoWAN80MSAg==",
"dependencies": {
"Microsoft.CodeCoverage": "18.0.0",
"Microsoft.TestPlatform.TestHost": "18.0.0"
"Microsoft.CodeCoverage": "18.0.1",
"Microsoft.TestPlatform.TestHost": "18.0.1"
}
},
"Microsoft.NETFramework.ReferenceAssemblies": {
@@ -247,8 +259,8 @@
},
"Microsoft.CodeCoverage": {
"type": "Transitive",
"resolved": "18.0.0",
"contentHash": "DFPhMrsIofgJ1DDU3ModqqRArDm15/bNl4ecmcuBspZkZ4ONYnCC0R8U27WzK7cYv6r8l6Q/fRmvg7cb+I/dJA=="
"resolved": "18.0.1",
"contentHash": "O+utSr97NAJowIQT/OVp3Lh9QgW/wALVTP4RG1m2AfFP4IyJmJz0ZBmFJUsRQiAPgq6IRC0t8AAzsiPIsaUDEA=="
},
"Microsoft.NETFramework.ReferenceAssemblies.net461": {
"type": "Transitive",
@@ -257,18 +269,18 @@
},
"Microsoft.TestPlatform.ObjectModel": {
"type": "Transitive",
"resolved": "18.0.0",
"contentHash": "Al/a99ymb8UdEEh6DKNiaoFn5i8fvX5PdM9LfU9Z/Q8NJrlyHHzF+LRHLbR+t89gRsJ2fFMpwYxgEn3eH1BQwA==",
"resolved": "18.0.1",
"contentHash": "qT/mwMcLF9BieRkzOBPL2qCopl8hQu6A1P7JWAoj/FMu5i9vds/7cjbJ/LLtaiwWevWLAeD5v5wjQJ/l6jvhWQ==",
"dependencies": {
"System.Reflection.Metadata": "8.0.0"
}
},
"Microsoft.TestPlatform.TestHost": {
"type": "Transitive",
"resolved": "18.0.0",
"contentHash": "aAxE8Thr9ZHGrljOYaDeLJqitQi75iE4xeEFn6CEGFirlHSn1KwpKPniuEn6zCLZ90Z3XqNlrC3ZJTuvBov45w==",
"resolved": "18.0.1",
"contentHash": "uDJKAEjFTaa2wHdWlfo6ektyoh+WD4/Eesrwb4FpBFKsLGehhACVnwwTI4qD3FrIlIEPlxdXg3SyrYRIcO+RRQ==",
"dependencies": {
"Microsoft.TestPlatform.ObjectModel": "18.0.0",
"Microsoft.TestPlatform.ObjectModel": "18.0.1",
"Newtonsoft.Json": "13.0.3"
}
},
BIN
tests/TestArchives/Archives/7Zip.encryptedFiles.7z
Normal file
Binary file not shown.
BIN
tests/TestArchives/Archives/Arc.crunched.largefile.arc
Normal file
Binary file not shown.
BIN
tests/TestArchives/Archives/Arc.squashed.largefile.arc
Normal file
Binary file not shown.
BIN
tests/TestArchives/Archives/Arc.squeezed.largefile.arc
Normal file
Binary file not shown.
BIN
tests/TestArchives/Archives/Arc.uncompressed.largefile.arc
Normal file
Binary file not shown.
BIN
tests/TestArchives/Archives/Arj.method1.arj
Normal file
Binary file not shown.
BIN
tests/TestArchives/Archives/Arj.method1.largefile.arj
Normal file
Binary file not shown.
BIN
tests/TestArchives/Archives/Arj.method2.arj
Normal file
Binary file not shown.
BIN
tests/TestArchives/Archives/Arj.method2.largefile.arj
Normal file
Binary file not shown.
BIN
tests/TestArchives/Archives/Arj.method3.arj
Normal file
Binary file not shown.
BIN
tests/TestArchives/Archives/Arj.method3.largefile.arj
Normal file
Binary file not shown.
BIN
tests/TestArchives/Archives/Arj.method4.largefile.arj
Normal file
Binary file not shown.
BIN
tests/TestArchives/Archives/Arj.store.largefile.arj
Normal file
Binary file not shown.
3609
tests/TestArchives/MiscTest/alice29.txt
Normal file
File diff suppressed because it is too large
10059
tests/TestArchives/MiscTest/news.txt
Normal file
File diff suppressed because it is too large