Mirror of https://github.com/adamhathcock/sharpcompress.git (synced 2026-02-04 05:25:00 +00:00)

Compare commits: 0.44.0 ... adam/more- (336 commits)
@@ -307,7 +307,6 @@ dotnet_diagnostic.CS8602.severity = error
|
||||
dotnet_diagnostic.CS8604.severity = error
|
||||
dotnet_diagnostic.CS8618.severity = error
|
||||
dotnet_diagnostic.CS0618.severity = suggestion
|
||||
dotnet_diagnostic.CS1998.severity = error
|
||||
dotnet_diagnostic.CS4014.severity = error
|
||||
dotnet_diagnostic.CS8600.severity = error
|
||||
dotnet_diagnostic.CS8603.severity = error
|
||||
@@ -368,6 +367,9 @@ dotnet_diagnostic.NX0001.severity = error
|
||||
dotnet_diagnostic.NX0002.severity = silent
|
||||
dotnet_diagnostic.NX0003.severity = silent
|
||||
|
||||
dotnet_diagnostic.VSTHRD110.severity = error
|
||||
dotnet_diagnostic.VSTHRD107.severity = error
|
||||
|
||||
##########################################
|
||||
# Styles
|
||||
##########################################
|
||||
|
||||
.gitignore (vendored): 4 changed lines
@@ -4,8 +4,8 @@ _ReSharper.SharpCompress/
|
||||
bin/
|
||||
*.suo
|
||||
*.user
|
||||
TestArchives/Scratch/
|
||||
TestArchives/Scratch2/
|
||||
tests/TestArchives/Scratch/
|
||||
tests/TestArchives/Scratch2/
|
||||
TestResults/
|
||||
*.nupkg
|
||||
packages/*/
|
||||
|
||||
AGENTS.md: 59 changed lines
@@ -14,6 +14,7 @@ SharpCompress is a pure C# compression library supporting multiple archive forma
|
||||
- Follow the existing code style and patterns in the codebase.
|
||||
|
||||
## General Instructions
|
||||
- **Agents should NEVER commit to git** - Agents should stage files and leave committing to the user. Only create commits when the user explicitly requests them.
|
||||
- Make only high confidence suggestions when reviewing code changes.
|
||||
- Write code with good maintainability practices, including comments on why certain design decisions were made.
|
||||
- Handle edge cases and write clear exception handling.
|
||||
@@ -110,7 +111,7 @@ SharpCompress supports multiple archive and compression formats:
|
||||
- **Archive Formats**: Zip, Tar, 7Zip, Rar (read-only)
|
||||
- **Compression**: DEFLATE, BZip2, LZMA/LZMA2, PPMd, ZStandard (decompress only), Deflate64 (decompress only)
|
||||
- **Combined Formats**: Tar.GZip, Tar.BZip2, Tar.LZip, Tar.XZ, Tar.ZStandard
|
||||
- See FORMATS.md for complete format support matrix
|
||||
- See [docs/FORMATS.md](docs/FORMATS.md) for complete format support matrix
|
||||
|
||||
### Stream Handling Rules
|
||||
- **Disposal**: As of version 0.21, SharpCompress closes wrapped streams by default
|
||||
@@ -177,5 +178,59 @@ SharpCompress supports multiple archive and compression formats:
|
||||
2. **Solid archives (Rar, 7Zip)** - Use `ExtractAllEntries()` for best performance, not individual entry extraction (see the sketch after this list)
|
||||
3. **Stream disposal** - Always set `LeaveStreamOpen` explicitly when needed (default is to close)
|
||||
4. **Tar + non-seekable stream** - Must provide file size or it will throw
|
||||
5. **Multi-framework differences** - Some features differ between .NET Framework and modern .NET (e.g., Mono.Posix)
|
||||
6. **Format detection** - Use `ReaderFactory.Open()` for auto-detection, test with actual archive files
|
||||
|
||||
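A minimal sketch of gotcha 2 (solid-archive extraction), using the `OpenArchive`/`ExtractAllEntries` names as they appear in these docs; treat it as illustrative rather than a verbatim sample:

```csharp
using SharpCompress.Archives.Rar;
using SharpCompress.Common;
using SharpCompress.Readers;

// Solid archives decompress sequentially, so pull everything in one pass
// via ExtractAllEntries() instead of extracting entries individually.
using (var archive = RarArchive.OpenArchive("archive.rar"))
using (var reader = archive.ExtractAllEntries())
{
    while (reader.MoveToNextEntry())
    {
        if (!reader.Entry.IsDirectory)
        {
            reader.WriteEntryToDirectory(
                @"C:\output",
                new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
            );
        }
    }
}
```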
### Async Struct-Copy Bug in LZMA RangeCoder
|
||||
|
||||
When implementing async methods on mutable `struct` types (like `BitEncoder` and `BitDecoder` in the LZMA RangeCoder), be aware that the async state machine copies the struct when `await` is encountered. This means mutations to struct fields after the `await` point may not persist back to the original struct stored in arrays or fields.
|
||||
|
||||
**The Bug:**
|
||||
```csharp
|
||||
// BAD: async method on mutable struct
|
||||
public async ValueTask<uint> DecodeAsync(Decoder decoder, CancellationToken cancellationToken = default)
|
||||
{
|
||||
var newBound = (decoder._range >> K_NUM_BIT_MODEL_TOTAL_BITS) * _prob;
|
||||
if (decoder._code < newBound)
|
||||
{
|
||||
decoder._range = newBound;
|
||||
_prob += (K_BIT_MODEL_TOTAL - _prob) >> K_NUM_MOVE_BITS; // Mutates _prob
|
||||
await decoder.Normalize2Async(cancellationToken).ConfigureAwait(false); // Struct gets copied here
|
||||
return 0; // Original _prob update may be lost
|
||||
}
|
||||
// ...
|
||||
}
|
||||
```
|
||||
|
||||
**The Fix:**
|
||||
Refactor async methods on mutable structs to perform all struct mutations synchronously before any `await`, or use a helper method to separate the await from the struct mutation:
|
||||
|
||||
```csharp
|
||||
// GOOD: struct mutations happen synchronously, await is conditional
|
||||
public ValueTask<uint> DecodeAsync(Decoder decoder, CancellationToken cancellationToken = default)
|
||||
{
|
||||
var newBound = (decoder._range >> K_NUM_BIT_MODEL_TOTAL_BITS) * _prob;
|
||||
if (decoder._code < newBound)
|
||||
{
|
||||
decoder._range = newBound;
|
||||
_prob += (K_BIT_MODEL_TOTAL - _prob) >> K_NUM_MOVE_BITS; // All mutations complete
|
||||
return DecodeAsyncHelper(decoder.Normalize2Async(cancellationToken), 0); // Await in helper
|
||||
}
|
||||
decoder._range -= newBound;
|
||||
decoder._code -= newBound;
|
||||
_prob -= (_prob) >> K_NUM_MOVE_BITS; // All mutations complete
|
||||
return DecodeAsyncHelper(decoder.Normalize2Async(cancellationToken), 1); // Await in helper
|
||||
}
|
||||
|
||||
private static async ValueTask<uint> DecodeAsyncHelper(ValueTask normalizeTask, uint result)
|
||||
{
|
||||
await normalizeTask.ConfigureAwait(false);
|
||||
return result;
|
||||
}
|
||||
```
|
||||
|
||||
**Why This Matters:**
|
||||
In LZMA, the `BitEncoder` and `BitDecoder` structs maintain adaptive probability models in their `_prob` field. When these structs are stored in arrays (e.g., `_models[m]`), the async state machine copy breaks the adaptive model, causing incorrect bit decoding and eventually `DataErrorException` exceptions.
|
||||
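For reference, the copy-on-await behavior can be reproduced in isolation. The sketch below uses a hypothetical `Counter` struct (not a SharpCompress type): the compiler-generated state machine stores a copy of the struct, so the field write inside the async method never reaches the instance held in the array.

```csharp
using System;
using System.Threading.Tasks;

struct Counter
{
    public int Value;

    // BAD: the async state machine captures a copy of 'this',
    // so this increment mutates the copy, not the array element.
    public async Task IncrementAsync()
    {
        await Task.Yield();
        Value++;
    }
}

class Program
{
    static async Task Main()
    {
        var counters = new Counter[1];
        await counters[0].IncrementAsync();
        Console.WriteLine(counters[0].Value); // prints 0, not 1
    }
}
```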
|
||||
**Related Files:**
|
||||
- `src/SharpCompress/Compressors/LZMA/RangeCoder/RangeCoderBit.Async.cs` - Fixed
|
||||
- `src/SharpCompress/Compressors/LZMA/RangeCoder/RangeCoderBitTree.Async.cs` - Uses readonly structs, so this pattern doesn't apply
|
||||
|
||||
@@ -12,5 +12,6 @@
|
||||
<RunAnalyzersDuringBuild>False</RunAnalyzersDuringBuild>
|
||||
<ManagePackageVersionsCentrally>true</ManagePackageVersionsCentrally>
|
||||
<RestorePackagesWithLockFile>true</RestorePackagesWithLockFile>
|
||||
<CentralPackageTransitivePinningEnabled>true</CentralPackageTransitivePinningEnabled>
|
||||
</PropertyGroup>
|
||||
</Project>
|
||||
|
||||
@@ -5,16 +5,20 @@
|
||||
<PackageVersion Include="Glob" Version="1.1.9" />
|
||||
<PackageVersion Include="JetBrains.Profiler.SelfApi" Version="2.5.15" />
|
||||
<PackageVersion Include="Microsoft.Bcl.AsyncInterfaces" Version="10.0.0" />
|
||||
<PackageVersion Include="Microsoft.NET.ILLink.Task" Version="10.0.0" />
|
||||
<PackageVersion Include="Microsoft.NET.Test.Sdk" Version="18.0.1" />
|
||||
<PackageVersion Include="Mono.Posix.NETStandard" Version="1.0.0" />
|
||||
<PackageVersion Include="SimpleExec" Version="13.0.0" />
|
||||
<PackageVersion Include="System.Text.Encoding.CodePages" Version="10.0.0" />
|
||||
<PackageVersion Include="System.Buffers" Version="4.6.1" />
|
||||
<PackageVersion Include="System.Memory" Version="4.6.3" />
|
||||
<PackageVersion Include="xunit" Version="2.9.3" />
|
||||
<PackageVersion Include="xunit.v3" Version="3.2.1" />
|
||||
<PackageVersion Include="xunit.runner.visualstudio" Version="3.1.5" />
|
||||
<PackageVersion Include="Microsoft.NET.ILLink.Tasks" Version="10.0.0" />
|
||||
<PackageVersion Include="Microsoft.SourceLink.GitHub" Version="8.0.0" />
|
||||
<PackageVersion Include="Microsoft.NETFramework.ReferenceAssemblies" Version="1.0.3" />
|
||||
<GlobalPackageReference Include="Microsoft.SourceLink.GitHub" Version="8.0.0" />
|
||||
<GlobalPackageReference Include="Microsoft.NETFramework.ReferenceAssemblies" Version="1.0.3" />
|
||||
<GlobalPackageReference
|
||||
Include="Microsoft.VisualStudio.Threading.Analyzers"
|
||||
Version="17.14.15"
|
||||
/>
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
|
||||
@@ -4,7 +4,7 @@ SharpCompress is a compression library in pure C# for .NET Framework 4.8, .NET 8
|
||||
|
||||
The major feature is support for non-seekable streams so large files can be processed on the fly (i.e. download stream).
|
||||
|
||||
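As an illustrative sketch of that on-the-fly scenario (assuming the `ReaderFactory.OpenReader` name used elsewhere in these docs and a placeholder URL), an HTTP response body, which is not seekable, can be fed straight to the forward-only reader:

```csharp
using System.Net.Http;
using SharpCompress.Common;
using SharpCompress.Readers;

using var http = new HttpClient();
// The response body is a non-seekable stream; the Reader API consumes it forward-only.
using var download = await http.GetStreamAsync("https://example.com/archive.tar.gz");
using var reader = ReaderFactory.OpenReader(download);
while (reader.MoveToNextEntry())
{
    if (!reader.Entry.IsDirectory)
    {
        reader.WriteEntryToDirectory(
            @"C:\output",
            new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
        );
    }
}
```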
**NEW:** All I/O operations now support async/await for improved performance and scalability. See the [USAGE.md](USAGE.md#async-examples) for examples.
|
||||
**NEW:** All I/O operations now support async/await for improved performance and scalability. See the [USAGE.md](docs/USAGE.md#async-examples) for examples.
|
||||
|
||||
GitHub Actions Build -
|
||||
[Build status](https://github.com/adamhathcock/sharpcompress/actions/workflows/dotnetcore.yml)
|
||||
@@ -14,7 +14,7 @@ GitHub Actions Build -
|
||||
|
||||
Post Issues on Github!
|
||||
|
||||
Check the [Supported Formats](FORMATS.md) and [Basic Usage.](USAGE.md)
|
||||
Check the [Supported Formats](docs/FORMATS.md) and [Basic Usage.](docs/USAGE.md)
|
||||
|
||||
## Recommended Formats
|
||||
|
||||
|
||||
@@ -18,12 +18,11 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Config", "Config", "{CDB425
|
||||
Directory.Build.props = Directory.Build.props
|
||||
global.json = global.json
|
||||
.editorconfig = .editorconfig
|
||||
.gitignore = .gitignore
|
||||
Directory.Packages.props = Directory.Packages.props
|
||||
NuGet.config = NuGet.config
|
||||
.github\workflows\nuget-release.yml = .github\workflows\nuget-release.yml
|
||||
USAGE.md = USAGE.md
|
||||
README.md = README.md
|
||||
FORMATS.md = FORMATS.md
|
||||
AGENTS.md = AGENTS.md
|
||||
EndProjectSection
|
||||
EndProject
|
||||
|
||||
@@ -230,7 +230,7 @@ static async Task<(string version, bool isPrerelease)> GetVersion()
|
||||
}
|
||||
else
|
||||
{
|
||||
// Not tagged - create prerelease version based on next minor version
|
||||
// Not tagged - create prerelease version
|
||||
var allTags = (await GetGitOutput("tag", "--list"))
|
||||
.Split('\n', StringSplitOptions.RemoveEmptyEntries)
|
||||
.Where(tag => Regex.IsMatch(tag.Trim(), @"^\d+\.\d+\.\d+$"))
|
||||
@@ -240,8 +240,22 @@ static async Task<(string version, bool isPrerelease)> GetVersion()
|
||||
var lastTag = allTags.OrderBy(tag => Version.Parse(tag)).LastOrDefault() ?? "0.0.0";
|
||||
var lastVersion = Version.Parse(lastTag);
|
||||
|
||||
// Increment minor version for next release
|
||||
var nextVersion = new Version(lastVersion.Major, lastVersion.Minor + 1, 0);
|
||||
// Determine version increment based on branch
|
||||
var currentBranch = await GetCurrentBranch();
|
||||
Version nextVersion;
|
||||
|
||||
if (currentBranch == "release")
|
||||
{
|
||||
// Release branch: increment patch version
|
||||
nextVersion = new Version(lastVersion.Major, lastVersion.Minor, lastVersion.Build + 1);
|
||||
Console.WriteLine($"Building prerelease for release branch (patch increment)");
|
||||
}
|
||||
else
|
||||
{
|
||||
// Master or other branches: increment minor version
|
||||
nextVersion = new Version(lastVersion.Major, lastVersion.Minor + 1, 0);
|
||||
Console.WriteLine($"Building prerelease for {currentBranch} branch (minor increment)");
|
||||
}
|
||||
|
||||
// Use commit count since the last version tag if available; otherwise, fall back to total count
|
||||
var revListArgs = allTags.Any() ? $"--count {lastTag}..HEAD" : "--count HEAD";
|
||||
@@ -253,6 +267,28 @@ static async Task<(string version, bool isPrerelease)> GetVersion()
|
||||
}
|
||||
}
|
||||
|
||||
static async Task<string> GetCurrentBranch()
|
||||
{
|
||||
// In GitHub Actions, GITHUB_REF_NAME contains the branch name
|
||||
var githubRefName = Environment.GetEnvironmentVariable("GITHUB_REF_NAME");
|
||||
if (!string.IsNullOrEmpty(githubRefName))
|
||||
{
|
||||
return githubRefName;
|
||||
}
|
||||
|
||||
// Fallback to git command for local builds
|
||||
try
|
||||
{
|
||||
var (output, _) = await ReadAsync("git", "branch --show-current");
|
||||
return output.Trim();
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Console.WriteLine($"Warning: Could not determine current branch: {ex.Message}");
|
||||
return "unknown";
|
||||
}
|
||||
}
|
||||
|
||||
static async Task<string> GetGitOutput(string command, string args)
|
||||
{
|
||||
try
|
||||
|
||||
@@ -14,11 +14,51 @@
|
||||
"resolved": "1.1.9",
|
||||
"contentHash": "AfK5+ECWYTP7G3AAdnU8IfVj+QpGjrh9GC2mpdcJzCvtQ4pnerAGwHsxJ9D4/RnhDUz2DSzd951O/lQjQby2Sw=="
|
||||
},
|
||||
"Microsoft.NETFramework.ReferenceAssemblies": {
|
||||
"type": "Direct",
|
||||
"requested": "[1.0.3, )",
|
||||
"resolved": "1.0.3",
|
||||
"contentHash": "vUc9Npcs14QsyOD01tnv/m8sQUnGTGOw1BCmKcv77LBJY7OxhJ+zJF7UD/sCL3lYNFuqmQEVlkfS4Quif6FyYg==",
|
||||
"dependencies": {
|
||||
"Microsoft.NETFramework.ReferenceAssemblies.net461": "1.0.3"
|
||||
}
|
||||
},
|
||||
"Microsoft.SourceLink.GitHub": {
|
||||
"type": "Direct",
|
||||
"requested": "[8.0.0, )",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==",
|
||||
"dependencies": {
|
||||
"Microsoft.Build.Tasks.Git": "8.0.0",
|
||||
"Microsoft.SourceLink.Common": "8.0.0"
|
||||
}
|
||||
},
|
||||
"Microsoft.VisualStudio.Threading.Analyzers": {
|
||||
"type": "Direct",
|
||||
"requested": "[17.14.15, )",
|
||||
"resolved": "17.14.15",
|
||||
"contentHash": "mXQPJsbuUD2ydq4/ffd8h8tSOFCXec+2xJOVNCvXjuMOq/+5EKHq3D2m2MC2+nUaXeFMSt66VS/J4HdKBixgcw=="
|
||||
},
|
||||
"SimpleExec": {
|
||||
"type": "Direct",
|
||||
"requested": "[13.0.0, )",
|
||||
"resolved": "13.0.0",
|
||||
"contentHash": "zcCR1pupa1wI1VqBULRiQKeHKKZOuJhi/K+4V5oO+rHJZlaOD53ViFo1c3PavDoMAfSn/FAXGAWpPoF57rwhYg=="
|
||||
},
|
||||
"Microsoft.Build.Tasks.Git": {
|
||||
"type": "Transitive",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
|
||||
},
|
||||
"Microsoft.NETFramework.ReferenceAssemblies.net461": {
|
||||
"type": "Transitive",
|
||||
"resolved": "1.0.3",
|
||||
"contentHash": "AmOJZwCqnOCNp6PPcf9joyogScWLtwy0M1WkqfEQ0M9nYwyDD7EX9ZjscKS5iYnyvteX7kzSKFCKt9I9dXA6mA=="
|
||||
},
|
||||
"Microsoft.SourceLink.Common": {
|
||||
"type": "Transitive",
|
||||
"resolved": "8.0.0",
|
||||
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
docs/API.md (new file): 512 lines
@@ -0,0 +1,512 @@
|
||||
# API Quick Reference
|
||||
|
||||
Quick reference for commonly used SharpCompress APIs.
|
||||
|
||||
## Factory Methods
|
||||
|
||||
### Opening Archives
|
||||
|
||||
```csharp
|
||||
// Auto-detect format
|
||||
using (var reader = ReaderFactory.OpenReader(stream))
|
||||
{
|
||||
// Works with Zip, Tar, GZip, Rar, 7Zip, etc.
|
||||
}
|
||||
|
||||
// Specific format - Archive API
|
||||
using (var archive = ZipArchive.OpenArchive("file.zip"))
|
||||
using (var archive = TarArchive.OpenArchive("file.tar"))
|
||||
using (var archive = RarArchive.OpenArchive("file.rar"))
|
||||
using (var archive = SevenZipArchive.OpenArchive("file.7z"))
|
||||
using (var archive = GZipArchive.OpenArchive("file.gz"))
|
||||
|
||||
// With options
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
Password = "password",
|
||||
LeaveStreamOpen = true,
|
||||
ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(932) }
|
||||
};
|
||||
using (var archive = ZipArchive.OpenArchive("encrypted.zip", options))
|
||||
```
|
||||
|
||||
### Creating Archives
|
||||
|
||||
```csharp
|
||||
// Writer Factory
|
||||
using (var writer = WriterFactory.OpenWriter(stream, ArchiveType.Zip, CompressionType.Deflate))
|
||||
{
|
||||
// Write entries
|
||||
}
|
||||
|
||||
// Specific writer
|
||||
using (var archive = ZipArchive.CreateArchive())
|
||||
using (var archive = TarArchive.CreateArchive())
|
||||
using (var archive = GZipArchive.CreateArchive())
|
||||
|
||||
// With options
|
||||
var options = new WriterOptions(CompressionType.Deflate)
|
||||
{
|
||||
CompressionLevel = 9,
|
||||
LeaveStreamOpen = false
|
||||
};
|
||||
using (var archive = ZipArchive.CreateArchive())
|
||||
{
|
||||
archive.SaveTo("output.zip", options);
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Archive API Methods
|
||||
|
||||
### Reading/Extracting
|
||||
|
||||
```csharp
|
||||
using (var archive = ZipArchive.OpenArchive("file.zip"))
|
||||
{
|
||||
// Get all entries
|
||||
IEnumerable<IArchiveEntry> entries = archive.Entries;
|
||||
|
||||
// Find specific entry
|
||||
var entry = archive.Entries.FirstOrDefault(e => e.Key == "file.txt");
|
||||
|
||||
// Extract all
|
||||
archive.WriteToDirectory(@"C:\output", new ExtractionOptions
|
||||
{
|
||||
ExtractFullPath = true,
|
||||
Overwrite = true
|
||||
});
|
||||
|
||||
// Extract single entry
|
||||
var entry = archive.Entries.First();
|
||||
entry.WriteToFile(@"C:\output\file.txt");
|
||||
entry.WriteToFile(@"C:\output\file.txt", new ExtractionOptions { Overwrite = true });
|
||||
|
||||
// Get entry stream
|
||||
using (var stream = entry.OpenEntryStream())
|
||||
{
|
||||
stream.CopyTo(outputStream);
|
||||
}
|
||||
}
|
||||
|
||||
// Async extraction (requires IAsyncArchive)
|
||||
using (var asyncArchive = await ZipArchive.OpenAsyncArchive("file.zip"))
|
||||
{
|
||||
await asyncArchive.WriteToDirectoryAsync(
|
||||
@"C:\output",
|
||||
new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
|
||||
cancellationToken: cancellationToken
|
||||
);
|
||||
}
|
||||
using (var stream = await entry.OpenEntryStreamAsync(cancellationToken))
|
||||
{
|
||||
// ...
|
||||
}
|
||||
```
|
||||
|
||||
### Entry Properties
|
||||
|
||||
```csharp
|
||||
foreach (var entry in archive.Entries)
|
||||
{
|
||||
string name = entry.Key; // Entry name/path
|
||||
long size = entry.Size; // Uncompressed size
|
||||
long compressedSize = entry.CompressedSize;
|
||||
bool isDir = entry.IsDirectory;
|
||||
DateTime? modTime = entry.LastModifiedTime;
|
||||
CompressionType compression = entry.CompressionType;
|
||||
}
|
||||
```
|
||||
|
||||
### Creating Archives
|
||||
|
||||
```csharp
|
||||
using (var archive = ZipArchive.CreateArchive())
|
||||
{
|
||||
// Add file
|
||||
archive.AddEntry("file.txt", @"C:\source\file.txt");
|
||||
|
||||
// Add multiple files
|
||||
archive.AddAllFromDirectory(@"C:\source");
|
||||
archive.AddAllFromDirectory(@"C:\source", "*.txt"); // Pattern
|
||||
|
||||
// Save to file
|
||||
archive.SaveTo("output.zip", CompressionType.Deflate);
|
||||
|
||||
// Save to stream
|
||||
archive.SaveTo(outputStream, new WriterOptions(CompressionType.Deflate)
|
||||
{
|
||||
CompressionLevel = 9,
|
||||
LeaveStreamOpen = true
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Reader API Methods
|
||||
|
||||
### Forward-Only Reading
|
||||
|
||||
```csharp
|
||||
using (var stream = File.OpenRead("file.zip"))
|
||||
using (var reader = ReaderFactory.OpenReader(stream))
|
||||
{
|
||||
while (reader.MoveToNextEntry())
|
||||
{
|
||||
IArchiveEntry entry = reader.Entry;
|
||||
|
||||
if (!entry.IsDirectory)
|
||||
{
|
||||
// Extract entry
|
||||
reader.WriteEntryToDirectory(@"C:\output");
|
||||
reader.WriteEntryToFile(@"C:\output\file.txt");
|
||||
|
||||
// Or get stream
|
||||
using (var entryStream = reader.OpenEntryStream())
|
||||
{
|
||||
entryStream.CopyTo(outputStream);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Async variants (use OpenAsyncReader to get IAsyncReader)
|
||||
using (var stream = File.OpenRead("file.zip"))
|
||||
using (var reader = await ReaderFactory.OpenAsyncReader(stream))
|
||||
{
|
||||
while (await reader.MoveToNextEntryAsync())
|
||||
{
|
||||
await reader.WriteEntryToFileAsync(
|
||||
@"C:\output\" + reader.Entry.Key,
|
||||
cancellationToken: cancellationToken
|
||||
);
|
||||
}
|
||||
|
||||
// Async extraction of all entries
|
||||
await reader.WriteAllToDirectoryAsync(
|
||||
@"C:\output",
|
||||
new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
|
||||
cancellationToken
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Writer API Methods
|
||||
|
||||
### Creating Archives (Streaming)
|
||||
|
||||
```csharp
|
||||
using (var stream = File.Create("output.zip"))
|
||||
using (var writer = WriterFactory.OpenWriter(stream, ArchiveType.Zip, CompressionType.Deflate))
|
||||
{
|
||||
// Write single file
|
||||
using (var fileStream = File.OpenRead("source.txt"))
|
||||
{
|
||||
writer.Write("entry.txt", fileStream, DateTime.Now);
|
||||
}
|
||||
|
||||
// Write directory
|
||||
writer.WriteAll("C:\\source", "*", SearchOption.AllDirectories);
|
||||
writer.WriteAll("C:\\source", "*.txt", SearchOption.TopDirectoryOnly);
|
||||
|
||||
// Async variants
|
||||
using (var fileStream = File.OpenRead("source.txt"))
|
||||
{
|
||||
await writer.WriteAsync("entry.txt", fileStream, DateTime.Now, cancellationToken);
|
||||
}
|
||||
|
||||
await writer.WriteAllAsync("C:\\source", "*", SearchOption.AllDirectories, cancellationToken);
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Common Options
|
||||
|
||||
### ReaderOptions
|
||||
|
||||
```csharp
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
Password = "password", // For encrypted archives
|
||||
LeaveStreamOpen = true, // Don't close wrapped stream
|
||||
ArchiveEncoding = new ArchiveEncoding // Custom character encoding
|
||||
{
|
||||
Default = Encoding.GetEncoding(932)
|
||||
}
|
||||
};
|
||||
using (var archive = ZipArchive.OpenArchive("file.zip", options))
|
||||
{
|
||||
// ...
|
||||
}
|
||||
```
|
||||
|
||||
### WriterOptions
|
||||
|
||||
```csharp
|
||||
var options = new WriterOptions(CompressionType.Deflate)
|
||||
{
|
||||
CompressionLevel = 9, // 0-9 for Deflate
|
||||
LeaveStreamOpen = true, // Don't close stream
|
||||
};
|
||||
archive.SaveTo("output.zip", options);
|
||||
```
|
||||
|
||||
### ExtractionOptions
|
||||
|
||||
```csharp
|
||||
var options = new ExtractionOptions
|
||||
{
|
||||
ExtractFullPath = true, // Recreate directory structure
|
||||
Overwrite = true, // Overwrite existing files
|
||||
PreserveFileTime = true // Keep original timestamps
|
||||
};
|
||||
archive.WriteToDirectory(@"C:\output", options);
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Compression Types
|
||||
|
||||
### Available Compressions
|
||||
|
||||
```csharp
|
||||
// For creating archives
|
||||
CompressionType.None // No compression (store)
|
||||
CompressionType.Deflate // DEFLATE (default for ZIP/GZip)
|
||||
CompressionType.Deflate64 // Deflate64
|
||||
CompressionType.BZip2 // BZip2
|
||||
CompressionType.LZMA // LZMA (for 7Zip, LZip, XZ)
|
||||
CompressionType.PPMd // PPMd (for ZIP)
|
||||
CompressionType.Rar // RAR compression (read-only)
|
||||
CompressionType.ZStandard // ZStandard
|
||||
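// Note: the three entries below are ArchiveType values for legacy read-only formats, not CompressionType members.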
ArchiveType.Arc
|
||||
ArchiveType.Arj
|
||||
ArchiveType.Ace
|
||||
|
||||
// For Tar archives with compression
|
||||
// Use WriterFactory to create compressed tar archives
|
||||
using (var writer = WriterFactory.OpenWriter(stream, ArchiveType.Tar, CompressionType.GZip)) // Tar.GZip
|
||||
using (var writer = WriterFactory.OpenWriter(stream, ArchiveType.Tar, CompressionType.BZip2)) // Tar.BZip2
|
||||
```
|
||||
|
||||
### Archive Types
|
||||
|
||||
```csharp
|
||||
ArchiveType.Zip
|
||||
ArchiveType.Tar
|
||||
ArchiveType.GZip
|
||||
ArchiveType.BZip2
|
||||
ArchiveType.Rar
|
||||
ArchiveType.SevenZip
|
||||
ArchiveType.XZ
|
||||
ArchiveType.ZStandard
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Patterns & Examples
|
||||
|
||||
### Extract with Error Handling
|
||||
|
||||
```csharp
|
||||
try
|
||||
{
|
||||
using (var archive = ZipArchive.OpenArchive("archive.zip",
|
||||
new ReaderOptions { Password = "password" }))
|
||||
{
|
||||
archive.WriteToDirectory(@"C:\output", new ExtractionOptions
|
||||
{
|
||||
ExtractFullPath = true,
|
||||
Overwrite = true
|
||||
});
|
||||
}
|
||||
}
|
||||
catch (PasswordRequiredException)
|
||||
{
|
||||
Console.WriteLine("Password required");
|
||||
}
|
||||
catch (InvalidArchiveException)
|
||||
{
|
||||
Console.WriteLine("Archive is invalid");
|
||||
}
|
||||
catch (SharpCompressException ex)
|
||||
{
|
||||
Console.WriteLine($"Error: {ex.Message}");
|
||||
}
|
||||
```
|
||||
|
||||
### Extract with Progress
|
||||
|
||||
```csharp
|
||||
var progress = new Progress<ProgressReport>(report =>
|
||||
{
|
||||
Console.WriteLine($"Extracting {report.EntryPath}: {report.PercentComplete}%");
|
||||
});
|
||||
|
||||
var options = new ReaderOptions { Progress = progress };
|
||||
using (var archive = ZipArchive.OpenArchive("archive.zip", options))
|
||||
{
|
||||
archive.WriteToDirectory(@"C:\output");
|
||||
}
|
||||
```
|
||||
|
||||
### Async Extract with Cancellation
|
||||
|
||||
```csharp
|
||||
var cts = new CancellationTokenSource();
|
||||
cts.CancelAfter(TimeSpan.FromMinutes(5));
|
||||
|
||||
try
|
||||
{
|
||||
using (var archive = await ZipArchive.OpenAsyncArchive("archive.zip"))
|
||||
{
|
||||
await archive.WriteToDirectoryAsync(
|
||||
@"C:\output",
|
||||
new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
|
||||
cancellationToken: cts.Token
|
||||
);
|
||||
}
|
||||
}
|
||||
catch (OperationCanceledException)
|
||||
{
|
||||
Console.WriteLine("Extraction cancelled");
|
||||
}
|
||||
```
|
||||
|
||||
### Create with Custom Compression
|
||||
|
||||
```csharp
|
||||
using (var archive = ZipArchive.CreateArchive())
|
||||
{
|
||||
archive.AddAllFromDirectory(@"D:\source");
|
||||
|
||||
// Fastest
|
||||
archive.SaveTo("fast.zip", new WriterOptions(CompressionType.Deflate)
|
||||
{
|
||||
CompressionLevel = 1
|
||||
});
|
||||
|
||||
// Balanced (default)
|
||||
archive.SaveTo("normal.zip", CompressionType.Deflate);
|
||||
|
||||
// Best compression
|
||||
archive.SaveTo("best.zip", new WriterOptions(CompressionType.Deflate)
|
||||
{
|
||||
CompressionLevel = 9
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
### Stream Processing (No File I/O)
|
||||
|
||||
```csharp
|
||||
using (var outputStream = new MemoryStream())
|
||||
using (var archive = ZipArchive.CreateArchive())
|
||||
{
|
||||
// Add content from memory
|
||||
using (var contentStream = new MemoryStream(Encoding.UTF8.GetBytes("Hello")))
|
||||
{
|
||||
archive.AddEntry("file.txt", contentStream);
|
||||
}
|
||||
|
||||
// Save to memory
|
||||
archive.SaveTo(outputStream, CompressionType.Deflate);
|
||||
|
||||
// Get bytes
|
||||
byte[] archiveBytes = outputStream.ToArray();
|
||||
}
|
||||
```
|
||||
|
||||
### Extract Specific Files
|
||||
|
||||
```csharp
|
||||
using (var archive = ZipArchive.OpenArchive("archive.zip"))
|
||||
{
|
||||
var filesToExtract = new[] { "file1.txt", "file2.txt" };
|
||||
|
||||
foreach (var entry in archive.Entries.Where(e => filesToExtract.Contains(e.Key)))
|
||||
{
|
||||
entry.WriteToFile(@"C:\output\" + entry.Key);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### List Archive Contents
|
||||
|
||||
```csharp
|
||||
using (var archive = ZipArchive.OpenArchive("archive.zip"))
|
||||
{
|
||||
foreach (var entry in archive.Entries)
|
||||
{
|
||||
if (entry.IsDirectory)
|
||||
Console.WriteLine($"[DIR] {entry.Key}");
|
||||
else
|
||||
Console.WriteLine($"[FILE] {entry.Key} ({entry.Size} bytes)");
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Common Mistakes
|
||||
|
||||
### ✗ Wrong - Stream not disposed
|
||||
|
||||
```csharp
|
||||
var stream = File.OpenRead("archive.zip");
|
||||
var archive = ZipArchive.OpenArchive(stream);
|
||||
archive.WriteToDirectory(@"C:\output");
|
||||
// stream not disposed - leaked resource
|
||||
```
|
||||
|
||||
### ✓ Correct - Using blocks
|
||||
|
||||
```csharp
|
||||
using (var stream = File.OpenRead("archive.zip"))
|
||||
using (var archive = ZipArchive.OpenArchive(stream))
|
||||
{
|
||||
archive.WriteToDirectory(@"C:\output");
|
||||
}
|
||||
// Both properly disposed
|
||||
```
|
||||
|
||||
### ✗ Wrong - Mixing API styles
|
||||
|
||||
```csharp
|
||||
// Loading entire archive then iterating
|
||||
using (var archive = ZipArchive.OpenArchive("large.zip"))
|
||||
{
|
||||
var entries = archive.Entries.ToList(); // Loads all in memory
|
||||
foreach (var e in entries)
|
||||
{
|
||||
e.WriteToFile(...); // Then extracts each
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### ✓ Correct - Use Reader for large files
|
||||
|
||||
```csharp
|
||||
// Streaming iteration
|
||||
using (var stream = File.OpenRead("large.zip"))
|
||||
using (var reader = ReaderFactory.OpenReader(stream))
|
||||
{
|
||||
while (reader.MoveToNextEntry())
|
||||
{
|
||||
reader.WriteEntryToDirectory(@"C:\output");
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Related Documentation
|
||||
|
||||
- [USAGE.md](USAGE.md) - Complete code examples
|
||||
- [FORMATS.md](FORMATS.md) - Supported formats
|
||||
- [PERFORMANCE.md](PERFORMANCE.md) - API selection guide
|
||||
docs/ARCHITECTURE.md (new file): 659 lines
@@ -0,0 +1,659 @@
|
||||
# SharpCompress Architecture Guide
|
||||
|
||||
This guide explains the internal architecture and design patterns of SharpCompress for contributors.
|
||||
|
||||
## Overview
|
||||
|
||||
SharpCompress is organized into three main layers:
|
||||
|
||||
```
|
||||
┌─────────────────────────────────────────┐
|
||||
│ User-Facing APIs (Top Layer) │
|
||||
│ Archive, Reader, Writer Factories │
|
||||
├─────────────────────────────────────────┤
|
||||
│ Format-Specific Implementations │
|
||||
│ ZipArchive, TarReader, GZipWriter, │
|
||||
│ RarArchive, SevenZipArchive, etc. │
|
||||
├─────────────────────────────────────────┤
|
||||
│ Compression & Crypto (Bottom Layer) │
|
||||
│ Deflate, LZMA, BZip2, AES, CRC32 │
|
||||
└─────────────────────────────────────────┘
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Directory Structure
|
||||
|
||||
### `src/SharpCompress/`
|
||||
|
||||
#### `Archives/` - Archive Implementations
|
||||
Contains `IArchive` implementations for seekable, random-access APIs.
|
||||
|
||||
**Key Files:**
|
||||
- `AbstractArchive.cs` - Base class for all archives
|
||||
- `IArchive.cs` - Archive interface definition
|
||||
- `ArchiveFactory.cs` - Factory for opening archives
|
||||
- Format-specific: `ZipArchive.cs`, `TarArchive.cs`, `RarArchive.cs`, `SevenZipArchive.cs`, `GZipArchive.cs`
|
||||
|
||||
**Use Archive API when:**
|
||||
- Stream is seekable (file, memory)
|
||||
- Need random access to entries
|
||||
- Archive fits in memory
|
||||
- Simplicity is important
|
||||
|
||||
#### `Readers/` - Reader Implementations
|
||||
Contains `IReader` implementations for forward-only, non-seekable APIs.
|
||||
|
||||
**Key Files:**
|
||||
- `AbstractReader.cs` - Base reader class
|
||||
- `IReader.cs` - Reader interface
|
||||
- `ReaderFactory.cs` - Auto-detection factory
|
||||
- `ReaderOptions.cs` - Configuration for readers
|
||||
- Format-specific: `ZipReader.cs`, `TarReader.cs`, `GZipReader.cs`, `RarReader.cs`, etc.
|
||||
|
||||
**Use Reader API when:**
|
||||
- Stream is non-seekable (network, pipe, compressed)
|
||||
- Processing large files
|
||||
- Memory is limited
|
||||
- Forward-only processing is acceptable
|
||||
|
||||
#### `Writers/` - Writer Implementations
|
||||
Contains `IWriter` implementations for forward-only writing.
|
||||
|
||||
**Key Files:**
|
||||
- `AbstractWriter.cs` - Base writer class
|
||||
- `IWriter.cs` - Writer interface
|
||||
- `WriterFactory.cs` - Factory for creating writers
|
||||
- `WriterOptions.cs` - Configuration for writers
|
||||
- Format-specific: `ZipWriter.cs`, `TarWriter.cs`, `GZipWriter.cs`
|
||||
|
||||
#### `Factories/` - Format Detection
|
||||
Factory classes for auto-detecting archive format and creating appropriate readers/writers.
|
||||
|
||||
**Key Files:**
|
||||
- `Factory.cs` - Base factory class
|
||||
- `IFactory.cs` - Factory interface
|
||||
- Format-specific: `ZipFactory.cs`, `TarFactory.cs`, `RarFactory.cs`, etc.
|
||||
|
||||
**How It Works:**
|
||||
1. `ReaderFactory.OpenReader(stream)` probes stream signatures
|
||||
2. Identifies format by magic bytes
|
||||
3. Creates appropriate reader instance
|
||||
4. Returns generic `IReader` interface
|
||||
|
||||
#### `Common/` - Shared Types
|
||||
Common types, options, and enumerations used across formats.
|
||||
|
||||
**Key Files:**
|
||||
- `IEntry.cs` - Entry interface (file within archive)
|
||||
- `Entry.cs` - Entry implementation
|
||||
- `ArchiveType.cs` - Enum for archive formats
|
||||
- `CompressionType.cs` - Enum for compression methods
|
||||
- `ArchiveEncoding.cs` - Character encoding configuration
|
||||
- `ExtractionOptions.cs` - Extraction configuration
|
||||
- Format-specific headers: `Zip/Headers/`, `Tar/Headers/`, `Rar/Headers/`, etc.
|
||||
|
||||
#### `Compressors/` - Compression Algorithms
|
||||
Low-level compression streams implementing specific algorithms.
|
||||
|
||||
**Algorithms:**
|
||||
- `Deflate/` - DEFLATE compression (Zip default)
|
||||
- `BZip2/` - BZip2 compression
|
||||
- `LZMA/` - LZMA compression (7Zip, XZ, LZip)
|
||||
- `PPMd/` - Prediction by Partial Matching (Zip, 7Zip)
|
||||
- `ZStandard/` - ZStandard compression (decompression only)
|
||||
- `Xz/` - XZ format (decompression only)
|
||||
- `Rar/` - RAR-specific unpacking
|
||||
- `Arj/`, `Arc/`, `Ace/` - Legacy format decompression
|
||||
- `Filters/` - BCJ/BCJ2 filters for executable compression
|
||||
|
||||
**Each Compressor:**
|
||||
- Implements a `Stream` subclass
|
||||
- Provides both compression and decompression
|
||||
- Some are read-only (decompression only)
|
||||
|
||||
#### `Crypto/` - Encryption & Hashing
|
||||
Cryptographic functions and stream wrappers.
|
||||
|
||||
**Key Files:**
|
||||
- `Crc32Stream.cs` - CRC32 calculation wrapper
|
||||
- `BlockTransformer.cs` - Block cipher transformations
|
||||
- AES, PKWare, WinZip encryption implementations
|
||||
|
||||
#### `IO/` - Stream Utilities
|
||||
Stream wrappers and utilities.
|
||||
|
||||
**Key Classes:**
|
||||
- `SharpCompressStream` - Base stream class
|
||||
- `ProgressReportingStream` - Progress tracking wrapper
|
||||
- `MarkingBinaryReader` - Binary reader with position marks
|
||||
- `BufferedSubStream` - Buffered read-only substream
|
||||
- `ReadOnlySubStream` - Read-only view of parent stream
|
||||
- `NonDisposingStream` - Prevents wrapped stream disposal
|
||||
|
||||
---
|
||||
|
||||
## Design Patterns
|
||||
|
||||
### 1. Factory Pattern
|
||||
|
||||
**Purpose:** Auto-detect format and create appropriate reader/writer.
|
||||
|
||||
**Example:**
|
||||
```csharp
|
||||
// User calls factory
|
||||
using (var reader = ReaderFactory.OpenReader(stream)) // Returns IReader
|
||||
{
|
||||
while (reader.MoveToNextEntry())
|
||||
{
|
||||
// Process entry
|
||||
}
|
||||
}
|
||||
|
||||
// Behind the scenes:
|
||||
// 1. Factory.Open() probes stream signatures
|
||||
// 2. Detects format (Zip, Tar, Rar, etc.)
|
||||
// 3. Creates appropriate reader (ZipReader, TarReader, etc.)
|
||||
// 4. Returns as generic IReader interface
|
||||
```
|
||||
|
||||
**Files:**
|
||||
- `src/SharpCompress/Factories/ReaderFactory.cs`
|
||||
- `src/SharpCompress/Factories/WriterFactory.cs`
|
||||
- `src/SharpCompress/Factories/ArchiveFactory.cs`
|
||||
|
||||
### 2. Strategy Pattern
|
||||
|
||||
**Purpose:** Encapsulate compression algorithms as swappable strategies.
|
||||
|
||||
**Example:**
|
||||
```csharp
|
||||
// Different compression strategies
|
||||
CompressionType.Deflate // DEFLATE
|
||||
CompressionType.BZip2 // BZip2
|
||||
CompressionType.LZMA // LZMA
|
||||
CompressionType.PPMd // PPMd
|
||||
|
||||
// Writer uses strategy pattern
|
||||
var archive = ZipArchive.CreateArchive();
|
||||
archive.SaveTo("output.zip", CompressionType.Deflate); // Use Deflate
|
||||
archive.SaveTo("output.bz2", CompressionType.BZip2); // Use BZip2
|
||||
```
|
||||
|
||||
**Files:**
|
||||
- `src/SharpCompress/Compressors/` - Strategy implementations
|
||||
|
||||
### 3. Decorator Pattern
|
||||
|
||||
**Purpose:** Wrap streams with additional functionality.
|
||||
|
||||
**Example:**
|
||||
```csharp
|
||||
// Progress reporting decorator
|
||||
var progressStream = new ProgressReportingStream(baseStream, progressReporter);
|
||||
progressStream.Read(buffer, 0, buffer.Length); // Reports progress
|
||||
|
||||
// Non-disposing decorator
|
||||
var nonDisposingStream = new NonDisposingStream(baseStream);
|
||||
using (var compressor = new DeflateStream(nonDisposingStream))
|
||||
{
|
||||
// baseStream won't be disposed when compressor is disposed
|
||||
}
|
||||
```
|
||||
|
||||
**Files:**
|
||||
- `src/SharpCompress/IO/ProgressReportingStream.cs`
|
||||
- `src/SharpCompress/IO/NonDisposingStream.cs`
|
||||
|
||||
### 4. Template Method Pattern
|
||||
|
||||
**Purpose:** Define algorithm skeleton in base class, let subclasses fill details.
|
||||
|
||||
**Example:**
|
||||
```csharp
|
||||
// AbstractArchive defines common archive operations
|
||||
public abstract class AbstractArchive : IArchive
|
||||
{
|
||||
// Template methods
|
||||
public virtual void WriteToDirectory(string destinationDirectory, ExtractionOptions options)
|
||||
{
|
||||
// Common extraction logic
|
||||
foreach (var entry in Entries)
|
||||
{
|
||||
// Call subclass method
|
||||
entry.WriteToFile(destinationPath, options);
|
||||
}
|
||||
}
|
||||
|
||||
// Subclasses override format-specific details
|
||||
protected abstract Entry CreateEntry(EntryData data);
|
||||
}
|
||||
```
|
||||
|
||||
**Files:**
|
||||
- `src/SharpCompress/Archives/AbstractArchive.cs`
|
||||
- `src/SharpCompress/Readers/AbstractReader.cs`
|
||||
|
||||
### 5. Iterator Pattern
|
||||
|
||||
**Purpose:** Provide sequential access to entries.
|
||||
|
||||
**Example:**
|
||||
```csharp
|
||||
// Archive API - provides collection
|
||||
IEnumerable<IEntry> entries = archive.Entries;
|
||||
foreach (var entry in entries)
|
||||
{
|
||||
// Random access - entries already in memory
|
||||
}
|
||||
|
||||
// Reader API - provides iterator
|
||||
IReader reader = ReaderFactory.OpenReader(stream);
|
||||
while (reader.MoveToNextEntry())
|
||||
{
|
||||
// Forward-only iteration - one entry at a time
|
||||
var entry = reader.Entry;
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Key Interfaces
|
||||
|
||||
### IArchive - Random Access API
|
||||
|
||||
```csharp
|
||||
public interface IArchive : IDisposable
|
||||
{
|
||||
IEnumerable<IEntry> Entries { get; }
|
||||
|
||||
void WriteToDirectory(string destinationDirectory,
|
||||
ExtractionOptions options = null);
|
||||
|
||||
IEntry FirstOrDefault(Func<IEntry, bool> predicate);
|
||||
|
||||
// ... format-specific methods
|
||||
}
|
||||
```
|
||||
|
||||
**Implementations:** `ZipArchive`, `TarArchive`, `RarArchive`, `SevenZipArchive`, `GZipArchive`
|
||||
|
||||
### IReader - Forward-Only API
|
||||
|
||||
```csharp
|
||||
public interface IReader : IDisposable
|
||||
{
|
||||
IEntry Entry { get; }
|
||||
|
||||
bool MoveToNextEntry();
|
||||
|
||||
void WriteEntryToDirectory(string destinationDirectory,
|
||||
ExtractionOptions options = null);
|
||||
|
||||
Stream OpenEntryStream();
|
||||
|
||||
// ... async variants
|
||||
}
|
||||
```
|
||||
|
||||
**Implementations:** `ZipReader`, `TarReader`, `RarReader`, `GZipReader`, etc.
|
||||
|
||||
### IWriter - Writing API
|
||||
|
||||
```csharp
|
||||
public interface IWriter : IDisposable
|
||||
{
|
||||
void Write(string entryPath, Stream source,
|
||||
DateTime? modificationTime = null);
|
||||
|
||||
void WriteAll(string sourceDirectory, string searchPattern,
|
||||
SearchOption searchOption);
|
||||
|
||||
// ... async variants
|
||||
}
|
||||
```
|
||||
|
||||
**Implementations:** `ZipWriter`, `TarWriter`, `GZipWriter`
|
||||
|
||||
### IEntry - Archive Entry
|
||||
|
||||
```csharp
|
||||
public interface IEntry
|
||||
{
|
||||
string Key { get; }
|
||||
long Size { get; }
long CompressedSize { get; }
|
||||
bool IsDirectory { get; }
|
||||
DateTime? LastModifiedTime { get; }
|
||||
CompressionType CompressionType { get; }
|
||||
|
||||
void WriteToFile(string fullPath, ExtractionOptions options = null);
|
||||
void WriteToStream(Stream destinationStream);
|
||||
Stream OpenEntryStream();
|
||||
|
||||
// ... async variants
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Adding Support for a New Format
|
||||
|
||||
### Step 1: Understand the Format
|
||||
- Research format specification
|
||||
- Understand compression/encryption used
|
||||
- Study existing similar formats in codebase
|
||||
|
||||
### Step 2: Create Format Structure Classes
|
||||
|
||||
**Create:** `src/SharpCompress/Common/NewFormat/`
|
||||
|
||||
```csharp
|
||||
// Headers and data structures
|
||||
public class NewFormatHeader
|
||||
{
|
||||
public uint Magic { get; set; }
|
||||
public ushort Version { get; set; }
|
||||
// ... other fields
|
||||
|
||||
public static NewFormatHeader Read(BinaryReader reader)
|
||||
{
|
||||
// Deserialize from binary
|
||||
}
|
||||
}
|
||||
|
||||
public class NewFormatEntry
|
||||
{
|
||||
public string FileName { get; set; }
|
||||
public uint CompressedSize { get; set; }
|
||||
public uint UncompressedSize { get; set; }
|
||||
// ... other fields
|
||||
}
|
||||
```
|
||||
|
||||
### Step 3: Create Archive Implementation
|
||||
|
||||
**Create:** `src/SharpCompress/Archives/NewFormat/NewFormatArchive.cs`
|
||||
|
||||
```csharp
|
||||
public class NewFormatArchive : AbstractArchive
|
||||
{
|
||||
private NewFormatHeader _header;
|
||||
private List<NewFormatEntry> _entries;
|
||||
|
||||
public static NewFormatArchive OpenArchive(Stream stream)
|
||||
{
|
||||
var archive = new NewFormatArchive();
|
||||
archive._header = NewFormatHeader.Read(stream);
|
||||
archive.LoadEntries(stream);
|
||||
return archive;
|
||||
}
|
||||
|
||||
public override IEnumerable<IEntry> Entries => _entries.Select(e => new Entry(e));
|
||||
|
||||
protected override Stream OpenEntryStream(Entry entry)
|
||||
{
|
||||
// Return decompressed stream for entry
|
||||
}
|
||||
|
||||
// ... other abstract method implementations
|
||||
}
|
||||
```
|
||||
|
||||
### Step 4: Create Reader Implementation
|
||||
|
||||
**Create:** `src/SharpCompress/Readers/NewFormat/NewFormatReader.cs`
|
||||
|
||||
```csharp
|
||||
public class NewFormatReader : AbstractReader
|
||||
{
|
||||
private NewFormatHeader _header;
|
||||
private BinaryReader _reader;
|
||||
|
||||
public NewFormatReader(Stream stream)
|
||||
{
|
||||
_reader = new BinaryReader(stream);
|
||||
_header = NewFormatHeader.Read(_reader);
|
||||
}
|
||||
|
||||
public override bool MoveToNextEntry()
|
||||
{
|
||||
// Read next entry header
|
||||
if (!_reader.BaseStream.CanRead) return false;
|
||||
|
||||
var entryData = NewFormatEntry.Read(_reader);
|
||||
// ... set this.Entry
|
||||
return entryData != null;
|
||||
}
|
||||
|
||||
// ... other abstract method implementations
|
||||
}
|
||||
```
|
||||
|
||||
### Step 5: Create Factory
|
||||
|
||||
**Create:** `src/SharpCompress/Factories/NewFormatFactory.cs`
|
||||
|
||||
```csharp
|
||||
public class NewFormatFactory : Factory, IArchiveFactory, IReaderFactory
|
||||
{
|
||||
// Archive format magic bytes (signature)
|
||||
private static readonly byte[] NewFormatSignature = new byte[] { 0x4E, 0x46 }; // "NF"
|
||||
|
||||
public static NewFormatFactory Instance { get; } = new();
|
||||
|
||||
public IArchive CreateArchive(Stream stream)
|
||||
=> NewFormatArchive.OpenArchive(stream);
|
||||
|
||||
public IReader CreateReader(Stream stream, ReaderOptions options)
|
||||
=> new NewFormatReader(stream) { Options = options };
|
||||
|
||||
public bool Matches(Stream stream, ReadOnlySpan<byte> signature)
|
||||
=> signature.StartsWith(NewFormatSignature);
|
||||
}
|
||||
```
|
||||
|
||||
### Step 6: Register Factory
|
||||
|
||||
**Update:** `src/SharpCompress/Factories/ArchiveFactory.cs`
|
||||
|
||||
```csharp
|
||||
private static readonly IFactory[] Factories =
|
||||
{
|
||||
ZipFactory.Instance,
|
||||
TarFactory.Instance,
|
||||
RarFactory.Instance,
|
||||
SevenZipFactory.Instance,
|
||||
GZipFactory.Instance,
|
||||
NewFormatFactory.Instance, // Add here
|
||||
// ... other factories
|
||||
};
|
||||
```
|
||||
|
||||
### Step 7: Add Tests
|
||||
|
||||
**Create:** `tests/SharpCompress.Test/NewFormat/NewFormatTests.cs`
|
||||
|
||||
```csharp
|
||||
public class NewFormatTests : TestBase
|
||||
{
|
||||
[Fact]
|
||||
public void NewFormat_Extracts_Successfully()
|
||||
{
|
||||
var archivePath = Path.Combine(TEST_ARCHIVES_PATH, "archive.newformat");
|
||||
using (var archive = NewFormatArchive.OpenArchive(archivePath))
|
||||
{
|
||||
archive.WriteToDirectory(SCRATCH_FILES_PATH);
|
||||
// Assert extraction
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void NewFormat_Reader_Works()
|
||||
{
|
||||
var archivePath = Path.Combine(TEST_ARCHIVES_PATH, "archive.newformat");
|
||||
using (var stream = File.OpenRead(archivePath))
|
||||
using (var reader = new NewFormatReader(stream))
|
||||
{
|
||||
Assert.True(reader.MoveToNextEntry());
|
||||
Assert.NotNull(reader.Entry);
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Step 8: Add Test Archives
|
||||
|
||||
Place test files in `tests/TestArchives/Archives/NewFormat/` directory.
|
||||
|
||||
### Step 9: Document
|
||||
|
||||
Update `docs/FORMATS.md` with format support information.
|
||||
|
||||
---
|
||||
|
||||
## Compression Algorithm Implementation
|
||||
|
||||
### Creating a New Compression Stream
|
||||
|
||||
**Example:** Creating `CustomStream` for a custom compression algorithm
|
||||
|
||||
```csharp
|
||||
public class CustomStream : Stream
|
||||
{
|
||||
private readonly Stream _baseStream;
|
||||
private readonly bool _leaveOpen;
|
||||
|
||||
public CustomStream(Stream baseStream, bool leaveOpen = false)
|
||||
{
|
||||
_baseStream = baseStream;
|
||||
_leaveOpen = leaveOpen;
|
||||
}
|
||||
|
||||
public override int Read(byte[] buffer, int offset, int count)
|
||||
{
|
||||
// Decompress data from _baseStream into buffer
|
||||
// Return number of decompressed bytes
|
||||
}
|
||||
|
||||
public override void Write(byte[] buffer, int offset, int count)
|
||||
{
|
||||
// Compress data from buffer into _baseStream
|
||||
}
|
||||
|
||||
protected override void Dispose(bool disposing)
|
||||
{
|
||||
if (disposing && !_leaveOpen)
|
||||
{
|
||||
_baseStream?.Dispose();
|
||||
}
|
||||
base.Dispose(disposing);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Stream Handling Best Practices
|
||||
|
||||
### Disposal Pattern
|
||||
|
||||
```csharp
|
||||
// Correct: Nested using blocks
|
||||
using (var fileStream = File.OpenRead("archive.zip"))
|
||||
using (var archive = ZipArchive.OpenArchive(fileStream))
|
||||
{
|
||||
archive.WriteToDirectory(@"C:\output");
|
||||
}
|
||||
// Both archive and fileStream properly disposed
|
||||
|
||||
// Correct: Using with options
|
||||
var options = new ReaderOptions { LeaveStreamOpen = true };
|
||||
var stream = File.OpenRead("archive.zip");
|
||||
using (var archive = ZipArchive.OpenArchive(stream, options))
|
||||
{
|
||||
archive.WriteToDirectory(@"C:\output");
|
||||
}
|
||||
stream.Dispose(); // Manually dispose if LeaveStreamOpen = true
|
||||
```
|
||||
|
||||
### NonDisposingStream Wrapper
|
||||
|
||||
```csharp
|
||||
// Prevent unwanted stream closure
|
||||
var baseStream = File.OpenRead("data.bin");
|
||||
var nonDisposing = new NonDisposingStream(baseStream);
|
||||
|
||||
using (var compressor = new DeflateStream(nonDisposing))
|
||||
{
|
||||
// Compressor won't close baseStream when disposed
|
||||
}
|
||||
|
||||
// baseStream still usable
|
||||
baseStream.Position = 0; // Works
|
||||
baseStream.Dispose(); // Manual disposal
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Performance Considerations
|
||||
|
||||
### Memory Efficiency
|
||||
|
||||
1. **Avoid loading entire archive in memory** - Use Reader API for large files
|
||||
2. **Process entries sequentially** - Especially for solid archives
|
||||
3. **Use appropriate buffer sizes** - Larger buffers for network I/O
|
||||
4. **Dispose streams promptly** - Free resources when done
|
||||
|
||||
### Algorithm Selection
|
||||
|
||||
1. **Archive API** - Fast for small archives with random access
|
||||
2. **Reader API** - Efficient for large files or streaming
|
||||
3. **Solid archives** - Sequential extraction much faster
|
||||
4. **Compression levels** - Trade-off between speed and size
|
||||
|
||||
---
|
||||
|
||||
## Testing Guidelines
|
||||
|
||||
### Test Coverage
|
||||
|
||||
1. **Happy path** - Normal extraction works
|
||||
2. **Edge cases** - Empty archives, single file, many files
|
||||
3. **Corrupted data** - Handle gracefully
|
||||
4. **Error cases** - Missing passwords, unsupported compression
|
||||
5. **Async operations** - Both sync and async code paths
|
||||
|
||||
### Test Archives
|
||||
|
||||
- Use `tests/TestArchives/` for test data
|
||||
- Create format-specific subdirectories
|
||||
- Include encrypted, corrupted, and edge case archives
|
||||
- Don't recreate existing archives
|
||||
|
||||
### Test Patterns
|
||||
|
||||
```csharp
|
||||
[Fact]
|
||||
public void Archive_Extraction_Works()
|
||||
{
|
||||
// Arrange
|
||||
var testArchive = Path.Combine(TEST_ARCHIVES_PATH, "test.zip");
|
||||
|
||||
// Act
|
||||
using (var archive = ZipArchive.OpenArchive(testArchive))
|
||||
{
|
||||
archive.WriteToDirectory(SCRATCH_FILES_PATH);
|
||||
}
|
||||
|
||||
// Assert
|
||||
Assert.True(File.Exists(Path.Combine(SCRATCH_FILES_PATH, "file.txt")));
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Related Documentation
|
||||
|
||||
- [AGENTS.md](../AGENTS.md) - Development guidelines
|
||||
- [FORMATS.md](FORMATS.md) - Supported formats
|
||||
docs/DataDescriptorStream-RewindableStream-Fix.md (new file): 129 lines
@@ -0,0 +1,129 @@
|
||||
# DataDescriptorStream and RewindableStream Fix
|
||||
|
||||
## Summary
|
||||
|
||||
Fixed the `Zip_Uncompressed_Read_All` test failure caused by incompatibility between `DataDescriptorStream` seeking requirements and the new `RewindableStream` wrapper used in `StreamingZipHeaderFactory`.
|
||||
|
||||
## Problem Description
|
||||
|
||||
### Symptom
|
||||
The test `Zip_Uncompressed_Read_All` was failing with:
|
||||
```
|
||||
System.NotSupportedException : Cannot seek outside buffered region.
|
||||
```
|
||||
|
||||
### Root Cause
|
||||
|
||||
The issue had two related aspects:
|
||||
|
||||
#### 1. Double-Wrapping of RewindableStream
|
||||
|
||||
`StreamingZipHeaderFactory.ReadStreamHeader()` was creating a new `RewindableStream` wrapper:
|
||||
```csharp
|
||||
var rewindableStream = new RewindableStream(stream);
|
||||
```
|
||||
|
||||
When `ReaderFactory.OpenReader()` already wraps the input stream with `SeekableRewindableStream` (for seekable streams), this resulted in double-wrapping:
|
||||
```
|
||||
DataDescriptorStream
|
||||
-> NonDisposingStream
|
||||
-> RewindableStream (new, plain) <-- created by ReadStreamHeader
|
||||
-> SeekableRewindableStream <-- created by ReaderFactory
|
||||
-> FileStream
|
||||
```
|
||||
|
||||
The inner plain `RewindableStream` lost the seeking capability of `SeekableRewindableStream`.
|
||||
|
||||
#### 2. Recording State Interference
|
||||
|
||||
Even after fixing the double-wrapping using `RewindableStream.EnsureSeekable()`, there was another issue:
|
||||
|
||||
`StreamingZipHeaderFactory.ReadStreamHeader()` contains code to peek ahead when checking for zero-length files with `UsePostDataDescriptor`:
|
||||
|
||||
```csharp
|
||||
rewindableStream.StartRecording();
|
||||
var nextHeaderBytes = reader.ReadUInt32();
|
||||
rewindableStream.Rewind(true);
|
||||
```
|
||||
|
||||
This code was interfering with the recording state that `ReaderFactory.OpenReader()` had set up:
|
||||
|
||||
1. `ReaderFactory.OpenReader()` calls `bStream.StartRecording()` at position 0
|
||||
2. Factory detection calls `StreamingZipHeaderFactory.ReadStreamHeader()` via `IsZipFile()`
|
||||
3. Inside `ReadStreamHeader`, the above code overwrites the recorded position
|
||||
4. `Rewind(true)` stops recording and seeks to the wrong position
|
||||
5. When control returns to `Factory.TryOpenReader()`, it calls `stream.Rewind(true)`, but recording is already stopped, so nothing happens
|
||||
6. The stream position is not at the beginning, causing subsequent reads to fail
|
||||
|
||||
## Solution
|
||||
|
||||
### Fix 1: Use EnsureSeekable instead of new RewindableStream
|
||||
|
||||
Changed `StreamingZipHeaderFactory.ReadStreamHeader()` to use:
|
||||
```csharp
|
||||
var rewindableStream = RewindableStream.EnsureSeekable(stream);
|
||||
```
|
||||
|
||||
This method (see the sketch after this list):
|
||||
- Returns the existing `RewindableStream` if the stream is already one (avoids double-wrapping)
|
||||
- Creates a `SeekableRewindableStream` if the underlying stream is seekable
|
||||
- Creates a plain `RewindableStream` only for non-seekable streams
|
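For illustration, the decision can be sketched roughly as below. This mirrors the three bullets above and is not the actual implementation; the constructors shown are assumptions.

```csharp
// Illustrative sketch only - not the real SharpCompress code.
public static RewindableStream EnsureSeekable(Stream stream)
{
    if (stream is RewindableStream existing)
    {
        return existing;                             // already wrapped: avoid double-wrapping
    }
    if (stream.CanSeek)
    {
        return new SeekableRewindableStream(stream); // full seeking via the source stream
    }
    return new RewindableStream(stream);             // non-seekable source: buffered seeking only
}
```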
||||
|
||||
### Fix 2: Use direct position save/restore for SeekableRewindableStream
|
||||
|
||||
For the peek-ahead logic, changed the code to check for `SeekableRewindableStream` specifically and use direct position manipulation:
|
||||
|
||||
```csharp
|
||||
if (rewindableStream is SeekableRewindableStream)
|
||||
{
|
||||
// Direct position save/restore avoids interfering with caller's recording state
|
||||
var savedPosition = rewindableStream.Position;
|
||||
var nextHeaderBytes = reader.ReadUInt32();
|
||||
rewindableStream.Position = savedPosition;
|
||||
header.HasData = !IsHeader(nextHeaderBytes);
|
||||
}
|
||||
else
|
||||
{
|
||||
// Plain RewindableStream was created fresh by EnsureSeekable, safe to use recording
|
||||
rewindableStream.StartRecording();
|
||||
var nextHeaderBytes = reader.ReadUInt32();
|
||||
rewindableStream.Rewind(true);
|
||||
header.HasData = !IsHeader(nextHeaderBytes);
|
||||
}
|
||||
```
|
||||
|
||||
This approach:
|
||||
- For `SeekableRewindableStream` (reused from caller): Uses direct position save/restore to avoid clobbering the caller's recording state
|
||||
- For plain `RewindableStream` (freshly created): Uses the recording mechanism which is safe since the stream isn't shared
|
||||
|
||||
## Files Changed
|
||||
|
||||
- `src/SharpCompress/Common/Zip/StreamingZipHeaderFactory.cs`
|
||||
- `src/SharpCompress/Common/Zip/StreamingZipHeaderFactory.Async.cs`
|
||||
|
||||
## Design Notes
|
||||
|
||||
### Why not fix RewindableStream.CanSeek?
|
||||
|
||||
`RewindableStream.CanSeek` returns `true` even though it can only seek within its buffered region. We considered changing this to `false`, but:
|
||||
1. It would be a breaking change for existing code that relies on `CanSeek`
|
||||
2. The `RewindableStream` does provide limited seeking capability (within buffer)
|
||||
3. Checking for `SeekableRewindableStream` specifically is more precise
|
||||
|
||||
### Stream Wrapper Hierarchy
|
||||
|
||||
Understanding the stream wrapper hierarchy is crucial:
|
||||
|
||||
**For seekable source streams (e.g., FileStream):**
|
||||
```
|
||||
SeekableRewindableStream (full seeking via underlying stream)
|
||||
-> FileStream
|
||||
```
|
||||
|
||||
**For non-seekable source streams (e.g., decompression streams):**
|
||||
```
|
||||
RewindableStream (limited seeking via buffer)
|
||||
-> DecompressionStream
|
||||
```
|
||||
|
||||
`DataDescriptorStream` needs backward seeking to position the stream correctly after finding the data descriptor marker. This is why proper stream wrapper selection matters.
|
||||
610
docs/ENCODING.md
Normal file
@@ -0,0 +1,610 @@
|
||||
# SharpCompress Character Encoding Guide
|
||||
|
||||
This guide explains how SharpCompress handles character encoding for archive entries (filenames, comments, etc.).
|
||||
|
||||
## Overview
|
||||
|
||||
Most archive formats store filenames and metadata as bytes. SharpCompress must convert these bytes to strings using the appropriate character encoding.
|
||||
|
||||
**Common Problem:** Archives created on systems with non-UTF8 encodings (especially Japanese, Chinese systems) appear with corrupted filenames when extracted on systems that assume UTF8.
|
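The effect is easy to reproduce in isolation. The snippet below is a sketch of the problem itself, not of SharpCompress: the same bytes decode to different strings depending on the encoding used (code page 932 needs the code-pages provider on .NET Core, see the note under Basic Usage).

```csharp
// The same bytes decode differently depending on the chosen encoding.
var cp932 = Encoding.GetEncoding(932);
var bytes = cp932.GetBytes("日本語.txt");          // bytes as stored by a Japanese Windows system
Console.WriteLine(cp932.GetString(bytes));         // 日本語.txt  (correct)
Console.WriteLine(Encoding.UTF8.GetString(bytes)); // replacement characters (corrupted name)
```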
||||
|
||||
---
|
||||
|
||||
## ArchiveEncoding Class
|
||||
|
||||
### Basic Usage
|
||||
|
||||
```csharp
|
||||
using System.Text;
using SharpCompress.Common;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
// Configure encoding before opening archive
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding(932) // cp932 for Japanese
|
||||
}
|
||||
};
|
||||
|
||||
using (var archive = ZipArchive.OpenArchive("japanese.zip", options))
|
||||
{
|
||||
foreach (var entry in archive.Entries)
|
||||
{
|
||||
Console.WriteLine(entry.Key); // Now shows correct characters
|
||||
}
|
||||
}
|
||||
```
|
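One practical caveat that is easy to miss (a .NET detail, not a SharpCompress one): on .NET Core and .NET 5+, code-page encodings such as 932, gbk or cp1251 are not available until the code-pages provider from the `System.Text.Encoding.CodePages` package has been registered.

```csharp
// Run once at startup, before any Encoding.GetEncoding(932) / "gbk" / "cp1251" call.
// Requires the System.Text.Encoding.CodePages NuGet package on .NET Core / .NET 5+.
Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
```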
||||
|
||||
### ArchiveEncoding Properties
|
||||
|
||||
| Property | Purpose |
|
||||
|----------|---------|
|
||||
| `Default` | Default encoding for filenames (fallback) |
|
||||
| `CustomDecoder` | Custom decoding function for special cases |
|
||||
|
||||
### Setting for Different APIs
|
||||
|
||||
**Archive API:**
|
||||
```csharp
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(932) }
|
||||
};
|
||||
using (var archive = ZipArchive.OpenArchive("file.zip", options))
|
||||
{
|
||||
// Use archive with correct encoding
|
||||
}
|
||||
```
|
||||
|
||||
**Reader API:**
|
||||
```csharp
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(932) }
|
||||
};
|
||||
using (var stream = File.OpenRead("file.zip"))
|
||||
using (var reader = ReaderFactory.OpenReader(stream, options))
|
||||
{
|
||||
while (reader.MoveToNextEntry())
|
||||
{
|
||||
// Filenames decoded correctly
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Common Encodings
|
||||
|
||||
### Asian Encodings
|
||||
|
||||
#### cp932 (Japanese)
|
||||
```csharp
|
||||
// Windows-31J, Shift-JIS variant used on Japanese Windows
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding(932)
|
||||
}
|
||||
};
|
||||
using (var archive = ZipArchive.OpenArchive("japanese.zip", options))
|
||||
{
|
||||
// Correctly decodes Japanese filenames
|
||||
}
|
||||
```
|
||||
|
||||
**When to use:**
|
||||
- Archives from Japanese Windows systems
|
||||
- Files with Japanese characters in names
|
||||
|
||||
#### gb2312 (Simplified Chinese)
|
||||
```csharp
|
||||
// Simplified Chinese
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding("gb2312")
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
#### gbk (Extended Simplified Chinese)
|
||||
```csharp
|
||||
// Extended Simplified Chinese (more characters than gb2312)
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding("gbk")
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
#### big5 (Traditional Chinese)
|
||||
```csharp
|
||||
// Traditional Chinese (Taiwan, Hong Kong)
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding("big5")
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
#### euc-jp (Japanese, Unix)
|
||||
```csharp
|
||||
// Extended Unix Code for Japanese
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding("eucjp")
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
#### euc-kr (Korean)
|
||||
```csharp
|
||||
// Extended Unix Code for Korean
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding("euc-kr")
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
### Western European Encodings
|
||||
|
||||
#### iso-8859-1 (Latin-1)
|
||||
```csharp
|
||||
// Western European (includes accented characters)
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding("iso-8859-1")
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
**When to use:**
|
||||
- Archives from French, German, Spanish systems
|
||||
- Files with accented characters (é, ñ, ü, etc.)
|
||||
|
||||
#### cp1252 (Windows-1252)
|
||||
```csharp
|
||||
// Windows Western European
|
||||
// Very similar to iso-8859-1 but with additional printable characters
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding("cp1252")
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
**When to use:**
|
||||
- Archives from older Western European Windows systems
|
||||
- Files with smart quotes and other Windows-specific characters
|
||||
|
||||
#### iso-8859-15 (Latin-9)
|
||||
```csharp
|
||||
// Western European with Euro symbol support
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding("iso-8859-15")
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
### Cyrillic Encodings
|
||||
|
||||
#### cp1251 (Windows Cyrillic)
|
||||
```csharp
|
||||
// Russian, Serbian, Bulgarian, etc.
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding("cp1251")
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
#### koi8-r (KOI8 Russian)
|
||||
```csharp
|
||||
// Russian (Unix standard)
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding("koi8-r")
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
### UTF Encodings (Modern)
|
||||
|
||||
#### UTF-8 (Default)
|
||||
```csharp
|
||||
// Modern standard - usually correct for new archives
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.UTF8
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
#### UTF-16
|
||||
```csharp
|
||||
// Unicode - rarely used in archives
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.Unicode
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Encoding Auto-Detection
|
||||
|
||||
SharpCompress attempts to auto-detect encoding, but this isn't always reliable:
|
||||
|
||||
```csharp
|
||||
// Auto-detection (default)
|
||||
using (var archive = ZipArchive.OpenArchive("file.zip")) // Uses UTF8 by default
|
||||
{
|
||||
// May show corrupted characters if archive uses different encoding
|
||||
}
|
||||
|
||||
// Explicit encoding (more reliable)
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding { Default = Encoding.GetEncoding(932) }
|
||||
};
|
||||
using (var archive = ZipArchive.OpenArchive("file.zip", options))
|
||||
{
|
||||
// Correct characters displayed
|
||||
}
|
||||
```
|
||||
|
||||
### When Manual Override is Needed
|
||||
|
||||
| Situation | Solution |
|
||||
|-----------|----------|
|
||||
| Archive shows corrupted characters | Specify the encoding explicitly |
|
||||
| Archives from specific region | Use that region's encoding |
|
||||
| Mixed encodings in archive | Use CustomDecoder |
|
||||
| Testing with international files | Try different encodings |
|
||||
|
||||
---
|
||||
|
||||
## Custom Decoder
|
||||
|
||||
For complex scenarios where a single encoding isn't sufficient:
|
||||
|
||||
### Basic Custom Decoder
|
||||
|
||||
```csharp
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
CustomDecoder = (data, offset, length) =>
|
||||
{
|
||||
// Custom decoding logic
|
||||
var bytes = new byte[length];
|
||||
Array.Copy(data, offset, bytes, 0, length);
|
||||
|
||||
// Try UTF8 first
|
||||
try
|
||||
{
|
||||
return Encoding.UTF8.GetString(bytes);
|
||||
}
|
||||
catch
|
||||
{
|
||||
// Fallback to cp932 if UTF8 fails
|
||||
return Encoding.GetEncoding(932).GetString(bytes);
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
using (var archive = ZipArchive.OpenArchive("mixed.zip", options))
|
||||
{
|
||||
foreach (var entry in archive.Entries)
|
||||
{
|
||||
Console.WriteLine(entry.Key); // Uses custom decoder
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Advanced: Detect Encoding by Content
|
||||
|
||||
```csharp
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
CustomDecoder = DetectAndDecode
|
||||
}
|
||||
};
|
||||
|
||||
private static string DetectAndDecode(byte[] data, int offset, int length)
|
||||
{
|
||||
var bytes = new byte[length];
|
||||
Array.Copy(data, offset, bytes, 0, length);
|
||||
|
||||
// Try UTF8 (most modern archives)
|
||||
try
|
||||
{
|
||||
var str = Encoding.UTF8.GetString(bytes);
|
||||
// Verify it decoded correctly (no replacement characters)
|
||||
if (!str.Contains('\uFFFD'))
|
||||
return str;
|
||||
}
|
||||
catch { }
|
||||
|
||||
// Try cp932 (Japanese)
|
||||
try
|
||||
{
|
||||
var str = Encoding.GetEncoding(932).GetString(bytes);
|
||||
if (!str.Contains('\uFFFD'))
|
||||
return str;
|
||||
}
|
||||
catch { }
|
||||
|
||||
// Fallback to iso-8859-1 (always succeeds)
|
||||
return Encoding.GetEncoding("iso-8859-1").GetString(bytes);
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Code Examples
|
||||
|
||||
### Extract Archive with Japanese Filenames
|
||||
|
||||
```csharp
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding(932) // cp932
|
||||
}
|
||||
};
|
||||
|
||||
using (var archive = ZipArchive.OpenArchive("japanese_files.zip", options))
|
||||
{
|
||||
archive.WriteToDirectory(@"C:\output", new ExtractionOptions
|
||||
{
|
||||
ExtractFullPath = true,
|
||||
Overwrite = true
|
||||
});
|
||||
}
|
||||
// Files extracted with correct Japanese names
|
||||
```
|
||||
|
||||
### Extract Archive with Western European Filenames
|
||||
|
||||
```csharp
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding("iso-8859-1")
|
||||
}
|
||||
};
|
||||
|
||||
using (var archive = ZipArchive.OpenArchive("french_files.zip", options))
|
||||
{
|
||||
archive.WriteToDirectory(@"C:\output");
|
||||
}
|
||||
// Accented characters (é, è, ê, etc.) display correctly
|
||||
```
|
||||
|
||||
### Extract Archive with Chinese Filenames
|
||||
|
||||
```csharp
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding("gbk") // Simplified Chinese
|
||||
}
|
||||
};
|
||||
|
||||
using (var archive = ZipArchive.OpenArchive("chinese_files.zip", options))
|
||||
{
|
||||
archive.WriteToDirectory(@"C:\output");
|
||||
}
|
||||
```
|
||||
|
||||
### Extract Archive with Russian Filenames
|
||||
|
||||
```csharp
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding("cp1251") // Windows Cyrillic
|
||||
}
|
||||
};
|
||||
|
||||
using (var archive = ZipArchive.OpenArchive("russian_files.zip", options))
|
||||
{
|
||||
archive.WriteToDirectory(@"C:\output");
|
||||
}
|
||||
```
|
||||
|
||||
### Reader API with Encoding
|
||||
|
||||
```csharp
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding(932)
|
||||
}
|
||||
};
|
||||
|
||||
using (var stream = File.OpenRead("japanese.zip"))
|
||||
using (var reader = ReaderFactory.OpenReader(stream, options))
|
||||
{
|
||||
while (reader.MoveToNextEntry())
|
||||
{
|
||||
if (!reader.Entry.IsDirectory)
|
||||
{
|
||||
Console.WriteLine(reader.Entry.Key); // Correct characters
|
||||
reader.WriteEntryToDirectory(@"C:\output");
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Creating Archives with Correct Encoding
|
||||
|
||||
When creating archives, SharpCompress uses UTF8 by default (recommended):
|
||||
|
||||
```csharp
|
||||
// Create with UTF8 (default, recommended)
|
||||
using (var archive = ZipArchive.CreateArchive())
|
||||
{
|
||||
archive.AddAllFromDirectory(@"D:\my_files");
|
||||
archive.SaveTo("output.zip", CompressionType.Deflate);
|
||||
// Archives created with UTF8 encoding
|
||||
}
|
||||
```
|
||||
|
||||
If you need to create archives for systems that expect specific encodings:
|
||||
|
||||
```csharp
|
||||
// Note: SharpCompress Writer API uses UTF8 for encoding
|
||||
// To create archives with other encodings, consider:
|
||||
// 1. Let users on those systems create archives
|
||||
// 2. Use system tools (7-Zip, WinRAR) with desired encoding
|
||||
// 3. Post-process archives if absolutely necessary
|
||||
|
||||
// For now, recommend modern UTF8-based archives
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Troubleshooting Encoding Issues
|
||||
|
||||
### Filenames Show Question Marks (?)
|
||||
|
||||
```
|
||||
✗ Wrong encoding detected
|
||||
test文件.txt → test???.txt
|
||||
```
|
||||
|
||||
**Solution:** Specify correct encoding explicitly
|
||||
|
||||
```csharp
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
Default = Encoding.GetEncoding("gbk") // Try different encodings
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
### Filenames Show Replacement Character (�)
|
||||
|
||||
```
|
||||
✗ Invalid bytes for selected encoding
|
||||
café.txt → caf�.txt
|
||||
```
|
||||
|
||||
**Solution:**
|
||||
1. Try a different encoding (see Common Encodings table)
|
||||
2. Use CustomDecoder with fallback encoding
|
||||
3. Archive might be corrupted
|
||||
|
||||
### Mixed Encodings in Single Archive
|
||||
|
||||
```csharp
|
||||
// Use CustomDecoder to handle mixed encodings
|
||||
var options = new ReaderOptions
|
||||
{
|
||||
ArchiveEncoding = new ArchiveEncoding
|
||||
{
|
||||
CustomDecoder = (data, offset, length) =>
|
||||
{
|
||||
// Try multiple encodings in priority order
|
||||
var bytes = new byte[length];
|
||||
Array.Copy(data, offset, bytes, 0, length);
|
||||
|
||||
foreach (var encoding in new[]
|
||||
{
|
||||
Encoding.UTF8,
|
||||
Encoding.GetEncoding(932),
|
||||
Encoding.GetEncoding("iso-8859-1")
|
||||
})
|
||||
{
|
||||
try
|
||||
{
|
||||
var str = encoding.GetString(bytes);
|
||||
if (!str.Contains('\uFFFD'))
|
||||
return str;
|
||||
}
|
||||
catch { }
|
||||
}
|
||||
|
||||
// Final fallback
|
||||
return Encoding.GetEncoding("iso-8859-1").GetString(bytes);
|
||||
}
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Encoding Reference Table
|
||||
|
||||
| Encoding | Code | Use Case |
|
||||
|----------|------|----------|
|
||||
| UTF-8 | (default) | Modern archives, recommended |
|
||||
| cp932 | 932 | Japanese Windows |
|
||||
| gb2312 | "gb2312" | Simplified Chinese |
|
||||
| gbk | "gbk" | Extended Simplified Chinese |
|
||||
| big5 | "big5" | Traditional Chinese |
|
||||
| iso-8859-1 | "iso-8859-1" | Western European |
|
||||
| cp1252 | "cp1252" | Windows Western European |
|
||||
| cp1251 | "cp1251" | Russian/Cyrillic |
|
||||
| euc-jp | "euc-jp" | Japanese Unix |
|
||||
| euc-kr | "euc-kr" | Korean |
|
||||
|
||||
---
|
||||
|
||||
## Best Practices
|
||||
|
||||
1. **Use UTF-8 for new archives** - Most modern systems support it
|
||||
2. **Ask the archive creator** - When receiving archives with corrupted names
|
||||
3. **Provide encoding options** - If your app handles user archives (see the sketch after this list)
|
||||
4. **Document your assumption** - Tell users what encoding you're using
|
||||
5. **Test with international files** - Before releasing production code
|
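For point 3, one possible shape is a small helper that turns a user-supplied encoding name into `ReaderOptions`, falling back to UTF-8. This is a sketch; the helper name and fallback behaviour are illustrative, not a SharpCompress API.

```csharp
// Sketch: build ReaderOptions from a user-chosen encoding name, defaulting to UTF-8.
static ReaderOptions BuildReaderOptions(string? encodingName)
{
    var encoding = Encoding.UTF8;
    if (!string.IsNullOrEmpty(encodingName))
    {
        try
        {
            encoding = Encoding.GetEncoding(encodingName);
        }
        catch (ArgumentException)
        {
            // Unknown encoding name: keep the UTF-8 default.
        }
    }
    return new ReaderOptions
    {
        ArchiveEncoding = new ArchiveEncoding { Default = encoding }
    };
}
```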
||||
|
||||
---
|
||||
|
||||
## Related Documentation
|
||||
|
||||
- [USAGE.md](USAGE.md#extract-zip-which-has-non-utf8-encoded-filenamecp932) - Usage examples
|
||||
474
docs/PERFORMANCE.md
Normal file
@@ -0,0 +1,474 @@
|
||||
# SharpCompress Performance Guide
|
||||
|
||||
This guide helps you optimize SharpCompress for performance in various scenarios.
|
||||
|
||||
## API Selection Guide
|
||||
|
||||
### Archive API vs Reader API
|
||||
|
||||
Choose the right API based on your use case:
|
||||
|
||||
| Aspect | Archive API | Reader API |
|
||||
|--------|------------|-----------|
|
||||
| **Stream Type** | Seekable only | Non-seekable OK |
|
||||
| **Memory Usage** | All entries in memory | One entry at a time |
|
||||
| **Random Access** | ✓ Yes | ✗ No |
|
||||
| **Best For** | Small-to-medium archives | Large or streaming data |
|
||||
| **Performance** | Fast for random access | Better for large files |
|
||||
|
||||
### Archive API (Fast for Random Access)
|
||||
|
||||
```csharp
|
||||
// Use when:
|
||||
// - Archive fits in memory
|
||||
// - You need random access to entries
|
||||
// - Stream is seekable (file, MemoryStream)
|
||||
|
||||
using (var archive = ZipArchive.OpenArchive("archive.zip"))
|
||||
{
|
||||
// Random access - all entries available
|
||||
var specific = archive.Entries.FirstOrDefault(e => e.Key == "file.txt");
|
||||
if (specific != null)
|
||||
{
|
||||
specific.WriteToFile(@"C:\output\file.txt");
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Performance Characteristics:**
|
||||
- ✓ Instant entry lookup
|
||||
- ✓ Parallel extraction possible
|
||||
- ✗ Entire archive in memory
|
||||
- ✗ Can't process while downloading
|
||||
|
||||
### Reader API (Best for Large Files)
|
||||
|
||||
```csharp
|
||||
// Use when:
|
||||
// - Processing large archives (>100 MB)
|
||||
// - Streaming from network/pipe
|
||||
// - Memory is constrained
|
||||
// - Forward-only processing is acceptable
|
||||
|
||||
using (var stream = File.OpenRead("large.zip"))
|
||||
using (var reader = ReaderFactory.OpenReader(stream))
|
||||
{
|
||||
while (reader.MoveToNextEntry())
|
||||
{
|
||||
// Process one entry at a time
|
||||
reader.WriteEntryToDirectory(@"C:\output");
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Performance Characteristics:**
|
||||
- ✓ Minimal memory footprint
|
||||
- ✓ Works with non-seekable streams
|
||||
- ✓ Can process while downloading
|
||||
- ✗ Forward-only (no random access)
|
||||
- ✗ Entry lookup requires iteration
|
||||
|
||||
---
|
||||
|
||||
## Buffer Sizing
|
||||
|
||||
### Understanding Buffers
|
||||
|
||||
SharpCompress uses internal buffers for reading compressed data. Buffer size affects:
|
||||
- **Speed:** Larger buffers = fewer I/O operations = faster
|
||||
- **Memory:** Larger buffers = higher memory usage
|
||||
|
||||
### Recommended Buffer Sizes
|
||||
|
||||
| Scenario | Size | Notes |
|
||||
|----------|------|-------|
|
||||
| Embedded/IoT devices | 4-8 KB | Minimal memory usage |
|
||||
| Memory-constrained | 16-32 KB | Conservative default |
|
||||
| Standard use (default) | 64 KB | Recommended default |
|
||||
| Large file streaming | 256 KB | Better throughput |
|
||||
| High-speed SSD | 512 KB - 1 MB | Maximum throughput |
|
||||
|
||||
### How Buffer Size Affects Performance
|
||||
|
||||
```csharp
|
||||
// SharpCompress manages buffers internally
|
||||
// You can't directly set buffer size, but you can:
|
||||
|
||||
// 1. Use Stream.CopyTo with explicit buffer size
|
||||
using (var entryStream = reader.OpenEntryStream())
|
||||
using (var fileStream = File.Create(@"C:\output\file.txt"))
|
||||
{
|
||||
// 64 KB buffer (default)
|
||||
entryStream.CopyTo(fileStream);
|
||||
|
||||
// Or specify larger buffer for faster copy
|
||||
entryStream.CopyTo(fileStream, bufferSize: 262144); // 256 KB
|
||||
}
|
||||
|
||||
// 2. Use custom buffer for writing
|
||||
using (var entryStream = reader.OpenEntryStream())
|
||||
using (var fileStream = File.Create(@"C:\output\file.txt"))
|
||||
{
|
||||
byte[] buffer = new byte[262144]; // 256 KB
|
||||
int bytesRead;
|
||||
while ((bytesRead = entryStream.Read(buffer, 0, buffer.Length)) > 0)
|
||||
{
|
||||
fileStream.Write(buffer, 0, bytesRead);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Streaming Large Files
|
||||
|
||||
### Non-Seekable Stream Patterns
|
||||
|
||||
For processing archives from downloads or pipes:
|
||||
|
||||
```csharp
|
||||
// Download stream (non-seekable)
|
||||
using (var httpStream = await httpClient.GetStreamAsync(url))
|
||||
using (var reader = ReaderFactory.OpenReader(httpStream))
|
||||
{
|
||||
// Process entries as they arrive
|
||||
while (reader.MoveToNextEntry())
|
||||
{
|
||||
if (!reader.Entry.IsDirectory)
|
||||
{
|
||||
reader.WriteEntryToDirectory(@"C:\output");
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Performance Tips:**
|
||||
- Don't try to buffer the entire stream
|
||||
- Process entries immediately
|
||||
- Use async APIs for better responsiveness (see the sketch after this list)
|
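For the async point above, extraction during download might look like the following sketch; it assumes `httpClient` and `url` from the earlier example and the `WriteAllToDirectoryAsync` method shown in USAGE.md (its options argument is assumed to mirror the sync counterpart).

```csharp
// Sketch: extract entries as they arrive, without buffering the whole download.
using (var httpStream = await httpClient.GetStreamAsync(url))
using (var reader = ReaderFactory.OpenReader(httpStream))
{
    await reader.WriteAllToDirectoryAsync(
        @"C:\output",
        new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
    );
}
```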
||||
|
||||
### Download-Then-Extract vs Streaming
|
||||
|
||||
Choose based on your constraints:
|
||||
|
||||
| Approach | When to Use |
|
||||
|----------|------------|
|
||||
| **Download then extract** | Moderate size, need random access |
|
||||
| **Stream during download** | Large files, bandwidth limited, memory constrained |
|
||||
|
||||
```csharp
|
||||
// Download then extract (requires disk space)
|
||||
var archivePath = await DownloadFile(url, @"C:\temp\archive.zip");
|
||||
using (var archive = ZipArchive.OpenArchive(archivePath))
|
||||
{
|
||||
archive.WriteToDirectory(@"C:\output");
|
||||
}
|
||||
|
||||
// Stream during download (on-the-fly extraction)
|
||||
using (var httpStream = await httpClient.GetStreamAsync(url))
|
||||
using (var reader = ReaderFactory.OpenReader(httpStream))
|
||||
{
|
||||
while (reader.MoveToNextEntry())
|
||||
{
|
||||
reader.WriteEntryToDirectory(@"C:\output");
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Solid Archive Optimization
|
||||
|
||||
### Why Solid Archives Are Slow
|
||||
|
||||
Solid archives (Rar, 7Zip) group files together in a single compressed stream:
|
||||
|
||||
```
|
||||
Solid Archive Layout:
|
||||
[Header] [Compressed Stream] [Footer]
|
||||
├─ File1 compressed data
|
||||
├─ File2 compressed data
|
||||
├─ File3 compressed data
|
||||
└─ File4 compressed data
|
||||
```
|
||||
|
||||
Extracting File3 requires decompressing File1 and File2 first.
|
||||
|
||||
### Sequential vs Random Extraction
|
||||
|
||||
**Random Extraction (Slow):**
|
||||
```csharp
|
||||
using (var archive = RarArchive.OpenArchive("solid.rar"))
|
||||
{
|
||||
foreach (var entry in archive.Entries)
|
||||
{
|
||||
entry.WriteToFile(@"C:\output\" + entry.Key); // ✗ Slow!
|
||||
// Each entry triggers full decompression from start
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Sequential Extraction (Fast):**
|
||||
```csharp
|
||||
using (var archive = RarArchive.OpenArchive("solid.rar"))
|
||||
{
|
||||
// Method 1: Use WriteToDirectory (recommended)
|
||||
archive.WriteToDirectory(@"C:\output", new ExtractionOptions
|
||||
{
|
||||
ExtractFullPath = true,
|
||||
Overwrite = true
|
||||
});
|
||||
|
||||
// Method 2: Use ExtractAllEntries
|
||||
archive.ExtractAllEntries();
|
||||
|
||||
// Method 3: Use Reader API (also sequential)
|
||||
using (var reader = RarReader.Open(File.OpenRead("solid.rar")))
|
||||
{
|
||||
while (reader.MoveToNextEntry())
|
||||
{
|
||||
reader.WriteEntryToDirectory(@"C:\output");
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Performance Impact:**
|
||||
- Random extraction: O(n²) - very slow for many files
|
||||
- Sequential extraction: O(n) - 10-100x faster
|
||||
|
||||
### Best Practices for Solid Archives
|
||||
|
||||
1. **Always extract sequentially** when possible
|
||||
2. **Use Reader API** for large solid archives
|
||||
3. **Process entries in order** from the archive
|
||||
4. **Consider using 7Zip command-line** for scripted extractions
|
||||
|
||||
---
|
||||
|
||||
## Compression Level Trade-offs
|
||||
|
||||
### Deflate/GZip Levels
|
||||
|
||||
```csharp
|
||||
// Level 1 = Fastest, largest size
|
||||
// Level 6 = Default (balanced)
|
||||
// Level 9 = Slowest, best compression
|
||||
|
||||
// Write with different compression levels
|
||||
using (var archive = ZipArchive.CreateArchive())
|
||||
{
|
||||
archive.AddAllFromDirectory(@"D:\data");
|
||||
|
||||
// Fast compression (level 1)
|
||||
archive.SaveTo("fast.zip", new WriterOptions(CompressionType.Deflate)
|
||||
{
|
||||
CompressionLevel = 1
|
||||
});
|
||||
|
||||
// Default compression (level 6)
|
||||
archive.SaveTo("default.zip", CompressionType.Deflate);
|
||||
|
||||
// Best compression (level 9)
|
||||
archive.SaveTo("best.zip", new WriterOptions(CompressionType.Deflate)
|
||||
{
|
||||
CompressionLevel = 9
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
**Speed vs Size:**
|
||||
| Level | Speed | Size | Use Case |
|
||||
|-------|-------|------|----------|
|
||||
| 1 | 10x | 90% | Network, streaming |
|
||||
| 6 | 1x | 75% | Default (good balance) |
|
||||
| 9 | 0.1x | 65% | Archival, static storage |
|
||||
|
||||
### BZip2 Block Size
|
||||
|
||||
```csharp
|
||||
// BZip2 block size affects memory and compression
|
||||
// 100K to 900K (default 900K)
|
||||
|
||||
// Smaller block size = lower memory, faster
|
||||
// Larger block size = better compression, slower
|
||||
|
||||
using (var archive = TarArchive.CreateArchive())
|
||||
{
|
||||
archive.AddAllFromDirectory(@"D:\data");
|
||||
|
||||
// These are preset in WriterOptions via CompressionLevel
|
||||
archive.SaveTo("archive.tar.bz2", CompressionType.BZip2);
|
||||
}
|
||||
```
|
||||
|
||||
### LZMA Settings
|
||||
|
||||
LZMA compression is very powerful but memory-intensive:
|
||||
|
||||
```csharp
|
||||
// LZMA (7Zip, .tar.lzma):
|
||||
// - Dictionary size: 16 KB to 1 GB (default 32 MB)
|
||||
// - Faster preset: smaller dictionary
|
||||
// - Better compression: larger dictionary
|
||||
|
||||
// Preset via CompressionType
|
||||
using (var archive = TarArchive.CreateArchive())
|
||||
{
|
||||
archive.AddAllFromDirectory(@"D:\data");
|
||||
archive.SaveTo("archive.tar.xz", CompressionType.LZMA); // Default settings
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Async Performance
|
||||
|
||||
### When Async Helps
|
||||
|
||||
Async is beneficial when:
|
||||
- **Long I/O operations** (network, slow disks)
|
||||
- **UI responsiveness** needed (Windows Forms, WPF, Blazor)
|
||||
- **Server applications** (ASP.NET, multiple concurrent operations)
|
||||
|
||||
```csharp
|
||||
// Async extraction (non-blocking)
|
||||
using (var archive = ZipArchive.OpenArchive("archive.zip"))
|
||||
{
|
||||
await archive.WriteToDirectoryAsync(
|
||||
@"C:\output",
|
||||
new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
|
||||
cancellationToken
|
||||
);
|
||||
}
|
||||
// Thread can handle other work while I/O happens
|
||||
```
|
||||
|
||||
### When Async Doesn't Help
|
||||
|
||||
Async doesn't improve performance for:
|
||||
- **CPU-bound operations** (already fast)
|
||||
- **Local SSD I/O** (I/O is fast enough)
|
||||
- **Single-threaded scenarios** (no parallelism benefit)
|
||||
|
||||
```csharp
|
||||
// Sync extraction (simpler, same performance on fast I/O)
|
||||
using (var archive = ZipArchive.OpenArchive("archive.zip"))
|
||||
{
|
||||
archive.WriteToDirectory(
|
||||
@"C:\output",
|
||||
new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
|
||||
);
|
||||
}
|
||||
// Simple and fast - no async needed
|
||||
```
|
||||
|
||||
### Cancellation Pattern
|
||||
|
||||
```csharp
|
||||
var cts = new CancellationTokenSource();
|
||||
|
||||
// Cancel after 5 minutes
|
||||
cts.CancelAfter(TimeSpan.FromMinutes(5));
|
||||
|
||||
try
|
||||
{
|
||||
using (var archive = ZipArchive.OpenArchive("archive.zip"))
|
||||
{
|
||||
await archive.WriteToDirectoryAsync(
|
||||
@"C:\output",
|
||||
new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
|
||||
cts.Token
|
||||
);
|
||||
}
|
||||
}
|
||||
catch (OperationCanceledException)
|
||||
{
|
||||
Console.WriteLine("Extraction cancelled");
|
||||
// Clean up partial extraction if needed
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Practical Performance Tips
|
||||
|
||||
### 1. Choose the Right API
|
||||
|
||||
| Scenario | API | Why |
|
||||
|----------|-----|-----|
|
||||
| Small archives | Archive | Faster random access |
|
||||
| Large archives | Reader | Lower memory |
|
||||
| Streaming | Reader | Works on non-seekable streams |
|
||||
| Download streams | Reader | Async extraction while downloading |
|
||||
|
||||
### 2. Batch Operations
|
||||
|
||||
```csharp
|
||||
// ✗ Slow - re-opens the same archive once per entry (entryNames: the entries you want)
|
||||
foreach (var entryName in entryNames)
|
||||
{
|
||||
using (var archive = ZipArchive.OpenArchive("archive.zip"))
|
||||
{
|
||||
        archive.Entries.First(e => e.Key == entryName).WriteToFile(Path.Combine(@"C:\output", entryName));
|
||||
}
|
||||
}
|
||||
|
||||
// ✓ Better - open once and process all entries in a single pass
|
||||
using (var archive = ZipArchive.OpenArchive("archive.zip"))
|
||||
{
|
||||
archive.WriteToDirectory(@"C:\output");
|
||||
}
|
||||
```
|
||||
|
||||
### 3. Profile Your Code
|
||||
|
||||
```csharp
|
||||
var sw = Stopwatch.StartNew();
|
||||
using (var archive = ZipArchive.OpenArchive("large.zip"))
|
||||
{
|
||||
archive.WriteToDirectory(@"C:\output");
|
||||
}
|
||||
sw.Stop();
|
||||
|
||||
Console.WriteLine($"Extraction took {sw.ElapsedMilliseconds}ms");
|
||||
|
||||
// Measure memory before/after
|
||||
var beforeMem = GC.GetTotalMemory(true);
|
||||
// ... do work ...
|
||||
var afterMem = GC.GetTotalMemory(true);
|
||||
Console.WriteLine($"Memory used: {(afterMem - beforeMem) / 1024 / 1024}MB");
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Troubleshooting Performance
|
||||
|
||||
### Extraction is Slow
|
||||
|
||||
1. **Check if solid archive** → Use sequential extraction
|
||||
2. **Check API** → Reader API might be faster for large files
|
||||
3. **Check compression level** → Higher levels are slower to decompress
|
||||
4. **Check I/O** → Network drives are much slower than SSD
|
||||
5. **Check buffer size** → May need larger buffers for network
|
||||
|
||||
### High Memory Usage
|
||||
|
||||
1. **Use Reader API** instead of Archive API
|
||||
2. **Process entries immediately** rather than buffering
|
||||
3. **Reduce compression level** if writing
|
||||
4. **Check for memory leaks** in your code
|
||||
|
||||
### CPU Usage at 100%
|
||||
|
||||
1. **Normal for compression** - especially with high compression levels
|
||||
2. **Consider lower level** for faster processing
|
||||
3. **Reduce parallelism** if processing multiple archives
|
||||
4. **Check if awaiting properly** in async code
|
||||
|
||||
---
|
||||
|
||||
## Related Documentation
|
||||
|
||||
- [USAGE.md](USAGE.md) - Usage examples with performance considerations
|
||||
- [FORMATS.md](FORMATS.md) - Format-specific performance notes
|
||||
@@ -1,6 +1,6 @@
|
||||
# SharpCompress Usage
|
||||
|
||||
## Async/Await Support
|
||||
## Async/Await Support (Beta)
|
||||
|
||||
SharpCompress now provides full async/await support for all I/O operations. All `Read`, `Write`, and extraction operations have async equivalents ending in `Async` that accept an optional `CancellationToken`. This enables better performance and scalability for I/O-bound operations.
|
||||
|
||||
@@ -13,7 +13,7 @@ SharpCompress now provides full async/await support for all I/O operations. All
|
||||
|
||||
See [Async Examples](#async-examples) section below for usage patterns.
|
||||
|
||||
## Stream Rules (changed with 0.21)
|
||||
## Stream Rules
|
||||
|
||||
When dealing with Streams, the rule should be that you don't close a stream you didn't create. This, in effect, should mean you should always put a Stream in a using block to dispose it.
|
||||
|
||||
@@ -48,7 +48,7 @@ Also, look over the tests for more thorough [examples](https://github.com/adamha
|
||||
|
||||
### Create Zip Archive from multiple files
|
||||
```C#
|
||||
using(var archive = ZipArchive.Create())
|
||||
using(var archive = ZipArchive.CreateArchive())
|
||||
{
|
||||
archive.AddEntry("file01.txt", "C:\\file01.txt");
|
||||
archive.AddEntry("file02.txt", "C:\\file02.txt");
|
||||
@@ -61,7 +61,7 @@ using(var archive = ZipArchive.Create())
|
||||
### Create Zip Archive from all files in a directory to a file
|
||||
|
||||
```C#
|
||||
using (var archive = ZipArchive.Create())
|
||||
using (var archive = ZipArchive.CreateArchive())
|
||||
{
|
||||
archive.AddAllFromDirectory("D:\\temp");
|
||||
archive.SaveTo("C:\\temp.zip", CompressionType.Deflate);
|
||||
@@ -72,7 +72,7 @@ using (var archive = ZipArchive.Create())
|
||||
|
||||
```C#
|
||||
var memoryStream = new MemoryStream();
|
||||
using (var archive = ZipArchive.Create())
|
||||
using (var archive = ZipArchive.CreateArchive())
|
||||
{
|
||||
archive.AddAllFromDirectory("D:\\temp");
|
||||
archive.SaveTo(memoryStream, new WriterOptions(CompressionType.Deflate)
|
||||
@@ -90,7 +90,7 @@ Note: Extracting a solid rar or 7z file needs to be done in sequential order to
|
||||
`ExtractAllEntries` is primarily intended for solid archives (like solid Rar) or 7Zip archives, where sequential extraction provides the best performance. For general/simple extraction with any supported archive type, use `archive.WriteToDirectory()` instead.
|
||||
|
||||
```C#
|
||||
using (var archive = RarArchive.Open("Test.rar"))
|
||||
using (var archive = RarArchive.OpenArchive("Test.rar"))
|
||||
{
|
||||
// Simple extraction with RarArchive; this WriteToDirectory pattern works for all archive types
|
||||
archive.WriteToDirectory(@"D:\temp", new ExtractionOptions()
|
||||
@@ -104,7 +104,7 @@ using (var archive = RarArchive.Open("Test.rar"))
|
||||
### Iterate over all files from a Rar file using RarArchive
|
||||
|
||||
```C#
|
||||
using (var archive = RarArchive.Open("Test.rar"))
|
||||
using (var archive = RarArchive.OpenArchive("Test.rar"))
|
||||
{
|
||||
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
|
||||
{
|
||||
@@ -126,7 +126,7 @@ var progress = new Progress<ProgressReport>(report =>
|
||||
Console.WriteLine($"Extracting {report.EntryPath}: {report.PercentComplete}%");
|
||||
});
|
||||
|
||||
using (var archive = RarArchive.Open("archive.rar", new ReaderOptions { Progress = progress })) // Must be solid Rar or 7Zip
|
||||
using (var archive = RarArchive.OpenArchive("archive.rar", new ReaderOptions { Progress = progress })) // Must be solid Rar or 7Zip
|
||||
{
|
||||
archive.WriteToDirectory(@"D:\output", new ExtractionOptions()
|
||||
{
|
||||
@@ -140,7 +140,7 @@ using (var archive = RarArchive.Open("archive.rar", new ReaderOptions { Progress
|
||||
|
||||
```C#
|
||||
using (Stream stream = File.OpenRead("Tar.tar.bz2"))
|
||||
using (var reader = ReaderFactory.Open(stream))
|
||||
using (var reader = ReaderFactory.OpenReader(stream))
|
||||
{
|
||||
while (reader.MoveToNextEntry())
|
||||
{
|
||||
@@ -161,7 +161,7 @@ using (var reader = ReaderFactory.Open(stream))
|
||||
|
||||
```C#
|
||||
using (Stream stream = File.OpenRead("Tar.tar.bz2"))
|
||||
using (var reader = ReaderFactory.Open(stream))
|
||||
using (var reader = ReaderFactory.OpenReader(stream))
|
||||
{
|
||||
while (reader.MoveToNextEntry())
|
||||
{
|
||||
@@ -180,7 +180,7 @@ using (var reader = ReaderFactory.Open(stream))
|
||||
|
||||
```C#
|
||||
using (Stream stream = File.OpenWrite("C:\\temp.tgz"))
|
||||
using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, new WriterOptions(CompressionType.GZip)
|
||||
using (var writer = WriterFactory.OpenWriter(stream, ArchiveType.Tar, new WriterOptions(CompressionType.GZip)
|
||||
{
|
||||
LeaveOpenStream = true
|
||||
}))
|
||||
@@ -199,7 +199,7 @@ opts.ArchiveEncoding.CustomDecoder = (data, x, y) =>
|
||||
{
|
||||
return encoding.GetString(data);
|
||||
};
|
||||
var tr = SharpCompress.Archives.Zip.ZipArchive.Open("test.zip", opts);
|
||||
var tr = SharpCompress.Archives.Zip.ZipArchive.OpenArchive("test.zip", opts);
|
||||
foreach(var entry in tr.Entries)
|
||||
{
|
||||
Console.WriteLine($"{entry.Key}");
|
||||
@@ -213,7 +213,7 @@ foreach(var entry in tr.Entries)
|
||||
**Extract single entry asynchronously:**
|
||||
```C#
|
||||
using (Stream stream = File.OpenRead("archive.zip"))
|
||||
using (var reader = ReaderFactory.Open(stream))
|
||||
using (var reader = ReaderFactory.OpenReader(stream))
|
||||
{
|
||||
while (reader.MoveToNextEntry())
|
||||
{
|
||||
@@ -234,7 +234,7 @@ using (var reader = ReaderFactory.Open(stream))
|
||||
**Extract all entries asynchronously:**
|
||||
```C#
|
||||
using (Stream stream = File.OpenRead("archive.tar.gz"))
|
||||
using (var reader = ReaderFactory.Open(stream))
|
||||
using (var reader = ReaderFactory.OpenReader(stream))
|
||||
{
|
||||
await reader.WriteAllToDirectoryAsync(
|
||||
@"D:\temp",
|
||||
@@ -250,7 +250,7 @@ using (var reader = ReaderFactory.Open(stream))
|
||||
|
||||
**Open and process entry stream asynchronously:**
|
||||
```C#
|
||||
using (var archive = ZipArchive.Open("archive.zip"))
|
||||
using (var archive = ZipArchive.OpenArchive("archive.zip"))
|
||||
{
|
||||
foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
|
||||
{
|
||||
@@ -268,7 +268,7 @@ using (var archive = ZipArchive.Open("archive.zip"))
|
||||
**Write single file asynchronously:**
|
||||
```C#
|
||||
using (Stream archiveStream = File.OpenWrite("output.zip"))
|
||||
using (var writer = WriterFactory.Open(archiveStream, ArchiveType.Zip, CompressionType.Deflate))
|
||||
using (var writer = WriterFactory.OpenWriter(archiveStream, ArchiveType.Zip, CompressionType.Deflate))
|
||||
{
|
||||
using (Stream fileStream = File.OpenRead("input.txt"))
|
||||
{
|
||||
@@ -280,7 +280,7 @@ using (var writer = WriterFactory.Open(archiveStream, ArchiveType.Zip, Compressi
|
||||
**Write entire directory asynchronously:**
|
||||
```C#
|
||||
using (Stream stream = File.OpenWrite("backup.tar.gz"))
|
||||
using (var writer = WriterFactory.Open(stream, ArchiveType.Tar, new WriterOptions(CompressionType.GZip)))
|
||||
using (var writer = WriterFactory.OpenWriter(stream, ArchiveType.Tar, new WriterOptions(CompressionType.GZip)))
|
||||
{
|
||||
await writer.WriteAllAsync(
|
||||
@"D:\files",
|
||||
@@ -299,7 +299,7 @@ var cts = new CancellationTokenSource();
|
||||
cts.CancelAfter(TimeSpan.FromMinutes(5));
|
||||
|
||||
using (Stream stream = File.OpenWrite("archive.zip"))
|
||||
using (var writer = WriterFactory.Open(stream, ArchiveType.Zip, CompressionType.Deflate))
|
||||
using (var writer = WriterFactory.OpenWriter(stream, ArchiveType.Zip, CompressionType.Deflate))
|
||||
{
|
||||
try
|
||||
{
|
||||
@@ -316,7 +316,7 @@ using (var writer = WriterFactory.Open(stream, ArchiveType.Zip, CompressionType.
|
||||
|
||||
**Extract from archive asynchronously:**
|
||||
```C#
|
||||
using (var archive = ZipArchive.Open("archive.zip"))
|
||||
using (var archive = ZipArchive.OpenArchive("archive.zip"))
|
||||
{
|
||||
// Simple async extraction - works for all archive types
|
||||
await archive.WriteToDirectoryAsync(
|
||||
@@ -1,7 +1,7 @@
|
||||
// Copyright (c) Six Labors.
|
||||
// Licensed under the Apache License, Version 2.0.
|
||||
|
||||
#if !NETSTANDARD2_0 && !NETSTANDARD2_1 && !NETFRAMEWORK
|
||||
#if !LEGACY_DOTNET
|
||||
#define SUPPORTS_RUNTIME_INTRINSICS
|
||||
#define SUPPORTS_HOTPATH
|
||||
#endif
|
||||
|
||||
103
src/SharpCompress/Archives/AbstractArchive.Async.cs
Normal file
@@ -0,0 +1,103 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Archives;
|
||||
|
||||
public abstract partial class AbstractArchive<TEntry, TVolume>
|
||||
where TEntry : IArchiveEntry
|
||||
where TVolume : IVolume
|
||||
{
|
||||
#region Async Support
|
||||
|
||||
// Async properties
|
||||
public virtual IAsyncEnumerable<TEntry> EntriesAsync => _lazyEntriesAsync;
|
||||
|
||||
public IAsyncEnumerable<TVolume> VolumesAsync => _lazyVolumesAsync;
|
||||
|
||||
protected virtual async IAsyncEnumerable<TEntry> LoadEntriesAsync(
|
||||
IAsyncEnumerable<TVolume> volumes
|
||||
)
|
||||
{
|
||||
foreach (var item in LoadEntries(await volumes.ToListAsync()))
|
||||
{
|
||||
yield return item;
|
||||
}
|
||||
}
|
||||
|
||||
public virtual async ValueTask DisposeAsync()
|
||||
{
|
||||
if (!_disposed)
|
||||
{
|
||||
await foreach (var v in _lazyVolumesAsync)
|
||||
{
|
||||
v.Dispose();
|
||||
}
|
||||
foreach (var v in _lazyEntriesAsync.GetLoaded().Cast<Entry>())
|
||||
{
|
||||
v.Close();
|
||||
}
|
||||
_sourceStream?.Dispose();
|
||||
|
||||
_disposed = true;
|
||||
}
|
||||
}
|
||||
|
||||
private async ValueTask EnsureEntriesLoadedAsync()
|
||||
{
|
||||
await _lazyEntriesAsync.EnsureFullyLoaded();
|
||||
await _lazyVolumesAsync.EnsureFullyLoaded();
|
||||
}
|
||||
|
||||
private async IAsyncEnumerable<IArchiveEntry> EntriesAsyncCast()
|
||||
{
|
||||
await foreach (var entry in EntriesAsync)
|
||||
{
|
||||
yield return entry;
|
||||
}
|
||||
}
|
||||
|
||||
IAsyncEnumerable<IArchiveEntry> IAsyncArchive.EntriesAsync => EntriesAsyncCast();
|
||||
|
||||
IAsyncEnumerable<IVolume> IAsyncArchive.VolumesAsync => VolumesAsyncCast();
|
||||
|
||||
private async IAsyncEnumerable<IVolume> VolumesAsyncCast()
|
||||
{
|
||||
await foreach (var volume in _lazyVolumesAsync)
|
||||
{
|
||||
yield return volume;
|
||||
}
|
||||
}
|
||||
|
||||
public async ValueTask<IAsyncReader> ExtractAllEntriesAsync()
|
||||
{
|
||||
if (!await IsSolidAsync() && Type != ArchiveType.SevenZip)
|
||||
{
|
||||
throw new SharpCompressException(
|
||||
"ExtractAllEntries can only be used on solid archives or 7Zip archives (which require random access)."
|
||||
);
|
||||
}
|
||||
await EnsureEntriesLoadedAsync();
|
||||
return await CreateReaderForSolidExtractionAsync();
|
||||
}
|
||||
|
||||
public virtual ValueTask<bool> IsSolidAsync() => new(false);
|
||||
|
||||
public async ValueTask<bool> IsCompleteAsync()
|
||||
{
|
||||
await EnsureEntriesLoadedAsync();
|
||||
return await EntriesAsync.AllAsync(x => x.IsComplete);
|
||||
}
|
||||
|
||||
public async ValueTask<long> TotalSizeAsync() =>
|
||||
await EntriesAsync.AggregateAsync(0L, (total, cf) => total + cf.CompressedSize);
|
||||
|
||||
public async ValueTask<long> TotalUncompressedSizeAsync() =>
|
||||
await EntriesAsync.AggregateAsync(0L, (total, cf) => total + cf.Size);
|
||||
|
||||
public ValueTask<bool> IsEncryptedAsync() => new(IsEncrypted);
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -1,14 +1,13 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Archives;
|
||||
|
||||
public abstract class AbstractArchive<TEntry, TVolume> : IArchive
|
||||
public abstract partial class AbstractArchive<TEntry, TVolume> : IArchive, IAsyncArchive
|
||||
where TEntry : IArchiveEntry
|
||||
where TVolume : IVolume
|
||||
{
|
||||
@@ -17,6 +16,10 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive
|
||||
private bool _disposed;
|
||||
private readonly SourceStream? _sourceStream;
|
||||
|
||||
// Async fields - kept in original file per refactoring rules
|
||||
private readonly LazyAsyncReadOnlyCollection<TVolume> _lazyVolumesAsync;
|
||||
private readonly LazyAsyncReadOnlyCollection<TEntry> _lazyEntriesAsync;
|
||||
|
||||
protected ReaderOptions ReaderOptions { get; }
|
||||
|
||||
internal AbstractArchive(ArchiveType type, SourceStream sourceStream)
|
||||
@@ -26,6 +29,12 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive
|
||||
_sourceStream = sourceStream;
|
||||
_lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(_sourceStream));
|
||||
_lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
|
||||
_lazyVolumesAsync = new LazyAsyncReadOnlyCollection<TVolume>(
|
||||
LoadVolumesAsync(_sourceStream)
|
||||
);
|
||||
_lazyEntriesAsync = new LazyAsyncReadOnlyCollection<TEntry>(
|
||||
LoadEntriesAsync(_lazyVolumesAsync)
|
||||
);
|
||||
}
|
||||
|
||||
internal AbstractArchive(ArchiveType type)
|
||||
@@ -34,19 +43,16 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive
|
||||
ReaderOptions = new();
|
||||
_lazyVolumes = new LazyReadOnlyCollection<TVolume>(Enumerable.Empty<TVolume>());
|
||||
_lazyEntries = new LazyReadOnlyCollection<TEntry>(Enumerable.Empty<TEntry>());
|
||||
_lazyVolumesAsync = new LazyAsyncReadOnlyCollection<TVolume>(
|
||||
AsyncEnumerableEx.Empty<TVolume>()
|
||||
);
|
||||
_lazyEntriesAsync = new LazyAsyncReadOnlyCollection<TEntry>(
|
||||
AsyncEnumerableEx.Empty<TEntry>()
|
||||
);
|
||||
}
|
||||
|
||||
public ArchiveType Type { get; }
|
||||
|
||||
private static Stream CheckStreams(Stream stream)
|
||||
{
|
||||
if (!stream.CanSeek || !stream.CanRead)
|
||||
{
|
||||
throw new ArchiveException("Archive streams must be Readable and Seekable");
|
||||
}
|
||||
return stream;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Returns an ReadOnlyCollection of all the RarArchiveEntries across the one or many parts of the RarArchive.
|
||||
/// </summary>
|
||||
@@ -66,12 +72,15 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive
|
||||
/// <summary>
|
||||
/// The total size of the files as uncompressed in the archive.
|
||||
/// </summary>
|
||||
public virtual long TotalUncompressSize =>
|
||||
public virtual long TotalUncompressedSize =>
|
||||
Entries.Aggregate(0L, (total, cf) => total + cf.Size);
|
||||
|
||||
protected abstract IEnumerable<TVolume> LoadVolumes(SourceStream sourceStream);
|
||||
protected abstract IEnumerable<TEntry> LoadEntries(IEnumerable<TVolume> volumes);
|
||||
|
||||
protected virtual IAsyncEnumerable<TVolume> LoadVolumesAsync(SourceStream sourceStream) =>
|
||||
LoadVolumes(sourceStream).ToAsyncEnumerable();
|
||||
|
||||
IEnumerable<IArchiveEntry> IArchive.Entries => Entries.Cast<IArchiveEntry>();
|
||||
|
||||
IEnumerable<IVolume> IArchive.Volumes => _lazyVolumes.Cast<IVolume>();
|
||||
@@ -118,6 +127,7 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive
|
||||
}
|
||||
|
||||
protected abstract IReader CreateReaderForSolidExtraction();
|
||||
protected abstract ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync();
|
||||
|
||||
/// <summary>
|
||||
/// Archive is SOLID (this means the Archive saved bytes by reusing information which helps for archives containing many small files).
|
||||
|
||||
123
src/SharpCompress/Archives/AbstractWritableArchive.Async.cs
Normal file
@@ -0,0 +1,123 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Writers;

namespace SharpCompress.Archives;

public abstract partial class AbstractWritableArchive<TEntry, TVolume>
    where TEntry : IArchiveEntry
    where TVolume : IVolume
{
    // Async property moved from main file
    private IAsyncEnumerable<TEntry> OldEntriesAsync =>
        base.EntriesAsync.Where(x => !removedEntries.Contains(x));

    private async ValueTask RebuildModifiedCollectionAsync()
    {
        if (pauseRebuilding)
        {
            return;
        }
        hasModifications = true;
        newEntries.RemoveAll(v => removedEntries.Contains(v));
        modifiedEntries.Clear();
        await foreach (var entry in OldEntriesAsync)
        {
            modifiedEntries.Add(entry);
        }
        modifiedEntries.AddRange(newEntries);
    }

    public async ValueTask RemoveEntryAsync(TEntry entry)
    {
        if (!removedEntries.Contains(entry))
        {
            removedEntries.Add(entry);
            await RebuildModifiedCollectionAsync();
        }
    }

    private async ValueTask<bool> DoesKeyMatchExistingAsync(
        string key,
        CancellationToken cancellationToken
    )
    {
        await foreach (
            var entry in EntriesAsync.WithCancellation(cancellationToken).ConfigureAwait(false)
        )
        {
            var path = entry.Key;
            if (path is null)
            {
                continue;
            }
            var p = path.Replace('/', '\\');
            if (p.Length > 0 && p[0] == '\\')
            {
                p = p.Substring(1);
            }
            return string.Equals(p, key, StringComparison.OrdinalIgnoreCase);
        }
        return false;
    }

    public async ValueTask<TEntry> AddEntryAsync(
        string key,
        Stream source,
        bool closeStream,
        long size = 0,
        DateTime? modified = null,
        CancellationToken cancellationToken = default
    )
    {
        if (key.Length > 0 && key[0] is '/' or '\\')
        {
            key = key.Substring(1);
        }
        if (await DoesKeyMatchExistingAsync(key, cancellationToken).ConfigureAwait(false))
        {
            throw new ArchiveException("Cannot add entry with duplicate key: " + key);
        }
        var entry = CreateEntry(key, source, size, modified, closeStream);
        newEntries.Add(entry);
        await RebuildModifiedCollectionAsync();
        return entry;
    }

    public async ValueTask<TEntry> AddDirectoryEntryAsync(
        string key,
        DateTime? modified = null,
        CancellationToken cancellationToken = default
    )
    {
        if (key.Length > 0 && key[0] is '/' or '\\')
        {
            key = key.Substring(1);
        }
        if (await DoesKeyMatchExistingAsync(key, cancellationToken).ConfigureAwait(false))
        {
            throw new ArchiveException("Cannot add entry with duplicate key: " + key);
        }
        var entry = CreateDirectoryEntry(key, modified);
        newEntries.Add(entry);
        await RebuildModifiedCollectionAsync();
        return entry;
    }

    public async ValueTask SaveToAsync(
        Stream stream,
        WriterOptions options,
        CancellationToken cancellationToken = default
    )
    {
        //reset streams of new entries
        newEntries.Cast<IWritableArchiveEntry>().ForEach(x => x.Stream.Seek(0, SeekOrigin.Begin));
        await SaveToAsync(stream, options, OldEntriesAsync, newEntries, cancellationToken)
            .ConfigureAwait(false);
    }
}
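Editor's note: a minimal usage sketch of the async writable-archive surface introduced above, assuming a concrete IWritableAsyncArchive implementation; "data.bin" and the entry keys are placeholders, not from the source.

using System;
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Common;
using SharpCompress.Writers;

internal static class AsyncWritableArchiveDemo
{
    public static async Task BuildAsync(IWritableAsyncArchive archive, Stream output)
    {
        // Placeholder source file; closeStream: false because we dispose it ourselves.
        await using var source = File.OpenRead("data.bin");

        // AddEntryAsync strips a leading '/' or '\' and throws ArchiveException on a duplicate key.
        await archive.AddEntryAsync("docs/data.bin", source, closeStream: false, size: source.Length);
        await archive.AddDirectoryEntryAsync("docs/empty");

        // Persist everything added so far.
        await archive.SaveToAsync(output, new WriterOptions(CompressionType.Deflate));
    }
}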
@@ -10,9 +10,10 @@ using SharpCompress.Writers;

namespace SharpCompress.Archives;

public abstract class AbstractWritableArchive<TEntry, TVolume>
public abstract partial class AbstractWritableArchive<TEntry, TVolume>
    : AbstractArchive<TEntry, TVolume>,
        IWritableArchive
        IWritableArchive,
        IWritableAsyncArchive
    where TEntry : IArchiveEntry
    where TVolume : IVolume
{
@@ -139,6 +140,24 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
        return false;
    }

    ValueTask IWritableAsyncArchive.RemoveEntryAsync(IArchiveEntry entry) =>
        RemoveEntryAsync((TEntry)entry);

    async ValueTask<IArchiveEntry> IWritableAsyncArchive.AddEntryAsync(
        string key,
        Stream source,
        bool closeStream,
        long size,
        DateTime? modified,
        CancellationToken cancellationToken
    ) => await AddEntryAsync(key, source, closeStream, size, modified, cancellationToken);

    async ValueTask<IArchiveEntry> IWritableAsyncArchive.AddDirectoryEntryAsync(
        string key,
        DateTime? modified,
        CancellationToken cancellationToken
    ) => await AddDirectoryEntryAsync(key, modified, cancellationToken);

    public TEntry AddDirectoryEntry(string key, DateTime? modified = null)
    {
        if (key.Length > 0 && key[0] is '/' or '\\')
@@ -162,18 +181,6 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
        SaveTo(stream, options, OldEntries, newEntries);
    }

    public async Task SaveToAsync(
        Stream stream,
        WriterOptions options,
        CancellationToken cancellationToken = default
    )
    {
        //reset streams of new entries
        newEntries.Cast<IWritableArchiveEntry>().ForEach(x => x.Stream.Seek(0, SeekOrigin.Begin));
        await SaveToAsync(stream, options, OldEntries, newEntries, cancellationToken)
            .ConfigureAwait(false);
    }

    protected TEntry CreateEntry(
        string key,
        Stream source,
@@ -208,10 +215,10 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
        IEnumerable<TEntry> newEntries
    );

    protected abstract Task SaveToAsync(
    protected abstract ValueTask SaveToAsync(
        Stream stream,
        WriterOptions options,
        IEnumerable<TEntry> oldEntries,
        IAsyncEnumerable<TEntry> oldEntries,
        IEnumerable<TEntry> newEntries,
        CancellationToken cancellationToken = default
    );
158 src/SharpCompress/Archives/ArchiveFactory.Async.cs (new file)
@@ -0,0 +1,158 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Factories;
using SharpCompress.IO;
using SharpCompress.Readers;

namespace SharpCompress.Archives;

public static partial class ArchiveFactory
{
    public static async ValueTask<IAsyncArchive> OpenAsyncArchive(
        Stream stream,
        ReaderOptions? readerOptions = null,
        CancellationToken cancellationToken = default
    )
    {
        readerOptions ??= new ReaderOptions();
        stream = SharpCompressStream.Create(stream, bufferSize: readerOptions.BufferSize);
        var factory = await FindFactoryAsync<IArchiveFactory>(stream, cancellationToken);
        return factory.OpenAsyncArchive(stream, readerOptions);
    }

    public static ValueTask<IAsyncArchive> OpenAsyncArchive(
        string filePath,
        ReaderOptions? options = null,
        CancellationToken cancellationToken = default
    )
    {
        filePath.NotNullOrEmpty(nameof(filePath));
        return OpenAsyncArchive(new FileInfo(filePath), options, cancellationToken);
    }

    public static async ValueTask<IAsyncArchive> OpenAsyncArchive(
        FileInfo fileInfo,
        ReaderOptions? options = null,
        CancellationToken cancellationToken = default
    )
    {
        options ??= new ReaderOptions { LeaveStreamOpen = false };

        var factory = await FindFactoryAsync<IArchiveFactory>(fileInfo, cancellationToken);
        return factory.OpenAsyncArchive(fileInfo, options);
    }

    public static async ValueTask<IAsyncArchive> OpenAsyncArchive(
        IEnumerable<FileInfo> fileInfos,
        ReaderOptions? options = null,
        CancellationToken cancellationToken = default
    )
    {
        fileInfos.NotNull(nameof(fileInfos));
        var filesArray = fileInfos.ToArray();
        if (filesArray.Length == 0)
        {
            throw new InvalidOperationException("No files to open");
        }

        var fileInfo = filesArray[0];
        if (filesArray.Length == 1)
        {
            return await OpenAsyncArchive(fileInfo, options, cancellationToken);
        }

        fileInfo.NotNull(nameof(fileInfo));
        options ??= new ReaderOptions { LeaveStreamOpen = false };

        var factory = await FindFactoryAsync<IMultiArchiveFactory>(fileInfo, cancellationToken);
        return factory.OpenAsyncArchive(filesArray, options, cancellationToken);
    }

    public static async ValueTask<IAsyncArchive> OpenAsyncArchive(
        IEnumerable<Stream> streams,
        ReaderOptions? options = null,
        CancellationToken cancellationToken = default
    )
    {
        cancellationToken.ThrowIfCancellationRequested();
        streams.NotNull(nameof(streams));
        var streamsArray = streams.ToArray();
        if (streamsArray.Length == 0)
        {
            throw new InvalidOperationException("No streams");
        }

        var firstStream = streamsArray[0];
        if (streamsArray.Length == 1)
        {
            return await OpenAsyncArchive(firstStream, options, cancellationToken);
        }

        firstStream.NotNull(nameof(firstStream));
        options ??= new ReaderOptions();

        var factory = await FindFactoryAsync<IMultiArchiveFactory>(firstStream, cancellationToken);
        return factory.OpenAsyncArchive(streamsArray, options);
    }

    public static ValueTask<T> FindFactoryAsync<T>(
        string path,
        CancellationToken cancellationToken = default
    )
        where T : IFactory
    {
        path.NotNullOrEmpty(nameof(path));
        return FindFactoryAsync<T>(new FileInfo(path), cancellationToken);
    }

    private static async ValueTask<T> FindFactoryAsync<T>(
        FileInfo finfo,
        CancellationToken cancellationToken
    )
        where T : IFactory
    {
        finfo.NotNull(nameof(finfo));
        using Stream stream = finfo.OpenRead();
        return await FindFactoryAsync<T>(stream, cancellationToken);
    }

    private static async ValueTask<T> FindFactoryAsync<T>(
        Stream stream,
        CancellationToken cancellationToken
    )
        where T : IFactory
    {
        stream.NotNull(nameof(stream));
        if (!stream.CanRead || !stream.CanSeek)
        {
            throw new ArgumentException("Stream should be readable and seekable");
        }

        var factories = Factory.Factories.OfType<T>();

        var startPosition = stream.Position;

        foreach (var factory in factories)
        {
            stream.Seek(startPosition, SeekOrigin.Begin);

            if (await factory.IsArchiveAsync(stream, cancellationToken: cancellationToken))
            {
                stream.Seek(startPosition, SeekOrigin.Begin);

                return factory;
            }
        }

        var extensions = string.Join(", ", factories.Select(item => item.Name));

        throw new InvalidOperationException(
            $"Cannot determine compressed stream type. Supported Archive Formats: {extensions}"
        );
    }
}
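Editor's note: a hedged sketch of the async open path added above, listing entries of an archive detected by format sniffing; the file path is a placeholder.

using System;
using System.Threading.Tasks;
using SharpCompress.Archives;

internal static class OpenAsyncDemo
{
    public static async Task ListAsync(string path)
    {
        // Format detection and volume handling are delegated to the factories, as shown above.
        await using IAsyncArchive archive = await ArchiveFactory.OpenAsyncArchive(path);

        await foreach (var entry in archive.EntriesAsync)
        {
            if (!entry.IsDirectory)
            {
                Console.WriteLine($"{entry.Key} ({entry.Size} bytes)");
            }
        }
    }
}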
@@ -2,6 +2,8 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Factories;
using SharpCompress.IO;
@@ -9,22 +11,16 @@ using SharpCompress.Readers;

namespace SharpCompress.Archives;

public static class ArchiveFactory
public static partial class ArchiveFactory
{
    /// <summary>
    /// Opens an Archive for random access
    /// </summary>
    /// <param name="stream"></param>
    /// <param name="readerOptions"></param>
    /// <returns></returns>
    public static IArchive Open(Stream stream, ReaderOptions? readerOptions = null)
    public static IArchive OpenArchive(Stream stream, ReaderOptions? readerOptions = null)
    {
        readerOptions ??= new ReaderOptions();
        stream = SharpCompressStream.Create(stream, bufferSize: readerOptions.BufferSize);
        return FindFactory<IArchiveFactory>(stream).Open(stream, readerOptions);
        return FindFactory<IArchiveFactory>(stream).OpenArchive(stream, readerOptions);
    }

    public static IWritableArchive Create(ArchiveType type)
    public static IWritableArchive CreateArchive(ArchiveType type)
    {
        var factory = Factory
            .Factories.OfType<IWriteableArchiveFactory>()
@@ -32,41 +28,29 @@ public static class ArchiveFactory

        if (factory != null)
        {
            return factory.CreateWriteableArchive();
            return factory.CreateArchive();
        }

        throw new NotSupportedException("Cannot create Archives of type: " + type);
    }

    /// <summary>
    /// Constructor expects a filepath to an existing file.
    /// </summary>
    /// <param name="filePath"></param>
    /// <param name="options"></param>
    public static IArchive Open(string filePath, ReaderOptions? options = null)
    public static IArchive OpenArchive(string filePath, ReaderOptions? options = null)
    {
        filePath.NotNullOrEmpty(nameof(filePath));
        return Open(new FileInfo(filePath), options);
        return OpenArchive(new FileInfo(filePath), options);
    }

    /// <summary>
    /// Constructor with a FileInfo object to an existing file.
    /// </summary>
    /// <param name="fileInfo"></param>
    /// <param name="options"></param>
    public static IArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
    public static IArchive OpenArchive(FileInfo fileInfo, ReaderOptions? options = null)
    {
        options ??= new ReaderOptions { LeaveStreamOpen = false };

        return FindFactory<IArchiveFactory>(fileInfo).Open(fileInfo, options);
        return FindFactory<IArchiveFactory>(fileInfo).OpenArchive(fileInfo, options);
    }

    /// <summary>
    /// Constructor with IEnumerable FileInfo objects, multi and split support.
    /// </summary>
    /// <param name="fileInfos"></param>
    /// <param name="options"></param>
    public static IArchive Open(IEnumerable<FileInfo> fileInfos, ReaderOptions? options = null)
    public static IArchive OpenArchive(
        IEnumerable<FileInfo> fileInfos,
        ReaderOptions? options = null
    )
    {
        fileInfos.NotNull(nameof(fileInfos));
        var filesArray = fileInfos.ToArray();
@@ -78,21 +62,16 @@ public static class ArchiveFactory
        var fileInfo = filesArray[0];
        if (filesArray.Length == 1)
        {
            return Open(fileInfo, options);
            return OpenArchive(fileInfo, options);
        }

        fileInfo.NotNull(nameof(fileInfo));
        options ??= new ReaderOptions { LeaveStreamOpen = false };

        return FindFactory<IMultiArchiveFactory>(fileInfo).Open(filesArray, options);
        return FindFactory<IMultiArchiveFactory>(fileInfo).OpenArchive(filesArray, options);
    }

    /// <summary>
    /// Constructor with IEnumerable FileInfo objects, multi and split support.
    /// </summary>
    /// <param name="streams"></param>
    /// <param name="options"></param>
    public static IArchive Open(IEnumerable<Stream> streams, ReaderOptions? options = null)
    public static IArchive OpenArchive(IEnumerable<Stream> streams, ReaderOptions? options = null)
    {
        streams.NotNull(nameof(streams));
        var streamsArray = streams.ToArray();
@@ -104,29 +83,34 @@ public static class ArchiveFactory
        var firstStream = streamsArray[0];
        if (streamsArray.Length == 1)
        {
            return Open(firstStream, options);
            return OpenArchive(firstStream, options);
        }

        firstStream.NotNull(nameof(firstStream));
        options ??= new ReaderOptions();

        return FindFactory<IMultiArchiveFactory>(firstStream).Open(streamsArray, options);
        return FindFactory<IMultiArchiveFactory>(firstStream).OpenArchive(streamsArray, options);
    }

    /// <summary>
    /// Extract to specific directory, retaining filename
    /// </summary>
    public static void WriteToDirectory(
        string sourceArchive,
        string destinationDirectory,
        ExtractionOptions? options = null
    )
    {
        using var archive = Open(sourceArchive);
        using var archive = OpenArchive(sourceArchive);
        archive.WriteToDirectory(destinationDirectory, options);
    }

    private static T FindFactory<T>(FileInfo finfo)
    public static T FindFactory<T>(string path)
        where T : IFactory
    {
        path.NotNullOrEmpty(nameof(path));
        using Stream stream = File.OpenRead(path);
        return FindFactory<T>(stream);
    }

    public static T FindFactory<T>(FileInfo finfo)
        where T : IFactory
    {
        finfo.NotNull(nameof(finfo));
@@ -134,7 +118,7 @@ public static class ArchiveFactory
        return FindFactory<T>(stream);
    }

    private static T FindFactory<T>(Stream stream)
    public static T FindFactory<T>(Stream stream)
        where T : IFactory
    {
        stream.NotNull(nameof(stream));
@@ -166,22 +150,14 @@ public static class ArchiveFactory
        );
    }

    public static bool IsArchive(
        string filePath,
        out ArchiveType? type,
        int bufferSize = ReaderOptions.DefaultBufferSize
    )
    public static bool IsArchive(string filePath, out ArchiveType? type)
    {
        filePath.NotNullOrEmpty(nameof(filePath));
        using Stream s = File.OpenRead(filePath);
        return IsArchive(s, out type, bufferSize);
        return IsArchive(s, out type);
    }

    public static bool IsArchive(
        Stream stream,
        out ArchiveType? type,
        int bufferSize = ReaderOptions.DefaultBufferSize
    )
    public static bool IsArchive(Stream stream, out ArchiveType? type)
    {
        type = null;
        stream.NotNull(nameof(stream));
@@ -208,22 +184,12 @@ public static class ArchiveFactory
        return false;
    }

    /// <summary>
    /// From a passed in archive (zip, rar, 7z, 001), return all parts.
    /// </summary>
    /// <param name="part1"></param>
    /// <returns></returns>
    public static IEnumerable<string> GetFileParts(string part1)
    {
        part1.NotNullOrEmpty(nameof(part1));
        return GetFileParts(new FileInfo(part1)).Select(a => a.FullName);
    }

    /// <summary>
    /// From a passed in archive (zip, rar, 7z, 001), return all parts.
    /// </summary>
    /// <param name="part1"></param>
    /// <returns></returns>
    public static IEnumerable<FileInfo> GetFileParts(FileInfo part1)
    {
        part1.NotNull(nameof(part1));
@@ -237,7 +203,7 @@ public static class ArchiveFactory
        if (part != null)
        {
            yield return part;
            while ((part = factory.GetFilePart(i++, part1)) != null) //tests split too
            while ((part = factory.GetFilePart(i++, part1)) != null)
            {
                yield return part;
            }
@@ -246,6 +212,4 @@ public static class ArchiveFactory
        }
    }
}

    public static IArchiveFactory AutoFactory { get; } = new AutoArchiveFactory();
}
@@ -13,6 +13,7 @@ internal abstract class ArchiveVolumeFactory
        //split 001, 002 ...
        var m = Regex.Match(part1.Name, @"^(.*\.)([0-9]+)$", RegexOptions.IgnoreCase);
        if (m.Success)
        {
            item = new FileInfo(
                Path.Combine(
                    part1.DirectoryName!,
@@ -22,9 +23,13 @@ internal abstract class ArchiveVolumeFactory
                )
            )
        );
        }

        if (item != null && item.Exists)
        {
            return item;
        }

        return null;
    }
}
@@ -1,30 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Readers;

namespace SharpCompress.Archives;

class AutoArchiveFactory : IArchiveFactory
{
    public string Name => nameof(AutoArchiveFactory);

    public ArchiveType? KnownArchiveType => null;

    public IEnumerable<string> GetSupportedExtensions() => throw new NotSupportedException();

    public bool IsArchive(
        Stream stream,
        string? password = null,
        int bufferSize = ReaderOptions.DefaultBufferSize
    ) => throw new NotSupportedException();

    public FileInfo? GetFilePart(int index, FileInfo part1) => throw new NotSupportedException();

    public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
        ArchiveFactory.Open(stream, readerOptions);

    public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
        ArchiveFactory.Open(fileInfo, readerOptions);
}
86 src/SharpCompress/Archives/GZip/GZipArchive.Async.cs (new file)
@@ -0,0 +1,86 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.GZip;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.GZip;
using SharpCompress.Writers;
using SharpCompress.Writers.GZip;

namespace SharpCompress.Archives.GZip;

public partial class GZipArchive
{
    public ValueTask SaveToAsync(string filePath, CancellationToken cancellationToken = default) =>
        SaveToAsync(new FileInfo(filePath), cancellationToken);

    public async ValueTask SaveToAsync(
        FileInfo fileInfo,
        CancellationToken cancellationToken = default
    )
    {
        using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
        await SaveToAsync(stream, new WriterOptions(CompressionType.GZip), cancellationToken)
            .ConfigureAwait(false);
    }

    protected override async ValueTask SaveToAsync(
        Stream stream,
        WriterOptions options,
        IAsyncEnumerable<GZipArchiveEntry> oldEntries,
        IEnumerable<GZipArchiveEntry> newEntries,
        CancellationToken cancellationToken = default
    )
    {
        if (Entries.Count > 1)
        {
            throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
        }
        using var writer = new GZipWriter(stream, new GZipWriterOptions(options));
        await foreach (
            var entry in oldEntries.WithCancellation(cancellationToken).ConfigureAwait(false)
        )
        {
            if (!entry.IsDirectory)
            {
                using var entryStream = entry.OpenEntryStream();
                await writer
                    .WriteAsync(
                        entry.Key.NotNull("Entry Key is null"),
                        entryStream,
                        cancellationToken
                    )
                    .ConfigureAwait(false);
            }
        }
        foreach (var entry in newEntries.Where(x => !x.IsDirectory))
        {
            using var entryStream = entry.OpenEntryStream();
            await writer
                .WriteAsync(entry.Key.NotNull("Entry Key is null"), entryStream, cancellationToken)
                .ConfigureAwait(false);
        }
    }

    protected override ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync()
    {
        var stream = Volumes.Single().Stream;
        stream.Position = 0;
        return new((IAsyncReader)GZipReader.OpenReader(stream));
    }

    protected override async IAsyncEnumerable<GZipArchiveEntry> LoadEntriesAsync(
        IAsyncEnumerable<GZipVolume> volumes
    )
    {
        var stream = (await volumes.SingleAsync()).Stream;
        yield return new GZipArchiveEntry(
            this,
            await GZipFilePart.CreateAsync(stream, ReaderOptions.ArchiveEncoding)
        );
    }
}
201 src/SharpCompress/Archives/GZip/GZipArchive.Factory.cs (new file)
@@ -0,0 +1,201 @@
using System;
using System.Buffers;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.GZip;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.GZip;
using SharpCompress.Writers;
using SharpCompress.Writers.GZip;

namespace SharpCompress.Archives.GZip;

public partial class GZipArchive
#if NET8_0_OR_GREATER
    : IWritableArchiveOpenable,
        IMultiArchiveOpenable<IWritableArchive, IWritableAsyncArchive>
#endif
{
    public static IWritableAsyncArchive OpenAsyncArchive(
        string path,
        ReaderOptions? readerOptions = null,
        CancellationToken cancellationToken = default
    )
    {
        cancellationToken.ThrowIfCancellationRequested();
        path.NotNullOrEmpty(nameof(path));
        return (IWritableAsyncArchive)OpenArchive(
            new FileInfo(path),
            readerOptions ?? new ReaderOptions()
        );
    }

    public static IWritableArchive OpenArchive(string filePath, ReaderOptions? readerOptions = null)
    {
        filePath.NotNullOrEmpty(nameof(filePath));
        return OpenArchive(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
    }

    public static IWritableArchive OpenArchive(
        FileInfo fileInfo,
        ReaderOptions? readerOptions = null
    )
    {
        fileInfo.NotNull(nameof(fileInfo));
        return new GZipArchive(
            new SourceStream(
                fileInfo,
                i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
                readerOptions ?? new ReaderOptions()
            )
        );
    }

    public static IWritableArchive OpenArchive(
        IEnumerable<FileInfo> fileInfos,
        ReaderOptions? readerOptions = null
    )
    {
        fileInfos.NotNull(nameof(fileInfos));
        var files = fileInfos.ToArray();
        return new GZipArchive(
            new SourceStream(
                files[0],
                i => i < files.Length ? files[i] : null,
                readerOptions ?? new ReaderOptions()
            )
        );
    }

    public static IWritableArchive OpenArchive(
        IEnumerable<Stream> streams,
        ReaderOptions? readerOptions = null
    )
    {
        streams.NotNull(nameof(streams));
        var strms = streams.ToArray();
        return new GZipArchive(
            new SourceStream(
                strms[0],
                i => i < strms.Length ? strms[i] : null,
                readerOptions ?? new ReaderOptions()
            )
        );
    }

    public static IWritableArchive OpenArchive(Stream stream, ReaderOptions? readerOptions = null)
    {
        stream.NotNull(nameof(stream));

        if (stream is not { CanSeek: true })
        {
            throw new ArgumentException("Stream must be seekable", nameof(stream));
        }

        return new GZipArchive(
            new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
        );
    }

    public static IWritableAsyncArchive OpenAsyncArchive(
        Stream stream,
        ReaderOptions? readerOptions = null,
        CancellationToken cancellationToken = default
    )
    {
        cancellationToken.ThrowIfCancellationRequested();
        return (IWritableAsyncArchive)OpenArchive(stream, readerOptions);
    }

    public static IWritableAsyncArchive OpenAsyncArchive(
        FileInfo fileInfo,
        ReaderOptions? readerOptions = null,
        CancellationToken cancellationToken = default
    )
    {
        cancellationToken.ThrowIfCancellationRequested();
        return (IWritableAsyncArchive)OpenArchive(fileInfo, readerOptions);
    }

    public static IWritableAsyncArchive OpenAsyncArchive(
        IReadOnlyList<Stream> streams,
        ReaderOptions? readerOptions = null,
        CancellationToken cancellationToken = default
    )
    {
        cancellationToken.ThrowIfCancellationRequested();
        return (IWritableAsyncArchive)OpenArchive(streams, readerOptions);
    }

    public static IWritableAsyncArchive OpenAsyncArchive(
        IReadOnlyList<FileInfo> fileInfos,
        ReaderOptions? readerOptions = null,
        CancellationToken cancellationToken = default
    )
    {
        cancellationToken.ThrowIfCancellationRequested();
        return (IWritableAsyncArchive)OpenArchive(fileInfos, readerOptions);
    }

    public static IWritableArchive CreateArchive() => new GZipArchive();

    public static IWritableAsyncArchive CreateAsyncArchive() => new GZipArchive();

    public static bool IsGZipFile(string filePath) => IsGZipFile(new FileInfo(filePath));

    public static bool IsGZipFile(FileInfo fileInfo)
    {
        if (!fileInfo.Exists)
        {
            return false;
        }

        using Stream stream = fileInfo.OpenRead();
        return IsGZipFile(stream);
    }

    public static bool IsGZipFile(Stream stream)
    {
        Span<byte> header = stackalloc byte[10];

        if (!stream.ReadFully(header))
        {
            return false;
        }

        if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
        {
            return false;
        }

        return true;
    }

    public static async ValueTask<bool> IsGZipFileAsync(
        Stream stream,
        CancellationToken cancellationToken = default
    )
    {
        var header = ArrayPool<byte>.Shared.Rent(10);
        try
        {
            await stream.ReadFullyAsync(header, 0, 10, cancellationToken).ConfigureAwait(false);

            if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
            {
                return false;
            }

            return true;
        }
        finally
        {
            ArrayPool<byte>.Shared.Return(header);
        }
    }
}
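Editor's note: a hedged usage sketch for the GZip factory and probe methods above; the file path is a placeholder.

using System;
using System.IO;
using System.Threading.Tasks;
using SharpCompress.Archives.GZip;

internal static class GZipFactoryDemo
{
    public static async Task<bool> InspectAsync(string path)
    {
        // Header probe: 0x1F 0x8B followed by the deflate method byte (8), per IsGZipFileAsync above.
        using (var probe = File.OpenRead(path))
        {
            if (!await GZipArchive.IsGZipFileAsync(probe))
            {
                return false;
            }
        }

        // Open the same file through the async factory surface and list its single entry.
        await using var archive = GZipArchive.OpenAsyncArchive(path);
        await foreach (var entry in archive.EntriesAsync)
        {
            Console.WriteLine($"{entry.Key}: {entry.Size} bytes uncompressed");
        }
        return true;
    }
}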
@@ -14,122 +14,20 @@ using SharpCompress.Writers.GZip;

namespace SharpCompress.Archives.GZip;

public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
public partial class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
{
    /// <summary>
    /// Constructor expects a filepath to an existing file.
    /// </summary>
    /// <param name="filePath"></param>
    /// <param name="readerOptions"></param>
    public static GZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
    {
        filePath.NotNullOrEmpty(nameof(filePath));
        return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
    }

    /// <summary>
    /// Constructor with a FileInfo object to an existing file.
    /// </summary>
    /// <param name="fileInfo"></param>
    /// <param name="readerOptions"></param>
    public static GZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
    {
        fileInfo.NotNull(nameof(fileInfo));
        return new GZipArchive(
            new SourceStream(
                fileInfo,
                i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
                readerOptions ?? new ReaderOptions()
            )
        );
    }

    /// <summary>
    /// Constructor with all file parts passed in
    /// </summary>
    /// <param name="fileInfos"></param>
    /// <param name="readerOptions"></param>
    public static GZipArchive Open(
        IEnumerable<FileInfo> fileInfos,
        ReaderOptions? readerOptions = null
    )
    {
        fileInfos.NotNull(nameof(fileInfos));
        var files = fileInfos.ToArray();
        return new GZipArchive(
            new SourceStream(
                files[0],
                i => i < files.Length ? files[i] : null,
                readerOptions ?? new ReaderOptions()
            )
        );
    }

    /// <summary>
    /// Constructor with all stream parts passed in
    /// </summary>
    /// <param name="streams"></param>
    /// <param name="readerOptions"></param>
    public static GZipArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
    {
        streams.NotNull(nameof(streams));
        var strms = streams.ToArray();
        return new GZipArchive(
            new SourceStream(
                strms[0],
                i => i < strms.Length ? strms[i] : null,
                readerOptions ?? new ReaderOptions()
            )
        );
    }

    /// <summary>
    /// Takes a seekable Stream as a source
    /// </summary>
    /// <param name="stream"></param>
    /// <param name="readerOptions"></param>
    public static GZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
    {
        stream.NotNull(nameof(stream));

        if (stream is not { CanSeek: true })
        {
            throw new ArgumentException("Stream must be seekable", nameof(stream));
        }

        return new GZipArchive(
            new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
        );
    }

    public static GZipArchive Create() => new();

    /// <summary>
    /// Constructor with a SourceStream able to handle FileInfo and Streams.
    /// </summary>
    /// <param name="sourceStream"></param>
    private GZipArchive(SourceStream sourceStream)
        : base(ArchiveType.GZip, sourceStream) { }

    internal GZipArchive()
        : base(ArchiveType.GZip) { }

    protected override IEnumerable<GZipVolume> LoadVolumes(SourceStream sourceStream)
    {
        sourceStream.LoadAllParts();
        return sourceStream.Streams.Select(a => new GZipVolume(a, ReaderOptions, 0));
    }

    public static bool IsGZipFile(string filePath) => IsGZipFile(new FileInfo(filePath));

    public static bool IsGZipFile(FileInfo fileInfo)
    {
        if (!fileInfo.Exists)
        {
            return false;
        }

        using Stream stream = fileInfo.OpenRead();
        return IsGZipFile(stream);
    }

    public void SaveTo(string filePath) => SaveTo(new FileInfo(filePath));

    public void SaveTo(FileInfo fileInfo)
@@ -138,38 +36,6 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
        SaveTo(stream, new WriterOptions(CompressionType.GZip));
    }

    public Task SaveToAsync(string filePath, CancellationToken cancellationToken = default) =>
        SaveToAsync(new FileInfo(filePath), cancellationToken);

    public async Task SaveToAsync(FileInfo fileInfo, CancellationToken cancellationToken = default)
    {
        using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
        await SaveToAsync(stream, new WriterOptions(CompressionType.GZip), cancellationToken)
            .ConfigureAwait(false);
    }

    public static bool IsGZipFile(Stream stream)
    {
        // read the header on the first read
        Span<byte> header = stackalloc byte[10];

        // workitem 8501: handle edge case (decompress empty stream)
        if (!stream.ReadFully(header))
        {
            return false;
        }

        if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
        {
            return false;
        }

        return true;
    }

    internal GZipArchive()
        : base(ArchiveType.GZip) { }

    protected override GZipArchiveEntry CreateEntryInternal(
        string filePath,
        Stream source,
@@ -213,34 +79,12 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
        }
    }

    protected override async Task SaveToAsync(
        Stream stream,
        WriterOptions options,
        IEnumerable<GZipArchiveEntry> oldEntries,
        IEnumerable<GZipArchiveEntry> newEntries,
        CancellationToken cancellationToken = default
    )
    {
        if (Entries.Count > 1)
        {
            throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
        }
        using var writer = new GZipWriter(stream, new GZipWriterOptions(options));
        foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
        {
            using var entryStream = entry.OpenEntryStream();
            await writer
                .WriteAsync(entry.Key.NotNull("Entry Key is null"), entryStream, cancellationToken)
                .ConfigureAwait(false);
        }
    }

    protected override IEnumerable<GZipArchiveEntry> LoadEntries(IEnumerable<GZipVolume> volumes)
    {
        var stream = volumes.Single().Stream;
        yield return new GZipArchiveEntry(
            this,
            new GZipFilePart(stream, ReaderOptions.ArchiveEncoding)
            GZipFilePart.Create(stream, ReaderOptions.ArchiveEncoding)
        );
    }

@@ -248,6 +92,6 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
    {
        var stream = Volumes.Single().Stream;
        stream.Position = 0;
        return GZipReader.Open(stream);
        return GZipReader.OpenReader(stream);
    }
}
@@ -23,10 +23,10 @@ public class GZipArchiveEntry : GZipEntry, IArchiveEntry
        return Parts.Single().GetCompressedStream().NotNull();
    }

    public virtual Task<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
    public ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
    {
        // GZip synchronous implementation is fast enough, just wrap it
        return Task.FromResult(OpenEntryStream());
        return new(OpenEntryStream());
    }

    #region IArchiveEntry Members

@@ -38,5 +38,10 @@ public interface IArchive : IDisposable
    /// <summary>
    /// The total size of the files as uncompressed in the archive.
    /// </summary>
    long TotalUncompressSize { get; }
    long TotalUncompressedSize { get; }

    /// <summary>
    /// Returns whether the archive is encrypted.
    /// </summary>
    bool IsEncrypted { get; }
}
@@ -17,7 +17,7 @@ public interface IArchiveEntry : IEntry
    /// Opens the current entry as a stream that will decompress as it is read asynchronously.
    /// Read the entire stream or use SkipEntry on EntryStream.
    /// </summary>
    Task<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default);
    ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default);

    /// <summary>
    /// The archive can find all the parts of the archive needed to extract this entry.
@@ -9,8 +9,6 @@ namespace SharpCompress.Archives;

public static class IArchiveEntryExtensions
{
    private const int BufferSize = 81920;

    /// <param name="archiveEntry">The archive entry to extract.</param>
    extension(IArchiveEntry archiveEntry)
    {
@@ -28,7 +26,7 @@ public static class IArchiveEntryExtensions

        using var entryStream = archiveEntry.OpenEntryStream();
        var sourceStream = WrapWithProgress(entryStream, archiveEntry, progress);
        sourceStream.CopyTo(streamToWriteTo, BufferSize);
        sourceStream.CopyTo(streamToWriteTo, Constants.BufferSize);
    }

    /// <summary>
@@ -37,7 +35,7 @@ public static class IArchiveEntryExtensions
    /// <param name="streamToWriteTo">The stream to write the entry content to.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <param name="progress">Optional progress reporter for tracking extraction progress.</param>
    public async Task WriteToAsync(
    public async ValueTask WriteToAsync(
        Stream streamToWriteTo,
        IProgress<ProgressReport>? progress = null,
        CancellationToken cancellationToken = default
@@ -51,7 +49,7 @@ public static class IArchiveEntryExtensions
        using var entryStream = await archiveEntry.OpenEntryStreamAsync(cancellationToken);
        var sourceStream = WrapWithProgress(entryStream, archiveEntry, progress);
        await sourceStream
            .CopyToAsync(streamToWriteTo, BufferSize, cancellationToken)
            .CopyToAsync(streamToWriteTo, Constants.BufferSize, cancellationToken)
            .ConfigureAwait(false);
    }
    }
@@ -110,18 +108,20 @@ public static class IArchiveEntryExtensions
    /// <summary>
    /// Extract to specific directory asynchronously, retaining filename
    /// </summary>
    public Task WriteToDirectoryAsync(
    public async ValueTask WriteToDirectoryAsync(
        string destinationDirectory,
        ExtractionOptions? options = null,
        CancellationToken cancellationToken = default
    ) =>
        ExtractionMethods.WriteEntryToDirectoryAsync(
            entry,
            destinationDirectory,
            options,
            entry.WriteToFileAsync,
            cancellationToken
        );
        await ExtractionMethods
            .WriteEntryToDirectoryAsync(
                entry,
                destinationDirectory,
                options,
                entry.WriteToFileAsync,
                cancellationToken
            )
            .ConfigureAwait(false);

    /// <summary>
    /// Extract to specific file
@@ -141,21 +141,23 @@ public static class IArchiveEntryExtensions
    /// <summary>
    /// Extract to specific file asynchronously
    /// </summary>
    public Task WriteToFileAsync(
    public async ValueTask WriteToFileAsync(
        string destinationFileName,
        ExtractionOptions? options = null,
        CancellationToken cancellationToken = default
    ) =>
        ExtractionMethods.WriteEntryToFileAsync(
            entry,
            destinationFileName,
            options,
            async (x, fm, ct) =>
            {
                using var fs = File.Open(destinationFileName, fm);
                await entry.WriteToAsync(fs, null, ct).ConfigureAwait(false);
            },
            cancellationToken
        );
        await ExtractionMethods
            .WriteEntryToFileAsync(
                entry,
                destinationFileName,
                options,
                async (x, fm, ct) =>
                {
                    using var fs = File.Open(destinationFileName, fm);
                    await entry.WriteToAsync(fs, null, ct).ConfigureAwait(false);
                },
                cancellationToken
            )
            .ConfigureAwait(false);
    }
}
@@ -1,8 +1,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;

@@ -10,7 +8,6 @@ namespace SharpCompress.Archives;

public static class IArchiveExtensions
{
    /// <param name="archive">The archive to extract.</param>
    extension(IArchive archive)
    {
        /// <summary>
@@ -25,7 +22,6 @@ public static class IArchiveExtensions
            IProgress<ProgressReport>? progress = null
        )
        {
            // For solid archives (Rar, 7Zip), use the optimized reader-based approach
            if (archive.IsSolid || archive.Type == ArchiveType.SevenZip)
            {
                using var reader = archive.ExtractAllEntries();
@@ -33,7 +29,6 @@ public static class IArchiveExtensions
            }
            else
            {
                // For non-solid archives, extract entries directly
                archive.WriteToDirectoryInternal(destinationDirectory, options, progress);
            }
        }
@@ -44,14 +39,10 @@ public static class IArchiveExtensions
            IProgress<ProgressReport>? progress
        )
        {
            // Prepare for progress reporting
            var totalBytes = archive.TotalUncompressSize;
            var totalBytes = archive.TotalUncompressedSize;
            var bytesRead = 0L;

            // Tracking for created directories.
            var seenDirectories = new HashSet<string>();

            // Extract
            foreach (var entry in archive.Entries)
            {
                if (entry.IsDirectory)
@@ -70,94 +61,8 @@ public static class IArchiveExtensions
                    continue;
                }

                // Use the entry's WriteToDirectory method which respects ExtractionOptions
                entry.WriteToDirectory(destinationDirectory, options);

                // Update progress
                bytesRead += entry.Size;
                progress?.Report(
                    new ProgressReport(entry.Key ?? string.Empty, bytesRead, totalBytes)
                );
            }
        }

        /// <summary>
        /// Extract to specific directory asynchronously with progress reporting and cancellation support
        /// </summary>
        /// <param name="destinationDirectory">The folder to extract into.</param>
        /// <param name="options">Extraction options.</param>
        /// <param name="progress">Optional progress reporter for tracking extraction progress.</param>
        /// <param name="cancellationToken">Optional cancellation token.</param>
        public async Task WriteToDirectoryAsync(
            string destinationDirectory,
            ExtractionOptions? options = null,
            IProgress<ProgressReport>? progress = null,
            CancellationToken cancellationToken = default
        )
        {
            // For solid archives (Rar, 7Zip), use the optimized reader-based approach
            if (archive.IsSolid || archive.Type == ArchiveType.SevenZip)
            {
                using var reader = archive.ExtractAllEntries();
                await reader.WriteAllToDirectoryAsync(
                    destinationDirectory,
                    options,
                    cancellationToken
                );
            }
            else
            {
                // For non-solid archives, extract entries directly
                await archive.WriteToDirectoryAsyncInternal(
                    destinationDirectory,
                    options,
                    progress,
                    cancellationToken
                );
            }
        }

        private async Task WriteToDirectoryAsyncInternal(
            string destinationDirectory,
            ExtractionOptions? options,
            IProgress<ProgressReport>? progress,
            CancellationToken cancellationToken
        )
        {
            // Prepare for progress reporting
            var totalBytes = archive.TotalUncompressSize;
            var bytesRead = 0L;

            // Tracking for created directories.
            var seenDirectories = new HashSet<string>();

            // Extract
            foreach (var entry in archive.Entries)
            {
                cancellationToken.ThrowIfCancellationRequested();

                if (entry.IsDirectory)
                {
                    var dirPath = Path.Combine(
                        destinationDirectory,
                        entry.Key.NotNull("Entry Key is null")
                    );
                    if (
                        Path.GetDirectoryName(dirPath + "/") is { } parentDirectory
                        && seenDirectories.Add(dirPath)
                    )
                    {
                        Directory.CreateDirectory(parentDirectory);
                    }
                    continue;
                }

                // Use the entry's WriteToDirectoryAsync method which respects ExtractionOptions
                await entry
                    .WriteToDirectoryAsync(destinationDirectory, options, cancellationToken)
                    .ConfigureAwait(false);

                // Update progress
                bytesRead += entry.Size;
                progress?.Report(
                    new ProgressReport(entry.Key ?? string.Empty, bytesRead, totalBytes)
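Editor's note: a small sketch of the synchronous extraction path touched above, using the renamed ArchiveFactory.OpenArchive; "archive.rar" and "out" are placeholder paths, and the ExtractionOptions values are the commonly used ones rather than anything this diff changes.

using SharpCompress.Archives;
using SharpCompress.Common;

internal static class SyncExtractDemo
{
    public static void Extract()
    {
        using var archive = ArchiveFactory.OpenArchive("archive.rar");
        // Solid archives are routed through ExtractAllEntries internally (see the extension above).
        archive.WriteToDirectory(
            "out",
            new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
        );
    }
}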
@@ -1,4 +1,5 @@
using System.IO;
using System.Threading;
using SharpCompress.Factories;
using SharpCompress.Readers;

@@ -24,12 +25,27 @@ public interface IArchiveFactory : IFactory
    /// </summary>
    /// <param name="stream">An open, readable and seekable stream.</param>
    /// <param name="readerOptions">reading options.</param>
    IArchive Open(Stream stream, ReaderOptions? readerOptions = null);
    IArchive OpenArchive(Stream stream, ReaderOptions? readerOptions = null);

    /// <summary>
    /// Opens an Archive for random access asynchronously.
    /// </summary>
    /// <param name="stream">An open, readable and seekable stream.</param>
    /// <param name="readerOptions">reading options.</param>
    IAsyncArchive OpenAsyncArchive(Stream stream, ReaderOptions? readerOptions = null);

    /// <summary>
    /// Constructor with a FileInfo object to an existing file.
    /// </summary>
    /// <param name="fileInfo">the file to open.</param>
    /// <param name="readerOptions">reading options.</param>
    IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null);
    IArchive OpenArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null);

    /// <summary>
    /// Opens an Archive from a FileInfo object asynchronously.
    /// </summary>
    /// <param name="fileInfo">the file to open.</param>
    /// <param name="readerOptions">reading options.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    IAsyncArchive OpenAsyncArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null);
}
40 src/SharpCompress/Archives/IArchiveOpenable.cs (new file)
@@ -0,0 +1,40 @@
#if NET8_0_OR_GREATER
using System.IO;
using System.Threading;
using SharpCompress.Readers;

namespace SharpCompress.Archives;

public interface IArchiveOpenable<TSync, TASync>
    where TSync : IArchive
    where TASync : IAsyncArchive
{
    public static abstract TSync OpenArchive(string filePath, ReaderOptions? readerOptions = null);

    public static abstract TSync OpenArchive(
        FileInfo fileInfo,
        ReaderOptions? readerOptions = null
    );

    public static abstract TSync OpenArchive(Stream stream, ReaderOptions? readerOptions = null);

    public static abstract TASync OpenAsyncArchive(
        string path,
        ReaderOptions? readerOptions = null,
        CancellationToken cancellationToken = default
    );

    public static abstract TASync OpenAsyncArchive(
        Stream stream,
        ReaderOptions? readerOptions = null,
        CancellationToken cancellationToken = default
    );

    public static abstract TASync OpenAsyncArchive(
        FileInfo fileInfo,
        ReaderOptions? readerOptions = null,
        CancellationToken cancellationToken = default
    );
}

#endif
48 src/SharpCompress/Archives/IAsyncArchive.cs (new file)
@@ -0,0 +1,48 @@
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;

namespace SharpCompress.Archives;

public interface IAsyncArchive : IAsyncDisposable
{
    IAsyncEnumerable<IArchiveEntry> EntriesAsync { get; }
    IAsyncEnumerable<IVolume> VolumesAsync { get; }

    ArchiveType Type { get; }

    /// <summary>
    /// Use this method to extract all entries in an archive in order.
    /// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
    /// extracted sequentially for the best performance.
    /// </summary>
    ValueTask<IAsyncReader> ExtractAllEntriesAsync();

    /// <summary>
    /// Archive is SOLID (this means the Archive saved bytes by reusing information which helps for archives containing many small files).
    /// Rar Archives can be SOLID while all 7Zip archives are considered SOLID.
    /// </summary>
    ValueTask<bool> IsSolidAsync();

    /// <summary>
    /// This checks to see if all the known entries have IsComplete = true
    /// </summary>
    ValueTask<bool> IsCompleteAsync();

    /// <summary>
    /// The total size of the files compressed in the archive.
    /// </summary>
    ValueTask<long> TotalSizeAsync();

    /// <summary>
    /// The total size of the files as uncompressed in the archive.
    /// </summary>
    ValueTask<long> TotalUncompressedSizeAsync();

    /// <summary>
    /// Returns whether the archive is encrypted.
    /// </summary>
    ValueTask<bool> IsEncryptedAsync();
}
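Editor's note: a minimal sketch of consuming IAsyncArchive; it uses only members declared in the interface above, and assumes the archive was obtained elsewhere (for example via ArchiveFactory.OpenAsyncArchive).

using System;
using System.Threading.Tasks;
using SharpCompress.Archives;

internal static class AsyncArchiveInfo
{
    public static async Task PrintSummaryAsync(IAsyncArchive archive)
    {
        // Each property of the async surface is a ValueTask-returning member.
        Console.WriteLine($"Type:         {archive.Type}");
        Console.WriteLine($"Solid:        {await archive.IsSolidAsync()}");
        Console.WriteLine($"Complete:     {await archive.IsCompleteAsync()}");
        Console.WriteLine($"Encrypted:    {await archive.IsEncryptedAsync()}");
        Console.WriteLine($"Compressed:   {await archive.TotalSizeAsync()} bytes");
        Console.WriteLine($"Uncompressed: {await archive.TotalUncompressedSizeAsync()} bytes");
    }
}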
92 src/SharpCompress/Archives/IAsyncArchiveExtensions.cs (new file)
@@ -0,0 +1,92 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;

namespace SharpCompress.Archives;

public static class IAsyncArchiveExtensions
{
    extension(IAsyncArchive archive)
    {
        /// <summary>
        /// Extract to specific directory asynchronously with progress reporting and cancellation support
        /// </summary>
        /// <param name="archive">The archive to extract.</param>
        /// <param name="destinationDirectory">The folder to extract into.</param>
        /// <param name="options">Extraction options.</param>
        /// <param name="progress">Optional progress reporter for tracking extraction progress.</param>
        /// <param name="cancellationToken">Optional cancellation token.</param>
        public async ValueTask WriteToDirectoryAsync(
            string destinationDirectory,
            ExtractionOptions? options = null,
            IProgress<ProgressReport>? progress = null,
            CancellationToken cancellationToken = default
        )
        {
            if (await archive.IsSolidAsync() || archive.Type == ArchiveType.SevenZip)
            {
                await using var reader = await archive.ExtractAllEntriesAsync();
                await reader.WriteAllToDirectoryAsync(
                    destinationDirectory,
                    options,
                    cancellationToken
                );
            }
            else
            {
                await archive.WriteToDirectoryAsyncInternal(
                    destinationDirectory,
                    options,
                    progress,
                    cancellationToken
                );
            }
        }

        private async ValueTask WriteToDirectoryAsyncInternal(
            string destinationDirectory,
            ExtractionOptions? options,
            IProgress<ProgressReport>? progress,
            CancellationToken cancellationToken
        )
        {
            var totalBytes = await archive.TotalUncompressedSizeAsync();
            var bytesRead = 0L;
            var seenDirectories = new HashSet<string>();

            await foreach (var entry in archive.EntriesAsync.WithCancellation(cancellationToken))
            {
                cancellationToken.ThrowIfCancellationRequested();

                if (entry.IsDirectory)
                {
                    var dirPath = Path.Combine(
                        destinationDirectory,
                        entry.Key.NotNull("Entry Key is null")
                    );
                    if (
                        Path.GetDirectoryName(dirPath + "/") is { } parentDirectory
                        && seenDirectories.Add(dirPath)
                    )
                    {
                        Directory.CreateDirectory(parentDirectory);
                    }
                    continue;
                }

                await entry
                    .WriteToDirectoryAsync(destinationDirectory, options, cancellationToken)
                    .ConfigureAwait(false);

                bytesRead += entry.Size;
                progress?.Report(
                    new ProgressReport(entry.Key ?? string.Empty, bytesRead, totalBytes)
                );
            }
        }
    }
}
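Editor's note: a hedged example of the async extraction extension above; "backup.7z" and "out" are placeholder paths, and the ExtractionOptions values shown are the commonly used ones rather than part of this change.

using System;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives;
using SharpCompress.Common;

internal static class AsyncExtractDemo
{
    public static async Task ExtractAsync(CancellationToken token)
    {
        await using var archive = await ArchiveFactory.OpenAsyncArchive("backup.7z");

        // Each ProgressReport carries the entry key plus bytes read and total bytes,
        // as constructed in WriteToDirectoryAsyncInternal above.
        var progress = new Progress<ProgressReport>(report => Console.WriteLine(report));

        await archive.WriteToDirectoryAsync(
            "out",
            new ExtractionOptions { ExtractFullPath = true, Overwrite = true },
            progress,
            token
        );
    }
}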
@@ -1,5 +1,6 @@
using System.Collections.Generic;
using System.IO;
using System.Threading;
using SharpCompress.Factories;
using SharpCompress.Readers;

@@ -25,12 +26,34 @@ public interface IMultiArchiveFactory : IFactory
    /// </summary>
    /// <param name="streams"></param>
    /// <param name="readerOptions">reading options.</param>
    IArchive Open(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null);
    IArchive OpenArchive(IReadOnlyList<Stream> streams, ReaderOptions? readerOptions = null);

    /// <summary>
    /// Opens a multi-part archive from streams asynchronously.
    /// </summary>
    /// <param name="streams"></param>
    /// <param name="readerOptions">reading options.</param>
    IAsyncArchive OpenAsyncArchive(
        IReadOnlyList<Stream> streams,
        ReaderOptions? readerOptions = null
    );

    /// <summary>
    /// Constructor with IEnumerable Stream objects, multi and split support.
    /// </summary>
    /// <param name="fileInfos"></param>
    /// <param name="readerOptions">reading options.</param>
    IArchive Open(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null);
    IArchive OpenArchive(IReadOnlyList<FileInfo> fileInfos, ReaderOptions? readerOptions = null);

    /// <summary>
    /// Opens a multi-part archive from files asynchronously.
    /// </summary>
    /// <param name="fileInfos"></param>
    /// <param name="readerOptions">reading options.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    IAsyncArchive OpenAsyncArchive(
        IReadOnlyList<FileInfo> fileInfos,
        ReaderOptions? readerOptions = null,
        CancellationToken cancellationToken = default
    );
}
35 src/SharpCompress/Archives/IMultiArchiveOpenable.cs (new file)
@@ -0,0 +1,35 @@
#if NET8_0_OR_GREATER
using System.Collections.Generic;
using System.IO;
using System.Threading;
using SharpCompress.Readers;

namespace SharpCompress.Archives;

public interface IMultiArchiveOpenable<TSync, TASync>
    where TSync : IArchive
    where TASync : IAsyncArchive
{
    public static abstract TSync OpenArchive(
        IEnumerable<FileInfo> fileInfos,
        ReaderOptions? readerOptions = null
    );

    public static abstract TSync OpenArchive(
        IEnumerable<Stream> streams,
        ReaderOptions? readerOptions = null
    );

    public static abstract TASync OpenAsyncArchive(
        IReadOnlyList<Stream> streams,
        ReaderOptions? readerOptions = null,
        CancellationToken cancellationToken = default
    );

    public static abstract TASync OpenAsyncArchive(
        IReadOnlyList<FileInfo> fileInfos,
        ReaderOptions? readerOptions = null,
        CancellationToken cancellationToken = default
    );
}
#endif
@@ -6,10 +6,17 @@ using SharpCompress.Writers;
|
||||
|
||||
namespace SharpCompress.Archives;
|
||||
|
||||
public interface IWritableArchive : IArchive
|
||||
public interface IWritableArchiveCommon
|
||||
{
|
||||
void RemoveEntry(IArchiveEntry entry);
|
||||
/// <summary>
|
||||
/// Use this to pause entry rebuilding when adding large collections of entries. Dispose when complete. A using statement is recommended.
|
||||
/// </summary>
|
||||
/// <returns>IDisposeable to resume entry rebuilding</returns>
|
||||
IDisposable PauseEntryRebuilding();
|
||||
}
|
||||
|
||||
public interface IWritableArchive : IArchive, IWritableArchiveCommon
|
||||
{
|
||||
IArchiveEntry AddEntry(
|
||||
string key,
|
||||
Stream source,
|
||||
@@ -20,17 +27,51 @@ public interface IWritableArchive : IArchive
|
||||
|
||||
IArchiveEntry AddDirectoryEntry(string key, DateTime? modified = null);
|
||||
|
||||
/// <summary>
|
||||
/// Saves the archive to the specified stream using the given writer options.
|
||||
/// </summary>
|
||||
void SaveTo(Stream stream, WriterOptions options);
|
||||
|
||||
Task SaveToAsync(
|
||||
/// <summary>
|
||||
/// Removes the specified entry from the archive.
|
||||
/// </summary>
|
||||
void RemoveEntry(IArchiveEntry entry);
|
||||
}
|
||||
|
||||
public interface IWritableAsyncArchive : IAsyncArchive, IWritableArchiveCommon
|
||||
{
|
||||
/// <summary>
|
||||
/// Asynchronously saves the archive to the specified stream using the given writer options.
|
||||
/// </summary>
|
||||
ValueTask SaveToAsync(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
CancellationToken cancellationToken = default
|
||||
);
|
||||
|
||||
/// <summary>
|
||||
/// Use this to pause entry rebuilding when adding large collections of entries. Dispose when complete. A using statement is recommended.
|
||||
/// Asynchronously adds an entry to the archive with the specified key, source stream, and options.
|
||||
/// </summary>
|
||||
/// <returns>IDisposable to resume entry rebuilding</returns>
|
||||
IDisposable PauseEntryRebuilding();
|
||||
ValueTask<IArchiveEntry> AddEntryAsync(
|
||||
string key,
|
||||
Stream source,
|
||||
bool closeStream,
|
||||
long size = 0,
|
||||
DateTime? modified = null,
|
||||
CancellationToken cancellationToken = default
|
||||
);
|
||||
|
||||
/// <summary>
|
||||
/// Asynchronously adds a directory entry to the archive with the specified key and modification time.
|
||||
/// </summary>
|
||||
ValueTask<IArchiveEntry> AddDirectoryEntryAsync(
|
||||
string key,
|
||||
DateTime? modified = null,
|
||||
CancellationToken cancellationToken = default
|
||||
);
|
||||
|
||||
/// <summary>
|
||||
/// Removes the specified entry from the archive.
|
||||
/// </summary>
|
||||
ValueTask RemoveEntryAsync(IArchiveEntry entry);
|
||||
}
|
||||
|
||||
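A short usage sketch of the async writable surface defined above. It assumes archive is a concrete IWritableAsyncArchive obtained elsewhere and that the file paths are placeholders; it is not lifted from the library's documentation:

// Add one entry from a stream, then persist the archive asynchronously.
await using var source = File.OpenRead("readme.txt");                   // placeholder input
await archive.AddEntryAsync("readme.txt", source, closeStream: false, size: source.Length);
await using var target = File.Create("out.archive");                    // placeholder output
await archive.SaveToAsync(target, new WriterOptions(CompressionType.Deflate));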
@@ -1,106 +1,70 @@
|
||||
using System;
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Writers;
|
||||
|
||||
namespace SharpCompress.Archives;
|
||||
|
||||
public static class IWritableArchiveExtensions
|
||||
{
|
||||
public static void AddEntry(
|
||||
this IWritableArchive writableArchive,
|
||||
string entryPath,
|
||||
string filePath
|
||||
)
|
||||
extension(IWritableArchive writableArchive)
|
||||
{
|
||||
var fileInfo = new FileInfo(filePath);
|
||||
if (!fileInfo.Exists)
|
||||
public void AddAllFromDirectory(
|
||||
string filePath,
|
||||
string searchPattern = "*.*",
|
||||
SearchOption searchOption = SearchOption.AllDirectories
|
||||
)
|
||||
{
|
||||
throw new FileNotFoundException("Could not AddEntry: " + filePath);
|
||||
}
|
||||
writableArchive.AddEntry(
|
||||
entryPath,
|
||||
new FileInfo(filePath).OpenRead(),
|
||||
true,
|
||||
fileInfo.Length,
|
||||
fileInfo.LastWriteTime
|
||||
);
|
||||
}
|
||||
|
||||
public static void SaveTo(
|
||||
this IWritableArchive writableArchive,
|
||||
string filePath,
|
||||
WriterOptions options
|
||||
) => writableArchive.SaveTo(new FileInfo(filePath), options);
|
||||
|
||||
public static void SaveTo(
|
||||
this IWritableArchive writableArchive,
|
||||
FileInfo fileInfo,
|
||||
WriterOptions options
|
||||
)
|
||||
{
|
||||
using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
|
||||
writableArchive.SaveTo(stream, options);
|
||||
}
|
||||
|
||||
public static Task SaveToAsync(
|
||||
this IWritableArchive writableArchive,
|
||||
string filePath,
|
||||
WriterOptions options,
|
||||
CancellationToken cancellationToken = default
|
||||
) => writableArchive.SaveToAsync(new FileInfo(filePath), options, cancellationToken);
|
||||
|
||||
public static async Task SaveToAsync(
|
||||
this IWritableArchive writableArchive,
|
||||
FileInfo fileInfo,
|
||||
WriterOptions options,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
|
||||
await writableArchive.SaveToAsync(stream, options, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
public static void AddAllFromDirectory(
|
||||
this IWritableArchive writableArchive,
|
||||
string filePath,
|
||||
string searchPattern = "*.*",
|
||||
SearchOption searchOption = SearchOption.AllDirectories
|
||||
)
|
||||
{
|
||||
using (writableArchive.PauseEntryRebuilding())
|
||||
{
|
||||
foreach (var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption))
|
||||
using (writableArchive.PauseEntryRebuilding())
|
||||
{
|
||||
var fileInfo = new FileInfo(path);
|
||||
writableArchive.AddEntry(
|
||||
path.Substring(filePath.Length),
|
||||
fileInfo.OpenRead(),
|
||||
true,
|
||||
fileInfo.Length,
|
||||
fileInfo.LastWriteTime
|
||||
);
|
||||
foreach (
|
||||
var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption)
|
||||
)
|
||||
{
|
||||
var fileInfo = new FileInfo(path);
|
||||
writableArchive.AddEntry(
|
||||
path.Substring(filePath.Length),
|
||||
fileInfo.OpenRead(),
|
||||
true,
|
||||
fileInfo.Length,
|
||||
fileInfo.LastWriteTime
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static IArchiveEntry AddEntry(
|
||||
this IWritableArchive writableArchive,
|
||||
string key,
|
||||
FileInfo fileInfo
|
||||
)
|
||||
{
|
||||
if (!fileInfo.Exists)
|
||||
public IArchiveEntry AddEntry(string key, string file) =>
|
||||
writableArchive.AddEntry(key, new FileInfo(file));
|
||||
|
||||
public IArchiveEntry AddEntry(
|
||||
string key,
|
||||
Stream source,
|
||||
long size = 0,
|
||||
DateTime? modified = null
|
||||
) => writableArchive.AddEntry(key, source, false, size, modified);
|
||||
|
||||
public IArchiveEntry AddEntry(string key, FileInfo fileInfo)
|
||||
{
|
||||
throw new ArgumentException("FileInfo does not exist.");
|
||||
if (!fileInfo.Exists)
|
||||
{
|
||||
throw new ArgumentException("FileInfo does not exist.");
|
||||
}
|
||||
return writableArchive.AddEntry(
|
||||
key,
|
||||
fileInfo.OpenRead(),
|
||||
true,
|
||||
fileInfo.Length,
|
||||
fileInfo.LastWriteTime
|
||||
);
|
||||
}
|
||||
|
||||
public void SaveTo(string filePath, WriterOptions? options = null) =>
|
||||
writableArchive.SaveTo(new FileInfo(filePath), options ?? new(CompressionType.Deflate));
|
||||
|
||||
public void SaveTo(FileInfo fileInfo, WriterOptions? options = null)
|
||||
{
|
||||
using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
|
||||
writableArchive.SaveTo(stream, options ?? new(CompressionType.Deflate));
|
||||
}
|
||||
return writableArchive.AddEntry(
|
||||
key,
|
||||
fileInfo.OpenRead(),
|
||||
true,
|
||||
fileInfo.Length,
|
||||
fileInfo.LastWriteTime
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
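For reference, a brief sketch of these extension members in use; archive is assumed to be an existing IWritableArchive and the paths are placeholders:

archive.AddEntry("docs/readme.txt", @"c:\temp\readme.txt");   // entry key plus source file path
archive.AddAllFromDirectory(@"c:\temp\data");                 // pauses entry rebuilding internally, as shown above
archive.SaveTo(@"c:\temp\out.zip", new WriterOptions(CompressionType.Deflate));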
10  src/SharpCompress/Archives/IWritableArchiveOpenable.cs  (Normal file)
@@ -0,0 +1,10 @@
|
||||
#if NET8_0_OR_GREATER
|
||||
namespace SharpCompress.Archives;
|
||||
|
||||
public interface IWritableArchiveOpenable
|
||||
: IArchiveOpenable<IWritableArchive, IWritableAsyncArchive>
|
||||
{
|
||||
public static abstract IWritableArchive CreateArchive();
|
||||
public static abstract IWritableAsyncArchive CreateAsyncArchive();
|
||||
}
|
||||
#endif
|
||||
@@ -0,0 +1,86 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Writers;
|
||||
|
||||
namespace SharpCompress.Archives;
|
||||
|
||||
public static class IWritableAsyncArchiveExtensions
|
||||
{
|
||||
extension(IWritableAsyncArchive writableArchive)
|
||||
{
|
||||
public async ValueTask AddAllFromDirectoryAsync(
|
||||
string filePath,
|
||||
string searchPattern = "*.*",
|
||||
SearchOption searchOption = SearchOption.AllDirectories
|
||||
)
|
||||
{
|
||||
using (writableArchive.PauseEntryRebuilding())
|
||||
{
|
||||
foreach (
|
||||
var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption)
|
||||
)
|
||||
{
|
||||
var fileInfo = new FileInfo(path);
|
||||
await writableArchive.AddEntryAsync(
|
||||
path.Substring(filePath.Length),
|
||||
fileInfo.OpenRead(),
|
||||
true,
|
||||
fileInfo.Length,
|
||||
fileInfo.LastWriteTime
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public ValueTask<IArchiveEntry> AddEntryAsync(string key, string file) =>
|
||||
writableArchive.AddEntryAsync(key, new FileInfo(file));
|
||||
|
||||
public ValueTask<IArchiveEntry> AddEntryAsync(
|
||||
string key,
|
||||
Stream source,
|
||||
long size = 0,
|
||||
DateTime? modified = null
|
||||
) => writableArchive.AddEntryAsync(key, source, false, size, modified);
|
||||
|
||||
public ValueTask<IArchiveEntry> AddEntryAsync(string key, FileInfo fileInfo)
|
||||
{
|
||||
if (!fileInfo.Exists)
|
||||
{
|
||||
throw new ArgumentException("FileInfo does not exist.");
|
||||
}
|
||||
return writableArchive.AddEntryAsync(
|
||||
key,
|
||||
fileInfo.OpenRead(),
|
||||
true,
|
||||
fileInfo.Length,
|
||||
fileInfo.LastWriteTime
|
||||
);
|
||||
}
|
||||
|
||||
public ValueTask SaveToAsync(
|
||||
string filePath,
|
||||
WriterOptions? options = null,
|
||||
CancellationToken cancellationToken = default
|
||||
) =>
|
||||
writableArchive.SaveToAsync(
|
||||
new FileInfo(filePath),
|
||||
options ?? new(CompressionType.Deflate),
|
||||
cancellationToken
|
||||
);
|
||||
|
||||
public async ValueTask SaveToAsync(
|
||||
FileInfo fileInfo,
|
||||
WriterOptions? options = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
using var stream = fileInfo.Open(FileMode.Create, FileAccess.Write);
|
||||
await writableArchive
|
||||
.SaveToAsync(stream, options ?? new(CompressionType.Deflate), cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
}
|
||||
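The async counterparts compose the same way. A minimal sketch, again with archive assumed to be an IWritableAsyncArchive and placeholder paths:

await archive.AddAllFromDirectoryAsync(@"c:\temp\data");
await archive.AddEntryAsync("notes.txt", @"c:\temp\notes.txt");
await archive.SaveToAsync(@"c:\temp\out.zip", new WriterOptions(CompressionType.Deflate));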
@@ -16,5 +16,5 @@ public interface IWriteableArchiveFactory : Factories.IFactory
|
||||
/// Creates a new, empty archive, ready to be written.
|
||||
/// </summary>
|
||||
/// <returns></returns>
|
||||
IWritableArchive CreateWriteableArchive();
|
||||
IWritableArchive CreateArchive();
|
||||
}
|
||||
|
||||
@@ -36,4 +36,7 @@ internal class FileInfoRarArchiveVolume : RarVolume
|
||||
new FileInfoRarFilePart(this, ReaderOptions.Password, markHeader, fileHeader, FileInfo);
|
||||
|
||||
internal override IEnumerable<RarFilePart> ReadFileParts() => FileParts;
|
||||
|
||||
internal override IAsyncEnumerable<RarFilePart> ReadFilePartsAsync() =>
|
||||
FileParts.ToAsyncEnumerable();
|
||||
}
|
||||
|
||||
53  src/SharpCompress/Archives/Rar/RarArchive.Async.cs  (Normal file)
@@ -0,0 +1,53 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Archives.Rar;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Rar;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Readers;
|
||||
using SharpCompress.Readers.Rar;
|
||||
|
||||
namespace SharpCompress.Archives.Rar;
|
||||
|
||||
public partial class RarArchive
|
||||
{
|
||||
public override async ValueTask DisposeAsync()
|
||||
{
|
||||
if (!_disposed)
|
||||
{
|
||||
if (UnpackV1.IsValueCreated && UnpackV1.Value is IDisposable unpackV1)
|
||||
{
|
||||
unpackV1.Dispose();
|
||||
}
|
||||
|
||||
_disposed = true;
|
||||
await base.DisposeAsync();
|
||||
}
|
||||
}
|
||||
|
||||
protected override async ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync()
|
||||
{
|
||||
if (await this.IsMultipartVolumeAsync())
|
||||
{
|
||||
var streams = await VolumesAsync
|
||||
.Select(volume =>
|
||||
{
|
||||
volume.Stream.Position = 0;
|
||||
return volume.Stream;
|
||||
})
|
||||
.ToListAsync();
|
||||
return (RarReader)RarReader.OpenReader(streams, ReaderOptions);
|
||||
}
|
||||
|
||||
var stream = (await VolumesAsync.FirstAsync()).Stream;
|
||||
stream.Position = 0;
|
||||
return (RarReader)RarReader.OpenReader(stream, ReaderOptions);
|
||||
}
|
||||
|
||||
public override async ValueTask<bool> IsSolidAsync() =>
|
||||
await (await VolumesAsync.CastAsync<RarVolume>().FirstAsync()).IsSolidArchiveAsync();
|
||||
}
|
||||
@@ -1,18 +1,36 @@
|
||||
using System.Linq;
|
||||
using System.Linq;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.Rar;
|
||||
|
||||
namespace SharpCompress.Archives.Rar;
|
||||
|
||||
public static class RarArchiveExtensions
|
||||
{
|
||||
/// <summary>
|
||||
/// RarArchive is the first volume of a multi-part archive. If IsMultipartVolume is true and IsFirstVolume is false, the first volume file must be missing.
|
||||
/// </summary>
|
||||
public static bool IsFirstVolume(this RarArchive archive) =>
|
||||
archive.Volumes.First().IsFirstVolume;
|
||||
extension(IRarArchive archive)
|
||||
{
|
||||
/// <summary>
|
||||
/// RarArchive is the first volume of a multi-part archive. If IsMultipartVolume is true and IsFirstVolume is false, the first volume file must be missing.
|
||||
/// </summary>
|
||||
public bool IsFirstVolume() => archive.Volumes.Cast<RarVolume>().First().IsFirstVolume;
|
||||
|
||||
/// <summary>
|
||||
/// RarArchive is part of a multi-part archive.
|
||||
/// </summary>
|
||||
public static bool IsMultipartVolume(this RarArchive archive) =>
|
||||
archive.Volumes.First().IsMultiVolume;
|
||||
/// <summary>
|
||||
/// RarArchive is part of a multi-part archive.
|
||||
/// </summary>
|
||||
public bool IsMultipartVolume() => archive.Volumes.Cast<RarVolume>().First().IsMultiVolume;
|
||||
}
|
||||
|
||||
extension(IRarAsyncArchive archive)
|
||||
{
|
||||
/// <summary>
|
||||
/// RarArchive is the first volume of a multi-part archive. If IsMultipartVolume is true and IsFirstVolume is false, the first volume file must be missing.
|
||||
/// </summary>
|
||||
public async ValueTask<bool> IsFirstVolumeAsync() =>
|
||||
(await archive.VolumesAsync.CastAsync<RarVolume>().FirstAsync()).IsFirstVolume;
|
||||
|
||||
/// <summary>
|
||||
/// RarArchive is part of a multi-part archive.
|
||||
/// </summary>
|
||||
public async ValueTask<bool> IsMultipartVolumeAsync() =>
|
||||
(await archive.VolumesAsync.CastAsync<RarVolume>().FirstAsync()).IsMultiVolume;
|
||||
}
|
||||
}
|
||||
|
||||
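Sketch of how the two checks combine in practice; archive is assumed to be an IRarArchive opened from whichever part was available:

if (archive.IsMultipartVolume() && !archive.IsFirstVolume())
{
    // The opened file is a later volume and the first part is missing,
    // so extraction cannot succeed; surface an error to the caller.
}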
187  src/SharpCompress/Archives/Rar/RarArchive.Factory.cs  (Normal file)
@@ -0,0 +1,187 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Rar;
|
||||
using SharpCompress.Common.Rar.Headers;
|
||||
using SharpCompress.Compressors.Rar;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Readers;
|
||||
using SharpCompress.Readers.Rar;
|
||||
|
||||
namespace SharpCompress.Archives.Rar;
|
||||
|
||||
public partial class RarArchive
|
||||
#if NET8_0_OR_GREATER
|
||||
: IArchiveOpenable<IRarArchive, IRarAsyncArchive>,
|
||||
IMultiArchiveOpenable<IRarArchive, IRarAsyncArchive>
|
||||
#endif
|
||||
{
|
||||
public static IRarAsyncArchive OpenAsyncArchive(
|
||||
string path,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
path.NotNullOrEmpty(nameof(path));
|
||||
return (IRarAsyncArchive)OpenArchive(new FileInfo(path), readerOptions);
|
||||
}
|
||||
|
||||
public static IRarArchive OpenArchive(string filePath, ReaderOptions? options = null)
|
||||
{
|
||||
filePath.NotNullOrEmpty(nameof(filePath));
|
||||
var fileInfo = new FileInfo(filePath);
|
||||
return new RarArchive(
|
||||
new SourceStream(
|
||||
fileInfo,
|
||||
i => RarArchiveVolumeFactory.GetFilePart(i, fileInfo),
|
||||
options ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
public static IRarArchive OpenArchive(FileInfo fileInfo, ReaderOptions? options = null)
|
||||
{
|
||||
fileInfo.NotNull(nameof(fileInfo));
|
||||
return new RarArchive(
|
||||
new SourceStream(
|
||||
fileInfo,
|
||||
i => RarArchiveVolumeFactory.GetFilePart(i, fileInfo),
|
||||
options ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
public static IRarArchive OpenArchive(Stream stream, ReaderOptions? options = null)
|
||||
{
|
||||
stream.NotNull(nameof(stream));
|
||||
|
||||
if (stream is not { CanSeek: true })
|
||||
{
|
||||
throw new ArgumentException("Stream must be seekable", nameof(stream));
|
||||
}
|
||||
|
||||
return new RarArchive(new SourceStream(stream, _ => null, options ?? new ReaderOptions()));
|
||||
}
|
||||
|
||||
public static IRarArchive OpenArchive(
|
||||
IEnumerable<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
fileInfos.NotNull(nameof(fileInfos));
|
||||
var files = fileInfos.ToArray();
|
||||
return new RarArchive(
|
||||
new SourceStream(
|
||||
files[0],
|
||||
i => i < files.Length ? files[i] : null,
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
public static IRarArchive OpenArchive(
|
||||
IEnumerable<Stream> streams,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
streams.NotNull(nameof(streams));
|
||||
var strms = streams.ToArray();
|
||||
return new RarArchive(
|
||||
new SourceStream(
|
||||
strms[0],
|
||||
i => i < strms.Length ? strms[i] : null,
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
public static IRarAsyncArchive OpenAsyncArchive(
|
||||
Stream stream,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IRarAsyncArchive)OpenArchive(stream, readerOptions);
|
||||
}
|
||||
|
||||
public static IRarAsyncArchive OpenAsyncArchive(
|
||||
FileInfo fileInfo,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IRarAsyncArchive)OpenArchive(fileInfo, readerOptions);
|
||||
}
|
||||
|
||||
public static IRarAsyncArchive OpenAsyncArchive(
|
||||
IReadOnlyList<Stream> streams,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IRarAsyncArchive)OpenArchive(streams, readerOptions);
|
||||
}
|
||||
|
||||
public static IRarAsyncArchive OpenAsyncArchive(
|
||||
IReadOnlyList<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IRarAsyncArchive)OpenArchive(fileInfos, readerOptions);
|
||||
}
|
||||
|
||||
public static bool IsRarFile(string filePath) => IsRarFile(new FileInfo(filePath));
|
||||
|
||||
public static bool IsRarFile(FileInfo fileInfo)
|
||||
{
|
||||
if (!fileInfo.Exists)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
using Stream stream = fileInfo.OpenRead();
|
||||
return IsRarFile(stream);
|
||||
}
|
||||
|
||||
public static bool IsRarFile(Stream stream, ReaderOptions? options = null)
|
||||
{
|
||||
try
|
||||
{
|
||||
MarkHeader.Read(stream, true, false);
|
||||
return true;
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public static async ValueTask<bool> IsRarFileAsync(
|
||||
Stream stream,
|
||||
ReaderOptions? options = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
try
|
||||
{
|
||||
await MarkHeader
|
||||
.ReadAsync(stream, true, false, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
return true;
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
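A usage sketch for the factory surface above; the file name is a placeholder, and later parts are located automatically via RarArchiveVolumeFactory.GetFilePart:

if (RarArchive.IsRarFile("backup.part1.rar"))
{
    using var archive = RarArchive.OpenArchive("backup.part1.rar");
    foreach (var entry in archive.Entries)
    {
        if (entry.IsDirectory)
        {
            continue;
        }
        using var stream = entry.OpenEntryStream();
        // copy the stream to wherever the data is needed
    }
}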
@@ -2,6 +2,8 @@ using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Rar;
|
||||
using SharpCompress.Common.Rar.Headers;
|
||||
@@ -12,17 +14,26 @@ using SharpCompress.Readers.Rar;
|
||||
|
||||
namespace SharpCompress.Archives.Rar;
|
||||
|
||||
public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
|
||||
public interface IRarArchiveCommon
|
||||
{
|
||||
int MinVersion { get; }
|
||||
int MaxVersion { get; }
|
||||
}
|
||||
|
||||
public interface IRarArchive : IArchive, IRarArchiveCommon { }
|
||||
|
||||
public interface IRarAsyncArchive : IAsyncArchive, IRarArchiveCommon { }
|
||||
|
||||
public partial class RarArchive
|
||||
: AbstractArchive<RarArchiveEntry, RarVolume>,
|
||||
IRarArchive,
|
||||
IRarAsyncArchive
|
||||
{
|
||||
private bool _disposed;
|
||||
internal Lazy<IRarUnpack> UnpackV2017 { get; } =
|
||||
new(() => new Compressors.Rar.UnpackV2017.Unpack());
|
||||
internal Lazy<IRarUnpack> UnpackV1 { get; } = new(() => new Compressors.Rar.UnpackV1.Unpack());
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with a SourceStream able to handle FileInfo and Streams.
|
||||
/// </summary>
|
||||
/// <param name="sourceStream"></param>
|
||||
private RarArchive(SourceStream sourceStream)
|
||||
: base(ArchiveType.Rar, sourceStream) { }
|
||||
|
||||
@@ -43,12 +54,17 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
|
||||
protected override IEnumerable<RarArchiveEntry> LoadEntries(IEnumerable<RarVolume> volumes) =>
|
||||
RarArchiveEntryFactory.GetEntries(this, volumes, ReaderOptions);
|
||||
|
||||
// Simple async property - kept in original file
|
||||
protected override IAsyncEnumerable<RarArchiveEntry> LoadEntriesAsync(
|
||||
IAsyncEnumerable<RarVolume> volumes
|
||||
) => RarArchiveEntryFactory.GetEntriesAsync(this, volumes, ReaderOptions);
|
||||
|
||||
protected override IEnumerable<RarVolume> LoadVolumes(SourceStream sourceStream)
|
||||
{
|
||||
sourceStream.LoadAllParts(); //request all streams
|
||||
sourceStream.LoadAllParts();
|
||||
var streams = sourceStream.Streams.ToArray();
|
||||
var i = 0;
|
||||
if (streams.Length > 1 && IsRarFile(streams[1], ReaderOptions)) //test part 2 - true = multipart not split
|
||||
if (streams.Length > 1 && IsRarFile(streams[1], ReaderOptions))
|
||||
{
|
||||
sourceStream.IsVolumes = true;
|
||||
streams[1].Position = 0;
|
||||
@@ -61,7 +77,6 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
|
||||
));
|
||||
}
|
||||
|
||||
//split mode or single file
|
||||
return new StreamRarArchiveVolume(sourceStream, ReaderOptions, i++).AsEnumerable();
|
||||
}
|
||||
|
||||
@@ -74,12 +89,12 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
|
||||
volume.Stream.Position = 0;
|
||||
return volume.Stream;
|
||||
});
|
||||
return RarReader.Open(streams, ReaderOptions);
|
||||
return (RarReader)RarReader.OpenReader(streams, ReaderOptions);
|
||||
}
|
||||
|
||||
var stream = Volumes.First().Stream;
|
||||
stream.Position = 0;
|
||||
return RarReader.Open(stream, ReaderOptions);
|
||||
return (RarReader)RarReader.OpenReader(stream, ReaderOptions);
|
||||
}
|
||||
|
||||
public override bool IsSolid => Volumes.First().IsSolidArchive;
|
||||
@@ -87,124 +102,6 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
|
||||
public override bool IsEncrypted => Entries.First(x => !x.IsDirectory).IsEncrypted;
|
||||
|
||||
public virtual int MinVersion => Volumes.First().MinVersion;
|
||||
|
||||
public virtual int MaxVersion => Volumes.First().MaxVersion;
|
||||
|
||||
#region Creation
|
||||
/// <summary>
|
||||
/// Constructor with a FileInfo object to an existing file.
|
||||
/// </summary>
|
||||
/// <param name="filePath"></param>
|
||||
/// <param name="options"></param>
|
||||
public static RarArchive Open(string filePath, ReaderOptions? options = null)
|
||||
{
|
||||
filePath.NotNullOrEmpty(nameof(filePath));
|
||||
var fileInfo = new FileInfo(filePath);
|
||||
return new RarArchive(
|
||||
new SourceStream(
|
||||
fileInfo,
|
||||
i => RarArchiveVolumeFactory.GetFilePart(i, fileInfo),
|
||||
options ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with a FileInfo object to an existing file.
|
||||
/// </summary>
|
||||
/// <param name="fileInfo"></param>
|
||||
/// <param name="options"></param>
|
||||
public static RarArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
|
||||
{
|
||||
fileInfo.NotNull(nameof(fileInfo));
|
||||
return new RarArchive(
|
||||
new SourceStream(
|
||||
fileInfo,
|
||||
i => RarArchiveVolumeFactory.GetFilePart(i, fileInfo),
|
||||
options ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Takes a seekable Stream as a source
|
||||
/// </summary>
|
||||
/// <param name="stream"></param>
|
||||
/// <param name="options"></param>
|
||||
public static RarArchive Open(Stream stream, ReaderOptions? options = null)
|
||||
{
|
||||
stream.NotNull(nameof(stream));
|
||||
|
||||
if (stream is not { CanSeek: true })
|
||||
{
|
||||
throw new ArgumentException("Stream must be seekable", nameof(stream));
|
||||
}
|
||||
|
||||
return new RarArchive(new SourceStream(stream, _ => null, options ?? new ReaderOptions()));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with all file parts passed in
|
||||
/// </summary>
|
||||
/// <param name="fileInfos"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static RarArchive Open(
|
||||
IEnumerable<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
fileInfos.NotNull(nameof(fileInfos));
|
||||
var files = fileInfos.ToArray();
|
||||
return new RarArchive(
|
||||
new SourceStream(
|
||||
files[0],
|
||||
i => i < files.Length ? files[i] : null,
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with all stream parts passed in
|
||||
/// </summary>
|
||||
/// <param name="streams"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static RarArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
streams.NotNull(nameof(streams));
|
||||
var strms = streams.ToArray();
|
||||
return new RarArchive(
|
||||
new SourceStream(
|
||||
strms[0],
|
||||
i => i < strms.Length ? strms[i] : null,
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
public static bool IsRarFile(string filePath) => IsRarFile(new FileInfo(filePath));
|
||||
|
||||
public static bool IsRarFile(FileInfo fileInfo)
|
||||
{
|
||||
if (!fileInfo.Exists)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
using Stream stream = fileInfo.OpenRead();
|
||||
return IsRarFile(stream);
|
||||
}
|
||||
|
||||
public static bool IsRarFile(Stream stream, ReaderOptions? options = null)
|
||||
{
|
||||
try
|
||||
{
|
||||
MarkHeader.Read(stream, true, false);
|
||||
return true;
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
|
||||
43  src/SharpCompress/Archives/Rar/RarArchiveEntry.Async.cs  (Normal file)
@@ -0,0 +1,43 @@
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Rar;
|
||||
using SharpCompress.Common.Rar.Headers;
|
||||
using SharpCompress.Compressors.Rar;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Archives.Rar;
|
||||
|
||||
public partial class RarArchiveEntry
|
||||
{
|
||||
public async ValueTask<Stream> OpenEntryStreamAsync(
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
RarStream stream;
|
||||
if (IsRarV3)
|
||||
{
|
||||
stream = new RarStream(
|
||||
archive.UnpackV1.Value,
|
||||
FileHeader,
|
||||
await MultiVolumeReadOnlyAsyncStream.Create(
|
||||
Parts.ToAsyncEnumerable().CastAsync<RarFilePart>()
|
||||
)
|
||||
);
|
||||
}
|
||||
else
|
||||
{
|
||||
stream = new RarStream(
|
||||
archive.UnpackV2017.Value,
|
||||
FileHeader,
|
||||
await MultiVolumeReadOnlyAsyncStream.Create(
|
||||
Parts.ToAsyncEnumerable().CastAsync<RarFilePart>()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
await stream.InitializeAsync(cancellationToken);
|
||||
return stream;
|
||||
}
|
||||
}
|
||||
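A small sketch of consuming the async entry stream above; entry, cancellationToken and the output path are assumptions, not library-provided names:

await using var source = await entry.OpenEntryStreamAsync(cancellationToken);
await using var target = File.Create("extracted.bin");
await source.CopyToAsync(target, cancellationToken);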
@@ -12,7 +12,7 @@ using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Archives.Rar;
|
||||
|
||||
public class RarArchiveEntry : RarEntry, IArchiveEntry
|
||||
public partial class RarArchiveEntry : RarEntry, IArchiveEntry
|
||||
{
|
||||
private readonly ICollection<RarFilePart> parts;
|
||||
private readonly RarArchive archive;
|
||||
@@ -92,30 +92,6 @@ public class RarArchiveEntry : RarEntry, IArchiveEntry
|
||||
return stream;
|
||||
}
|
||||
|
||||
public async Task<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
RarStream stream;
|
||||
if (IsRarV3)
|
||||
{
|
||||
stream = new RarStream(
|
||||
archive.UnpackV1.Value,
|
||||
FileHeader,
|
||||
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>())
|
||||
);
|
||||
}
|
||||
else
|
||||
{
|
||||
stream = new RarStream(
|
||||
archive.UnpackV2017.Value,
|
||||
FileHeader,
|
||||
new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>())
|
||||
);
|
||||
}
|
||||
|
||||
await stream.InitializeAsync(cancellationToken);
|
||||
return stream;
|
||||
}
|
||||
|
||||
public bool IsComplete
|
||||
{
|
||||
get
|
||||
|
||||
@@ -17,6 +17,19 @@ internal static class RarArchiveEntryFactory
|
||||
}
|
||||
}
|
||||
|
||||
private static async IAsyncEnumerable<RarFilePart> GetFilePartsAsync(
|
||||
IAsyncEnumerable<RarVolume> parts
|
||||
)
|
||||
{
|
||||
await foreach (var rarPart in parts)
|
||||
{
|
||||
await foreach (var fp in rarPart.ReadFilePartsAsync())
|
||||
{
|
||||
yield return fp;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static IEnumerable<IEnumerable<RarFilePart>> GetMatchedFileParts(
|
||||
IEnumerable<RarVolume> parts
|
||||
)
|
||||
@@ -38,6 +51,27 @@ internal static class RarArchiveEntryFactory
|
||||
}
|
||||
}
|
||||
|
||||
private static async IAsyncEnumerable<IEnumerable<RarFilePart>> GetMatchedFilePartsAsync(
|
||||
IAsyncEnumerable<RarVolume> parts
|
||||
)
|
||||
{
|
||||
var groupedParts = new List<RarFilePart>();
|
||||
await foreach (var fp in GetFilePartsAsync(parts))
|
||||
{
|
||||
groupedParts.Add(fp);
|
||||
|
||||
if (!fp.FileHeader.IsSplitAfter)
|
||||
{
|
||||
yield return groupedParts;
|
||||
groupedParts = new List<RarFilePart>();
|
||||
}
|
||||
}
|
||||
if (groupedParts.Count > 0)
|
||||
{
|
||||
yield return groupedParts;
|
||||
}
|
||||
}
|
||||
|
||||
internal static IEnumerable<RarArchiveEntry> GetEntries(
|
||||
RarArchive archive,
|
||||
IEnumerable<RarVolume> rarParts,
|
||||
@@ -49,4 +83,16 @@ internal static class RarArchiveEntryFactory
|
||||
yield return new RarArchiveEntry(archive, groupedParts, readerOptions);
|
||||
}
|
||||
}
|
||||
|
||||
internal static async IAsyncEnumerable<RarArchiveEntry> GetEntriesAsync(
|
||||
RarArchive archive,
|
||||
IAsyncEnumerable<RarVolume> rarParts,
|
||||
ReaderOptions readerOptions
|
||||
)
|
||||
{
|
||||
await foreach (var groupedParts in GetMatchedFilePartsAsync(rarParts))
|
||||
{
|
||||
yield return new RarArchiveEntry(archive, groupedParts, readerOptions);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -13,6 +13,7 @@ internal static class RarArchiveVolumeFactory
|
||||
//new style rar - ..part1 | /part01 | part001 ....
|
||||
var m = Regex.Match(part1.Name, @"^(.*\.part)([0-9]+)(\.rar)$", RegexOptions.IgnoreCase);
|
||||
if (m.Success)
|
||||
{
|
||||
item = new FileInfo(
|
||||
Path.Combine(
|
||||
part1.DirectoryName!,
|
||||
@@ -23,11 +24,13 @@ internal static class RarArchiveVolumeFactory
|
||||
)
|
||||
)
|
||||
);
|
||||
}
|
||||
else
|
||||
{
|
||||
//old style - ...rar, .r00, .r01 ...
|
||||
m = Regex.Match(part1.Name, @"^(.*\.)([r-z{])(ar|[0-9]+)$", RegexOptions.IgnoreCase);
|
||||
if (m.Success)
|
||||
{
|
||||
item = new FileInfo(
|
||||
Path.Combine(
|
||||
part1.DirectoryName!,
|
||||
@@ -40,12 +43,17 @@ internal static class RarArchiveVolumeFactory
|
||||
)
|
||||
)
|
||||
);
|
||||
}
|
||||
else //split .001, .002 ....
|
||||
{
|
||||
return ArchiveVolumeFactory.GetFilePart(index, part1);
|
||||
}
|
||||
}
|
||||
|
||||
if (item != null && item.Exists)
|
||||
{
|
||||
return item;
|
||||
}
|
||||
|
||||
return null; //no more items
|
||||
}
|
||||
|
||||
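For reference, the volume-name patterns handled above (examples only, taken from the comments in this method):

new style:  archive.part01.rar, archive.part02.rar, ...
old style:  archive.rar, archive.r00, archive.r01, ...
split:      archive.001, archive.002, ... (delegated to ArchiveVolumeFactory.GetFilePart)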
@@ -14,6 +14,9 @@ internal class StreamRarArchiveVolume : RarVolume
|
||||
|
||||
internal override IEnumerable<RarFilePart> ReadFileParts() => GetVolumeFileParts();
|
||||
|
||||
internal override IAsyncEnumerable<RarFilePart> ReadFilePartsAsync() =>
|
||||
GetVolumeFilePartsAsync();
|
||||
|
||||
internal override RarFilePart CreateFilePart(MarkHeader markHeader, FileHeader fileHeader) =>
|
||||
new SeekableFilePart(markHeader, fileHeader, Index, Stream, ReaderOptions.Password);
|
||||
}
|
||||
|
||||
73  src/SharpCompress/Archives/SevenZip/SevenZipArchive.Async.cs  (Normal file)
@@ -0,0 +1,73 @@
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.SevenZip;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Archives.SevenZip;
|
||||
|
||||
public partial class SevenZipArchive
|
||||
{
|
||||
private async ValueTask LoadFactoryAsync(
|
||||
Stream stream,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
if (_database is null)
|
||||
{
|
||||
stream.Position = 0;
|
||||
var reader = new ArchiveReader();
|
||||
await reader.OpenAsync(
|
||||
stream,
|
||||
lookForHeader: ReaderOptions.LookForHeader,
|
||||
cancellationToken
|
||||
);
|
||||
_database = await reader.ReadDatabaseAsync(
|
||||
new PasswordProvider(ReaderOptions.Password),
|
||||
cancellationToken
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
protected override async IAsyncEnumerable<SevenZipArchiveEntry> LoadEntriesAsync(
|
||||
IAsyncEnumerable<SevenZipVolume> volumes
|
||||
)
|
||||
{
|
||||
var stream = (await volumes.SingleAsync()).Stream;
|
||||
await LoadFactoryAsync(stream);
|
||||
if (_database is null)
|
||||
{
|
||||
yield break;
|
||||
}
|
||||
var entries = new SevenZipArchiveEntry[_database._files.Count];
|
||||
for (var i = 0; i < _database._files.Count; i++)
|
||||
{
|
||||
var file = _database._files[i];
|
||||
entries[i] = new SevenZipArchiveEntry(
|
||||
this,
|
||||
new SevenZipFilePart(stream, _database, i, file, ReaderOptions.ArchiveEncoding)
|
||||
);
|
||||
}
|
||||
foreach (var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder))
|
||||
{
|
||||
var isSolid = false;
|
||||
foreach (var entry in group)
|
||||
{
|
||||
entry.IsSolid = isSolid;
|
||||
isSolid = true;
|
||||
}
|
||||
}
|
||||
|
||||
foreach (var entry in entries)
|
||||
{
|
||||
yield return entry;
|
||||
}
|
||||
}
|
||||
|
||||
protected override ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync() =>
|
||||
new(new SevenZipReader(ReaderOptions, this));
|
||||
}
|
||||
210  src/SharpCompress/Archives/SevenZip/SevenZipArchive.Factory.cs  (Normal file)
@@ -0,0 +1,210 @@
|
||||
using System;
|
||||
using System.Buffers;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Archives.SevenZip;
|
||||
|
||||
public partial class SevenZipArchive
|
||||
#if NET8_0_OR_GREATER
|
||||
: IArchiveOpenable<IArchive, IAsyncArchive>,
|
||||
IMultiArchiveOpenable<IArchive, IAsyncArchive>
|
||||
#endif
|
||||
{
|
||||
public static IAsyncArchive OpenAsyncArchive(
|
||||
string path,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
path.NotNullOrEmpty("path");
|
||||
return (IAsyncArchive)OpenArchive(new FileInfo(path), readerOptions ?? new ReaderOptions());
|
||||
}
|
||||
|
||||
public static IArchive OpenArchive(string filePath, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
filePath.NotNullOrEmpty("filePath");
|
||||
return OpenArchive(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
|
||||
}
|
||||
|
||||
public static IArchive OpenArchive(FileInfo fileInfo, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
fileInfo.NotNull("fileInfo");
|
||||
return new SevenZipArchive(
|
||||
new SourceStream(
|
||||
fileInfo,
|
||||
i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
public static IArchive OpenArchive(
|
||||
IEnumerable<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
fileInfos.NotNull(nameof(fileInfos));
|
||||
var files = fileInfos.ToArray();
|
||||
return new SevenZipArchive(
|
||||
new SourceStream(
|
||||
files[0],
|
||||
i => i < files.Length ? files[i] : null,
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
public static IArchive OpenArchive(
|
||||
IEnumerable<Stream> streams,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
streams.NotNull(nameof(streams));
|
||||
var strms = streams.ToArray();
|
||||
return new SevenZipArchive(
|
||||
new SourceStream(
|
||||
strms[0],
|
||||
i => i < strms.Length ? strms[i] : null,
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
public static IArchive OpenArchive(Stream stream, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
stream.NotNull("stream");
|
||||
|
||||
if (stream is not { CanSeek: true })
|
||||
{
|
||||
throw new ArgumentException("Stream must be seekable", nameof(stream));
|
||||
}
|
||||
|
||||
return new SevenZipArchive(
|
||||
new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
|
||||
);
|
||||
}
|
||||
|
||||
public static IAsyncArchive OpenAsyncArchive(
|
||||
Stream stream,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IAsyncArchive)OpenArchive(stream, readerOptions);
|
||||
}
|
||||
|
||||
public static IAsyncArchive OpenAsyncArchive(
|
||||
FileInfo fileInfo,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IAsyncArchive)OpenArchive(fileInfo, readerOptions);
|
||||
}
|
||||
|
||||
public static IAsyncArchive OpenAsyncArchive(
|
||||
IReadOnlyList<Stream> streams,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IAsyncArchive)OpenArchive(streams, readerOptions);
|
||||
}
|
||||
|
||||
public static IAsyncArchive OpenAsyncArchive(
|
||||
IReadOnlyList<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IAsyncArchive)OpenArchive(fileInfos, readerOptions);
|
||||
}
|
||||
|
||||
public static bool IsSevenZipFile(string filePath) => IsSevenZipFile(new FileInfo(filePath));
|
||||
|
||||
public static bool IsSevenZipFile(FileInfo fileInfo)
|
||||
{
|
||||
if (!fileInfo.Exists)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
using Stream stream = fileInfo.OpenRead();
|
||||
return IsSevenZipFile(stream);
|
||||
}
|
||||
|
||||
public static bool IsSevenZipFile(Stream stream)
|
||||
{
|
||||
try
|
||||
{
|
||||
return SignatureMatch(stream);
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public static async ValueTask<bool> IsSevenZipFileAsync(
|
||||
Stream stream,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
try
|
||||
{
|
||||
return await SignatureMatchAsync(stream, cancellationToken);
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
private static ReadOnlySpan<byte> Signature => [(byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C];
|
||||
|
||||
private static bool SignatureMatch(Stream stream)
|
||||
{
|
||||
var buffer = ArrayPool<byte>.Shared.Rent(6);
|
||||
try
|
||||
{
|
||||
stream.ReadExact(buffer, 0, 6);
|
||||
return buffer.AsSpan().Slice(0, 6).SequenceEqual(Signature);
|
||||
}
|
||||
finally
|
||||
{
|
||||
ArrayPool<byte>.Shared.Return(buffer);
|
||||
}
|
||||
}
|
||||
|
||||
private static async ValueTask<bool> SignatureMatchAsync(
|
||||
Stream stream,
|
||||
CancellationToken cancellationToken
|
||||
)
|
||||
{
|
||||
var buffer = ArrayPool<byte>.Shared.Rent(6);
|
||||
try
|
||||
{
|
||||
if (!await stream.ReadFullyAsync(buffer, 0, 6, cancellationToken).ConfigureAwait(false))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
return buffer.AsSpan().Slice(0, 6).SequenceEqual(Signature);
|
||||
}
|
||||
finally
|
||||
{
|
||||
ArrayPool<byte>.Shared.Return(buffer);
|
||||
}
|
||||
}
|
||||
}
|
||||
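Sketch of the async signature check in use; the path is a placeholder. Note that the check consumes the first six bytes, so the position is reset before opening:

await using var fs = File.OpenRead("sample.7z");
if (await SevenZipArchive.IsSevenZipFileAsync(fs))
{
    fs.Position = 0;
    var archive = SevenZipArchive.OpenAsyncArchive(fs);
    // enumerate or extract entries here, then dispose the archive
}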
@@ -12,99 +12,10 @@ using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Archives.SevenZip;
|
||||
|
||||
public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVolume>
|
||||
public partial class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVolume>
|
||||
{
|
||||
private ArchiveDatabase? _database;
|
||||
|
||||
/// <summary>
|
||||
/// Constructor expects a filepath to an existing file.
|
||||
/// </summary>
|
||||
/// <param name="filePath"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static SevenZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
filePath.NotNullOrEmpty("filePath");
|
||||
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with a FileInfo object to an existing file.
|
||||
/// </summary>
|
||||
/// <param name="fileInfo"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static SevenZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
fileInfo.NotNull("fileInfo");
|
||||
return new SevenZipArchive(
|
||||
new SourceStream(
|
||||
fileInfo,
|
||||
i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with all file parts passed in
|
||||
/// </summary>
|
||||
/// <param name="fileInfos"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static SevenZipArchive Open(
|
||||
IEnumerable<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
fileInfos.NotNull(nameof(fileInfos));
|
||||
var files = fileInfos.ToArray();
|
||||
return new SevenZipArchive(
|
||||
new SourceStream(
|
||||
files[0],
|
||||
i => i < files.Length ? files[i] : null,
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with all stream parts passed in
|
||||
/// </summary>
|
||||
/// <param name="streams"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static SevenZipArchive Open(
|
||||
IEnumerable<Stream> streams,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
streams.NotNull(nameof(streams));
|
||||
var strms = streams.ToArray();
|
||||
return new SevenZipArchive(
|
||||
new SourceStream(
|
||||
strms[0],
|
||||
i => i < strms.Length ? strms[i] : null,
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Takes a seekable Stream as a source
|
||||
/// </summary>
|
||||
/// <param name="stream"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static SevenZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
stream.NotNull("stream");
|
||||
|
||||
if (stream is not { CanSeek: true })
|
||||
{
|
||||
throw new ArgumentException("Stream must be seekable", nameof(stream));
|
||||
}
|
||||
|
||||
return new SevenZipArchive(
|
||||
new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
|
||||
);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with a SourceStream able to handle FileInfo and Streams.
|
||||
/// </summary>
|
||||
@@ -118,18 +29,6 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
|
||||
return new SevenZipVolume(sourceStream, ReaderOptions, 0).AsEnumerable(); //simple single volume or split, multivolume not supported
|
||||
}
|
||||
|
||||
public static bool IsSevenZipFile(string filePath) => IsSevenZipFile(new FileInfo(filePath));
|
||||
|
||||
public static bool IsSevenZipFile(FileInfo fileInfo)
|
||||
{
|
||||
if (!fileInfo.Exists)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
using Stream stream = fileInfo.OpenRead();
|
||||
return IsSevenZipFile(stream);
|
||||
}
|
||||
|
||||
internal SevenZipArchive()
|
||||
: base(ArchiveType.SevenZip) { }
|
||||
|
||||
@@ -137,32 +36,45 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
|
||||
IEnumerable<SevenZipVolume> volumes
|
||||
)
|
||||
{
|
||||
var stream = volumes.Single().Stream;
|
||||
LoadFactory(stream);
|
||||
if (_database is null)
|
||||
foreach (var volume in volumes)
|
||||
{
|
||||
return Enumerable.Empty<SevenZipArchiveEntry>();
|
||||
}
|
||||
var entries = new SevenZipArchiveEntry[_database._files.Count];
|
||||
for (var i = 0; i < _database._files.Count; i++)
|
||||
{
|
||||
var file = _database._files[i];
|
||||
entries[i] = new SevenZipArchiveEntry(
|
||||
this,
|
||||
new SevenZipFilePart(stream, _database, i, file, ReaderOptions.ArchiveEncoding)
|
||||
);
|
||||
}
|
||||
foreach (var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder))
|
||||
{
|
||||
var isSolid = false;
|
||||
foreach (var entry in group)
|
||||
LoadFactory(volume.Stream);
|
||||
if (_database is null)
|
||||
{
|
||||
entry.IsSolid = isSolid;
|
||||
isSolid = true; //mark others in this group as solid - same as rar behaviour.
|
||||
yield break;
|
||||
}
|
||||
var entries = new SevenZipArchiveEntry[_database._files.Count];
|
||||
for (var i = 0; i < _database._files.Count; i++)
|
||||
{
|
||||
var file = _database._files[i];
|
||||
entries[i] = new SevenZipArchiveEntry(
|
||||
this,
|
||||
new SevenZipFilePart(
|
||||
volume.Stream,
|
||||
_database,
|
||||
i,
|
||||
file,
|
||||
ReaderOptions.ArchiveEncoding
|
||||
)
|
||||
);
|
||||
}
|
||||
foreach (
|
||||
var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder)
|
||||
)
|
||||
{
|
||||
var isSolid = false;
|
||||
foreach (var entry in group)
|
||||
{
|
||||
entry.IsSolid = isSolid;
|
||||
isSolid = true;
|
||||
}
|
||||
}
|
||||
|
||||
foreach (var entry in entries)
|
||||
{
|
||||
yield return entry;
|
||||
}
|
||||
}
|
||||
|
||||
return entries;
|
||||
}
|
||||
|
||||
private void LoadFactory(Stream stream)
|
||||
@@ -176,28 +88,6 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
|
||||
}
|
||||
}
|
||||
|
||||
public static bool IsSevenZipFile(Stream stream)
|
||||
{
|
||||
try
|
||||
{
|
||||
return SignatureMatch(stream);
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
private static ReadOnlySpan<byte> Signature =>
|
||||
new byte[] { (byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C };
|
||||
|
||||
private static bool SignatureMatch(Stream stream)
|
||||
{
|
||||
var reader = new BinaryReader(stream);
|
||||
ReadOnlySpan<byte> signatureBytes = reader.ReadBytes(6);
|
||||
return signatureBytes.SequenceEqual(Signature);
|
||||
}
|
||||
|
||||
protected override IReader CreateReaderForSolidExtraction() =>
|
||||
new SevenZipReader(ReaderOptions, this);
|
||||
|
||||
@@ -212,13 +102,34 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
|
||||
public override long TotalSize =>
|
||||
_database?._packSizes.Aggregate(0L, (total, packSize) => total + packSize) ?? 0;
|
||||
|
||||
private sealed class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
|
||||
internal sealed class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
|
||||
{
|
||||
private readonly SevenZipArchive _archive;
|
||||
private SevenZipEntry? _currentEntry;
|
||||
private Stream? _currentFolderStream;
|
||||
private CFolder? _currentFolder;
|
||||
|
||||
/// <summary>
|
||||
/// Enables internal diagnostics for tests.
|
||||
/// When disabled (default), diagnostics properties return null to avoid exposing internal state.
|
||||
/// </summary>
|
||||
internal bool DiagnosticsEnabled { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Current folder instance used to decide whether the solid folder stream should be reused.
|
||||
/// Only available when <see cref="DiagnosticsEnabled"/> is true.
|
||||
/// </summary>
|
||||
internal object? DiagnosticsCurrentFolder => DiagnosticsEnabled ? _currentFolder : null;
|
||||
|
||||
/// <summary>
|
||||
/// Current shared folder stream instance.
|
||||
/// Only available when <see cref="DiagnosticsEnabled"/> is true.
|
||||
/// </summary>
|
||||
internal Stream? DiagnosticsCurrentFolderStream =>
|
||||
DiagnosticsEnabled ? _currentFolderStream : null;
|
||||
|
||||
internal SevenZipReader(ReaderOptions readerOptions, SevenZipArchive archive)
|
||||
: base(readerOptions, ArchiveType.SevenZip) => this._archive = archive;
|
||||
: base(readerOptions, ArchiveType.SevenZip, false) => this._archive = archive;
|
||||
|
||||
public override SevenZipVolume Volume => _archive.Volumes.Single();
|
||||
|
||||
@@ -231,9 +142,10 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
|
||||
_currentEntry = dir;
|
||||
yield return dir;
|
||||
}
|
||||
// For non-directory entries, yield them without creating shared streams
|
||||
// Each call to GetEntryStream() will create a fresh decompression stream
|
||||
// to avoid state corruption issues with async operations
|
||||
// For solid archives (entries in the same folder share a compressed stream),
|
||||
// we must iterate entries sequentially and maintain the folder stream state
|
||||
// across entries in the same folder to avoid recreating the decompression
|
||||
// stream for each file, which breaks contiguous streaming.
|
||||
foreach (var entry in entries.Where(x => !x.IsDirectory))
|
||||
{
|
||||
_currentEntry = entry;
|
||||
@@ -243,19 +155,46 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
|
||||
|
||||
protected override EntryStream GetEntryStream()
|
||||
{
|
||||
// Create a fresh decompression stream for each file (no state sharing).
|
||||
// However, the LZMA decoder has bugs in its async implementation that cause
|
||||
// state corruption even on fresh streams. The SyncOnlyStream wrapper
|
||||
// works around these bugs by forcing async operations to use sync equivalents.
|
||||
//
|
||||
// TODO: Fix the LZMA decoder async bugs (in LzmaStream, Decoder, OutWindow)
|
||||
// so this wrapper is no longer necessary.
|
||||
var entry = _currentEntry.NotNull("currentEntry is not null");
|
||||
if (entry.IsDirectory)
|
||||
{
|
||||
return CreateEntryStream(Stream.Null);
|
||||
}
|
||||
return CreateEntryStream(new SyncOnlyStream(entry.FilePart.GetCompressedStream()));
|
||||
|
||||
var folder = entry.FilePart.Folder;
|
||||
|
||||
// Check if we're starting a new folder - dispose old folder stream if needed
|
||||
if (folder != _currentFolder)
|
||||
{
|
||||
_currentFolderStream?.Dispose();
|
||||
_currentFolderStream = null;
|
||||
_currentFolder = folder;
|
||||
}
|
||||
|
||||
// Create the folder stream once per folder
|
||||
if (_currentFolderStream is null)
|
||||
{
|
||||
_currentFolderStream = _archive._database!.GetFolderStream(
|
||||
_archive.Volumes.Single().Stream,
|
||||
folder!,
|
||||
_archive._database.PasswordProvider
|
||||
);
|
||||
}
|
||||
|
||||
// Wrap with SyncOnlyStream to work around LZMA async bugs
|
||||
// Return a ReadOnlySubStream that reads from the shared folder stream
|
||||
return CreateEntryStream(
|
||||
new SyncOnlyStream(
|
||||
new ReadOnlySubStream(_currentFolderStream, entry.Size, leaveOpen: true)
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
public override void Dispose()
|
||||
{
|
||||
_currentFolderStream?.Dispose();
|
||||
_currentFolderStream = null;
|
||||
base.Dispose();
|
||||
}
|
||||
}
|
||||
|
||||
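Conceptually, the reader above turns each solid folder into a single decompression stream and hands out consecutive slices of it. A rough sketch with illustrative names (OpenFolderDecompressionStream, entriesInFolder and DestinationFor are not library APIs):

using var folderStream = OpenFolderDecompressionStream(folder);
foreach (var entry in entriesInFolder)
{
    using var slice = new ReadOnlySubStream(folderStream, entry.Size, leaveOpen: true);
    slice.CopyTo(DestinationFor(entry));
}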
@@ -328,7 +267,7 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
#if !NETFRAMEWORK && !NETSTANDARD2_0
|
||||
#if !LEGACY_DOTNET
|
||||
public override ValueTask<int> ReadAsync(
|
||||
Memory<byte> buffer,
|
||||
CancellationToken cancellationToken = default
|
||||
|
||||
@@ -12,8 +12,9 @@ public class SevenZipArchiveEntry : SevenZipEntry, IArchiveEntry
|
||||
|
||||
public Stream OpenEntryStream() => FilePart.GetCompressedStream();
|
||||
|
||||
public Task<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default) =>
|
||||
Task.FromResult(OpenEntryStream());
|
||||
public async ValueTask<Stream> OpenEntryStreamAsync(
|
||||
CancellationToken cancellationToken = default
|
||||
) => (await FilePart.GetCompressedStreamAsync(cancellationToken)).NotNull();
|
||||
|
||||
public IArchive Archive { get; }
|
||||
|
||||
|
||||
161  src/SharpCompress/Archives/Tar/TarArchive.Async.cs  (Normal file)
@@ -0,0 +1,161 @@
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Tar;
|
||||
using SharpCompress.Common.Tar.Headers;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Readers;
|
||||
using SharpCompress.Readers.Tar;
|
||||
using SharpCompress.Writers;
|
||||
using SharpCompress.Writers.Tar;
|
||||
|
||||
namespace SharpCompress.Archives.Tar;
|
||||
|
||||
public partial class TarArchive
|
||||
{
|
||||
protected override async ValueTask SaveToAsync(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
IAsyncEnumerable<TarArchiveEntry> oldEntries,
|
||||
IEnumerable<TarArchiveEntry> newEntries,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
using var writer = new TarWriter(stream, new TarWriterOptions(options));
|
||||
await foreach (
|
||||
var entry in oldEntries.WithCancellation(cancellationToken).ConfigureAwait(false)
|
||||
)
|
||||
{
|
||||
if (entry.IsDirectory)
|
||||
{
|
||||
await writer
|
||||
.WriteDirectoryAsync(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entry.LastModifiedTime,
|
||||
cancellationToken
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
else
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
await writer
|
||||
.WriteAsync(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entryStream,
|
||||
entry.LastModifiedTime,
|
||||
entry.Size,
|
||||
cancellationToken
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
foreach (var entry in newEntries)
|
||||
{
|
||||
if (entry.IsDirectory)
|
||||
{
|
||||
await writer
|
||||
.WriteDirectoryAsync(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entry.LastModifiedTime,
|
||||
cancellationToken
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
else
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
await writer
|
||||
.WriteAsync(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entryStream,
|
||||
entry.LastModifiedTime,
|
||||
entry.Size,
|
||||
cancellationToken
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected override ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync()
|
||||
{
|
||||
var stream = Volumes.Single().Stream;
|
||||
stream.Position = 0;
|
||||
return new((IAsyncReader)TarReader.OpenReader(stream));
|
||||
}
|
||||
|
||||
protected override async IAsyncEnumerable<TarArchiveEntry> LoadEntriesAsync(
|
||||
IAsyncEnumerable<TarVolume> volumes
|
||||
)
|
||||
{
|
||||
var stream = (await volumes.SingleAsync()).Stream;
|
||||
if (stream.CanSeek)
|
||||
{
|
||||
stream.Position = 0;
|
||||
}
|
||||
|
||||
// Always use async header reading in LoadEntriesAsync for consistency
|
||||
{
|
||||
// Use async header reading for async-only streams
|
||||
TarHeader? previousHeader = null;
|
||||
await foreach (
|
||||
var header in TarHeaderFactory.ReadHeaderAsync(
|
||||
StreamingMode.Seekable,
|
||||
stream,
|
||||
ReaderOptions.ArchiveEncoding
|
||||
)
|
||||
)
|
||||
{
|
||||
if (header != null)
|
||||
{
|
||||
if (header.EntryType == EntryType.LongName)
|
||||
{
|
||||
previousHeader = header;
|
||||
}
|
||||
else
|
||||
{
|
||||
if (previousHeader != null)
|
||||
{
|
||||
var entry = new TarArchiveEntry(
|
||||
this,
|
||||
new TarFilePart(previousHeader, stream),
|
||||
CompressionType.None
|
||||
);
|
||||
|
||||
var oldStreamPos = stream.Position;
|
||||
|
||||
using (var entryStream = entry.OpenEntryStream())
|
||||
{
|
||||
using var memoryStream = new MemoryStream();
|
||||
await entryStream.CopyToAsync(memoryStream);
|
||||
memoryStream.Position = 0;
|
||||
var bytes = memoryStream.ToArray();
|
||||
|
||||
header.Name = ReaderOptions
|
||||
.ArchiveEncoding.Decode(bytes)
|
||||
.TrimNulls();
|
||||
}
|
||||
|
||||
stream.Position = oldStreamPos;
|
||||
|
||||
previousHeader = null;
|
||||
}
|
||||
yield return new TarArchiveEntry(
|
||||
this,
|
||||
new TarFilePart(header, stream),
|
||||
CompressionType.None
|
||||
);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new IncompleteArchiveException("Failed to read TAR header");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
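The SaveToAsync override above consumes the existing entries with await foreach over an IAsyncEnumerable, threading the cancellation token through WithCancellation and suppressing context capture with ConfigureAwait(false). A minimal, self-contained sketch of that pattern (plain C#, not SharpCompress API) looks like this:

using System;
using System.Collections.Generic;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;

static class AsyncEnumerationSketch
{
    // A toy async iterator; [EnumeratorCancellation] lets WithCancellation flow the token in.
    static async IAsyncEnumerable<int> ProduceAsync(
        [EnumeratorCancellation] CancellationToken cancellationToken = default
    )
    {
        for (var i = 0; i < 3; i++)
        {
            await Task.Delay(10, cancellationToken);
            yield return i;
        }
    }

    static async Task ConsumeAsync(CancellationToken cancellationToken)
    {
        // Same shape as the SaveToAsync loop: cancellation flows in, no sync-context capture.
        await foreach (
            var item in ProduceAsync().WithCancellation(cancellationToken).ConfigureAwait(false)
        )
        {
            Console.WriteLine(item);
        }
    }
}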
198 src/SharpCompress/Archives/Tar/TarArchive.Factory.cs Normal file
@@ -0,0 +1,198 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Tar.Headers;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Archives.Tar;
|
||||
|
||||
public partial class TarArchive
|
||||
#if NET8_0_OR_GREATER
|
||||
: IWritableArchiveOpenable,
|
||||
IMultiArchiveOpenable<IWritableArchive, IWritableAsyncArchive>
|
||||
#endif
|
||||
{
|
||||
public static IWritableArchive OpenArchive(string filePath, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
filePath.NotNullOrEmpty(nameof(filePath));
|
||||
return OpenArchive(new FileInfo(filePath), readerOptions);
|
||||
}
|
||||
|
||||
public static IWritableArchive OpenArchive(
|
||||
FileInfo fileInfo,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
fileInfo.NotNull(nameof(fileInfo));
|
||||
return new TarArchive(
|
||||
new SourceStream(
|
||||
fileInfo,
|
||||
i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
|
||||
readerOptions ?? new ReaderOptions() { LeaveStreamOpen = false }
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
public static IWritableArchive OpenArchive(
|
||||
IEnumerable<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
fileInfos.NotNull(nameof(fileInfos));
|
||||
var files = fileInfos.ToArray();
|
||||
return new TarArchive(
|
||||
new SourceStream(
|
||||
files[0],
|
||||
i => i < files.Length ? files[i] : null,
|
||||
readerOptions ?? new ReaderOptions() { LeaveStreamOpen = false }
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
public static IWritableArchive OpenArchive(
|
||||
IEnumerable<Stream> streams,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
streams.NotNull(nameof(streams));
|
||||
var strms = streams.ToArray();
|
||||
return new TarArchive(
|
||||
new SourceStream(
|
||||
strms[0],
|
||||
i => i < strms.Length ? strms[i] : null,
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
public static IWritableArchive OpenArchive(Stream stream, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
stream.NotNull(nameof(stream));
|
||||
|
||||
if (stream is not { CanSeek: true })
|
||||
{
|
||||
throw new ArgumentException("Stream must be seekable", nameof(stream));
|
||||
}
|
||||
|
||||
return new TarArchive(
|
||||
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
|
||||
);
|
||||
}
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
Stream stream,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(stream, readerOptions);
|
||||
}
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
string path,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(new FileInfo(path), readerOptions);
|
||||
}
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
FileInfo fileInfo,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(fileInfo, readerOptions);
|
||||
}
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
IReadOnlyList<Stream> streams,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(streams, readerOptions);
|
||||
}
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
IReadOnlyList<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(fileInfos, readerOptions);
|
||||
}
|
||||
|
||||
public static bool IsTarFile(string filePath) => IsTarFile(new FileInfo(filePath));
|
||||
|
||||
public static bool IsTarFile(FileInfo fileInfo)
|
||||
{
|
||||
if (!fileInfo.Exists)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
using Stream stream = fileInfo.OpenRead();
|
||||
return IsTarFile(stream);
|
||||
}
|
||||
|
||||
public static bool IsTarFile(Stream stream)
|
||||
{
|
||||
try
|
||||
{
|
||||
var tarHeader = new TarHeader(new ArchiveEncoding());
|
||||
var reader = new BinaryReader(stream, Encoding.UTF8, false);
|
||||
var readSucceeded = tarHeader.Read(reader);
|
||||
var isEmptyArchive =
|
||||
tarHeader.Name?.Length == 0
|
||||
&& tarHeader.Size == 0
|
||||
&& Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
|
||||
return readSucceeded || isEmptyArchive;
|
||||
}
|
||||
catch (Exception)
|
||||
{
|
||||
// Catch all exceptions during tar header reading to determine if this is a valid tar file
|
||||
// Invalid tar files or corrupted streams will throw various exceptions
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public static async ValueTask<bool> IsTarFileAsync(
|
||||
Stream stream,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
try
|
||||
{
|
||||
var tarHeader = new TarHeader(new ArchiveEncoding());
|
||||
var reader = new AsyncBinaryReader(stream, false);
|
||||
var readSucceeded = await tarHeader.ReadAsync(reader);
|
||||
var isEmptyArchive =
|
||||
tarHeader.Name?.Length == 0
|
||||
&& tarHeader.Size == 0
|
||||
&& Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
|
||||
return readSucceeded || isEmptyArchive;
|
||||
}
|
||||
catch (Exception)
|
||||
{
|
||||
// Catch all exceptions during tar header reading to determine if this is a valid tar file
|
||||
// Invalid tar files or corrupted streams will throw various exceptions
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public static IWritableArchive CreateArchive() => new TarArchive();
|
||||
|
||||
public static IWritableAsyncArchive CreateAsyncArchive() => new TarArchive();
|
||||
}
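TarArchive.Factory.cs above replaces the old Open/Create entry points with OpenArchive, OpenAsyncArchive and CreateArchive, and adds IsTarFileAsync. A hedged usage sketch, assuming only the signatures visible in this diff (disposal of the returned IWritableArchive is assumed to follow the existing IArchive pattern):

using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives.Tar;

static class TarFactorySketch
{
    static async Task DetectAndOpenAsync(string path, CancellationToken cancellationToken)
    {
        // Synchronous detection on a file path, as before.
        if (!TarArchive.IsTarFile(path))
        {
            return;
        }

        // Async detection works against a stream (new in this change set).
        using (var stream = File.OpenRead(path))
        {
            var isTar = await TarArchive.IsTarFileAsync(stream, cancellationToken);
            if (!isTar)
            {
                return;
            }
        }

        // OpenArchive now returns the IWritableArchive abstraction instead of the concrete type.
        var archive = TarArchive.OpenArchive(path);
        // ... enumerate or modify entries here; dispose when done
        // (disposal assumed, as with IArchive today; not shown in this diff).
        (archive as System.IDisposable)?.Dispose();
    }
}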
@@ -15,132 +15,14 @@ using SharpCompress.Writers.Tar;
|
||||
|
||||
namespace SharpCompress.Archives.Tar;
|
||||
|
||||
public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
|
||||
public partial class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
|
||||
{
|
||||
/// <summary>
|
||||
/// Constructor expects a filepath to an existing file.
|
||||
/// </summary>
|
||||
/// <param name="filePath"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static TarArchive Open(string filePath, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
filePath.NotNullOrEmpty(nameof(filePath));
|
||||
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with a FileInfo object to an existing file.
|
||||
/// </summary>
|
||||
/// <param name="fileInfo"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static TarArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
fileInfo.NotNull(nameof(fileInfo));
|
||||
return new TarArchive(
|
||||
new SourceStream(
|
||||
fileInfo,
|
||||
i => ArchiveVolumeFactory.GetFilePart(i, fileInfo),
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with all file parts passed in
|
||||
/// </summary>
|
||||
/// <param name="fileInfos"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static TarArchive Open(
|
||||
IEnumerable<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
fileInfos.NotNull(nameof(fileInfos));
|
||||
var files = fileInfos.ToArray();
|
||||
return new TarArchive(
|
||||
new SourceStream(
|
||||
files[0],
|
||||
i => i < files.Length ? files[i] : null,
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with all stream parts passed in
|
||||
/// </summary>
|
||||
/// <param name="streams"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static TarArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
streams.NotNull(nameof(streams));
|
||||
var strms = streams.ToArray();
|
||||
return new TarArchive(
|
||||
new SourceStream(
|
||||
strms[0],
|
||||
i => i < strms.Length ? strms[i] : null,
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Takes a seekable Stream as a source
|
||||
/// </summary>
|
||||
/// <param name="stream"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static TarArchive Open(Stream stream, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
stream.NotNull(nameof(stream));
|
||||
|
||||
if (stream is not { CanSeek: true })
|
||||
{
|
||||
throw new ArgumentException("Stream must be seekable", nameof(stream));
|
||||
}
|
||||
|
||||
return new TarArchive(
|
||||
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
|
||||
);
|
||||
}
|
||||
|
||||
public static bool IsTarFile(string filePath) => IsTarFile(new FileInfo(filePath));
|
||||
|
||||
public static bool IsTarFile(FileInfo fileInfo)
|
||||
{
|
||||
if (!fileInfo.Exists)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
using Stream stream = fileInfo.OpenRead();
|
||||
return IsTarFile(stream);
|
||||
}
|
||||
|
||||
public static bool IsTarFile(Stream stream)
|
||||
{
|
||||
try
|
||||
{
|
||||
var tarHeader = new TarHeader(new ArchiveEncoding());
|
||||
var readSucceeded = tarHeader.Read(new BinaryReader(stream));
|
||||
var isEmptyArchive =
|
||||
tarHeader.Name?.Length == 0
|
||||
&& tarHeader.Size == 0
|
||||
&& Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
|
||||
return readSucceeded || isEmptyArchive;
|
||||
}
|
||||
catch { }
|
||||
return false;
|
||||
}
|
||||
|
||||
protected override IEnumerable<TarVolume> LoadVolumes(SourceStream sourceStream)
|
||||
{
|
||||
sourceStream.NotNull("SourceStream is null").LoadAllParts(); //request all streams
|
||||
return new TarVolume(sourceStream, ReaderOptions, 1).AsEnumerable(); //simple single volume or split, multivolume not supported
|
||||
sourceStream.NotNull("SourceStream is null").LoadAllParts();
|
||||
return new TarVolume(sourceStream, ReaderOptions, 1).AsEnumerable();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with a SourceStream able to handle FileInfo and Streams.
|
||||
/// </summary>
|
||||
/// <param name="sourceStream"></param>
|
||||
private TarArchive(SourceStream sourceStream)
|
||||
: base(ArchiveType.Tar, sourceStream) { }
|
||||
|
||||
@@ -150,6 +32,10 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
|
||||
protected override IEnumerable<TarArchiveEntry> LoadEntries(IEnumerable<TarVolume> volumes)
|
||||
{
|
||||
var stream = volumes.Single().Stream;
|
||||
if (stream.CanSeek)
|
||||
{
|
||||
stream.Position = 0;
|
||||
}
|
||||
TarHeader? previousHeader = null;
|
||||
foreach (
|
||||
var header in TarHeaderFactory.ReadHeader(
|
||||
@@ -180,7 +66,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
|
||||
using (var entryStream = entry.OpenEntryStream())
|
||||
{
|
||||
using var memoryStream = new MemoryStream();
|
||||
entryStream.CopyTo(memoryStream);
|
||||
entryStream.CopyTo(memoryStream, Constants.BufferSize);
|
||||
memoryStream.Position = 0;
|
||||
var bytes = memoryStream.ToArray();
|
||||
|
||||
@@ -205,8 +91,6 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
|
||||
}
|
||||
}
|
||||
|
||||
public static TarArchive Create() => new();
|
||||
|
||||
protected override TarArchiveEntry CreateEntryInternal(
|
||||
string filePath,
|
||||
Stream source,
|
||||
@@ -259,47 +143,10 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
|
||||
}
|
||||
}
|
||||
|
||||
protected override async Task SaveToAsync(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
IEnumerable<TarArchiveEntry> oldEntries,
|
||||
IEnumerable<TarArchiveEntry> newEntries,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
using var writer = new TarWriter(stream, new TarWriterOptions(options));
|
||||
foreach (var entry in oldEntries.Concat(newEntries))
|
||||
{
|
||||
if (entry.IsDirectory)
|
||||
{
|
||||
await writer
|
||||
.WriteDirectoryAsync(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entry.LastModifiedTime,
|
||||
cancellationToken
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
else
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
await writer
|
||||
.WriteAsync(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entryStream,
|
||||
entry.LastModifiedTime,
|
||||
entry.Size,
|
||||
cancellationToken
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected override IReader CreateReaderForSolidExtraction()
|
||||
{
|
||||
var stream = Volumes.Single().Stream;
|
||||
stream.Position = 0;
|
||||
return TarReader.Open(stream);
|
||||
return TarReader.OpenReader(stream);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -14,9 +14,8 @@ public class TarArchiveEntry : TarEntry, IArchiveEntry

public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream().NotNull();

public virtual Task<Stream> OpenEntryStreamAsync(
CancellationToken cancellationToken = default
) => Task.FromResult(OpenEntryStream());
public ValueTask<Stream> OpenEntryStreamAsync(CancellationToken cancellationToken = default) =>
new(OpenEntryStream());

#region IArchiveEntry Members
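The TarArchiveEntry change above swaps Task.FromResult(OpenEntryStream()) for a ValueTask<Stream> constructed directly over the synchronous result, which avoids allocating a completed Task when the stream is already available. A minimal illustration of the same idea in plain C#:

using System.IO;
using System.Threading.Tasks;

static class ValueTaskSketch
{
    // Task-based shape: every call allocates a completed Task<Stream>.
    static Task<Stream> OpenTask(Stream alreadyOpen) => Task.FromResult(alreadyOpen);

    // ValueTask-based shape: the synchronous result is wrapped without a Task allocation.
    static ValueTask<Stream> OpenValueTask(Stream alreadyOpen) => new(alreadyOpen);

    static async Task UseAsync(Stream s)
    {
        // Both are awaited the same way by callers.
        _ = await OpenTask(s);
        _ = await OpenValueTask(s);
    }
}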
132 src/SharpCompress/Archives/Zip/ZipArchive.Async.cs Normal file
@@ -0,0 +1,132 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Zip;
|
||||
using SharpCompress.Common.Zip.Headers;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Readers;
|
||||
using SharpCompress.Writers;
|
||||
using SharpCompress.Writers.Zip;
|
||||
|
||||
namespace SharpCompress.Archives.Zip;
|
||||
|
||||
public partial class ZipArchive
|
||||
{
|
||||
protected override async IAsyncEnumerable<ZipArchiveEntry> LoadEntriesAsync(
|
||||
IAsyncEnumerable<ZipVolume> volumes
|
||||
)
|
||||
{
|
||||
var vols = await volumes.ToListAsync();
|
||||
var volsArray = vols.ToArray();
|
||||
|
||||
await foreach (
|
||||
var h in headerFactory.NotNull().ReadSeekableHeaderAsync(volsArray.Last().Stream)
|
||||
)
|
||||
{
|
||||
if (h != null)
|
||||
{
|
||||
switch (h.ZipHeaderType)
|
||||
{
|
||||
case ZipHeaderType.DirectoryEntry:
|
||||
{
|
||||
var deh = (DirectoryEntryHeader)h;
|
||||
Stream s;
|
||||
if (
|
||||
deh.RelativeOffsetOfEntryHeader + deh.CompressedSize
|
||||
> volsArray[deh.DiskNumberStart].Stream.Length
|
||||
)
|
||||
{
|
||||
var v = volsArray.Skip(deh.DiskNumberStart).ToArray();
|
||||
s = new SourceStream(
|
||||
v[0].Stream,
|
||||
i => i < v.Length ? v[i].Stream : null,
|
||||
new ReaderOptions() { LeaveStreamOpen = true }
|
||||
);
|
||||
}
|
||||
else
|
||||
{
|
||||
s = volsArray[deh.DiskNumberStart].Stream;
|
||||
}
|
||||
|
||||
yield return new ZipArchiveEntry(
|
||||
this,
|
||||
new SeekableZipFilePart(headerFactory.NotNull(), deh, s)
|
||||
);
|
||||
}
|
||||
break;
|
||||
case ZipHeaderType.DirectoryEnd:
|
||||
{
|
||||
var bytes = ((DirectoryEndHeader)h).Comment ?? Array.Empty<byte>();
|
||||
volsArray.Last().Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
|
||||
yield break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected override async ValueTask SaveToAsync(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
IAsyncEnumerable<ZipArchiveEntry> oldEntries,
|
||||
IEnumerable<ZipArchiveEntry> newEntries,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
using var writer = new ZipWriter(stream, new ZipWriterOptions(options));
|
||||
await foreach (
|
||||
var entry in oldEntries.WithCancellation(cancellationToken).ConfigureAwait(false)
|
||||
)
|
||||
{
|
||||
if (entry.IsDirectory)
|
||||
{
|
||||
await writer
|
||||
.WriteDirectoryAsync(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entry.LastModifiedTime,
|
||||
cancellationToken
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
else
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
await writer
|
||||
.WriteAsync(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entryStream,
|
||||
cancellationToken
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
foreach (var entry in newEntries)
|
||||
{
|
||||
if (entry.IsDirectory)
|
||||
{
|
||||
await writer
|
||||
.WriteDirectoryAsync(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entry.LastModifiedTime,
|
||||
cancellationToken
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
else
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
await writer
|
||||
.WriteAsync(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entryStream,
|
||||
cancellationToken
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
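When an entry's data runs past the end of its starting volume, LoadEntriesAsync above chains the remaining volume streams into one SourceStream via an index-based provider delegate (i => i < v.Length ? v[i].Stream : null, where null means "no more parts"). The shape of that provider, sketched with plain delegates and hypothetical part names (not SharpCompress types):

using System;
using System.IO;

static class PartProviderSketch
{
    // Hypothetical parts array standing in for the ZipVolume streams in the diff.
    static Func<int, Stream?> MakeProvider(Stream[] parts) =>
        index => index < parts.Length ? parts[index] : null; // null signals "no further part"

    static void Demo()
    {
        var parts = new Stream[] { new MemoryStream(), new MemoryStream() };
        var provider = MakeProvider(parts);
        var first = provider(0);     // first part
        var afterLast = provider(2); // null: enumeration of parts stops here
    }
}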
302 src/SharpCompress/Archives/Zip/ZipArchive.Factory.cs Normal file
@@ -0,0 +1,302 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common;
|
||||
using SharpCompress.Common.Zip;
|
||||
using SharpCompress.Common.Zip.Headers;
|
||||
using SharpCompress.IO;
|
||||
using SharpCompress.Readers;
|
||||
|
||||
namespace SharpCompress.Archives.Zip;
|
||||
|
||||
public partial class ZipArchive
|
||||
#if NET8_0_OR_GREATER
|
||||
: IWritableArchiveOpenable,
|
||||
IMultiArchiveOpenable<IWritableArchive, IWritableAsyncArchive>
|
||||
#endif
|
||||
{
|
||||
public static IWritableArchive OpenArchive(string filePath, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
filePath.NotNullOrEmpty(nameof(filePath));
|
||||
return OpenArchive(new FileInfo(filePath), readerOptions);
|
||||
}
|
||||
|
||||
public static IWritableArchive OpenArchive(
|
||||
FileInfo fileInfo,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
fileInfo.NotNull(nameof(fileInfo));
|
||||
return new ZipArchive(
|
||||
new SourceStream(
|
||||
fileInfo,
|
||||
i => ZipArchiveVolumeFactory.GetFilePart(i, fileInfo),
|
||||
readerOptions ?? new ReaderOptions() { LeaveStreamOpen = false }
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
public static IWritableArchive OpenArchive(
|
||||
IEnumerable<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
fileInfos.NotNull(nameof(fileInfos));
|
||||
var files = fileInfos.ToArray();
|
||||
return new ZipArchive(
|
||||
new SourceStream(
|
||||
files[0],
|
||||
i => i < files.Length ? files[i] : null,
|
||||
readerOptions ?? new ReaderOptions() { LeaveStreamOpen = false }
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
public static IWritableArchive OpenArchive(
|
||||
IEnumerable<Stream> streams,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
streams.NotNull(nameof(streams));
|
||||
var strms = streams.ToArray();
|
||||
return new ZipArchive(
|
||||
new SourceStream(
|
||||
strms[0],
|
||||
i => i < strms.Length ? strms[i] : null,
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
public static IWritableArchive OpenArchive(Stream stream, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
stream.NotNull(nameof(stream));
|
||||
|
||||
if (stream is not { CanSeek: true })
|
||||
{
|
||||
throw new ArgumentException("Stream must be seekable", nameof(stream));
|
||||
}
|
||||
|
||||
return new ZipArchive(
|
||||
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
|
||||
);
|
||||
}
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
string path,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(path, readerOptions);
|
||||
}
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
Stream stream,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(stream, readerOptions);
|
||||
}
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
FileInfo fileInfo,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(fileInfo, readerOptions);
|
||||
}
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
IReadOnlyList<Stream> streams,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(streams, readerOptions);
|
||||
}
|
||||
|
||||
public static IWritableAsyncArchive OpenAsyncArchive(
|
||||
IReadOnlyList<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
return (IWritableAsyncArchive)OpenArchive(fileInfos, readerOptions);
|
||||
}
|
||||
|
||||
public static bool IsZipFile(string filePath, string? password = null) =>
|
||||
IsZipFile(new FileInfo(filePath), password);
|
||||
|
||||
public static bool IsZipFile(FileInfo fileInfo, string? password = null)
|
||||
{
|
||||
if (!fileInfo.Exists)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
using Stream stream = fileInfo.OpenRead();
|
||||
return IsZipFile(stream, password);
|
||||
}
|
||||
|
||||
public static bool IsZipFile(Stream stream, string? password = null)
|
||||
{
|
||||
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
|
||||
try
|
||||
{
|
||||
var header = headerFactory
|
||||
.ReadStreamHeader(stream)
|
||||
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
|
||||
if (header is null)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
|
||||
}
|
||||
catch (CryptographicException)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public static bool IsZipMulti(Stream stream, string? password = null)
|
||||
{
|
||||
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
|
||||
try
|
||||
{
|
||||
if (stream is not SharpCompressStream)
|
||||
{
|
||||
stream = new SharpCompressStream(stream, bufferSize: Constants.BufferSize);
|
||||
}
|
||||
|
||||
var header = headerFactory
|
||||
.ReadStreamHeader(stream)
|
||||
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
|
||||
if (header is null)
|
||||
{
|
||||
if (stream.CanSeek)
|
||||
{
|
||||
var z = new SeekableZipHeaderFactory(password, new ArchiveEncoding());
|
||||
var x = z.ReadSeekableHeader(stream).FirstOrDefault();
|
||||
return x?.ZipHeaderType == ZipHeaderType.DirectoryEntry;
|
||||
}
|
||||
else
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
|
||||
}
|
||||
catch (CryptographicException)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public static async ValueTask<bool> IsZipFileAsync(
|
||||
Stream stream,
|
||||
string? password = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
|
||||
try
|
||||
{
|
||||
if (stream is not SharpCompressStream)
|
||||
{
|
||||
stream = new SharpCompressStream(stream, bufferSize: Constants.BufferSize);
|
||||
}
|
||||
|
||||
var header = await headerFactory
|
||||
.ReadStreamHeaderAsync(stream)
|
||||
.Where(x => x.ZipHeaderType != ZipHeaderType.Split)
|
||||
.FirstOrDefaultAsync(cancellationToken);
|
||||
if (header is null)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
|
||||
}
|
||||
catch (CryptographicException)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public static IWritableArchive CreateArchive() => new ZipArchive();
|
||||
|
||||
public static IWritableAsyncArchive CreateAsyncArchive() => new ZipArchive();
|
||||
|
||||
public static async ValueTask<bool> IsZipMultiAsync(
|
||||
Stream stream,
|
||||
string? password = null,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
|
||||
try
|
||||
{
|
||||
if (stream is not SharpCompressStream)
|
||||
{
|
||||
stream = new SharpCompressStream(stream, bufferSize: Constants.BufferSize);
|
||||
}
|
||||
|
||||
var header = headerFactory
|
||||
.ReadStreamHeader(stream)
|
||||
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
|
||||
if (header is null)
|
||||
{
|
||||
if (stream.CanSeek)
|
||||
{
|
||||
var z = new SeekableZipHeaderFactory(password, new ArchiveEncoding());
|
||||
ZipHeader? x = null;
|
||||
await foreach (
|
||||
var h in z.ReadSeekableHeaderAsync(stream)
|
||||
.WithCancellation(cancellationToken)
|
||||
)
|
||||
{
|
||||
x = h;
|
||||
break;
|
||||
}
|
||||
return x?.ZipHeaderType == ZipHeaderType.DirectoryEntry;
|
||||
}
|
||||
else
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
|
||||
}
|
||||
catch (CryptographicException)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
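ZipArchive.Factory.cs above exposes IsZipFile, IsZipMulti and their async counterparts; note that a CryptographicException is treated as "is a zip", since an encrypted header still implies the format. A hedged usage sketch using only the signatures shown in this diff:

using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Archives.Zip;

static class ZipDetectionSketch
{
    static async Task<bool> LooksLikeZipAsync(string path, CancellationToken cancellationToken)
    {
        // Synchronous check by file path (password optional).
        if (ZipArchive.IsZipFile(path))
        {
            return true;
        }

        // Async check against a stream; also reports true for password-protected archives.
        using var stream = File.OpenRead(path);
        return await ZipArchive.IsZipFileAsync(stream, password: null, cancellationToken);
    }
}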
@@ -16,21 +16,12 @@ using SharpCompress.Writers.Zip;
|
||||
|
||||
namespace SharpCompress.Archives.Zip;
|
||||
|
||||
public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
public partial class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
{
|
||||
private readonly SeekableZipHeaderFactory? headerFactory;
|
||||
|
||||
/// <summary>
|
||||
/// Gets or sets the compression level applied to files added to the archive,
|
||||
/// if the compression method is set to deflate
|
||||
/// </summary>
|
||||
public CompressionLevel DeflateCompressionLevel { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with a SourceStream able to handle FileInfo and Streams.
|
||||
/// </summary>
|
||||
/// <param name="sourceStream"></param>
|
||||
/// <param name="options"></param>
|
||||
internal ZipArchive(SourceStream sourceStream)
|
||||
: base(ArchiveType.Zip, sourceStream) =>
|
||||
headerFactory = new SeekableZipHeaderFactory(
|
||||
@@ -38,219 +29,39 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
sourceStream.ReaderOptions.ArchiveEncoding
|
||||
);
|
||||
|
||||
/// <summary>
|
||||
/// Constructor expects a filepath to an existing file.
|
||||
/// </summary>
|
||||
/// <param name="filePath"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static ZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
filePath.NotNullOrEmpty(nameof(filePath));
|
||||
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with a FileInfo object to an existing file.
|
||||
/// </summary>
|
||||
/// <param name="fileInfo"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static ZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
fileInfo.NotNull(nameof(fileInfo));
|
||||
return new ZipArchive(
|
||||
new SourceStream(
|
||||
fileInfo,
|
||||
i => ZipArchiveVolumeFactory.GetFilePart(i, fileInfo),
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with all file parts passed in
|
||||
/// </summary>
|
||||
/// <param name="fileInfos"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static ZipArchive Open(
|
||||
IEnumerable<FileInfo> fileInfos,
|
||||
ReaderOptions? readerOptions = null
|
||||
)
|
||||
{
|
||||
fileInfos.NotNull(nameof(fileInfos));
|
||||
var files = fileInfos.ToArray();
|
||||
return new ZipArchive(
|
||||
new SourceStream(
|
||||
files[0],
|
||||
i => i < files.Length ? files[i] : null,
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constructor with all stream parts passed in
|
||||
/// </summary>
|
||||
/// <param name="streams"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static ZipArchive Open(IEnumerable<Stream> streams, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
streams.NotNull(nameof(streams));
|
||||
var strms = streams.ToArray();
|
||||
return new ZipArchive(
|
||||
new SourceStream(
|
||||
strms[0],
|
||||
i => i < strms.Length ? strms[i] : null,
|
||||
readerOptions ?? new ReaderOptions()
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Takes a seekable Stream as a source
|
||||
/// </summary>
|
||||
/// <param name="stream"></param>
|
||||
/// <param name="readerOptions"></param>
|
||||
public static ZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
|
||||
{
|
||||
stream.NotNull(nameof(stream));
|
||||
|
||||
if (stream is not { CanSeek: true })
|
||||
{
|
||||
throw new ArgumentException("Stream must be seekable", nameof(stream));
|
||||
}
|
||||
|
||||
return new ZipArchive(
|
||||
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
|
||||
);
|
||||
}
|
||||
|
||||
public static bool IsZipFile(
|
||||
string filePath,
|
||||
string? password = null,
|
||||
int bufferSize = ReaderOptions.DefaultBufferSize
|
||||
) => IsZipFile(new FileInfo(filePath), password, bufferSize);
|
||||
|
||||
public static bool IsZipFile(
|
||||
FileInfo fileInfo,
|
||||
string? password = null,
|
||||
int bufferSize = ReaderOptions.DefaultBufferSize
|
||||
)
|
||||
{
|
||||
if (!fileInfo.Exists)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
using Stream stream = fileInfo.OpenRead();
|
||||
return IsZipFile(stream, password, bufferSize);
|
||||
}
|
||||
|
||||
public static bool IsZipFile(
|
||||
Stream stream,
|
||||
string? password = null,
|
||||
int bufferSize = ReaderOptions.DefaultBufferSize
|
||||
)
|
||||
{
|
||||
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
|
||||
try
|
||||
{
|
||||
if (stream is not SharpCompressStream)
|
||||
{
|
||||
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
|
||||
}
|
||||
|
||||
var header = headerFactory
|
||||
.ReadStreamHeader(stream)
|
||||
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
|
||||
if (header is null)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
|
||||
}
|
||||
catch (CryptographicException)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public static bool IsZipMulti(
|
||||
Stream stream,
|
||||
string? password = null,
|
||||
int bufferSize = ReaderOptions.DefaultBufferSize
|
||||
)
|
||||
{
|
||||
var headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding(), null);
|
||||
try
|
||||
{
|
||||
if (stream is not SharpCompressStream)
|
||||
{
|
||||
stream = new SharpCompressStream(stream, bufferSize: bufferSize);
|
||||
}
|
||||
|
||||
var header = headerFactory
|
||||
.ReadStreamHeader(stream)
|
||||
.FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
|
||||
if (header is null)
|
||||
{
|
||||
if (stream.CanSeek) //could be multipart. Test for central directory - might not be z64 safe
|
||||
{
|
||||
var z = new SeekableZipHeaderFactory(password, new ArchiveEncoding());
|
||||
var x = z.ReadSeekableHeader(stream).FirstOrDefault();
|
||||
return x?.ZipHeaderType == ZipHeaderType.DirectoryEntry;
|
||||
}
|
||||
else
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return Enum.IsDefined(typeof(ZipHeaderType), header.ZipHeaderType);
|
||||
}
|
||||
catch (CryptographicException)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
internal ZipArchive()
|
||||
: base(ArchiveType.Zip) { }
|
||||
|
||||
protected override IEnumerable<ZipVolume> LoadVolumes(SourceStream stream)
|
||||
{
|
||||
stream.LoadAllParts(); //request all streams
|
||||
stream.LoadAllParts();
|
||||
stream.Position = 0;
|
||||
|
||||
var streams = stream.Streams.ToList();
|
||||
var idx = 0;
|
||||
if (streams.Count() > 1) //test part 2 - true = multipart not split
|
||||
if (streams.Count() > 1)
|
||||
{
|
||||
streams[1].Position += 4; //skip the POST_DATA_DESCRIPTOR to prevent an exception
|
||||
var isZip = IsZipFile(streams[1], ReaderOptions.Password, ReaderOptions.BufferSize);
|
||||
streams[1].Position -= 4;
|
||||
//check if second stream is zip header without changing position
|
||||
var headerProbeStream = streams[1];
|
||||
var startPosition = headerProbeStream.Position;
|
||||
headerProbeStream.Position = startPosition + 4;
|
||||
var isZip = IsZipFile(headerProbeStream, ReaderOptions.Password);
|
||||
headerProbeStream.Position = startPosition;
|
||||
if (isZip)
|
||||
{
|
||||
stream.IsVolumes = true;
|
||||
|
||||
var tmp = streams[0]; //arcs as zip, z01 ... swap the zip the end
|
||||
var tmp = streams[0];
|
||||
streams.RemoveAt(0);
|
||||
streams.Add(tmp);
|
||||
|
||||
//streams[0].Position = 4; //skip the POST_DATA_DESCRIPTOR to prevent an exception
|
||||
return streams.Select(a => new ZipVolume(a, ReaderOptions, idx++));
|
||||
}
|
||||
}
|
||||
|
||||
//split mode or single file
|
||||
return new ZipVolume(stream, ReaderOptions, idx++).AsEnumerable();
|
||||
}
|
||||
|
||||
internal ZipArchive()
|
||||
: base(ArchiveType.Zip) { }
|
||||
|
||||
protected override IEnumerable<ZipArchiveEntry> LoadEntries(IEnumerable<ZipVolume> volumes)
|
||||
{
|
||||
var vols = volumes.ToArray();
|
||||
@@ -329,41 +140,6 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
}
|
||||
}
|
||||
|
||||
protected override async Task SaveToAsync(
|
||||
Stream stream,
|
||||
WriterOptions options,
|
||||
IEnumerable<ZipArchiveEntry> oldEntries,
|
||||
IEnumerable<ZipArchiveEntry> newEntries,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
using var writer = new ZipWriter(stream, new ZipWriterOptions(options));
|
||||
foreach (var entry in oldEntries.Concat(newEntries))
|
||||
{
|
||||
if (entry.IsDirectory)
|
||||
{
|
||||
await writer
|
||||
.WriteDirectoryAsync(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entry.LastModifiedTime,
|
||||
cancellationToken
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
else
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
await writer
|
||||
.WriteAsync(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entryStream,
|
||||
cancellationToken
|
||||
)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected override ZipArchiveEntry CreateEntryInternal(
|
||||
string filePath,
|
||||
Stream source,
|
||||
@@ -377,12 +153,17 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
DateTime? modified
|
||||
) => new ZipWritableArchiveEntry(this, directoryPath, modified);
|
||||
|
||||
public static ZipArchive Create() => new();
|
||||
|
||||
protected override IReader CreateReaderForSolidExtraction()
|
||||
{
|
||||
var stream = Volumes.Single().Stream;
|
||||
((IStreamStack)stream).StackSeek(0);
|
||||
return ZipReader.Open(stream, ReaderOptions, Entries);
|
||||
return ZipReader.OpenReader(stream, ReaderOptions, Entries);
|
||||
}
|
||||
|
||||
protected override ValueTask<IAsyncReader> CreateReaderForSolidExtractionAsync()
|
||||
{
|
||||
var stream = Volumes.Single().Stream;
|
||||
stream.Position = 0;
|
||||
return new((IAsyncReader)ZipReader.OpenReader(stream));
|
||||
}
|
||||
}
|
||||
|
||||
22 src/SharpCompress/Archives/Zip/ZipArchiveEntry.Async.cs Normal file
@@ -0,0 +1,22 @@
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip;

namespace SharpCompress.Archives.Zip;

public partial class ZipArchiveEntry
{
public async ValueTask<Stream> OpenEntryStreamAsync(
CancellationToken cancellationToken = default
)
{
var part = Parts.Single();
if (part is SeekableZipFilePart seekablePart)
{
return (await seekablePart.GetCompressedStreamAsync(cancellationToken)).NotNull();
}
return OpenEntryStream();
}
}
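ZipArchiveEntry.Async.cs above takes a true asynchronous path only when the single part is a SeekableZipFilePart, falling back to the synchronous OpenEntryStream otherwise. A generic sketch of that "async where supported, sync fallback" shape (plain C#, hypothetical interfaces, not SharpCompress types):

using System.IO;
using System.Threading;
using System.Threading.Tasks;

interface IPart
{
    Stream Open();
}

interface IAsyncCapablePart : IPart
{
    ValueTask<Stream> OpenAsync(CancellationToken cancellationToken);
}

static class FallbackSketch
{
    static async ValueTask<Stream> OpenEntryAsync(IPart part, CancellationToken cancellationToken)
    {
        // Prefer the asynchronous path when the concrete part supports it...
        if (part is IAsyncCapablePart asyncPart)
        {
            return await asyncPart.OpenAsync(cancellationToken);
        }
        // ...otherwise return the synchronous result; the ValueTask completes synchronously.
        return part.Open();
    }
}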
@@ -6,17 +6,13 @@ using SharpCompress.Common.Zip;
|
||||
|
||||
namespace SharpCompress.Archives.Zip;
|
||||
|
||||
public class ZipArchiveEntry : ZipEntry, IArchiveEntry
|
||||
public partial class ZipArchiveEntry : ZipEntry, IArchiveEntry
|
||||
{
|
||||
internal ZipArchiveEntry(ZipArchive archive, SeekableZipFilePart? part)
|
||||
: base(part) => Archive = archive;
|
||||
|
||||
public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream().NotNull();
|
||||
|
||||
public virtual Task<Stream> OpenEntryStreamAsync(
|
||||
CancellationToken cancellationToken = default
|
||||
) => Task.FromResult(OpenEntryStream());
|
||||
|
||||
#region IArchiveEntry Members
|
||||
|
||||
public IArchive Archive { get; }
|
||||
|
||||
@@ -14,6 +14,7 @@ internal static class ZipArchiveVolumeFactory
|
||||
//new style .zip, z01.. | .zipx, zx01 - if the numbers go beyond 99 then they use 100 ...1000 etc
|
||||
var m = Regex.Match(part1.Name, @"^(.*\.)(zipx?|zx?[0-9]+)$", RegexOptions.IgnoreCase);
|
||||
if (m.Success)
|
||||
{
|
||||
item = new FileInfo(
|
||||
Path.Combine(
|
||||
part1.DirectoryName!,
|
||||
@@ -24,11 +25,16 @@ internal static class ZipArchiveVolumeFactory
|
||||
)
|
||||
)
|
||||
);
|
||||
}
|
||||
else //split - 001, 002 ...
|
||||
{
|
||||
return ArchiveVolumeFactory.GetFilePart(index, part1);
|
||||
}
|
||||
|
||||
if (item != null && item.Exists)
|
||||
{
|
||||
return item;
|
||||
}
|
||||
|
||||
return null; //no more items
|
||||
}
|
||||
|
||||
@@ -22,9 +22,13 @@ namespace SharpCompress.Common.Ace
for (int j = 0; j < 8; j++)
{
if ((crc & 1) != 0)
{
crc = (crc >> 1) ^ 0xEDB88320u;
}
else
{
crc >>= 1;
}
}

table[i] = crc;
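The hunk above is the inner loop of a standard reflected CRC-32 table build (polynomial 0xEDB88320). For context, the full table construction that this loop belongs to conventionally looks like the following self-contained sketch (not the project's exact code):

static class Crc32TableSketch
{
    // Builds the 256-entry lookup table for reflected CRC-32 (polynomial 0xEDB88320).
    static uint[] BuildTable()
    {
        var table = new uint[256];
        for (uint i = 0; i < 256; i++)
        {
            var crc = i;
            for (var j = 0; j < 8; j++)
            {
                crc = (crc & 1) != 0 ? (crc >> 1) ^ 0xEDB88320u : crc >> 1;
            }
            table[i] = crc;
        }
        return table;
    }
}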
111 src/SharpCompress/Common/Ace/Headers/AceFileHeader.Async.cs Normal file
@@ -0,0 +1,111 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.Arc;
|
||||
|
||||
namespace SharpCompress.Common.Ace.Headers;
|
||||
|
||||
public sealed partial class AceFileHeader
|
||||
{
|
||||
/// <summary>
|
||||
/// Asynchronously reads the next file entry header from the stream.
|
||||
/// Returns null if no more entries or end of archive.
|
||||
/// Supports both ACE 1.0 and ACE 2.0 formats.
|
||||
/// </summary>
|
||||
public override async ValueTask<AceHeader?> ReadAsync(
|
||||
Stream stream,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
var headerData = await ReadHeaderAsync(stream, cancellationToken);
|
||||
if (headerData.Length == 0)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
int offset = 0;
|
||||
|
||||
// Header type (1 byte)
|
||||
HeaderType = headerData[offset++];
|
||||
|
||||
// Skip recovery record headers (ACE 2.0 feature)
|
||||
if (HeaderType == (byte)SharpCompress.Common.Ace.Headers.AceHeaderType.RECOVERY32)
|
||||
{
|
||||
// Skip to next header
|
||||
return null;
|
||||
}
|
||||
|
||||
if (HeaderType != (byte)SharpCompress.Common.Ace.Headers.AceHeaderType.FILE)
|
||||
{
|
||||
// Unknown header type - skip
|
||||
return null;
|
||||
}
|
||||
|
||||
// Header flags (2 bytes)
|
||||
HeaderFlags = BitConverter.ToUInt16(headerData, offset);
|
||||
offset += 2;
|
||||
|
||||
// Packed size (4 bytes)
|
||||
PackedSize = BitConverter.ToUInt32(headerData, offset);
|
||||
offset += 4;
|
||||
|
||||
// Original size (4 bytes)
|
||||
OriginalSize = BitConverter.ToUInt32(headerData, offset);
|
||||
offset += 4;
|
||||
|
||||
// File date/time in DOS format (4 bytes)
|
||||
var dosDateTime = BitConverter.ToUInt32(headerData, offset);
|
||||
DateTime = ConvertDosDateTime(dosDateTime);
|
||||
offset += 4;
|
||||
|
||||
// File attributes (4 bytes)
|
||||
Attributes = (int)BitConverter.ToUInt32(headerData, offset);
|
||||
offset += 4;
|
||||
|
||||
// CRC32 (4 bytes)
|
||||
Crc32 = BitConverter.ToUInt32(headerData, offset);
|
||||
offset += 4;
|
||||
|
||||
// Compression type (1 byte)
|
||||
byte compressionType = headerData[offset++];
|
||||
CompressionType = GetCompressionType(compressionType);
|
||||
|
||||
// Compression quality/parameter (1 byte)
|
||||
byte compressionQuality = headerData[offset++];
|
||||
CompressionQuality = GetCompressionQuality(compressionQuality);
|
||||
|
||||
// Parameters (2 bytes)
|
||||
Parameters = BitConverter.ToUInt16(headerData, offset);
|
||||
offset += 2;
|
||||
|
||||
// Reserved (2 bytes) - skip
|
||||
offset += 2;
|
||||
|
||||
// Filename length (2 bytes)
|
||||
var filenameLength = BitConverter.ToUInt16(headerData, offset);
|
||||
offset += 2;
|
||||
|
||||
// Filename
|
||||
if (offset + filenameLength <= headerData.Length)
|
||||
{
|
||||
Filename = ArchiveEncoding.Decode(headerData, offset, filenameLength);
|
||||
offset += filenameLength;
|
||||
}
|
||||
|
||||
// Handle comment if present
|
||||
if ((HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.COMMENT) != 0)
|
||||
{
|
||||
// Comment length (2 bytes)
|
||||
if (offset + 2 <= headerData.Length)
|
||||
{
|
||||
ushort commentLength = BitConverter.ToUInt16(headerData, offset);
|
||||
offset += 2 + commentLength; // Skip comment
|
||||
}
|
||||
}
|
||||
|
||||
// Store the data start position
|
||||
DataStartPosition = stream.Position;
|
||||
|
||||
return this;
|
||||
}
|
||||
}
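AceFileHeader.ReadAsync above converts the 4-byte DOS-packed timestamp with ConvertDosDateTime, whose body is not shown in this diff. For reference, the conventional MS-DOS date/time bit layout (assumed here, not taken from the project) decodes like this:

using System;

static class DosDateTimeSketch
{
    // Conventional MS-DOS packed date/time: high word is the date, low word is the time.
    static DateTime Decode(uint dosDateTime)
    {
        var date = (int)(dosDateTime >> 16);
        var time = (int)(dosDateTime & 0xFFFF);

        var year = ((date >> 9) & 0x7F) + 1980; // bits 9-15: years since 1980
        var month = (date >> 5) & 0x0F;         // bits 5-8
        var day = date & 0x1F;                  // bits 0-4

        var hour = (time >> 11) & 0x1F;         // bits 11-15
        var minute = (time >> 5) & 0x3F;        // bits 5-10
        var second = (time & 0x1F) * 2;         // bits 0-4, two-second resolution

        // NOTE: a real decoder should guard against zero month/day in malformed headers
        // before constructing the DateTime.
        return new DateTime(year, month, day, hour, minute, second);
    }
}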
@@ -2,6 +2,8 @@ using System;
|
||||
using System.Buffers.Binary;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using System.Xml.Linq;
|
||||
using SharpCompress.Common.Arc;
|
||||
|
||||
@@ -10,7 +12,7 @@ namespace SharpCompress.Common.Ace.Headers
|
||||
/// <summary>
|
||||
/// ACE file entry header
|
||||
/// </summary>
|
||||
public sealed class AceFileHeader : AceHeader
|
||||
public sealed partial class AceFileHeader : AceHeader
|
||||
{
|
||||
public long DataStartPosition { get; private set; }
|
||||
public long PackedSize { get; set; }
|
||||
@@ -46,7 +48,7 @@ namespace SharpCompress.Common.Ace.Headers
|
||||
}
|
||||
}
|
||||
|
||||
public AceFileHeader(ArchiveEncoding archiveEncoding)
|
||||
public AceFileHeader(IArchiveEncoding archiveEncoding)
|
||||
: base(archiveEncoding, AceHeaderType.FILE) { }
|
||||
|
||||
/// <summary>
|
||||
@@ -147,6 +149,8 @@ namespace SharpCompress.Common.Ace.Headers
|
||||
return this;
|
||||
}
|
||||
|
||||
// ReadAsync moved to AceFileHeader.Async.cs
|
||||
|
||||
public CompressionType GetCompressionType(byte value) =>
|
||||
value switch
|
||||
{
|
||||
|
||||
69 src/SharpCompress/Common/Ace/Headers/AceHeader.Async.cs Normal file
@@ -0,0 +1,69 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace SharpCompress.Common.Ace.Headers;
|
||||
|
||||
public abstract partial class AceHeader
|
||||
{
|
||||
public abstract ValueTask<AceHeader?> ReadAsync(
|
||||
Stream reader,
|
||||
CancellationToken cancellationToken = default
|
||||
);
|
||||
|
||||
public async ValueTask<byte[]> ReadHeaderAsync(
|
||||
Stream stream,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
// Read header CRC (2 bytes) and header size (2 bytes)
|
||||
var headerBytes = new byte[4];
|
||||
if (await stream.ReadAsync(headerBytes, 0, 4, cancellationToken) != 4)
|
||||
{
|
||||
return Array.Empty<byte>();
|
||||
}
|
||||
|
||||
HeaderCrc = BitConverter.ToUInt16(headerBytes, 0); // CRC for validation
|
||||
HeaderSize = BitConverter.ToUInt16(headerBytes, 2);
|
||||
if (HeaderSize == 0)
|
||||
{
|
||||
return Array.Empty<byte>();
|
||||
}
|
||||
|
||||
// Read the header data
|
||||
var body = new byte[HeaderSize];
|
||||
if (await stream.ReadAsync(body, 0, HeaderSize, cancellationToken) != HeaderSize)
|
||||
{
|
||||
return Array.Empty<byte>();
|
||||
}
|
||||
|
||||
// Verify crc
|
||||
var checksum = AceCrc.AceCrc16(body);
|
||||
if (checksum != HeaderCrc)
|
||||
{
|
||||
throw new InvalidDataException("Header checksum is invalid");
|
||||
}
|
||||
return body;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Asynchronously checks if the stream is an ACE archive
|
||||
/// </summary>
|
||||
/// <param name="stream">The stream to read from</param>
|
||||
/// <param name="cancellationToken">Cancellation token</param>
|
||||
/// <returns>True if the stream is an ACE archive, false otherwise</returns>
|
||||
public static async ValueTask<bool> IsArchiveAsync(
|
||||
Stream stream,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
var bytes = new byte[14];
|
||||
if (await stream.ReadAsync(bytes, 0, 14, cancellationToken) != 14)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
return CheckMagicBytes(bytes, 7);
|
||||
}
|
||||
}
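IsArchiveAsync above reads 14 bytes and checks for the "**ACE**" signature starting at offset 7 via CheckMagicBytes. A minimal standalone version of that signature test (a sketch, not the project's implementation):

using System.Text;

static class AceSignatureSketch
{
    private static readonly byte[] Signature = Encoding.ASCII.GetBytes("**ACE**");

    // True when the 7-byte "**ACE**" marker appears at the given offset (7 in the ACE header layout).
    static bool HasAceSignature(byte[] buffer, int offset)
    {
        if (buffer.Length < offset + Signature.Length)
        {
            return false;
        }
        for (var i = 0; i < Signature.Length; i++)
        {
            if (buffer[offset + i] != Signature[i])
            {
                return false;
            }
        }
        return true;
    }
}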
@@ -1,5 +1,7 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.Arj.Headers;
|
||||
using SharpCompress.Crypto;
|
||||
|
||||
@@ -17,7 +19,7 @@ namespace SharpCompress.Common.Ace.Headers
|
||||
RECOVERY64B = 4,
|
||||
}
|
||||
|
||||
public abstract class AceHeader
|
||||
public abstract partial class AceHeader
|
||||
{
|
||||
// ACE signature: bytes at offset 7 should be "**ACE**"
|
||||
private static readonly byte[] AceSignature =
|
||||
@@ -31,13 +33,13 @@ namespace SharpCompress.Common.Ace.Headers
|
||||
(byte)'*',
|
||||
];
|
||||
|
||||
public AceHeader(ArchiveEncoding archiveEncoding, AceHeaderType type)
|
||||
public AceHeader(IArchiveEncoding archiveEncoding, AceHeaderType type)
|
||||
{
|
||||
AceHeaderType = type;
|
||||
ArchiveEncoding = archiveEncoding;
|
||||
}
|
||||
|
||||
public ArchiveEncoding ArchiveEncoding { get; }
|
||||
public IArchiveEncoding ArchiveEncoding { get; }
|
||||
public AceHeaderType AceHeaderType { get; }
|
||||
|
||||
public ushort HeaderFlags { get; set; }
|
||||
@@ -58,6 +60,8 @@ namespace SharpCompress.Common.Ace.Headers
|
||||
|
||||
public abstract AceHeader? Read(Stream reader);
|
||||
|
||||
// Async methods moved to AceHeader.Async.cs
|
||||
|
||||
public byte[] ReadHeader(Stream stream)
|
||||
{
|
||||
// Read header CRC (2 bytes) and header size (2 bytes)
|
||||
|
||||
83 src/SharpCompress/Common/Ace/Headers/AceMainHeader.Async.cs Normal file
@@ -0,0 +1,83 @@
|
||||
using System;
|
||||
using System.Buffers.Binary;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Crypto;
|
||||
|
||||
namespace SharpCompress.Common.Ace.Headers;
|
||||
|
||||
public sealed partial class AceMainHeader
|
||||
{
|
||||
/// <summary>
|
||||
/// Asynchronously reads the main archive header from the stream.
|
||||
/// Returns header if this is a valid ACE archive.
|
||||
/// Supports both ACE 1.0 and ACE 2.0 formats.
|
||||
/// </summary>
|
||||
public override async ValueTask<AceHeader?> ReadAsync(
|
||||
Stream stream,
|
||||
CancellationToken cancellationToken = default
|
||||
)
|
||||
{
|
||||
var headerData = await ReadHeaderAsync(stream, cancellationToken);
|
||||
if (headerData.Length == 0)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
int offset = 0;
|
||||
|
||||
// Header type should be 0 for main header
|
||||
if (headerData[offset++] != HeaderType)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
// Header flags (2 bytes)
|
||||
HeaderFlags = BitConverter.ToUInt16(headerData, offset);
|
||||
offset += 2;
|
||||
|
||||
// Skip signature "**ACE**" (7 bytes)
|
||||
if (!CheckMagicBytes(headerData, offset))
|
||||
{
|
||||
throw new InvalidDataException("Invalid ACE archive signature.");
|
||||
}
|
||||
offset += 7;
|
||||
|
||||
// ACE version (1 byte) - 10 for ACE 1.0, 20 for ACE 2.0
|
||||
AceVersion = headerData[offset++];
|
||||
ExtractVersion = headerData[offset++];
|
||||
|
||||
// Host OS (1 byte)
|
||||
if (offset < headerData.Length)
|
||||
{
|
||||
var hostOsByte = headerData[offset++];
|
||||
HostOS = hostOsByte <= 11 ? (HostOS)hostOsByte : HostOS.Unknown;
|
||||
}
|
||||
// Volume number (1 byte)
|
||||
VolumeNumber = headerData[offset++];
|
||||
|
||||
// Creation date/time (4 bytes)
|
||||
var dosDateTime = BitConverter.ToUInt32(headerData, offset);
|
||||
DateTime = ConvertDosDateTime(dosDateTime);
|
||||
offset += 4;
|
||||
|
||||
// Reserved fields (8 bytes)
|
||||
if (offset + 8 <= headerData.Length)
|
||||
{
|
||||
offset += 8;
|
||||
}
|
||||
|
||||
// Skip additional fields based on flags
|
||||
// Handle comment if present
|
||||
if ((HeaderFlags & SharpCompress.Common.Ace.Headers.HeaderFlags.COMMENT) != 0)
|
||||
{
|
||||
if (offset + 2 <= headerData.Length)
|
||||
{
|
||||
ushort commentLength = BitConverter.ToUInt16(headerData, offset);
|
||||
offset += 2 + commentLength;
|
||||
}
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
}
|
||||
@@ -2,6 +2,8 @@ using System;
|
||||
using System.Buffers.Binary;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using SharpCompress.Common.Ace.Headers;
|
||||
using SharpCompress.Common.Zip.Headers;
|
||||
using SharpCompress.Crypto;
|
||||
@@ -11,7 +13,7 @@ namespace SharpCompress.Common.Ace.Headers
|
||||
/// <summary>
|
||||
/// ACE main archive header
|
||||
/// </summary>
|
||||
public sealed class AceMainHeader : AceHeader
|
||||
public sealed partial class AceMainHeader : AceHeader
|
||||
{
|
||||
public byte ExtractVersion { get; set; }
|
||||
public byte CreatorVersion { get; set; }
|
||||
@@ -22,7 +24,7 @@ namespace SharpCompress.Common.Ace.Headers
|
||||
public List<byte> Comment { get; set; } = new();
|
||||
public byte AceVersion { get; private set; }
|
||||
|
||||
public AceMainHeader(ArchiveEncoding archiveEncoding)
|
||||
public AceMainHeader(IArchiveEncoding archiveEncoding)
|
||||
: base(archiveEncoding, AceHeaderType.MAIN) { }
|
||||
|
||||
/// <summary>
|
||||
@@ -93,5 +95,7 @@ namespace SharpCompress.Common.Ace.Headers
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
// ReadAsync moved to AceMainHeader.Async.cs
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,7 +7,7 @@ namespace SharpCompress.Common.Arc
|
||||
{
|
||||
public class ArcEntryHeader
|
||||
{
|
||||
public ArchiveEncoding ArchiveEncoding { get; }
|
||||
public IArchiveEncoding ArchiveEncoding { get; }
|
||||
public CompressionType CompressionMethod { get; private set; }
|
||||
public string? Name { get; private set; }
|
||||
public long CompressedSize { get; private set; }
|
||||
@@ -16,7 +16,7 @@ namespace SharpCompress.Common.Arc
|
||||
public long OriginalSize { get; private set; }
|
||||
public long DataStartPosition { get; private set; }
|
||||
|
||||
public ArcEntryHeader(ArchiveEncoding archiveEncoding)
|
||||
public ArcEntryHeader(IArchiveEncoding archiveEncoding)
|
||||
{
|
||||
this.ArchiveEncoding = archiveEncoding;
|
||||
}
|
||||
|
||||
@@ -3,55 +3,11 @@ using System.Text;

namespace SharpCompress.Common;

public class ArchiveEncoding
public class ArchiveEncoding : IArchiveEncoding
{
/// <summary>
/// Default encoding to use when archive format doesn't specify one.
/// </summary>
public Encoding? Default { get; set; }

/// <summary>
/// ArchiveEncoding used by encryption schemes which don't comply with RFC 2898.
/// </summary>
public Encoding? Password { get; set; }

/// <summary>
/// Set this encoding when you want to force it for all encoding operations.
/// </summary>
public Encoding Default { get; set; } = Encoding.Default;
public Encoding Password { get; set; } = Encoding.Default;
public Encoding UTF8 { get; set; } = Encoding.UTF8;
public Encoding? Forced { get; set; }

/// <summary>
/// Set this when you want to use a custom method for all decoding operations.
/// </summary>
/// <returns>string Func(bytes, index, length)</returns>
public Func<byte[], int, int, string>? CustomDecoder { get; set; }

public ArchiveEncoding()
: this(Encoding.Default, Encoding.Default) { }

public ArchiveEncoding(Encoding def, Encoding password)
{
Default = def;
Password = password;
}

#if !NETFRAMEWORK
static ArchiveEncoding() => Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
#endif

public string Decode(byte[] bytes) => Decode(bytes, 0, bytes.Length);

public string Decode(byte[] bytes, int start, int length) =>
GetDecoder().Invoke(bytes, start, length);

public string DecodeUTF8(byte[] bytes) => Encoding.UTF8.GetString(bytes, 0, bytes.Length);

public byte[] Encode(string str) => GetEncoding().GetBytes(str);

public Encoding GetEncoding() => Forced ?? Default ?? Encoding.UTF8;

public Encoding GetPasswordEncoding() => Password ?? Encoding.UTF8;

public Func<byte[], int, int, string> GetDecoder() =>
CustomDecoder ?? ((bytes, index, count) => GetEncoding().GetString(bytes, index, count));
public Func<byte[], int, int, EncodingType, string>? CustomDecoder { get; set; }
}

src/SharpCompress/Common/ArchiveEncodingExtensions.cs (new file, 87 lines)
@@ -0,0 +1,87 @@
using System;
using System.Text;

namespace SharpCompress.Common;

/// <summary>
/// Specifies the type of encoding to use.
/// </summary>
public enum EncodingType
{
/// <summary>
/// Uses the default encoding.
/// </summary>
Default,

/// <summary>
/// Uses UTF-8 encoding.
/// </summary>
UTF8,
}

/// <summary>
/// Provides extension methods for archive encoding.
/// </summary>
public static class ArchiveEncodingExtensions
{
#if !NETFRAMEWORK
/// <summary>
/// Registers the code pages encoding provider.
/// </summary>
static ArchiveEncodingExtensions() =>
Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
#endif

extension(IArchiveEncoding encoding)
{
/// <summary>
/// Gets the encoding based on the archive encoding settings.
/// </summary>
/// <param name="useUtf8">Whether to use UTF-8.</param>
/// <returns>The encoding.</returns>
public Encoding GetEncoding(bool useUtf8 = false) =>
encoding.Forced ?? (useUtf8 ? encoding.UTF8 : encoding.Default);

/// <summary>
/// Gets the decoder function for the archive encoding.
/// </summary>
/// <returns>The decoder function.</returns>
public Func<byte[], int, int, EncodingType, string> GetDecoder() =>
encoding.CustomDecoder
?? (
(bytes, index, count, type) =>
encoding.GetEncoding(type == EncodingType.UTF8).GetString(bytes, index, count)
);

/// <summary>
/// Encodes a string using the default encoding.
/// </summary>
/// <param name="str">The string to encode.</param>
/// <returns>The encoded bytes.</returns>
public byte[] Encode(string str) => encoding.Default.GetBytes(str);

/// <summary>
/// Decodes bytes using the specified encoding type.
/// </summary>
/// <param name="bytes">The bytes to decode.</param>
/// <param name="type">The encoding type.</param>
/// <returns>The decoded string.</returns>
public string Decode(byte[] bytes, EncodingType type = EncodingType.Default) =>
encoding.Decode(bytes, 0, bytes.Length, type);

/// <summary>
/// Decodes a portion of bytes using the specified encoding type.
/// </summary>
/// <param name="bytes">The bytes to decode.</param>
/// <param name="start">The start index.</param>
/// <param name="length">The length.</param>
/// <param name="type">The encoding type.</param>
/// <returns>The decoded string.</returns>
public string Decode(
byte[] bytes,
int start,
int length,
EncodingType type = EncodingType.Default
) => encoding.GetDecoder()(bytes, start, length, type);
}
}

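A minimal usage sketch of the members added above (hypothetical caller, not part of the diff; it assumes the IArchiveEncoding/ArchiveEncoding types shown here and a compiler that supports the newer C# `extension` member syntax):

using System.Text;
using SharpCompress.Common;

class EncodingDemo
{
    static void Main()
    {
        IArchiveEncoding enc = new ArchiveEncoding { Default = Encoding.ASCII };
        byte[] raw = enc.Encode("entry.txt");                 // uses enc.Default
        string viaDefault = enc.Decode(raw);                  // EncodingType.Default
        string viaUtf8 = enc.Decode(raw, EncodingType.UTF8);  // routed through enc.UTF8
        System.Console.WriteLine($"{viaDefault} / {viaUtf8}");
    }
}
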
src/SharpCompress/Common/Arj/Headers/ArjHeader.Async.cs (new file, 132 lines)
@@ -0,0 +1,132 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Crypto;

namespace SharpCompress.Common.Arj.Headers;

public abstract partial class ArjHeader
{
public abstract ValueTask<ArjHeader?> ReadAsync(
Stream reader,
CancellationToken cancellationToken = default
);

public async ValueTask<byte[]> ReadHeaderAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
// check for magic bytes
var magic = new byte[2];
if (await stream.ReadAsync(magic, 0, 2, cancellationToken) != 2)
{
return Array.Empty<byte>();
}

if (!CheckMagicBytes(magic))
{
throw new InvalidDataException("Not an ARJ file (wrong magic bytes)");
}

// read header_size
byte[] headerBytes = new byte[2];
await stream.ReadAsync(headerBytes, 0, 2, cancellationToken);
var headerSize = (ushort)(headerBytes[0] | headerBytes[1] << 8);
if (headerSize < 1)
{
return Array.Empty<byte>();
}

var body = new byte[headerSize];
var read = await stream.ReadAsync(body, 0, headerSize, cancellationToken);
if (read < headerSize)
{
return Array.Empty<byte>();
}

byte[] crc = new byte[4];
read = await stream.ReadAsync(crc, 0, 4, cancellationToken);
var checksum = Crc32Stream.Compute(body);
// Compute the hash value
if (checksum != BitConverter.ToUInt32(crc, 0))
{
throw new InvalidDataException("Header checksum is invalid");
}
return body;
}

protected async ValueTask<List<byte[]>> ReadExtendedHeadersAsync(
Stream reader,
CancellationToken cancellationToken = default
)
{
List<byte[]> extendedHeader = new List<byte[]>();
byte[] buffer = new byte[2];

while (true)
{
int bytesRead = await reader.ReadAsync(buffer, 0, 2, cancellationToken);
if (bytesRead < 2)
{
throw new EndOfStreamException(
"Unexpected end of stream while reading extended header size."
);
}

var extHeaderSize = (ushort)(buffer[0] | (buffer[1] << 8));
if (extHeaderSize == 0)
{
return extendedHeader;
}

byte[] header = new byte[extHeaderSize];
bytesRead = await reader.ReadAsync(header, 0, extHeaderSize, cancellationToken);
if (bytesRead < extHeaderSize)
{
throw new EndOfStreamException(
"Unexpected end of stream while reading extended header data."
);
}

byte[] crcextended = new byte[4];
bytesRead = await reader.ReadAsync(crcextended, 0, 4, cancellationToken);
if (bytesRead < 4)
{
throw new EndOfStreamException(
"Unexpected end of stream while reading extended header CRC."
);
}

var checksum = Crc32Stream.Compute(header);
if (checksum != BitConverter.ToUInt32(crcextended, 0))
{
throw new InvalidDataException("Extended header checksum is invalid");
}

extendedHeader.Add(header);
}
}

/// <summary>
/// Asynchronously checks if the stream is an ARJ archive
/// </summary>
/// <param name="stream">The stream to read from</param>
/// <param name="cancellationToken">Cancellation token</param>
/// <returns>True if the stream is an ARJ archive, false otherwise</returns>
public static async ValueTask<bool> IsArchiveAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
var bytes = new byte[2];
if (await stream.ReadAsync(bytes, 0, 2, cancellationToken) != 2)
{
return false;
}

return CheckMagicBytes(bytes);
}
}

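For context, a hypothetical caller of the async ARJ probe above (not part of the diff); it assumes ArjHeader.IsArchiveAsync is public as shown and that the stream is positioned at the start of the file:

using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common.Arj.Headers;

class ArjProbe
{
    static async Task Main(string[] args)
    {
        await using var fs = File.OpenRead(args[0]);
        // Only the two magic bytes are read (0x60 0xEA, i.e. ARJ_MAGIC 0xEA60 little-endian).
        bool isArj = await ArjHeader.IsArchiveAsync(fs);
        System.Console.WriteLine(isArj ? "ARJ archive" : "not ARJ");
    }
}
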
@@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Crypto;
@@ -15,7 +16,7 @@ namespace SharpCompress.Common.Arj.Headers
LocalHeader,
}

public abstract class ArjHeader
public abstract partial class ArjHeader
{
private const int FIRST_HDR_SIZE = 34;
private const ushort ARJ_MAGIC = 0xEA60;
@@ -31,6 +32,8 @@ namespace SharpCompress.Common.Arj.Headers

public abstract ArjHeader? Read(Stream reader);

// Async methods moved to ArjHeader.Async.cs

public byte[] ReadHeader(Stream stream)
{
// check for magic bytes
@@ -72,6 +75,8 @@ namespace SharpCompress.Common.Arj.Headers
return body;
}

// ReadHeaderAsync moved to ArjHeader.Async.cs

protected List<byte[]> ReadExtendedHeaders(Stream reader)
{
List<byte[]> extendedHeader = new List<byte[]>();

src/SharpCompress/Common/Arj/Headers/ArjLocalHeader.Async.cs (new file, 24 lines)
@@ -0,0 +1,24 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;

namespace SharpCompress.Common.Arj.Headers;

public partial class ArjLocalHeader
{
public override async ValueTask<ArjHeader?> ReadAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
var body = await ReadHeaderAsync(stream, cancellationToken);
if (body.Length > 0)
{
await ReadExtendedHeadersAsync(stream, cancellationToken);
var header = LoadFrom(body);
header.DataStartPosition = stream.Position;
return header;
}
return null;
}
}

@@ -4,11 +4,12 @@ using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Text;
using System.Threading;
using System.Threading.Tasks;

namespace SharpCompress.Common.Arj.Headers
{
public class ArjLocalHeader : ArjHeader
public partial class ArjLocalHeader : ArjHeader
{
public ArchiveEncoding ArchiveEncoding { get; }
public long DataStartPosition { get; protected set; }
@@ -55,6 +56,8 @@ namespace SharpCompress.Common.Arj.Headers
return null;
}

// ReadAsync moved to ArjLocalHeader.Async.cs

public ArjLocalHeader LoadFrom(byte[] headerBytes)
{
int offset = 0;

src/SharpCompress/Common/Arj/Headers/ArjMainHeader.Async.cs (new file, 18 lines)
@@ -0,0 +1,18 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;

namespace SharpCompress.Common.Arj.Headers;

public partial class ArjMainHeader
{
public override async ValueTask<ArjHeader?> ReadAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
var body = await ReadHeaderAsync(stream, cancellationToken);
await ReadExtendedHeadersAsync(stream, cancellationToken);
return LoadFrom(body);
}
}

@@ -1,12 +1,14 @@
using System;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Crypto;

namespace SharpCompress.Common.Arj.Headers
{
public class ArjMainHeader : ArjHeader
public partial class ArjMainHeader : ArjHeader
{
private const int FIRST_HDR_SIZE = 34;
private const ushort ARJ_MAGIC = 0xEA60;
@@ -45,6 +47,8 @@ namespace SharpCompress.Common.Arj.Headers
return LoadFrom(body);
}

// ReadAsync moved to ArjMainHeader.Async.cs

public ArjMainHeader LoadFrom(byte[] headerBytes)
{
var offset = 1;

src/SharpCompress/Common/AsyncBinaryReader.cs (new file, 108 lines)
@@ -0,0 +1,108 @@
using System;
using System.Buffers.Binary;
using System.IO;
using System.Threading;
using System.Threading.Tasks;

namespace SharpCompress.Common
{
public sealed class AsyncBinaryReader : IDisposable
{
private readonly Stream _stream;
private readonly Stream _originalStream;
private readonly bool _leaveOpen;
private readonly byte[] _buffer = new byte[8];
private bool _disposed;

public AsyncBinaryReader(Stream stream, bool leaveOpen = false, int bufferSize = 4096)
{
if (!stream.CanRead)
{
throw new ArgumentException("Stream must be readable.");
}

_originalStream = stream ?? throw new ArgumentNullException(nameof(stream));
_leaveOpen = leaveOpen;

// Use the stream directly without wrapping in BufferedStream
// BufferedStream uses synchronous Read internally which doesn't work with async-only streams
// SharpCompress uses SharpCompressStream for buffering which supports true async reads
_stream = stream;
}

public Stream BaseStream => _stream;

public async ValueTask<byte> ReadByteAsync(CancellationToken ct = default)
{
await _stream.ReadExactAsync(_buffer, 0, 1, ct).ConfigureAwait(false);
return _buffer[0];
}

public async ValueTask<ushort> ReadUInt16Async(CancellationToken ct = default)
{
await _stream.ReadExactAsync(_buffer, 0, 2, ct).ConfigureAwait(false);
return BinaryPrimitives.ReadUInt16LittleEndian(_buffer);
}

public async ValueTask<uint> ReadUInt32Async(CancellationToken ct = default)
{
await _stream.ReadExactAsync(_buffer, 0, 4, ct).ConfigureAwait(false);
return BinaryPrimitives.ReadUInt32LittleEndian(_buffer);
}

public async ValueTask<ulong> ReadUInt64Async(CancellationToken ct = default)
{
await _stream.ReadExactAsync(_buffer, 0, 8, ct).ConfigureAwait(false);
return BinaryPrimitives.ReadUInt64LittleEndian(_buffer);
}

public async ValueTask ReadBytesAsync(
byte[] bytes,
int offset,
int count,
CancellationToken ct = default
)
{
await _stream.ReadExactAsync(bytes, offset, count, ct).ConfigureAwait(false);
}

public async ValueTask SkipAsync(int count, CancellationToken ct = default)
{
await _stream.SkipAsync(count, ct).ConfigureAwait(false);
}

public void Dispose()
{
if (_disposed)
{
return;
}

_disposed = true;

// Dispose the original stream if we own it
if (!_leaveOpen)
{
_originalStream.Dispose();
}
}

#if NET8_0_OR_GREATER
public async ValueTask DisposeAsync()
{
if (_disposed)
{
return;
}

_disposed = true;

// Dispose the original stream if we own it
if (!_leaveOpen)
{
await _originalStream.DisposeAsync().ConfigureAwait(false);
}
}
#endif
}
}

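A hypothetical consumer of the reader above (not part of the diff), reading a little-endian record header; it assumes the ReadExactAsync/SkipAsync stream extensions referenced by AsyncBinaryReader are available in the library:

using System.IO;
using System.Threading.Tasks;
using SharpCompress.Common;

class HeaderProbe
{
    // Read a 2-byte id and a 4-byte length (both little-endian), then skip the payload.
    static async Task<(ushort id, uint length)> ReadRecordAsync(Stream stream)
    {
        using var reader = new AsyncBinaryReader(stream, leaveOpen: true);
        var id = await reader.ReadUInt16Async();
        var length = await reader.ReadUInt32Async();
        await reader.SkipAsync((int)length);
        return (id, length);
    }
}
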
src/SharpCompress/Common/Constants.cs (new file, 12 lines)
@@ -0,0 +1,12 @@
namespace SharpCompress.Common;

public static class Constants
{
/// <summary>
/// The default buffer size for stream operations, matching .NET's Stream.CopyTo default of 81920 bytes.
/// This can be modified globally at runtime.
/// </summary>
public static int BufferSize { get; set; } = 81920;

public static int RewindableBufferSize { get; set; } = 81920;
}

src/SharpCompress/Common/EntryStream.Async.cs (new file, 84 lines)
@@ -0,0 +1,84 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.IO;

namespace SharpCompress.Common;

public partial class EntryStream
{
/// <summary>
/// Asynchronously skip the rest of the entry stream.
/// </summary>
public async ValueTask SkipEntryAsync(CancellationToken cancellationToken = default)
{
await this.SkipAsync(cancellationToken).ConfigureAwait(false);
_completed = true;
}

#if !LEGACY_DOTNET
public override async ValueTask DisposeAsync()
{
if (_isDisposed)
{
return;
}
_isDisposed = true;
if (!(_completed || _reader.Cancelled))
{
await SkipEntryAsync().ConfigureAwait(false);
}

//Need a safe standard approach to this - it's okay for compression to overreads. Handling needs to be standardised
if (_stream is IStreamStack ss)
{
if (ss.BaseStream() is SharpCompress.Compressors.Deflate.DeflateStream deflateStream)
{
await deflateStream.FlushAsync().ConfigureAwait(false);
}
else if (ss.BaseStream() is SharpCompress.Compressors.LZMA.LzmaStream lzmaStream)
{
await lzmaStream.FlushAsync().ConfigureAwait(false);
}
}
#if DEBUG_STREAMS
this.DebugDispose(typeof(EntryStream));
#endif
await base.DisposeAsync().ConfigureAwait(false);
await _stream.DisposeAsync().ConfigureAwait(false);
}
#endif

public override async Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
var read = await _stream
.ReadAsync(buffer, offset, count, cancellationToken)
.ConfigureAwait(false);
if (read <= 0)
{
_completed = true;
}
return read;
}

#if !LEGACY_DOTNET
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
)
{
var read = await _stream.ReadAsync(buffer, cancellationToken).ConfigureAwait(false);
if (read <= 0)
{
_completed = true;
}
return read;
}
#endif
}

@@ -8,7 +8,7 @@ using SharpCompress.Readers;

namespace SharpCompress.Common;

public class EntryStream : Stream, IStreamStack
public partial class EntryStream : Stream, IStreamStack
{
#if DEBUG_STREAMS
long IStreamStack.InstanceId { get; set; }
@@ -53,15 +53,6 @@ public class EntryStream : Stream, IStreamStack
_completed = true;
}

/// <summary>
/// Asynchronously skip the rest of the entry stream.
/// </summary>
public async Task SkipEntryAsync(CancellationToken cancellationToken = default)
{
await this.SkipAsync(cancellationToken).ConfigureAwait(false);
_completed = true;
}

protected override void Dispose(bool disposing)
{
if (_isDisposed)
@@ -93,39 +84,6 @@ public class EntryStream : Stream, IStreamStack
_stream.Dispose();
}

#if !NETFRAMEWORK && !NETSTANDARD2_0
public override async ValueTask DisposeAsync()
{
if (_isDisposed)
{
return;
}
_isDisposed = true;
if (!(_completed || _reader.Cancelled))
{
await SkipEntryAsync().ConfigureAwait(false);
}

//Need a safe standard approach to this - it's okay for compression to overreads. Handling needs to be standardised
if (_stream is IStreamStack ss)
{
if (ss.BaseStream() is SharpCompress.Compressors.Deflate.DeflateStream deflateStream)
{
await deflateStream.FlushAsync().ConfigureAwait(false);
}
else if (ss.BaseStream() is SharpCompress.Compressors.LZMA.LzmaStream lzmaStream)
{
await lzmaStream.FlushAsync().ConfigureAwait(false);
}
}
#if DEBUG_STREAMS
this.DebugDispose(typeof(EntryStream));
#endif
await base.DisposeAsync().ConfigureAwait(false);
await _stream.DisposeAsync().ConfigureAwait(false);
}
#endif

public override bool CanRead => true;

public override bool CanSeek => false;
@@ -154,38 +112,6 @@ public class EntryStream : Stream, IStreamStack
return read;
}

public override async Task<int> ReadAsync(
byte[] buffer,
int offset,
int count,
CancellationToken cancellationToken
)
{
var read = await _stream
.ReadAsync(buffer, offset, count, cancellationToken)
.ConfigureAwait(false);
if (read <= 0)
{
_completed = true;
}
return read;
}

#if !NETFRAMEWORK && !NETSTANDARD2_0
public override async ValueTask<int> ReadAsync(
Memory<byte> buffer,
CancellationToken cancellationToken = default
)
{
var read = await _stream.ReadAsync(buffer, cancellationToken).ConfigureAwait(false);
if (read <= 0)
{
_completed = true;
}
return read;
}
#endif

public override int ReadByte()
{
var value = _stream.ReadByte();

src/SharpCompress/Common/ExtractionMethods.Async.cs (new file, 116 lines)
@@ -0,0 +1,116 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;

namespace SharpCompress.Common;

internal static partial class ExtractionMethods
{
public static async ValueTask WriteEntryToDirectoryAsync(
IEntry entry,
string destinationDirectory,
ExtractionOptions? options,
Func<string, ExtractionOptions?, CancellationToken, ValueTask> writeAsync,
CancellationToken cancellationToken = default
)
{
string destinationFileName;
var fullDestinationDirectoryPath = Path.GetFullPath(destinationDirectory);

//check for trailing slash.
if (
fullDestinationDirectoryPath[fullDestinationDirectoryPath.Length - 1]
!= Path.DirectorySeparatorChar
)
{
fullDestinationDirectoryPath += Path.DirectorySeparatorChar;
}

if (!Directory.Exists(fullDestinationDirectoryPath))
{
throw new ExtractionException(
$"Directory does not exist to extract to: {fullDestinationDirectoryPath}"
);
}

options ??= new ExtractionOptions() { Overwrite = true };

var file = Path.GetFileName(entry.Key.NotNull("Entry Key is null")).NotNull("File is null");
file = Utility.ReplaceInvalidFileNameChars(file);
if (options.ExtractFullPath)
{
var folder = Path.GetDirectoryName(entry.Key.NotNull("Entry Key is null"))
.NotNull("Directory is null");
var destdir = Path.GetFullPath(Path.Combine(fullDestinationDirectoryPath, folder));

if (!Directory.Exists(destdir))
{
if (!destdir.StartsWith(fullDestinationDirectoryPath, PathComparison))
{
throw new ExtractionException(
"Entry is trying to create a directory outside of the destination directory."
);
}

Directory.CreateDirectory(destdir);
}
destinationFileName = Path.Combine(destdir, file);
}
else
{
destinationFileName = Path.Combine(fullDestinationDirectoryPath, file);
}

if (!entry.IsDirectory)
{
destinationFileName = Path.GetFullPath(destinationFileName);

if (!destinationFileName.StartsWith(fullDestinationDirectoryPath, PathComparison))
{
throw new ExtractionException(
"Entry is trying to write a file outside of the destination directory."
);
}
await writeAsync(destinationFileName, options, cancellationToken).ConfigureAwait(false);
}
else if (options.ExtractFullPath && !Directory.Exists(destinationFileName))
{
Directory.CreateDirectory(destinationFileName);
}
}

public static async ValueTask WriteEntryToFileAsync(
IEntry entry,
string destinationFileName,
ExtractionOptions? options,
Func<string, FileMode, CancellationToken, ValueTask> openAndWriteAsync,
CancellationToken cancellationToken = default
)
{
if (entry.LinkTarget != null)
{
if (options?.WriteSymbolicLink is null)
{
throw new ExtractionException(
"Entry is a symbolic link but ExtractionOptions.WriteSymbolicLink delegate is null"
);
}
options.WriteSymbolicLink(destinationFileName, entry.LinkTarget);
}
else
{
var fm = FileMode.Create;
options ??= new ExtractionOptions() { Overwrite = true };

if (!options.Overwrite)
{
fm = FileMode.CreateNew;
}

await openAndWriteAsync(destinationFileName, fm, cancellationToken)
.ConfigureAwait(false);
entry.PreserveExtractionOptions(destinationFileName, options);
}
}
}

@@ -6,7 +6,7 @@ using System.Threading.Tasks;

namespace SharpCompress.Common;

internal static class ExtractionMethods
internal static partial class ExtractionMethods
{
/// <summary>
/// Gets the appropriate StringComparison for path checks based on the file system.
@@ -123,111 +123,4 @@ internal static class ExtractionMethods
entry.PreserveExtractionOptions(destinationFileName, options);
}
}

public static async Task WriteEntryToDirectoryAsync(
IEntry entry,
string destinationDirectory,
ExtractionOptions? options,
Func<string, ExtractionOptions?, CancellationToken, Task> writeAsync,
CancellationToken cancellationToken = default
)
{
string destinationFileName;
var fullDestinationDirectoryPath = Path.GetFullPath(destinationDirectory);

//check for trailing slash.
if (
fullDestinationDirectoryPath[fullDestinationDirectoryPath.Length - 1]
!= Path.DirectorySeparatorChar
)
{
fullDestinationDirectoryPath += Path.DirectorySeparatorChar;
}

if (!Directory.Exists(fullDestinationDirectoryPath))
{
throw new ExtractionException(
$"Directory does not exist to extract to: {fullDestinationDirectoryPath}"
);
}

options ??= new ExtractionOptions() { Overwrite = true };

var file = Path.GetFileName(entry.Key.NotNull("Entry Key is null")).NotNull("File is null");
file = Utility.ReplaceInvalidFileNameChars(file);
if (options.ExtractFullPath)
{
var folder = Path.GetDirectoryName(entry.Key.NotNull("Entry Key is null"))
.NotNull("Directory is null");
var destdir = Path.GetFullPath(Path.Combine(fullDestinationDirectoryPath, folder));

if (!Directory.Exists(destdir))
{
if (!destdir.StartsWith(fullDestinationDirectoryPath, PathComparison))
{
throw new ExtractionException(
"Entry is trying to create a directory outside of the destination directory."
);
}

Directory.CreateDirectory(destdir);
}
destinationFileName = Path.Combine(destdir, file);
}
else
{
destinationFileName = Path.Combine(fullDestinationDirectoryPath, file);
}

if (!entry.IsDirectory)
{
destinationFileName = Path.GetFullPath(destinationFileName);

if (!destinationFileName.StartsWith(fullDestinationDirectoryPath, PathComparison))
{
throw new ExtractionException(
"Entry is trying to write a file outside of the destination directory."
);
}
await writeAsync(destinationFileName, options, cancellationToken).ConfigureAwait(false);
}
else if (options.ExtractFullPath && !Directory.Exists(destinationFileName))
{
Directory.CreateDirectory(destinationFileName);
}
}

public static async Task WriteEntryToFileAsync(
IEntry entry,
string destinationFileName,
ExtractionOptions? options,
Func<string, FileMode, CancellationToken, Task> openAndWriteAsync,
CancellationToken cancellationToken = default
)
{
if (entry.LinkTarget != null)
{
if (options?.WriteSymbolicLink is null)
{
throw new ExtractionException(
"Entry is a symbolic link but ExtractionOptions.WriteSymbolicLink delegate is null"
);
}
options.WriteSymbolicLink(destinationFileName, entry.LinkTarget);
}
else
{
var fm = FileMode.Create;
options ??= new ExtractionOptions() { Overwrite = true };

if (!options.Overwrite)
{
fm = FileMode.CreateNew;
}

await openAndWriteAsync(destinationFileName, fm, cancellationToken)
.ConfigureAwait(false);
entry.PreserveExtractionOptions(destinationFileName, options);
}
}
}

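Both versions of WriteEntryToDirectoryAsync guard against directory traversal the same way: normalize the destination root, ensure a trailing separator, resolve the candidate path, and require it to start with the root. A small standalone sketch of that idea (hypothetical helper, not from the diff; the library chooses its StringComparison per file system, whereas this sketch hard-codes a case-insensitive compare for brevity):

using System;
using System.IO;

static class PathGuard
{
    // Mirrors the containment check above: normalize paths and reject any
    // candidate that resolves outside the destination root.
    public static string Resolve(string destinationRoot, string entryKey)
    {
        var root = Path.GetFullPath(destinationRoot);
        if (root[root.Length - 1] != Path.DirectorySeparatorChar)
        {
            root += Path.DirectorySeparatorChar;
        }
        var candidate = Path.GetFullPath(Path.Combine(root, entryKey));
        if (!candidate.StartsWith(root, StringComparison.OrdinalIgnoreCase))
        {
            throw new InvalidOperationException("Entry escapes the destination directory.");
        }
        return candidate;
    }
}
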
@@ -1,12 +1,14 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;

namespace SharpCompress.Common;

public abstract class FilePart
{
protected FilePart(ArchiveEncoding archiveEncoding) => ArchiveEncoding = archiveEncoding;
protected FilePart(IArchiveEncoding archiveEncoding) => ArchiveEncoding = archiveEncoding;

internal ArchiveEncoding ArchiveEncoding { get; }
internal IArchiveEncoding ArchiveEncoding { get; }

internal abstract string? FilePartName { get; }
public int Index { get; set; }
@@ -14,4 +16,8 @@ public abstract class FilePart
internal abstract Stream? GetCompressedStream();
internal abstract Stream? GetRawStream();
internal bool Skipped { get; set; }

internal virtual ValueTask<Stream?> GetCompressedStreamAsync(
CancellationToken cancellationToken = default
) => new(GetCompressedStream());
}

src/SharpCompress/Common/GZip/GZipEntry.Async.cs (new file, 15 lines)
@@ -0,0 +1,15 @@
using System.Collections.Generic;
using System.IO;

namespace SharpCompress.Common.GZip;

public partial class GZipEntry
{
internal static async IAsyncEnumerable<GZipEntry> GetEntriesAsync(
Stream stream,
OptionsBase options
)
{
yield return new GZipEntry(await GZipFilePart.CreateAsync(stream, options.ArchiveEncoding));
}
}

@@ -4,7 +4,7 @@ using System.IO;

namespace SharpCompress.Common.GZip;

public class GZipEntry : Entry
public partial class GZipEntry : Entry
{
private readonly GZipFilePart? _filePart;

@@ -40,6 +40,8 @@ public class GZipEntry : Entry

internal static IEnumerable<GZipEntry> GetEntries(Stream stream, OptionsBase options)
{
yield return new GZipEntry(new GZipFilePart(stream, options.ArchiveEncoding));
yield return new GZipEntry(GZipFilePart.Create(stream, options.ArchiveEncoding));
}

// Async methods moved to GZipEntry.Async.cs
}

src/SharpCompress/Common/GZip/GZipFilePart.Async.cs (new file, 133 lines)
@@ -0,0 +1,133 @@
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Compressors.Deflate;

namespace SharpCompress.Common.GZip;

internal sealed partial class GZipFilePart
{
internal static async ValueTask<GZipFilePart> CreateAsync(
Stream stream,
IArchiveEncoding archiveEncoding,
CancellationToken cancellationToken = default
)
{
var part = new GZipFilePart(stream, archiveEncoding);

await part.ReadAndValidateGzipHeaderAsync(cancellationToken);
if (stream.CanSeek)
{
var position = stream.Position;
stream.Position = stream.Length - 8;
await part.ReadTrailerAsync(cancellationToken);
stream.Position = position;
part.EntryStartPosition = position;
}
else
{
// For non-seekable streams, we can't read the trailer or track position.
// Set to 0 since the stream will be read sequentially from its current position.
part.EntryStartPosition = 0;
}
return part;
}

private async ValueTask ReadTrailerAsync(CancellationToken cancellationToken = default)
{
// Read and potentially verify the GZIP trailer: CRC32 and size mod 2^32
var trailer = new byte[8];
_ = await _stream.ReadFullyAsync(trailer, 0, 8, cancellationToken);

Crc = BinaryPrimitives.ReadUInt32LittleEndian(trailer);
UncompressedSize = BinaryPrimitives.ReadUInt32LittleEndian(trailer.AsSpan().Slice(4));
}

private async ValueTask ReadAndValidateGzipHeaderAsync(
CancellationToken cancellationToken = default
)
{
// read the header on the first read
var header = new byte[10];
var n = await _stream.ReadAsync(header, 0, 10, cancellationToken);

// workitem 8501: handle edge case (decompress empty stream)
if (n == 0)
{
return;
}

if (n != 10)
{
throw new ZlibException("Not a valid GZIP stream.");
}

if (header[0] != 0x1F || header[1] != 0x8B || header[2] != 8)
{
throw new ZlibException("Bad GZIP header.");
}

var timet = BinaryPrimitives.ReadInt32LittleEndian(header.AsSpan().Slice(4));
DateModified = TarHeader.EPOCH.AddSeconds(timet);
if ((header[3] & 0x04) == 0x04)
{
// read and discard extra field
var lengthField = new byte[2];
_ = await _stream.ReadAsync(lengthField, 0, 2, cancellationToken);

var extraLength = (short)(lengthField[0] + (lengthField[1] * 256));
var extra = new byte[extraLength];

if (!await _stream.ReadFullyAsync(extra, cancellationToken))
{
throw new ZlibException("Unexpected end-of-file reading GZIP header.");
}
}
if ((header[3] & 0x08) == 0x08)
{
_name = await ReadZeroTerminatedStringAsync(_stream, cancellationToken);
}
if ((header[3] & 0x10) == 0x010)
{
await ReadZeroTerminatedStringAsync(_stream, cancellationToken);
}
if ((header[3] & 0x02) == 0x02)
{
var buf = new byte[1];
_ = await _stream.ReadAsync(buf, 0, 1, cancellationToken); // CRC16, ignore
}
}

private async ValueTask<string> ReadZeroTerminatedStringAsync(
Stream stream,
CancellationToken cancellationToken = default
)
{
var buf1 = new byte[1];
var list = new List<byte>();
var done = false;
do
{
// workitem 7740
var n = await stream.ReadAsync(buf1, 0, 1, cancellationToken);
if (n != 1)
{
throw new ZlibException("Unexpected EOF reading GZIP header.");
}
if (buf1[0] == 0)
{
done = true;
}
else
{
list.Add(buf1[0]);
}
} while (!done);
var buffer = list.ToArray();
return ArchiveEncoding.Decode(buffer);
}
}

@@ -8,33 +8,37 @@ using SharpCompress.Compressors.Deflate;

namespace SharpCompress.Common.GZip;

internal sealed class GZipFilePart : FilePart
internal sealed partial class GZipFilePart : FilePart
{
private string? _name;
private readonly Stream _stream;

internal GZipFilePart(Stream stream, ArchiveEncoding archiveEncoding)
: base(archiveEncoding)
internal static GZipFilePart Create(Stream stream, IArchiveEncoding archiveEncoding)
{
_stream = stream;
ReadAndValidateGzipHeader();
var part = new GZipFilePart(stream, archiveEncoding);

part.ReadAndValidateGzipHeader();
if (stream.CanSeek)
{
var position = stream.Position;
stream.Position = stream.Length - 8;
ReadTrailer();
part.ReadTrailer();
stream.Position = position;
EntryStartPosition = position;
part.EntryStartPosition = position;
}
else
{
// For non-seekable streams, we can't read the trailer or track position.
// Set to 0 since the stream will be read sequentially from its current position.
EntryStartPosition = 0;
part.EntryStartPosition = 0;
}
return part;
}

internal long EntryStartPosition { get; }
private GZipFilePart(Stream stream, IArchiveEncoding archiveEncoding)
: base(archiveEncoding) => _stream = stream;

internal long EntryStartPosition { get; private set; }

internal DateTime? DateModified { get; private set; }
internal uint? Crc { get; private set; }
@@ -43,7 +47,12 @@ internal sealed class GZipFilePart : FilePart
internal override string? FilePartName => _name;

internal override Stream GetCompressedStream() =>
new DeflateStream(_stream, CompressionMode.Decompress, CompressionLevel.Default);
new DeflateStream(
_stream,
CompressionMode.Decompress,
CompressionLevel.Default,
leaveOpen: true
);

internal override Stream GetRawStream() => _stream;

@@ -51,7 +60,7 @@ internal sealed class GZipFilePart : FilePart
{
// Read and potentially verify the GZIP trailer: CRC32 and size mod 2^32
Span<byte> trailer = stackalloc byte[8];
var n = _stream.Read(trailer);
_stream.ReadFully(trailer);

Crc = BinaryPrimitives.ReadUInt32LittleEndian(trailer);
UncompressedSize = BinaryPrimitives.ReadUInt32LittleEndian(trailer.Slice(4));

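The header and trailer parsing above follows RFC 1952: a 10-byte fixed header (magic 0x1F 0x8B, method 8 = deflate, a flag byte, 4-byte mtime), optional extra/name/comment/CRC16 fields selected by the flag bits, and an 8-byte trailer holding CRC32 and the uncompressed size modulo 2^32. A small standalone sketch of just the trailer read (hypothetical, synchronous, not from the diff; Stream.ReadExactly needs .NET 7+):

using System;
using System.Buffers.Binary;
using System.IO;

static class GZipTrailer
{
    // Mirrors ReadTrailer above: the last 8 bytes of a single-member .gz file are
    // CRC32 and ISIZE (uncompressed size mod 2^32), both little-endian.
    public static (uint crc, uint size) Read(string path)
    {
        using var fs = File.OpenRead(path);
        fs.Seek(-8, SeekOrigin.End);
        Span<byte> trailer = stackalloc byte[8];
        fs.ReadExactly(trailer);
        return (
            BinaryPrimitives.ReadUInt32LittleEndian(trailer),
            BinaryPrimitives.ReadUInt32LittleEndian(trailer.Slice(4))
        );
    }
}
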
src/SharpCompress/Common/IArchiveEncoding.cs (new file, 36 lines)
@@ -0,0 +1,36 @@
using System;
using System.Text;

namespace SharpCompress.Common;

/// <summary>
/// Defines the encoding settings for archives.
/// </summary>
public interface IArchiveEncoding
{
/// <summary>
/// Default encoding to use when archive format doesn't specify one. Required and defaults to Encoding.Default.
/// </summary>
public Encoding Default { get; set; }

/// <summary>
/// ArchiveEncoding used by encryption schemes which don't comply with RFC 2898. Required and defaults to Encoding.Default.
/// </summary>
public Encoding Password { get; set; }

/// <summary>
/// Default encoding to use when archive format specifies UTF-8 encoding. Required and defaults to Encoding.UTF8.
/// </summary>
public Encoding UTF8 { get; set; }

/// <summary>
/// Set this encoding when you want to force it for all encoding operations.
/// </summary>
public Encoding? Forced { get; set; }

/// <summary>
/// Set this when you want to use a custom method for all decoding operations.
/// </summary>
/// <returns>string Func(bytes, index, length, EncodingType)</returns>
public Func<byte[], int, int, EncodingType, string>? CustomDecoder { get; set; }
}

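A hypothetical CustomDecoder hookup (not part of the diff) showing the new EncodingType-aware signature; it assumes the ArchiveEncoding class and the extension Decode shown earlier in this diff:

using System.Text;
using SharpCompress.Common;

class CustomDecoderDemo
{
    static void Main()
    {
        IArchiveEncoding enc = new ArchiveEncoding
        {
            // Route UTF-8-flagged entries through UTF-8, everything else through ASCII.
            CustomDecoder = (bytes, index, count, type) =>
                (type == EncodingType.UTF8 ? Encoding.UTF8 : Encoding.ASCII)
                    .GetString(bytes, index, count),
        };
        byte[] raw = Encoding.UTF8.GetBytes("entry-name.txt");
        System.Console.WriteLine(enc.Decode(raw, EncodingType.UTF8));
    }
}
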
@@ -2,7 +2,7 @@ using System;

namespace SharpCompress.Common;

public interface IVolume : IDisposable
public interface IVolume : IDisposable, IAsyncDisposable
{
int Index { get; }

@@ -7,5 +7,5 @@ public class OptionsBase
/// </summary>
public bool LeaveStreamOpen { get; set; } = true;

public ArchiveEncoding ArchiveEncoding { get; set; } = new();
public IArchiveEncoding ArchiveEncoding { get; set; } = new ArchiveEncoding();
}

src/SharpCompress/Common/Rar/AsyncMarkingBinaryReader.cs (new file, 189 lines)
@@ -0,0 +1,189 @@
using System;
using System.Buffers.Binary;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;

namespace SharpCompress.Common.Rar;

internal class AsyncMarkingBinaryReader
{
private readonly AsyncBinaryReader _reader;

public AsyncMarkingBinaryReader(Stream stream)
{
_reader = new AsyncBinaryReader(stream, leaveOpen: true);
}

public Stream BaseStream => _reader.BaseStream;

public virtual long CurrentReadByteCount { get; protected set; }

public virtual void Mark() => CurrentReadByteCount = 0;

public virtual async ValueTask<bool> ReadBooleanAsync(
CancellationToken cancellationToken = default
) => await ReadByteAsync(cancellationToken).ConfigureAwait(false) != 0;

public virtual async ValueTask<byte> ReadByteAsync(
CancellationToken cancellationToken = default
)
{
CurrentReadByteCount++;
return await _reader.ReadByteAsync(cancellationToken).ConfigureAwait(false);
}

public virtual async ValueTask<byte[]> ReadBytesAsync(
int count,
CancellationToken cancellationToken = default
)
{
CurrentReadByteCount += count;
var bytes = new byte[count];
await _reader.ReadBytesAsync(bytes, 0, count, cancellationToken).ConfigureAwait(false);
return bytes;
}

public async ValueTask<ushort> ReadUInt16Async(CancellationToken cancellationToken = default)
{
var bytes = await ReadBytesAsync(2, cancellationToken).ConfigureAwait(false);
return BinaryPrimitives.ReadUInt16LittleEndian(bytes);
}

public async ValueTask<uint> ReadUInt32Async(CancellationToken cancellationToken = default)
{
var bytes = await ReadBytesAsync(4, cancellationToken).ConfigureAwait(false);
return BinaryPrimitives.ReadUInt32LittleEndian(bytes);
}

public virtual async ValueTask<ulong> ReadUInt64Async(
CancellationToken cancellationToken = default
)
{
var bytes = await ReadBytesAsync(8, cancellationToken).ConfigureAwait(false);
return BinaryPrimitives.ReadUInt64LittleEndian(bytes);
}

public virtual async ValueTask<short> ReadInt16Async(
CancellationToken cancellationToken = default
)
{
var bytes = await ReadBytesAsync(2, cancellationToken).ConfigureAwait(false);
return BinaryPrimitives.ReadInt16LittleEndian(bytes);
}

public virtual async ValueTask<int> ReadInt32Async(
CancellationToken cancellationToken = default
)
{
var bytes = await ReadBytesAsync(4, cancellationToken).ConfigureAwait(false);
return BinaryPrimitives.ReadInt32LittleEndian(bytes);
}

public virtual async ValueTask<long> ReadInt64Async(
CancellationToken cancellationToken = default
)
{
var bytes = await ReadBytesAsync(8, cancellationToken).ConfigureAwait(false);
return BinaryPrimitives.ReadInt64LittleEndian(bytes);
}

public async ValueTask<ulong> ReadRarVIntAsync(
CancellationToken cancellationToken = default,
int maxBytes = 10
) => await DoReadRarVIntAsync((maxBytes - 1) * 7, cancellationToken).ConfigureAwait(false);

private async ValueTask<ulong> DoReadRarVIntAsync(
int maxShift,
CancellationToken cancellationToken
)
{
var shift = 0;
ulong result = 0;
do
{
var b0 = await ReadByteAsync(cancellationToken).ConfigureAwait(false);
var b1 = ((uint)b0) & 0x7f;
ulong n = b1;
var shifted = n << shift;
if (n != shifted >> shift)
{
// overflow
break;
}
result |= shifted;
if (b0 == b1)
{
return result;
}
shift += 7;
} while (shift <= maxShift);

throw new FormatException("malformed vint");
}

public async ValueTask<uint> ReadRarVIntUInt32Async(
int maxBytes = 5,
CancellationToken cancellationToken = default
) =>
// hopefully this gets inlined
await DoReadRarVIntUInt32Async((maxBytes - 1) * 7, cancellationToken).ConfigureAwait(false);

public async ValueTask<ushort> ReadRarVIntUInt16Async(
int maxBytes = 3,
CancellationToken cancellationToken = default
) =>
// hopefully this gets inlined
checked(
(ushort)
await DoReadRarVIntUInt32Async((maxBytes - 1) * 7, cancellationToken)
.ConfigureAwait(false)
);

public async ValueTask<byte> ReadRarVIntByteAsync(
int maxBytes = 2,
CancellationToken cancellationToken = default
) =>
// hopefully this gets inlined
checked(
(byte)
await DoReadRarVIntUInt32Async((maxBytes - 1) * 7, cancellationToken)
.ConfigureAwait(false)
);

public async ValueTask SkipAsync(int count, CancellationToken cancellationToken = default)
{
CurrentReadByteCount += count;
await _reader.SkipAsync(count, cancellationToken).ConfigureAwait(false);
}

private async ValueTask<uint> DoReadRarVIntUInt32Async(
int maxShift,
CancellationToken cancellationToken = default
)
{
var shift = 0;
uint result = 0;
do
{
var b0 = await ReadByteAsync(cancellationToken).ConfigureAwait(false);
var b1 = ((uint)b0) & 0x7f;
var n = b1;
var shifted = n << shift;
if (n != shifted >> shift)
{
// overflow
break;
}
result |= shifted;
if (b0 == b1)
{
return result;
}
shift += 7;
} while (shift <= maxShift);

throw new FormatException("malformed vint");
}
}

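The vint loops above decode RAR5 variable-length integers: each byte carries 7 data bits in little-endian order, with the high bit set on every byte except the last. A standalone synchronous sketch of the same decoding over a byte array (hypothetical, for illustration only):

using System;

static class RarVInt
{
    // Decode a RAR5 vint starting at `offset`; returns the value and how many bytes it used.
    public static (ulong value, int length) Decode(ReadOnlySpan<byte> data, int offset = 0)
    {
        ulong result = 0;
        var shift = 0;
        for (var i = offset; i < data.Length && shift < 70; i++, shift += 7)
        {
            var b = data[i];
            result |= (ulong)(b & 0x7f) << shift;
            if ((b & 0x80) == 0) // high bit clear marks the final byte
            {
                return (result, i - offset + 1);
            }
        }
        throw new FormatException("malformed vint");
    }
}

For example, RarVInt.Decode(new byte[] { 0xAA, 0x01 }) yields (170, 2): 42 from the low 7 bits of the first byte plus 1 shifted left by 7.
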
Some files were not shown because too many files have changed in this diff.