Mirror of https://github.com/adamhathcock/sharpcompress.git (synced 2026-02-04 05:25:00 +00:00)
Compare commits
106 Commits
SHA1: 7fe27ac310, 1e300349ce, 6b01a7b08e, 34d948df18, 27091c4f1d, 970a3d7f2a, 2bedbbfc54, 8de33f0db3,
df4eab67dc, 2d13bc0046, 704a0cb35d, 06a983e445, 2d10df8b87, baf66db9dc, 3545693999, 84fb99c2c8,
21e2983ae1, 004e0941d5, 188a426dde, 6fcfae8bfe, 9515350f52, 6b88f82656, e42d953f47, 9c257faf26,
d18cad6d76, 061273be22, b89de6caad, 9bc0a1d7c7, eee518b7fa, b7b78edaa3, 3eaac68ab4, a7672190e9,
4e4e89b6eb, 33dd519f56, 5c1149aa8b, 9061e92af6, 49f5ceaa9b, 525b309d37, bdb3a787fc, a9601ef848,
6fc4b045fd, 446852c7d0, c635f00899, 1393629bc5, 49ce17b759, 74888021c8, 9483856439, dbbc7c8132,
b203d165f5, c695e1136d, d847202308, 9d24e0a4b8, 745fe1eb9f, 3e52b85e9d, d26f020b50, 095b5f702c,
9622853b8d, b94e75fabe, 23dd041e2e, c73ca21b4d, 7ebdc85ad2, 99e2c8c90d, f24bfdf945, 7963233702,
b550df2038, fb55624f5f, e96366f489, 900190cf54, 2af744b474, 11153084e2, 4b9c814bfc, 1b5d3a3b6e,
373637e6a7, cb223217c1, eab97a3f8b, fdfaa8ab45, 2321d9dbee, bf74dd887a, 3612035894, 6553e9b0cd,
09f2410170, fb73d8c0a7, f2b0368078, 02301ecf6d, bcb61ee3e4, 6a824429d0, 6a52f9097f, 3fa85fc516,
498d132d8a, b6340f1458, 4afc7ae2e4, 95975a4c33, 198a0673a2, 94d1503c64, 5f13e245f0, 2715ae645d,
0299232cb5, 93e181cfd9, 8072eb1212, 226ce340f2, ab5535eba3, 8da2499495, c057ffb153, fe13d29549,
225aaab4f4, 14c973558b
@@ -3,7 +3,7 @@
"isRoot": true,
"tools": {
"csharpier": {
"version": "0.26.7",
"version": "0.28.1",
"commands": [
"dotnet-csharpier"
]
@@ -70,7 +70,7 @@ indent_style = tab
[*.{cs,csx,cake,vb,vbx}]
# Default Severity for all .NET Code Style rules below
dotnet_analyzer_diagnostic.severity = warning
dotnet_analyzer_diagnostic.severity = silent

##########################################
# File Header (Uncomment to support file headers)

@@ -269,6 +269,8 @@ dotnet_diagnostic.CA1305.severity = suggestion
dotnet_diagnostic.CA1307.severity = suggestion
dotnet_diagnostic.CA1309.severity = suggestion
dotnet_diagnostic.CA1310.severity = error
dotnet_diagnostic.CA1507.severity = suggestion
dotnet_diagnostic.CA1513.severity = suggestion
dotnet_diagnostic.CA1707.severity = suggestion
dotnet_diagnostic.CA1708.severity = suggestion
dotnet_diagnostic.CA1711.severity = suggestion

@@ -286,6 +288,7 @@ dotnet_diagnostic.CA1834.severity = error
dotnet_diagnostic.CA1845.severity = suggestion
dotnet_diagnostic.CA1848.severity = suggestion
dotnet_diagnostic.CA1852.severity = suggestion
dotnet_diagnostic.CA1860.severity = silent
dotnet_diagnostic.CA2016.severity = suggestion
dotnet_diagnostic.CA2201.severity = error
dotnet_diagnostic.CA2206.severity = error

@@ -303,13 +306,12 @@ dotnet_diagnostic.CS1998.severity = error
dotnet_diagnostic.CS8602.severity = error
dotnet_diagnostic.CS8604.severity = error
dotnet_diagnostic.CS8618.severity = error
dotnet_diagnostic.CS0618.severity = error
dotnet_diagnostic.CS0618.severity = suggestion
dotnet_diagnostic.CS1998.severity = error
dotnet_diagnostic.CS4014.severity = error
dotnet_diagnostic.CS8600.severity = error
dotnet_diagnostic.CS8603.severity = error
dotnet_diagnostic.CS8625.severity = error
dotnet_diagnostic.CS8981.severity = suggestion

dotnet_diagnostic.BL0005.severity = suggestion

@@ -318,7 +320,7 @@ dotnet_diagnostic.MVC1000.severity = suggestion
dotnet_diagnostic.RZ10012.severity = error

dotnet_diagnostic.IDE0004.severity = error # redundant cast
dotnet_diagnostic.IDE0005.severity = error
dotnet_diagnostic.IDE0005.severity = suggestion
dotnet_diagnostic.IDE0007.severity = error # Use var
dotnet_diagnostic.IDE0011.severity = error # Use braces on if statements
dotnet_diagnostic.IDE0010.severity = silent # populate switch

@@ -329,7 +331,7 @@ dotnet_diagnostic.IDE0023.severity = suggestion # use expression body for operat
dotnet_diagnostic.IDE0024.severity = silent # expression body for operators
dotnet_diagnostic.IDE0025.severity = suggestion # use expression body for properties
dotnet_diagnostic.IDE0027.severity = suggestion # Use expression body for accessors
dotnet_diagnostic.IDE0028.severity = silent
dotnet_diagnostic.IDE0028.severity = silent # expression body for accessors
dotnet_diagnostic.IDE0032.severity = suggestion # Use auto property
dotnet_diagnostic.IDE0033.severity = error # prefer tuple name
dotnet_diagnostic.IDE0037.severity = suggestion # simplify anonymous type

@@ -337,7 +339,7 @@ dotnet_diagnostic.IDE0040.severity = error # modifiers required
dotnet_diagnostic.IDE0041.severity = error # simplify null
dotnet_diagnostic.IDE0042.severity = error # deconstruct variable
dotnet_diagnostic.IDE0044.severity = suggestion # make field only when possible
dotnet_diagnostic.IDE0047.severity = suggestion # paratemeter name
dotnet_diagnostic.IDE0047.severity = suggestion # parameter name
dotnet_diagnostic.IDE0051.severity = error # unused field
dotnet_diagnostic.IDE0052.severity = error # unused member
dotnet_diagnostic.IDE0053.severity = suggestion # lambda not needed

@@ -351,11 +353,20 @@ dotnet_diagnostic.IDE0066.severity = suggestion # switch expression
dotnet_diagnostic.IDE0072.severity = suggestion # Populate switch - forces population of all cases even when default specified
dotnet_diagnostic.IDE0078.severity = suggestion # use pattern matching
dotnet_diagnostic.IDE0090.severity = suggestion # new can be simplified
dotnet_diagnostic.IDE0130.severity = error # namespace folder structure
dotnet_diagnostic.IDE0130.severity = suggestion # namespace folder structure
dotnet_diagnostic.IDE0160.severity = silent # Use block namespaces ARE NOT required
dotnet_diagnostic.IDE0161.severity = error # Please use file namespaces
dotnet_diagnostic.IDE0200.severity = suggestion # lambda not needed
dotnet_diagnostic.IDE1006.severity = suggestion # Naming rule violation: These words cannot contain lower case characters
dotnet_diagnostic.IDE0260.severity = suggestion # Use pattern matching
dotnet_diagnostic.IDE0270.severity = suggestion # Null check simplifcation
dotnet_diagnostic.IDE0290.severity = error # Primary Constructor
dotnet_diagnostic.IDE0300.severity = suggestion # Collection
dotnet_diagnostic.IDE0305.severity = suggestion # Collection ToList

dotnet_diagnostic.NX0001.severity = error
dotnet_diagnostic.NX0002.severity = silent
dotnet_diagnostic.NX0003.severity = silent

##########################################
# Styles
@@ -10,5 +10,7 @@
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
<RunAnalyzersDuringLiveAnalysis>False</RunAnalyzersDuringLiveAnalysis>
<RunAnalyzersDuringBuild>False</RunAnalyzersDuringBuild>
<ManagePackageVersionsCentrally>true</ManagePackageVersionsCentrally>
<RestorePackagesWithLockFile>true</RestorePackagesWithLockFile>
</PropertyGroup>
</Project>
Directory.Packages.props (new file, 18 lines)
@@ -0,0 +1,18 @@
<Project>
<ItemGroup>
<PackageVersion Include="Bullseye" Version="5.0.0" />
<PackageVersion Include="FluentAssertions" Version="6.12.0" />
<PackageVersion Include="Glob" Version="1.1.9" />
<PackageVersion Include="Microsoft.Bcl.AsyncInterfaces" Version="8.0.0" />
<PackageVersion Include="Microsoft.NET.Test.Sdk" Version="17.10.0" />
<PackageVersion Include="Mono.Posix.NETStandard" Version="1.0.0" />
<PackageVersion Include="SimpleExec" Version="12.0.0" />
<PackageVersion Include="System.Memory" Version="4.5.5" />
<PackageVersion Include="System.Text.Encoding.CodePages" Version="8.0.0" />
<PackageVersion Include="xunit" Version="2.9.0" />
<PackageVersion Include="xunit.runner.visualstudio" Version="2.8.2" />
<PackageVersion Include="xunit.SkippableFact" Version="1.4.13" />
<PackageVersion Include="ZstdSharp.Port" Version="0.8.1" />
<GlobalPackageReference Include="Microsoft.SourceLink.GitHub" Version="8.0.0" />
</ItemGroup>
</Project>
@@ -11,7 +11,7 @@
| Archive Format | Compression Format(s) | Compress/Decompress | Archive API | Reader API | Writer API |
| ---------------------- | ------------------------------------------------- | ------------------- | --------------- | ---------- | ------------- |
| Rar | Rar | Decompress (1) | RarArchive | RarReader | N/A |
| Zip (2) | None, DEFLATE, Deflate64, BZip2, LZMA/LZMA2, PPMd | Both | ZipArchive | ZipReader | ZipWriter |
| Zip (2) | None, Shrink, Reduce, Implode, DEFLATE, Deflate64, BZip2, LZMA/LZMA2, PPMd | Both | ZipArchive | ZipReader | ZipWriter |
| Tar | None | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.GZip | DEFLATE | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.BZip2 | BZip2 | Both | TarArchive | TarReader | TarWriter (3) |
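The Writer API column in the table above corresponds to code along these lines; a minimal illustrative sketch (file names are placeholders, not part of the diff), using the same three-argument `IWriter.Write` overload that appears in the archive code further down:

```csharp
using System;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Writers;

// Create a zip archive through the generic Writer API.
using (Stream target = File.OpenWrite("out.zip"))
using (var writer = WriterFactory.Open(target, ArchiveType.Zip, new WriterOptions(CompressionType.Deflate)))
using (Stream source = File.OpenRead("readme.txt"))
{
    // entry key, source stream, modification time
    writer.Write("readme.txt", source, DateTime.Now);
}
```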
NuGet.config (new file, 9 lines)
@@ -0,0 +1,9 @@
<?xml version="1.0" encoding="utf-8"?>
<configuration>
<packageSourceMapping>
<!-- key value for <packageSource> should match key values from <packageSources> element -->
<packageSource key="nuget.org">
<package pattern="*" />
</packageSource>
</packageSourceMapping>
</configuration>
@@ -1,12 +1,12 @@
# SharpCompress

SharpCompress is a compression library in pure C# for .NET Standard 2.0, 2.1, .NET Core 3.1 and .NET 5.0 that can unrar, un7zip, unzip, untar unbzip2, ungzip, unlzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip are implemented.
SharpCompress is a compression library in pure C# for .NET Framework 4.62, .NET Standard 2.1, .NET 6.0 and NET 8.0 that can unrar, un7zip, unzip, untar unbzip2, ungzip, unlzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip are implemented.

The major feature is support for non-seekable streams so large files can be processed on the fly (i.e. download stream).

GitHub Actions Build -
[](https://github.com/adamhathcock/sharpcompress/actions/workflows/dotnetcore.yml)
[](https://www.robiniadocs.com/d/sharpcompress/api/SharpCompress.html)
[](https://dndocs.com/d/sharpcompress/api/index.html)

## Need Help?
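The two API families the README paragraph distinguishes, forward-only reading and file random access, are typically used like this; a minimal sketch for orientation (paths are placeholders, not part of the diff):

```csharp
using System.IO;
using SharpCompress.Archives;
using SharpCompress.Common;
using SharpCompress.Readers;

// Forward-only Reader API: works on non-seekable streams such as downloads.
using (Stream stream = File.OpenRead("example.tar.gz"))
using (var reader = ReaderFactory.Open(stream))
{
    while (reader.MoveToNextEntry())
    {
        if (!reader.Entry.IsDirectory)
        {
            reader.WriteEntryToDirectory(
                "output",
                new ExtractionOptions { ExtractFullPath = true, Overwrite = true });
        }
    }
}

// Random-access Archive API: needs a seekable source, e.g. a file on disk.
using (var archive = ArchiveFactory.Open("example.zip"))
{
    foreach (var entry in archive.Entries)
    {
        // entry.Key, entry.Size and entry.OpenEntryStream() are available here.
    }
}
```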
@@ -17,6 +17,9 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Config", "Config", "{CDB425
ProjectSection(SolutionItems) = preProject
Directory.Build.props = Directory.Build.props
global.json = global.json
.editorconfig = .editorconfig
Directory.Packages.props = Directory.Packages.props
NuGet.config = NuGet.config
EndProjectSection
EndProject
Global
@@ -79,6 +79,10 @@
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=PrivateStaticFields/@EntryIndexedValue"><Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /></s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=PrivateStaticReadonly/@EntryIndexedValue"><Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /></s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=StaticReadonly/@EntryIndexedValue"><Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /></s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/UserRules/=15b5b1f1_002D457c_002D4ca6_002Db278_002D5615aedc07d3/@EntryIndexedValue"><Policy><Descriptor Staticness="Static" AccessRightKinds="Private" Description="Static readonly fields (private)"><ElementKinds><Kind Name="READONLY_FIELD" /></ElementKinds></Descriptor><Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /></Policy></s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/UserRules/=4a98fdf6_002D7d98_002D4f5a_002Dafeb_002Dea44ad98c70c/@EntryIndexedValue"><Policy><Descriptor Staticness="Instance" AccessRightKinds="Private" Description="Instance fields (private)"><ElementKinds><Kind Name="FIELD" /><Kind Name="READONLY_FIELD" /></ElementKinds></Descriptor><Policy Inspect="True" Prefix="_" Suffix="" Style="aaBb" /></Policy></s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/UserRules/=c873eafb_002Dd57f_002D481d_002D8c93_002D77f6863c2f88/@EntryIndexedValue"><Policy><Descriptor Staticness="Static" AccessRightKinds="Protected, ProtectedInternal, Internal, Public, PrivateProtected" Description="Static readonly fields (not private)"><ElementKinds><Kind Name="READONLY_FIELD" /></ElementKinds></Descriptor><Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /></Policy></s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/UserRules/=f9fce829_002De6f4_002D4cb2_002D80f1_002D5497c44f51df/@EntryIndexedValue"><Policy><Descriptor Staticness="Static" AccessRightKinds="Private" Description="Static fields (private)"><ElementKinds><Kind Name="FIELD" /></ElementKinds></Descriptor><Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /></Policy></s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FBLOCK_005FSCOPE_005FCONSTANT/@EntryIndexedValue"><Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /></s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FBLOCK_005FSCOPE_005FFUNCTION/@EntryIndexedValue"><Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /></s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FBLOCK_005FSCOPE_005FVARIABLE/@EntryIndexedValue"><Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /></s:String>
@@ -127,6 +131,7 @@
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002ECSharpPlaceAttributeOnSameLineMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateBlankLinesAroundFieldToBlankLinesAroundProperty/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateThisQualifierSettings/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EPredefinedNamingRulesToUserRulesUpgrade/@EntryIndexedValue">True</s:Boolean>
<s:String x:Key="/Default/Environment/UnitTesting/UnitTestSessionStore/Sessions/=6af8f80e_002D9fdd_002D4223_002D8e02_002D473db916f9b2/@EntryIndexedValue"><SessionState ContinuousTestingIsOn="False" ContinuousTestingMode="0" FrameworkVersion="{x:Null}" IsLocked="False" Name="All tests from Solution" PlatformMonoPreference="{x:Null}" PlatformType="{x:Null}" xmlns="urn:schemas-jetbrains-com:jetbrains-ut-session" xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml">
<Solution />
</SessionState></s:String></wpf:ResourceDictionary>
USAGE.md (2 lines changed)
@@ -27,7 +27,7 @@ To deal with the "correct" rules as well as the expectations of users, I've deci

To be explicit though, consider always using the overloads that use `ReaderOptions` or `WriterOptions` and explicitly set `LeaveStreamOpen` the way you want.

If using Compression Stream classes directly and you don't want the wrapped stream to be closed. Use the `NonDisposingStream` as a wrapped to prevent the stream being disposed. The change in 0.21 simplified a lot even though the usage is a bit more convoluted.
If using Compression Stream classes directly and you don't want the wrapped stream to be closed. Use the `NonDisposingStream` as a wrapper to prevent the stream being disposed. The change in 0.21 simplified a lot even though the usage is a bit more convoluted.

## Samples
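The `LeaveStreamOpen` advice quoted above looks roughly like this in practice; a minimal sketch assuming the standard `ReaderOptions` overloads (the archive name is a placeholder):

```csharp
using System.IO;
using SharpCompress.Readers;

using (Stream stream = File.OpenRead("example.zip"))
{
    // Opt in explicitly so disposing the reader does not dispose the stream.
    var options = new ReaderOptions { LeaveStreamOpen = true };
    using (var reader = ReaderFactory.Open(stream, options))
    {
        while (reader.MoveToNextEntry())
        {
            // process reader.Entry here
        }
    }

    // The stream is still open here and can be rewound or reused.
    stream.Position = 0;
}
```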
@@ -2,13 +2,13 @@

<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net7.0</TargetFramework>
<TargetFramework>net8.0</TargetFramework>
</PropertyGroup>

<ItemGroup>
<PackageReference Include="Bullseye" Version="4.2.1" />
<PackageReference Include="Glob" Version="1.1.9" />
<PackageReference Include="SimpleExec" Version="11.0.0" />
<PackageReference Include="Bullseye" />
<PackageReference Include="Glob" />
<PackageReference Include="SimpleExec" />
</ItemGroup>

</Project>
build/packages.lock.json (new file, 45 lines)
@@ -0,0 +1,45 @@
{
"version": 2,
"dependencies": {
"net8.0": {
"Bullseye": {
"type": "Direct",
"requested": "[5.0.0, )",
"resolved": "5.0.0",
"contentHash": "bqyt+m17ym+5aN45C5oZRAjuLDt8jKiCm/ys1XfymIXSkrTFwvI/QsbY3ucPSHDz7SF7uON7B57kXFv5H2k1ew=="
},
"Glob": {
"type": "Direct",
"requested": "[1.1.9, )",
"resolved": "1.1.9",
"contentHash": "AfK5+ECWYTP7G3AAdnU8IfVj+QpGjrh9GC2mpdcJzCvtQ4pnerAGwHsxJ9D4/RnhDUz2DSzd951O/lQjQby2Sw=="
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==",
"dependencies": {
"Microsoft.Build.Tasks.Git": "8.0.0",
"Microsoft.SourceLink.Common": "8.0.0"
}
},
"SimpleExec": {
"type": "Direct",
"requested": "[12.0.0, )",
"resolved": "12.0.0",
"contentHash": "ptxlWtxC8vM6Y6e3h9ZTxBBkOWnWrm/Sa1HT+2i1xcXY3Hx2hmKDZP5RShPf8Xr9D+ivlrXNy57ktzyH8kyt+Q=="
},
"Microsoft.Build.Tasks.Git": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
}
}
}
}
@@ -62,7 +62,7 @@ internal static class Adler32 // From https://github.com/SixLabors/ImageSharp/bl
public static int ReduceSum(Vector256<int> accumulator)
{
// Add upper lane to lower lane.
Vector128<int> vsum = Sse2.Add(accumulator.GetLower(), accumulator.GetUpper());
var vsum = Sse2.Add(accumulator.GetLower(), accumulator.GetUpper());

// Add odd to even.
vsum = Sse2.Add(vsum, Sse2.Shuffle(vsum, 0b_11_11_01_01));

@@ -81,7 +81,7 @@ internal static class Adler32 // From https://github.com/SixLabors/ImageSharp/bl
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static int EvenReduceSum(Vector256<int> accumulator)
{
Vector128<int> vsum = Sse2.Add(accumulator.GetLower(), accumulator.GetUpper()); // add upper lane to lower lane
var vsum = Sse2.Add(accumulator.GetLower(), accumulator.GetUpper()); // add upper lane to lower lane
vsum = Sse2.Add(vsum, Sse2.Shuffle(vsum, 0b_11_10_11_10)); // add high to low

// Vector128<int>.ToScalar() isn't optimized pre-net5.0 https://github.com/dotnet/runtime/pull/37882

@@ -189,29 +189,29 @@ internal static class Adler32 // From https://github.com/SixLabors/ImageSharp/bl
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
private static unsafe uint CalculateSse(uint adler, ReadOnlySpan<byte> buffer)
{
uint s1 = adler & 0xFFFF;
uint s2 = (adler >> 16) & 0xFFFF;
var s1 = adler & 0xFFFF;
var s2 = (adler >> 16) & 0xFFFF;

// Process the data in blocks.
uint length = (uint)buffer.Length;
uint blocks = length / BlockSize;
var length = (uint)buffer.Length;
var blocks = length / BlockSize;
length -= blocks * BlockSize;

fixed (byte* bufferPtr = &MemoryMarshal.GetReference(buffer))
{
fixed (byte* tapPtr = &MemoryMarshal.GetReference(Tap1Tap2))
{
byte* localBufferPtr = bufferPtr;
var localBufferPtr = bufferPtr;

// _mm_setr_epi8 on x86
Vector128<sbyte> tap1 = Sse2.LoadVector128((sbyte*)tapPtr);
Vector128<sbyte> tap2 = Sse2.LoadVector128((sbyte*)(tapPtr + 0x10));
Vector128<byte> zero = Vector128<byte>.Zero;
var tap1 = Sse2.LoadVector128((sbyte*)tapPtr);
var tap2 = Sse2.LoadVector128((sbyte*)(tapPtr + 0x10));
var zero = Vector128<byte>.Zero;
var ones = Vector128.Create((short)1);

while (blocks > 0)
{
uint n = NMAX / BlockSize; /* The NMAX constraint. */
var n = NMAX / BlockSize; /* The NMAX constraint. */
if (n > blocks)
{
n = blocks;
@@ -221,15 +221,15 @@ internal static class Adler32 // From https://github.com/SixLabors/ImageSharp/bl

// Process n blocks of data. At most NMAX data bytes can be
// processed before s2 must be reduced modulo BASE.
Vector128<uint> v_ps = Vector128.CreateScalar(s1 * n);
Vector128<uint> v_s2 = Vector128.CreateScalar(s2);
Vector128<uint> v_s1 = Vector128<uint>.Zero;
var v_ps = Vector128.CreateScalar(s1 * n);
var v_s2 = Vector128.CreateScalar(s2);
var v_s1 = Vector128<uint>.Zero;

do
{
// Load 32 input bytes.
Vector128<byte> bytes1 = Sse3.LoadDquVector128(localBufferPtr);
Vector128<byte> bytes2 = Sse3.LoadDquVector128(localBufferPtr + 0x10);
var bytes1 = Sse3.LoadDquVector128(localBufferPtr);
var bytes2 = Sse3.LoadDquVector128(localBufferPtr + 0x10);

// Add previous block byte sum to v_ps.
v_ps = Sse2.Add(v_ps, v_s1);

@@ -237,11 +237,11 @@ internal static class Adler32 // From https://github.com/SixLabors/ImageSharp/bl
// Horizontally add the bytes for s1, multiply-adds the
// bytes by [ 32, 31, 30, ... ] for s2.
v_s1 = Sse2.Add(v_s1, Sse2.SumAbsoluteDifferences(bytes1, zero).AsUInt32());
Vector128<short> mad1 = Ssse3.MultiplyAddAdjacent(bytes1, tap1);
var mad1 = Ssse3.MultiplyAddAdjacent(bytes1, tap1);
v_s2 = Sse2.Add(v_s2, Sse2.MultiplyAddAdjacent(mad1, ones).AsUInt32());

v_s1 = Sse2.Add(v_s1, Sse2.SumAbsoluteDifferences(bytes2, zero).AsUInt32());
Vector128<short> mad2 = Ssse3.MultiplyAddAdjacent(bytes2, tap2);
var mad2 = Ssse3.MultiplyAddAdjacent(bytes2, tap2);
v_s2 = Sse2.Add(v_s2, Sse2.MultiplyAddAdjacent(mad2, ones).AsUInt32());

localBufferPtr += BlockSize;

@@ -281,15 +281,15 @@ internal static class Adler32 // From https://github.com/SixLabors/ImageSharp/bl
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
public static unsafe uint CalculateAvx2(uint adler, ReadOnlySpan<byte> buffer)
{
uint s1 = adler & 0xFFFF;
uint s2 = (adler >> 16) & 0xFFFF;
uint length = (uint)buffer.Length;
var s1 = adler & 0xFFFF;
var s2 = (adler >> 16) & 0xFFFF;
var length = (uint)buffer.Length;

fixed (byte* bufferPtr = &MemoryMarshal.GetReference(buffer))
{
byte* localBufferPtr = bufferPtr;
var localBufferPtr = bufferPtr;

Vector256<byte> zero = Vector256<byte>.Zero;
var zero = Vector256<byte>.Zero;
var dot3v = Vector256.Create((short)1);
var dot2v = Vector256.Create(
32,
@@ -333,29 +333,29 @@ internal static class Adler32 // From https://github.com/SixLabors/ImageSharp/bl

while (length >= 32)
{
int k = length < NMAX ? (int)length : (int)NMAX;
var k = length < NMAX ? (int)length : (int)NMAX;
k -= k % 32;
length -= (uint)k;

Vector256<uint> vs10 = vs1;
Vector256<uint> vs3 = Vector256<uint>.Zero;
var vs10 = vs1;
var vs3 = Vector256<uint>.Zero;

while (k >= 32)
{
// Load 32 input bytes.
Vector256<byte> block = Avx.LoadVector256(localBufferPtr);
var block = Avx.LoadVector256(localBufferPtr);

// Sum of abs diff, resulting in 2 x int32's
Vector256<ushort> vs1sad = Avx2.SumAbsoluteDifferences(block, zero);
var vs1sad = Avx2.SumAbsoluteDifferences(block, zero);

vs1 = Avx2.Add(vs1, vs1sad.AsUInt32());
vs3 = Avx2.Add(vs3, vs10);

// sum 32 uint8s to 16 shorts.
Vector256<short> vshortsum2 = Avx2.MultiplyAddAdjacent(block, dot2v);
var vshortsum2 = Avx2.MultiplyAddAdjacent(block, dot2v);

// sum 16 shorts to 8 uint32s.
Vector256<int> vsum2 = Avx2.MultiplyAddAdjacent(vshortsum2, dot3v);
var vsum2 = Avx2.MultiplyAddAdjacent(vshortsum2, dot3v);

vs2 = Avx2.Add(vsum2.AsUInt32(), vs2);
vs10 = vs1;

@@ -434,14 +434,14 @@ internal static class Adler32 // From https://github.com/SixLabors/ImageSharp/bl
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
private static unsafe uint CalculateScalar(uint adler, ReadOnlySpan<byte> buffer)
{
uint s1 = adler & 0xFFFF;
uint s2 = (adler >> 16) & 0xFFFF;
var s1 = adler & 0xFFFF;
var s2 = (adler >> 16) & 0xFFFF;
uint k;

fixed (byte* bufferPtr = buffer)
{
var localBufferPtr = bufferPtr;
uint length = (uint)buffer.Length;
var length = (uint)buffer.Length;

while (length > 0)
{
@@ -12,39 +12,35 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
where TEntry : IArchiveEntry
where TVolume : IVolume
{
private readonly LazyReadOnlyCollection<TVolume> lazyVolumes;
private readonly LazyReadOnlyCollection<TEntry> lazyEntries;
private readonly LazyReadOnlyCollection<TVolume> _lazyVolumes;
private readonly LazyReadOnlyCollection<TEntry> _lazyEntries;
private bool _disposed;
private readonly SourceStream? _sourceStream;

public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionBegin;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionEnd;

public event EventHandler<CompressedBytesReadEventArgs>? CompressedBytesRead;
public event EventHandler<FilePartExtractionBeginEventArgs>? FilePartExtractionBegin;

protected ReaderOptions ReaderOptions { get; }

private bool disposed;
protected SourceStream SrcStream;

internal AbstractArchive(ArchiveType type, SourceStream srcStream)
internal AbstractArchive(ArchiveType type, SourceStream sourceStream)
{
Type = type;
ReaderOptions = srcStream.ReaderOptions;
SrcStream = srcStream;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(SrcStream));
lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
ReaderOptions = sourceStream.ReaderOptions;
_sourceStream = sourceStream;
_lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(_sourceStream));
_lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
}

#nullable disable
internal AbstractArchive(ArchiveType type)
{
Type = type;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(Enumerable.Empty<TVolume>());
lazyEntries = new LazyReadOnlyCollection<TEntry>(Enumerable.Empty<TEntry>());
ReaderOptions = new();
_lazyVolumes = new LazyReadOnlyCollection<TVolume>(Enumerable.Empty<TVolume>());
_lazyEntries = new LazyReadOnlyCollection<TEntry>(Enumerable.Empty<TEntry>());
}

#nullable enable

public ArchiveType Type { get; }

void IArchiveExtractionListener.FireEntryExtractionBegin(IArchiveEntry entry) =>
@@ -65,12 +61,12 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
/// <summary>
/// Returns an ReadOnlyCollection of all the RarArchiveEntries across the one or many parts of the RarArchive.
/// </summary>
public virtual ICollection<TEntry> Entries => lazyEntries;
public virtual ICollection<TEntry> Entries => _lazyEntries;

/// <summary>
/// Returns an ReadOnlyCollection of all the RarArchiveVolumes across the one or many parts of the RarArchive.
/// </summary>
public ICollection<TVolume> Volumes => lazyVolumes;
public ICollection<TVolume> Volumes => _lazyVolumes;

/// <summary>
/// The total size of the files compressed in the archive.

@@ -84,29 +80,29 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
public virtual long TotalUncompressSize =>
Entries.Aggregate(0L, (total, cf) => total + cf.Size);

protected abstract IEnumerable<TVolume> LoadVolumes(SourceStream srcStream);
protected abstract IEnumerable<TVolume> LoadVolumes(SourceStream sourceStream);
protected abstract IEnumerable<TEntry> LoadEntries(IEnumerable<TVolume> volumes);

IEnumerable<IArchiveEntry> IArchive.Entries => Entries.Cast<IArchiveEntry>();

IEnumerable<IVolume> IArchive.Volumes => lazyVolumes.Cast<IVolume>();
IEnumerable<IVolume> IArchive.Volumes => _lazyVolumes.Cast<IVolume>();

public virtual void Dispose()
{
if (!disposed)
if (!_disposed)
{
lazyVolumes.ForEach(v => v.Dispose());
lazyEntries.GetLoaded().Cast<Entry>().ForEach(x => x.Close());
SrcStream?.Dispose();
_lazyVolumes.ForEach(v => v.Dispose());
_lazyEntries.GetLoaded().Cast<Entry>().ForEach(x => x.Close());
_sourceStream?.Dispose();

disposed = true;
_disposed = true;
}
}

void IArchiveExtractionListener.EnsureEntriesLoaded()
{
lazyEntries.EnsureFullyLoaded();
lazyVolumes.EnsureFullyLoaded();
_lazyEntries.EnsureFullyLoaded();
_lazyVolumes.EnsureFullyLoaded();
}

void IExtractionListener.FireCompressedBytesRead(
@@ -31,18 +31,18 @@ public abstract class AbstractWritableArchive
}
}

private readonly List<TEntry> newEntries = new List<TEntry>();
private readonly List<TEntry> removedEntries = new List<TEntry>();
private readonly List<TEntry> newEntries = new();
private readonly List<TEntry> removedEntries = new();

private readonly List<TEntry> modifiedEntries = new List<TEntry>();
private readonly List<TEntry> modifiedEntries = new();
private bool hasModifications;
private bool pauseRebuilding;

internal AbstractWritableArchive(ArchiveType type)
: base(type) { }

internal AbstractWritableArchive(ArchiveType type, SourceStream srcStream)
: base(type, srcStream) { }
internal AbstractWritableArchive(ArchiveType type, SourceStream sourceStream)
: base(type, sourceStream) { }

public override ICollection<TEntry> Entries
{

@@ -120,6 +120,10 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
{
foreach (var path in Entries.Select(x => x.Key))
{
if (path is null)
{
continue;
}
var p = path.Replace('/', '\\');
if (p.Length > 0 && p[0] == '\\')
{
@@ -239,4 +239,6 @@ public static class ArchiveFactory
}
}
}

public static IArchiveFactory AutoFactory { get; } = new AutoArchiveFactory();
}
@@ -11,7 +11,7 @@ internal abstract class ArchiveVolumeFactory
FileInfo? item = null;

//split 001, 002 ...
Match m = Regex.Match(part1.Name, @"^(.*\.)([0-9]+)$", RegexOptions.IgnoreCase);
var m = Regex.Match(part1.Name, @"^(.*\.)([0-9]+)$", RegexOptions.IgnoreCase);
if (m.Success)
item = new FileInfo(
Path.Combine(
src/SharpCompress/Archives/AutoArchiveFactory.cs (new file, 27 lines)
@@ -0,0 +1,27 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Readers;

namespace SharpCompress.Archives;

class AutoArchiveFactory : IArchiveFactory
{
public string Name => nameof(AutoArchiveFactory);

public ArchiveType? KnownArchiveType => null;

public IEnumerable<string> GetSupportedExtensions() => throw new NotSupportedException();

public bool IsArchive(Stream stream, string? password = null) =>
throw new NotSupportedException();

public FileInfo? GetFilePart(int index, FileInfo part1) => throw new NotSupportedException();

public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
ArchiveFactory.Open(stream, readerOptions);

public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
ArchiveFactory.Open(fileInfo, readerOptions);
}
@@ -90,25 +90,23 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
{
stream.CheckNotNull(nameof(stream));
return new GZipArchive(
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
);
}

public static GZipArchive Create() => new GZipArchive();
public static GZipArchive Create() => new();

/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal GZipArchive(SourceStream srcStream)
: base(ArchiveType.Tar, srcStream) { }
/// <param name="sourceStream"></param>
private GZipArchive(SourceStream sourceStream)
: base(ArchiveType.GZip, sourceStream) { }

protected override IEnumerable<GZipVolume> LoadVolumes(SourceStream srcStream)
protected override IEnumerable<GZipVolume> LoadVolumes(SourceStream sourceStream)
{
srcStream.LoadAllParts();
var idx = 0;
return srcStream.Streams.Select(a => new GZipVolume(a, ReaderOptions, idx++));
sourceStream.LoadAllParts();
return sourceStream.Streams.Select(a => new GZipVolume(a, ReaderOptions, 0));
}

public static bool IsGZipFile(string filePath) => IsGZipFile(new FileInfo(filePath));

@@ -184,7 +182,11 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
{
using var entryStream = entry.OpenEntryStream();
writer.Write(entry.Key, entryStream, entry.LastModifiedTime);
writer.Write(
entry.Key.NotNull("Entry Key is null"),
entryStream,
entry.LastModifiedTime
);
}
}
@@ -6,7 +6,7 @@ namespace SharpCompress.Archives.GZip;

public class GZipArchiveEntry : GZipEntry, IArchiveEntry
{
internal GZipArchiveEntry(GZipArchive archive, GZipFilePart part)
internal GZipArchiveEntry(GZipArchive archive, GZipFilePart? part)
: base(part) => Archive = archive;

public virtual Stream OpenEntryStream()
@@ -1,5 +1,3 @@
#nullable disable

using System;
using System.Collections.Generic;
using System.IO;

@@ -32,7 +30,7 @@ internal sealed class GZipWritableArchiveEntry : GZipArchiveEntry, IWritableArch

public override long Crc => 0;

public override string Key { get; }
public override string? Key { get; }

public override long CompressedSize => 0;
@@ -17,15 +17,11 @@ public static class IArchiveEntryExtensions
streamListener.EnsureEntriesLoaded();
streamListener.FireEntryExtractionBegin(archiveEntry);
streamListener.FireFilePartExtractionBegin(
archiveEntry.Key,
archiveEntry.Key ?? "Key",
archiveEntry.Size,
archiveEntry.CompressedSize
);
var entryStream = archiveEntry.OpenEntryStream();
if (entryStream is null)
{
return;
}
using (entryStream)
{
using Stream s = new ListeningStream(streamListener, entryStream);
@@ -3,7 +3,6 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;

namespace SharpCompress.Archives;

@@ -55,14 +54,26 @@ public static class IArchiveExtensions
var entry = entries.Entry;
if (entry.IsDirectory)
{
var dirPath = Path.Combine(destination, entry.Key.NotNull("Entry Key is null"));
if (
Path.GetDirectoryName(dirPath + "/") is { } emptyDirectory
&& seenDirectories.Add(dirPath)
)
{
Directory.CreateDirectory(emptyDirectory);
}
continue;
}

// Create each directory
var path = Path.Combine(destination, entry.Key);
if (Path.GetDirectoryName(path) is { } directory && seenDirectories.Add(path))
// Create each directory if not already created
var path = Path.Combine(destination, entry.Key.NotNull("Entry Key is null"));
if (Path.GetDirectoryName(path) is { } directory)
{
Directory.CreateDirectory(directory);
if (!Directory.Exists(directory) && !seenDirectories.Contains(directory))
{
Directory.CreateDirectory(directory);
seenDirectories.Add(directory);
}
}

// Write file
@@ -13,7 +13,7 @@ namespace SharpCompress.Archives.Rar;
/// </summary>
internal class FileInfoRarArchiveVolume : RarVolume
{
internal FileInfoRarArchiveVolume(FileInfo fileInfo, ReaderOptions options, int index = 0)
internal FileInfoRarArchiveVolume(FileInfo fileInfo, ReaderOptions options, int index)
: base(StreamingMode.Seekable, fileInfo.OpenRead(), FixOptions(options), index)
{
FileInfo = fileInfo;
@@ -1,3 +1,4 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;

@@ -14,40 +15,39 @@ namespace SharpCompress.Archives.Rar;
public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
{
internal Lazy<IRarUnpack> UnpackV2017 { get; } =
new Lazy<IRarUnpack>(() => new Compressors.Rar.UnpackV2017.Unpack());
internal Lazy<IRarUnpack> UnpackV1 { get; } =
new Lazy<IRarUnpack>(() => new Compressors.Rar.UnpackV1.Unpack());
new(() => new Compressors.Rar.UnpackV2017.Unpack());
internal Lazy<IRarUnpack> UnpackV1 { get; } = new(() => new Compressors.Rar.UnpackV1.Unpack());

/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal RarArchive(SourceStream srcStream)
: base(ArchiveType.Rar, srcStream) { }
/// <param name="sourceStream"></param>
private RarArchive(SourceStream sourceStream)
: base(ArchiveType.Rar, sourceStream) { }

protected override IEnumerable<RarArchiveEntry> LoadEntries(IEnumerable<RarVolume> volumes) =>
RarArchiveEntryFactory.GetEntries(this, volumes, ReaderOptions);

protected override IEnumerable<RarVolume> LoadVolumes(SourceStream srcStream)
protected override IEnumerable<RarVolume> LoadVolumes(SourceStream sourceStream)
{
SrcStream.LoadAllParts(); //request all streams
var streams = SrcStream.Streams.ToArray();
var idx = 0;
sourceStream.LoadAllParts(); //request all streams
var streams = sourceStream.Streams.ToArray();
var i = 0;
if (streams.Length > 1 && IsRarFile(streams[1], ReaderOptions)) //test part 2 - true = multipart not split
{
SrcStream.IsVolumes = true;
sourceStream.IsVolumes = true;
streams[1].Position = 0;
SrcStream.Position = 0;
sourceStream.Position = 0;

return srcStream.Streams.Select(
a => new StreamRarArchiveVolume(a, ReaderOptions, idx++)
);
}
else //split mode or single file
{
return new StreamRarArchiveVolume(SrcStream, ReaderOptions, idx++).AsEnumerable();
return sourceStream.Streams.Select(a => new StreamRarArchiveVolume(
a,
ReaderOptions,
i++
));
}

//split mode or single file
return new StreamRarArchiveVolume(sourceStream, ReaderOptions, i++).AsEnumerable();
}

protected override IReader CreateReaderForSolidExtraction()

@@ -106,7 +106,7 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
public static RarArchive Open(Stream stream, ReaderOptions? options = null)
{
stream.CheckNotNull(nameof(stream));
return new RarArchive(new SourceStream(stream, i => null, options ?? new ReaderOptions()));
return new RarArchive(new SourceStream(stream, _ => null, options ?? new ReaderOptions()));
}

/// <summary>
@@ -11,7 +11,7 @@ internal static class RarArchiveVolumeFactory
FileInfo? item = null;

//new style rar - ..part1 | /part01 | part001 ....
Match m = Regex.Match(part1.Name, @"^(.*\.part)([0-9]+)(\.rar)$", RegexOptions.IgnoreCase);
var m = Regex.Match(part1.Name, @"^(.*\.part)([0-9]+)(\.rar)$", RegexOptions.IgnoreCase);
if (m.Success)
item = new FileInfo(
Path.Combine(
@@ -6,8 +6,8 @@ namespace SharpCompress.Archives.Rar;

internal class SeekableFilePart : RarFilePart
{
private readonly Stream stream;
private readonly string? password;
private readonly Stream _stream;
private readonly string? _password;

internal SeekableFilePart(
MarkHeader mh,

@@ -18,27 +18,27 @@ internal class SeekableFilePart : RarFilePart
)
: base(mh, fh, index)
{
this.stream = stream;
this.password = password;
_stream = stream;
_password = password;
}

internal override Stream GetCompressedStream()
{
stream.Position = FileHeader.DataStartPosition;
_stream.Position = FileHeader.DataStartPosition;

if (FileHeader.R4Salt != null)
{
var cryptKey = new CryptKey3(password!);
return new RarCryptoWrapper(stream, FileHeader.R4Salt, cryptKey);
var cryptKey = new CryptKey3(_password!);
return new RarCryptoWrapper(_stream, FileHeader.R4Salt, cryptKey);
}

if (FileHeader.Rar5CryptoInfo != null)
{
var cryptKey = new CryptKey5(password!, FileHeader.Rar5CryptoInfo);
return new RarCryptoWrapper(stream, FileHeader.Rar5CryptoInfo.Salt, cryptKey);
var cryptKey = new CryptKey5(_password!, FileHeader.Rar5CryptoInfo);
return new RarCryptoWrapper(_stream, FileHeader.Rar5CryptoInfo.Salt, cryptKey);
}

return stream;
return _stream;
}

internal override string FilePartName => "Unknown Stream - File Entry: " + FileHeader.FileName;
@@ -9,7 +9,7 @@ namespace SharpCompress.Archives.Rar;

internal class StreamRarArchiveVolume : RarVolume
{
internal StreamRarArchiveVolume(Stream stream, ReaderOptions options, int index = 0)
internal StreamRarArchiveVolume(Stream stream, ReaderOptions options, int index)
: base(StreamingMode.Seekable, stream, options, index) { }

internal override IEnumerable<RarFilePart> ReadFileParts() => GetVolumeFileParts();
@@ -1,5 +1,3 @@
#nullable disable

using System;
using System.Collections.Generic;
using System.IO;

@@ -14,14 +12,14 @@ namespace SharpCompress.Archives.SevenZip;

public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVolume>
{
private ArchiveDatabase database;
private ArchiveDatabase? _database;

/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(string filePath, ReaderOptions readerOptions = null)
public static SevenZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.CheckNotNullOrEmpty("filePath");
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());

@@ -32,7 +30,7 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
public static SevenZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.CheckNotNull("fileInfo");
return new SevenZipArchive(

@@ -51,7 +49,7 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(
IEnumerable<FileInfo> fileInfos,
ReaderOptions readerOptions = null
ReaderOptions? readerOptions = null
)
{
fileInfos.CheckNotNull(nameof(fileInfos));

@@ -72,7 +70,7 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(
IEnumerable<Stream> streams,
ReaderOptions readerOptions = null
ReaderOptions? readerOptions = null
)
{
streams.CheckNotNull(nameof(streams));

@@ -91,27 +89,25 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(Stream stream, ReaderOptions readerOptions = null)
public static SevenZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.CheckNotNull("stream");
return new SevenZipArchive(
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
);
}

/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal SevenZipArchive(SourceStream srcStream)
: base(ArchiveType.SevenZip, srcStream) { }
/// <param name="sourceStream"></param>
private SevenZipArchive(SourceStream sourceStream)
: base(ArchiveType.SevenZip, sourceStream) { }

protected override IEnumerable<SevenZipVolume> LoadVolumes(SourceStream srcStream)
protected override IEnumerable<SevenZipVolume> LoadVolumes(SourceStream sourceStream)
{
SrcStream.LoadAllParts(); //request all streams
var idx = 0;
return new SevenZipVolume(srcStream, ReaderOptions, idx++).AsEnumerable(); //simple single volume or split, multivolume not supported
sourceStream.NotNull("SourceStream is null").LoadAllParts(); //request all streams
return new SevenZipVolume(sourceStream, ReaderOptions, 0).AsEnumerable(); //simple single volume or split, multivolume not supported
}

public static bool IsSevenZipFile(string filePath) => IsSevenZipFile(new FileInfo(filePath));
@@ -135,13 +131,17 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
{
var stream = volumes.Single().Stream;
LoadFactory(stream);
var entries = new SevenZipArchiveEntry[database._files.Count];
for (var i = 0; i < database._files.Count; i++)
if (_database is null)
{
var file = database._files[i];
return Enumerable.Empty<SevenZipArchiveEntry>();
}
var entries = new SevenZipArchiveEntry[_database._files.Count];
for (var i = 0; i < _database._files.Count; i++)
{
var file = _database._files[i];
entries[i] = new SevenZipArchiveEntry(
this,
new SevenZipFilePart(stream, database, i, file, ReaderOptions.ArchiveEncoding)
new SevenZipFilePart(stream, _database, i, file, ReaderOptions.ArchiveEncoding)
);
}
foreach (var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder))

@@ -159,12 +159,12 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol

private void LoadFactory(Stream stream)
{
if (database is null)
if (_database is null)
{
stream.Position = 0;
var reader = new ArchiveReader();
reader.Open(stream);
database = reader.ReadDatabase(new PasswordProvider(ReaderOptions.Password));
reader.Open(stream, lookForHeader: ReaderOptions.LookForHeader);
_database = reader.ReadDatabase(new PasswordProvider(ReaderOptions.Password));
}
}

@@ -180,14 +180,14 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
}
}

private static ReadOnlySpan<byte> SIGNATURE =>
private static ReadOnlySpan<byte> Signature =>
new byte[] { (byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C };

private static bool SignatureMatch(Stream stream)
{
var reader = new BinaryReader(stream);
ReadOnlySpan<byte> signatureBytes = reader.ReadBytes(6);
return signatureBytes.SequenceEqual(SIGNATURE);
return signatureBytes.SequenceEqual(Signature);
}

protected override IReader CreateReaderForSolidExtraction() =>

@@ -196,30 +196,24 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
public override bool IsSolid =>
Entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder).Count() > 1;

public override long TotalSize
{
get
{
var i = Entries.Count;
return database._packSizes.Aggregate(0L, (total, packSize) => total + packSize);
}
}
public override long TotalSize =>
_database?._packSizes.Aggregate(0L, (total, packSize) => total + packSize) ?? 0;

private sealed class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
{
private readonly SevenZipArchive archive;
private CFolder currentFolder;
private Stream currentStream;
private CFileItem currentItem;
private readonly SevenZipArchive _archive;
private CFolder? _currentFolder;
private Stream? _currentStream;
private CFileItem? _currentItem;

internal SevenZipReader(ReaderOptions readerOptions, SevenZipArchive archive)
: base(readerOptions, ArchiveType.SevenZip) => this.archive = archive;
: base(readerOptions, ArchiveType.SevenZip) => this._archive = archive;

public override SevenZipVolume Volume => archive.Volumes.Single();
public override SevenZipVolume Volume => _archive.Volumes.Single();

protected override IEnumerable<SevenZipEntry> GetEntries(Stream stream)
{
var entries = archive.Entries.ToList();
var entries = _archive.Entries.ToList();
stream.Position = 0;
foreach (var dir in entries.Where(x => x.IsDirectory))
{

@@ -229,37 +223,42 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder)
)
{
currentFolder = group.Key;
_currentFolder = group.Key;
if (group.Key is null)
{
currentStream = Stream.Null;
_currentStream = Stream.Null;
}
else
{
currentStream = archive.database.GetFolderStream(
_currentStream = _archive._database?.GetFolderStream(
stream,
currentFolder,
_currentFolder,
new PasswordProvider(Options.Password)
);
}
foreach (var entry in group)
{
currentItem = entry.FilePart.Header;
_currentItem = entry.FilePart.Header;
yield return entry;
}
}
}

protected override EntryStream GetEntryStream() =>
CreateEntryStream(new ReadOnlySubStream(currentStream, currentItem.Size));
CreateEntryStream(
new ReadOnlySubStream(
_currentStream.NotNull("currentStream is not null"),
_currentItem?.Size ?? 0
)
);
}

private class PasswordProvider : IPasswordProvider
{
private readonly string _password;
private readonly string? _password;

public PasswordProvider(string password) => _password = password;
public PasswordProvider(string? password) => _password = password;

public string CryptoGetTextPassword() => _password;
public string? CryptoGetTextPassword() => _password;
}
}
@@ -114,7 +114,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
var tarHeader = new TarHeader(new ArchiveEncoding());
var readSucceeded = tarHeader.Read(new BinaryReader(stream));
var isEmptyArchive =
tarHeader.Name.Length == 0
tarHeader.Name?.Length == 0
&& tarHeader.Size == 0
&& Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
return readSucceeded || isEmptyArchive;

@@ -123,22 +123,20 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
return false;
}

protected override IEnumerable<TarVolume> LoadVolumes(SourceStream srcStream)
protected override IEnumerable<TarVolume> LoadVolumes(SourceStream sourceStream)
{
SrcStream.LoadAllParts(); //request all streams
var idx = 0;
return new TarVolume(srcStream, ReaderOptions, idx++).AsEnumerable(); //simple single volume or split, multivolume not supported
sourceStream.NotNull("SourceStream is null").LoadAllParts(); //request all streams
return new TarVolume(sourceStream, ReaderOptions, 1).AsEnumerable(); //simple single volume or split, multivolume not supported
}

/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal TarArchive(SourceStream srcStream)
: base(ArchiveType.Tar, srcStream) { }
/// <param name="sourceStream"></param>
private TarArchive(SourceStream sourceStream)
: base(ArchiveType.Tar, sourceStream) { }

internal TarArchive()
private TarArchive()
: base(ArchiveType.Tar) { }

protected override IEnumerable<TarArchiveEntry> LoadEntries(IEnumerable<TarVolume> volumes)

@@ -192,10 +190,14 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
);
}
}
else
{
throw new IncompleteArchiveException("Failed to read TAR header");
}
}
}

public static TarArchive Create() => new TarArchive();
public static TarArchive Create() => new();

protected override TarArchiveEntry CreateEntryInternal(
string filePath,

@@ -225,7 +227,12 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
{
using var entryStream = entry.OpenEntryStream();
writer.Write(entry.Key, entryStream, entry.LastModifiedTime, entry.Size);
writer.Write(
entry.Key.NotNull("Entry Key is null"),
entryStream,
entry.LastModifiedTime,
entry.Size
);
}
}
@@ -7,7 +7,7 @@ namespace SharpCompress.Archives.Tar;
|
||||
|
||||
public class TarArchiveEntry : TarEntry, IArchiveEntry
|
||||
{
|
||||
internal TarArchiveEntry(TarArchive archive, TarFilePart part, CompressionType compressionType)
|
||||
internal TarArchiveEntry(TarArchive archive, TarFilePart? part, CompressionType compressionType)
|
||||
: base(part, compressionType) => Archive = archive;
|
||||
|
||||
public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream();
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
#nullable disable
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
|
||||
@@ -16,10 +16,7 @@ namespace SharpCompress.Archives.Zip;
|
||||
|
||||
public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
{
|
||||
#nullable disable
|
||||
private readonly SeekableZipHeaderFactory headerFactory;
|
||||
|
||||
#nullable enable
|
||||
private readonly SeekableZipHeaderFactory? headerFactory;
|
||||
|
||||
/// <summary>
|
||||
/// Gets or sets the compression level applied to files added to the archive,
|
||||
@@ -30,13 +27,13 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
/// <summary>
|
||||
/// Constructor with a SourceStream able to handle FileInfo and Streams.
|
||||
/// </summary>
|
||||
/// <param name="srcStream"></param>
|
||||
/// <param name="sourceStream"></param>
|
||||
/// <param name="options"></param>
|
||||
internal ZipArchive(SourceStream srcStream)
|
||||
: base(ArchiveType.Zip, srcStream) =>
|
||||
internal ZipArchive(SourceStream sourceStream)
|
||||
: base(ArchiveType.Zip, sourceStream) =>
|
||||
headerFactory = new SeekableZipHeaderFactory(
|
||||
srcStream.ReaderOptions.Password,
|
||||
srcStream.ReaderOptions.ArchiveEncoding
|
||||
sourceStream.ReaderOptions.Password,
|
||||
sourceStream.ReaderOptions.ArchiveEncoding
|
||||
);
|
||||
|
||||
/// <summary>
|
||||
@@ -189,21 +186,21 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
}
|
||||
}
|
||||
|
||||
protected override IEnumerable<ZipVolume> LoadVolumes(SourceStream srcStream)
|
||||
protected override IEnumerable<ZipVolume> LoadVolumes(SourceStream stream)
|
||||
{
|
||||
SrcStream.LoadAllParts(); //request all streams
|
||||
SrcStream.Position = 0;
|
||||
stream.LoadAllParts(); //request all streams
|
||||
stream.Position = 0;
|
||||
|
||||
var streams = SrcStream.Streams.ToList();
|
||||
var streams = stream.Streams.ToList();
|
||||
var idx = 0;
|
||||
if (streams.Count > 1) //test part 2 - true = multipart not split
|
||||
if (streams.Count() > 1) //test part 2 - true = multipart not split
|
||||
{
|
||||
streams[1].Position += 4; //skip the POST_DATA_DESCRIPTOR to prevent an exception
|
||||
var isZip = IsZipFile(streams[1], ReaderOptions.Password);
|
||||
streams[1].Position -= 4;
|
||||
if (isZip)
|
||||
{
|
||||
SrcStream.IsVolumes = true;
|
||||
stream.IsVolumes = true;
|
||||
|
||||
var tmp = streams[0]; //arcs as zip, z01 ... swap the zip the end
|
||||
streams.RemoveAt(0);
|
||||
@@ -215,7 +212,7 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
}
|
||||
|
||||
//split mode or single file
|
||||
return new ZipVolume(SrcStream, ReaderOptions, idx++).AsEnumerable();
|
||||
return new ZipVolume(stream, ReaderOptions, idx++).AsEnumerable();
|
||||
}
|
||||
|
||||
internal ZipArchive()
|
||||
@@ -224,14 +221,13 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
protected override IEnumerable<ZipArchiveEntry> LoadEntries(IEnumerable<ZipVolume> volumes)
|
||||
{
|
||||
var vols = volumes.ToArray();
|
||||
foreach (var h in headerFactory.ReadSeekableHeader(vols.Last().Stream))
|
||||
foreach (var h in headerFactory.NotNull().ReadSeekableHeader(vols.Last().Stream))
|
||||
{
|
||||
if (h != null)
|
||||
{
|
||||
switch (h.ZipHeaderType)
|
||||
{
|
||||
case ZipHeaderType.DirectoryEntry:
|
||||
|
||||
{
|
||||
var deh = (DirectoryEntryHeader)h;
|
||||
Stream s;
|
||||
@@ -254,14 +250,14 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
|
||||
yield return new ZipArchiveEntry(
|
||||
this,
|
||||
new SeekableZipFilePart(headerFactory, deh, s)
|
||||
new SeekableZipFilePart(headerFactory.NotNull(), deh, s)
|
||||
);
|
||||
}
|
||||
break;
|
||||
case ZipHeaderType.DirectoryEnd:
|
||||
{
|
||||
var bytes = ((DirectoryEndHeader)h).Comment ?? Array.Empty<byte>();
|
||||
volumes.Last().Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
|
||||
vols.Last().Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
|
||||
yield break;
|
||||
}
|
||||
}
|
||||
@@ -282,7 +278,11 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
|
||||
{
|
||||
using var entryStream = entry.OpenEntryStream();
|
||||
writer.Write(entry.Key, entryStream, entry.LastModifiedTime);
|
||||
writer.Write(
|
||||
entry.Key.NotNull("Entry Key is null"),
|
||||
entryStream,
|
||||
entry.LastModifiedTime
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -294,7 +294,7 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
|
||||
bool closeStream
|
||||
) => new ZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream);
|
||||
|
||||
public static ZipArchive Create() => new ZipArchive();
|
||||
public static ZipArchive Create() => new();
|
||||
|
||||
protected override IReader CreateReaderForSolidExtraction()
|
||||
{
|
||||
|
||||
@@ -12,7 +12,7 @@ internal static class ZipArchiveVolumeFactory
|
||||
|
||||
//load files with zip/zipx first. Swapped to end once loaded in ZipArchive
|
||||
//new style .zip, z01.. | .zipx, zx01 - if the numbers go beyond 99 then they use 100 ...1000 etc
|
||||
Match m = Regex.Match(part1.Name, @"^(.*\.)(zipx?|zx?[0-9]+)$", RegexOptions.IgnoreCase);
|
||||
var m = Regex.Match(part1.Name, @"^(.*\.)(zipx?|zx?[0-9]+)$", RegexOptions.IgnoreCase);
|
||||
if (m.Success)
|
||||
item = new FileInfo(
|
||||
Path.Combine(
|
||||
|
||||
src/SharpCompress/BufferPool.cs (new file, 33 lines)
@@ -0,0 +1,33 @@
|
||||
using System.Buffers;
|
||||
|
||||
namespace SharpCompress;
|
||||
|
||||
internal static class BufferPool
|
||||
{
|
||||
/// <summary>
|
||||
/// gets a buffer from the pool
|
||||
/// </summary>
|
||||
/// <param name="bufferSize">size of the buffer</param>
|
||||
/// <returns>the buffer</returns>
|
||||
public static byte[] Rent(int bufferSize)
|
||||
{
|
||||
#if NETCOREAPP || NETSTANDARD2_1_OR_GREATER
|
||||
return ArrayPool<byte>.Shared.Rent(bufferSize);
|
||||
#else
|
||||
return new byte[bufferSize];
|
||||
#endif
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// returns a buffer to the pool
|
||||
/// </summary>
|
||||
/// <param name="buffer">the buffer to return</param>
|
||||
public static void Return(byte[] buffer)
|
||||
{
|
||||
#if NETCOREAPP || NETSTANDARD2_1_OR_GREATER
|
||||
ArrayPool<byte>.Shared.Return(buffer);
|
||||
#else
|
||||
// no-op
|
||||
#endif
|
||||
}
|
||||
}
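A minimal usage sketch for the BufferPool helper above; the CopyExample caller is hypothetical and not part of this change, and it assumes same-assembly access since BufferPool is internal. Rent a scratch buffer, use it, and return it in a finally block so pooled arrays are not leaked on targets where ArrayPool is available.

using System;
using System.IO;

internal static class CopyExample
{
    // Copies one stream to another through a pooled scratch buffer.
    public static void Copy(Stream source, Stream destination)
    {
        var buffer = BufferPool.Rent(81920); // ArrayPool may hand back a larger array than requested
        try
        {
            int read;
            while ((read = source.Read(buffer, 0, buffer.Length)) > 0)
            {
                destination.Write(buffer, 0, read);
            }
        }
        finally
        {
            BufferPool.Return(buffer); // no-op on targets without ArrayPool
        }
    }
}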
|
||||
@@ -8,12 +8,12 @@ public class ArchiveEncoding
|
||||
/// <summary>
|
||||
/// Default encoding to use when archive format doesn't specify one.
|
||||
/// </summary>
|
||||
public Encoding Default { get; set; }
|
||||
public Encoding? Default { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// ArchiveEncoding used by encryption schemes which don't comply with RFC 2898.
|
||||
/// </summary>
|
||||
public Encoding Password { get; set; }
|
||||
public Encoding? Password { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Set this encoding when you want to force it for all encoding operations.
|
||||
@@ -50,6 +50,8 @@ public class ArchiveEncoding
|
||||
|
||||
public Encoding GetEncoding() => Forced ?? Default ?? Encoding.UTF8;
|
||||
|
||||
public Encoding GetPasswordEncoding() => Password ?? Encoding.UTF8;
|
||||
|
||||
public Func<byte[], int, int, string> GetDecoder() =>
|
||||
CustomDecoder ?? ((bytes, index, count) => GetEncoding().GetString(bytes, index, count));
|
||||
}
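Default and Password are now nullable, with GetEncoding and GetPasswordEncoding supplying the UTF-8 fallback. A hedged configuration sketch follows; the password and codepage values are illustrative and not taken from the diff.

using System.Text;
using SharpCompress.Common;
using SharpCompress.Readers;

var options = new ReaderOptions
{
    Password = "secret", // illustrative
    ArchiveEncoding = new ArchiveEncoding
    {
        // Some legacy archivers derive keys from a single-byte codepage rather than UTF-8.
        // Codepage encodings may require registering System.Text.Encoding.CodePages on .NET Core.
        Password = Encoding.GetEncoding(437),
    },
};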
|
||||
|
||||
@@ -14,5 +14,12 @@ public enum CompressionType
|
||||
LZip,
|
||||
Xz,
|
||||
Unknown,
|
||||
Deflate64
|
||||
Deflate64,
|
||||
Shrink,
|
||||
Lzw,
|
||||
Reduce1,
|
||||
Reduce2,
|
||||
Reduce3,
|
||||
Reduce4,
|
||||
Explode
|
||||
}
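Shrink, Lzw, the Reduce variants and Explode cover legacy PKZIP methods. A small consumer-side helper sketch (hypothetical, not part of the diff) for flagging entries that use one of the newly listed values:

using SharpCompress.Common;

internal static class LegacyZip
{
    // True for the legacy PKZIP compression methods added to the enum above.
    public static bool IsLegacyMethod(CompressionType type) =>
        type is CompressionType.Shrink
            or CompressionType.Lzw
            or CompressionType.Reduce1
            or CompressionType.Reduce2
            or CompressionType.Reduce3
            or CompressionType.Reduce4
            or CompressionType.Explode;
}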
|
||||
|
||||
@@ -14,7 +14,7 @@ public abstract class Entry : IEntry
|
||||
/// <summary>
|
||||
/// The string key of the file internal to the Archive.
|
||||
/// </summary>
|
||||
public abstract string Key { get; }
|
||||
public abstract string? Key { get; }
|
||||
|
||||
/// <summary>
|
||||
/// The target of a symlink entry internal to the Archive. Will be null if not a symlink.
|
||||
@@ -71,11 +71,11 @@ public abstract class Entry : IEntry
|
||||
/// </summary>
|
||||
public abstract bool IsSplitAfter { get; }
|
||||
|
||||
public int VolumeIndexFirst => Parts?.FirstOrDefault()?.Index ?? 0;
|
||||
public int VolumeIndexLast => Parts?.LastOrDefault()?.Index ?? 0;
|
||||
public int VolumeIndexFirst => Parts.FirstOrDefault()?.Index ?? 0;
|
||||
public int VolumeIndexLast => Parts.LastOrDefault()?.Index ?? 0;
|
||||
|
||||
/// <inheritdoc/>
|
||||
public override string ToString() => Key;
|
||||
public override string ToString() => Key ?? "Entry";
|
||||
|
||||
internal abstract IEnumerable<FilePart> Parts { get; }
|
||||
|
||||
|
||||
@@ -16,7 +16,7 @@ internal static class ExtractionMethods
|
||||
)
|
||||
{
|
||||
string destinationFileName;
|
||||
string fullDestinationDirectoryPath = Path.GetFullPath(destinationDirectory);
|
||||
var fullDestinationDirectoryPath = Path.GetFullPath(destinationDirectory);
|
||||
|
||||
//check for trailing slash.
|
||||
if (
|
||||
@@ -36,11 +36,13 @@ internal static class ExtractionMethods
|
||||
|
||||
options ??= new ExtractionOptions() { Overwrite = true };
|
||||
|
||||
string file = Path.GetFileName(entry.Key);
|
||||
var file = Path.GetFileName(entry.Key.NotNull("Entry Key is null")).NotNull("File is null");
|
||||
file = Utility.ReplaceInvalidFileNameChars(file);
|
||||
if (options.ExtractFullPath)
|
||||
{
|
||||
string folder = Path.GetDirectoryName(entry.Key)!;
|
||||
string destdir = Path.GetFullPath(Path.Combine(fullDestinationDirectoryPath, folder));
|
||||
var folder = Path.GetDirectoryName(entry.Key.NotNull("Entry Key is null"))
|
||||
.NotNull("Directory is null");
|
||||
var destdir = Path.GetFullPath(Path.Combine(fullDestinationDirectoryPath, folder));
|
||||
|
||||
if (!Directory.Exists(destdir))
|
||||
{
|
||||
@@ -102,7 +104,7 @@ internal static class ExtractionMethods
|
||||
}
|
||||
else
|
||||
{
|
||||
FileMode fm = FileMode.Create;
|
||||
var fm = FileMode.Create;
|
||||
options ??= new ExtractionOptions() { Overwrite = true };
|
||||
|
||||
if (!options.Overwrite)
|
||||
|
||||
@@ -8,7 +8,7 @@ public abstract class FilePart
|
||||
|
||||
internal ArchiveEncoding ArchiveEncoding { get; }
|
||||
|
||||
internal abstract string FilePartName { get; }
|
||||
internal abstract string? FilePartName { get; }
|
||||
public int Index { get; set; }
|
||||
|
||||
internal abstract Stream GetCompressedStream();
|
||||
|
||||
@@ -6,23 +6,23 @@ namespace SharpCompress.Common.GZip;
|
||||
|
||||
public class GZipEntry : Entry
|
||||
{
|
||||
private readonly GZipFilePart _filePart;
|
||||
private readonly GZipFilePart? _filePart;
|
||||
|
||||
internal GZipEntry(GZipFilePart filePart) => _filePart = filePart;
|
||||
internal GZipEntry(GZipFilePart? filePart) => _filePart = filePart;
|
||||
|
||||
public override CompressionType CompressionType => CompressionType.GZip;
|
||||
|
||||
public override long Crc => _filePart.Crc ?? 0;
|
||||
public override long Crc => _filePart?.Crc ?? 0;
|
||||
|
||||
public override string Key => _filePart.FilePartName;
|
||||
public override string? Key => _filePart?.FilePartName;
|
||||
|
||||
public override string? LinkTarget => null;
|
||||
|
||||
public override long CompressedSize => 0;
|
||||
|
||||
public override long Size => _filePart.UncompressedSize ?? 0;
|
||||
public override long Size => _filePart?.UncompressedSize ?? 0;
|
||||
|
||||
public override DateTime? LastModifiedTime => _filePart.DateModified;
|
||||
public override DateTime? LastModifiedTime => _filePart?.DateModified;
|
||||
|
||||
public override DateTime? CreatedTime => null;
|
||||
|
||||
@@ -36,7 +36,7 @@ public class GZipEntry : Entry
|
||||
|
||||
public override bool IsSplitAfter => false;
|
||||
|
||||
internal override IEnumerable<FilePart> Parts => _filePart.AsEnumerable<FilePart>();
|
||||
internal override IEnumerable<FilePart> Parts => _filePart.Empty();
|
||||
|
||||
internal static IEnumerable<GZipEntry> GetEntries(Stream stream, OptionsBase options)
|
||||
{
|
||||
|
||||
@@ -34,7 +34,7 @@ internal sealed class GZipFilePart : FilePart
|
||||
internal uint? Crc { get; private set; }
|
||||
internal uint? UncompressedSize { get; private set; }
|
||||
|
||||
internal override string FilePartName => _name!;
|
||||
internal override string? FilePartName => _name;
|
||||
|
||||
internal override Stream GetCompressedStream() =>
|
||||
new DeflateStream(_stream, CompressionMode.Decompress, CompressionLevel.Default);
|
||||
|
||||
@@ -5,7 +5,7 @@ namespace SharpCompress.Common.GZip;
|
||||
|
||||
public class GZipVolume : Volume
|
||||
{
|
||||
public GZipVolume(Stream stream, ReaderOptions options, int index = 0)
|
||||
public GZipVolume(Stream stream, ReaderOptions? options, int index)
|
||||
: base(stream, options, index) { }
|
||||
|
||||
public GZipVolume(FileInfo fileInfo, ReaderOptions options)
|
||||
|
||||
@@ -9,7 +9,7 @@ public interface IEntry
|
||||
long CompressedSize { get; }
|
||||
long Crc { get; }
|
||||
DateTime? CreatedTime { get; }
|
||||
string Key { get; }
|
||||
string? Key { get; }
|
||||
string? LinkTarget { get; }
|
||||
bool IsDirectory { get; }
|
||||
bool IsEncrypted { get; }
|
||||
|
||||
@@ -6,5 +6,5 @@ public interface IVolume : IDisposable
|
||||
{
|
||||
int Index { get; }
|
||||
|
||||
string FileName { get; }
|
||||
string? FileName { get; }
|
||||
}
|
||||
|
||||
@@ -7,5 +7,5 @@ public class OptionsBase
|
||||
/// </summary>
|
||||
public bool LeaveStreamOpen { get; set; } = true;
|
||||
|
||||
public ArchiveEncoding ArchiveEncoding { get; set; } = new ArchiveEncoding();
|
||||
public ArchiveEncoding ArchiveEncoding { get; set; } = new();
|
||||
}
|
||||
|
||||
@@ -1,12 +0,0 @@
|
||||
using System;
|
||||
|
||||
namespace SharpCompress.Common;
|
||||
|
||||
public class PasswordProtectedException : ExtractionException
|
||||
{
|
||||
public PasswordProtectedException(string message)
|
||||
: base(message) { }
|
||||
|
||||
public PasswordProtectedException(string message, Exception inner)
|
||||
: base(message, inner) { }
|
||||
}
|
||||
@@ -12,10 +12,7 @@ internal class CryptKey3 : ICryptKey
|
||||
|
||||
private string _password;
|
||||
|
||||
public CryptKey3(string password)
|
||||
{
|
||||
_password = password ?? "";
|
||||
}
|
||||
public CryptKey3(string password) => _password = password ?? "";
|
||||
|
||||
public ICryptoTransform Transformer(byte[] salt)
|
||||
{
|
||||
|
||||
@@ -34,35 +34,33 @@ internal class CryptKey5 : ICryptKey
|
||||
int keyLength
|
||||
)
|
||||
{
|
||||
using (HMACSHA256 hmac = new HMACSHA256(Encoding.UTF8.GetBytes(password)))
|
||||
using var hmac = new HMACSHA256(Encoding.UTF8.GetBytes(password));
|
||||
var block = hmac.ComputeHash(salt);
|
||||
var finalHash = (byte[])block.Clone();
|
||||
|
||||
var loop = new int[] { iterations, 17, 17 };
|
||||
var res = new List<byte[]> { };
|
||||
|
||||
for (var x = 0; x < 3; x++)
|
||||
{
|
||||
byte[] block = hmac.ComputeHash(salt);
|
||||
byte[] finalHash = (byte[])block.Clone();
|
||||
|
||||
var loop = new int[] { iterations, 17, 17 };
|
||||
var res = new List<byte[]> { };
|
||||
|
||||
for (int x = 0; x < 3; x++)
|
||||
for (var i = 1; i < loop[x]; i++)
|
||||
{
|
||||
for (int i = 1; i < loop[x]; i++)
|
||||
block = hmac.ComputeHash(block);
|
||||
for (var j = 0; j < finalHash.Length; j++)
|
||||
{
|
||||
block = hmac.ComputeHash(block);
|
||||
for (int j = 0; j < finalHash.Length; j++)
|
||||
{
|
||||
finalHash[j] ^= block[j];
|
||||
}
|
||||
finalHash[j] ^= block[j];
|
||||
}
|
||||
|
||||
res.Add((byte[])finalHash.Clone());
|
||||
}
|
||||
|
||||
return res;
|
||||
res.Add((byte[])finalHash.Clone());
|
||||
}
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
public ICryptoTransform Transformer(byte[] salt)
|
||||
{
|
||||
int iterations = (1 << _cryptoInfo.LG2Count); // Adjust the number of iterations as needed
|
||||
var iterations = (1 << _cryptoInfo.LG2Count); // Adjust the number of iterations as needed
|
||||
|
||||
var salt_rar5 = salt.Concat(new byte[] { 0, 0, 0, 1 });
|
||||
var derivedKey = GenerateRarPBKDF2Key(
|
||||
@@ -76,7 +74,7 @@ internal class CryptKey5 : ICryptKey
|
||||
|
||||
_pswCheck = new byte[EncryptionConstV5.SIZE_PSWCHECK];
|
||||
|
||||
for (int i = 0; i < SHA256_DIGEST_SIZE; i++)
|
||||
for (var i = 0; i < SHA256_DIGEST_SIZE; i++)
|
||||
{
|
||||
_pswCheck[i % EncryptionConstV5.SIZE_PSWCHECK] ^= derivedKey[2][i];
|
||||
}
|
||||
|
||||
@@ -12,10 +12,8 @@ internal class ArchiveCryptHeader : RarHeader
|
||||
public ArchiveCryptHeader(RarHeader header, RarCrcBinaryReader reader)
|
||||
: base(header, reader, HeaderType.Crypt) { }
|
||||
|
||||
public Rar5CryptoInfo CryptInfo = new Rar5CryptoInfo();
|
||||
public Rar5CryptoInfo CryptInfo = new();
|
||||
|
||||
protected override void ReadFinish(MarkingBinaryReader reader)
|
||||
{
|
||||
protected override void ReadFinish(MarkingBinaryReader reader) =>
|
||||
CryptInfo = new Rar5CryptoInfo(reader, false);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -109,7 +109,7 @@ internal class FileHeader : RarHeader
|
||||
const ushort FHEXTRA_HASH = 0x02;
|
||||
const ushort FHEXTRA_HTIME = 0x03;
|
||||
// const ushort FHEXTRA_VERSION = 0x04;
|
||||
// const ushort FHEXTRA_REDIR = 0x05;
|
||||
const ushort FHEXTRA_REDIR = 0x05;
|
||||
// const ushort FHEXTRA_UOWNER = 0x06;
|
||||
// const ushort FHEXTRA_SUBDATA = 0x07;
|
||||
|
||||
@@ -120,9 +120,7 @@ internal class FileHeader : RarHeader
|
||||
var type = reader.ReadRarVIntUInt16();
|
||||
switch (type)
|
||||
{
|
||||
//TODO
|
||||
case FHEXTRA_CRYPT: // file encryption
|
||||
|
||||
{
|
||||
Rar5CryptoInfo = new Rar5CryptoInfo(reader, true);
|
||||
|
||||
@@ -133,7 +131,6 @@ internal class FileHeader : RarHeader
|
||||
}
|
||||
break;
|
||||
case FHEXTRA_HASH:
|
||||
|
||||
{
|
||||
const uint FHEXTRA_HASH_BLAKE2 = 0x0;
|
||||
// const uint HASH_BLAKE2 = 0x03;
|
||||
@@ -147,7 +144,6 @@ internal class FileHeader : RarHeader
|
||||
}
|
||||
break;
|
||||
case FHEXTRA_HTIME: // file time
|
||||
|
||||
{
|
||||
var flags = reader.ReadRarVIntUInt16();
|
||||
var isWindowsTime = (flags & 1) == 0;
|
||||
@@ -171,11 +167,16 @@ internal class FileHeader : RarHeader
|
||||
//
|
||||
// }
|
||||
// break;
|
||||
// case FHEXTRA_REDIR: // file system redirection
|
||||
// {
|
||||
//
|
||||
// }
|
||||
// break;
|
||||
case FHEXTRA_REDIR: // file system redirection
|
||||
{
|
||||
RedirType = reader.ReadRarVIntByte();
|
||||
RedirFlags = reader.ReadRarVIntByte();
|
||||
var nn = reader.ReadRarVIntUInt16();
|
||||
var bb = reader.ReadBytes(nn);
|
||||
RedirTargetName = ConvertPathV5(Encoding.UTF8.GetString(bb, 0, bb.Length));
|
||||
}
|
||||
break;
|
||||
//TODO
|
||||
// case FHEXTRA_UOWNER: // unix owner
|
||||
// {
|
||||
//
|
||||
@@ -189,6 +190,7 @@ internal class FileHeader : RarHeader
|
||||
|
||||
default:
|
||||
// skip unknown record types to allow new record types to be added in the future
|
||||
//Console.WriteLine($"unhandled rar header field type {type}");
|
||||
break;
|
||||
}
|
||||
// drain any trailing bytes of extra record
|
||||
@@ -278,7 +280,6 @@ internal class FileHeader : RarHeader
|
||||
switch (HeaderCode)
|
||||
{
|
||||
case HeaderCodeV.RAR4_FILE_HEADER:
|
||||
|
||||
{
|
||||
if (HasFlag(FileFlagsV4.UNICODE))
|
||||
{
|
||||
@@ -305,7 +306,6 @@ internal class FileHeader : RarHeader
|
||||
}
|
||||
break;
|
||||
case HeaderCodeV.RAR4_NEW_SUB_HEADER:
|
||||
|
||||
{
|
||||
var datasize = HeaderSize - newLhdSize - nameSize;
|
||||
if (HasFlag(FileFlagsV4.SALT))
|
||||
@@ -437,6 +437,12 @@ internal class FileHeader : RarHeader
|
||||
|
||||
public bool IsSolid { get; private set; }
|
||||
|
||||
public byte RedirType { get; private set; }
|
||||
public bool IsRedir => RedirType != 0;
|
||||
public byte RedirFlags { get; private set; }
|
||||
public bool IsRedirDirectory => (RedirFlags & RedirFlagV5.DIRECTORY) != 0;
|
||||
public string RedirTargetName { get; private set; }
|
||||
|
||||
// unused for UnpackV1 implementation (limitation)
|
||||
internal size_t WindowSize { get; private set; }
|
||||
|
||||
|
||||
@@ -157,3 +157,17 @@ internal static class EndArchiveFlagsV5
|
||||
{
|
||||
public const ushort HAS_NEXT_VOLUME = 0x0001;
|
||||
}
|
||||
|
||||
internal static class RedirTypeV5
|
||||
{
|
||||
public const byte UNIX_SYMLINK = 0x0001;
|
||||
public const byte WIN_SYMLINK = 0x0002;
|
||||
public const byte WIN_JUNCTION = 0x0003;
|
||||
public const byte HARD_LINK = 0x0004;
|
||||
public const byte FILE_COPY = 0x0005;
|
||||
}
|
||||
|
||||
internal static class RedirFlagV5
|
||||
{
|
||||
public const byte DIRECTORY = 0x0001;
|
||||
}
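These RedirType/RedirFlags records surface on RarEntry as IsRedir and RedirTargetName later in this change. A hedged consumer-side sketch that lists redirect entries (symlinks, junctions, hard links, file copies) instead of extracting them:

using System;
using SharpCompress.Archives.Rar;

internal static class RedirectListing
{
    public static void Print(RarArchive archive)
    {
        foreach (var entry in archive.Entries)
        {
            if (entry.IsRedir)
            {
                // RedirTargetName is the link target recorded in the RAR5 header.
                Console.WriteLine($"{entry.Key} -> {entry.RedirTargetName}");
            }
        }
    }
}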
|
||||
|
||||
@@ -98,13 +98,11 @@ public class RarHeaderFactory
|
||||
switch (StreamingMode)
|
||||
{
|
||||
case StreamingMode.Seekable:
|
||||
|
||||
{
|
||||
reader.BaseStream.Position += ph.DataSize;
|
||||
}
|
||||
break;
|
||||
case StreamingMode.Streaming:
|
||||
|
||||
{
|
||||
reader.BaseStream.Skip(ph.DataSize);
|
||||
}
|
||||
@@ -146,14 +144,12 @@ public class RarHeaderFactory
|
||||
switch (StreamingMode)
|
||||
{
|
||||
case StreamingMode.Seekable:
|
||||
|
||||
{
|
||||
fh.DataStartPosition = reader.BaseStream.Position;
|
||||
reader.BaseStream.Position += fh.CompressedSize;
|
||||
}
|
||||
break;
|
||||
case StreamingMode.Streaming:
|
||||
|
||||
{
|
||||
var ms = new ReadOnlySubStream(reader.BaseStream, fh.CompressedSize);
|
||||
if (fh.R4Salt is null && fh.Rar5CryptoInfo is null)
|
||||
@@ -204,14 +200,12 @@ public class RarHeaderFactory
|
||||
switch (StreamingMode)
|
||||
{
|
||||
case StreamingMode.Seekable:
|
||||
|
||||
{
|
||||
fh.DataStartPosition = reader.BaseStream.Position;
|
||||
reader.BaseStream.Position += fh.CompressedSize;
|
||||
}
|
||||
break;
|
||||
case StreamingMode.Streaming:
|
||||
|
||||
{
|
||||
//skip the data because it's useless?
|
||||
reader.BaseStream.Skip(fh.CompressedSize);
|
||||
|
||||
@@ -42,10 +42,8 @@ internal class Rar5CryptoInfo
|
||||
}
|
||||
}
|
||||
|
||||
public void ReadInitV(MarkingBinaryReader reader)
|
||||
{
|
||||
public void ReadInitV(MarkingBinaryReader reader) =>
|
||||
InitV = reader.ReadBytes(EncryptionConstV5.SIZE_INITV);
|
||||
}
|
||||
|
||||
public bool UsePswCheck = false;
|
||||
|
||||
|
||||
@@ -10,7 +10,7 @@ namespace SharpCompress.Common.Rar;
|
||||
internal sealed class RarCryptoBinaryReader : RarCrcBinaryReader
|
||||
{
|
||||
private BlockTransformer _rijndael;
|
||||
private readonly Queue<byte> _data = new Queue<byte>();
|
||||
private readonly Queue<byte> _data = new();
|
||||
private long _readCount;
|
||||
|
||||
public RarCryptoBinaryReader(Stream stream, ICryptKey cryptKey)
|
||||
@@ -22,10 +22,7 @@ internal sealed class RarCryptoBinaryReader : RarCrcBinaryReader
|
||||
}
|
||||
|
||||
public RarCryptoBinaryReader(Stream stream, ICryptKey cryptKey, byte[] salt)
|
||||
: base(stream)
|
||||
{
|
||||
_rijndael = new BlockTransformer(cryptKey.Transformer(salt));
|
||||
}
|
||||
: base(stream) => _rijndael = new BlockTransformer(cryptKey.Transformer(salt));
|
||||
|
||||
// track read count ourselves rather than using the underlying stream since we buffer
|
||||
public override long CurrentReadByteCount
|
||||
@@ -39,15 +36,9 @@ internal sealed class RarCryptoBinaryReader : RarCrcBinaryReader
|
||||
|
||||
public override void Mark() => _readCount = 0;
|
||||
|
||||
public override byte ReadByte()
|
||||
{
|
||||
return ReadAndDecryptBytes(1)[0];
|
||||
}
|
||||
public override byte ReadByte() => ReadAndDecryptBytes(1)[0];
|
||||
|
||||
public override byte[] ReadBytes(int count)
|
||||
{
|
||||
return ReadAndDecryptBytes(count);
|
||||
}
|
||||
public override byte[] ReadBytes(int count) => ReadAndDecryptBytes(count);
|
||||
|
||||
private byte[] ReadAndDecryptBytes(int count)
|
||||
{
|
||||
|
||||
@@ -9,7 +9,7 @@ internal sealed class RarCryptoWrapper : Stream
|
||||
{
|
||||
private readonly Stream _actualStream;
|
||||
private BlockTransformer _rijndael;
|
||||
private readonly Queue<byte> _data = new Queue<byte>();
|
||||
private readonly Queue<byte> _data = new();
|
||||
|
||||
public RarCryptoWrapper(Stream actualStream, byte[] salt, ICryptKey key)
|
||||
{
|
||||
@@ -23,10 +23,8 @@ internal sealed class RarCryptoWrapper : Stream
|
||||
|
||||
public override void SetLength(long value) => throw new NotSupportedException();
|
||||
|
||||
public override int Read(byte[] buffer, int offset, int count)
|
||||
{
|
||||
return ReadAndDecrypt(buffer, offset, count);
|
||||
}
|
||||
public override int Read(byte[] buffer, int offset, int count) =>
|
||||
ReadAndDecrypt(buffer, offset, count);
|
||||
|
||||
public int ReadAndDecrypt(byte[] buffer, int offset, int count)
|
||||
{
|
||||
@@ -72,11 +70,11 @@ internal sealed class RarCryptoWrapper : Stream
|
||||
|
||||
protected override void Dispose(bool disposing)
|
||||
{
|
||||
if (_rijndael != null)
|
||||
if (disposing)
|
||||
{
|
||||
_rijndael.Dispose();
|
||||
_rijndael = null!;
|
||||
}
|
||||
|
||||
base.Dispose(disposing);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -25,7 +25,7 @@ public abstract class RarEntry : Entry
|
||||
/// <summary>
|
||||
/// The path of the file internal to the Rar Archive.
|
||||
/// </summary>
|
||||
public override string Key => FileHeader.FileName;
|
||||
public override string? Key => FileHeader.FileName;
|
||||
|
||||
public override string? LinkTarget => null;
|
||||
|
||||
@@ -66,6 +66,10 @@ public abstract class RarEntry : Entry
|
||||
|
||||
public override bool IsSplitAfter => FileHeader.IsSplitAfter;
|
||||
|
||||
public bool IsRedir => FileHeader.IsRedir;
|
||||
|
||||
public string RedirTargetName => FileHeader.RedirTargetName;
|
||||
|
||||
public override string ToString() =>
|
||||
string.Format(
|
||||
"Entry Path: {0} Compressed Size: {1} Uncompressed Size: {2} CRC: {3}",
|
||||
|
||||
@@ -15,17 +15,14 @@ namespace SharpCompress.Common.Rar;
|
||||
public abstract class RarVolume : Volume
|
||||
{
|
||||
private readonly RarHeaderFactory _headerFactory;
|
||||
internal int _maxCompressionAlgorithm;
|
||||
private int _maxCompressionAlgorithm;
|
||||
|
||||
internal RarVolume(StreamingMode mode, Stream stream, ReaderOptions options, int index = 0)
|
||||
internal RarVolume(StreamingMode mode, Stream stream, ReaderOptions options, int index)
|
||||
: base(stream, options, index) => _headerFactory = new RarHeaderFactory(mode, options);
|
||||
|
||||
#nullable disable
|
||||
internal ArchiveHeader ArchiveHeader { get; private set; }
|
||||
private ArchiveHeader? ArchiveHeader { get; set; }
|
||||
|
||||
#nullable enable
|
||||
|
||||
internal StreamingMode Mode => _headerFactory.StreamingMode;
|
||||
private StreamingMode Mode => _headerFactory.StreamingMode;
|
||||
|
||||
internal abstract IEnumerable<RarFilePart> ReadFileParts();
|
||||
|
||||
@@ -39,19 +36,16 @@ public abstract class RarVolume : Volume
|
||||
switch (header.HeaderType)
|
||||
{
|
||||
case HeaderType.Mark:
|
||||
|
||||
{
|
||||
lastMarkHeader = (MarkHeader)header;
|
||||
}
|
||||
break;
|
||||
case HeaderType.Archive:
|
||||
|
||||
{
|
||||
ArchiveHeader = (ArchiveHeader)header;
|
||||
}
|
||||
break;
|
||||
case HeaderType.File:
|
||||
|
||||
{
|
||||
var fh = (FileHeader)header;
|
||||
if (_maxCompressionAlgorithm < fh.CompressionAlgorithm)
|
||||
@@ -63,14 +57,12 @@ public abstract class RarVolume : Volume
|
||||
}
|
||||
break;
|
||||
case HeaderType.Service:
|
||||
|
||||
{
|
||||
var fh = (FileHeader)header;
|
||||
if (fh.FileName == "CMT")
|
||||
{
|
||||
var part = CreateFilePart(lastMarkHeader!, fh);
|
||||
var buffer = new byte[fh.CompressedSize];
|
||||
part.GetCompressedStream().Read(buffer, 0, buffer.Length);
|
||||
fh.PackedStream.Read(buffer, 0, buffer.Length);
|
||||
Comment = Encoding.UTF8.GetString(buffer, 0, buffer.Length - 1);
|
||||
}
|
||||
}
|
||||
@@ -105,7 +97,7 @@ public abstract class RarVolume : Volume
|
||||
get
|
||||
{
|
||||
EnsureArchiveHeaderLoaded();
|
||||
return ArchiveHeader.IsFirstVolume;
|
||||
return ArchiveHeader?.IsFirstVolume ?? false;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -117,7 +109,7 @@ public abstract class RarVolume : Volume
|
||||
get
|
||||
{
|
||||
EnsureArchiveHeaderLoaded();
|
||||
return ArchiveHeader.IsVolume;
|
||||
return ArchiveHeader?.IsVolume ?? false;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -130,7 +122,7 @@ public abstract class RarVolume : Volume
|
||||
get
|
||||
{
|
||||
EnsureArchiveHeaderLoaded();
|
||||
return ArchiveHeader.IsSolid;
|
||||
return ArchiveHeader?.IsSolid ?? false;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -15,15 +15,15 @@ internal class ArchiveDatabase
|
||||
internal long _startPositionAfterHeader;
|
||||
internal long _dataStartPosition;
|
||||
|
||||
internal List<long> _packSizes = new List<long>();
|
||||
internal List<uint?> _packCrCs = new List<uint?>();
|
||||
internal List<CFolder> _folders = new List<CFolder>();
|
||||
internal List<long> _packSizes = new();
|
||||
internal List<uint?> _packCrCs = new();
|
||||
internal List<CFolder> _folders = new();
|
||||
internal List<int> _numUnpackStreamsVector;
|
||||
internal List<CFileItem> _files = new List<CFileItem>();
|
||||
internal List<CFileItem> _files = new();
|
||||
|
||||
internal List<long> _packStreamStartPositions = new List<long>();
|
||||
internal List<int> _folderStartFileIndex = new List<int>();
|
||||
internal List<int> _fileIndexToFolderIndexMap = new List<int>();
|
||||
internal List<long> _packStreamStartPositions = new();
|
||||
internal List<int> _folderStartFileIndex = new();
|
||||
internal List<int> _fileIndexToFolderIndexMap = new();
|
||||
|
||||
internal IPasswordProvider PasswordProvider { get; }
|
||||
|
||||
@@ -35,7 +35,7 @@ internal class ArchiveDatabase
|
||||
_packSizes.Clear();
|
||||
_packCrCs.Clear();
|
||||
_folders.Clear();
|
||||
_numUnpackStreamsVector = null!;
|
||||
_numUnpackStreamsVector = null;
|
||||
_files.Clear();
|
||||
|
||||
_packStreamStartPositions.Clear();
|
||||
|
||||
@@ -14,13 +14,13 @@ namespace SharpCompress.Common.SevenZip;
|
||||
internal class ArchiveReader
|
||||
{
|
||||
internal Stream _stream;
|
||||
internal Stack<DataReader> _readerStack = new Stack<DataReader>();
|
||||
internal Stack<DataReader> _readerStack = new();
|
||||
internal DataReader _currentReader;
|
||||
internal long _streamOrigin;
|
||||
internal long _streamEnding;
|
||||
internal byte[] _header;
|
||||
|
||||
private readonly Dictionary<int, Stream> _cachedStreams = new Dictionary<int, Stream>();
|
||||
private readonly Dictionary<int, Stream> _cachedStreams = new();
|
||||
|
||||
internal void AddByteStream(byte[] buffer, int offset, int length)
|
||||
{
|
||||
@@ -1220,23 +1220,46 @@ internal class ArchiveReader
|
||||
|
||||
#region Public Methods
|
||||
|
||||
public void Open(Stream stream)
|
||||
public void Open(Stream stream, bool lookForHeader)
|
||||
{
|
||||
Close();
|
||||
|
||||
_streamOrigin = stream.Position;
|
||||
_streamEnding = stream.Length;
|
||||
|
||||
// TODO: Check Signature!
|
||||
_header = new byte[0x20];
|
||||
for (var offset = 0; offset < 0x20; )
|
||||
var canScan = lookForHeader ? 0x80000 - 20 : 0;
|
||||
while (true)
|
||||
{
|
||||
var delta = stream.Read(_header, offset, 0x20 - offset);
|
||||
if (delta == 0)
|
||||
// TODO: Check Signature!
|
||||
_header = new byte[0x20];
|
||||
for (var offset = 0; offset < 0x20; )
|
||||
{
|
||||
throw new EndOfStreamException();
|
||||
var delta = stream.Read(_header, offset, 0x20 - offset);
|
||||
if (delta == 0)
|
||||
{
|
||||
throw new EndOfStreamException();
|
||||
}
|
||||
|
||||
offset += delta;
|
||||
}
|
||||
offset += delta;
|
||||
|
||||
if (
|
||||
!lookForHeader
|
||||
|| _header
|
||||
.AsSpan(0, length: 6)
|
||||
.SequenceEqual<byte>([0x37, 0x7A, 0xBC, 0xAF, 0x27, 0x1C])
|
||||
)
|
||||
{
|
||||
break;
|
||||
}
|
||||
|
||||
if (canScan == 0)
|
||||
{
|
||||
throw new InvalidFormatException("Unable to find 7z signature");
|
||||
}
|
||||
|
||||
canScan--;
|
||||
stream.Position = ++_streamOrigin;
|
||||
}
|
||||
|
||||
_stream = stream;
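The reworked Open can now scan forward (up to roughly 512 KiB) for the 6-byte 7z signature 37 7A BC AF 27 1C, which lets archives with a self-extractor stub in front of the real header be read. A standalone sketch of the same scanning idea, independent of the internal reader; the class and method names are illustrative only.

using System;
using System.IO;

internal static class SevenZipSignature
{
    private static readonly byte[] Magic = { 0x37, 0x7A, 0xBC, 0xAF, 0x27, 0x1C };

    // Returns the absolute offset of the signature, or -1 if it is not found
    // within maxScan bytes of the starting position (a short read ends the scan).
    public static long Find(Stream stream, int maxScan = 0x80000)
    {
        var start = stream.Position;
        var probe = new byte[Magic.Length];
        for (var offset = 0; offset <= maxScan; offset++)
        {
            stream.Position = start + offset;
            if (stream.Read(probe, 0, probe.Length) < probe.Length)
            {
                return -1;
            }
            if (probe.AsSpan().SequenceEqual(Magic))
            {
                return start + offset;
            }
        }
        return -1;
    }
}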
|
||||
@@ -1359,7 +1382,7 @@ internal class ArchiveReader
|
||||
{
|
||||
internal int _fileIndex;
|
||||
internal int _folderIndex;
|
||||
internal List<bool> _extractStatuses = new List<bool>();
|
||||
internal List<bool> _extractStatuses = new();
|
||||
|
||||
internal CExtractFolderInfo(int fileIndex, int folderIndex)
|
||||
{
|
||||
|
||||
@@ -6,11 +6,11 @@ namespace SharpCompress.Common.SevenZip;
|
||||
|
||||
internal class CFolder
|
||||
{
|
||||
internal List<CCoderInfo> _coders = new List<CCoderInfo>();
|
||||
internal List<CBindPair> _bindPairs = new List<CBindPair>();
|
||||
internal List<int> _packStreams = new List<int>();
|
||||
internal List<CCoderInfo> _coders = new();
|
||||
internal List<CBindPair> _bindPairs = new();
|
||||
internal List<int> _packStreams = new();
|
||||
internal int _firstPackStreamId;
|
||||
internal List<long> _unpackSizes = new List<long>();
|
||||
internal List<long> _unpackSizes = new();
|
||||
internal uint? _unpackCrc;
|
||||
|
||||
internal bool UnpackCrcDefined => _unpackCrc != null;
|
||||
|
||||
@@ -7,10 +7,10 @@ internal readonly struct CMethodId
|
||||
public const ulong K_LZMA2_ID = 0x21;
|
||||
public const ulong K_AES_ID = 0x06F10701;
|
||||
|
||||
public static readonly CMethodId K_COPY = new CMethodId(K_COPY_ID);
|
||||
public static readonly CMethodId K_LZMA = new CMethodId(K_LZMA_ID);
|
||||
public static readonly CMethodId K_LZMA2 = new CMethodId(K_LZMA2_ID);
|
||||
public static readonly CMethodId K_AES = new CMethodId(K_AES_ID);
|
||||
public static readonly CMethodId K_COPY = new(K_COPY_ID);
|
||||
public static readonly CMethodId K_LZMA = new(K_LZMA_ID);
|
||||
public static readonly CMethodId K_LZMA2 = new(K_LZMA2_ID);
|
||||
public static readonly CMethodId K_AES = new(K_AES_ID);
|
||||
|
||||
public readonly ulong _id;
|
||||
|
||||
|
||||
@@ -13,7 +13,7 @@ public class SevenZipEntry : Entry
|
||||
|
||||
public override long Crc => FilePart.Header.Crc ?? 0;
|
||||
|
||||
public override string Key => FilePart.Header.Name;
|
||||
public override string? Key => FilePart.Header.Name;
|
||||
|
||||
public override string? LinkTarget => null;
|
||||
|
||||
|
||||
@@ -41,7 +41,7 @@ internal class SevenZipFilePart : FilePart
|
||||
{
|
||||
if (!Header.HasStream)
|
||||
{
|
||||
return null!;
|
||||
throw new InvalidOperationException("File does not have a stream.");
|
||||
}
|
||||
var folderStream = _database.GetFolderStream(_stream, Folder!, _database.PasswordProvider);
|
||||
|
||||
@@ -73,34 +73,24 @@ internal class SevenZipFilePart : FilePart
|
||||
private const uint K_PPMD = 0x030401;
|
||||
private const uint K_B_ZIP2 = 0x040202;
|
||||
|
||||
internal CompressionType GetCompression()
|
||||
private CompressionType GetCompression()
|
||||
{
|
||||
if (Header.IsDir)
|
||||
return CompressionType.None;
|
||||
|
||||
var coder = Folder!._coders.First();
|
||||
switch (coder._methodId._id)
|
||||
{
|
||||
case K_LZMA:
|
||||
case K_LZMA2:
|
||||
{
|
||||
return CompressionType.LZMA;
|
||||
}
|
||||
case K_PPMD:
|
||||
{
|
||||
return CompressionType.PPMd;
|
||||
}
|
||||
case K_B_ZIP2:
|
||||
{
|
||||
return CompressionType.BZip2;
|
||||
}
|
||||
default:
|
||||
throw new NotImplementedException();
|
||||
return CompressionType.None;
|
||||
}
|
||||
|
||||
var coder = Folder.NotNull()._coders.First();
|
||||
return coder._methodId._id switch
|
||||
{
|
||||
K_LZMA or K_LZMA2 => CompressionType.LZMA,
|
||||
K_PPMD => CompressionType.PPMd,
|
||||
K_B_ZIP2 => CompressionType.BZip2,
|
||||
_ => throw new NotImplementedException()
|
||||
};
|
||||
}
|
||||
|
||||
internal bool IsEncrypted =>
|
||||
Header.IsDir
|
||||
? false
|
||||
: Folder!._coders.FindIndex(c => c._methodId._id == CMethodId.K_AES_ID) != -1;
|
||||
!Header.IsDir
|
||||
&& Folder?._coders.FindIndex(c => c._methodId._id == CMethodId.K_AES_ID) != -1;
|
||||
}
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
#nullable disable
|
||||
|
||||
using System;
|
||||
using System.Buffers.Binary;
|
||||
using System.IO;
|
||||
@@ -9,12 +7,12 @@ namespace SharpCompress.Common.Tar.Headers;
|
||||
|
||||
internal sealed class TarHeader
|
||||
{
|
||||
internal static readonly DateTime EPOCH = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
|
||||
internal static readonly DateTime EPOCH = new(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
|
||||
|
||||
public TarHeader(ArchiveEncoding archiveEncoding) => ArchiveEncoding = archiveEncoding;
|
||||
|
||||
internal string Name { get; set; }
|
||||
internal string LinkName { get; set; }
|
||||
internal string? Name { get; set; }
|
||||
internal string? LinkName { get; set; }
|
||||
|
||||
internal long Mode { get; set; }
|
||||
internal long UserId { get; set; }
|
||||
@@ -22,7 +20,7 @@ internal sealed class TarHeader
|
||||
internal long Size { get; set; }
|
||||
internal DateTime LastModifiedTime { get; set; }
|
||||
internal EntryType EntryType { get; set; }
|
||||
internal Stream PackedStream { get; set; }
|
||||
internal Stream? PackedStream { get; set; }
|
||||
internal ArchiveEncoding ArchiveEncoding { get; }
|
||||
|
||||
internal const int BLOCK_SIZE = 512;
|
||||
@@ -36,7 +34,9 @@ internal sealed class TarHeader
|
||||
WriteOctalBytes(0, buffer, 116, 8); // group ID
|
||||
|
||||
//ArchiveEncoding.UTF8.GetBytes("magic").CopyTo(buffer, 257);
|
||||
var nameByteCount = ArchiveEncoding.GetEncoding().GetByteCount(Name);
|
||||
var nameByteCount = ArchiveEncoding
|
||||
.GetEncoding()
|
||||
.GetByteCount(Name.NotNull("Name is null"));
|
||||
if (nameByteCount > 100)
|
||||
{
|
||||
// Set mock filename and filetype to indicate the next block is the actual name of the file
|
||||
@@ -46,7 +46,7 @@ internal sealed class TarHeader
|
||||
}
|
||||
else
|
||||
{
|
||||
WriteStringBytes(ArchiveEncoding.Encode(Name), buffer, 100);
|
||||
WriteStringBytes(ArchiveEncoding.Encode(Name.NotNull("Name is null")), buffer, 100);
|
||||
WriteOctalBytes(Size, buffer, 124, 12);
|
||||
var time = (long)(LastModifiedTime.ToUniversalTime() - EPOCH).TotalSeconds;
|
||||
WriteOctalBytes(time, buffer, 136, 12);
|
||||
@@ -77,7 +77,7 @@ internal sealed class TarHeader
|
||||
//
|
||||
// and then infinite recursion is occured in WriteLongFilenameHeader because truncated.Length is 102.
|
||||
Name = ArchiveEncoding.Decode(
|
||||
ArchiveEncoding.Encode(Name),
|
||||
ArchiveEncoding.Encode(Name.NotNull("Name is null")),
|
||||
0,
|
||||
100 - ArchiveEncoding.GetEncoding().GetMaxByteCount(1)
|
||||
);
|
||||
@@ -87,7 +87,7 @@ internal sealed class TarHeader
|
||||
|
||||
private void WriteLongFilenameHeader(Stream output)
|
||||
{
|
||||
var nameBytes = ArchiveEncoding.Encode(Name);
|
||||
var nameBytes = ArchiveEncoding.Encode(Name.NotNull("Name is null"));
|
||||
output.Write(nameBytes, 0, nameBytes.Length);
|
||||
|
||||
// pad to multiple of BlockSize bytes, and make sure a terminating null is added
|
||||
@@ -101,57 +101,85 @@ internal sealed class TarHeader
|
||||
|
||||
internal bool Read(BinaryReader reader)
|
||||
{
|
||||
var buffer = ReadBlock(reader);
|
||||
if (buffer.Length == 0)
|
||||
string? longName = null;
|
||||
string? longLinkName = null;
|
||||
var hasLongValue = true;
|
||||
byte[] buffer;
|
||||
EntryType entryType;
|
||||
|
||||
do
|
||||
{
|
||||
buffer = ReadBlock(reader);
|
||||
|
||||
if (buffer.Length == 0)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
entryType = ReadEntryType(buffer);
|
||||
|
||||
// LongName and LongLink headers can follow each other and need
|
||||
// to apply to the header that follows them.
|
||||
if (entryType == EntryType.LongName)
|
||||
{
|
||||
longName = ReadLongName(reader, buffer);
|
||||
continue;
|
||||
}
|
||||
else if (entryType == EntryType.LongLink)
|
||||
{
|
||||
longLinkName = ReadLongName(reader, buffer);
|
||||
continue;
|
||||
}
|
||||
|
||||
hasLongValue = false;
|
||||
} while (hasLongValue);
|
||||
|
||||
// Check header checksum
|
||||
if (!checkChecksum(buffer))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
// for symlinks, additionally read the linkname
|
||||
if (ReadEntryType(buffer) == EntryType.SymLink)
|
||||
{
|
||||
LinkName = ArchiveEncoding.Decode(buffer, 157, 100).TrimNulls();
|
||||
}
|
||||
|
||||
if (ReadEntryType(buffer) == EntryType.LongName)
|
||||
{
|
||||
Name = ReadLongName(reader, buffer);
|
||||
buffer = ReadBlock(reader);
|
||||
}
|
||||
else
|
||||
{
|
||||
Name = ArchiveEncoding.Decode(buffer, 0, 100).TrimNulls();
|
||||
}
|
||||
|
||||
EntryType = ReadEntryType(buffer);
|
||||
Name = longName ?? ArchiveEncoding.Decode(buffer, 0, 100).TrimNulls();
|
||||
EntryType = entryType;
|
||||
Size = ReadSize(buffer);
|
||||
|
||||
// for symlinks, additionally read the linkname
|
||||
if (entryType == EntryType.SymLink || entryType == EntryType.HardLink)
|
||||
{
|
||||
LinkName = longLinkName ?? ArchiveEncoding.Decode(buffer, 157, 100).TrimNulls();
|
||||
}
|
||||
|
||||
Mode = ReadAsciiInt64Base8(buffer, 100, 7);
|
||||
if (EntryType == EntryType.Directory)
|
||||
|
||||
if (entryType == EntryType.Directory)
|
||||
{
|
||||
Mode |= 0b1_000_000_000;
|
||||
}
|
||||
|
||||
UserId = ReadAsciiInt64Base8oldGnu(buffer, 108, 7);
|
||||
GroupId = ReadAsciiInt64Base8oldGnu(buffer, 116, 7);
|
||||
var unixTimeStamp = ReadAsciiInt64Base8(buffer, 136, 11);
|
||||
LastModifiedTime = EPOCH.AddSeconds(unixTimeStamp).ToLocalTime();
|
||||
|
||||
var unixTimeStamp = ReadAsciiInt64Base8(buffer, 136, 11);
|
||||
|
||||
LastModifiedTime = EPOCH.AddSeconds(unixTimeStamp).ToLocalTime();
|
||||
Magic = ArchiveEncoding.Decode(buffer, 257, 6).TrimNulls();
|
||||
|
||||
if (!string.IsNullOrEmpty(Magic) && "ustar".Equals(Magic))
|
||||
{
|
||||
var namePrefix = ArchiveEncoding.Decode(buffer, 345, 157);
|
||||
namePrefix = namePrefix.TrimNulls();
|
||||
var namePrefix = ArchiveEncoding.Decode(buffer, 345, 157).TrimNulls();
|
||||
|
||||
if (!string.IsNullOrEmpty(namePrefix))
|
||||
{
|
||||
Name = namePrefix + "/" + Name;
|
||||
}
|
||||
}
|
||||
if (EntryType != EntryType.LongName && Name.Length == 0)
|
||||
|
||||
if (entryType != EntryType.LongName && Name.Length == 0)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -289,6 +317,42 @@ internal sealed class TarHeader
|
||||
(byte)' '
|
||||
};
|
||||
|
||||
internal static bool checkChecksum(byte[] buf)
|
||||
{
|
||||
const int eightSpacesChksum = 256;
|
||||
var buffer = new Span<byte>(buf).Slice(0, 512);
|
||||
int posix_sum = eightSpacesChksum;
|
||||
int sun_sum = eightSpacesChksum;
|
||||
|
||||
foreach (byte b in buffer)
|
||||
{
|
||||
posix_sum += b;
|
||||
sun_sum += unchecked((sbyte)b);
|
||||
}
|
||||
|
||||
// Special case, empty file header
|
||||
if (posix_sum == eightSpacesChksum)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
// Remove current checksum from calculation
|
||||
foreach (byte b in buffer.Slice(148, 8))
|
||||
{
|
||||
posix_sum -= b;
|
||||
sun_sum -= unchecked((sbyte)b);
|
||||
}
|
||||
|
||||
// Read and compare checksum for header
|
||||
var crc = ReadAsciiInt64Base8(buf, 148, 7);
|
||||
if (crc != posix_sum && crc != sun_sum)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
internal static int RecalculateChecksum(byte[] buf)
|
||||
{
|
||||
// Set default value for checksum. That is 8 spaces.
|
||||
@@ -323,5 +387,5 @@ internal sealed class TarHeader
|
||||
|
||||
public long? DataStartPosition { get; set; }
|
||||
|
||||
public string Magic { get; set; }
|
||||
public string? Magic { get; set; }
|
||||
}
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
#nullable disable
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
@@ -10,9 +8,9 @@ namespace SharpCompress.Common.Tar;
|
||||
|
||||
public class TarEntry : Entry
|
||||
{
|
||||
private readonly TarFilePart _filePart;
|
||||
private readonly TarFilePart? _filePart;
|
||||
|
||||
internal TarEntry(TarFilePart filePart, CompressionType type)
|
||||
internal TarEntry(TarFilePart? filePart, CompressionType type)
|
||||
{
|
||||
_filePart = filePart;
|
||||
CompressionType = type;
|
||||
@@ -22,15 +20,15 @@ public class TarEntry : Entry
|
||||
|
||||
public override long Crc => 0;
|
||||
|
||||
public override string Key => _filePart.Header.Name;
|
||||
public override string? Key => _filePart?.Header.Name;
|
||||
|
||||
public override string LinkTarget => _filePart.Header.LinkName;
|
||||
public override string? LinkTarget => _filePart?.Header.LinkName;
|
||||
|
||||
public override long CompressedSize => _filePart.Header.Size;
|
||||
public override long CompressedSize => _filePart?.Header.Size ?? 0;
|
||||
|
||||
public override long Size => _filePart.Header.Size;
|
||||
public override long Size => _filePart?.Header.Size ?? 0;
|
||||
|
||||
public override DateTime? LastModifiedTime => _filePart.Header.LastModifiedTime;
|
||||
public override DateTime? LastModifiedTime => _filePart?.Header.LastModifiedTime;
|
||||
|
||||
public override DateTime? CreatedTime => null;
|
||||
|
||||
@@ -40,17 +38,17 @@ public class TarEntry : Entry
|
||||
|
||||
public override bool IsEncrypted => false;
|
||||
|
||||
public override bool IsDirectory => _filePart.Header.EntryType == EntryType.Directory;
|
||||
public override bool IsDirectory => _filePart?.Header.EntryType == EntryType.Directory;
|
||||
|
||||
public override bool IsSplitAfter => false;
|
||||
|
||||
public long Mode => _filePart.Header.Mode;
|
||||
public long Mode => _filePart?.Header.Mode ?? 0;
|
||||
|
||||
public long UserID => _filePart.Header.UserId;
|
||||
public long UserID => _filePart?.Header.UserId ?? 0;
|
||||
|
||||
public long GroupId => _filePart.Header.GroupId;
|
||||
public long GroupId => _filePart?.Header.GroupId ?? 0;
|
||||
|
||||
internal override IEnumerable<FilePart> Parts => _filePart.AsEnumerable<FilePart>();
|
||||
internal override IEnumerable<FilePart> Parts => _filePart.Empty();
|
||||
|
||||
internal static IEnumerable<TarEntry> GetEntries(
|
||||
StreamingMode mode,
|
||||
@@ -59,17 +57,17 @@ public class TarEntry : Entry
|
||||
ArchiveEncoding archiveEncoding
|
||||
)
|
||||
{
|
||||
foreach (var h in TarHeaderFactory.ReadHeader(mode, stream, archiveEncoding))
|
||||
foreach (var header in TarHeaderFactory.ReadHeader(mode, stream, archiveEncoding))
|
||||
{
|
||||
if (h != null)
|
||||
if (header != null)
|
||||
{
|
||||
if (mode == StreamingMode.Seekable)
|
||||
{
|
||||
yield return new TarEntry(new TarFilePart(h, stream), compressionType);
|
||||
yield return new TarEntry(new TarFilePart(header, stream), compressionType);
|
||||
}
|
||||
else
|
||||
{
|
||||
yield return new TarEntry(new TarFilePart(h, null), compressionType);
|
||||
yield return new TarEntry(new TarFilePart(header, null), compressionType);
|
||||
}
|
||||
}
|
||||
else
|
||||
|
||||
@@ -5,9 +5,9 @@ namespace SharpCompress.Common.Tar;
|
||||
|
||||
internal sealed class TarFilePart : FilePart
|
||||
{
|
||||
private readonly Stream _seekableStream;
|
||||
private readonly Stream? _seekableStream;
|
||||
|
||||
internal TarFilePart(TarHeader header, Stream seekableStream)
|
||||
internal TarFilePart(TarHeader header, Stream? seekableStream)
|
||||
: base(header.ArchiveEncoding)
|
||||
{
|
||||
_seekableStream = seekableStream;
|
||||
@@ -16,16 +16,16 @@ internal sealed class TarFilePart : FilePart
|
||||
|
||||
internal TarHeader Header { get; }
|
||||
|
||||
internal override string FilePartName => Header.Name;
|
||||
internal override string? FilePartName => Header?.Name;
|
||||
|
||||
internal override Stream GetCompressedStream()
|
||||
{
|
||||
if (_seekableStream != null)
|
||||
{
|
||||
_seekableStream.Position = Header.DataStartPosition!.Value;
|
||||
_seekableStream.Position = Header.DataStartPosition ?? 0;
|
||||
return new TarReadOnlySubStream(_seekableStream, Header.Size);
|
||||
}
|
||||
return Header.PackedStream;
|
||||
return Header.PackedStream.NotNull();
|
||||
}
|
||||
|
||||
internal override Stream? GetRawStream() => null;
|
||||
|
||||
@@ -28,7 +28,6 @@ internal static class TarHeaderFactory
|
||||
switch (mode)
|
||||
{
|
||||
case StreamingMode.Seekable:
|
||||
|
||||
{
|
||||
header.DataStartPosition = reader.BaseStream.Position;
|
||||
|
||||
@@ -37,7 +36,6 @@ internal static class TarHeaderFactory
|
||||
}
|
||||
break;
|
||||
case StreamingMode.Streaming:
|
||||
|
||||
{
|
||||
header.PackedStream = new TarReadOnlySubStream(stream, header.Size);
|
||||
}
|
||||
|
||||
@@ -63,7 +63,7 @@ internal class TarReadOnlySubStream : NonDisposingStream
|
||||
{
|
||||
count = (int)BytesLeftToRead;
|
||||
}
|
||||
int read = Stream.Read(buffer, offset, count);
|
||||
var read = Stream.Read(buffer, offset, count);
|
||||
if (read > 0)
|
||||
{
|
||||
BytesLeftToRead -= read;
|
||||
@@ -78,7 +78,7 @@ internal class TarReadOnlySubStream : NonDisposingStream
|
||||
{
|
||||
return -1;
|
||||
}
|
||||
int value = Stream.ReadByte();
|
||||
var value = Stream.ReadByte();
|
||||
if (value != -1)
|
||||
{
|
||||
--BytesLeftToRead;
|
||||
|
||||
@@ -9,11 +9,11 @@ public abstract class Volume : IVolume
|
||||
{
|
||||
private readonly Stream _actualStream;
|
||||
|
||||
internal Volume(Stream stream, ReaderOptions readerOptions, int index = 0)
|
||||
internal Volume(Stream stream, ReaderOptions? readerOptions, int index = 0)
|
||||
{
|
||||
Index = index;
|
||||
ReaderOptions = readerOptions;
|
||||
if (readerOptions.LeaveStreamOpen)
|
||||
ReaderOptions = readerOptions ?? new ReaderOptions();
|
||||
if (ReaderOptions.LeaveStreamOpen)
|
||||
{
|
||||
stream = NonDisposingStream.Create(stream);
|
||||
}
|
||||
@@ -32,7 +32,7 @@ public abstract class Volume : IVolume
|
||||
|
||||
public virtual int Index { get; internal set; }
|
||||
|
||||
public string FileName => (_actualStream as FileStream)?.Name!;
|
||||
public string? FileName => (_actualStream as FileStream)?.Name;
|
||||
|
||||
/// <summary>
|
||||
/// RarArchive is part of a multi-part archive.
|
||||
|
||||
@@ -14,8 +14,8 @@ internal class DirectoryEntryHeader : ZipFileEntry
|
||||
VersionNeededToExtract = reader.ReadUInt16();
|
||||
Flags = (HeaderFlags)reader.ReadUInt16();
|
||||
CompressionMethod = (ZipCompressionMethod)reader.ReadUInt16();
|
||||
LastModifiedTime = reader.ReadUInt16();
|
||||
LastModifiedDate = reader.ReadUInt16();
|
||||
OriginalLastModifiedTime = LastModifiedTime = reader.ReadUInt16();
|
||||
OriginalLastModifiedDate = LastModifiedDate = reader.ReadUInt16();
|
||||
Crc = reader.ReadUInt32();
|
||||
CompressedSize = reader.ReadUInt32();
|
||||
UncompressedSize = reader.ReadUInt32();
|
||||
@@ -52,8 +52,8 @@ internal class DirectoryEntryHeader : ZipFileEntry
|
||||
|
||||
LoadExtra(extra);
|
||||
|
||||
var unicodePathExtra = Extra.FirstOrDefault(
|
||||
u => u.Type == ExtraDataType.UnicodePathExtraField
|
||||
var unicodePathExtra = Extra.FirstOrDefault(u =>
|
||||
u.Type == ExtraDataType.UnicodePathExtraField
|
||||
);
|
||||
if (unicodePathExtra != null && ArchiveEncoding.Forced == null)
|
||||
{
|
||||
|
||||
@@ -13,8 +13,8 @@ internal class LocalEntryHeader : ZipFileEntry
|
||||
Version = reader.ReadUInt16();
|
||||
Flags = (HeaderFlags)reader.ReadUInt16();
|
||||
CompressionMethod = (ZipCompressionMethod)reader.ReadUInt16();
|
||||
LastModifiedTime = reader.ReadUInt16();
|
||||
LastModifiedDate = reader.ReadUInt16();
|
||||
OriginalLastModifiedTime = LastModifiedTime = reader.ReadUInt16();
|
||||
OriginalLastModifiedDate = LastModifiedDate = reader.ReadUInt16();
|
||||
Crc = reader.ReadUInt32();
|
||||
CompressedSize = reader.ReadUInt32();
|
||||
UncompressedSize = reader.ReadUInt32();
|
||||
@@ -42,8 +42,8 @@ internal class LocalEntryHeader : ZipFileEntry
|
||||
|
||||
LoadExtra(extra);
|
||||
|
||||
var unicodePathExtra = Extra.FirstOrDefault(
|
||||
u => u.Type == ExtraDataType.UnicodePathExtraField
|
||||
var unicodePathExtra = Extra.FirstOrDefault(u =>
|
||||
u.Type == ExtraDataType.UnicodePathExtraField
|
||||
);
|
||||
if (unicodePathExtra != null && ArchiveEncoding.Forced == null)
|
||||
{
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
#nullable disable
|
||||
|
||||
using System;
|
||||
using System.Buffers.Binary;
|
||||
using System.Collections.Generic;
|
||||
@@ -20,21 +18,21 @@ internal abstract class ZipFileEntry : ZipHeader
|
||||
{
|
||||
get
|
||||
{
|
||||
if (Name.EndsWith('/'))
|
||||
if (Name?.EndsWith('/') ?? false)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
//.NET Framework 4.5 : System.IO.Compression::CreateFromDirectory() probably writes backslashes to headers
|
||||
return CompressedSize == 0 && UncompressedSize == 0 && Name.EndsWith('\\');
|
||||
return CompressedSize == 0 && UncompressedSize == 0 && (Name?.EndsWith('\\') ?? false);
|
||||
}
|
||||
}
|
||||
|
||||
internal Stream PackedStream { get; set; }
|
||||
internal Stream? PackedStream { get; set; }
|
||||
|
||||
internal ArchiveEncoding ArchiveEncoding { get; }
|
||||
|
||||
internal string Name { get; set; }
|
||||
internal string? Name { get; set; }
|
||||
|
||||
internal HeaderFlags Flags { get; set; }
|
||||
|
||||
@@ -48,7 +46,7 @@ internal abstract class ZipFileEntry : ZipHeader
|
||||
|
||||
internal List<ExtraData> Extra { get; set; }
|
||||
|
||||
public string Password { get; set; }
|
||||
public string? Password { get; set; }
|
||||
|
||||
internal PkwareTraditionalEncryptionData ComposeEncryptionData(Stream archiveStream)
|
||||
{
|
||||
@@ -65,10 +63,28 @@ internal abstract class ZipFileEntry : ZipHeader
|
||||
return encryptionData;
|
||||
}
|
||||
|
||||
internal WinzipAesEncryptionData WinzipAesEncryptionData { get; set; }
|
||||
internal WinzipAesEncryptionData? WinzipAesEncryptionData { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// The last modified date as read from the Local or Central Directory header.
|
||||
/// </summary>
|
||||
internal ushort OriginalLastModifiedDate { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// The last modified date from the UnixTimeExtraField, if present, or the
|
||||
/// Local or Central Directory header, if not.
|
||||
/// </summary>
|
||||
internal ushort LastModifiedDate { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// The last modified time as read from the Local or Central Directory header.
|
||||
/// </summary>
|
||||
internal ushort OriginalLastModifiedTime { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// The last modified time from the UnixTimeExtraField, if present, or the
|
||||
/// Local or Central Directory header, if not.
|
||||
/// </summary>
|
||||
internal ushort LastModifiedTime { get; set; }
|
||||
|
||||
internal uint Crc { get; set; }
|
||||
@@ -101,7 +117,7 @@ internal abstract class ZipFileEntry : ZipHeader
|
||||
}
|
||||
}
|
||||
|
||||
internal ZipFilePart Part { get; set; }
|
||||
internal ZipFilePart? Part { get; set; }
|
||||
|
||||
internal bool IsZip64 => CompressedSize >= uint.MaxValue;
|
||||
}
|
||||
|
||||
@@ -6,7 +6,7 @@ namespace SharpCompress.Common.Zip;
|
||||
|
||||
internal class PkwareTraditionalEncryptionData
|
||||
{
|
||||
private static readonly CRC32 CRC32 = new CRC32();
|
||||
private static readonly CRC32 CRC32 = new();
|
||||
private readonly uint[] _keys = { 0x12345678, 0x23456789, 0x34567890 };
|
||||
private readonly ArchiveEncoding _archiveEncoding;
|
||||
|
||||
@@ -39,7 +39,7 @@ internal class PkwareTraditionalEncryptionData
|
||||
{
|
||||
throw new CryptographicException("The password did not match.");
|
||||
}
|
||||
if (plainTextHeader[11] != (byte)((header.LastModifiedTime >> 8) & 0xff))
|
||||
if (plainTextHeader[11] != (byte)((header.OriginalLastModifiedTime >> 8) & 0xff))
|
||||
{
|
||||
throw new CryptographicException("The password did not match.");
|
||||
}
|
||||
@@ -103,7 +103,7 @@ internal class PkwareTraditionalEncryptionData
|
||||
|
||||
internal byte[] StringToByteArray(string value)
|
||||
{
|
||||
var a = _archiveEncoding.Password.GetBytes(value);
|
||||
var a = _archiveEncoding.GetPasswordEncoding().GetBytes(value);
|
||||
return a;
|
||||
}
|
||||
|
||||
|
||||
@@ -42,16 +42,16 @@ internal class SeekableZipFilePart : ZipFilePart
|
||||
|
||||
protected override Stream CreateBaseStream()
|
||||
{
|
||||
BaseStream.Position = Header.DataStartPosition!.Value;
|
||||
BaseStream.Position = Header.DataStartPosition.NotNull();
|
||||
|
||||
if (
|
||||
(Header.CompressedSize == 0)
|
||||
&& FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor)
|
||||
&& (_directoryEntryHeader?.HasData == true)
|
||||
&& (_directoryEntryHeader?.CompressedSize != 0)
|
||||
&& _directoryEntryHeader.HasData
|
||||
&& (_directoryEntryHeader.CompressedSize != 0)
|
||||
)
|
||||
{
|
||||
return new ReadOnlySubStream(BaseStream, _directoryEntryHeader!.CompressedSize);
|
||||
return new ReadOnlySubStream(BaseStream, _directoryEntryHeader.CompressedSize);
|
||||
}
|
||||
|
||||
return BaseStream;
|
||||
|
||||
@@ -13,7 +13,7 @@ internal sealed class StreamingZipFilePart : ZipFilePart
|
||||
internal StreamingZipFilePart(ZipFileEntry header, Stream stream)
|
||||
: base(header, stream) { }
|
||||
|
||||
protected override Stream CreateBaseStream() => Header.PackedStream;
|
||||
protected override Stream CreateBaseStream() => Header.PackedStream.NotNull();
|
||||
|
||||
internal override Stream GetCompressedStream()
|
||||
{
|
||||
|
||||
@@ -15,10 +15,7 @@ internal class StreamingZipHeaderFactory : ZipHeaderFactory
|
||||
ArchiveEncoding archiveEncoding,
|
||||
IEnumerable<ZipEntry>? entries
|
||||
)
|
||||
: base(StreamingMode.Streaming, password, archiveEncoding)
|
||||
{
|
||||
_entries = entries;
|
||||
}
|
||||
: base(StreamingMode.Streaming, password, archiveEncoding) => _entries = entries;
|
||||
|
||||
internal IEnumerable<ZipHeader> ReadStreamHeader(Stream stream)
|
||||
{
|
||||
@@ -39,16 +36,19 @@ internal class StreamingZipHeaderFactory : ZipHeaderFactory
|
||||
uint headerBytes = 0;
|
||||
if (
|
||||
_lastEntryHeader != null
|
||||
&& (
|
||||
FlagUtility.HasFlag(_lastEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor)
|
||||
|| _lastEntryHeader.IsZip64
|
||||
)
|
||||
&& FlagUtility.HasFlag(_lastEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor)
|
||||
)
|
||||
{
|
||||
if (_lastEntryHeader.Part is null)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
reader = ((StreamingZipFilePart)_lastEntryHeader.Part).FixStreamedFileLocation(
|
||||
ref rewindableStream
|
||||
);
|
||||
|
||||
var pos = rewindableStream.CanSeek ? (long?)rewindableStream.Position : null;
|
||||
|
||||
var crc = reader.ReadUInt32();
|
||||
if (crc == POST_DATA_DESCRIPTOR)
|
||||
{
|
||||
@@ -81,6 +81,60 @@ internal class StreamingZipHeaderFactory : ZipHeaderFactory
|
||||
_lastEntryHeader.DataStartPosition = pos - _lastEntryHeader.CompressedSize;
|
||||
}
|
||||
}
|
||||
else if (_lastEntryHeader != null && _lastEntryHeader.IsZip64)
|
||||
{
|
||||
if (_lastEntryHeader.Part is null)
|
||||
continue;
|
||||
|
||||
reader = ((StreamingZipFilePart)_lastEntryHeader.Part).FixStreamedFileLocation(
|
||||
ref rewindableStream
|
||||
);
|
||||
|
||||
var pos = rewindableStream.CanSeek ? (long?)rewindableStream.Position : null;
|
||||
|
||||
headerBytes = reader.ReadUInt32();
|
||||
|
||||
var version = reader.ReadUInt16();
|
||||
var flags = (HeaderFlags)reader.ReadUInt16();
|
||||
var compressionMethod = (ZipCompressionMethod)reader.ReadUInt16();
|
||||
var lastModifiedDate = reader.ReadUInt16();
|
||||
var lastModifiedTime = reader.ReadUInt16();
|
||||
|
||||
var crc = reader.ReadUInt32();
|
||||
|
||||
if (crc == POST_DATA_DESCRIPTOR)
|
||||
{
|
||||
crc = reader.ReadUInt32();
|
||||
}
|
||||
_lastEntryHeader.Crc = crc;
|
||||
|
||||
// The DataDescriptor can be either 64bit or 32bit
|
||||
var compressed_size = reader.ReadUInt32();
|
||||
var uncompressed_size = reader.ReadUInt32();
|
||||
|
||||
// Check if we have header or 64bit DataDescriptor
|
||||
var test_header = !(headerBytes == 0x04034b50 || headerBytes == 0x02014b50);
|
||||
|
||||
var test_64bit = ((long)uncompressed_size << 32) | compressed_size;
|
||||
if (test_64bit == _lastEntryHeader.CompressedSize && test_header)
|
||||
{
|
||||
_lastEntryHeader.UncompressedSize =
|
||||
((long)reader.ReadUInt32() << 32) | headerBytes;
|
||||
headerBytes = reader.ReadUInt32();
|
||||
}
|
||||
else
|
||||
{
|
||||
_lastEntryHeader.UncompressedSize = uncompressed_size;
|
||||
}
|
||||
|
||||
if (pos.HasValue)
|
||||
{
|
||||
_lastEntryHeader.DataStartPosition = pos - _lastEntryHeader.CompressedSize;
|
||||
|
||||
// 4 = First 4 bytes of the entry header (i.e. 50 4B 03 04)
|
||||
rewindableStream.Position = pos.Value + 4;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
headerBytes = reader.ReadUInt32();
|
||||
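For readers following the new zip64 branch above: a streamed entry's post-data descriptor can carry either 32-bit or 64-bit size fields, and the code guesses which by checking whether two consecutive 32-bit words, taken together, reproduce the compressed size already known for the entry. A minimal sketch of that check, with illustrative names (not the library's API):

// Sketch only: decide whether the two 32-bit words read after the CRC form one
// 64-bit compressed size. knownCompressedSize is the size already recorded for the
// entry; signatureCandidate is the word that would have to be the next header
// signature if the descriptor were only 32 bits wide.
static bool IsZip64Descriptor(uint lowWord, uint highWord, uint signatureCandidate, long knownCompressedSize)
{
    const uint LocalHeader = 0x04034b50;
    const uint CentralHeader = 0x02014b50;
    var combined = ((long)highWord << 32) | lowWord;
    var candidateIsHeader = signatureCandidate == LocalHeader || signatureCandidate == CentralHeader;
    return combined == knownCompressedSize && !candidateIsHeader;
}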
@@ -97,13 +151,12 @@ internal class StreamingZipHeaderFactory : ZipHeaderFactory
|
||||
if (header.ZipHeaderType == ZipHeaderType.LocalEntry)
|
||||
{
|
||||
var local_header = ((LocalEntryHeader)header);
|
||||
var dir_header = _entries?.FirstOrDefault(
|
||||
entry =>
|
||||
entry.Key == local_header.Name
|
||||
&& local_header.CompressedSize == 0
|
||||
&& local_header.UncompressedSize == 0
|
||||
&& local_header.Crc == 0
|
||||
&& local_header.IsDirectory == false
|
||||
var dir_header = _entries?.FirstOrDefault(entry =>
|
||||
entry.Key == local_header.Name
|
||||
&& local_header.CompressedSize == 0
|
||||
&& local_header.UncompressedSize == 0
|
||||
&& local_header.Crc == 0
|
||||
&& local_header.IsDirectory == false
|
||||
);
|
||||
|
||||
if (dir_header != null)
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
#nullable disable
|
||||
|
||||
using System;
|
||||
using System.Buffers.Binary;
|
||||
using System.Security.Cryptography;
|
||||
@@ -10,12 +8,7 @@ internal class WinzipAesEncryptionData
|
||||
{
|
||||
private const int RFC2898_ITERATIONS = 1000;
|
||||
|
||||
private readonly byte[] _salt;
|
||||
private readonly WinzipAesKeySize _keySize;
|
||||
private readonly byte[] _passwordVerifyValue;
|
||||
private readonly string _password;
|
||||
|
||||
private byte[] _generatedVerifyValue;
|
||||
|
||||
internal WinzipAesEncryptionData(
|
||||
WinzipAesKeySize keySize,
|
||||
@@ -25,10 +18,28 @@ internal class WinzipAesEncryptionData
|
||||
)
|
||||
{
|
||||
_keySize = keySize;
|
||||
_salt = salt;
|
||||
_passwordVerifyValue = passwordVerifyValue;
|
||||
_password = password;
|
||||
Initialize();
|
||||
|
||||
#if NETFRAMEWORK || NETSTANDARD2_0
|
||||
var rfc2898 = new Rfc2898DeriveBytes(password, salt, RFC2898_ITERATIONS);
|
||||
#else
|
||||
var rfc2898 = new Rfc2898DeriveBytes(
|
||||
password,
|
||||
salt,
|
||||
RFC2898_ITERATIONS,
|
||||
HashAlgorithmName.SHA1
|
||||
);
|
||||
#endif
|
||||
|
||||
KeyBytes = rfc2898.GetBytes(KeySizeInBytes); // 16 or 24 or 32 ???
|
||||
IvBytes = rfc2898.GetBytes(KeySizeInBytes);
|
||||
var generatedVerifyValue = rfc2898.GetBytes(2);
|
||||
|
||||
var verify = BinaryPrimitives.ReadInt16LittleEndian(passwordVerifyValue);
|
||||
var generated = BinaryPrimitives.ReadInt16LittleEndian(generatedVerifyValue);
|
||||
if (verify != generated)
|
||||
{
|
||||
throw new InvalidFormatException("bad password");
|
||||
}
|
||||
}
|
||||
|
||||
internal byte[] IvBytes { get; set; }
|
||||
@@ -45,32 +56,4 @@ internal class WinzipAesEncryptionData
|
||||
WinzipAesKeySize.KeySize256 => 32,
|
||||
_ => throw new InvalidOperationException(),
|
||||
};
|
||||
|
||||
private void Initialize()
|
||||
{
|
||||
#if NETFRAMEWORK || NETSTANDARD2_0
|
||||
var rfc2898 = new Rfc2898DeriveBytes(_password, _salt, RFC2898_ITERATIONS);
|
||||
#else
|
||||
var rfc2898 = new Rfc2898DeriveBytes(
|
||||
_password,
|
||||
_salt,
|
||||
RFC2898_ITERATIONS,
|
||||
HashAlgorithmName.SHA1
|
||||
);
|
||||
#endif
|
||||
|
||||
KeyBytes = rfc2898.GetBytes(KeySizeInBytes); // 16 or 24 or 32 ???
|
||||
IvBytes = rfc2898.GetBytes(KeySizeInBytes);
|
||||
_generatedVerifyValue = rfc2898.GetBytes(2);
|
||||
|
||||
var verify = BinaryPrimitives.ReadInt16LittleEndian(_passwordVerifyValue);
|
||||
if (_password != null)
|
||||
{
|
||||
var generated = BinaryPrimitives.ReadInt16LittleEndian(_generatedVerifyValue);
|
||||
if (verify != generated)
|
||||
{
|
||||
throw new InvalidFormatException("bad password");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
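The WinzipAesEncryptionData constructor above now derives the key material inline instead of deferring to a separate Initialize() call. A condensed sketch of the same PBKDF2 derivation (SHA-1, 1000 iterations, key + IV + 2-byte password verifier); the method name and exception type here are illustrative, not the library's API:

using System.Buffers.Binary;
using System.Security.Cryptography;

// Sketch of the key derivation shown in the diff above.
static (byte[] Key, byte[] Iv) DeriveWinzipAesKeys(
    string password,
    byte[] salt,
    int keySizeInBytes,
    byte[] passwordVerifyValue)
{
    using var rfc2898 = new Rfc2898DeriveBytes(password, salt, 1000, HashAlgorithmName.SHA1);
    var key = rfc2898.GetBytes(keySizeInBytes);
    var iv = rfc2898.GetBytes(keySizeInBytes); // mirrors the diff: IV bytes are taken with the same length as the key
    var generatedVerify = rfc2898.GetBytes(2);
    var expected = BinaryPrimitives.ReadInt16LittleEndian(passwordVerifyValue);
    var actual = BinaryPrimitives.ReadInt16LittleEndian(generatedVerify);
    if (expected != actual)
    {
        throw new InvalidOperationException("bad password");
    }
    return (key, iv);
}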
@@ -3,6 +3,12 @@ namespace SharpCompress.Common.Zip;
|
||||
internal enum ZipCompressionMethod
|
||||
{
|
||||
None = 0,
|
||||
Shrink = 1,
|
||||
Reduce1 = 2,
|
||||
Reduce2 = 3,
|
||||
Reduce3 = 4,
|
||||
Reduce4 = 5,
|
||||
Explode = 6,
|
||||
Deflate = 8,
|
||||
Deflate64 = 9,
|
||||
BZip2 = 12,
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
#nullable disable
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using SharpCompress.Common.Zip.Headers;
|
||||
@@ -8,67 +6,48 @@ namespace SharpCompress.Common.Zip;
|
||||
|
||||
public class ZipEntry : Entry
|
||||
{
|
||||
private readonly ZipFilePart _filePart;
|
||||
private readonly ZipFilePart? _filePart;
|
||||
|
||||
internal ZipEntry(ZipFilePart filePart)
|
||||
internal ZipEntry(ZipFilePart? filePart)
|
||||
{
|
||||
if (filePart != null)
|
||||
if (filePart == null)
|
||||
{
|
||||
_filePart = filePart;
|
||||
LastModifiedTime = Utility.DosDateToDateTime(
|
||||
filePart.Header.LastModifiedDate,
|
||||
filePart.Header.LastModifiedTime
|
||||
);
|
||||
return;
|
||||
}
|
||||
_filePart = filePart;
|
||||
LastModifiedTime = Utility.DosDateToDateTime(
|
||||
filePart.Header.LastModifiedDate,
|
||||
filePart.Header.LastModifiedTime
|
||||
);
|
||||
}
|
||||
|
||||
public override CompressionType CompressionType
|
||||
{
|
||||
get
|
||||
public override CompressionType CompressionType =>
|
||||
_filePart?.Header.CompressionMethod switch
|
||||
{
|
||||
switch (_filePart.Header.CompressionMethod)
|
||||
{
|
||||
case ZipCompressionMethod.BZip2:
|
||||
{
|
||||
return CompressionType.BZip2;
|
||||
}
|
||||
case ZipCompressionMethod.Deflate:
|
||||
{
|
||||
return CompressionType.Deflate;
|
||||
}
|
||||
case ZipCompressionMethod.Deflate64:
|
||||
{
|
||||
return CompressionType.Deflate64;
|
||||
}
|
||||
case ZipCompressionMethod.LZMA:
|
||||
{
|
||||
return CompressionType.LZMA;
|
||||
}
|
||||
case ZipCompressionMethod.PPMd:
|
||||
{
|
||||
return CompressionType.PPMd;
|
||||
}
|
||||
case ZipCompressionMethod.None:
|
||||
{
|
||||
return CompressionType.None;
|
||||
}
|
||||
default:
|
||||
{
|
||||
return CompressionType.Unknown;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
ZipCompressionMethod.BZip2 => CompressionType.BZip2,
|
||||
ZipCompressionMethod.Deflate => CompressionType.Deflate,
|
||||
ZipCompressionMethod.Deflate64 => CompressionType.Deflate64,
|
||||
ZipCompressionMethod.LZMA => CompressionType.LZMA,
|
||||
ZipCompressionMethod.PPMd => CompressionType.PPMd,
|
||||
ZipCompressionMethod.None => CompressionType.None,
|
||||
ZipCompressionMethod.Shrink => CompressionType.Shrink,
|
||||
ZipCompressionMethod.Reduce1 => CompressionType.Reduce1,
|
||||
ZipCompressionMethod.Reduce2 => CompressionType.Reduce2,
|
||||
ZipCompressionMethod.Reduce3 => CompressionType.Reduce3,
|
||||
ZipCompressionMethod.Reduce4 => CompressionType.Reduce4,
|
||||
ZipCompressionMethod.Explode => CompressionType.Explode,
|
||||
_ => CompressionType.Unknown
|
||||
};
|
||||
|
||||
public override long Crc => _filePart.Header.Crc;
|
||||
public override long Crc => _filePart?.Header.Crc ?? 0;
|
||||
|
||||
public override string Key => _filePart.Header.Name;
|
||||
public override string? Key => _filePart?.Header.Name;
|
||||
|
||||
public override string LinkTarget => null;
|
||||
public override string? LinkTarget => null;
|
||||
|
||||
public override long CompressedSize => _filePart.Header.CompressedSize;
|
||||
public override long CompressedSize => _filePart?.Header.CompressedSize ?? 0;
|
||||
|
||||
public override long Size => _filePart.Header.UncompressedSize;
|
||||
public override long Size => _filePart?.Header.UncompressedSize ?? 0;
|
||||
|
||||
public override DateTime? LastModifiedTime { get; }
|
||||
|
||||
@@ -79,11 +58,11 @@ public class ZipEntry : Entry
|
||||
public override DateTime? ArchivedTime => null;
|
||||
|
||||
public override bool IsEncrypted =>
|
||||
FlagUtility.HasFlag(_filePart.Header.Flags, HeaderFlags.Encrypted);
|
||||
FlagUtility.HasFlag(_filePart?.Header.Flags ?? HeaderFlags.None, HeaderFlags.Encrypted);
|
||||
|
||||
public override bool IsDirectory => _filePart.Header.IsDirectory;
|
||||
public override bool IsDirectory => _filePart?.Header.IsDirectory ?? false;
|
||||
|
||||
public override bool IsSplitAfter => false;
|
||||
|
||||
internal override IEnumerable<FilePart> Parts => _filePart.AsEnumerable<FilePart>();
|
||||
internal override IEnumerable<FilePart> Parts => _filePart.Empty();
|
||||
}
|
||||
|
||||
@@ -7,8 +7,11 @@ using SharpCompress.Compressors;
|
||||
using SharpCompress.Compressors.BZip2;
|
||||
using SharpCompress.Compressors.Deflate;
|
||||
using SharpCompress.Compressors.Deflate64;
|
||||
using SharpCompress.Compressors.Explode;
|
||||
using SharpCompress.Compressors.LZMA;
|
||||
using SharpCompress.Compressors.PPMd;
|
||||
using SharpCompress.Compressors.Reduce;
|
||||
using SharpCompress.Compressors.Shrink;
|
||||
using SharpCompress.Compressors.Xz;
|
||||
using SharpCompress.IO;
|
||||
using ZstdSharp;
|
||||
@@ -28,7 +31,7 @@ internal abstract class ZipFilePart : FilePart
|
||||
internal Stream BaseStream { get; }
|
||||
internal ZipFileEntry Header { get; set; }
|
||||
|
||||
internal override string FilePartName => Header.Name;
|
||||
internal override string? FilePartName => Header.Name;
|
||||
|
||||
internal override Stream GetCompressedStream()
|
||||
{
|
||||
@@ -79,6 +82,41 @@ internal abstract class ZipFilePart : FilePart
|
||||
|
||||
return new DataDescriptorStream(stream);
|
||||
}
|
||||
case ZipCompressionMethod.Shrink:
|
||||
{
|
||||
return new ShrinkStream(
|
||||
stream,
|
||||
CompressionMode.Decompress,
|
||||
Header.CompressedSize,
|
||||
Header.UncompressedSize
|
||||
);
|
||||
}
|
||||
case ZipCompressionMethod.Reduce1:
|
||||
{
|
||||
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 1);
|
||||
}
|
||||
case ZipCompressionMethod.Reduce2:
|
||||
{
|
||||
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 2);
|
||||
}
|
||||
case ZipCompressionMethod.Reduce3:
|
||||
{
|
||||
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 3);
|
||||
}
|
||||
case ZipCompressionMethod.Reduce4:
|
||||
{
|
||||
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 4);
|
||||
}
|
||||
case ZipCompressionMethod.Explode:
|
||||
{
|
||||
return new ExplodeStream(
|
||||
stream,
|
||||
Header.CompressedSize,
|
||||
Header.UncompressedSize,
|
||||
Header.Flags
|
||||
);
|
||||
}
|
||||
|
||||
case ZipCompressionMethod.Deflate:
|
||||
{
|
||||
return new DeflateStream(stream, CompressionMode.Decompress);
|
||||
@@ -192,6 +230,11 @@ internal abstract class ZipFilePart : FilePart
|
||||
switch (Header.CompressionMethod)
|
||||
{
|
||||
case ZipCompressionMethod.None:
|
||||
case ZipCompressionMethod.Shrink:
|
||||
case ZipCompressionMethod.Reduce1:
|
||||
case ZipCompressionMethod.Reduce2:
|
||||
case ZipCompressionMethod.Reduce3:
|
||||
case ZipCompressionMethod.Reduce4:
|
||||
case ZipCompressionMethod.Deflate:
|
||||
case ZipCompressionMethod.Deflate64:
|
||||
case ZipCompressionMethod.BZip2:
|
||||
|
||||
@@ -55,7 +55,13 @@ internal class ZipHeaderFactory
|
||||
}
|
||||
case POST_DATA_DESCRIPTOR:
|
||||
{
|
||||
if (FlagUtility.HasFlag(_lastEntryHeader!.Flags, HeaderFlags.UsePostDataDescriptor))
|
||||
if (
|
||||
_lastEntryHeader != null
|
||||
&& FlagUtility.HasFlag(
|
||||
_lastEntryHeader.NotNull().Flags,
|
||||
HeaderFlags.UsePostDataDescriptor
|
||||
)
|
||||
)
|
||||
{
|
||||
_lastEntryHeader.Crc = reader.ReadUInt32();
|
||||
_lastEntryHeader.CompressedSize = zip64
|
||||
@@ -142,8 +148,8 @@ internal class ZipHeaderFactory
|
||||
|
||||
if (entryHeader.CompressionMethod == ZipCompressionMethod.WinzipAes)
|
||||
{
|
||||
var data = entryHeader.Extra.SingleOrDefault(
|
||||
x => x.Type == ExtraDataType.WinZipAes
|
||||
var data = entryHeader.Extra.SingleOrDefault(x =>
|
||||
x.Type == ExtraDataType.WinZipAes
|
||||
);
|
||||
if (data != null)
|
||||
{
|
||||
|
||||
@@ -69,7 +69,7 @@ public sealed class BZip2Stream : Stream
|
||||
|
||||
public override void SetLength(long value) => stream.SetLength(value);
|
||||
|
||||
#if !NETFRAMEWORK && !NETSTANDARD2_0
|
||||
#if !NETFRAMEWORK&& !NETSTANDARD2_0
|
||||
|
||||
public override int Read(Span<byte> buffer) => stream.Read(buffer);
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#nullable disable
|
||||
#nullable disable
|
||||
|
||||
using System;
|
||||
using System.IO;
|
||||
@@ -42,14 +42,17 @@ internal class CBZip2InputStream : Stream
|
||||
private static void Cadvise()
|
||||
{
|
||||
//System.out.Println("CRC Error");
|
||||
//throw new CCoruptionError();
|
||||
throw new InvalidOperationException("BZip2 error");
|
||||
}
|
||||
|
||||
private static void BadBGLengths() => Cadvise();
|
||||
|
||||
private static void BitStreamEOF() => Cadvise();
|
||||
|
||||
private static void CompressedStreamEOF() => Cadvise();
|
||||
private static void CompressedStreamEOF()
|
||||
{
|
||||
throw new InvalidOperationException("BZip2 compressed file ends unexpectedly");
|
||||
}
|
||||
|
||||
private void MakeMaps()
|
||||
{
|
||||
@@ -87,7 +90,7 @@ internal class CBZip2InputStream : Stream
|
||||
|
||||
private int bsBuff;
|
||||
private int bsLive;
|
||||
private readonly CRC mCrc = new CRC();
|
||||
private readonly CRC mCrc = new();
|
||||
|
||||
private readonly bool[] inUse = new bool[256];
|
||||
private int nInUse;
|
||||
|
||||
@@ -284,7 +284,7 @@ internal sealed class CBZip2OutputStream : Stream
|
||||
private int bytesOut;
|
||||
private int bsBuff;
|
||||
private int bsLive;
|
||||
private readonly CRC mCrc = new CRC();
|
||||
private readonly CRC mCrc = new();
|
||||
|
||||
private readonly bool[] inUse = new bool[256];
|
||||
private int nInUse;
|
||||
|
||||
@@ -342,9 +342,9 @@ internal sealed partial class DeflateManager
|
||||
private readonly short[] dyn_dtree; // distance tree
|
||||
private readonly short[] bl_tree; // Huffman tree for bit lengths
|
||||
|
||||
private readonly Tree treeLiterals = new Tree(); // desc for literal tree
|
||||
private readonly Tree treeDistances = new Tree(); // desc for distance tree
|
||||
private readonly Tree treeBitLengths = new Tree(); // desc for bit length tree
|
||||
private readonly Tree treeLiterals = new(); // desc for literal tree
|
||||
private readonly Tree treeDistances = new(); // desc for distance tree
|
||||
private readonly Tree treeBitLengths = new(); // desc for bit length tree
|
||||
|
||||
// number of codes at each bit length for an optimal tree
|
||||
private readonly short[] bl_count = new short[InternalConstants.MAX_BITS + 1];
|
||||
@@ -1787,21 +1787,14 @@ internal sealed partial class DeflateManager
|
||||
return status == BUSY_STATE ? ZlibConstants.Z_DATA_ERROR : ZlibConstants.Z_OK;
|
||||
}
|
||||
|
||||
private void SetDeflater()
|
||||
{
|
||||
switch (config.Flavor)
|
||||
private void SetDeflater() =>
|
||||
DeflateFunction = config.Flavor switch
|
||||
{
|
||||
case DeflateFlavor.Store:
|
||||
DeflateFunction = DeflateNone;
|
||||
break;
|
||||
case DeflateFlavor.Fast:
|
||||
DeflateFunction = DeflateFast;
|
||||
break;
|
||||
case DeflateFlavor.Slow:
|
||||
DeflateFunction = DeflateSlow;
|
||||
break;
|
||||
}
|
||||
}
|
||||
DeflateFlavor.Store => DeflateNone,
|
||||
DeflateFlavor.Fast => DeflateFast,
|
||||
DeflateFlavor.Slow => DeflateSlow,
|
||||
_ => DeflateFunction
|
||||
};
|
||||
|
||||
internal int SetParams(CompressionLevel level, CompressionStrategy strategy)
|
||||
{
|
||||
|
||||
@@ -366,9 +366,5 @@ public class DeflateStream : Stream
|
||||
#endregion
|
||||
|
||||
public MemoryStream InputBuffer =>
|
||||
new MemoryStream(
|
||||
_baseStream._z.InputBuffer,
|
||||
_baseStream._z.NextIn,
|
||||
_baseStream._z.AvailableBytesIn
|
||||
);
|
||||
new(_baseStream._z.InputBuffer, _baseStream._z.NextIn, _baseStream._z.AvailableBytesIn);
|
||||
}
|
||||
|
||||
@@ -35,15 +35,7 @@ namespace SharpCompress.Compressors.Deflate;
|
||||
|
||||
public class GZipStream : Stream
|
||||
{
|
||||
internal static readonly DateTime UNIX_EPOCH = new DateTime(
|
||||
1970,
|
||||
1,
|
||||
1,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
DateTimeKind.Utc
|
||||
);
|
||||
internal static readonly DateTime UNIX_EPOCH = new(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
|
||||
|
||||
private string? _comment;
|
||||
private string? _fileName;
|
||||
|
||||
@@ -105,11 +105,11 @@ internal sealed class InflateBlocks
|
||||
internal int[] blens; // bit lengths of codes
|
||||
internal uint check; // check on output
|
||||
internal object checkfn; // check function
|
||||
internal InflateCodes codes = new InflateCodes(); // if CODES, current state
|
||||
internal InflateCodes codes = new(); // if CODES, current state
|
||||
internal int end; // one byte after sliding window
|
||||
internal int[] hufts; // single malloc for tree space
|
||||
internal int index; // index into blens (or border)
|
||||
internal InfTree inftree = new InfTree();
|
||||
internal InfTree inftree = new();
|
||||
internal int last; // true if this block is the last block
|
||||
internal int left; // if STORED, bytes left to copy
|
||||
private InflateBlockMode mode; // current inflate_block mode
|
||||
|
||||
@@ -102,7 +102,7 @@ internal class ZlibBaseStream : Stream
|
||||
{
|
||||
if (_z is null)
|
||||
{
|
||||
bool wantRfc1950Header = (_flavor == ZlibStreamFlavor.ZLIB);
|
||||
var wantRfc1950Header = (_flavor == ZlibStreamFlavor.ZLIB);
|
||||
_z = new ZlibCodec();
|
||||
if (_compressionMode == CompressionMode.Decompress)
|
||||
{
|
||||
@@ -147,13 +147,13 @@ internal class ZlibBaseStream : Stream
|
||||
z.InputBuffer = buffer;
|
||||
_z.NextIn = offset;
|
||||
_z.AvailableBytesIn = count;
|
||||
bool done = false;
|
||||
var done = false;
|
||||
do
|
||||
{
|
||||
_z.OutputBuffer = workingBuffer;
|
||||
_z.NextOut = 0;
|
||||
_z.AvailableBytesOut = _workingBuffer.Length;
|
||||
int rc = (_wantCompress) ? _z.Deflate(_flushMode) : _z.Inflate(_flushMode);
|
||||
var rc = (_wantCompress) ? _z.Deflate(_flushMode) : _z.Inflate(_flushMode);
|
||||
if (rc != ZlibConstants.Z_OK && rc != ZlibConstants.Z_STREAM_END)
|
||||
{
|
||||
throw new ZlibException((_wantCompress ? "de" : "in") + "flating: " + _z.Message);
|
||||
@@ -181,18 +181,18 @@ internal class ZlibBaseStream : Stream
|
||||
|
||||
if (_streamMode == StreamMode.Writer)
|
||||
{
|
||||
bool done = false;
|
||||
var done = false;
|
||||
do
|
||||
{
|
||||
_z.OutputBuffer = workingBuffer;
|
||||
_z.NextOut = 0;
|
||||
_z.AvailableBytesOut = _workingBuffer.Length;
|
||||
int rc =
|
||||
var rc =
|
||||
(_wantCompress) ? _z.Deflate(FlushType.Finish) : _z.Inflate(FlushType.Finish);
|
||||
|
||||
if (rc != ZlibConstants.Z_STREAM_END && rc != ZlibConstants.Z_OK)
|
||||
{
|
||||
string verb = (_wantCompress ? "de" : "in") + "flating";
|
||||
var verb = (_wantCompress ? "de" : "in") + "flating";
|
||||
if (_z.Message is null)
|
||||
{
|
||||
throw new ZlibException(String.Format("{0}: (rc = {1})", verb, rc));
|
||||
@@ -225,7 +225,7 @@ internal class ZlibBaseStream : Stream
|
||||
Span<byte> intBuf = stackalloc byte[4];
|
||||
BinaryPrimitives.WriteInt32LittleEndian(intBuf, crc.Crc32Result);
|
||||
_stream.Write(intBuf);
|
||||
int c2 = (int)(crc.TotalBytesRead & 0x00000000FFFFFFFF);
|
||||
var c2 = (int)(crc.TotalBytesRead & 0x00000000FFFFFFFF);
|
||||
BinaryPrimitives.WriteInt32LittleEndian(intBuf, c2);
|
||||
_stream.Write(intBuf);
|
||||
}
|
||||
@@ -256,8 +256,8 @@ internal class ZlibBaseStream : Stream
|
||||
{
|
||||
// Make sure we have read to the end of the stream
|
||||
_z.InputBuffer.AsSpan(_z.NextIn, _z.AvailableBytesIn).CopyTo(trailer);
|
||||
int bytesNeeded = 8 - _z.AvailableBytesIn;
|
||||
int bytesRead = _stream.Read(
|
||||
var bytesNeeded = 8 - _z.AvailableBytesIn;
|
||||
var bytesRead = _stream.Read(
|
||||
trailer.Slice(_z.AvailableBytesIn, bytesNeeded)
|
||||
);
|
||||
if (bytesNeeded != bytesRead)
|
||||
@@ -275,10 +275,10 @@ internal class ZlibBaseStream : Stream
|
||||
_z.InputBuffer.AsSpan(_z.NextIn, trailer.Length).CopyTo(trailer);
|
||||
}
|
||||
|
||||
Int32 crc32_expected = BinaryPrimitives.ReadInt32LittleEndian(trailer);
|
||||
Int32 crc32_actual = crc.Crc32Result;
|
||||
Int32 isize_expected = BinaryPrimitives.ReadInt32LittleEndian(trailer.Slice(4));
|
||||
Int32 isize_actual = (Int32)(_z.TotalBytesOut & 0x00000000FFFFFFFF);
|
||||
var crc32_expected = BinaryPrimitives.ReadInt32LittleEndian(trailer);
|
||||
var crc32_actual = crc.Crc32Result;
|
||||
var isize_expected = BinaryPrimitives.ReadInt32LittleEndian(trailer.Slice(4));
|
||||
var isize_actual = (Int32)(_z.TotalBytesOut & 0x00000000FFFFFFFF);
|
||||
|
||||
if (crc32_actual != crc32_expected)
|
||||
{
|
||||
@@ -380,11 +380,11 @@ internal class ZlibBaseStream : Stream
|
||||
private string ReadZeroTerminatedString()
|
||||
{
|
||||
var list = new List<byte>();
|
||||
bool done = false;
|
||||
var done = false;
|
||||
do
|
||||
{
|
||||
// workitem 7740
|
||||
int n = _stream.Read(_buf1, 0, 1);
|
||||
var n = _stream.Read(_buf1, 0, 1);
|
||||
if (n != 1)
|
||||
{
|
||||
throw new ZlibException("Unexpected EOF reading GZIP header.");
|
||||
@@ -398,17 +398,17 @@ internal class ZlibBaseStream : Stream
|
||||
list.Add(_buf1[0]);
|
||||
}
|
||||
} while (!done);
|
||||
byte[] buffer = list.ToArray();
|
||||
var buffer = list.ToArray();
|
||||
return _encoding.GetString(buffer, 0, buffer.Length);
|
||||
}
|
||||
|
||||
private int _ReadAndValidateGzipHeader()
|
||||
{
|
||||
int totalBytesRead = 0;
|
||||
var totalBytesRead = 0;
|
||||
|
||||
// read the header on the first read
|
||||
Span<byte> header = stackalloc byte[10];
|
||||
int n = _stream.Read(header);
|
||||
var n = _stream.Read(header);
|
||||
|
||||
// workitem 8501: handle edge case (decompress empty stream)
|
||||
if (n == 0)
|
||||
@@ -426,7 +426,7 @@ internal class ZlibBaseStream : Stream
|
||||
throw new ZlibException("Bad GZIP header.");
|
||||
}
|
||||
|
||||
int timet = BinaryPrimitives.ReadInt32LittleEndian(header.Slice(4));
|
||||
var timet = BinaryPrimitives.ReadInt32LittleEndian(header.Slice(4));
|
||||
_GzipMtime = TarHeader.EPOCH.AddSeconds(timet);
|
||||
totalBytesRead += n;
|
||||
if ((header[3] & 0x04) == 0x04)
|
||||
@@ -435,8 +435,8 @@ internal class ZlibBaseStream : Stream
|
||||
n = _stream.Read(header.Slice(0, 2)); // 2-byte length field
|
||||
totalBytesRead += n;
|
||||
|
||||
short extraLength = (short)(header[0] + header[1] * 256);
|
||||
byte[] extra = new byte[extraLength];
|
||||
var extraLength = (short)(header[0] + header[1] * 256);
|
||||
var extra = new byte[extraLength];
|
||||
n = _stream.Read(extra, 0, extra.Length);
|
||||
if (n != extraLength)
|
||||
{
|
||||
@@ -498,7 +498,7 @@ internal class ZlibBaseStream : Stream
|
||||
throw new ZlibException("Cannot Read after Writing.");
|
||||
}
|
||||
|
||||
int rc = 0;
|
||||
var rc = 0;
|
||||
|
||||
// set up the output of the deflate/inflate codec:
|
||||
_z.OutputBuffer = buffer;
|
||||
|
||||
@@ -118,8 +118,8 @@ public sealed class Deflate64Stream : Stream
|
||||
EnsureNotDisposed();
|
||||
|
||||
int bytesRead;
|
||||
int currentOffset = offset;
|
||||
int remainingCount = count;
|
||||
var currentOffset = offset;
|
||||
var remainingCount = count;
|
||||
|
||||
while (true)
|
||||
{
|
||||
@@ -142,7 +142,7 @@ public sealed class Deflate64Stream : Stream
|
||||
break;
|
||||
}
|
||||
|
||||
int bytes = _stream.Read(_buffer, 0, _buffer.Length);
|
||||
var bytes = _stream.Read(_buffer, 0, _buffer.Length);
|
||||
if (bytes <= 0)
|
||||
{
|
||||
break;
|
||||
|
||||
@@ -22,7 +22,7 @@ internal sealed class DeflateInput
|
||||
Debug.Assert(StartIndex + Count <= Buffer.Length, "Input buffer is in invalid state!");
|
||||
}
|
||||
|
||||
internal InputState DumpState() => new InputState(Count, StartIndex);
|
||||
internal InputState DumpState() => new(Count, StartIndex);
|
||||
|
||||
internal void RestoreState(InputState state)
|
||||
{
|
||||
|
||||
@@ -42,11 +42,9 @@ internal sealed class HuffmanTree
|
||||
private readonly int _tableMask;
|
||||
|
||||
// huffman tree for static block
|
||||
public static HuffmanTree StaticLiteralLengthTree { get; } =
|
||||
new HuffmanTree(GetStaticLiteralTreeLength());
|
||||
public static HuffmanTree StaticLiteralLengthTree { get; } = new(GetStaticLiteralTreeLength());
|
||||
|
||||
public static HuffmanTree StaticDistanceTree { get; } =
|
||||
new HuffmanTree(GetStaticDistanceTreeLength());
|
||||
public static HuffmanTree StaticDistanceTree { get; } = new(GetStaticDistanceTreeLength());
|
||||
|
||||
public HuffmanTree(byte[] codeLengths)
|
||||
{
|
||||
|
||||
@@ -243,8 +243,8 @@ internal sealed class InflaterManaged
|
||||
|
||||
private void Reset() =>
|
||||
_state = //_hasFormatReader ?
|
||||
//InflaterState.ReadingHeader : // start by reading Header info
|
||||
InflaterState.ReadingBFinal; // start by reading BFinal bit
|
||||
//InflaterState.ReadingHeader : // start by reading Header info
|
||||
InflaterState.ReadingBFinal; // start by reading BFinal bit
|
||||
|
||||
public void SetInput(byte[] inputBytes, int offset, int length) =>
|
||||
_input.SetInput(inputBytes, offset, length); // append the bytes
|
||||
|
||||
746 src/SharpCompress/Compressors/Explode/ExplodeStream.cs Normal file
@@ -0,0 +1,746 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using SharpCompress.Common.Zip.Headers;
|
||||
|
||||
namespace SharpCompress.Compressors.Explode;
|
||||
|
||||
public class ExplodeStream : Stream
|
||||
{
|
||||
private const int INVALID_CODE = 99;
|
||||
private const int WSIZE = 64 * 1024;
|
||||
|
||||
private readonly long unCompressedSize;
|
||||
private readonly int compressedSize;
|
||||
private readonly HeaderFlags generalPurposeBitFlag;
|
||||
private readonly Stream inStream;
|
||||
|
||||
private huftNode[]? hufLiteralCodeTable; /* literal code table */
|
||||
private huftNode[] hufLengthCodeTable = []; /* length code table */
|
||||
private huftNode[] hufDistanceCodeTable = []; /* distance code table */
|
||||
|
||||
private int bitsForLiteralCodeTable;
|
||||
private int bitsForLengthCodeTable;
|
||||
private int bitsForDistanceCodeTable;
|
||||
private int numOfUncodedLowerDistanceBits; /* number of uncoded lower distance bits */
|
||||
|
||||
private ulong bitBuffer;
|
||||
private int bitBufferCount;
|
||||
|
||||
private readonly byte[] windowsBuffer;
|
||||
private uint maskForLiteralCodeTable;
|
||||
private uint maskForLengthCodeTable;
|
||||
private uint maskForDistanceCodeTable;
|
||||
private uint maskForDistanceLowBits;
|
||||
private long outBytesCount;
|
||||
|
||||
private int windowIndex;
|
||||
private int distance;
|
||||
private int length;
|
||||
|
||||
internal ExplodeStream(
|
||||
Stream inStr,
|
||||
long compressedSize,
|
||||
long uncompressedSize,
|
||||
HeaderFlags generalPurposeBitFlag
|
||||
)
|
||||
{
|
||||
inStream = inStr;
|
||||
this.compressedSize = (int)compressedSize;
|
||||
unCompressedSize = (long)uncompressedSize;
|
||||
this.generalPurposeBitFlag = generalPurposeBitFlag;
|
||||
explode_SetTables();
|
||||
|
||||
windowsBuffer = new byte[WSIZE];
|
||||
explode_var_init();
|
||||
}
|
||||
|
||||
public override void Flush()
|
||||
{
|
||||
throw new NotImplementedException();
|
||||
}
|
||||
|
||||
public override long Seek(long offset, SeekOrigin origin)
|
||||
{
|
||||
throw new NotImplementedException();
|
||||
}
|
||||
|
||||
public override void SetLength(long value)
|
||||
{
|
||||
throw new NotImplementedException();
|
||||
}
|
||||
|
||||
public override void Write(byte[] buffer, int offset, int count)
|
||||
{
|
||||
throw new NotImplementedException();
|
||||
}
|
||||
|
||||
public override bool CanRead => true;
|
||||
public override bool CanSeek => false;
|
||||
public override bool CanWrite => false;
|
||||
public override long Length => unCompressedSize;
|
||||
public override long Position
|
||||
{
|
||||
get => outBytesCount;
|
||||
set { }
|
||||
}
|
||||
|
||||
static uint[] mask_bits = new uint[]
{
    0x0000, 0x0001, 0x0003, 0x0007, 0x000f, 0x001f, 0x003f, 0x007f,
    0x00ff, 0x01ff, 0x03ff, 0x07ff, 0x0fff, 0x1fff, 0x3fff, 0x7fff, 0xffff
};

/* Tables for length and distance */
static int[] cplen2 = new int[]
{
    2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17,
    18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33,
    34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49,
    50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65
};

static int[] cplen3 = new int[]
{
    3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18,
    19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34,
    35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50,
    51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66
};

static int[] extra = new int[]
{
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8
};

static int[] cpdist4 = new int[]
{
    1, 65, 129, 193, 257, 321, 385, 449, 513, 577, 641, 705, 769, 833, 897, 961,
    1025, 1089, 1153, 1217, 1281, 1345, 1409, 1473, 1537, 1601, 1665, 1729, 1793, 1857, 1921, 1985,
    2049, 2113, 2177, 2241, 2305, 2369, 2433, 2497, 2561, 2625, 2689, 2753, 2817, 2881, 2945, 3009,
    3073, 3137, 3201, 3265, 3329, 3393, 3457, 3521, 3585, 3649, 3713, 3777, 3841, 3905, 3969, 4033
};

static int[] cpdist8 = new int[]
{
    1, 129, 257, 385, 513, 641, 769, 897, 1025, 1153, 1281, 1409, 1537, 1665, 1793, 1921,
    2049, 2177, 2305, 2433, 2561, 2689, 2817, 2945, 3073, 3201, 3329, 3457, 3585, 3713, 3841, 3969,
    4097, 4225, 4353, 4481, 4609, 4737, 4865, 4993, 5121, 5249, 5377, 5505, 5633, 5761, 5889, 6017,
    6145, 6273, 6401, 6529, 6657, 6785, 6913, 7041, 7169, 7297, 7425, 7553, 7681, 7809, 7937, 8065
};
|
||||
|
||||
private int get_tree(int[] arrBitLengths, int numberExpected)
|
||||
/* Get the bit lengths for a code representation from the compressed
|
||||
stream. If get_tree() returns 4, then there is an error in the data.
|
||||
Otherwise zero is returned. */
|
||||
{
|
||||
/* get bit lengths */
|
||||
int inIndex = inStream.ReadByte() + 1; /* length/count pairs to read */
|
||||
int outIndex = 0; /* next code */
|
||||
do
|
||||
{
|
||||
int nextByte = inStream.ReadByte();
|
||||
int bitLengthOfCodes = (nextByte & 0xf) + 1; /* bits in code (1..16) */
|
||||
int numOfCodes = ((nextByte & 0xf0) >> 4) + 1; /* codes with those bits (1..16) */
|
||||
if (outIndex + numOfCodes > numberExpected)
|
||||
return 4; /* don't overflow arrBitLengths[] */
|
||||
do
|
||||
{
|
||||
arrBitLengths[outIndex++] = bitLengthOfCodes;
|
||||
} while ((--numOfCodes) != 0);
|
||||
} while ((--inIndex) != 0);
|
||||
|
||||
return outIndex != numberExpected ? 4 : 0; /* should have read numberExpected of them */
|
||||
}
|
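To make the encoding concrete: get_tree() reads a count byte (number of length/count pairs minus one), then for each pair byte the low nibble plus one is a bit length and the high nibble plus one is how many consecutive codes get that length. A small self-contained decoder for the same layout (illustrative helper, not part of the library):

// Decodes the implode tree description: data[0] + 1 pairs follow, each pair byte
// packing (bit length - 1) in the low nibble and (repeat count - 1) in the high nibble.
static int[] DecodeTreeLengths(byte[] data)
{
    var lengths = new System.Collections.Generic.List<int>();
    var pairs = data[0] + 1;
    for (var i = 1; i <= pairs; i++)
    {
        var bits = (data[i] & 0x0f) + 1;
        var count = ((data[i] & 0xf0) >> 4) + 1;
        for (var j = 0; j < count; j++)
        {
            lengths.Add(bits);
        }
    }
    return lengths.ToArray();
}

For example, the bytes 0x01, 0x23, 0x14 decode to bit lengths { 4, 4, 4, 5, 5 }.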
||||
|
||||
private int explode_SetTables()
|
||||
{
|
||||
int returnCode; /* return codes */
|
||||
int[] arrBitLengthsForCodes = new int[256]; /* bit lengths for codes */
|
||||
|
||||
bitsForLiteralCodeTable = 0; /* bits for tb */
|
||||
bitsForLengthCodeTable = 7;
|
||||
bitsForDistanceCodeTable = (compressedSize) > 200000 ? 8 : 7;
|
||||
|
||||
if ((generalPurposeBitFlag & HeaderFlags.Bit2) != 0)
|
||||
/* With literal tree--minimum match length is 3 */
|
||||
{
|
||||
bitsForLiteralCodeTable = 9; /* base table size for literals */
|
||||
if ((returnCode = get_tree(arrBitLengthsForCodes, 256)) != 0)
|
||||
return returnCode;
|
||||
|
||||
if (
|
||||
(
|
||||
returnCode = HuftTree.huftbuid(
|
||||
arrBitLengthsForCodes,
|
||||
256,
|
||||
256,
|
||||
[],
|
||||
[],
|
||||
out hufLiteralCodeTable,
|
||||
ref bitsForLiteralCodeTable
|
||||
)
|
||||
) != 0
|
||||
)
|
||||
return returnCode;
|
||||
|
||||
if ((returnCode = get_tree(arrBitLengthsForCodes, 64)) != 0)
|
||||
return returnCode;
|
||||
|
||||
if (
|
||||
(
|
||||
returnCode = HuftTree.huftbuid(
|
||||
arrBitLengthsForCodes,
|
||||
64,
|
||||
0,
|
||||
cplen3,
|
||||
extra,
|
||||
out hufLengthCodeTable,
|
||||
ref bitsForLengthCodeTable
|
||||
)
|
||||
) != 0
|
||||
)
|
||||
return returnCode;
|
||||
}
|
||||
else
|
||||
/* No literal tree--minimum match length is 2 */
|
||||
{
|
||||
if ((returnCode = get_tree(arrBitLengthsForCodes, 64)) != 0)
|
||||
return returnCode;
|
||||
|
||||
hufLiteralCodeTable = null;
|
||||
|
||||
if (
|
||||
(
|
||||
returnCode = HuftTree.huftbuid(
|
||||
arrBitLengthsForCodes,
|
||||
64,
|
||||
0,
|
||||
cplen2,
|
||||
extra,
|
||||
out hufLengthCodeTable,
|
||||
ref bitsForLengthCodeTable
|
||||
)
|
||||
) != 0
|
||||
)
|
||||
return returnCode;
|
||||
}
|
||||
|
||||
if ((returnCode = get_tree(arrBitLengthsForCodes, 64)) != 0)
|
||||
return (int)returnCode;
|
||||
|
||||
if ((generalPurposeBitFlag & HeaderFlags.Bit1) != 0) /* true if 8K */
|
||||
{
|
||||
numOfUncodedLowerDistanceBits = 7;
|
||||
returnCode = HuftTree.huftbuid(
|
||||
arrBitLengthsForCodes,
|
||||
64,
|
||||
0,
|
||||
cpdist8,
|
||||
extra,
|
||||
out hufDistanceCodeTable,
|
||||
ref bitsForDistanceCodeTable
|
||||
);
|
||||
}
|
||||
else /* else 4K */
|
||||
{
|
||||
numOfUncodedLowerDistanceBits = 6;
|
||||
returnCode = HuftTree.huftbuid(
|
||||
arrBitLengthsForCodes,
|
||||
64,
|
||||
0,
|
||||
cpdist4,
|
||||
extra,
|
||||
out hufDistanceCodeTable,
|
||||
ref bitsForDistanceCodeTable
|
||||
);
|
||||
}
|
||||
|
||||
return returnCode;
|
||||
}
|
||||
|
||||
private void NeedBits(int numberOfBits)
|
||||
{
|
||||
while (bitBufferCount < (numberOfBits))
|
||||
{
|
||||
bitBuffer |= (uint)inStream.ReadByte() << bitBufferCount;
|
||||
bitBufferCount += 8;
|
||||
}
|
||||
}
|
||||
|
||||
private void DumpBits(int numberOfBits)
|
||||
{
|
||||
bitBuffer >>= numberOfBits;
|
||||
bitBufferCount -= numberOfBits;
|
||||
}
|
||||
|
||||
int DecodeHuft(huftNode[] htab, int bits, uint mask, out huftNode huftPointer, out int e)
|
||||
{
|
||||
NeedBits(bits);
|
||||
|
||||
int tabOffset = (int)(~bitBuffer & mask);
|
||||
huftPointer = htab[tabOffset];
|
||||
|
||||
while (true)
|
||||
{
|
||||
DumpBits(huftPointer.NumberOfBitsUsed);
|
||||
e = huftPointer.NumberOfExtraBits;
|
||||
if (e <= 32)
|
||||
break;
|
||||
if (e == INVALID_CODE)
|
||||
return 1;
|
||||
|
||||
e &= 31;
|
||||
NeedBits(e);
|
||||
|
||||
tabOffset = (int)(~bitBuffer & mask_bits[e]);
|
||||
huftPointer = huftPointer.ChildNodes[tabOffset];
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
private void explode_var_init()
|
||||
{
|
||||
/* explode the coded data */
|
||||
bitBuffer = 0;
|
||||
bitBufferCount = 0;
|
||||
maskForLiteralCodeTable = mask_bits[bitsForLiteralCodeTable]; //only used in explode_lit
|
||||
maskForLengthCodeTable = mask_bits[bitsForLengthCodeTable];
|
||||
maskForDistanceCodeTable = mask_bits[bitsForDistanceCodeTable];
|
||||
maskForDistanceLowBits = mask_bits[numOfUncodedLowerDistanceBits];
|
||||
outBytesCount = 0;
|
||||
|
||||
windowIndex = 0; /* initialize bit buffer, window */
|
||||
}
|
||||
|
||||
public override int Read(byte[] buffer, int offset, int count)
|
||||
{
|
||||
int countIndex = 0;
|
||||
while (countIndex < count && outBytesCount < unCompressedSize) /* do until unCompressedSize bytes uncompressed */
|
||||
{
|
||||
if (length == 0)
|
||||
{
|
||||
NeedBits(1);
|
||||
bool literal = (bitBuffer & 1) == 1;
|
||||
DumpBits(1);
|
||||
|
||||
huftNode huftPointer;
|
||||
if (literal) /* then literal--decode it */
|
||||
{
|
||||
byte nextByte;
|
||||
if (hufLiteralCodeTable != null)
|
||||
{
|
||||
/* get coded literal */
|
||||
if (
|
||||
DecodeHuft(
|
||||
hufLiteralCodeTable,
|
||||
bitsForLiteralCodeTable,
|
||||
maskForLiteralCodeTable,
|
||||
out huftPointer,
|
||||
out _
|
||||
) != 0
|
||||
)
|
||||
throw new Exception("Error decoding literal value");
|
||||
|
||||
nextByte = (byte)huftPointer.Value;
|
||||
}
|
||||
else
|
||||
{
|
||||
NeedBits(8);
|
||||
nextByte = (byte)bitBuffer;
|
||||
DumpBits(8);
|
||||
}
|
||||
|
||||
buffer[offset + (countIndex++)] = nextByte;
|
||||
windowsBuffer[windowIndex++] = nextByte;
|
||||
outBytesCount++;
|
||||
|
||||
if (windowIndex == WSIZE)
|
||||
windowIndex = 0;
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
NeedBits(numOfUncodedLowerDistanceBits); /* get distance low bits */
|
||||
distance = (int)(bitBuffer & maskForDistanceLowBits);
|
||||
DumpBits(numOfUncodedLowerDistanceBits);
|
||||
|
||||
/* get coded distance high bits */
|
||||
if (
|
||||
DecodeHuft(
|
||||
hufDistanceCodeTable,
|
||||
bitsForDistanceCodeTable,
|
||||
maskForDistanceCodeTable,
|
||||
out huftPointer,
|
||||
out _
|
||||
) != 0
|
||||
)
|
||||
throw new Exception("Error decoding distance high bits");
|
||||
|
||||
distance = windowIndex - (distance + huftPointer.Value); /* construct offset */
|
||||
|
||||
/* get coded length */
|
||||
if (
|
||||
DecodeHuft(
|
||||
hufLengthCodeTable,
|
||||
bitsForLengthCodeTable,
|
||||
maskForLengthCodeTable,
|
||||
out huftPointer,
|
||||
out int extraBitLength
|
||||
) != 0
|
||||
)
|
||||
throw new Exception("Error decoding coded length");
|
||||
|
||||
length = huftPointer.Value;
|
||||
|
||||
if (extraBitLength != 0) /* get length extra bits */
|
||||
{
|
||||
NeedBits(8);
|
||||
length += (int)(bitBuffer & 0xff);
|
||||
DumpBits(8);
|
||||
}
|
||||
|
||||
if (length > (unCompressedSize - outBytesCount))
|
||||
length = (int)(unCompressedSize - outBytesCount);
|
||||
|
||||
distance &= WSIZE - 1;
|
||||
}
|
||||
|
||||
while (length != 0 && countIndex < count)
|
||||
{
|
||||
byte nextByte = windowsBuffer[distance++];
|
||||
buffer[offset + (countIndex++)] = nextByte;
|
||||
windowsBuffer[windowIndex++] = nextByte;
|
||||
outBytesCount++;
|
||||
|
||||
if (distance == WSIZE)
|
||||
distance = 0;
|
||||
|
||||
if (windowIndex == WSIZE)
|
||||
windowIndex = 0;
|
||||
|
||||
length--;
|
||||
}
|
||||
}
|
||||
|
||||
return countIndex;
|
||||
}
|
||||
}
|
||||
269 src/SharpCompress/Compressors/Explode/HuftTree.cs Normal file
@@ -0,0 +1,269 @@
|
||||
/*
|
||||
* This code has been converted to C# based on the original huft_tree code found in
|
||||
* inflate.c -- by Mark Adler version c17e, 30 Mar 2007
|
||||
*/
|
||||
|
||||
namespace SharpCompress.Compressors.Explode;
|
||||
|
||||
public class huftNode
|
||||
{
|
||||
public int NumberOfExtraBits; /* number of extra bits or operation */
|
||||
public int NumberOfBitsUsed; /* number of bits in this code or subcode */
|
||||
public int Value; /* literal, length base, or distance base */
|
||||
public huftNode[] ChildNodes = []; /* next level of table */
|
||||
}
|
||||
|
||||
public static class HuftTree
|
||||
{
|
||||
private const int INVALID_CODE = 99;
|
||||
|
||||
/* If BMAX needs to be larger than 16, then h and x[] should be ulg. */
|
||||
private const int BMAX = 16; /* maximum bit length of any code (16 for explode) */
|
||||
private const int N_MAX = 288; /* maximum number of codes in any set */
|
||||
|
||||
public static int huftbuid(
|
||||
int[] arrBitLengthForCodes,
|
||||
int numberOfCodes,
|
||||
int numberOfSimpleValueCodes,
|
||||
int[] arrBaseValuesForNonSimpleCodes,
|
||||
int[] arrExtraBitsForNonSimpleCodes,
|
||||
out huftNode[] outHufTable,
|
||||
ref int outBitsForTable
|
||||
)
|
||||
/* Given a list of code lengths and a maximum table size, make a set of
|
||||
tables to decode that set of codes. Return zero on success, one if
|
||||
the given code set is incomplete (the tables are still built in this
|
||||
case), two if the input is invalid (all zero length codes or an
|
||||
oversubscribed set of lengths), and three if not enough memory.
|
||||
The code with value 256 is special, and the tables are constructed
|
||||
so that no bits beyond that code are fetched when that code is
|
||||
decoded. */
|
||||
{
|
||||
outHufTable = [];
|
||||
|
||||
/* Generate counts for each bit length */
|
||||
int lengthOfEOBcode = numberOfCodes > 256 ? arrBitLengthForCodes[256] : BMAX; /* set length of EOB code, if any */
|
||||
|
||||
int[] arrBitLengthCount = new int[BMAX + 1];
|
||||
for (int i = 0; i < BMAX + 1; i++)
|
||||
arrBitLengthCount[i] = 0;
|
||||
|
||||
int pIndex = 0;
|
||||
int counterCurrentCode = numberOfCodes;
|
||||
do
|
||||
{
|
||||
arrBitLengthCount[arrBitLengthForCodes[pIndex]]++;
|
||||
pIndex++; /* assume all entries <= BMAX */
|
||||
} while ((--counterCurrentCode) != 0);
|
||||
|
||||
if (arrBitLengthCount[0] == numberOfCodes) /* null input--all zero length codes */
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
|
||||
/* Find minimum and maximum length, bound *outBitsForTable by those */
|
||||
int counter;
|
||||
for (counter = 1; counter <= BMAX; counter++)
|
||||
if (arrBitLengthCount[counter] != 0)
|
||||
break;
|
||||
|
||||
int numberOfBitsInCurrentCode = counter; /* minimum code length */
|
||||
if (outBitsForTable < counter)
|
||||
outBitsForTable = counter;
|
||||
|
||||
for (counterCurrentCode = BMAX; counterCurrentCode != 0; counterCurrentCode--)
|
||||
if (arrBitLengthCount[counterCurrentCode] != 0)
|
||||
break;
|
||||
|
||||
int maximumCodeLength = counterCurrentCode; /* maximum code length */
|
||||
if (outBitsForTable > counterCurrentCode)
|
||||
outBitsForTable = counterCurrentCode;
|
||||
|
||||
/* Adjust last length count to fill out codes, if needed */
|
||||
int numberOfDummyCodesAdded;
|
||||
for (
|
||||
numberOfDummyCodesAdded = 1 << counter;
|
||||
counter < counterCurrentCode;
|
||||
counter++, numberOfDummyCodesAdded <<= 1
|
||||
)
|
||||
if ((numberOfDummyCodesAdded -= arrBitLengthCount[counter]) < 0)
|
||||
return 2; /* bad input: more codes than bits */
|
||||
|
||||
if ((numberOfDummyCodesAdded -= arrBitLengthCount[counterCurrentCode]) < 0)
|
||||
return 2;
|
||||
|
||||
arrBitLengthCount[counterCurrentCode] += numberOfDummyCodesAdded;
|
||||
|
||||
/* Generate starting offsets into the value table for each length */
|
||||
int[] bitOffset = new int[BMAX + 1];
|
||||
bitOffset[1] = 0;
|
||||
counter = 0;
|
||||
pIndex = 1;
|
||||
int xIndex = 2;
|
||||
while ((--counterCurrentCode) != 0)
|
||||
{ /* note that i == g from above */
|
||||
bitOffset[xIndex++] = (counter += arrBitLengthCount[pIndex++]);
|
||||
}
|
||||
|
||||
/* Make a table of values in order of bit lengths */
|
||||
int[] arrValuesInOrderOfBitLength = new int[N_MAX];
|
||||
for (int i = 0; i < N_MAX; i++)
|
||||
arrValuesInOrderOfBitLength[i] = 0;
|
||||
|
||||
pIndex = 0;
|
||||
counterCurrentCode = 0;
|
||||
do
|
||||
{
|
||||
if ((counter = arrBitLengthForCodes[pIndex++]) != 0)
|
||||
arrValuesInOrderOfBitLength[bitOffset[counter]++] = counterCurrentCode;
|
||||
} while (++counterCurrentCode < numberOfCodes);
|
||||
|
||||
numberOfCodes = bitOffset[maximumCodeLength]; /* set numberOfCodes to length of v */
|
||||
|
||||
/* Generate the Huffman codes and for each, make the table entries */
|
||||
bitOffset[0] = counterCurrentCode = 0; /* first Huffman code is zero */
|
||||
pIndex = 0; /* grab values in bit order */
|
||||
int tableLevel = -1; /* no tables yet--level -1 */
|
||||
int bitsBeforeThisTable = 0;
|
||||
int[] arrLX = new int[BMAX + 1];
|
||||
int stackOfBitsPerTable = 1; /* stack of bits per table */
|
||||
arrLX[stackOfBitsPerTable - 1] = 0; /* no bits decoded yet */
|
||||
|
||||
huftNode[][] arrHufTableStack = new huftNode[BMAX][];
|
||||
huftNode[] pointerToCurrentTable = [];
|
||||
int numberOfEntriesInCurrentTable = 0;
|
||||
|
||||
bool first = true;
|
||||
|
||||
/* go through the bit lengths (k already is bits in shortest code) */
|
||||
for (; numberOfBitsInCurrentCode <= maximumCodeLength; numberOfBitsInCurrentCode++)
|
||||
{
|
||||
int counterForCodes = arrBitLengthCount[numberOfBitsInCurrentCode];
|
||||
while ((counterForCodes--) != 0)
|
||||
{
|
||||
/* here i is the Huffman code of length k bits for value *p */
|
||||
/* make tables up to required level */
|
||||
while (
|
||||
numberOfBitsInCurrentCode
|
||||
> bitsBeforeThisTable + arrLX[stackOfBitsPerTable + tableLevel]
|
||||
)
|
||||
{
|
||||
bitsBeforeThisTable += arrLX[stackOfBitsPerTable + (tableLevel++)]; /* add bits already decoded */
|
||||
|
||||
/* compute minimum size table less than or equal to *outBitsForTable bits */
|
||||
numberOfEntriesInCurrentTable =
|
||||
(numberOfEntriesInCurrentTable = maximumCodeLength - bitsBeforeThisTable)
|
||||
> outBitsForTable
|
||||
? outBitsForTable
|
||||
: numberOfEntriesInCurrentTable; /* upper limit */
|
||||
int fBitCounter1 =
|
||||
1 << (counter = numberOfBitsInCurrentCode - bitsBeforeThisTable);
|
||||
if (fBitCounter1 > counterForCodes + 1) /* try a k-w bit table */
|
||||
{ /* too few codes for k-w bit table */
|
||||
fBitCounter1 -= counterForCodes + 1; /* deduct codes from patterns left */
|
||||
xIndex = numberOfBitsInCurrentCode;
|
||||
while (++counter < numberOfEntriesInCurrentTable) /* try smaller tables up to z bits */
|
||||
{
|
||||
if ((fBitCounter1 <<= 1) <= arrBitLengthCount[++xIndex])
|
||||
break; /* enough codes to use up j bits */
|
||||
fBitCounter1 -= arrBitLengthCount[xIndex]; /* else deduct codes from patterns */
|
||||
}
|
||||
}
|
||||
if (
|
||||
bitsBeforeThisTable + counter > lengthOfEOBcode
|
||||
&& bitsBeforeThisTable < lengthOfEOBcode
|
||||
)
|
||||
counter = lengthOfEOBcode - bitsBeforeThisTable; /* make EOB code end at table */
|
||||
|
||||
numberOfEntriesInCurrentTable = 1 << counter; /* table entries for j-bit table */
|
||||
arrLX[stackOfBitsPerTable + tableLevel] = counter; /* set table size in stack */
|
||||
|
||||
/* allocate and link in new table */
|
||||
pointerToCurrentTable = new huftNode[numberOfEntriesInCurrentTable];
|
||||
|
||||
// set the pointer, pointed to by outHufTable, to the second huft in pointerToCurrentTable
|
||||
if (first)
|
||||
{
|
||||
outHufTable = pointerToCurrentTable; /* link to list for huft_free() */
|
||||
first = false;
|
||||
}
|
||||
|
||||
arrHufTableStack[tableLevel] = pointerToCurrentTable; /* table starts after link */
|
||||
|
||||
/* connect to last table, if there is one */
|
||||
if (tableLevel != 0)
|
||||
{
|
||||
bitOffset[tableLevel] = counterCurrentCode; /* save pattern for backing up */
|
||||
|
||||
huftNode vHuft = new huftNode
|
||||
{
|
||||
NumberOfBitsUsed = arrLX[stackOfBitsPerTable + tableLevel - 1], /* bits to dump before this table */
|
||||
NumberOfExtraBits = 32 + counter, /* bits in this table */
|
||||
ChildNodes = pointerToCurrentTable /* pointer to this table */
|
||||
};
|
||||
|
||||
counter =
|
||||
(counterCurrentCode & ((1 << bitsBeforeThisTable) - 1))
|
||||
>> (bitsBeforeThisTable - arrLX[stackOfBitsPerTable + tableLevel - 1]);
|
||||
arrHufTableStack[tableLevel - 1][counter] = vHuft; /* connect to last table */
|
||||
}
|
||||
}
|
||||
|
||||
/* set up table entry in r */
|
||||
huftNode vHuft1 = new huftNode
|
||||
{
|
||||
NumberOfBitsUsed = numberOfBitsInCurrentCode - bitsBeforeThisTable
|
||||
};
|
||||
|
||||
if (pIndex >= numberOfCodes)
|
||||
vHuft1.NumberOfExtraBits = INVALID_CODE; /* out of values--invalid code */
|
||||
else if (arrValuesInOrderOfBitLength[pIndex] < numberOfSimpleValueCodes)
|
||||
{
|
||||
vHuft1.NumberOfExtraBits = (
|
||||
arrValuesInOrderOfBitLength[pIndex] < 256 ? 32 : 31
|
||||
); /* 256 is end-of-block code */
|
||||
vHuft1.Value = arrValuesInOrderOfBitLength[pIndex++]; /* simple code is just the value */
|
||||
}
|
||||
else
|
||||
{
|
||||
vHuft1.NumberOfExtraBits = arrExtraBitsForNonSimpleCodes[
|
||||
arrValuesInOrderOfBitLength[pIndex] - numberOfSimpleValueCodes
|
||||
]; /* non-simple--look up in lists */
|
||||
vHuft1.Value = arrBaseValuesForNonSimpleCodes[
|
||||
arrValuesInOrderOfBitLength[pIndex++] - numberOfSimpleValueCodes
|
||||
];
|
||||
}
|
||||
|
||||
/* fill code-like entries with r */
|
||||
int fBitCounter2 = 1 << (numberOfBitsInCurrentCode - bitsBeforeThisTable);
|
||||
for (
|
||||
counter = counterCurrentCode >> bitsBeforeThisTable;
|
||||
counter < numberOfEntriesInCurrentTable;
|
||||
counter += fBitCounter2
|
||||
)
|
||||
pointerToCurrentTable[counter] = vHuft1;
|
||||
|
||||
/* backwards increment the k-bit code i */
|
||||
for (
|
||||
counter = 1 << (numberOfBitsInCurrentCode - 1);
|
||||
(counterCurrentCode & counter) != 0;
|
||||
counter >>= 1
|
||||
)
|
||||
counterCurrentCode ^= counter;
|
||||
counterCurrentCode ^= counter;
|
||||
|
||||
/* backup over finished tables */
|
||||
while (
|
||||
(counterCurrentCode & ((1 << bitsBeforeThisTable) - 1)) != bitOffset[tableLevel]
|
||||
)
|
||||
bitsBeforeThisTable -= arrLX[stackOfBitsPerTable + (--tableLevel)];
|
||||
}
|
||||
}
|
||||
|
||||
/* return actual size of base table */
|
||||
outBitsForTable = arrLX[stackOfBitsPerTable];
|
||||
|
||||
/* Return true (1) if we were given an incomplete table */
|
||||
return (numberOfDummyCodesAdded != 0 && maximumCodeLength != 1) ? 1 : 0;
|
||||
}
|
||||
}
|
||||
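Side note on the "backwards increment the k-bit code" step above: the code pattern is stored bit-reversed, so incrementing it means clearing 1-bits from the top of the k-bit value downward and setting the first 0-bit reached, the same trick zlib's huft_build uses. A minimal standalone sketch of just that step (names here are illustrative and not taken from the source):

// Increment a bit-reversed Huffman code of codeLengthInBits bits, mirroring
// the two-statement loop above (clear 1-bits from the top bit downward, then
// set the first 0-bit found; an all-ones code wraps around to zero).
static int IncrementReversedCode(int code, int codeLengthInBits)
{
    var bit = 1 << (codeLengthInBits - 1);
    while ((code & bit) != 0)
    {
        code ^= bit;
        bit >>= 1;
    }
    return code ^ bit;
}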
@@ -18,7 +18,7 @@ internal class BCJFilterARM : Filter
        {
            if ((buffer[i + 3] & 0xFF) == 0xEB)
            {
-                int src =
+                var src =
                    ((buffer[i + 2] & 0xFF) << 16)
                    | ((buffer[i + 1] & 0xFF) << 8)
                    | (buffer[i] & 0xFF);
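For context, the 0xEB check above matches an unconditional ARM BL instruction, and the three shifts reassemble its 24-bit word offset from the little-endian instruction bytes. An equivalent, span-based way to read the same field (a sketch only; the helper name is made up and not part of the diff):

using System;
using System.Buffers.Binary;

internal static class ArmBlSketch
{
    // Read the low 24 bits of the little-endian instruction word starting at i,
    // which is the BL word offset the hunk above assembles byte by byte.
    public static int ReadBlOffset(byte[] buffer, int i)
    {
        var word = BinaryPrimitives.ReadUInt32LittleEndian(buffer.AsSpan(i, 4));
        return (int)(word & 0x00FFFFFF);
    }
}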
src/SharpCompress/Compressors/Filters/BCJFilterARM64.cs (new file, 63 lines)
@@ -0,0 +1,63 @@
using System;
using System.Buffers.Binary;
using System.IO;

namespace SharpCompress.Compressors.Filters;

internal class BCJFilterARM64 : Filter
{
    private int _pos;

    public BCJFilterARM64(bool isEncoder, Stream baseStream)
        : base(isEncoder, baseStream, 8) => _pos = 0;

    protected override int Transform(byte[] buffer, int offset, int count)
    {
        var end = offset + count - 4;
        int i;

        for (i = offset; i <= end; i += 4)
        {
            uint pc = (uint)(_pos + i - offset);
            uint instr = BinaryPrimitives.ReadUInt32LittleEndian(
                new ReadOnlySpan<byte>(buffer, i, 4)
            );

            if ((instr >> 26) == 0x25)
            {
                uint src = instr;
                instr = 0x94000000;

                pc >>= 2;
                if (!_isEncoder)
                    pc = 0U - pc;

                instr |= (src + pc) & 0x03FFFFFF;
                BinaryPrimitives.WriteUInt32LittleEndian(new Span<byte>(buffer, i, 4), instr);
            }
            else if ((instr & 0x9F000000) == 0x90000000)
            {
                uint src = ((instr >> 29) & 3) | ((instr >> 3) & 0x001FFFFC);

                if (((src + 0x00020000) & 0x001C0000) != 0)
                    continue;

                instr &= 0x9000001F;

                pc >>= 12;
                if (!_isEncoder)
                    pc = 0U - pc;

                uint dest = src + pc;
                instr |= (dest & 3) << 29;
                instr |= (dest & 0x0003FFFC) << 3;
                instr |= (0U - (dest & 0x00020000)) & 0x00E00000;
                BinaryPrimitives.WriteUInt32LittleEndian(new Span<byte>(buffer, i, 4), instr);
            }
        }

        i -= offset;
        _pos += i;
        return i;
    }
}
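The first branch of the new filter rewrites ARM64 BL instructions (opcode bits 100101, i.e. (instr >> 26) == 0x25) by adding the word-granular position to the 26-bit immediate when encoding and subtracting it when decoding. A small standalone sketch of that round trip, with made-up values, just to show the two directions cancel out:

using System;

// Hypothetical values: a BL with a small positive offset, sitting at byte offset 0x2000.
uint original = 0x94000000u | 0x000123u;
uint pc = 0x2000u >> 2; // position in 4-byte words, as in the filter above

// Encode: add pc to the 26-bit immediate, keep the BL opcode bits.
uint encoded = 0x94000000u | ((original + pc) & 0x03FFFFFF);

// Decode: the filter negates pc, which subtracts it again modulo 2^26.
uint decoded = 0x94000000u | ((encoded + (0u - pc)) & 0x03FFFFFF);

Console.WriteLine(decoded == original); // True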
@@ -18,7 +18,7 @@ internal class BCJFilterARMT : Filter
        {
            if ((buffer[i + 1] & 0xF8) == 0xF0 && (buffer[i + 3] & 0xF8) == 0xF8)
            {
-                int src =
+                var src =
                    ((buffer[i + 1] & 0x07) << 19)
                    | ((buffer[i] & 0xFF) << 11)
                    | ((buffer[i + 3] & 0x07) << 8)
@@ -52,29 +52,29 @@ internal class BCJFilterIA64 : Filter

        for (i = offset; i <= end; i += 16)
        {
-            int instrTemplate = buffer[i] & 0x1F;
-            int mask = BRANCH_TABLE[instrTemplate];
+            var instrTemplate = buffer[i] & 0x1F;
+            var mask = BRANCH_TABLE[instrTemplate];

            for (int slot = 0, bitPos = 5; slot < 3; ++slot, bitPos += 41)
            {
                if (((mask >>> slot) & 1) == 0)
                    continue;

-                int bytePos = bitPos >>> 3;
-                int bitRes = bitPos & 7;
+                var bytePos = bitPos >>> 3;
+                var bitRes = bitPos & 7;

                long instr = 0;
-                for (int j = 0; j < 6; ++j)
+                for (var j = 0; j < 6; ++j)
                {
                    instr |= (buffer[i + bytePos + j] & 0xFFL) << (8 * j);
                }

-                long instrNorm = instr >>> bitRes;
+                var instrNorm = instr >>> bitRes;

                if (((instrNorm >>> 37) & 0x0F) != 0x05 || ((instrNorm >>> 9) & 0x07) != 0x00)
                    continue;

-                int src = (int)((instrNorm >>> 13) & 0x0FFFFF);
+                var src = (int)((instrNorm >>> 13) & 0x0FFFFF);
                src |= ((int)(instrNorm >>> 36) & 1) << 20;
                src <<= 4;

@@ -93,7 +93,7 @@ internal class BCJFilterIA64 : Filter
                instr &= (1 << bitRes) - 1;
                instr |= instrNorm << bitRes;

-                for (int j = 0; j < 6; ++j)
+                for (var j = 0; j < 6; ++j)
                {
                    buffer[i + bytePos + j] = (byte)(instr >>> (8 * j));
                }
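The IA-64 loop above relies on the bundle layout: each 16-byte bundle starts with a 5-bit template followed by three 41-bit instruction slots at bit offsets 5, 46 and 87, which is where the bitPos/bytePos/bitRes arithmetic comes from. A rough sketch of just the slot extraction (illustrative only; it mirrors the arithmetic above but uses unsigned types instead of the filter's long plus >>>):

// Extract the three 41-bit slots from one 16-byte IA-64 bundle.
static ulong[] ReadSlots(byte[] bundle)
{
    var slots = new ulong[3];
    for (int slot = 0, bitPos = 5; slot < 3; ++slot, bitPos += 41)
    {
        var bytePos = bitPos >> 3; // first byte that contains the slot
        var bitRes = bitPos & 7;   // bit offset of the slot inside that byte

        // Six bytes always cover the 41 slot bits plus the sub-byte offset.
        ulong window = 0;
        for (var j = 0; j < 6; ++j)
        {
            window |= (ulong)bundle[bytePos + j] << (8 * j);
        }

        slots[slot] = (window >> bitRes) & ((1UL << 41) - 1);
    }
    return slots;
}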
Some files were not shown because too many files have changed in this diff.