Compare commits

..

81 Commits

Author SHA1 Message Date
Adam Hathcock
7fe27ac310 Mark for 0.38 2024-09-02 09:09:57 +01:00
Adam Hathcock
1e300349ce Merge pull request #868 from kikaragyozov/patch-1
Fix small typo in USAGE.md
2024-09-02 07:43:30 +01:00
Kiril Karagyozov
6b01a7b08e Fix small typo in USAGE.md 2024-08-29 12:11:19 +03:00
Adam Hathcock
34d948df18 Merge pull request #866 from TwanVanDongen/master
Added shrink, reduce and implode to FORMATS
2024-08-22 16:07:23 +01:00
Twan
27091c4f1d Update FORMATS.md 2024-08-21 19:09:14 +02:00
Twan
970a3d7f2a Update FORMATS.md 2024-08-21 19:08:40 +02:00
Twan
2bedbbfc54 Update FORMATS.md 2024-08-21 19:06:14 +02:00
Adam Hathcock
8de33f0db3 Merge pull request #864 from adamhathcock/update-csproj
Update csproj to get green marks and update deps
2024-08-12 16:08:28 +01:00
Adam Hathcock
df4eab67dc Update csproj to get green marks and update deps 2024-08-08 08:41:51 +01:00
Adam Hathcock
2d13bc0046 Merge pull request #860 from lostmsu/7zSFX
Added support for 7zip SFX archives
2024-08-06 08:54:12 +01:00
Victor Nova
704a0cb35d added support for 7zip SFX archives by handling ReaderOptions.LookForHeader 2024-08-05 23:11:15 -07:00
Adam Hathcock
06a983e445 Merge pull request #859 from DineshSolanki/#858-fix-invalid-character-in-filename
Fix #858 - Replaces invalid filename characters
2024-07-30 08:22:01 +01:00
Dinesh Solanki
2d10df8b87 Fix #858 - Replaces invalid filename characters
Added a method to replace invalid characters in file names with underscores during file extraction. This prevents errors related to invalid file names.
2024-07-26 00:16:44 +05:30
Adam Hathcock
baf66db9dc format 2024-07-24 08:31:44 +01:00
GordonJ
3545693999 Added Tests and supporting Files. 2024-07-23 14:05:07 -05:00
gjefferyes
84fb99c2c8 Merge branch 'adamhathcock:master' into master 2024-07-23 13:58:48 -05:00
Adam Hathcock
21e2983ae1 Merge pull request #857 from alexprabhat99/master
Fix for missing empty directories when using ExtractToDirectory
2024-07-18 08:34:20 +01:00
Alex Prabhat Bara
004e0941d5 code formatted using csharpier 2024-07-16 20:12:01 +05:30
Alex Prabhat Bara
188a426dde fix for missing empty directories when using ExtractToDirectory 2024-07-16 16:20:04 +05:30
Adam Hathcock
6fcfae8bfe Merge pull request #855 from Erior/feature/Check-tar-crc-on-header
Check crc on tar header
2024-07-12 08:35:27 +01:00
Lars Vahlenberg
9515350f52 Remove using directive 2024-07-11 19:56:46 +02:00
Lars Vahlenberg
6b88f82656 Handle special case, empty file 2024-07-11 19:52:33 +02:00
Lars Vahlenberg
e42d953f47 Check crc on tar header 2024-07-10 19:53:32 +02:00
gjefferyes
9c257faf26 Merge branch 'master' into master 2024-06-26 06:28:55 -05:00
Adam Hathcock
d18cad6d76 Merge pull request #852 from LANCommander/fix-post-zip64-entry-subsequent-extractions
Fixed extractions after first ZIP64 entry is read from stream
2024-06-26 08:31:58 +01:00
GordonJ
061273be22 Added Export and (un)Reduce to sharpCompress 2024-06-25 11:35:11 -05:00
Pat Hartl
b89de6caad Fix formatting 2024-06-24 17:19:53 -05:00
Pat Hartl
9bc0a1d7c7 Null reference checking
Reorders this null reference check to avoid throwing a null reference exception.
2024-06-23 22:30:34 -05:00
Pat Hartl
eee518b7fa Reworked ZIP64 handling to separate block
The last commit made in this branch messed up some ZIP reading and caused a bunch of tests to fail. These changes branch off ZIP64 logic into its own block so that data is read correctly for 64 and non-64 entries.
2024-06-23 22:29:33 -05:00
Pat Hartl
b7b78edaa3 Fixed extractions after first ZIP64 entry is read from stream 2024-06-22 00:09:25 -05:00
Adam Hathcock
3eaac68ab4 Merge pull request #850 from Erior/feature/Issue-842
Issue 842
2024-06-18 13:45:53 +01:00
Adam Hathcock
a7672190e9 Merge branch 'master' into feature/Issue-842 2024-06-18 13:43:22 +01:00
Adam Hathcock
4e4e89b6eb Merge pull request #849 from Erior/develop
Fix for issue #844
2024-06-18 13:41:52 +01:00
Lars Vahlenberg
33dd519f56 Throw exception when bzip2 is corrupt 2024-06-08 18:26:12 +02:00
Lars Vahlenberg
5c1149aa8b #844 2024-06-08 17:22:20 +02:00
Adam Hathcock
9061e92af6 Merge pull request #848 from Morilli/fix-gzip-archivetype
Fix gzip archives having a `Type` of `ArchiveType.Tar` instead of `ArchiveType.Gzip`
2024-06-06 08:21:14 +01:00
Morilli
49f5ceaa9b Fix GZipArchive getting Type set to ArchiveType.Tar 2024-06-04 10:34:06 +02:00
Morilli
525b309d37 Add failing test 2024-06-04 10:33:32 +02:00
Adam Hathcock
bdb3a787fc Merge pull request #847 from DannyBoyk/846_tar_longlinkname
Tar: Add processing for the LongLink header type
2024-06-04 08:47:57 +01:00
Daniel Nash
a9601ef848 Tar: Add processing for the LongLink header type
Fixes #846
2024-06-03 12:54:19 -04:00
Adam Hathcock
6fc4b045fd mark for 0.37.2 2024-04-27 09:34:32 +01:00
Adam Hathcock
446852c7d0 really fix source link and central usage 2024-04-27 09:34:05 +01:00
Adam Hathcock
c635f00899 mark as 0.37.1 2024-04-27 09:12:17 +01:00
Adam Hathcock
1393629bc5 Mark sourcelink as PrivateAssets="All" 2024-04-27 06:15:29 +01:00
Adam Hathcock
49ce17b759 update zstdsharp.port and net8 is only trimmable 2024-04-25 08:35:52 +01:00
Adam Hathcock
74888021c8 Merge pull request #835 from Blokyk/fix-736
Prevent infinite loop when reading corrupted archive
2024-04-24 09:20:44 +01:00
Adam Hathcock
9483856439 fmt 2024-04-24 09:17:42 +01:00
blokyk
dbbc7c8132 fix(tar): prevent infinite loop when reading corrupted archive 2024-04-24 03:13:13 +02:00
Adam Hathcock
b203d165f5 Mark for 0.37.0 2024-04-23 10:25:32 +01:00
Adam Hathcock
c695e1136d Merge pull request #828 from adamhathcock/remove-netstandard20
Remove ~netstandard20~ just net7.0
2024-04-23 10:18:24 +01:00
Adam Hathcock
d847202308 add back net standard 2.0 2024-04-23 09:59:30 +01:00
Adam Hathcock
9d24e0a4b8 set package locks and central management 2024-04-23 09:37:25 +01:00
Adam Hathcock
745fe1eb9f references 2024-04-23 09:28:33 +01:00
Adam Hathcock
3e52b85e9d Merge remote-tracking branch 'origin/master' into remove-netstandard20
# Conflicts:
#	.config/dotnet-tools.json
2024-04-23 09:26:44 +01:00
Adam Hathcock
d26f020b50 Merge pull request #832 from adamhathcock/remove-ignored-nulls
Remove ignored nulls
2024-04-23 09:25:08 +01:00
Adam Hathcock
095b5f702c get rid of another null! 2024-04-23 09:20:20 +01:00
Adam Hathcock
9622853b8d fix and fmt 2024-04-23 09:16:05 +01:00
Adam Hathcock
b94e75fabe try to fix more tests 2024-04-23 09:06:49 +01:00
Adam Hathcock
23dd041e2e fix some tests 2024-04-23 08:52:10 +01:00
Adam Hathcock
c73ca21b4d fmt 2024-04-22 15:19:05 +01:00
Adam Hathcock
7ebdc85ad2 more null clean up 2024-04-22 15:17:24 +01:00
Adam Hathcock
99e2c8c90d more clean up 2024-04-22 15:10:22 +01:00
Adam Hathcock
f24bfdf945 fix tests 2024-04-22 14:57:08 +01:00
Adam Hathcock
7963233702 add missing usings 2024-04-22 14:18:41 +01:00
Adam Hathcock
b550df2038 get rid of more ! and update csharpier 2024-04-22 14:17:08 +01:00
Adam Hathcock
fb55624f5f add more null handling 2024-04-18 14:25:10 +01:00
Adam Hathcock
e96366f489 Entry can be null and remove other ! usages 2024-04-18 13:24:03 +01:00
Adam Hathcock
900190cf54 Merge pull request #829 from NeuroXiq/patch-1
Update README.md - Change API Docs to DNDocs
2024-04-15 08:14:16 +01:00
Marek Węglarz
2af744b474 Update README.md 2024-04-15 04:28:15 +02:00
Adam Hathcock
11153084e2 update csharpier 2024-04-11 15:47:39 +01:00
Adam Hathcock
4b9c814bfc remove .netstandard 2.0 and clean up 2024-04-11 15:46:43 +01:00
Adam Hathcock
1b5d3a3b6e Merge pull request #825 from adamhathcock/tar-corruption
Fix tar corruption when sizes mismatch
2024-04-11 13:11:29 +01:00
Adam Hathcock
373637e6a7 more logic fixes 2024-04-11 09:05:45 +01:00
Adam Hathcock
cb223217c1 actually, transfer block is different than overall transfer 2024-04-10 16:12:01 +01:00
Adam Hathcock
eab97a3f8b calculate remaining afterwards 2024-04-10 08:53:20 +01:00
Adam Hathcock
fdfaa8ab45 add max transfer size to tar 2024-04-09 15:35:15 +01:00
Adam Hathcock
2321d9dbee fix patch 2024-04-09 08:56:15 +01:00
Adam Hathcock
bf74dd887a Fix tar corruption when sizes mismatch 2024-04-09 08:19:23 +01:00
Adam Hathcock
3612035894 Merge pull request #823 from klimatr26/new-7z-filters
Add support for 7z ARM64 and RISCV filters
2024-04-08 09:56:07 +01:00
Adam Hathcock
6553e9b0cd formatting 2024-04-08 09:50:37 +01:00
klimatr26
09f2410170 Add support for 7z ARM64 and RISCV filters 2024-04-05 15:00:43 -05:00
119 changed files with 3399 additions and 1648 deletions

View File

@@ -3,7 +3,7 @@
"isRoot": true,
"tools": {
"csharpier": {
"version": "0.27.3",
"version": "0.28.1",
"commands": [
"dotnet-csharpier"
]

View File

@@ -70,7 +70,7 @@ indent_style = tab
[*.{cs,csx,cake,vb,vbx}]
# Default Severity for all .NET Code Style rules below
dotnet_analyzer_diagnostic.severity = warning
dotnet_analyzer_diagnostic.severity = silent
##########################################
# File Header (Uncomment to support file headers)
@@ -269,6 +269,8 @@ dotnet_diagnostic.CA1305.severity = suggestion
dotnet_diagnostic.CA1307.severity = suggestion
dotnet_diagnostic.CA1309.severity = suggestion
dotnet_diagnostic.CA1310.severity = error
dotnet_diagnostic.CA1507.severity = suggestion
dotnet_diagnostic.CA1513.severity = suggestion
dotnet_diagnostic.CA1707.severity = suggestion
dotnet_diagnostic.CA1708.severity = suggestion
dotnet_diagnostic.CA1711.severity = suggestion
@@ -286,6 +288,7 @@ dotnet_diagnostic.CA1834.severity = error
dotnet_diagnostic.CA1845.severity = suggestion
dotnet_diagnostic.CA1848.severity = suggestion
dotnet_diagnostic.CA1852.severity = suggestion
dotnet_diagnostic.CA1860.severity = silent
dotnet_diagnostic.CA2016.severity = suggestion
dotnet_diagnostic.CA2201.severity = error
dotnet_diagnostic.CA2206.severity = error
@@ -303,13 +306,12 @@ dotnet_diagnostic.CS1998.severity = error
dotnet_diagnostic.CS8602.severity = error
dotnet_diagnostic.CS8604.severity = error
dotnet_diagnostic.CS8618.severity = error
dotnet_diagnostic.CS0618.severity = error
dotnet_diagnostic.CS0618.severity = suggestion
dotnet_diagnostic.CS1998.severity = error
dotnet_diagnostic.CS4014.severity = error
dotnet_diagnostic.CS8600.severity = error
dotnet_diagnostic.CS8603.severity = error
dotnet_diagnostic.CS8625.severity = error
dotnet_diagnostic.CS8981.severity = suggestion
dotnet_diagnostic.BL0005.severity = suggestion
@@ -318,7 +320,7 @@ dotnet_diagnostic.MVC1000.severity = suggestion
dotnet_diagnostic.RZ10012.severity = error
dotnet_diagnostic.IDE0004.severity = error # redundant cast
dotnet_diagnostic.IDE0005.severity = error
dotnet_diagnostic.IDE0005.severity = suggestion
dotnet_diagnostic.IDE0007.severity = error # Use var
dotnet_diagnostic.IDE0011.severity = error # Use braces on if statements
dotnet_diagnostic.IDE0010.severity = silent # populate switch
@@ -329,7 +331,7 @@ dotnet_diagnostic.IDE0023.severity = suggestion # use expression body for operat
dotnet_diagnostic.IDE0024.severity = silent # expression body for operators
dotnet_diagnostic.IDE0025.severity = suggestion # use expression body for properties
dotnet_diagnostic.IDE0027.severity = suggestion # Use expression body for accessors
dotnet_diagnostic.IDE0028.severity = silent
dotnet_diagnostic.IDE0028.severity = silent # expression body for accessors
dotnet_diagnostic.IDE0032.severity = suggestion # Use auto property
dotnet_diagnostic.IDE0033.severity = error # prefer tuple name
dotnet_diagnostic.IDE0037.severity = suggestion # simplify anonymous type
@@ -337,7 +339,7 @@ dotnet_diagnostic.IDE0040.severity = error # modifiers required
dotnet_diagnostic.IDE0041.severity = error # simplify null
dotnet_diagnostic.IDE0042.severity = error # deconstruct variable
dotnet_diagnostic.IDE0044.severity = suggestion # make field only when possible
dotnet_diagnostic.IDE0047.severity = suggestion # paratemeter name
dotnet_diagnostic.IDE0047.severity = suggestion # parameter name
dotnet_diagnostic.IDE0051.severity = error # unused field
dotnet_diagnostic.IDE0052.severity = error # unused member
dotnet_diagnostic.IDE0053.severity = suggestion # lambda not needed
@@ -351,11 +353,20 @@ dotnet_diagnostic.IDE0066.severity = suggestion # switch expression
dotnet_diagnostic.IDE0072.severity = suggestion # Populate switch - forces population of all cases even when default specified
dotnet_diagnostic.IDE0078.severity = suggestion # use pattern matching
dotnet_diagnostic.IDE0090.severity = suggestion # new can be simplified
dotnet_diagnostic.IDE0130.severity = error # namespace folder structure
dotnet_diagnostic.IDE0130.severity = suggestion # namespace folder structure
dotnet_diagnostic.IDE0160.severity = silent # Use block namespaces ARE NOT required
dotnet_diagnostic.IDE0161.severity = error # Please use file namespaces
dotnet_diagnostic.IDE0200.severity = suggestion # lambda not needed
dotnet_diagnostic.IDE1006.severity = suggestion # Naming rule violation: These words cannot contain lower case characters
dotnet_diagnostic.IDE0260.severity = suggestion # Use pattern matching
dotnet_diagnostic.IDE0270.severity = suggestion # Null check simplifcation
dotnet_diagnostic.IDE0290.severity = error # Primary Constructor
dotnet_diagnostic.IDE0300.severity = suggestion # Collection
dotnet_diagnostic.IDE0305.severity = suggestion # Collection ToList
dotnet_diagnostic.NX0001.severity = error
dotnet_diagnostic.NX0002.severity = silent
dotnet_diagnostic.NX0003.severity = silent
##########################################
# Styles

View File

@@ -10,5 +10,7 @@
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
<RunAnalyzersDuringLiveAnalysis>False</RunAnalyzersDuringLiveAnalysis>
<RunAnalyzersDuringBuild>False</RunAnalyzersDuringBuild>
<ManagePackageVersionsCentrally>true</ManagePackageVersionsCentrally>
<RestorePackagesWithLockFile>true</RestorePackagesWithLockFile>
</PropertyGroup>
</Project>

18
Directory.Packages.props Normal file
View File

@@ -0,0 +1,18 @@
<Project>
<ItemGroup>
<PackageVersion Include="Bullseye" Version="5.0.0" />
<PackageVersion Include="FluentAssertions" Version="6.12.0" />
<PackageVersion Include="Glob" Version="1.1.9" />
<PackageVersion Include="Microsoft.Bcl.AsyncInterfaces" Version="8.0.0" />
<PackageVersion Include="Microsoft.NET.Test.Sdk" Version="17.10.0" />
<PackageVersion Include="Mono.Posix.NETStandard" Version="1.0.0" />
<PackageVersion Include="SimpleExec" Version="12.0.0" />
<PackageVersion Include="System.Memory" Version="4.5.5" />
<PackageVersion Include="System.Text.Encoding.CodePages" Version="8.0.0" />
<PackageVersion Include="xunit" Version="2.9.0" />
<PackageVersion Include="xunit.runner.visualstudio" Version="2.8.2" />
<PackageVersion Include="xunit.SkippableFact" Version="1.4.13" />
<PackageVersion Include="ZstdSharp.Port" Version="0.8.1" />
<GlobalPackageReference Include="Microsoft.SourceLink.GitHub" Version="8.0.0" />
</ItemGroup>
</Project>

View File

@@ -11,7 +11,7 @@
| Archive Format | Compression Format(s) | Compress/Decompress | Archive API | Reader API | Writer API |
| ---------------------- | ------------------------------------------------- | ------------------- | --------------- | ---------- | ------------- |
| Rar | Rar | Decompress (1) | RarArchive | RarReader | N/A |
| Zip (2) | None, DEFLATE, Deflate64, BZip2, LZMA/LZMA2, PPMd | Both | ZipArchive | ZipReader | ZipWriter |
| Zip (2) | None, Shrink, Reduce, Implode, DEFLATE, Deflate64, BZip2, LZMA/LZMA2, PPMd | Both | ZipArchive | ZipReader | ZipWriter |
| Tar | None | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.GZip | DEFLATE | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.BZip2 | BZip2 | Both | TarArchive | TarReader | TarWriter (3) |

9
NuGet.config Normal file
View File

@@ -0,0 +1,9 @@
<?xml version="1.0" encoding="utf-8"?>
<configuration>
<packageSourceMapping>
<!-- key value for <packageSource> should match key values from <packageSources> element -->
<packageSource key="nuget.org">
<package pattern="*" />
</packageSource>
</packageSourceMapping>
</configuration>

View File

@@ -1,12 +1,12 @@
# SharpCompress
SharpCompress is a compression library in pure C# for .NET Standard 2.0, 2.1, .NET Core 3.1 and .NET 5.0 that can unrar, un7zip, unzip, untar unbzip2, ungzip, unlzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip are implemented.
SharpCompress is a compression library in pure C# for .NET Framework 4.62, .NET Standard 2.1, .NET 6.0 and NET 8.0 that can unrar, un7zip, unzip, untar unbzip2, ungzip, unlzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip are implemented.
The major feature is support for non-seekable streams so large files can be processed on the fly (i.e. download stream).
GitHub Actions Build -
[![SharpCompress](https://github.com/adamhathcock/sharpcompress/actions/workflows/dotnetcore.yml/badge.svg)](https://github.com/adamhathcock/sharpcompress/actions/workflows/dotnetcore.yml)
[![Static Badge](https://img.shields.io/badge/API%20Documentation-RobiniaDocs-43bc00?logo=readme&logoColor=white)](https://www.robiniadocs.com/d/sharpcompress/api/SharpCompress.html)
[![Static Badge](https://img.shields.io/badge/API%20Docs-DNDocs-190088?logo=readme&logoColor=white)](https://dndocs.com/d/sharpcompress/api/index.html)
## Need Help?

View File

@@ -17,6 +17,9 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Config", "Config", "{CDB425
ProjectSection(SolutionItems) = preProject
Directory.Build.props = Directory.Build.props
global.json = global.json
.editorconfig = .editorconfig
Directory.Packages.props = Directory.Packages.props
NuGet.config = NuGet.config
EndProjectSection
EndProject
Global

View File

@@ -79,6 +79,10 @@
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=PrivateStaticFields/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=PrivateStaticReadonly/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/PredefinedNamingRules/=StaticReadonly/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/UserRules/=15b5b1f1_002D457c_002D4ca6_002Db278_002D5615aedc07d3/@EntryIndexedValue">&lt;Policy&gt;&lt;Descriptor Staticness="Static" AccessRightKinds="Private" Description="Static readonly fields (private)"&gt;&lt;ElementKinds&gt;&lt;Kind Name="READONLY_FIELD" /&gt;&lt;/ElementKinds&gt;&lt;/Descriptor&gt;&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;&lt;/Policy&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/UserRules/=4a98fdf6_002D7d98_002D4f5a_002Dafeb_002Dea44ad98c70c/@EntryIndexedValue">&lt;Policy&gt;&lt;Descriptor Staticness="Instance" AccessRightKinds="Private" Description="Instance fields (private)"&gt;&lt;ElementKinds&gt;&lt;Kind Name="FIELD" /&gt;&lt;Kind Name="READONLY_FIELD" /&gt;&lt;/ElementKinds&gt;&lt;/Descriptor&gt;&lt;Policy Inspect="True" Prefix="_" Suffix="" Style="aaBb" /&gt;&lt;/Policy&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/UserRules/=c873eafb_002Dd57f_002D481d_002D8c93_002D77f6863c2f88/@EntryIndexedValue">&lt;Policy&gt;&lt;Descriptor Staticness="Static" AccessRightKinds="Protected, ProtectedInternal, Internal, Public, PrivateProtected" Description="Static readonly fields (not private)"&gt;&lt;ElementKinds&gt;&lt;Kind Name="READONLY_FIELD" /&gt;&lt;/ElementKinds&gt;&lt;/Descriptor&gt;&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;&lt;/Policy&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/CSharpNaming/UserRules/=f9fce829_002De6f4_002D4cb2_002D80f1_002D5497c44f51df/@EntryIndexedValue">&lt;Policy&gt;&lt;Descriptor Staticness="Static" AccessRightKinds="Private" Description="Static fields (private)"&gt;&lt;ElementKinds&gt;&lt;Kind Name="FIELD" /&gt;&lt;/ElementKinds&gt;&lt;/Descriptor&gt;&lt;Policy Inspect="True" Prefix="" Suffix="" Style="AA_BB" /&gt;&lt;/Policy&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FBLOCK_005FSCOPE_005FCONSTANT/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FBLOCK_005FSCOPE_005FFUNCTION/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
<s:String x:Key="/Default/CodeStyle/Naming/JavaScriptNaming/UserRules/=JS_005FBLOCK_005FSCOPE_005FVARIABLE/@EntryIndexedValue">&lt;Policy Inspect="True" Prefix="" Suffix="" Style="aaBb" /&gt;</s:String>
@@ -127,6 +131,7 @@
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002ECSharpPlaceAttributeOnSameLineMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateBlankLinesAroundFieldToBlankLinesAroundProperty/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateThisQualifierSettings/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EPredefinedNamingRulesToUserRulesUpgrade/@EntryIndexedValue">True</s:Boolean>
<s:String x:Key="/Default/Environment/UnitTesting/UnitTestSessionStore/Sessions/=6af8f80e_002D9fdd_002D4223_002D8e02_002D473db916f9b2/@EntryIndexedValue">&lt;SessionState ContinuousTestingIsOn="False" ContinuousTestingMode="0" FrameworkVersion="{x:Null}" IsLocked="False" Name="All tests from Solution" PlatformMonoPreference="{x:Null}" PlatformType="{x:Null}" xmlns="urn:schemas-jetbrains-com:jetbrains-ut-session" xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"&gt;
&lt;Solution /&gt;
&lt;/SessionState&gt;</s:String></wpf:ResourceDictionary>

View File

@@ -27,7 +27,7 @@ To deal with the "correct" rules as well as the expectations of users, I've deci
To be explicit though, consider always using the overloads that use `ReaderOptions` or `WriterOptions` and explicitly set `LeaveStreamOpen` the way you want.
If using Compression Stream classes directly and you don't want the wrapped stream to be closed. Use the `NonDisposingStream` as a wrapped to prevent the stream being disposed. The change in 0.21 simplified a lot even though the usage is a bit more convoluted.
If using Compression Stream classes directly and you don't want the wrapped stream to be closed. Use the `NonDisposingStream` as a wrapper to prevent the stream being disposed. The change in 0.21 simplified a lot even though the usage is a bit more convoluted.
## Samples

View File

@@ -2,13 +2,13 @@
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net7.0</TargetFramework>
<TargetFramework>net8.0</TargetFramework>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Bullseye" Version="4.2.1" />
<PackageReference Include="Glob" Version="1.1.9" />
<PackageReference Include="SimpleExec" Version="11.0.0" />
<PackageReference Include="Bullseye" />
<PackageReference Include="Glob" />
<PackageReference Include="SimpleExec" />
</ItemGroup>
</Project>

45
build/packages.lock.json Normal file
View File

@@ -0,0 +1,45 @@
{
"version": 2,
"dependencies": {
"net8.0": {
"Bullseye": {
"type": "Direct",
"requested": "[5.0.0, )",
"resolved": "5.0.0",
"contentHash": "bqyt+m17ym+5aN45C5oZRAjuLDt8jKiCm/ys1XfymIXSkrTFwvI/QsbY3ucPSHDz7SF7uON7B57kXFv5H2k1ew=="
},
"Glob": {
"type": "Direct",
"requested": "[1.1.9, )",
"resolved": "1.1.9",
"contentHash": "AfK5+ECWYTP7G3AAdnU8IfVj+QpGjrh9GC2mpdcJzCvtQ4pnerAGwHsxJ9D4/RnhDUz2DSzd951O/lQjQby2Sw=="
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==",
"dependencies": {
"Microsoft.Build.Tasks.Git": "8.0.0",
"Microsoft.SourceLink.Common": "8.0.0"
}
},
"SimpleExec": {
"type": "Direct",
"requested": "[12.0.0, )",
"resolved": "12.0.0",
"contentHash": "ptxlWtxC8vM6Y6e3h9ZTxBBkOWnWrm/Sa1HT+2i1xcXY3Hx2hmKDZP5RShPf8Xr9D+ivlrXNy57ktzyH8kyt+Q=="
},
"Microsoft.Build.Tasks.Git": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
}
}
}
}

View File

@@ -12,39 +12,35 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
where TEntry : IArchiveEntry
where TVolume : IVolume
{
private readonly LazyReadOnlyCollection<TVolume> lazyVolumes;
private readonly LazyReadOnlyCollection<TEntry> lazyEntries;
private readonly LazyReadOnlyCollection<TVolume> _lazyVolumes;
private readonly LazyReadOnlyCollection<TEntry> _lazyEntries;
private bool _disposed;
private readonly SourceStream? _sourceStream;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionBegin;
public event EventHandler<ArchiveExtractionEventArgs<IArchiveEntry>>? EntryExtractionEnd;
public event EventHandler<CompressedBytesReadEventArgs>? CompressedBytesRead;
public event EventHandler<FilePartExtractionBeginEventArgs>? FilePartExtractionBegin;
protected ReaderOptions ReaderOptions { get; }
private bool disposed;
protected SourceStream SrcStream;
internal AbstractArchive(ArchiveType type, SourceStream srcStream)
internal AbstractArchive(ArchiveType type, SourceStream sourceStream)
{
Type = type;
ReaderOptions = srcStream.ReaderOptions;
SrcStream = srcStream;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(SrcStream));
lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
ReaderOptions = sourceStream.ReaderOptions;
_sourceStream = sourceStream;
_lazyVolumes = new LazyReadOnlyCollection<TVolume>(LoadVolumes(_sourceStream));
_lazyEntries = new LazyReadOnlyCollection<TEntry>(LoadEntries(Volumes));
}
#nullable disable
internal AbstractArchive(ArchiveType type)
{
Type = type;
lazyVolumes = new LazyReadOnlyCollection<TVolume>(Enumerable.Empty<TVolume>());
lazyEntries = new LazyReadOnlyCollection<TEntry>(Enumerable.Empty<TEntry>());
ReaderOptions = new();
_lazyVolumes = new LazyReadOnlyCollection<TVolume>(Enumerable.Empty<TVolume>());
_lazyEntries = new LazyReadOnlyCollection<TEntry>(Enumerable.Empty<TEntry>());
}
#nullable enable
public ArchiveType Type { get; }
void IArchiveExtractionListener.FireEntryExtractionBegin(IArchiveEntry entry) =>
@@ -65,12 +61,12 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
/// <summary>
/// Returns an ReadOnlyCollection of all the RarArchiveEntries across the one or many parts of the RarArchive.
/// </summary>
public virtual ICollection<TEntry> Entries => lazyEntries;
public virtual ICollection<TEntry> Entries => _lazyEntries;
/// <summary>
/// Returns an ReadOnlyCollection of all the RarArchiveVolumes across the one or many parts of the RarArchive.
/// </summary>
public ICollection<TVolume> Volumes => lazyVolumes;
public ICollection<TVolume> Volumes => _lazyVolumes;
/// <summary>
/// The total size of the files compressed in the archive.
@@ -84,29 +80,29 @@ public abstract class AbstractArchive<TEntry, TVolume> : IArchive, IArchiveExtra
public virtual long TotalUncompressSize =>
Entries.Aggregate(0L, (total, cf) => total + cf.Size);
protected abstract IEnumerable<TVolume> LoadVolumes(SourceStream srcStream);
protected abstract IEnumerable<TVolume> LoadVolumes(SourceStream sourceStream);
protected abstract IEnumerable<TEntry> LoadEntries(IEnumerable<TVolume> volumes);
IEnumerable<IArchiveEntry> IArchive.Entries => Entries.Cast<IArchiveEntry>();
IEnumerable<IVolume> IArchive.Volumes => lazyVolumes.Cast<IVolume>();
IEnumerable<IVolume> IArchive.Volumes => _lazyVolumes.Cast<IVolume>();
public virtual void Dispose()
{
if (!disposed)
if (!_disposed)
{
lazyVolumes.ForEach(v => v.Dispose());
lazyEntries.GetLoaded().Cast<Entry>().ForEach(x => x.Close());
SrcStream?.Dispose();
_lazyVolumes.ForEach(v => v.Dispose());
_lazyEntries.GetLoaded().Cast<Entry>().ForEach(x => x.Close());
_sourceStream?.Dispose();
disposed = true;
_disposed = true;
}
}
void IArchiveExtractionListener.EnsureEntriesLoaded()
{
lazyEntries.EnsureFullyLoaded();
lazyVolumes.EnsureFullyLoaded();
_lazyEntries.EnsureFullyLoaded();
_lazyVolumes.EnsureFullyLoaded();
}
void IExtractionListener.FireCompressedBytesRead(

View File

@@ -41,8 +41,8 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
internal AbstractWritableArchive(ArchiveType type)
: base(type) { }
internal AbstractWritableArchive(ArchiveType type, SourceStream srcStream)
: base(type, srcStream) { }
internal AbstractWritableArchive(ArchiveType type, SourceStream sourceStream)
: base(type, sourceStream) { }
public override ICollection<TEntry> Entries
{
@@ -120,6 +120,10 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
{
foreach (var path in Entries.Select(x => x.Key))
{
if (path is null)
{
continue;
}
var p = path.Replace('/', '\\');
if (p.Length > 0 && p[0] == '\\')
{

View File

@@ -239,4 +239,6 @@ public static class ArchiveFactory
}
}
}
public static IArchiveFactory AutoFactory { get; } = new AutoArchiveFactory();
}

View File

@@ -0,0 +1,27 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
class AutoArchiveFactory : IArchiveFactory
{
public string Name => nameof(AutoArchiveFactory);
public ArchiveType? KnownArchiveType => null;
public IEnumerable<string> GetSupportedExtensions() => throw new NotSupportedException();
public bool IsArchive(Stream stream, string? password = null) =>
throw new NotSupportedException();
public FileInfo? GetFilePart(int index, FileInfo part1) => throw new NotSupportedException();
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
ArchiveFactory.Open(stream, readerOptions);
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
ArchiveFactory.Open(fileInfo, readerOptions);
}

View File

@@ -90,7 +90,7 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
{
stream.CheckNotNull(nameof(stream));
return new GZipArchive(
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
);
}
@@ -99,16 +99,14 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal GZipArchive(SourceStream srcStream)
: base(ArchiveType.Tar, srcStream) { }
/// <param name="sourceStream"></param>
private GZipArchive(SourceStream sourceStream)
: base(ArchiveType.GZip, sourceStream) { }
protected override IEnumerable<GZipVolume> LoadVolumes(SourceStream srcStream)
protected override IEnumerable<GZipVolume> LoadVolumes(SourceStream sourceStream)
{
srcStream.LoadAllParts();
var idx = 0;
return srcStream.Streams.Select(a => new GZipVolume(a, ReaderOptions, idx++));
sourceStream.LoadAllParts();
return sourceStream.Streams.Select(a => new GZipVolume(a, ReaderOptions, 0));
}
public static bool IsGZipFile(string filePath) => IsGZipFile(new FileInfo(filePath));
@@ -184,7 +182,11 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
{
using var entryStream = entry.OpenEntryStream();
writer.Write(entry.Key, entryStream, entry.LastModifiedTime);
writer.Write(
entry.Key.NotNull("Entry Key is null"),
entryStream,
entry.LastModifiedTime
);
}
}

View File

@@ -6,7 +6,7 @@ namespace SharpCompress.Archives.GZip;
public class GZipArchiveEntry : GZipEntry, IArchiveEntry
{
internal GZipArchiveEntry(GZipArchive archive, GZipFilePart part)
internal GZipArchiveEntry(GZipArchive archive, GZipFilePart? part)
: base(part) => Archive = archive;
public virtual Stream OpenEntryStream()

View File

@@ -1,5 +1,3 @@
#nullable disable
using System;
using System.Collections.Generic;
using System.IO;
@@ -32,7 +30,7 @@ internal sealed class GZipWritableArchiveEntry : GZipArchiveEntry, IWritableArch
public override long Crc => 0;
public override string Key { get; }
public override string? Key { get; }
public override long CompressedSize => 0;

View File

@@ -17,15 +17,11 @@ public static class IArchiveEntryExtensions
streamListener.EnsureEntriesLoaded();
streamListener.FireEntryExtractionBegin(archiveEntry);
streamListener.FireFilePartExtractionBegin(
archiveEntry.Key,
archiveEntry.Key ?? "Key",
archiveEntry.Size,
archiveEntry.CompressedSize
);
var entryStream = archiveEntry.OpenEntryStream();
if (entryStream is null)
{
return;
}
using (entryStream)
{
using Stream s = new ListeningStream(streamListener, entryStream);

View File

@@ -3,7 +3,6 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Archives;
@@ -55,14 +54,26 @@ public static class IArchiveExtensions
var entry = entries.Entry;
if (entry.IsDirectory)
{
var dirPath = Path.Combine(destination, entry.Key.NotNull("Entry Key is null"));
if (
Path.GetDirectoryName(dirPath + "/") is { } emptyDirectory
&& seenDirectories.Add(dirPath)
)
{
Directory.CreateDirectory(emptyDirectory);
}
continue;
}
// Create each directory
var path = Path.Combine(destination, entry.Key);
if (Path.GetDirectoryName(path) is { } directory && seenDirectories.Add(path))
// Create each directory if not already created
var path = Path.Combine(destination, entry.Key.NotNull("Entry Key is null"));
if (Path.GetDirectoryName(path) is { } directory)
{
Directory.CreateDirectory(directory);
if (!Directory.Exists(directory) && !seenDirectories.Contains(directory))
{
Directory.CreateDirectory(directory);
seenDirectories.Add(directory);
}
}
// Write file

View File

@@ -13,7 +13,7 @@ namespace SharpCompress.Archives.Rar;
/// </summary>
internal class FileInfoRarArchiveVolume : RarVolume
{
internal FileInfoRarArchiveVolume(FileInfo fileInfo, ReaderOptions options, int index = 0)
internal FileInfoRarArchiveVolume(FileInfo fileInfo, ReaderOptions options, int index)
: base(StreamingMode.Seekable, fileInfo.OpenRead(), FixOptions(options), index)
{
FileInfo = fileInfo;

View File

@@ -21,35 +21,33 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal RarArchive(SourceStream srcStream)
: base(ArchiveType.Rar, srcStream) { }
/// <param name="sourceStream"></param>
private RarArchive(SourceStream sourceStream)
: base(ArchiveType.Rar, sourceStream) { }
protected override IEnumerable<RarArchiveEntry> LoadEntries(IEnumerable<RarVolume> volumes) =>
RarArchiveEntryFactory.GetEntries(this, volumes, ReaderOptions);
protected override IEnumerable<RarVolume> LoadVolumes(SourceStream srcStream)
protected override IEnumerable<RarVolume> LoadVolumes(SourceStream sourceStream)
{
SrcStream.LoadAllParts(); //request all streams
var streams = SrcStream.Streams.ToArray();
var idx = 0;
sourceStream.LoadAllParts(); //request all streams
var streams = sourceStream.Streams.ToArray();
var i = 0;
if (streams.Length > 1 && IsRarFile(streams[1], ReaderOptions)) //test part 2 - true = multipart not split
{
SrcStream.IsVolumes = true;
sourceStream.IsVolumes = true;
streams[1].Position = 0;
SrcStream.Position = 0;
sourceStream.Position = 0;
return srcStream.Streams.Select(a => new StreamRarArchiveVolume(
return sourceStream.Streams.Select(a => new StreamRarArchiveVolume(
a,
ReaderOptions,
idx++
i++
));
}
else //split mode or single file
{
return new StreamRarArchiveVolume(SrcStream, ReaderOptions, idx++).AsEnumerable();
}
//split mode or single file
return new StreamRarArchiveVolume(sourceStream, ReaderOptions, i++).AsEnumerable();
}
protected override IReader CreateReaderForSolidExtraction()
@@ -108,7 +106,7 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
public static RarArchive Open(Stream stream, ReaderOptions? options = null)
{
stream.CheckNotNull(nameof(stream));
return new RarArchive(new SourceStream(stream, i => null, options ?? new ReaderOptions()));
return new RarArchive(new SourceStream(stream, _ => null, options ?? new ReaderOptions()));
}
/// <summary>

View File

@@ -6,8 +6,8 @@ namespace SharpCompress.Archives.Rar;
internal class SeekableFilePart : RarFilePart
{
private readonly Stream stream;
private readonly string? password;
private readonly Stream _stream;
private readonly string? _password;
internal SeekableFilePart(
MarkHeader mh,
@@ -18,27 +18,27 @@ internal class SeekableFilePart : RarFilePart
)
: base(mh, fh, index)
{
this.stream = stream;
this.password = password;
_stream = stream;
_password = password;
}
internal override Stream GetCompressedStream()
{
stream.Position = FileHeader.DataStartPosition;
_stream.Position = FileHeader.DataStartPosition;
if (FileHeader.R4Salt != null)
{
var cryptKey = new CryptKey3(password!);
return new RarCryptoWrapper(stream, FileHeader.R4Salt, cryptKey);
var cryptKey = new CryptKey3(_password!);
return new RarCryptoWrapper(_stream, FileHeader.R4Salt, cryptKey);
}
if (FileHeader.Rar5CryptoInfo != null)
{
var cryptKey = new CryptKey5(password!, FileHeader.Rar5CryptoInfo);
return new RarCryptoWrapper(stream, FileHeader.Rar5CryptoInfo.Salt, cryptKey);
var cryptKey = new CryptKey5(_password!, FileHeader.Rar5CryptoInfo);
return new RarCryptoWrapper(_stream, FileHeader.Rar5CryptoInfo.Salt, cryptKey);
}
return stream;
return _stream;
}
internal override string FilePartName => "Unknown Stream - File Entry: " + FileHeader.FileName;

View File

@@ -9,7 +9,7 @@ namespace SharpCompress.Archives.Rar;
internal class StreamRarArchiveVolume : RarVolume
{
internal StreamRarArchiveVolume(Stream stream, ReaderOptions options, int index = 0)
internal StreamRarArchiveVolume(Stream stream, ReaderOptions options, int index)
: base(StreamingMode.Seekable, stream, options, index) { }
internal override IEnumerable<RarFilePart> ReadFileParts() => GetVolumeFileParts();

View File

@@ -1,5 +1,3 @@
#nullable disable
using System;
using System.Collections.Generic;
using System.IO;
@@ -14,14 +12,14 @@ namespace SharpCompress.Archives.SevenZip;
public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVolume>
{
private ArchiveDatabase database;
private ArchiveDatabase? _database;
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
/// <param name="filePath"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(string filePath, ReaderOptions readerOptions = null)
public static SevenZipArchive Open(string filePath, ReaderOptions? readerOptions = null)
{
filePath.CheckNotNullOrEmpty("filePath");
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
@@ -32,7 +30,7 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
public static SevenZipArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null)
{
fileInfo.CheckNotNull("fileInfo");
return new SevenZipArchive(
@@ -51,7 +49,7 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(
IEnumerable<FileInfo> fileInfos,
ReaderOptions readerOptions = null
ReaderOptions? readerOptions = null
)
{
fileInfos.CheckNotNull(nameof(fileInfos));
@@ -72,7 +70,7 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(
IEnumerable<Stream> streams,
ReaderOptions readerOptions = null
ReaderOptions? readerOptions = null
)
{
streams.CheckNotNull(nameof(streams));
@@ -91,27 +89,25 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
/// </summary>
/// <param name="stream"></param>
/// <param name="readerOptions"></param>
public static SevenZipArchive Open(Stream stream, ReaderOptions readerOptions = null)
public static SevenZipArchive Open(Stream stream, ReaderOptions? readerOptions = null)
{
stream.CheckNotNull("stream");
return new SevenZipArchive(
new SourceStream(stream, i => null, readerOptions ?? new ReaderOptions())
new SourceStream(stream, _ => null, readerOptions ?? new ReaderOptions())
);
}
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal SevenZipArchive(SourceStream srcStream)
: base(ArchiveType.SevenZip, srcStream) { }
/// <param name="sourceStream"></param>
private SevenZipArchive(SourceStream sourceStream)
: base(ArchiveType.SevenZip, sourceStream) { }
protected override IEnumerable<SevenZipVolume> LoadVolumes(SourceStream srcStream)
protected override IEnumerable<SevenZipVolume> LoadVolumes(SourceStream sourceStream)
{
SrcStream.LoadAllParts(); //request all streams
var idx = 0;
return new SevenZipVolume(srcStream, ReaderOptions, idx++).AsEnumerable(); //simple single volume or split, multivolume not supported
sourceStream.NotNull("SourceStream is null").LoadAllParts(); //request all streams
return new SevenZipVolume(sourceStream, ReaderOptions, 0).AsEnumerable(); //simple single volume or split, multivolume not supported
}
public static bool IsSevenZipFile(string filePath) => IsSevenZipFile(new FileInfo(filePath));
@@ -135,13 +131,17 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
{
var stream = volumes.Single().Stream;
LoadFactory(stream);
var entries = new SevenZipArchiveEntry[database._files.Count];
for (var i = 0; i < database._files.Count; i++)
if (_database is null)
{
var file = database._files[i];
return Enumerable.Empty<SevenZipArchiveEntry>();
}
var entries = new SevenZipArchiveEntry[_database._files.Count];
for (var i = 0; i < _database._files.Count; i++)
{
var file = _database._files[i];
entries[i] = new SevenZipArchiveEntry(
this,
new SevenZipFilePart(stream, database, i, file, ReaderOptions.ArchiveEncoding)
new SevenZipFilePart(stream, _database, i, file, ReaderOptions.ArchiveEncoding)
);
}
foreach (var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder))
@@ -159,12 +159,12 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
private void LoadFactory(Stream stream)
{
if (database is null)
if (_database is null)
{
stream.Position = 0;
var reader = new ArchiveReader();
reader.Open(stream);
database = reader.ReadDatabase(new PasswordProvider(ReaderOptions.Password));
reader.Open(stream, lookForHeader: ReaderOptions.LookForHeader);
_database = reader.ReadDatabase(new PasswordProvider(ReaderOptions.Password));
}
}
@@ -180,14 +180,14 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
}
}
private static ReadOnlySpan<byte> SIGNATURE =>
private static ReadOnlySpan<byte> Signature =>
new byte[] { (byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C };
private static bool SignatureMatch(Stream stream)
{
var reader = new BinaryReader(stream);
ReadOnlySpan<byte> signatureBytes = reader.ReadBytes(6);
return signatureBytes.SequenceEqual(SIGNATURE);
return signatureBytes.SequenceEqual(Signature);
}
protected override IReader CreateReaderForSolidExtraction() =>
@@ -196,30 +196,24 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
public override bool IsSolid =>
Entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder).Count() > 1;
public override long TotalSize
{
get
{
var i = Entries.Count;
return database._packSizes.Aggregate(0L, (total, packSize) => total + packSize);
}
}
public override long TotalSize =>
_database?._packSizes.Aggregate(0L, (total, packSize) => total + packSize) ?? 0;
private sealed class SevenZipReader : AbstractReader<SevenZipEntry, SevenZipVolume>
{
private readonly SevenZipArchive archive;
private CFolder currentFolder;
private Stream currentStream;
private CFileItem currentItem;
private readonly SevenZipArchive _archive;
private CFolder? _currentFolder;
private Stream? _currentStream;
private CFileItem? _currentItem;
internal SevenZipReader(ReaderOptions readerOptions, SevenZipArchive archive)
: base(readerOptions, ArchiveType.SevenZip) => this.archive = archive;
: base(readerOptions, ArchiveType.SevenZip) => this._archive = archive;
public override SevenZipVolume Volume => archive.Volumes.Single();
public override SevenZipVolume Volume => _archive.Volumes.Single();
protected override IEnumerable<SevenZipEntry> GetEntries(Stream stream)
{
var entries = archive.Entries.ToList();
var entries = _archive.Entries.ToList();
stream.Position = 0;
foreach (var dir in entries.Where(x => x.IsDirectory))
{
@@ -229,37 +223,42 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
var group in entries.Where(x => !x.IsDirectory).GroupBy(x => x.FilePart.Folder)
)
{
currentFolder = group.Key;
_currentFolder = group.Key;
if (group.Key is null)
{
currentStream = Stream.Null;
_currentStream = Stream.Null;
}
else
{
currentStream = archive.database.GetFolderStream(
_currentStream = _archive._database?.GetFolderStream(
stream,
currentFolder,
_currentFolder,
new PasswordProvider(Options.Password)
);
}
foreach (var entry in group)
{
currentItem = entry.FilePart.Header;
_currentItem = entry.FilePart.Header;
yield return entry;
}
}
}
protected override EntryStream GetEntryStream() =>
CreateEntryStream(new ReadOnlySubStream(currentStream, currentItem.Size));
CreateEntryStream(
new ReadOnlySubStream(
_currentStream.NotNull("currentStream is not null"),
_currentItem?.Size ?? 0
)
);
}
private class PasswordProvider : IPasswordProvider
{
private readonly string _password;
private readonly string? _password;
public PasswordProvider(string password) => _password = password;
public PasswordProvider(string? password) => _password = password;
public string CryptoGetTextPassword() => _password;
public string? CryptoGetTextPassword() => _password;
}
}

View File

@@ -114,7 +114,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
var tarHeader = new TarHeader(new ArchiveEncoding());
var readSucceeded = tarHeader.Read(new BinaryReader(stream));
var isEmptyArchive =
tarHeader.Name.Length == 0
tarHeader.Name?.Length == 0
&& tarHeader.Size == 0
&& Enum.IsDefined(typeof(EntryType), tarHeader.EntryType);
return readSucceeded || isEmptyArchive;
@@ -123,22 +123,20 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
return false;
}
protected override IEnumerable<TarVolume> LoadVolumes(SourceStream srcStream)
protected override IEnumerable<TarVolume> LoadVolumes(SourceStream sourceStream)
{
SrcStream.LoadAllParts(); //request all streams
var idx = 0;
return new TarVolume(srcStream, ReaderOptions, idx++).AsEnumerable(); //simple single volume or split, multivolume not supported
sourceStream.NotNull("SourceStream is null").LoadAllParts(); //request all streams
return new TarVolume(sourceStream, ReaderOptions, 1).AsEnumerable(); //simple single volume or split, multivolume not supported
}
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="srcStream"></param>
/// <param name="options"></param>
internal TarArchive(SourceStream srcStream)
: base(ArchiveType.Tar, srcStream) { }
/// <param name="sourceStream"></param>
private TarArchive(SourceStream sourceStream)
: base(ArchiveType.Tar, sourceStream) { }
internal TarArchive()
private TarArchive()
: base(ArchiveType.Tar) { }
protected override IEnumerable<TarArchiveEntry> LoadEntries(IEnumerable<TarVolume> volumes)
@@ -192,6 +190,10 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
);
}
}
else
{
throw new IncompleteArchiveException("Failed to read TAR header");
}
}
}
@@ -225,7 +227,12 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
{
using var entryStream = entry.OpenEntryStream();
writer.Write(entry.Key, entryStream, entry.LastModifiedTime, entry.Size);
writer.Write(
entry.Key.NotNull("Entry Key is null"),
entryStream,
entry.LastModifiedTime,
entry.Size
);
}
}

View File

@@ -7,7 +7,7 @@ namespace SharpCompress.Archives.Tar;
public class TarArchiveEntry : TarEntry, IArchiveEntry
{
internal TarArchiveEntry(TarArchive archive, TarFilePart part, CompressionType compressionType)
internal TarArchiveEntry(TarArchive archive, TarFilePart? part, CompressionType compressionType)
: base(part, compressionType) => Archive = archive;
public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream();

View File

@@ -1,5 +1,3 @@
#nullable disable
using System;
using System.Collections.Generic;
using System.IO;

View File

@@ -16,10 +16,7 @@ namespace SharpCompress.Archives.Zip;
public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
{
#nullable disable
private readonly SeekableZipHeaderFactory headerFactory;
#nullable enable
private readonly SeekableZipHeaderFactory? headerFactory;
/// <summary>
/// Gets or sets the compression level applied to files added to the archive,
@@ -30,13 +27,13 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
/// </summary>
/// <param name="srcStream"></param>
/// <param name="sourceStream"></param>
/// <param name="options"></param>
internal ZipArchive(SourceStream srcStream)
: base(ArchiveType.Zip, srcStream) =>
internal ZipArchive(SourceStream sourceStream)
: base(ArchiveType.Zip, sourceStream) =>
headerFactory = new SeekableZipHeaderFactory(
srcStream.ReaderOptions.Password,
srcStream.ReaderOptions.ArchiveEncoding
sourceStream.ReaderOptions.Password,
sourceStream.ReaderOptions.ArchiveEncoding
);
/// <summary>
@@ -189,21 +186,21 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
}
}
protected override IEnumerable<ZipVolume> LoadVolumes(SourceStream srcStream)
protected override IEnumerable<ZipVolume> LoadVolumes(SourceStream stream)
{
SrcStream.LoadAllParts(); //request all streams
SrcStream.Position = 0;
stream.LoadAllParts(); //request all streams
stream.Position = 0;
var streams = SrcStream.Streams.ToList();
var streams = stream.Streams.ToList();
var idx = 0;
if (streams.Count > 1) //test part 2 - true = multipart not split
if (streams.Count() > 1) //test part 2 - true = multipart not split
{
streams[1].Position += 4; //skip the POST_DATA_DESCRIPTOR to prevent an exception
var isZip = IsZipFile(streams[1], ReaderOptions.Password);
streams[1].Position -= 4;
if (isZip)
{
SrcStream.IsVolumes = true;
stream.IsVolumes = true;
var tmp = streams[0]; //arcs as zip, z01 ... swap the zip the end
streams.RemoveAt(0);
@@ -215,7 +212,7 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
}
//split mode or single file
return new ZipVolume(SrcStream, ReaderOptions, idx++).AsEnumerable();
return new ZipVolume(stream, ReaderOptions, idx++).AsEnumerable();
}
internal ZipArchive()
@@ -224,14 +221,13 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
protected override IEnumerable<ZipArchiveEntry> LoadEntries(IEnumerable<ZipVolume> volumes)
{
var vols = volumes.ToArray();
foreach (var h in headerFactory.ReadSeekableHeader(vols.Last().Stream))
foreach (var h in headerFactory.NotNull().ReadSeekableHeader(vols.Last().Stream))
{
if (h != null)
{
switch (h.ZipHeaderType)
{
case ZipHeaderType.DirectoryEntry:
{
var deh = (DirectoryEntryHeader)h;
Stream s;
@@ -254,14 +250,14 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
yield return new ZipArchiveEntry(
this,
new SeekableZipFilePart(headerFactory, deh, s)
new SeekableZipFilePart(headerFactory.NotNull(), deh, s)
);
}
break;
case ZipHeaderType.DirectoryEnd:
{
var bytes = ((DirectoryEndHeader)h).Comment ?? Array.Empty<byte>();
volumes.Last().Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
vols.Last().Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
yield break;
}
}
@@ -282,7 +278,11 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
foreach (var entry in oldEntries.Concat(newEntries).Where(x => !x.IsDirectory))
{
using var entryStream = entry.OpenEntryStream();
writer.Write(entry.Key, entryStream, entry.LastModifiedTime);
writer.Write(
entry.Key.NotNull("Entry Key is null"),
entryStream,
entry.LastModifiedTime
);
}
}

View File

@@ -8,12 +8,12 @@ public class ArchiveEncoding
/// <summary>
/// Default encoding to use when archive format doesn't specify one.
/// </summary>
public Encoding Default { get; set; }
public Encoding? Default { get; set; }
/// <summary>
/// ArchiveEncoding used by encryption schemes which don't comply with RFC 2898.
/// </summary>
public Encoding Password { get; set; }
public Encoding? Password { get; set; }
/// <summary>
/// Set this encoding when you want to force it for all encoding operations.
@@ -50,6 +50,8 @@ public class ArchiveEncoding
public Encoding GetEncoding() => Forced ?? Default ?? Encoding.UTF8;
public Encoding GetPasswordEncoding() => Password ?? Encoding.UTF8;
public Func<byte[], int, int, string> GetDecoder() =>
CustomDecoder ?? ((bytes, index, count) => GetEncoding().GetString(bytes, index, count));
}

View File

@@ -16,5 +16,10 @@ public enum CompressionType
Unknown,
Deflate64,
Shrink,
Lzw
Lzw,
Reduce1,
Reduce2,
Reduce3,
Reduce4,
Explode
}

View File

@@ -14,7 +14,7 @@ public abstract class Entry : IEntry
/// <summary>
/// The string key of the file internal to the Archive.
/// </summary>
public abstract string Key { get; }
public abstract string? Key { get; }
/// <summary>
/// The target of a symlink entry internal to the Archive. Will be null if not a symlink.
@@ -71,11 +71,11 @@ public abstract class Entry : IEntry
/// </summary>
public abstract bool IsSplitAfter { get; }
public int VolumeIndexFirst => Parts?.FirstOrDefault()?.Index ?? 0;
public int VolumeIndexLast => Parts?.LastOrDefault()?.Index ?? 0;
public int VolumeIndexFirst => Parts.FirstOrDefault()?.Index ?? 0;
public int VolumeIndexLast => Parts.LastOrDefault()?.Index ?? 0;
/// <inheritdoc/>
public override string ToString() => Key;
public override string ToString() => Key ?? "Entry";
internal abstract IEnumerable<FilePart> Parts { get; }

View File

@@ -36,10 +36,12 @@ internal static class ExtractionMethods
options ??= new ExtractionOptions() { Overwrite = true };
var file = Path.GetFileName(entry.Key);
var file = Path.GetFileName(entry.Key.NotNull("Entry Key is null")).NotNull("File is null");
file = Utility.ReplaceInvalidFileNameChars(file);
if (options.ExtractFullPath)
{
var folder = Path.GetDirectoryName(entry.Key)!;
var folder = Path.GetDirectoryName(entry.Key.NotNull("Entry Key is null"))
.NotNull("Directory is null");
var destdir = Path.GetFullPath(Path.Combine(fullDestinationDirectoryPath, folder));
if (!Directory.Exists(destdir))

View File

@@ -8,7 +8,7 @@ public abstract class FilePart
internal ArchiveEncoding ArchiveEncoding { get; }
internal abstract string FilePartName { get; }
internal abstract string? FilePartName { get; }
public int Index { get; set; }
internal abstract Stream GetCompressedStream();

View File

@@ -6,23 +6,23 @@ namespace SharpCompress.Common.GZip;
public class GZipEntry : Entry
{
private readonly GZipFilePart _filePart;
private readonly GZipFilePart? _filePart;
internal GZipEntry(GZipFilePart filePart) => _filePart = filePart;
internal GZipEntry(GZipFilePart? filePart) => _filePart = filePart;
public override CompressionType CompressionType => CompressionType.GZip;
public override long Crc => _filePart.Crc ?? 0;
public override long Crc => _filePart?.Crc ?? 0;
public override string Key => _filePart.FilePartName;
public override string? Key => _filePart?.FilePartName;
public override string? LinkTarget => null;
public override long CompressedSize => 0;
public override long Size => _filePart.UncompressedSize ?? 0;
public override long Size => _filePart?.UncompressedSize ?? 0;
public override DateTime? LastModifiedTime => _filePart.DateModified;
public override DateTime? LastModifiedTime => _filePart?.DateModified;
public override DateTime? CreatedTime => null;
@@ -36,7 +36,7 @@ public class GZipEntry : Entry
public override bool IsSplitAfter => false;
internal override IEnumerable<FilePart> Parts => _filePart.AsEnumerable<FilePart>();
internal override IEnumerable<FilePart> Parts => _filePart.Empty();
internal static IEnumerable<GZipEntry> GetEntries(Stream stream, OptionsBase options)
{

View File

@@ -34,7 +34,7 @@ internal sealed class GZipFilePart : FilePart
internal uint? Crc { get; private set; }
internal uint? UncompressedSize { get; private set; }
internal override string FilePartName => _name!;
internal override string? FilePartName => _name;
internal override Stream GetCompressedStream() =>
new DeflateStream(_stream, CompressionMode.Decompress, CompressionLevel.Default);

View File

@@ -5,7 +5,7 @@ namespace SharpCompress.Common.GZip;
public class GZipVolume : Volume
{
public GZipVolume(Stream stream, ReaderOptions options, int index = 0)
public GZipVolume(Stream stream, ReaderOptions? options, int index)
: base(stream, options, index) { }
public GZipVolume(FileInfo fileInfo, ReaderOptions options)

View File

@@ -9,7 +9,7 @@ public interface IEntry
long CompressedSize { get; }
long Crc { get; }
DateTime? CreatedTime { get; }
string Key { get; }
string? Key { get; }
string? LinkTarget { get; }
bool IsDirectory { get; }
bool IsEncrypted { get; }

View File

@@ -6,5 +6,5 @@ public interface IVolume : IDisposable
{
int Index { get; }
string FileName { get; }
string? FileName { get; }
}

View File

@@ -121,7 +121,6 @@ internal class FileHeader : RarHeader
switch (type)
{
case FHEXTRA_CRYPT: // file encryption
{
Rar5CryptoInfo = new Rar5CryptoInfo(reader, true);
@@ -132,7 +131,6 @@ internal class FileHeader : RarHeader
}
break;
case FHEXTRA_HASH:
{
const uint FHEXTRA_HASH_BLAKE2 = 0x0;
// const uint HASH_BLAKE2 = 0x03;
@@ -146,7 +144,6 @@ internal class FileHeader : RarHeader
}
break;
case FHEXTRA_HTIME: // file time
{
var flags = reader.ReadRarVIntUInt16();
var isWindowsTime = (flags & 1) == 0;
@@ -171,7 +168,6 @@ internal class FileHeader : RarHeader
// }
// break;
case FHEXTRA_REDIR: // file system redirection
{
RedirType = reader.ReadRarVIntByte();
RedirFlags = reader.ReadRarVIntByte();
@@ -284,7 +280,6 @@ internal class FileHeader : RarHeader
switch (HeaderCode)
{
case HeaderCodeV.RAR4_FILE_HEADER:
{
if (HasFlag(FileFlagsV4.UNICODE))
{
@@ -311,7 +306,6 @@ internal class FileHeader : RarHeader
}
break;
case HeaderCodeV.RAR4_NEW_SUB_HEADER:
{
var datasize = HeaderSize - newLhdSize - nameSize;
if (HasFlag(FileFlagsV4.SALT))

View File

@@ -98,13 +98,11 @@ public class RarHeaderFactory
switch (StreamingMode)
{
case StreamingMode.Seekable:
{
reader.BaseStream.Position += ph.DataSize;
}
break;
case StreamingMode.Streaming:
{
reader.BaseStream.Skip(ph.DataSize);
}
@@ -146,14 +144,12 @@ public class RarHeaderFactory
switch (StreamingMode)
{
case StreamingMode.Seekable:
{
fh.DataStartPosition = reader.BaseStream.Position;
reader.BaseStream.Position += fh.CompressedSize;
}
break;
case StreamingMode.Streaming:
{
var ms = new ReadOnlySubStream(reader.BaseStream, fh.CompressedSize);
if (fh.R4Salt is null && fh.Rar5CryptoInfo is null)
@@ -204,14 +200,12 @@ public class RarHeaderFactory
switch (StreamingMode)
{
case StreamingMode.Seekable:
{
fh.DataStartPosition = reader.BaseStream.Position;
reader.BaseStream.Position += fh.CompressedSize;
}
break;
case StreamingMode.Streaming:
{
//skip the data because it's useless?
reader.BaseStream.Skip(fh.CompressedSize);

View File

@@ -70,11 +70,11 @@ internal sealed class RarCryptoWrapper : Stream
protected override void Dispose(bool disposing)
{
if (_rijndael != null)
if (disposing)
{
_rijndael.Dispose();
_rijndael = null!;
}
base.Dispose(disposing);
}
}

View File

@@ -25,7 +25,7 @@ public abstract class RarEntry : Entry
/// <summary>
/// The path of the file internal to the Rar Archive.
/// </summary>
public override string Key => FileHeader.FileName;
public override string? Key => FileHeader.FileName;
public override string? LinkTarget => null;

View File

@@ -15,17 +15,14 @@ namespace SharpCompress.Common.Rar;
public abstract class RarVolume : Volume
{
private readonly RarHeaderFactory _headerFactory;
internal int _maxCompressionAlgorithm;
private int _maxCompressionAlgorithm;
internal RarVolume(StreamingMode mode, Stream stream, ReaderOptions options, int index = 0)
internal RarVolume(StreamingMode mode, Stream stream, ReaderOptions options, int index)
: base(stream, options, index) => _headerFactory = new RarHeaderFactory(mode, options);
#nullable disable
internal ArchiveHeader ArchiveHeader { get; private set; }
private ArchiveHeader? ArchiveHeader { get; set; }
#nullable enable
internal StreamingMode Mode => _headerFactory.StreamingMode;
private StreamingMode Mode => _headerFactory.StreamingMode;
internal abstract IEnumerable<RarFilePart> ReadFileParts();
@@ -39,19 +36,16 @@ public abstract class RarVolume : Volume
switch (header.HeaderType)
{
case HeaderType.Mark:
{
lastMarkHeader = (MarkHeader)header;
}
break;
case HeaderType.Archive:
{
ArchiveHeader = (ArchiveHeader)header;
}
break;
case HeaderType.File:
{
var fh = (FileHeader)header;
if (_maxCompressionAlgorithm < fh.CompressionAlgorithm)
@@ -63,14 +57,12 @@ public abstract class RarVolume : Volume
}
break;
case HeaderType.Service:
{
var fh = (FileHeader)header;
if (fh.FileName == "CMT")
{
var part = CreateFilePart(lastMarkHeader!, fh);
var buffer = new byte[fh.CompressedSize];
part.GetCompressedStream().Read(buffer, 0, buffer.Length);
fh.PackedStream.Read(buffer, 0, buffer.Length);
Comment = Encoding.UTF8.GetString(buffer, 0, buffer.Length - 1);
}
}
@@ -105,7 +97,7 @@ public abstract class RarVolume : Volume
get
{
EnsureArchiveHeaderLoaded();
return ArchiveHeader.IsFirstVolume;
return ArchiveHeader?.IsFirstVolume ?? false;
}
}
@@ -117,7 +109,7 @@ public abstract class RarVolume : Volume
get
{
EnsureArchiveHeaderLoaded();
return ArchiveHeader.IsVolume;
return ArchiveHeader?.IsVolume ?? false;
}
}
@@ -130,7 +122,7 @@ public abstract class RarVolume : Volume
get
{
EnsureArchiveHeaderLoaded();
return ArchiveHeader.IsSolid;
return ArchiveHeader?.IsSolid ?? false;
}
}

View File

@@ -35,7 +35,7 @@ internal class ArchiveDatabase
_packSizes.Clear();
_packCrCs.Clear();
_folders.Clear();
_numUnpackStreamsVector = null!;
_numUnpackStreamsVector = null;
_files.Clear();
_packStreamStartPositions.Clear();

View File

@@ -1220,23 +1220,46 @@ internal class ArchiveReader
#region Public Methods
public void Open(Stream stream)
public void Open(Stream stream, bool lookForHeader)
{
Close();
_streamOrigin = stream.Position;
_streamEnding = stream.Length;
// TODO: Check Signature!
_header = new byte[0x20];
for (var offset = 0; offset < 0x20; )
var canScan = lookForHeader ? 0x80000 - 20 : 0;
while (true)
{
var delta = stream.Read(_header, offset, 0x20 - offset);
if (delta == 0)
// TODO: Check Signature!
_header = new byte[0x20];
for (var offset = 0; offset < 0x20; )
{
throw new EndOfStreamException();
var delta = stream.Read(_header, offset, 0x20 - offset);
if (delta == 0)
{
throw new EndOfStreamException();
}
offset += delta;
}
offset += delta;
if (
!lookForHeader
|| _header
.AsSpan(0, length: 6)
.SequenceEqual<byte>([0x37, 0x7A, 0xBC, 0xAF, 0x27, 0x1C])
)
{
break;
}
if (canScan == 0)
{
throw new InvalidFormatException("Unable to find 7z signature");
}
canScan--;
stream.Position = ++_streamOrigin;
}
_stream = stream;

View File

@@ -13,7 +13,7 @@ public class SevenZipEntry : Entry
public override long Crc => FilePart.Header.Crc ?? 0;
public override string Key => FilePart.Header.Name;
public override string? Key => FilePart.Header.Name;
public override string? LinkTarget => null;

View File

@@ -41,7 +41,7 @@ internal class SevenZipFilePart : FilePart
{
if (!Header.HasStream)
{
return null!;
throw new InvalidOperationException("File does not have a stream.");
}
var folderStream = _database.GetFolderStream(_stream, Folder!, _database.PasswordProvider);
@@ -73,34 +73,24 @@ internal class SevenZipFilePart : FilePart
private const uint K_PPMD = 0x030401;
private const uint K_B_ZIP2 = 0x040202;
internal CompressionType GetCompression()
private CompressionType GetCompression()
{
if (Header.IsDir)
return CompressionType.None;
var coder = Folder!._coders.First();
switch (coder._methodId._id)
{
case K_LZMA:
case K_LZMA2:
{
return CompressionType.LZMA;
}
case K_PPMD:
{
return CompressionType.PPMd;
}
case K_B_ZIP2:
{
return CompressionType.BZip2;
}
default:
throw new NotImplementedException();
return CompressionType.None;
}
var coder = Folder.NotNull()._coders.First();
return coder._methodId._id switch
{
K_LZMA or K_LZMA2 => CompressionType.LZMA,
K_PPMD => CompressionType.PPMd,
K_B_ZIP2 => CompressionType.BZip2,
_ => throw new NotImplementedException()
};
}
internal bool IsEncrypted =>
Header.IsDir
? false
: Folder!._coders.FindIndex(c => c._methodId._id == CMethodId.K_AES_ID) != -1;
!Header.IsDir
&& Folder?._coders.FindIndex(c => c._methodId._id == CMethodId.K_AES_ID) != -1;
}

View File

@@ -1,5 +1,3 @@
#nullable disable
using System;
using System.Buffers.Binary;
using System.IO;
@@ -13,8 +11,8 @@ internal sealed class TarHeader
public TarHeader(ArchiveEncoding archiveEncoding) => ArchiveEncoding = archiveEncoding;
internal string Name { get; set; }
internal string LinkName { get; set; }
internal string? Name { get; set; }
internal string? LinkName { get; set; }
internal long Mode { get; set; }
internal long UserId { get; set; }
@@ -22,7 +20,7 @@ internal sealed class TarHeader
internal long Size { get; set; }
internal DateTime LastModifiedTime { get; set; }
internal EntryType EntryType { get; set; }
internal Stream PackedStream { get; set; }
internal Stream? PackedStream { get; set; }
internal ArchiveEncoding ArchiveEncoding { get; }
internal const int BLOCK_SIZE = 512;
@@ -36,7 +34,9 @@ internal sealed class TarHeader
WriteOctalBytes(0, buffer, 116, 8); // group ID
//ArchiveEncoding.UTF8.GetBytes("magic").CopyTo(buffer, 257);
var nameByteCount = ArchiveEncoding.GetEncoding().GetByteCount(Name);
var nameByteCount = ArchiveEncoding
.GetEncoding()
.GetByteCount(Name.NotNull("Name is null"));
if (nameByteCount > 100)
{
// Set mock filename and filetype to indicate the next block is the actual name of the file
@@ -46,7 +46,7 @@ internal sealed class TarHeader
}
else
{
WriteStringBytes(ArchiveEncoding.Encode(Name), buffer, 100);
WriteStringBytes(ArchiveEncoding.Encode(Name.NotNull("Name is null")), buffer, 100);
WriteOctalBytes(Size, buffer, 124, 12);
var time = (long)(LastModifiedTime.ToUniversalTime() - EPOCH).TotalSeconds;
WriteOctalBytes(time, buffer, 136, 12);
@@ -77,7 +77,7 @@ internal sealed class TarHeader
//
// and then infinite recursion is occured in WriteLongFilenameHeader because truncated.Length is 102.
Name = ArchiveEncoding.Decode(
ArchiveEncoding.Encode(Name),
ArchiveEncoding.Encode(Name.NotNull("Name is null")),
0,
100 - ArchiveEncoding.GetEncoding().GetMaxByteCount(1)
);
@@ -87,7 +87,7 @@ internal sealed class TarHeader
private void WriteLongFilenameHeader(Stream output)
{
var nameBytes = ArchiveEncoding.Encode(Name);
var nameBytes = ArchiveEncoding.Encode(Name.NotNull("Name is null"));
output.Write(nameBytes, 0, nameBytes.Length);
// pad to multiple of BlockSize bytes, and make sure a terminating null is added
@@ -101,57 +101,85 @@ internal sealed class TarHeader
internal bool Read(BinaryReader reader)
{
var buffer = ReadBlock(reader);
if (buffer.Length == 0)
string? longName = null;
string? longLinkName = null;
var hasLongValue = true;
byte[] buffer;
EntryType entryType;
do
{
buffer = ReadBlock(reader);
if (buffer.Length == 0)
{
return false;
}
entryType = ReadEntryType(buffer);
// LongName and LongLink headers can follow each other and need
// to apply to the header that follows them.
if (entryType == EntryType.LongName)
{
longName = ReadLongName(reader, buffer);
continue;
}
else if (entryType == EntryType.LongLink)
{
longLinkName = ReadLongName(reader, buffer);
continue;
}
hasLongValue = false;
} while (hasLongValue);
// Check header checksum
if (!checkChecksum(buffer))
{
return false;
}
// for symlinks, additionally read the linkname
if (ReadEntryType(buffer) == EntryType.SymLink)
{
LinkName = ArchiveEncoding.Decode(buffer, 157, 100).TrimNulls();
}
if (ReadEntryType(buffer) == EntryType.LongName)
{
Name = ReadLongName(reader, buffer);
buffer = ReadBlock(reader);
}
else
{
Name = ArchiveEncoding.Decode(buffer, 0, 100).TrimNulls();
}
EntryType = ReadEntryType(buffer);
Name = longName ?? ArchiveEncoding.Decode(buffer, 0, 100).TrimNulls();
EntryType = entryType;
Size = ReadSize(buffer);
// for symlinks, additionally read the linkname
if (entryType == EntryType.SymLink || entryType == EntryType.HardLink)
{
LinkName = longLinkName ?? ArchiveEncoding.Decode(buffer, 157, 100).TrimNulls();
}
Mode = ReadAsciiInt64Base8(buffer, 100, 7);
if (EntryType == EntryType.Directory)
if (entryType == EntryType.Directory)
{
Mode |= 0b1_000_000_000;
}
UserId = ReadAsciiInt64Base8oldGnu(buffer, 108, 7);
GroupId = ReadAsciiInt64Base8oldGnu(buffer, 116, 7);
var unixTimeStamp = ReadAsciiInt64Base8(buffer, 136, 11);
LastModifiedTime = EPOCH.AddSeconds(unixTimeStamp).ToLocalTime();
var unixTimeStamp = ReadAsciiInt64Base8(buffer, 136, 11);
LastModifiedTime = EPOCH.AddSeconds(unixTimeStamp).ToLocalTime();
Magic = ArchiveEncoding.Decode(buffer, 257, 6).TrimNulls();
if (!string.IsNullOrEmpty(Magic) && "ustar".Equals(Magic))
{
var namePrefix = ArchiveEncoding.Decode(buffer, 345, 157);
namePrefix = namePrefix.TrimNulls();
var namePrefix = ArchiveEncoding.Decode(buffer, 345, 157).TrimNulls();
if (!string.IsNullOrEmpty(namePrefix))
{
Name = namePrefix + "/" + Name;
}
}
if (EntryType != EntryType.LongName && Name.Length == 0)
if (entryType != EntryType.LongName && Name.Length == 0)
{
return false;
}
return true;
}
@@ -289,6 +317,42 @@ internal sealed class TarHeader
(byte)' '
};
internal static bool checkChecksum(byte[] buf)
{
const int eightSpacesChksum = 256;
var buffer = new Span<byte>(buf).Slice(0, 512);
int posix_sum = eightSpacesChksum;
int sun_sum = eightSpacesChksum;
foreach (byte b in buffer)
{
posix_sum += b;
sun_sum += unchecked((sbyte)b);
}
// Special case, empty file header
if (posix_sum == eightSpacesChksum)
{
return true;
}
// Remove current checksum from calculation
foreach (byte b in buffer.Slice(148, 8))
{
posix_sum -= b;
sun_sum -= unchecked((sbyte)b);
}
// Read and compare checksum for header
var crc = ReadAsciiInt64Base8(buf, 148, 7);
if (crc != posix_sum && crc != sun_sum)
{
return false;
}
return true;
}
internal static int RecalculateChecksum(byte[] buf)
{
// Set default value for checksum. That is 8 spaces.
@@ -323,5 +387,5 @@ internal sealed class TarHeader
public long? DataStartPosition { get; set; }
public string Magic { get; set; }
public string? Magic { get; set; }
}

View File

@@ -1,5 +1,3 @@
#nullable disable
using System;
using System.Collections.Generic;
using System.IO;
@@ -10,9 +8,9 @@ namespace SharpCompress.Common.Tar;
public class TarEntry : Entry
{
private readonly TarFilePart _filePart;
private readonly TarFilePart? _filePart;
internal TarEntry(TarFilePart filePart, CompressionType type)
internal TarEntry(TarFilePart? filePart, CompressionType type)
{
_filePart = filePart;
CompressionType = type;
@@ -22,15 +20,15 @@ public class TarEntry : Entry
public override long Crc => 0;
public override string Key => _filePart.Header.Name;
public override string? Key => _filePart?.Header.Name;
public override string LinkTarget => _filePart.Header.LinkName;
public override string? LinkTarget => _filePart?.Header.LinkName;
public override long CompressedSize => _filePart.Header.Size;
public override long CompressedSize => _filePart?.Header.Size ?? 0;
public override long Size => _filePart.Header.Size;
public override long Size => _filePart?.Header.Size ?? 0;
public override DateTime? LastModifiedTime => _filePart.Header.LastModifiedTime;
public override DateTime? LastModifiedTime => _filePart?.Header.LastModifiedTime;
public override DateTime? CreatedTime => null;
@@ -40,17 +38,17 @@ public class TarEntry : Entry
public override bool IsEncrypted => false;
public override bool IsDirectory => _filePart.Header.EntryType == EntryType.Directory;
public override bool IsDirectory => _filePart?.Header.EntryType == EntryType.Directory;
public override bool IsSplitAfter => false;
public long Mode => _filePart.Header.Mode;
public long Mode => _filePart?.Header.Mode ?? 0;
public long UserID => _filePart.Header.UserId;
public long UserID => _filePart?.Header.UserId ?? 0;
public long GroupId => _filePart.Header.GroupId;
public long GroupId => _filePart?.Header.GroupId ?? 0;
internal override IEnumerable<FilePart> Parts => _filePart.AsEnumerable<FilePart>();
internal override IEnumerable<FilePart> Parts => _filePart.Empty();
internal static IEnumerable<TarEntry> GetEntries(
StreamingMode mode,
@@ -59,17 +57,17 @@ public class TarEntry : Entry
ArchiveEncoding archiveEncoding
)
{
foreach (var h in TarHeaderFactory.ReadHeader(mode, stream, archiveEncoding))
foreach (var header in TarHeaderFactory.ReadHeader(mode, stream, archiveEncoding))
{
if (h != null)
if (header != null)
{
if (mode == StreamingMode.Seekable)
{
yield return new TarEntry(new TarFilePart(h, stream), compressionType);
yield return new TarEntry(new TarFilePart(header, stream), compressionType);
}
else
{
yield return new TarEntry(new TarFilePart(h, null), compressionType);
yield return new TarEntry(new TarFilePart(header, null), compressionType);
}
}
else

View File

@@ -5,9 +5,9 @@ namespace SharpCompress.Common.Tar;
internal sealed class TarFilePart : FilePart
{
private readonly Stream _seekableStream;
private readonly Stream? _seekableStream;
internal TarFilePart(TarHeader header, Stream seekableStream)
internal TarFilePart(TarHeader header, Stream? seekableStream)
: base(header.ArchiveEncoding)
{
_seekableStream = seekableStream;
@@ -16,16 +16,16 @@ internal sealed class TarFilePart : FilePart
internal TarHeader Header { get; }
internal override string FilePartName => Header.Name;
internal override string? FilePartName => Header?.Name;
internal override Stream GetCompressedStream()
{
if (_seekableStream != null)
{
_seekableStream.Position = Header.DataStartPosition!.Value;
_seekableStream.Position = Header.DataStartPosition ?? 0;
return new TarReadOnlySubStream(_seekableStream, Header.Size);
}
return Header.PackedStream;
return Header.PackedStream.NotNull();
}
internal override Stream? GetRawStream() => null;

View File

@@ -28,7 +28,6 @@ internal static class TarHeaderFactory
switch (mode)
{
case StreamingMode.Seekable:
{
header.DataStartPosition = reader.BaseStream.Position;
@@ -37,7 +36,6 @@ internal static class TarHeaderFactory
}
break;
case StreamingMode.Streaming:
{
header.PackedStream = new TarReadOnlySubStream(stream, header.Size);
}

View File

@@ -9,11 +9,11 @@ public abstract class Volume : IVolume
{
private readonly Stream _actualStream;
internal Volume(Stream stream, ReaderOptions readerOptions, int index = 0)
internal Volume(Stream stream, ReaderOptions? readerOptions, int index = 0)
{
Index = index;
ReaderOptions = readerOptions;
if (readerOptions.LeaveStreamOpen)
ReaderOptions = readerOptions ?? new ReaderOptions();
if (ReaderOptions.LeaveStreamOpen)
{
stream = NonDisposingStream.Create(stream);
}
@@ -32,7 +32,7 @@ public abstract class Volume : IVolume
public virtual int Index { get; internal set; }
public string FileName => (_actualStream as FileStream)?.Name!;
public string? FileName => (_actualStream as FileStream)?.Name;
/// <summary>
/// RarArchive is part of a multi-part archive.

View File

@@ -1,5 +1,3 @@
#nullable disable
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
@@ -20,21 +18,21 @@ internal abstract class ZipFileEntry : ZipHeader
{
get
{
if (Name.EndsWith('/'))
if (Name?.EndsWith('/') ?? false)
{
return true;
}
//.NET Framework 4.5 : System.IO.Compression::CreateFromDirectory() probably writes backslashes to headers
return CompressedSize == 0 && UncompressedSize == 0 && Name.EndsWith('\\');
return CompressedSize == 0 && UncompressedSize == 0 && (Name?.EndsWith('\\') ?? false);
}
}
internal Stream PackedStream { get; set; }
internal Stream? PackedStream { get; set; }
internal ArchiveEncoding ArchiveEncoding { get; }
internal string Name { get; set; }
internal string? Name { get; set; }
internal HeaderFlags Flags { get; set; }
@@ -48,7 +46,7 @@ internal abstract class ZipFileEntry : ZipHeader
internal List<ExtraData> Extra { get; set; }
public string Password { get; set; }
public string? Password { get; set; }
internal PkwareTraditionalEncryptionData ComposeEncryptionData(Stream archiveStream)
{
@@ -65,7 +63,7 @@ internal abstract class ZipFileEntry : ZipHeader
return encryptionData;
}
internal WinzipAesEncryptionData WinzipAesEncryptionData { get; set; }
internal WinzipAesEncryptionData? WinzipAesEncryptionData { get; set; }
/// <summary>
/// The last modified date as read from the Local or Central Directory header.
@@ -119,7 +117,7 @@ internal abstract class ZipFileEntry : ZipHeader
}
}
internal ZipFilePart Part { get; set; }
internal ZipFilePart? Part { get; set; }
internal bool IsZip64 => CompressedSize >= uint.MaxValue;
}

View File

@@ -103,7 +103,7 @@ internal class PkwareTraditionalEncryptionData
internal byte[] StringToByteArray(string value)
{
var a = _archiveEncoding.Password.GetBytes(value);
var a = _archiveEncoding.GetPasswordEncoding().GetBytes(value);
return a;
}

View File

@@ -42,16 +42,16 @@ internal class SeekableZipFilePart : ZipFilePart
protected override Stream CreateBaseStream()
{
BaseStream.Position = Header.DataStartPosition!.Value;
BaseStream.Position = Header.DataStartPosition.NotNull();
if (
(Header.CompressedSize == 0)
&& FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor)
&& (_directoryEntryHeader?.HasData == true)
&& (_directoryEntryHeader?.CompressedSize != 0)
&& _directoryEntryHeader.HasData
&& (_directoryEntryHeader.CompressedSize != 0)
)
{
return new ReadOnlySubStream(BaseStream, _directoryEntryHeader!.CompressedSize);
return new ReadOnlySubStream(BaseStream, _directoryEntryHeader.CompressedSize);
}
return BaseStream;

View File

@@ -13,7 +13,7 @@ internal sealed class StreamingZipFilePart : ZipFilePart
internal StreamingZipFilePart(ZipFileEntry header, Stream stream)
: base(header, stream) { }
protected override Stream CreateBaseStream() => Header.PackedStream;
protected override Stream CreateBaseStream() => Header.PackedStream.NotNull();
internal override Stream GetCompressedStream()
{

View File

@@ -36,16 +36,19 @@ internal class StreamingZipHeaderFactory : ZipHeaderFactory
uint headerBytes = 0;
if (
_lastEntryHeader != null
&& (
FlagUtility.HasFlag(_lastEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor)
|| _lastEntryHeader.IsZip64
)
&& FlagUtility.HasFlag(_lastEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor)
)
{
if (_lastEntryHeader.Part is null)
{
continue;
}
reader = ((StreamingZipFilePart)_lastEntryHeader.Part).FixStreamedFileLocation(
ref rewindableStream
);
var pos = rewindableStream.CanSeek ? (long?)rewindableStream.Position : null;
var crc = reader.ReadUInt32();
if (crc == POST_DATA_DESCRIPTOR)
{
@@ -78,6 +81,60 @@ internal class StreamingZipHeaderFactory : ZipHeaderFactory
_lastEntryHeader.DataStartPosition = pos - _lastEntryHeader.CompressedSize;
}
}
else if (_lastEntryHeader != null && _lastEntryHeader.IsZip64)
{
if (_lastEntryHeader.Part is null)
continue;
reader = ((StreamingZipFilePart)_lastEntryHeader.Part).FixStreamedFileLocation(
ref rewindableStream
);
var pos = rewindableStream.CanSeek ? (long?)rewindableStream.Position : null;
headerBytes = reader.ReadUInt32();
var version = reader.ReadUInt16();
var flags = (HeaderFlags)reader.ReadUInt16();
var compressionMethod = (ZipCompressionMethod)reader.ReadUInt16();
var lastModifiedDate = reader.ReadUInt16();
var lastModifiedTime = reader.ReadUInt16();
var crc = reader.ReadUInt32();
if (crc == POST_DATA_DESCRIPTOR)
{
crc = reader.ReadUInt32();
}
_lastEntryHeader.Crc = crc;
// The DataDescriptor can be either 64bit or 32bit
var compressed_size = reader.ReadUInt32();
var uncompressed_size = reader.ReadUInt32();
// Check if we have header or 64bit DataDescriptor
var test_header = !(headerBytes == 0x04034b50 || headerBytes == 0x02014b50);
var test_64bit = ((long)uncompressed_size << 32) | compressed_size;
if (test_64bit == _lastEntryHeader.CompressedSize && test_header)
{
_lastEntryHeader.UncompressedSize =
((long)reader.ReadUInt32() << 32) | headerBytes;
headerBytes = reader.ReadUInt32();
}
else
{
_lastEntryHeader.UncompressedSize = uncompressed_size;
}
if (pos.HasValue)
{
_lastEntryHeader.DataStartPosition = pos - _lastEntryHeader.CompressedSize;
// 4 = First 4 bytes of the entry header (i.e. 50 4B 03 04)
rewindableStream.Position = pos.Value + 4;
}
}
else
{
headerBytes = reader.ReadUInt32();

View File

@@ -1,5 +1,3 @@
#nullable disable
using System;
using System.Buffers.Binary;
using System.Security.Cryptography;
@@ -10,12 +8,7 @@ internal class WinzipAesEncryptionData
{
private const int RFC2898_ITERATIONS = 1000;
private readonly byte[] _salt;
private readonly WinzipAesKeySize _keySize;
private readonly byte[] _passwordVerifyValue;
private readonly string _password;
private byte[] _generatedVerifyValue;
internal WinzipAesEncryptionData(
WinzipAesKeySize keySize,
@@ -25,10 +18,28 @@ internal class WinzipAesEncryptionData
)
{
_keySize = keySize;
_salt = salt;
_passwordVerifyValue = passwordVerifyValue;
_password = password;
Initialize();
#if NETFRAMEWORK || NETSTANDARD2_0
var rfc2898 = new Rfc2898DeriveBytes(password, salt, RFC2898_ITERATIONS);
#else
var rfc2898 = new Rfc2898DeriveBytes(
password,
salt,
RFC2898_ITERATIONS,
HashAlgorithmName.SHA1
);
#endif
KeyBytes = rfc2898.GetBytes(KeySizeInBytes); // 16 or 24 or 32 ???
IvBytes = rfc2898.GetBytes(KeySizeInBytes);
var generatedVerifyValue = rfc2898.GetBytes(2);
var verify = BinaryPrimitives.ReadInt16LittleEndian(passwordVerifyValue);
var generated = BinaryPrimitives.ReadInt16LittleEndian(generatedVerifyValue);
if (verify != generated)
{
throw new InvalidFormatException("bad password");
}
}
internal byte[] IvBytes { get; set; }
@@ -45,32 +56,4 @@ internal class WinzipAesEncryptionData
WinzipAesKeySize.KeySize256 => 32,
_ => throw new InvalidOperationException(),
};
private void Initialize()
{
#if NETFRAMEWORK || NETSTANDARD2_0
var rfc2898 = new Rfc2898DeriveBytes(_password, _salt, RFC2898_ITERATIONS);
#else
var rfc2898 = new Rfc2898DeriveBytes(
_password,
_salt,
RFC2898_ITERATIONS,
HashAlgorithmName.SHA1
);
#endif
KeyBytes = rfc2898.GetBytes(KeySizeInBytes); // 16 or 24 or 32 ???
IvBytes = rfc2898.GetBytes(KeySizeInBytes);
_generatedVerifyValue = rfc2898.GetBytes(2);
var verify = BinaryPrimitives.ReadInt16LittleEndian(_passwordVerifyValue);
if (_password != null)
{
var generated = BinaryPrimitives.ReadInt16LittleEndian(_generatedVerifyValue);
if (verify != generated)
{
throw new InvalidFormatException("bad password");
}
}
}
}

View File

@@ -4,6 +4,11 @@ internal enum ZipCompressionMethod
{
None = 0,
Shrink = 1,
Reduce1 = 2,
Reduce2 = 3,
Reduce3 = 4,
Reduce4 = 5,
Explode = 6,
Deflate = 8,
Deflate64 = 9,
BZip2 = 12,

View File

@@ -1,5 +1,3 @@
#nullable disable
using System;
using System.Collections.Generic;
using SharpCompress.Common.Zip.Headers;
@@ -8,22 +6,23 @@ namespace SharpCompress.Common.Zip;
public class ZipEntry : Entry
{
private readonly ZipFilePart _filePart;
private readonly ZipFilePart? _filePart;
internal ZipEntry(ZipFilePart filePart)
internal ZipEntry(ZipFilePart? filePart)
{
if (filePart != null)
if (filePart == null)
{
_filePart = filePart;
LastModifiedTime = Utility.DosDateToDateTime(
filePart.Header.LastModifiedDate,
filePart.Header.LastModifiedTime
);
return;
}
_filePart = filePart;
LastModifiedTime = Utility.DosDateToDateTime(
filePart.Header.LastModifiedDate,
filePart.Header.LastModifiedTime
);
}
public override CompressionType CompressionType =>
_filePart.Header.CompressionMethod switch
_filePart?.Header.CompressionMethod switch
{
ZipCompressionMethod.BZip2 => CompressionType.BZip2,
ZipCompressionMethod.Deflate => CompressionType.Deflate,
@@ -32,18 +31,23 @@ public class ZipEntry : Entry
ZipCompressionMethod.PPMd => CompressionType.PPMd,
ZipCompressionMethod.None => CompressionType.None,
ZipCompressionMethod.Shrink => CompressionType.Shrink,
ZipCompressionMethod.Reduce1 => CompressionType.Reduce1,
ZipCompressionMethod.Reduce2 => CompressionType.Reduce2,
ZipCompressionMethod.Reduce3 => CompressionType.Reduce3,
ZipCompressionMethod.Reduce4 => CompressionType.Reduce4,
ZipCompressionMethod.Explode => CompressionType.Explode,
_ => CompressionType.Unknown
};
public override long Crc => _filePart.Header.Crc;
public override long Crc => _filePart?.Header.Crc ?? 0;
public override string Key => _filePart.Header.Name;
public override string? Key => _filePart?.Header.Name;
public override string LinkTarget => null;
public override string? LinkTarget => null;
public override long CompressedSize => _filePart.Header.CompressedSize;
public override long CompressedSize => _filePart?.Header.CompressedSize ?? 0;
public override long Size => _filePart.Header.UncompressedSize;
public override long Size => _filePart?.Header.UncompressedSize ?? 0;
public override DateTime? LastModifiedTime { get; }
@@ -54,11 +58,11 @@ public class ZipEntry : Entry
public override DateTime? ArchivedTime => null;
public override bool IsEncrypted =>
FlagUtility.HasFlag(_filePart.Header.Flags, HeaderFlags.Encrypted);
FlagUtility.HasFlag(_filePart?.Header.Flags ?? HeaderFlags.None, HeaderFlags.Encrypted);
public override bool IsDirectory => _filePart.Header.IsDirectory;
public override bool IsDirectory => _filePart?.Header.IsDirectory ?? false;
public override bool IsSplitAfter => false;
internal override IEnumerable<FilePart> Parts => _filePart.AsEnumerable<FilePart>();
internal override IEnumerable<FilePart> Parts => _filePart.Empty();
}

View File

@@ -7,8 +7,10 @@ using SharpCompress.Compressors;
using SharpCompress.Compressors.BZip2;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Compressors.Deflate64;
using SharpCompress.Compressors.Explode;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.PPMd;
using SharpCompress.Compressors.Reduce;
using SharpCompress.Compressors.Shrink;
using SharpCompress.Compressors.Xz;
using SharpCompress.IO;
@@ -29,7 +31,7 @@ internal abstract class ZipFilePart : FilePart
internal Stream BaseStream { get; }
internal ZipFileEntry Header { get; set; }
internal override string FilePartName => Header.Name;
internal override string? FilePartName => Header.Name;
internal override Stream GetCompressedStream()
{
@@ -89,6 +91,32 @@ internal abstract class ZipFilePart : FilePart
Header.UncompressedSize
);
}
case ZipCompressionMethod.Reduce1:
{
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 1);
}
case ZipCompressionMethod.Reduce2:
{
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 2);
}
case ZipCompressionMethod.Reduce3:
{
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 3);
}
case ZipCompressionMethod.Reduce4:
{
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 4);
}
case ZipCompressionMethod.Explode:
{
return new ExplodeStream(
stream,
Header.CompressedSize,
Header.UncompressedSize,
Header.Flags
);
}
case ZipCompressionMethod.Deflate:
{
return new DeflateStream(stream, CompressionMode.Decompress);
@@ -203,6 +231,10 @@ internal abstract class ZipFilePart : FilePart
{
case ZipCompressionMethod.None:
case ZipCompressionMethod.Shrink:
case ZipCompressionMethod.Reduce1:
case ZipCompressionMethod.Reduce2:
case ZipCompressionMethod.Reduce3:
case ZipCompressionMethod.Reduce4:
case ZipCompressionMethod.Deflate:
case ZipCompressionMethod.Deflate64:
case ZipCompressionMethod.BZip2:

View File

@@ -55,7 +55,13 @@ internal class ZipHeaderFactory
}
case POST_DATA_DESCRIPTOR:
{
if (FlagUtility.HasFlag(_lastEntryHeader!.Flags, HeaderFlags.UsePostDataDescriptor))
if (
_lastEntryHeader != null
&& FlagUtility.HasFlag(
_lastEntryHeader.NotNull().Flags,
HeaderFlags.UsePostDataDescriptor
)
)
{
_lastEntryHeader.Crc = reader.ReadUInt32();
_lastEntryHeader.CompressedSize = zip64

View File

@@ -69,7 +69,7 @@ public sealed class BZip2Stream : Stream
public override void SetLength(long value) => stream.SetLength(value);
#if !NETFRAMEWORK && !NETSTANDARD2_0
#if !NETFRAMEWORK&& !NETSTANDARD2_0
public override int Read(Span<byte> buffer) => stream.Read(buffer);

View File

@@ -1,4 +1,4 @@
#nullable disable
#nullable disable
using System;
using System.IO;
@@ -42,14 +42,17 @@ internal class CBZip2InputStream : Stream
private static void Cadvise()
{
//System.out.Println("CRC Error");
//throw new CCoruptionError();
throw new InvalidOperationException("BZip2 error");
}
private static void BadBGLengths() => Cadvise();
private static void BitStreamEOF() => Cadvise();
private static void CompressedStreamEOF() => Cadvise();
private static void CompressedStreamEOF()
{
throw new InvalidOperationException("BZip2 compressed file ends unexpectedly");
}
private void MakeMaps()
{

View File

@@ -0,0 +1,746 @@
using System;
using System.IO;
using SharpCompress.Common.Zip.Headers;
namespace SharpCompress.Compressors.Explode;
public class ExplodeStream : Stream
{
private const int INVALID_CODE = 99;
private const int WSIZE = 64 * 1024;
private readonly long unCompressedSize;
private readonly int compressedSize;
private readonly HeaderFlags generalPurposeBitFlag;
private readonly Stream inStream;
private huftNode[]? hufLiteralCodeTable; /* literal code table */
private huftNode[] hufLengthCodeTable = []; /* length code table */
private huftNode[] hufDistanceCodeTable = []; /* distance code table */
private int bitsForLiteralCodeTable;
private int bitsForLengthCodeTable;
private int bitsForDistanceCodeTable;
private int numOfUncodedLowerDistanceBits; /* number of uncoded lower distance bits */
private ulong bitBuffer;
private int bitBufferCount;
private readonly byte[] windowsBuffer;
private uint maskForLiteralCodeTable;
private uint maskForLengthCodeTable;
private uint maskForDistanceCodeTable;
private uint maskForDistanceLowBits;
private long outBytesCount;
private int windowIndex;
private int distance;
private int length;
internal ExplodeStream(
Stream inStr,
long compressedSize,
long uncompressedSize,
HeaderFlags generalPurposeBitFlag
)
{
inStream = inStr;
this.compressedSize = (int)compressedSize;
unCompressedSize = (long)uncompressedSize;
this.generalPurposeBitFlag = generalPurposeBitFlag;
explode_SetTables();
windowsBuffer = new byte[WSIZE];
explode_var_init();
}
public override void Flush()
{
throw new NotImplementedException();
}
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotImplementedException();
}
public override void SetLength(long value)
{
throw new NotImplementedException();
}
public override void Write(byte[] buffer, int offset, int count)
{
throw new NotImplementedException();
}
public override bool CanRead => true;
public override bool CanSeek => false;
public override bool CanWrite => false;
public override long Length => unCompressedSize;
public override long Position
{
get => outBytesCount;
set { }
}
static uint[] mask_bits = new uint[]
{
0x0000,
0x0001,
0x0003,
0x0007,
0x000f,
0x001f,
0x003f,
0x007f,
0x00ff,
0x01ff,
0x03ff,
0x07ff,
0x0fff,
0x1fff,
0x3fff,
0x7fff,
0xffff
};
/* Tables for length and distance */
static int[] cplen2 = new int[]
{
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
46,
47,
48,
49,
50,
51,
52,
53,
54,
55,
56,
57,
58,
59,
60,
61,
62,
63,
64,
65
};
static int[] cplen3 = new int[]
{
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
46,
47,
48,
49,
50,
51,
52,
53,
54,
55,
56,
57,
58,
59,
60,
61,
62,
63,
64,
65,
66
};
static int[] extra = new int[]
{
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
8
};
static int[] cpdist4 = new int[]
{
1,
65,
129,
193,
257,
321,
385,
449,
513,
577,
641,
705,
769,
833,
897,
961,
1025,
1089,
1153,
1217,
1281,
1345,
1409,
1473,
1537,
1601,
1665,
1729,
1793,
1857,
1921,
1985,
2049,
2113,
2177,
2241,
2305,
2369,
2433,
2497,
2561,
2625,
2689,
2753,
2817,
2881,
2945,
3009,
3073,
3137,
3201,
3265,
3329,
3393,
3457,
3521,
3585,
3649,
3713,
3777,
3841,
3905,
3969,
4033
};
static int[] cpdist8 = new int[]
{
1,
129,
257,
385,
513,
641,
769,
897,
1025,
1153,
1281,
1409,
1537,
1665,
1793,
1921,
2049,
2177,
2305,
2433,
2561,
2689,
2817,
2945,
3073,
3201,
3329,
3457,
3585,
3713,
3841,
3969,
4097,
4225,
4353,
4481,
4609,
4737,
4865,
4993,
5121,
5249,
5377,
5505,
5633,
5761,
5889,
6017,
6145,
6273,
6401,
6529,
6657,
6785,
6913,
7041,
7169,
7297,
7425,
7553,
7681,
7809,
7937,
8065
};
private int get_tree(int[] arrBitLengths, int numberExpected)
/* Get the bit lengths for a code representation from the compressed
stream. If get_tree() returns 4, then there is an error in the data.
Otherwise zero is returned. */
{
/* get bit lengths */
int inIndex = inStream.ReadByte() + 1; /* length/count pairs to read */
int outIndex = 0; /* next code */
do
{
int nextByte = inStream.ReadByte();
int bitLengthOfCodes = (nextByte & 0xf) + 1; /* bits in code (1..16) */
int numOfCodes = ((nextByte & 0xf0) >> 4) + 1; /* codes with those bits (1..16) */
if (outIndex + numOfCodes > numberExpected)
return 4; /* don't overflow arrBitLengths[] */
do
{
arrBitLengths[outIndex++] = bitLengthOfCodes;
} while ((--numOfCodes) != 0);
} while ((--inIndex) != 0);
return outIndex != numberExpected ? 4 : 0; /* should have read numberExpected of them */
}
/* Build the Huffman tables needed to explode the entry: an optional
   literal table (when bit 2 of the general-purpose flags is set), a
   length table, and a distance table whose layout depends on the
   window size (bit 1: 8K vs 4K). Returns 0 on success, otherwise the
   non-zero error code from get_tree/huftbuid. */
private int explode_SetTables()
{
    var codeLengths = new int[256]; // scratch bit-length buffer, reused per table

    bitsForLiteralCodeTable = 0; /* bits for tb */
    bitsForLengthCodeTable = 7;
    // Larger entries profit from a wider distance lookup table.
    bitsForDistanceCodeTable = compressedSize > 200000 ? 8 : 7;

    int status;
    if ((generalPurposeBitFlag & HeaderFlags.Bit2) != 0)
    {
        /* With literal tree--minimum match length is 3 */
        bitsForLiteralCodeTable = 9; /* base table size for literals */
        status = get_tree(codeLengths, 256);
        if (status != 0)
        {
            return status;
        }
        status = HuftTree.huftbuid(
            codeLengths,
            256,
            256,
            [],
            [],
            out hufLiteralCodeTable,
            ref bitsForLiteralCodeTable
        );
        if (status != 0)
        {
            return status;
        }
        status = get_tree(codeLengths, 64);
        if (status != 0)
        {
            return status;
        }
        status = HuftTree.huftbuid(
            codeLengths,
            64,
            0,
            cplen3,
            extra,
            out hufLengthCodeTable,
            ref bitsForLengthCodeTable
        );
        if (status != 0)
        {
            return status;
        }
    }
    else
    {
        /* No literal tree--minimum match length is 2 */
        status = get_tree(codeLengths, 64);
        if (status != 0)
        {
            return status;
        }
        hufLiteralCodeTable = null;
        status = HuftTree.huftbuid(
            codeLengths,
            64,
            0,
            cplen2,
            extra,
            out hufLengthCodeTable,
            ref bitsForLengthCodeTable
        );
        if (status != 0)
        {
            return status;
        }
    }

    status = get_tree(codeLengths, 64);
    if (status != 0)
    {
        return status;
    }

    if ((generalPurposeBitFlag & HeaderFlags.Bit1) != 0) /* true if 8K */
    {
        numOfUncodedLowerDistanceBits = 7;
        return HuftTree.huftbuid(
            codeLengths,
            64,
            0,
            cpdist8,
            extra,
            out hufDistanceCodeTable,
            ref bitsForDistanceCodeTable
        );
    }

    /* else 4K */
    numOfUncodedLowerDistanceBits = 6;
    return HuftTree.huftbuid(
        codeLengths,
        64,
        0,
        cpdist4,
        extra,
        out hufDistanceCodeTable,
        ref bitsForDistanceCodeTable
    );
}
// Pull whole bytes from the input stream until at least numberOfBits
// bits are available in the bit buffer (LSB-first accumulation).
private void NeedBits(int numberOfBits)
{
    for (; bitBufferCount < numberOfBits; bitBufferCount += 8)
    {
        bitBuffer |= (uint)inStream.ReadByte() << bitBufferCount;
    }
}
// Discard numberOfBits bits that the caller has just consumed from the
// low end of the bit buffer.
private void DumpBits(int numberOfBits)
{
    bitBuffer = bitBuffer >> numberOfBits;
    bitBufferCount = bitBufferCount - numberOfBits;
}
// Walk a Huffman table (and any linked sub-tables) using the inverted
// low bits of the bit buffer as the index. On success returns 0 with
// huftPointer at the decoded leaf and e holding its extra-bits field;
// returns 1 when the bits do not form a valid code.
int DecodeHuft(huftNode[] htab, int bits, uint mask, out huftNode huftPointer, out int e)
{
    NeedBits(bits);
    var slot = (int)(~bitBuffer & mask);
    huftPointer = htab[slot];
    for (; ; )
    {
        DumpBits(huftPointer.NumberOfBitsUsed);
        e = huftPointer.NumberOfExtraBits;

        // Entries with e <= 32 are leaves (literal/length/distance values).
        if (e <= 32)
        {
            return 0;
        }

        // Unused slot: the input bit pattern is not a valid code.
        if (e == INVALID_CODE)
        {
            return 1;
        }

        // Otherwise descend into the linked sub-table using the next
        // (e & 31) bits of input.
        e &= 31;
        NeedBits(e);
        slot = (int)(~bitBuffer & mask_bits[e]);
        huftPointer = huftPointer.ChildNodes[slot];
    }
}
// Reset the bit reader and sliding-window state, and pre-compute the
// index masks for each Huffman table width, before exploding the data.
private void explode_var_init()
{
    bitBuffer = 0;
    bitBufferCount = 0;
    maskForLiteralCodeTable = mask_bits[bitsForLiteralCodeTable]; // only used when a literal tree exists
    maskForLengthCodeTable = mask_bits[bitsForLengthCodeTable];
    maskForDistanceCodeTable = mask_bits[bitsForDistanceCodeTable];
    maskForDistanceLowBits = mask_bits[numOfUncodedLowerDistanceBits];
    outBytesCount = 0;
    windowIndex = 0;
}
/// <summary>
/// Decompresses up to <paramref name="count"/> bytes of "exploded" data into
/// <paramref name="buffer"/>. Each iteration either decodes one literal byte
/// or starts a (length, distance) copy from the sliding window; a copy in
/// progress (length != 0) is resumed across calls. Returns the number of
/// bytes produced, 0 once unCompressedSize bytes have been emitted.
/// </summary>
public override int Read(byte[] buffer, int offset, int count)
{
    int countIndex = 0;
    while (countIndex < count && outBytesCount < unCompressedSize) /* do until unCompressedSize bytes uncompressed */
    {
        if (length == 0)
        {
            // A set flag bit means "literal", a clear bit means "match".
            NeedBits(1);
            bool literal = (bitBuffer & 1) == 1;
            DumpBits(1);
            huftNode huftPointer;
            if (literal) /* then literal--decode it */
            {
                byte nextByte;
                if (hufLiteralCodeTable != null)
                {
                    /* get coded literal */
                    // NOTE(review): a bare Exception here (and below) would
                    // conventionally be InvalidOperationException.
                    if (
                        DecodeHuft(
                            hufLiteralCodeTable,
                            bitsForLiteralCodeTable,
                            maskForLiteralCodeTable,
                            out huftPointer,
                            out _
                        ) != 0
                    )
                        throw new Exception("Error decoding literal value");
                    nextByte = (byte)huftPointer.Value;
                }
                else
                {
                    // No literal tree: literals are stored as raw 8-bit values.
                    NeedBits(8);
                    nextByte = (byte)bitBuffer;
                    DumpBits(8);
                }
                // Emit the literal and mirror it into the sliding window.
                buffer[offset + (countIndex++)] = nextByte;
                windowsBuffer[windowIndex++] = nextByte;
                outBytesCount++;
                if (windowIndex == WSIZE)
                    windowIndex = 0;
                continue;
            }
            // Match: distance = uncoded low bits + Huffman-coded high bits.
            NeedBits(numOfUncodedLowerDistanceBits); /* get distance low bits */
            distance = (int)(bitBuffer & maskForDistanceLowBits);
            DumpBits(numOfUncodedLowerDistanceBits);
            /* get coded distance high bits */
            if (
                DecodeHuft(
                    hufDistanceCodeTable,
                    bitsForDistanceCodeTable,
                    maskForDistanceCodeTable,
                    out huftPointer,
                    out _
                ) != 0
            )
                throw new Exception("Error decoding distance high bits");
            distance = windowIndex - (distance + huftPointer.Value); /* construct offset */
            /* get coded length */
            if (
                DecodeHuft(
                    hufLengthCodeTable,
                    bitsForLengthCodeTable,
                    maskForLengthCodeTable,
                    out huftPointer,
                    out int extraBitLength
                ) != 0
            )
                throw new Exception("Error decoding coded length");
            length = huftPointer.Value;
            if (extraBitLength != 0) /* get length extra bits */
            {
                NeedBits(8);
                length += (int)(bitBuffer & 0xff);
                DumpBits(8);
            }
            // Clamp the copy so we never emit past the declared output size.
            if (length > (unCompressedSize - outBytesCount))
                length = (int)(unCompressedSize - outBytesCount);
            // Wrap the (possibly negative) offset into the circular window.
            distance &= WSIZE - 1;
        }
        // Copy the pending match byte-by-byte; an overlapping copy is
        // intentional (each copied byte is immediately re-readable).
        while (length != 0 && countIndex < count)
        {
            byte nextByte = windowsBuffer[distance++];
            buffer[offset + (countIndex++)] = nextByte;
            windowsBuffer[windowIndex++] = nextByte;
            outBytesCount++;
            if (distance == WSIZE)
                distance = 0;
            if (windowIndex == WSIZE)
                windowIndex = 0;
            length--;
        }
    }
    return countIndex;
}
}

View File

@@ -0,0 +1,269 @@
/*
* This code has been converted to C# based on the original huft_tree code found in
* inflate.c -- by Mark Adler version c17e, 30 Mar 2007
*/
namespace SharpCompress.Compressors.Explode;
/// <summary>
/// One entry in a Huffman decode table built by <see cref="HuftTree.huftbuid"/>.
/// A leaf entry carries a decoded <see cref="Value"/>; a link entry instead
/// points at the next-level sub-table via <see cref="ChildNodes"/>.
/// </summary>
public class huftNode
{
    public int NumberOfExtraBits; /* number of extra bits or operation */
    public int NumberOfBitsUsed; /* number of bits in this code or subcode */
    public int Value; /* literal, length base, or distance base */
    public huftNode[] ChildNodes = []; /* next level of table */
}
public static class HuftTree
{
    private const int INVALID_CODE = 99;

    /* If BMAX needs to be larger than 16, then h and x[] should be ulg. */
    private const int BMAX = 16; /* maximum bit length of any code (16 for explode) */
    private const int N_MAX = 288; /* maximum number of codes in any set */

    /// <summary>
    /// Given a list of code lengths and a maximum table size, make a set of
    /// tables to decode that set of codes. Return zero on success, one if
    /// the given code set is incomplete (the tables are still built in this
    /// case), and two if the input is invalid (all zero length codes or an
    /// oversubscribed set of lengths). The code with value 256 is special,
    /// and the tables are constructed so that no bits beyond that code are
    /// fetched when that code is decoded.
    /// </summary>
    /// <param name="arrBitLengthForCodes">Bit length of each code (0 = unused).</param>
    /// <param name="numberOfCodes">Number of entries in <paramref name="arrBitLengthForCodes"/>.</param>
    /// <param name="numberOfSimpleValueCodes">Codes below this value are "simple" (the value itself).</param>
    /// <param name="arrBaseValuesForNonSimpleCodes">Base values for non-simple codes.</param>
    /// <param name="arrExtraBitsForNonSimpleCodes">Extra-bit counts for non-simple codes.</param>
    /// <param name="outHufTable">Receives the root decode table.</param>
    /// <param name="outBitsForTable">In: requested root table width; out: actual width used.</param>
    public static int huftbuid(
        int[] arrBitLengthForCodes,
        int numberOfCodes,
        int numberOfSimpleValueCodes,
        int[] arrBaseValuesForNonSimpleCodes,
        int[] arrExtraBitsForNonSimpleCodes,
        out huftNode[] outHufTable,
        ref int outBitsForTable
    )
    {
        outHufTable = [];

        /* Generate counts for each bit length */
        int lengthOfEOBcode = numberOfCodes > 256 ? arrBitLengthForCodes[256] : BMAX; /* set length of EOB code, if any */
        // C# guarantees that newly allocated arrays are zero-filled, so the
        // original explicit clearing loop was redundant and has been removed.
        int[] arrBitLengthCount = new int[BMAX + 1];
        int pIndex = 0;
        int counterCurrentCode = numberOfCodes;
        do
        {
            arrBitLengthCount[arrBitLengthForCodes[pIndex]]++;
            pIndex++; /* assume all entries <= BMAX */
        } while ((--counterCurrentCode) != 0);
        if (arrBitLengthCount[0] == numberOfCodes) /* null input--all zero length codes */
        {
            return 0;
        }

        /* Find minimum and maximum length, bound *outBitsForTable by those */
        int counter;
        for (counter = 1; counter <= BMAX; counter++)
            if (arrBitLengthCount[counter] != 0)
                break;
        int numberOfBitsInCurrentCode = counter; /* minimum code length */
        if (outBitsForTable < counter)
            outBitsForTable = counter;
        for (counterCurrentCode = BMAX; counterCurrentCode != 0; counterCurrentCode--)
            if (arrBitLengthCount[counterCurrentCode] != 0)
                break;
        int maximumCodeLength = counterCurrentCode; /* maximum code length */
        if (outBitsForTable > counterCurrentCode)
            outBitsForTable = counterCurrentCode;

        /* Adjust last length count to fill out codes, if needed */
        int numberOfDummyCodesAdded;
        for (
            numberOfDummyCodesAdded = 1 << counter;
            counter < counterCurrentCode;
            counter++, numberOfDummyCodesAdded <<= 1
        )
            if ((numberOfDummyCodesAdded -= arrBitLengthCount[counter]) < 0)
                return 2; /* bad input: more codes than bits */
        if ((numberOfDummyCodesAdded -= arrBitLengthCount[counterCurrentCode]) < 0)
            return 2;
        arrBitLengthCount[counterCurrentCode] += numberOfDummyCodesAdded;

        /* Generate starting offsets into the value table for each length */
        int[] bitOffset = new int[BMAX + 1];
        bitOffset[1] = 0;
        counter = 0;
        pIndex = 1;
        int xIndex = 2;
        while ((--counterCurrentCode) != 0)
        { /* note that i == g from above */
            bitOffset[xIndex++] = (counter += arrBitLengthCount[pIndex++]);
        }

        /* Make a table of values in order of bit lengths */
        // (zero-initialized by the runtime; no explicit clearing needed)
        int[] arrValuesInOrderOfBitLength = new int[N_MAX];
        pIndex = 0;
        counterCurrentCode = 0;
        do
        {
            if ((counter = arrBitLengthForCodes[pIndex++]) != 0)
                arrValuesInOrderOfBitLength[bitOffset[counter]++] = counterCurrentCode;
        } while (++counterCurrentCode < numberOfCodes);
        numberOfCodes = bitOffset[maximumCodeLength]; /* set numberOfCodes to length of v */

        /* Generate the Huffman codes and for each, make the table entries */
        bitOffset[0] = counterCurrentCode = 0; /* first Huffman code is zero */
        pIndex = 0; /* grab values in bit order */
        int tableLevel = -1; /* no tables yet--level -1 */
        int bitsBeforeThisTable = 0;
        int[] arrLX = new int[BMAX + 1];
        int stackOfBitsPerTable = 1; /* stack of bits per table */
        arrLX[stackOfBitsPerTable - 1] = 0; /* no bits decoded yet */
        huftNode[][] arrHufTableStack = new huftNode[BMAX][];
        huftNode[] pointerToCurrentTable = [];
        int numberOfEntriesInCurrentTable = 0;
        bool first = true;

        /* go through the bit lengths (k already is bits in shortest code) */
        for (; numberOfBitsInCurrentCode <= maximumCodeLength; numberOfBitsInCurrentCode++)
        {
            int counterForCodes = arrBitLengthCount[numberOfBitsInCurrentCode];
            while ((counterForCodes--) != 0)
            {
                /* here i is the Huffman code of length k bits for value *p */
                /* make tables up to required level */
                while (
                    numberOfBitsInCurrentCode
                    > bitsBeforeThisTable + arrLX[stackOfBitsPerTable + tableLevel]
                )
                {
                    bitsBeforeThisTable += arrLX[stackOfBitsPerTable + (tableLevel++)]; /* add bits already decoded */

                    /* compute minimum size table less than or equal to *outBitsForTable bits */
                    numberOfEntriesInCurrentTable =
                        (numberOfEntriesInCurrentTable = maximumCodeLength - bitsBeforeThisTable)
                        > outBitsForTable
                            ? outBitsForTable
                            : numberOfEntriesInCurrentTable; /* upper limit */
                    int fBitCounter1 =
                        1 << (counter = numberOfBitsInCurrentCode - bitsBeforeThisTable);
                    if (fBitCounter1 > counterForCodes + 1) /* try a k-w bit table */
                    { /* too few codes for k-w bit table */
                        fBitCounter1 -= counterForCodes + 1; /* deduct codes from patterns left */
                        xIndex = numberOfBitsInCurrentCode;
                        while (++counter < numberOfEntriesInCurrentTable) /* try smaller tables up to z bits */
                        {
                            if ((fBitCounter1 <<= 1) <= arrBitLengthCount[++xIndex])
                                break; /* enough codes to use up j bits */
                            fBitCounter1 -= arrBitLengthCount[xIndex]; /* else deduct codes from patterns */
                        }
                    }
                    if (
                        bitsBeforeThisTable + counter > lengthOfEOBcode
                        && bitsBeforeThisTable < lengthOfEOBcode
                    )
                        counter = lengthOfEOBcode - bitsBeforeThisTable; /* make EOB code end at table */
                    numberOfEntriesInCurrentTable = 1 << counter; /* table entries for j-bit table */
                    arrLX[stackOfBitsPerTable + tableLevel] = counter; /* set table size in stack */

                    /* allocate and link in new table */
                    pointerToCurrentTable = new huftNode[numberOfEntriesInCurrentTable];
                    // set the pointer, pointed to by *outHufTable to the second huft in pointertoCurrentTable
                    if (first)
                    {
                        outHufTable = pointerToCurrentTable; /* link to list for huft_free() */
                        first = false;
                    }
                    arrHufTableStack[tableLevel] = pointerToCurrentTable; /* table starts after link */

                    /* connect to last table, if there is one */
                    if (tableLevel != 0)
                    {
                        bitOffset[tableLevel] = counterCurrentCode; /* save pattern for backing up */
                        huftNode vHuft = new huftNode
                        {
                            NumberOfBitsUsed = arrLX[stackOfBitsPerTable + tableLevel - 1], /* bits to dump before this table */
                            NumberOfExtraBits = 32 + counter, /* bits in this table */
                            ChildNodes = pointerToCurrentTable /* pointer to this table */
                        };
                        counter =
                            (counterCurrentCode & ((1 << bitsBeforeThisTable) - 1))
                            >> (bitsBeforeThisTable - arrLX[stackOfBitsPerTable + tableLevel - 1]);
                        arrHufTableStack[tableLevel - 1][counter] = vHuft; /* connect to last table */
                    }
                }

                /* set up table entry in r */
                huftNode vHuft1 = new huftNode
                {
                    NumberOfBitsUsed = numberOfBitsInCurrentCode - bitsBeforeThisTable
                };
                if (pIndex >= numberOfCodes)
                    vHuft1.NumberOfExtraBits = INVALID_CODE; /* out of values--invalid code */
                else if (arrValuesInOrderOfBitLength[pIndex] < numberOfSimpleValueCodes)
                {
                    vHuft1.NumberOfExtraBits = (
                        arrValuesInOrderOfBitLength[pIndex] < 256 ? 32 : 31
                    ); /* 256 is end-of-block code */
                    vHuft1.Value = arrValuesInOrderOfBitLength[pIndex++]; /* simple code is just the value */
                }
                else
                {
                    vHuft1.NumberOfExtraBits = arrExtraBitsForNonSimpleCodes[
                        arrValuesInOrderOfBitLength[pIndex] - numberOfSimpleValueCodes
                    ]; /* non-simple--look up in lists */
                    vHuft1.Value = arrBaseValuesForNonSimpleCodes[
                        arrValuesInOrderOfBitLength[pIndex++] - numberOfSimpleValueCodes
                    ];
                }

                /* fill code-like entries with r */
                int fBitCounter2 = 1 << (numberOfBitsInCurrentCode - bitsBeforeThisTable);
                for (
                    counter = counterCurrentCode >> bitsBeforeThisTable;
                    counter < numberOfEntriesInCurrentTable;
                    counter += fBitCounter2
                )
                    pointerToCurrentTable[counter] = vHuft1;

                /* backwards increment the k-bit code i */
                for (
                    counter = 1 << (numberOfBitsInCurrentCode - 1);
                    (counterCurrentCode & counter) != 0;
                    counter >>= 1
                )
                    counterCurrentCode ^= counter;
                counterCurrentCode ^= counter;

                /* backup over finished tables */
                while (
                    (counterCurrentCode & ((1 << bitsBeforeThisTable) - 1)) != bitOffset[tableLevel]
                )
                    bitsBeforeThisTable -= arrLX[stackOfBitsPerTable + (--tableLevel)];
            }
        }

        /* return actual size of base table */
        outBitsForTable = arrLX[stackOfBitsPerTable];

        /* Return true (1) if we were given an incomplete table */
        return (numberOfDummyCodesAdded != 0 && maximumCodeLength != 1) ? 1 : 0;
    }
}

View File

@@ -0,0 +1,63 @@
using System;
using System.Buffers.Binary;
using System.IO;
namespace SharpCompress.Compressors.Filters;
/// <summary>
/// BCJ branch-conversion filter for ARM64 code: rewrites the PC-relative
/// immediates of 0x94xxxxxx (high opcode bits 0x25) and 0x90000000-pattern
/// instructions between relative and absolute form so that repeated call
/// targets compress better. Decoding reverses the encoder's adjustment.
/// </summary>
internal class BCJFilterARM64 : Filter
{
    // Absolute stream position of the start of the current buffer.
    private int _pos;

    public BCJFilterARM64(bool isEncoder, Stream baseStream)
        : base(isEncoder, baseStream, 8) => _pos = 0;

    protected override int Transform(byte[] buffer, int offset, int count)
    {
        var limit = offset + count - 4;
        int cursor;
        for (cursor = offset; cursor <= limit; cursor += 4)
        {
            var word = BinaryPrimitives.ReadUInt32LittleEndian(
                new ReadOnlySpan<byte>(buffer, cursor, 4)
            );
            var pc = (uint)(_pos + cursor - offset);

            if ((word >> 26) == 0x25)
            {
                // 26-bit branch immediate, adjusted by pc measured in words.
                var delta = pc >> 2;
                if (!_isEncoder)
                {
                    delta = 0U - delta;
                }
                var converted = 0x94000000 | ((word + delta) & 0x03FFFFFF);
                BinaryPrimitives.WriteUInt32LittleEndian(
                    new Span<byte>(buffer, cursor, 4),
                    converted
                );
            }
            else if ((word & 0x9F000000) == 0x90000000)
            {
                // Reassemble the split 21-bit page immediate.
                var src = ((word >> 29) & 3) | ((word >> 3) & 0x001FFFFC);
                // Only convert immediates within the +/-512 MiB range the
                // original filter accepts.
                if (((src + 0x00020000) & 0x001C0000) == 0)
                {
                    var delta = pc >> 12;
                    if (!_isEncoder)
                    {
                        delta = 0U - delta;
                    }
                    var dest = src + delta;
                    var converted = word & 0x9000001F;
                    converted |= (dest & 3) << 29;
                    converted |= (dest & 0x0003FFFC) << 3;
                    converted |= (0U - (dest & 0x00020000)) & 0x00E00000;
                    BinaryPrimitives.WriteUInt32LittleEndian(
                        new Span<byte>(buffer, cursor, 4),
                        converted
                    );
                }
            }
        }

        var processed = cursor - offset;
        _pos += processed;
        return processed;
    }
}

View File

@@ -0,0 +1,210 @@
using System;
using System.Buffers.Binary;
using System.IO;
namespace SharpCompress.Compressors.Filters;
/// <summary>
/// BCJ branch-conversion filter for RISC-V machine code. Rewrites the
/// relative addresses in JAL-with-rd=ra instructions (low byte 0xEF) and in
/// AUIPC instruction pairs (opcode bits 0x17) so that call targets become
/// position-independent and compress better; Decode reverses Encode.
/// NOTE(review): the bit manipulation follows xz's RISC-V filter — verify
/// against liblzma's riscv filter source before changing any mask.
/// </summary>
internal class BCJFilterRISCV : Filter
{
    // Absolute stream position of the start of the current buffer.
    private int _pos;

    public BCJFilterRISCV(bool isEncoder, Stream baseStream)
        : base(isEncoder, baseStream, 8) => _pos = 0;

    // Converts filtered (absolute) addresses back to relative form.
    // Returns the number of bytes fully processed; the caller retries the
    // unprocessed tail (< 8 bytes) on the next call.
    private int Decode(byte[] buffer, int offset, int count)
    {
        if (count < 8)
        {
            return 0;
        }
        var end = offset + count - 8;
        int i;
        // Instructions are 2-byte aligned (RISC-V compressed extension).
        for (i = offset; i <= end; i += 2)
        {
            uint inst = buffer[i];
            if (inst == 0xEF)
            {
                // JAL with rd = x1 (return address register).
                uint b1 = buffer[i + 1];
                // Filter only accepts the address bit layout the encoder produced.
                if ((b1 & 0x0D) != 0)
                    continue;
                uint b2 = buffer[i + 2];
                uint b3 = buffer[i + 3];
                uint pc = (uint)(_pos + i);
                // Reassemble the absolute address the encoder stored, then
                // re-relativize it and scatter it back into the J-type fields.
                uint addr = ((b1 & 0xF0) << 13) | (b2 << 9) | (b3 << 1);
                addr -= pc;
                buffer[i + 1] = (byte)((b1 & 0x0F) | ((addr >> 8) & 0xF0));
                buffer[i + 2] = (byte)(
                    ((addr >> 16) & 0x0F) | ((addr >> 7) & 0x10) | ((addr << 4) & 0xE0)
                );
                buffer[i + 3] = (byte)(((addr >> 4) & 0x7F) | ((addr >> 13) & 0x80));
                i += 4 - 2; // consumed a 4-byte instruction
            }
            else if ((inst & 0x7F) == 0x17)
            {
                // AUIPC: handled as an 8-byte pair with the following word.
                uint inst2 = 0;
                inst |= (uint)buffer[i + 1] << 8;
                inst |= (uint)buffer[i + 2] << 16;
                inst |= (uint)buffer[i + 3] << 24;
                if ((inst & 0xE80) != 0)
                {
                    // AUIPC with rd outside x0/x2: encoder packed the pair
                    // specially; check the signature before unpacking.
                    inst2 = BinaryPrimitives.ReadUInt32LittleEndian(
                        new ReadOnlySpan<byte>(buffer, i + 4, 4)
                    );
                    if (((((inst) << 8) ^ (inst2)) & 0xF8003) != 3)
                    {
                        i += 6 - 2;
                        continue;
                    }
                    uint addr = inst & 0xFFFFF000;
                    addr += inst2 >> 20;
                    inst = 0x17 | (2 << 7) | (inst2 << 12);
                    inst2 = addr;
                }
                else
                {
                    // Special form produced by the encoder: the second word
                    // holds a big-endian absolute address to re-relativize.
                    uint inst2_rs1 = inst >> 27;
                    if ((uint)(((inst) - 0x3117) << 18) >= ((inst2_rs1) & 0x1D))
                    {
                        i += 4 - 2;
                        continue;
                    }
                    uint addr = BinaryPrimitives.ReadUInt32BigEndian(
                        new ReadOnlySpan<byte>(buffer, i + 4, 4)
                    );
                    addr -= (uint)(_pos + i);
                    inst2 = (inst >> 12) | (addr << 20);
                    inst = 0x17 | (inst2_rs1 << 7) | ((addr + 0x800) & 0xFFFFF000);
                }
                BinaryPrimitives.WriteUInt32LittleEndian(new Span<byte>(buffer, i, 4), inst);
                BinaryPrimitives.WriteUInt32LittleEndian(new Span<byte>(buffer, i + 4, 4), inst2);
                i += 8 - 2; // consumed the whole 8-byte pair
            }
        }
        i -= offset;
        _pos += i;
        return i;
    }

    // Mirror of Decode: converts relative addresses to absolute form before
    // compression. Same return/retry contract as Decode.
    private int Encode(byte[] buffer, int offset, int count)
    {
        if (count < 8)
        {
            return 0;
        }
        var end = offset + count - 8;
        int i;
        for (i = offset; i <= end; i += 2)
        {
            uint inst = buffer[i];
            if (inst == 0xEF)
            {
                // JAL with rd = x1: gather the scattered J-type immediate,
                // make it absolute, and store it in an easily-matched layout.
                uint b1 = buffer[i + 1];
                if ((b1 & 0x0D) != 0)
                    continue;
                uint b2 = buffer[i + 2];
                uint b3 = buffer[i + 3];
                uint pc = (uint)(_pos + i);
                uint addr =
                    ((b1 & 0xF0) << 8)
                    | ((b2 & 0x0F) << 16)
                    | ((b2 & 0x10) << 7)
                    | ((b2 & 0xE0) >> 4)
                    | ((b3 & 0x7F) << 4)
                    | ((b3 & 0x80) << 13);
                addr += pc;
                buffer[i + 1] = (byte)((b1 & 0x0F) | ((addr >> 13) & 0xF0));
                buffer[i + 2] = (byte)(addr >> 9);
                buffer[i + 3] = (byte)(addr >> 1);
                i += 4 - 2;
            }
            else if ((inst & 0x7F) == 0x17)
            {
                // AUIPC pair, see Decode for the inverse transformations.
                inst |= (uint)buffer[i + 1] << 8;
                inst |= (uint)buffer[i + 2] << 16;
                inst |= (uint)buffer[i + 3] << 24;
                if ((inst & 0xE80) != 0)
                {
                    uint inst2 = BinaryPrimitives.ReadUInt32LittleEndian(
                        new ReadOnlySpan<byte>(buffer, i + 4, 4)
                    );
                    if (((((inst) << 8) ^ (inst2)) & 0xF8003) != 3)
                    {
                        i += 6 - 2;
                        continue;
                    }
                    uint addr = inst & 0xFFFFF000;
                    // Sign-extend the 12-bit low part before adding.
                    addr += (inst2 >> 20) - ((inst2 >> 19) & 0x1000);
                    addr += (uint)(_pos + i);
                    inst = 0x17 | (2 << 7) | (inst2 << 12);
                    BinaryPrimitives.WriteUInt32LittleEndian(new Span<byte>(buffer, i, 4), inst);
                    // Absolute address stored big-endian so Decode can detect it.
                    BinaryPrimitives.WriteUInt32BigEndian(new Span<byte>(buffer, i + 4, 4), addr);
                }
                else
                {
                    uint fake_rs1 = inst >> 27;
                    if ((uint)(((inst) - 0x3117) << 18) >= ((fake_rs1) & 0x1D))
                    {
                        i += 4 - 2;
                        continue;
                    }
                    uint fake_addr = BinaryPrimitives.ReadUInt32LittleEndian(
                        new ReadOnlySpan<byte>(buffer, i + 4, 4)
                    );
                    uint fake_inst2 = (inst >> 12) | (fake_addr << 20);
                    inst = 0x17 | (fake_rs1 << 7) | (fake_addr & 0xFFFFF000);
                    BinaryPrimitives.WriteUInt32LittleEndian(new Span<byte>(buffer, i, 4), inst);
                    BinaryPrimitives.WriteUInt32LittleEndian(
                        new Span<byte>(buffer, i + 4, 4),
                        fake_inst2
                    );
                }
                i += 8 - 2;
            }
        }
        i -= offset;
        _pos += i;
        return i;
    }

    // Dispatches to Encode or Decode depending on the filter direction.
    protected override int Transform(byte[] buffer, int offset, int count)
    {
        if (_isEncoder)
        {
            return Encode(buffer, offset, count);
        }
        else
        {
            return Decode(buffer, offset, count);
        }
    }
}

View File

@@ -20,7 +20,8 @@ internal sealed class AesDecoderStream : DecoderStream2
public AesDecoderStream(Stream input, byte[] info, IPasswordProvider pass, long limit)
{
if (pass.CryptoGetTextPassword() == null)
var password = pass.CryptoGetTextPassword();
if (password == null)
{
throw new SharpCompress.Common.CryptographicException(
"Encrypted 7Zip archive has no password specified."
@@ -37,8 +38,8 @@ internal sealed class AesDecoderStream : DecoderStream2
Init(info, out var numCyclesPower, out var salt, out var seed);
var password = Encoding.Unicode.GetBytes(pass.CryptoGetTextPassword());
var key = InitKey(numCyclesPower, salt, password);
var passwordBytes = Encoding.Unicode.GetBytes(password);
var key = InitKey(numCyclesPower, salt, passwordBytes);
if (key == null)
{
throw new InvalidOperationException("Initialized with null key");
@@ -207,28 +208,6 @@ internal sealed class AesDecoderStream : DecoderStream2
}
else
{
#if NETSTANDARD2_0
using var sha = IncrementalHash.CreateHash(HashAlgorithmName.SHA256);
var counter = new byte[8];
var numRounds = 1L << mNumCyclesPower;
for (long round = 0; round < numRounds; round++)
{
sha.AppendData(salt, 0, salt.Length);
sha.AppendData(pass, 0, pass.Length);
sha.AppendData(counter, 0, 8);
// This mirrors the counter so we don't have to convert long to byte[] each round.
// (It also ensures the counter is little endian, which BitConverter does not.)
for (var i = 0; i < 8; i++)
{
if (++counter[i] != 0)
{
break;
}
}
}
return sha.GetHashAndReset();
#else
using var sha = SHA256.Create();
var counter = new byte[8];
var numRounds = 1L << mNumCyclesPower;
@@ -251,7 +230,6 @@ internal sealed class AesDecoderStream : DecoderStream2
sha.TransformFinalBlock(counter, 0, 0);
return sha.Hash;
#endif
}
}

View File

@@ -63,18 +63,18 @@ public sealed class LZipStream : Stream
var crc32Stream = (Crc32Stream)_stream;
crc32Stream.WrappedStream.Dispose();
crc32Stream.Dispose();
var compressedCount = _countingWritableSubStream!.Count;
var compressedCount = _countingWritableSubStream.NotNull().Count;
Span<byte> intBuf = stackalloc byte[8];
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, crc32Stream.Crc);
_countingWritableSubStream.Write(intBuf.Slice(0, 4));
_countingWritableSubStream?.Write(intBuf.Slice(0, 4));
BinaryPrimitives.WriteInt64LittleEndian(intBuf, _writeCount);
_countingWritableSubStream.Write(intBuf);
_countingWritableSubStream?.Write(intBuf);
//total with headers
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, compressedCount + 6 + 20);
_countingWritableSubStream.Write(intBuf);
_countingWritableSubStream?.Write(intBuf);
}
_finished = true;
}

View File

@@ -25,6 +25,8 @@ internal static class DecoderRegistry
private const uint K_ARM = 0x03030501;
private const uint K_ARMT = 0x03030701;
private const uint K_SPARC = 0x03030805;
private const uint K_ARM64 = 0x0A;
private const uint K_RISCV = 0x0B;
private const uint K_DEFLATE = 0x040108;
private const uint K_B_ZIP2 = 0x040202;
private const uint K_ZSTD = 0x4F71101;
@@ -66,6 +68,10 @@ internal static class DecoderRegistry
return new BCJFilterARMT(false, inStreams.Single());
case K_SPARC:
return new BCJFilterSPARC(false, inStreams.Single());
case K_ARM64:
return new BCJFilterARM64(false, inStreams.Single());
case K_RISCV:
return new BCJFilterRISCV(false, inStreams.Single());
case K_B_ZIP2:
return new BZip2Stream(inStreams.Single(), CompressionMode.Decompress, true);
case K_PPMD:

View File

@@ -2,5 +2,5 @@ namespace SharpCompress.Compressors.LZMA.Utilites;
internal interface IPasswordProvider
{
string CryptoGetTextPassword();
string? CryptoGetTextPassword();
}

View File

@@ -530,7 +530,6 @@ internal partial class Unpack
{
case FILTER_E8:
case FILTER_E8E9:
{
var FileOffset = (uint)WrittenFileSize;
@@ -569,7 +568,6 @@ internal partial class Unpack
}
return SrcData;
case FILTER_ARM:
{
var FileOffset = (uint)WrittenFileSize;
// DataSize is unsigned, so we use "CurPos+3" and not "DataSize-3"

View File

@@ -228,7 +228,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_CMP:
{
var value1 = (VMFlags)GetValue(cmd.IsByteMode, Mem, op1);
var result = value1 - GetValue(cmd.IsByteMode, Mem, op2);
@@ -247,7 +246,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_CMPB:
{
var value1 = (VMFlags)GetValue(true, Mem, op1);
var result = value1 - GetValue(true, Mem, op2);
@@ -265,7 +263,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_CMPD:
{
var value1 = (VMFlags)GetValue(false, Mem, op1);
var result = value1 - GetValue(false, Mem, op2);
@@ -283,7 +280,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_ADD:
{
var value1 = GetValue(cmd.IsByteMode, Mem, op1);
var result = (int)(
@@ -351,7 +347,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_SUB:
{
var value1 = GetValue(cmd.IsByteMode, Mem, op1);
var result = (int)(
@@ -411,7 +406,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_INC:
{
var result = (int)(GetValue(cmd.IsByteMode, Mem, op1) & (0xFFffFFffL + 1L));
if (cmd.IsByteMode)
@@ -440,7 +434,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_DEC:
{
var result = (int)(GetValue(cmd.IsByteMode, Mem, op1) & (0xFFffFFff - 1));
SetValue(cmd.IsByteMode, Mem, op1, result);
@@ -463,7 +456,6 @@ internal sealed class RarVM : BitInput
continue;
case VMCommands.VM_XOR:
{
var result =
GetValue(cmd.IsByteMode, Mem, op1) ^ GetValue(cmd.IsByteMode, Mem, op2);
@@ -475,7 +467,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_AND:
{
var result =
GetValue(cmd.IsByteMode, Mem, op1) & GetValue(cmd.IsByteMode, Mem, op2);
@@ -487,7 +478,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_OR:
{
var result =
GetValue(cmd.IsByteMode, Mem, op1) | GetValue(cmd.IsByteMode, Mem, op2);
@@ -499,7 +489,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_TEST:
{
var result =
GetValue(cmd.IsByteMode, Mem, op1) & GetValue(cmd.IsByteMode, Mem, op2);
@@ -578,7 +567,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_SHL:
{
var value1 = GetValue(cmd.IsByteMode, Mem, op1);
var value2 = GetValue(cmd.IsByteMode, Mem, op2);
@@ -596,7 +584,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_SHR:
{
var value1 = GetValue(cmd.IsByteMode, Mem, op1);
var value2 = GetValue(cmd.IsByteMode, Mem, op2);
@@ -610,7 +597,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_SAR:
{
var value1 = GetValue(cmd.IsByteMode, Mem, op1);
var value2 = GetValue(cmd.IsByteMode, Mem, op2);
@@ -624,7 +610,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_NEG:
{
var result = -GetValue(cmd.IsByteMode, Mem, op1);
flags = (VMFlags)(
@@ -645,7 +630,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_PUSHA:
{
for (int i = 0, SP = R[7] - 4; i < regCount; i++, SP -= 4)
{
@@ -656,7 +640,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_POPA:
{
for (int i = 0, SP = R[7]; i < regCount; i++, SP += 4)
{
@@ -684,7 +667,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_XCHG:
{
var value1 = GetValue(cmd.IsByteMode, Mem, op1);
SetValue(cmd.IsByteMode, Mem, op1, GetValue(cmd.IsByteMode, Mem, op2));
@@ -693,7 +675,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_MUL:
{
var result = (int)(
(
@@ -707,7 +688,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_DIV:
{
var divider = GetValue(cmd.IsByteMode, Mem, op2);
if (divider != 0)
@@ -719,7 +699,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_ADC:
{
var value1 = GetValue(cmd.IsByteMode, Mem, op1);
var FC = (int)(flags & VMFlags.VM_FC);
@@ -749,7 +728,6 @@ internal sealed class RarVM : BitInput
break;
case VMCommands.VM_SBB:
{
var value1 = GetValue(cmd.IsByteMode, Mem, op1);
var FC = (int)(flags & VMFlags.VM_FC);
@@ -1156,7 +1134,6 @@ internal sealed class RarVM : BitInput
{
case VMStandardFilters.VMSF_E8:
case VMStandardFilters.VMSF_E8E9:
{
var dataSize = R[4];
long fileOffset = R[6] & unchecked((int)0xFFffFFff);
@@ -1211,7 +1188,6 @@ internal sealed class RarVM : BitInput
break;
case VMStandardFilters.VMSF_ITANIUM:
{
var dataSize = R[4];
long fileOffset = R[6] & unchecked((int)0xFFffFFff);
@@ -1269,7 +1245,6 @@ internal sealed class RarVM : BitInput
break;
case VMStandardFilters.VMSF_DELTA:
{
var dataSize = R[4] & unchecked((int)0xFFffFFff);
var channels = R[0] & unchecked((int)0xFFffFFff);
@@ -1300,7 +1275,6 @@ internal sealed class RarVM : BitInput
break;
case VMStandardFilters.VMSF_RGB:
{
// byte *SrcData=Mem,*DestData=SrcData+DataSize;
int dataSize = R[4],
@@ -1366,7 +1340,6 @@ internal sealed class RarVM : BitInput
break;
case VMStandardFilters.VMSF_AUDIO:
{
int dataSize = R[4],
channels = R[0];
@@ -1497,7 +1470,6 @@ internal sealed class RarVM : BitInput
break;
case VMStandardFilters.VMSF_UPCASE:
{
int dataSize = R[4],
srcPos = 0,

View File

@@ -0,0 +1,249 @@
using System;
using System.IO;
namespace SharpCompress.Compressors.Reduce;
/// <summary>
/// Read-only decompression stream for the legacy PKZIP "Reduce" method
/// (compression factors 1-4). Reduce combines a probabilistic follower-set
/// byte model with a simple LZ77-style sliding-window copy scheme keyed by
/// the DLE marker byte (144).
/// </summary>
public class ReduceStream : Stream
{
    private readonly long unCompressedSize;
    private readonly long compressedSize;
    private readonly Stream inStream;
    private long inByteCount;

    // Sentinel returned by NEXTBYTE when the compressed data is exhausted.
    // Deliberately not -1 so it cannot collide with Stream.ReadByte's EOF.
    private const int EOF = 1234;
    private readonly int factor;
    private readonly int distanceMask;
    private readonly int lengthMask;
    private long outBytesCount;
    private readonly byte[] windowsBuffer;
    private int windowIndex;
    private int length; // remaining bytes of a copy in progress (resumed across Read calls)
    private int distance; // window read position of that copy

    /// <summary>
    /// Creates the stream and immediately reads the follower-set tables from
    /// the head of the compressed data.
    /// </summary>
    /// <param name="inStr">Compressed input.</param>
    /// <param name="compsize">Number of compressed bytes available.</param>
    /// <param name="unCompSize">Number of bytes this stream will produce.</param>
    /// <param name="factor">Reduction factor, 1..4.</param>
    public ReduceStream(Stream inStr, long compsize, long unCompSize, int factor)
    {
        inStream = inStr;
        compressedSize = compsize;
        unCompressedSize = unCompSize;
        inByteCount = 0;
        outBytesCount = 0;
        this.factor = factor;
        // Splitting of the DLE second byte: high (8 - factor) bits feed the
        // distance, low (8 - factor) ... lengthMask bits feed the length.
        distanceMask = (int)mask_bits[factor] << 8;
        lengthMask = 0xff >> factor;
        windowIndex = 0;
        length = 0;
        distance = 0;
        windowsBuffer = new byte[WSIZE];
        outByte = 0;
        LoadBitLengthTable();
        LoadNextByteTable();
    }

    /// <summary>No-op: this stream is read-only, so there is nothing to flush.</summary>
    public override void Flush()
    {
        // Intentionally empty. Throwing here (as the original code did)
        // violates the Stream contract for readable streams.
    }

    // Seeking/resizing/writing are unsupported on a forward-only decoder;
    // NotSupportedException is the conventional Stream exception for this.
    public override long Seek(long offset, SeekOrigin origin) =>
        throw new NotSupportedException();

    public override void SetLength(long value) => throw new NotSupportedException();

    public override void Write(byte[] buffer, int offset, int count) =>
        throw new NotSupportedException();

    public override bool CanRead => true;
    public override bool CanSeek => false;
    public override bool CanWrite => false;
    public override long Length => unCompressedSize;

    public override long Position
    {
        get => outBytesCount;
        // NOTE(review): setter silently ignores the value (kept for
        // compatibility); a seekable-position set would normally throw.
        set { }
    }

    // DLE marker: introduces a (length, distance) copy, or escapes itself.
    private const int RunLengthCode = 144;

    // Sliding window size (16 KiB).
    private const int WSIZE = 0x4000;

    // Low-bit masks indexed by bit count; shared by all instances.
    private static readonly uint[] mask_bits = new uint[]
    {
        0x0000,
        0x0001,
        0x0003,
        0x0007,
        0x000f,
        0x001f,
        0x003f,
        0x007f,
        0x00ff,
        0x01ff,
        0x03ff,
        0x07ff,
        0x0fff,
        0x1fff,
        0x3fff,
        0x7fff,
        0xffff
    };

    private int bitBufferCount;
    private ulong bitBuffer;

    // Reads the next compressed byte, or EOF once compressedSize is consumed.
    private int NEXTBYTE()
    {
        if (inByteCount == compressedSize)
            return EOF;
        inByteCount++;
        return inStream.ReadByte();
    }

    // Delivers the next nbits bits (LSB-first) into zdest, refilling the
    // bit buffer from the stream as needed.
    private void READBITS(int nbits, out byte zdest)
    {
        if (nbits > bitBufferCount)
        {
            int temp;
            // Keep at most 4 buffered bytes (24 bits + the incoming 8).
            while (bitBufferCount <= 24 && (temp = NEXTBYTE()) != EOF)
            {
                bitBuffer |= (ulong)temp << bitBufferCount;
                bitBufferCount += 8;
            }
        }
        zdest = (byte)(bitBuffer & (ulong)mask_bits[nbits]);
        bitBuffer >>= nbits;
        bitBufferCount -= nbits;
    }

    private byte[] bitCountTable = [];

    // bitCountTable[n] = number of bits needed to index a follower set of
    // size n (ceil(log2 n), bucketed by the bitPos thresholds).
    private void LoadBitLengthTable()
    {
        byte[] bitPos = { 0, 2, 4, 8, 16, 32, 64, 128, 255 };
        bitCountTable = new byte[256];
        for (byte i = 1; i <= 8; i++)
        {
            int vMin = bitPos[i - 1] + 1;
            int vMax = bitPos[i];
            for (int j = vMin; j <= vMax; j++)
            {
                bitCountTable[j] = i;
            }
        }
    }

    private byte[][] nextByteTable = [];

    // Reads the 256 follower sets from the stream head: for each previous
    // byte value (iterated 255..0), a 6-bit set size then that many bytes.
    private void LoadNextByteTable()
    {
        nextByteTable = new byte[256][];
        for (int x = 255; x >= 0; x--)
        {
            READBITS(6, out byte Slen);
            nextByteTable[x] = new byte[Slen];
            for (int i = 0; i < Slen; i++)
            {
                READBITS(8, out nextByteTable[x][i]);
            }
        }
    }

    // Last decoded byte; selects which follower set applies next.
    private byte outByte;

    // Decodes one byte: either a literal 8-bit value (empty follower set, or
    // explicit escape bit) or an index into the current follower set.
    private byte GetNextByte()
    {
        if (nextByteTable[outByte].Length == 0)
        {
            READBITS(8, out outByte);
            return outByte;
        }
        READBITS(1, out byte nextBit);
        if (nextBit == 1)
        {
            READBITS(8, out outByte);
            return outByte;
        }
        READBITS(bitCountTable[nextByteTable[outByte].Length], out byte nextByteIndex);
        outByte = nextByteTable[outByte][nextByteIndex];
        return outByte;
    }

    /// <summary>
    /// Decompresses up to <paramref name="count"/> bytes. Each step emits a
    /// literal, an escaped DLE byte, or starts/continues a sliding-window
    /// copy introduced by the DLE marker.
    /// </summary>
    public override int Read(byte[] buffer, int offset, int count)
    {
        int countIndex = 0;
        while (countIndex < count && outBytesCount < unCompressedSize)
        {
            if (length == 0)
            {
                byte nextByte = GetNextByte();
                if (nextByte != RunLengthCode)
                {
                    // Plain literal.
                    buffer[offset + (countIndex++)] = nextByte;
                    windowsBuffer[windowIndex++] = nextByte;
                    outBytesCount++;
                    if (windowIndex == WSIZE)
                        windowIndex = 0;
                    continue;
                }
                nextByte = GetNextByte();
                if (nextByte == 0)
                {
                    // DLE followed by 0 is an escaped literal DLE byte.
                    buffer[offset + (countIndex++)] = RunLengthCode;
                    windowsBuffer[windowIndex++] = RunLengthCode;
                    outBytesCount++;
                    if (windowIndex == WSIZE)
                        windowIndex = 0;
                    continue;
                }
                // DLE copy: this byte packs length low bits and distance high
                // bits; minimum encoded length is 3.
                int lengthDistanceByte = nextByte;
                length = lengthDistanceByte & lengthMask;
                if (length == lengthMask)
                {
                    // Saturated length field: next decoded byte extends it.
                    length += GetNextByte();
                }
                length += 3;
                int distanceHighByte = (lengthDistanceByte << factor) & distanceMask;
                distance = windowIndex - (distanceHighByte + GetNextByte() + 1);
                distance &= WSIZE - 1; // wrap into the circular window
            }
            // Copy the pending match; overlapping copies are intentional.
            while (length != 0 && countIndex < count)
            {
                byte nextByte = windowsBuffer[distance++];
                buffer[offset + (countIndex++)] = nextByte;
                windowsBuffer[windowIndex++] = nextByte;
                outBytesCount++;
                if (distance == WSIZE)
                    distance = 0;
                if (windowIndex == WSIZE)
                    windowIndex = 0;
                length--;
            }
        }
        return countIndex;
    }
}

View File

@@ -11,8 +11,8 @@ public class SourceStream : Stream
private long _prevSize;
private readonly List<FileInfo> _files;
private readonly List<Stream> _streams;
private readonly Func<int, FileInfo?> _getFilePart;
private readonly Func<int, Stream?> _getStreamPart;
private readonly Func<int, FileInfo?>? _getFilePart;
private readonly Func<int, Stream?>? _getStreamPart;
private int _stream;
public SourceStream(FileInfo file, Func<int, FileInfo?> getPart, ReaderOptions options)
@@ -38,8 +38,8 @@ public class SourceStream : Stream
if (!IsFileMode)
{
_streams.Add(stream!);
_getStreamPart = getStreamPart!;
_getFilePart = _ => null!;
_getStreamPart = getStreamPart;
_getFilePart = _ => null;
if (stream is FileStream fileStream)
{
_files.Add(new FileInfo(fileStream.Name));
@@ -49,8 +49,8 @@ public class SourceStream : Stream
{
_files.Add(file!);
_streams.Add(_files[0].OpenRead());
_getFilePart = getFilePart!;
_getStreamPart = _ => null!;
_getFilePart = getFilePart;
_getStreamPart = _ => null;
}
_stream = 0;
_prevSize = 0;
@@ -78,7 +78,7 @@ public class SourceStream : Stream
{
if (IsFileMode)
{
var f = _getFilePart(_streams.Count);
var f = _getFilePart.NotNull("GetFilePart is null")(_streams.Count);
if (f == null)
{
_stream = _streams.Count - 1;
@@ -90,7 +90,7 @@ public class SourceStream : Stream
}
else
{
var s = _getStreamPart(_streams.Count);
var s = _getStreamPart.NotNull("GetStreamPart is null")(_streams.Count);
if (s == null)
{
_stream = _streams.Count - 1;

View File

@@ -0,0 +1,65 @@
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Linq;
using System.Runtime.CompilerServices;
namespace SharpCompress;
public static class NotNullExtensions
{
public static IEnumerable<T> Empty<T>(this IEnumerable<T>? source) =>
source ?? Enumerable.Empty<T>();
public static IEnumerable<T> Empty<T>(this T? source)
{
if (source is null)
{
return Enumerable.Empty<T>();
}
return source.AsEnumerable();
}
#if NETFRAMEWORK || NETSTANDARD
public static T NotNull<T>(this T? obj, string? message = null)
where T : class
{
if (obj is null)
{
throw new ArgumentNullException(message ?? "Value is null");
}
return obj;
}
public static T NotNull<T>(this T? obj, string? message = null)
where T : struct
{
if (obj is null)
{
throw new ArgumentNullException(message ?? "Value is null");
}
return obj.Value;
}
#else
public static T NotNull<T>(
[NotNull] this T? obj,
[CallerArgumentExpression(nameof(obj))] string? paramName = null
)
where T : class
{
ArgumentNullException.ThrowIfNull(obj, paramName);
return obj;
}
public static T NotNull<T>(
[NotNull] this T? obj,
[CallerArgumentExpression(nameof(obj))] string? paramName = null
)
where T : struct
{
ArgumentNullException.ThrowIfNull(obj, paramName);
return obj.Value;
}
#endif
}

View File

@@ -13,9 +13,9 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
where TEntry : Entry
where TVolume : Volume
{
private bool completed;
private IEnumerator<TEntry>? entriesForCurrentReadStream;
private bool wroteCurrentEntry;
private bool _completed;
private IEnumerator<TEntry>? _entriesForCurrentReadStream;
private bool _wroteCurrentEntry;
public event EventHandler<ReaderExtractionEventArgs<IEntry>>? EntryExtractionProgress;
@@ -35,18 +35,18 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
/// <summary>
/// Current volume that the current entry resides in
/// </summary>
public abstract TVolume Volume { get; }
public abstract TVolume? Volume { get; }
/// <summary>
/// Current file entry
/// </summary>
public TEntry Entry => entriesForCurrentReadStream!.Current;
public TEntry Entry => _entriesForCurrentReadStream.NotNull().Current;
#region IDisposable Members
public void Dispose()
{
entriesForCurrentReadStream?.Dispose();
_entriesForCurrentReadStream?.Dispose();
Volume?.Dispose();
}
@@ -61,7 +61,7 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
/// </summary>
public void Cancel()
{
if (!completed)
if (!_completed)
{
Cancelled = true;
}
@@ -69,7 +69,7 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
public bool MoveToNextEntry()
{
if (completed)
if (_completed)
{
return false;
}
@@ -77,27 +77,27 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
{
throw new ReaderCancelledException("Reader has been cancelled.");
}
if (entriesForCurrentReadStream is null)
if (_entriesForCurrentReadStream is null)
{
return LoadStreamForReading(RequestInitialStream());
}
if (!wroteCurrentEntry)
if (!_wroteCurrentEntry)
{
SkipEntry();
}
wroteCurrentEntry = false;
_wroteCurrentEntry = false;
if (NextEntryForCurrentStream())
{
return true;
}
completed = true;
_completed = true;
return false;
}
protected bool LoadStreamForReading(Stream stream)
{
entriesForCurrentReadStream?.Dispose();
if ((stream is null) || (!stream.CanRead))
_entriesForCurrentReadStream?.Dispose();
if (stream is null || !stream.CanRead)
{
throw new MultipartStreamRequiredException(
"File is split into multiple archives: '"
@@ -105,13 +105,15 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
+ "'. A new readable stream is required. Use Cancel if it was intended."
);
}
entriesForCurrentReadStream = GetEntries(stream).GetEnumerator();
return entriesForCurrentReadStream.MoveNext();
_entriesForCurrentReadStream = GetEntries(stream).GetEnumerator();
return _entriesForCurrentReadStream.MoveNext();
}
protected virtual Stream RequestInitialStream() => Volume.Stream;
protected virtual Stream RequestInitialStream() =>
Volume.NotNull("Volume isn't loaded.").Stream;
internal virtual bool NextEntryForCurrentStream() => entriesForCurrentReadStream!.MoveNext();
internal virtual bool NextEntryForCurrentStream() =>
_entriesForCurrentReadStream.NotNull().MoveNext();
protected abstract IEnumerable<TEntry> GetEntries(Stream stream);
@@ -149,7 +151,7 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
public void WriteEntryTo(Stream writableStream)
{
if (wroteCurrentEntry)
if (_wroteCurrentEntry)
{
throw new ArgumentException("WriteEntryTo or OpenEntryStream can only be called once.");
}
@@ -166,7 +168,7 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
}
Write(writableStream);
wroteCurrentEntry = true;
_wroteCurrentEntry = true;
}
internal void Write(Stream writeStream)
@@ -178,12 +180,12 @@ public abstract class AbstractReader<TEntry, TVolume> : IReader, IReaderExtracti
public EntryStream OpenEntryStream()
{
if (wroteCurrentEntry)
if (_wroteCurrentEntry)
{
throw new ArgumentException("WriteEntryTo or OpenEntryStream can only be called once.");
}
var stream = GetEntryStream();
wroteCurrentEntry = true;
_wroteCurrentEntry = true;
return stream;
}

View File

@@ -7,8 +7,8 @@ namespace SharpCompress.Readers.GZip;
public class GZipReader : AbstractReader<GZipEntry, GZipVolume>
{
internal GZipReader(Stream stream, ReaderOptions options)
: base(options, ArchiveType.GZip) => Volume = new GZipVolume(stream, options);
private GZipReader(Stream stream, ReaderOptions options)
: base(options, ArchiveType.GZip) => Volume = new GZipVolume(stream, options, 0);
public override GZipVolume Volume { get; }

View File

@@ -17,7 +17,7 @@ internal class MultiVolumeRarReader : RarReader
internal MultiVolumeRarReader(IEnumerable<Stream> streams, ReaderOptions options)
: base(options) => this.streams = streams.GetEnumerator();
internal override void ValidateArchive(RarVolume archive) { }
protected override void ValidateArchive(RarVolume archive) { }
protected override Stream RequestInitialStream()
{

View File

@@ -14,16 +14,16 @@ namespace SharpCompress.Readers.Rar;
public abstract class RarReader : AbstractReader<RarReaderEntry, RarVolume>
{
private RarVolume? volume;
internal Lazy<IRarUnpack> UnpackV2017 { get; } =
private Lazy<IRarUnpack> UnpackV2017 { get; } =
new(() => new Compressors.Rar.UnpackV2017.Unpack());
internal Lazy<IRarUnpack> UnpackV1 { get; } = new(() => new Compressors.Rar.UnpackV1.Unpack());
private Lazy<IRarUnpack> UnpackV1 { get; } = new(() => new Compressors.Rar.UnpackV1.Unpack());
internal RarReader(ReaderOptions options)
: base(options, ArchiveType.Rar) { }
internal abstract void ValidateArchive(RarVolume archive);
protected abstract void ValidateArchive(RarVolume archive);
public override RarVolume Volume => volume!;
public override RarVolume? Volume => volume;
/// <summary>
/// Opens a RarReader for Non-seeking usage with a single volume
@@ -51,7 +51,7 @@ public abstract class RarReader : AbstractReader<RarReaderEntry, RarVolume>
protected override IEnumerable<RarReaderEntry> GetEntries(Stream stream)
{
volume = new RarReaderVolume(stream, Options);
volume = new RarReaderVolume(stream, Options, 0);
foreach (var fp in volume.ReadFileParts())
{
ValidateArchive(volume);

View File

@@ -8,7 +8,7 @@ namespace SharpCompress.Readers.Rar;
public class RarReaderVolume : RarVolume
{
internal RarReaderVolume(Stream stream, ReaderOptions options, int index = 0)
internal RarReaderVolume(Stream stream, ReaderOptions options, int index)
: base(StreamingMode.Streaming, stream, options, index) { }
internal override RarFilePart CreateFilePart(MarkHeader markHeader, FileHeader fileHeader) =>

View File

@@ -11,7 +11,7 @@ internal class SingleVolumeRarReader : RarReader
internal SingleVolumeRarReader(Stream stream, ReaderOptions options)
: base(options) => this.stream = stream;
internal override void ValidateArchive(RarVolume archive)
protected override void ValidateArchive(RarVolume archive)
{
if (archive.IsMultiVolume)
{

View File

@@ -69,7 +69,6 @@ public class ZipReader : AbstractReader<ZipEntry, ZipVolume>
switch (h.ZipHeaderType)
{
case ZipHeaderType.LocalEntry:
{
yield return new ZipEntry(
new StreamingZipFilePart((LocalEntryHeader)h, stream)

View File

@@ -2,11 +2,11 @@
<PropertyGroup>
<AssemblyTitle>SharpCompress - Pure C# Decompression/Compression</AssemblyTitle>
<NeutralLanguage>en-US</NeutralLanguage>
<VersionPrefix>0.36.0</VersionPrefix>
<AssemblyVersion>0.36.0</AssemblyVersion>
<FileVersion>0.36.0</FileVersion>
<VersionPrefix>0.38.0</VersionPrefix>
<AssemblyVersion>0.38.0</AssemblyVersion>
<FileVersion>0.38.0</FileVersion>
<Authors>Adam Hathcock</Authors>
<TargetFrameworks>net462;netstandard2.0;netstandard2.1;net6.0;net7.0;net8.0</TargetFrameworks>
<TargetFrameworks>net462;netstandard2.0;netstandard2.1;net6.0;net8.0</TargetFrameworks>
<AssemblyName>SharpCompress</AssemblyName>
<AssemblyOriginatorKeyFile>../../SharpCompress.snk</AssemblyOriginatorKeyFile>
<SignAssembly>true</SignAssembly>
@@ -17,33 +17,37 @@
<Copyright>Copyright (c) 2014 Adam Hathcock</Copyright>
<GenerateAssemblyTitleAttribute>false</GenerateAssemblyTitleAttribute>
<GenerateAssemblyProductAttribute>false</GenerateAssemblyProductAttribute>
<Description>SharpCompress is a compression library for NET Standard 2.0/2.1/NET 6.0/NET 7.0 that can unrar, decompress 7zip, decompress xz, zip/unzip, tar/untar lzip/unlzip, bzip2/unbzip2 and gzip/ungzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip is implemented.</Description>
<Description>SharpCompress is a compression library for NET Standard 2.0/NET Standard 2.1/NET 6.0/NET 8.0 that can unrar, decompress 7zip, decompress xz, zip/unzip, tar/untar lzip/unlzip, bzip2/unbzip2 and gzip/ungzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip is implemented.</Description>
<PublishRepositoryUrl>true</PublishRepositoryUrl>
<IncludeSymbols>true</IncludeSymbols>
<SymbolPackageFormat>snupkg</SymbolPackageFormat>
<IsTrimmable>true</IsTrimmable>
<LangVersion>latest</LangVersion>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
<PackageReadmeFile>README.md</PackageReadmeFile>
<ContinuousIntegrationBuild>true</ContinuousIntegrationBuild>
<EmbedUntrackedSources>true</EmbedUntrackedSources>
<AllowedOutputExtensionsInPackageBuildOutputFolder>$(AllowedOutputExtensionsInPackageBuildOutputFolder);.pdb</AllowedOutputExtensionsInPackageBuildOutputFolder>
</PropertyGroup>
<ItemGroup>
<Compile Remove="Compressors\Lzw\LzwException.cs" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.SourceLink.GitHub" Version="8.0.0" PrivateAssets="All" />
<PackageReference Include="ZstdSharp.Port" Version="0.7.4" />
</ItemGroup>
<PropertyGroup Condition=" '$(TargetFramework)' == 'net8.0' ">
<IsTrimmable>true</IsTrimmable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="ZstdSharp.Port" />
</ItemGroup>
<ItemGroup Condition=" '$(TargetFramework)' == 'netstandard2.1' ">
<PackageReference Include="System.Text.Encoding.CodePages" Version="8.0.0" />
</ItemGroup>
<ItemGroup Condition=" '$(TargetFramework)' == 'netstandard2.0' ">
<PackageReference Include="System.Text.Encoding.CodePages" Version="8.0.0" />
<PackageReference Include="System.Memory" Version="4.5.5" />
<PackageReference Include="Microsoft.Bcl.AsyncInterfaces" />
<PackageReference Include="System.Text.Encoding.CodePages" />
</ItemGroup>
<ItemGroup Condition=" '$(TargetFramework)' == 'netstandard2.0' ">
<PackageReference Include="Microsoft.Bcl.AsyncInterfaces" />
<PackageReference Include="System.Text.Encoding.CodePages" />
<PackageReference Include="System.Memory" />
</ItemGroup>
<ItemGroup Condition=" '$(VersionlessImplicitFrameworkDefine)' == 'NETFRAMEWORK' ">
<PackageReference Include="System.Text.Encoding.CodePages" Version="8.0.0" />
<PackageReference Include="System.Memory" Version="4.5.5" />
<PackageReference Include="Microsoft.Bcl.AsyncInterfaces" />
<PackageReference Include="System.Text.Encoding.CodePages" />
<PackageReference Include="System.Memory" />
</ItemGroup>
<ItemGroup>
<None Include="..\..\README.md" Pack="true" PackagePath="\" />

View File

@@ -2,8 +2,7 @@ using System;
using System.Buffers;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using System.Text;
using SharpCompress.Readers;
namespace SharpCompress;
@@ -279,8 +278,42 @@ public static class Utility
long total = 0;
while (ReadTransferBlock(source, array, out var count))
{
total += count;
destination.Write(array, 0, count);
total += count;
}
return total;
}
finally
{
ArrayPool<byte>.Shared.Return(array);
}
}
public static long TransferTo(this Stream source, Stream destination, long maxLength)
{
var array = GetTransferByteArray();
var maxReadSize = array.Length;
try
{
long total = 0;
var remaining = maxLength;
if (remaining < maxReadSize)
{
maxReadSize = (int)remaining;
}
while (ReadTransferBlock(source, array, maxReadSize, out var count))
{
destination.Write(array, 0, count);
total += count;
if (remaining - count < 0)
{
break;
}
remaining -= count;
if (remaining < maxReadSize)
{
maxReadSize = (int)remaining;
}
}
return total;
}
@@ -320,6 +353,16 @@ public static class Utility
private static bool ReadTransferBlock(Stream source, byte[] array, out int count) =>
(count = source.Read(array, 0, array.Length)) != 0;
private static bool ReadTransferBlock(Stream source, byte[] array, int size, out int count)
{
if (size > array.Length)
{
size = array.Length;
}
count = source.Read(array, 0, size);
return count != 0;
}
private static byte[] GetTransferByteArray() => ArrayPool<byte>.Shared.Rent(81920);
public static bool ReadFully(this Stream stream, byte[] buffer)
@@ -393,9 +436,16 @@ public static class Utility
buffer[offset + 3] = (byte)number;
}
public static async ValueTask WriteAsync(
this Stream stream,
byte[] bytes,
CancellationToken cancellationToken
) => await stream.WriteAsync(bytes, 0, bytes.Length, cancellationToken).ConfigureAwait(false);
public static string ReplaceInvalidFileNameChars(string fileName)
{
var invalidChars = new HashSet<char>(Path.GetInvalidFileNameChars());
var sb = new StringBuilder(fileName.Length);
foreach (var c in fileName)
{
var newChar = invalidChars.Contains(c) ? '_' : c;
sb.Append(newChar);
}
return sb.ToString();
}
}

View File

@@ -1,42 +1,26 @@
#nullable disable
using System;
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Writers;
public abstract class AbstractWriter : IWriter
#pragma warning disable CS8618 // Non-nullable field must contain a non-null value when exiting constructor. Consider declaring as nullable.
public abstract class AbstractWriter(ArchiveType type, WriterOptions writerOptions) : IWriter
{
private bool _isDisposed;
protected AbstractWriter(ArchiveType type, WriterOptions writerOptions)
{
WriterType = type;
WriterOptions = writerOptions;
}
//always initializes the stream
protected void InitalizeStream(Stream stream) => OutputStream = stream;
protected void InitializeStream(Stream stream) => OutputStream = stream;
protected Stream OutputStream { get; private set; }
#pragma warning restore CS8618 // Non-nullable field must contain a non-null value when exiting constructor. Consider declaring as nullable.
public ArchiveType WriterType { get; }
public ArchiveType WriterType { get; } = type;
protected WriterOptions WriterOptions { get; }
protected WriterOptions WriterOptions { get; } = writerOptions;
public abstract void Write(string filename, Stream source, DateTime? modificationTime);
#if !NETFRAMEWORK && !NETSTANDARD2_0
public abstract ValueTask WriteAsync(
string filename,
Stream source,
DateTime? modificationTime,
CancellationToken cancellationToken
);
public abstract ValueTask DisposeAsync();
#endif
protected virtual void Dispose(bool isDisposing)
{

View File

@@ -1,7 +1,5 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;
@@ -20,7 +18,7 @@ public sealed class GZipWriter : AbstractWriter
{
destination = NonDisposingStream.Create(destination);
}
InitalizeStream(
InitializeStream(
new GZipStream(
destination,
CompressionMode.Compress,
@@ -52,15 +50,4 @@ public sealed class GZipWriter : AbstractWriter
source.TransferTo(stream);
_wroteToStream = true;
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override ValueTask DisposeAsync() => throw new NotImplementedException();
public override ValueTask WriteAsync(
string filename,
Stream source,
DateTime? modificationTime,
CancellationToken cancellationToken
) => throw new NotImplementedException();
#endif
}

View File

@@ -1,24 +1,11 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
namespace SharpCompress.Writers;
public interface IWriter : IDisposable
#if !NETFRAMEWORK && !NETSTANDARD2_0
, IAsyncDisposable
#endif
{
ArchiveType WriterType { get; }
void Write(string filename, Stream source, DateTime? modificationTime);
#if !NETFRAMEWORK && !NETSTANDARD2_0
ValueTask WriteAsync(
string filename,
Stream source,
DateTime? modificationTime,
CancellationToken cancellationToken
);
#endif
}

View File

@@ -1,7 +1,5 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Compressors;
@@ -34,19 +32,16 @@ public class TarWriter : AbstractWriter
case CompressionType.None:
break;
case CompressionType.BZip2:
{
destination = new BZip2Stream(destination, CompressionMode.Compress, false);
}
break;
case CompressionType.GZip:
{
destination = new GZipStream(destination, CompressionMode.Compress);
}
break;
case CompressionType.LZip:
{
destination = new LZipStream(destination, CompressionMode.Compress);
}
@@ -58,7 +53,7 @@ public class TarWriter : AbstractWriter
);
}
}
InitalizeStream(destination);
InitializeStream(destination);
}
public override void Write(string filename, Stream source, DateTime? modificationTime) =>
@@ -92,8 +87,7 @@ public class TarWriter : AbstractWriter
header.Name = NormalizeFilename(filename);
header.Size = realSize;
header.Write(OutputStream);
size = source.TransferTo(OutputStream);
size = source.TransferTo(OutputStream, realSize);
PadTo512(size.Value);
}
@@ -128,15 +122,4 @@ public class TarWriter : AbstractWriter
}
base.Dispose(isDisposing);
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override ValueTask DisposeAsync() => throw new NotImplementedException();
public override ValueTask WriteAsync(
string filename,
Stream source,
DateTime? modificationTime,
CancellationToken cancellationToken
) => throw new NotImplementedException();
#endif
}

View File

@@ -1,560 +0,0 @@
#if NETFRAMEWORK || NETSTANDARD2_0
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.Text;
using SharpCompress.Common;
using SharpCompress.Common.Zip;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors;
using SharpCompress.Compressors.BZip2;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.PPMd;
using SharpCompress.IO;
namespace SharpCompress.Writers.Zip;
public class ZipWriter : AbstractWriter
{
private readonly CompressionType compressionType;
private readonly CompressionLevel compressionLevel;
private readonly List<ZipCentralDirectoryEntry> entries = new();
private readonly string zipComment;
private long streamPosition;
private PpmdProperties? ppmdProps;
private readonly bool isZip64;
public ZipWriter(Stream destination, ZipWriterOptions zipWriterOptions)
: base(ArchiveType.Zip, zipWriterOptions)
{
zipComment = zipWriterOptions.ArchiveComment ?? string.Empty;
isZip64 = zipWriterOptions.UseZip64;
if (destination.CanSeek)
{
streamPosition = destination.Position;
}
compressionType = zipWriterOptions.CompressionType;
compressionLevel = zipWriterOptions.DeflateCompressionLevel;
if (WriterOptions.LeaveStreamOpen)
{
destination = NonDisposingStream.Create(destination);
}
InitalizeStream(destination);
}
private PpmdProperties PpmdProperties => ppmdProps ??= new PpmdProperties();
protected override void Dispose(bool isDisposing)
{
if (isDisposing)
{
ulong size = 0;
foreach (var entry in entries)
{
size += entry.Write(OutputStream);
}
WriteEndRecord(size);
}
base.Dispose(isDisposing);
}
private static ZipCompressionMethod ToZipCompressionMethod(CompressionType compressionType) =>
compressionType switch
{
CompressionType.None => ZipCompressionMethod.None,
CompressionType.Deflate => ZipCompressionMethod.Deflate,
CompressionType.BZip2 => ZipCompressionMethod.BZip2,
CompressionType.LZMA => ZipCompressionMethod.LZMA,
CompressionType.PPMd => ZipCompressionMethod.PPMd,
_ => throw new InvalidFormatException("Invalid compression method: " + compressionType)
};
public override void Write(string entryPath, Stream source, DateTime? modificationTime) =>
Write(
entryPath,
source,
new ZipWriterEntryOptions() { ModificationDateTime = modificationTime }
);
public void Write(string entryPath, Stream source, ZipWriterEntryOptions zipWriterEntryOptions)
{
using var output = WriteToStream(entryPath, zipWriterEntryOptions);
source.TransferTo(output);
}
public Stream WriteToStream(string entryPath, ZipWriterEntryOptions options)
{
var compression = ToZipCompressionMethod(options.CompressionType ?? compressionType);
entryPath = NormalizeFilename(entryPath);
options.ModificationDateTime ??= DateTime.Now;
options.EntryComment ??= string.Empty;
var entry = new ZipCentralDirectoryEntry(
compression,
entryPath,
(ulong)streamPosition,
WriterOptions.ArchiveEncoding
)
{
Comment = options.EntryComment,
ModificationTime = options.ModificationDateTime
};
// Use the archive default setting for zip64 and allow overrides
var useZip64 = isZip64;
if (options.EnableZip64.HasValue)
{
useZip64 = options.EnableZip64.Value;
}
var headersize = (uint)WriteHeader(entryPath, options, entry, useZip64);
streamPosition += headersize;
return new ZipWritingStream(
this,
OutputStream,
entry,
compression,
options.DeflateCompressionLevel ?? compressionLevel
);
}
private string NormalizeFilename(string filename)
{
filename = filename.Replace('\\', '/');
var pos = filename.IndexOf(':');
if (pos >= 0)
{
filename = filename.Remove(0, pos + 1);
}
return filename.Trim('/');
}
private int WriteHeader(
string filename,
ZipWriterEntryOptions zipWriterEntryOptions,
ZipCentralDirectoryEntry entry,
bool useZip64
)
{
// We err on the side of caution until the zip specification clarifies how to support this
if (!OutputStream.CanSeek && useZip64)
{
throw new NotSupportedException(
"Zip64 extensions are not supported on non-seekable streams"
);
}
var explicitZipCompressionInfo = ToZipCompressionMethod(
zipWriterEntryOptions.CompressionType ?? compressionType
);
var encodedFilename = WriterOptions.ArchiveEncoding.Encode(filename);
Span<byte> intBuf = stackalloc byte[4];
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, ZipHeaderFactory.ENTRY_HEADER_BYTES);
OutputStream.Write(intBuf);
if (explicitZipCompressionInfo == ZipCompressionMethod.Deflate)
{
if (OutputStream.CanSeek && useZip64)
{
OutputStream.Write(stackalloc byte[] { 45, 0 }); //smallest allowed version for zip64
}
else
{
OutputStream.Write(stackalloc byte[] { 20, 0 }); //older version which is more compatible
}
}
else
{
OutputStream.Write(stackalloc byte[] { 63, 0 }); //version says we used PPMd or LZMA
}
var flags = Equals(WriterOptions.ArchiveEncoding.GetEncoding(), Encoding.UTF8)
? HeaderFlags.Efs
: 0;
if (!OutputStream.CanSeek)
{
flags |= HeaderFlags.UsePostDataDescriptor;
if (explicitZipCompressionInfo == ZipCompressionMethod.LZMA)
{
flags |= HeaderFlags.Bit1; // eos marker
}
}
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)flags);
OutputStream.Write(intBuf.Slice(0, 2));
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)explicitZipCompressionInfo);
OutputStream.Write(intBuf.Slice(0, 2)); // zipping method
BinaryPrimitives.WriteUInt32LittleEndian(
intBuf,
zipWriterEntryOptions.ModificationDateTime.DateTimeToDosTime()
);
OutputStream.Write(intBuf);
// zipping date and time
OutputStream.Write(stackalloc byte[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 });
// unused CRC, un/compressed size, updated later
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)encodedFilename.Length);
OutputStream.Write(intBuf.Slice(0, 2)); // filename length
var extralength = 0;
if (OutputStream.CanSeek && useZip64)
{
extralength = 2 + 2 + 8 + 8;
}
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)extralength);
OutputStream.Write(intBuf.Slice(0, 2)); // extra length
OutputStream.Write(encodedFilename, 0, encodedFilename.Length);
if (extralength != 0)
{
OutputStream.Write(new byte[extralength], 0, extralength); // reserve space for zip64 data
entry.Zip64HeaderOffset = (ushort)(6 + 2 + 2 + 4 + 12 + 2 + 2 + encodedFilename.Length);
}
return 6 + 2 + 2 + 4 + 12 + 2 + 2 + encodedFilename.Length + extralength;
}
private void WriteFooter(uint crc, uint compressed, uint uncompressed)
{
Span<byte> intBuf = stackalloc byte[4];
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, crc);
OutputStream.Write(intBuf);
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, compressed);
OutputStream.Write(intBuf);
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, uncompressed);
OutputStream.Write(intBuf);
}
private void WriteEndRecord(ulong size)
{
var zip64EndOfCentralDirectoryNeeded =
entries.Count > ushort.MaxValue
|| streamPosition >= uint.MaxValue
|| size >= uint.MaxValue;
var sizevalue = size >= uint.MaxValue ? uint.MaxValue : (uint)size;
var streampositionvalue =
streamPosition >= uint.MaxValue ? uint.MaxValue : (uint)streamPosition;
Span<byte> intBuf = stackalloc byte[8];
if (zip64EndOfCentralDirectoryNeeded)
{
var recordlen = 2 + 2 + 4 + 4 + 8 + 8 + 8 + 8;
// Write zip64 end of central directory record
OutputStream.Write(stackalloc byte[] { 80, 75, 6, 6 });
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)recordlen);
OutputStream.Write(intBuf); // Size of zip64 end of central directory record
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 45);
OutputStream.Write(intBuf.Slice(0, 2)); // Made by
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 45);
OutputStream.Write(intBuf.Slice(0, 2)); // Version needed
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, 0);
OutputStream.Write(intBuf.Slice(0, 4)); // Disk number
OutputStream.Write(intBuf.Slice(0, 4)); // Central dir disk
// TODO: entries.Count is int, so max 2^31 files
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)entries.Count);
OutputStream.Write(intBuf); // Entries in this disk
OutputStream.Write(intBuf); // Total entries
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, size);
OutputStream.Write(intBuf); // Central Directory size
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)streamPosition);
OutputStream.Write(intBuf); // Disk offset
// Write zip64 end of central directory locator
OutputStream.Write(stackalloc byte[] { 80, 75, 6, 7 });
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, 0);
OutputStream.Write(intBuf.Slice(0, 4)); // Entry disk
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)streamPosition + size);
OutputStream.Write(intBuf); // Offset to the zip64 central directory
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, 1);
OutputStream.Write(intBuf.Slice(0, 4)); // Number of disks
streamPosition += 4 + 8 + recordlen + (4 + 4 + 8 + 4);
}
// Write normal end of central directory record
OutputStream.Write(stackalloc byte[] { 80, 75, 5, 6, 0, 0, 0, 0 });
BinaryPrimitives.WriteUInt16LittleEndian(
intBuf,
(ushort)(entries.Count < 0xFFFF ? entries.Count : 0xFFFF)
);
OutputStream.Write(intBuf.Slice(0, 2));
OutputStream.Write(intBuf.Slice(0, 2));
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, sizevalue);
OutputStream.Write(intBuf.Slice(0, 4));
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, streampositionvalue);
OutputStream.Write(intBuf.Slice(0, 4));
var encodedComment = WriterOptions.ArchiveEncoding.Encode(zipComment);
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)encodedComment.Length);
OutputStream.Write(intBuf.Slice(0, 2));
OutputStream.Write(encodedComment, 0, encodedComment.Length);
}
#region Nested type: ZipWritingStream
internal class ZipWritingStream : Stream
{
    // Running CRC-32 over the *uncompressed* bytes written through this stream;
    // the final value is stored in the central-directory entry on Dispose.
    private readonly CRC32 crc = new();
    // Central-directory record for the entry being written; Dispose fills in
    // Crc / Compressed / Decompressed and registers it with the writer.
    private readonly ZipCentralDirectoryEntry entry;
    // The underlying archive stream (needed for seeking back to patch headers).
    private readonly Stream originalStream;
    // The compression pipeline returned by GetWriteStream (may equal `counting`
    // for STORED entries, or wrap it for Deflate/BZip2/LZMA/PPMd).
    private readonly Stream writeStream;
    private readonly ZipWriter writer;
    private readonly ZipCompressionMethod zipCompressionMethod;
    private readonly CompressionLevel compressionLevel;
    // Counts the compressed bytes actually emitted to `originalStream`.
    // Assigned in GetWriteStream, which the constructor always calls, so it is
    // non-null after construction (hence the `counting!` uses below).
    private CountingWritableSubStream? counting;
    // Total uncompressed byte count written so far.
    private ulong decompressed;

    // Flag to prevent throwing exceptions on Dispose
    // (set by Write when the 4GiB non-zip64 limit is exceeded; Dispose then
    // destroys the archive instead of finalizing a corrupt entry).
    private bool _limitsExceeded;
    private bool isDisposed;

    internal ZipWritingStream(
        ZipWriter writer,
        Stream originalStream,
        ZipCentralDirectoryEntry entry,
        ZipCompressionMethod zipCompressionMethod,
        CompressionLevel compressionLevel
    )
    {
        this.writer = writer;
        this.originalStream = originalStream;
        this.entry = entry;
        this.zipCompressionMethod = zipCompressionMethod;
        this.compressionLevel = compressionLevel;
        // Builds the compression pipeline and also writes any method-specific
        // property bytes (LZMA/PPMd) directly into the archive.
        writeStream = GetWriteStream(originalStream);
    }

    // Write-only, non-seekable stream contract.
    public override bool CanRead => false;
    public override bool CanSeek => false;
    public override bool CanWrite => true;
    public override long Length => throw new NotSupportedException();

    public override long Position
    {
        get => throw new NotSupportedException();
        set => throw new NotSupportedException();
    }

    // Wraps the destination in a byte-counting stream, then layers the selected
    // compressor on top. For LZMA and PPMd the codec property bytes are emitted
    // here, before any entry data, as required by the zip entry format.
    private Stream GetWriteStream(Stream writeStream)
    {
        counting = new CountingWritableSubStream(writeStream);
        Stream output = counting;
        switch (zipCompressionMethod)
        {
            case ZipCompressionMethod.None:
            {
                // STORED: bytes pass straight through (still counted).
                return output;
            }
            case ZipCompressionMethod.Deflate:
            {
                return new DeflateStream(counting, CompressionMode.Compress, compressionLevel);
            }
            case ZipCompressionMethod.BZip2:
            {
                return new BZip2Stream(counting, CompressionMode.Compress, false);
            }
            case ZipCompressionMethod.LZMA:
            {
                // 4-byte zip-LZMA preamble: encoder version (9.20) and
                // properties size (5 bytes, little-endian).
                counting.WriteByte(9);
                counting.WriteByte(20);
                counting.WriteByte(5);
                counting.WriteByte(0);

                var lzmaStream = new LzmaStream(
                    // When the source cannot seek we must emit an EOS marker,
                    // since the sizes are unknown up front.
                    new LzmaEncoderProperties(!originalStream.CanSeek),
                    false,
                    counting
                );
                counting.Write(lzmaStream.Properties, 0, lzmaStream.Properties.Length);
                return lzmaStream;
            }
            case ZipCompressionMethod.PPMd:
            {
                // PPMd stores its 2 property bytes ahead of the data.
                counting.Write(writer.PpmdProperties.Properties, 0, 2);
                return new PpmdStream(writer.PpmdProperties, counting, true);
            }
            default:
            {
                throw new NotSupportedException("CompressionMethod: " + zipCompressionMethod);
            }
        }
    }

    // Finalizes the entry: flushes the compressor, patches the local file
    // header (seekable streams) or emits a post-data descriptor (non-seekable),
    // and registers the completed entry with the writer.
    protected override void Dispose(bool disposing)
    {
        if (isDisposed)
        {
            return;
        }

        isDisposed = true;

        base.Dispose(disposing);
        if (disposing)
        {
            // Dispose the compressor first so all compressed bytes reach
            // `counting` and its Count is final.
            writeStream.Dispose();

            if (_limitsExceeded)
            {
                // We have written invalid data into the archive,
                // so we destroy it now, instead of allowing the user to continue
                // with a defunct archive
                originalStream.Dispose();
                return;
            }

            entry.Crc = (uint)crc.Crc32Result;
            entry.Compressed = counting!.Count;
            entry.Decompressed = decompressed;

            // Entries at/above 4GiB need the zip64 extra field; the 32-bit
            // header fields are then set to the 0xFFFFFFFF sentinel.
            var zip64 =
                entry.Compressed >= uint.MaxValue || entry.Decompressed >= uint.MaxValue;

            var compressedvalue = zip64 ? uint.MaxValue : (uint)counting.Count;
            var decompressedvalue = zip64 ? uint.MaxValue : (uint)entry.Decompressed;

            if (originalStream.CanSeek)
            {
                // Clear the "post data descriptor" flag byte at header offset 6.
                originalStream.Position = (long)(entry.HeaderOffset + 6);
                originalStream.WriteByte(0);

                if (counting.Count == 0 && entry.Decompressed == 0)
                {
                    // set compression to STORED for zero byte files (no compression data)
                    originalStream.Position = (long)(entry.HeaderOffset + 8);
                    originalStream.WriteByte(0);
                    originalStream.WriteByte(0);
                }

                // CRC + compressed/uncompressed sizes live at header offset 14.
                originalStream.Position = (long)(entry.HeaderOffset + 14);

                writer.WriteFooter(entry.Crc, compressedvalue, decompressedvalue);

                // Ideally, we should not throw from Dispose()
                // We should not get here as the Write call checks the limits
                if (zip64 && entry.Zip64HeaderOffset == 0)
                {
                    throw new NotSupportedException(
                        "Attempted to write a stream that is larger than 4GiB without setting the zip64 option"
                    );
                }

                // If we have pre-allocated space for zip64 data,
                // fill it out, even if it is not required
                if (entry.Zip64HeaderOffset != 0)
                {
                    originalStream.Position = (long)(
                        entry.HeaderOffset + entry.Zip64HeaderOffset
                    );

                    Span<byte> intBuf = stackalloc byte[8];
                    // Zip64 extended information extra field: tag 0x0001,
                    // data size 16 (two 8-byte sizes), then the 64-bit
                    // uncompressed and compressed sizes.
                    BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 0x0001);
                    originalStream.Write(intBuf.Slice(0, 2));
                    BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 8 + 8);
                    originalStream.Write(intBuf.Slice(0, 2));

                    BinaryPrimitives.WriteUInt64LittleEndian(intBuf, entry.Decompressed);
                    originalStream.Write(intBuf);
                    BinaryPrimitives.WriteUInt64LittleEndian(intBuf, entry.Compressed);
                    originalStream.Write(intBuf);
                }

                // Seek back to the end of the entry data and advance the
                // writer's logical position past it.
                originalStream.Position = writer.streamPosition + (long)entry.Compressed;
                writer.streamPosition += (long)entry.Compressed;
            }
            else
            {
                // We have a streaming archive, so we should add a post-data-descriptor,
                // but we cannot as it does not hold the zip64 values
                // Throwing an exception until the zip specification is clarified

                // Ideally, we should not throw from Dispose()
                // We should not get here as the Write call checks the limits
                if (zip64)
                {
                    throw new NotSupportedException(
                        "Streams larger than 4GiB are not supported for non-seekable streams"
                    );
                }

                Span<byte> intBuf = stackalloc byte[4];
                BinaryPrimitives.WriteUInt32LittleEndian(
                    intBuf,
                    ZipHeaderFactory.POST_DATA_DESCRIPTOR
                );
                originalStream.Write(intBuf);
                writer.WriteFooter(entry.Crc, compressedvalue, decompressedvalue);
                // 16 = descriptor signature (4) + CRC (4) + sizes (4 + 4).
                writer.streamPosition += (long)entry.Compressed + 16;
            }
            writer.entries.Add(entry);
        }
    }

    public override void Flush() => writeStream.Flush();

    public override int Read(byte[] buffer, int offset, int count) =>
        throw new NotSupportedException();

    public override long Seek(long offset, SeekOrigin origin) =>
        throw new NotSupportedException();

    public override void SetLength(long value) => throw new NotSupportedException();

    public override void Write(byte[] buffer, int offset, int count)
    {
        // We check the limits first, because we can keep the archive consistent
        // if we can prevent the writes from happening
        if (entry.Zip64HeaderOffset == 0)
        {
            // Pre-check, the counting.Count is not exact, as we do not know the size before having actually compressed it
            if (
                _limitsExceeded
                || ((decompressed + (uint)count) > uint.MaxValue)
                || (counting!.Count + (uint)count) > uint.MaxValue
            )
            {
                throw new NotSupportedException(
                    "Attempted to write a stream that is larger than 4GiB without setting the zip64 option"
                );
            }
        }

        decompressed += (uint)count;
        crc.SlurpBlock(buffer, offset, count);
        writeStream.Write(buffer, offset, count);

        if (entry.Zip64HeaderOffset == 0)
        {
            // Post-check, this is accurate
            if ((decompressed > uint.MaxValue) || counting!.Count > uint.MaxValue)
            {
                // We have written the data, so the archive is now broken
                // Throwing the exception here, allows us to avoid
                // throwing an exception in Dispose() which is discouraged
                // as it can mask other errors
                _limitsExceeded = true;
                throw new NotSupportedException(
                    "Attempted to write a stream that is larger than 4GiB without setting the zip64 option"
                );
            }
        }
    }
}
#endregion Nested type: ZipWritingStream
}
#endif

View File

@@ -1,12 +1,8 @@
#if !NETFRAMEWORK && !NETSTANDARD2_0
using System;
using System.Buffers;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Zip;
using SharpCompress.Common.Zip.Headers;
@@ -21,8 +17,6 @@ namespace SharpCompress.Writers.Zip;
public class ZipWriter : AbstractWriter
{
private static readonly byte[] ZIP64eND_OFdIRECTORY = [80, 75, 6, 6];
private static readonly byte[] END_OFdIRECTORY = [80, 75, 6, 7];
private readonly CompressionType compressionType;
private readonly CompressionLevel compressionLevel;
private readonly List<ZipCentralDirectoryEntry> entries = new();
@@ -30,7 +24,6 @@ public class ZipWriter : AbstractWriter
private long streamPosition;
private PpmdProperties? ppmdProps;
private readonly bool isZip64;
private bool isDisposed;
public ZipWriter(Stream destination, ZipWriterOptions zipWriterOptions)
: base(ArchiveType.Zip, zipWriterOptions)
@@ -49,29 +42,14 @@ public class ZipWriter : AbstractWriter
{
destination = NonDisposingStream.Create(destination);
}
InitalizeStream(destination);
InitializeStream(destination);
}
private PpmdProperties PpmdProperties => ppmdProps ??= new PpmdProperties();
public override async ValueTask DisposeAsync()
{
if (isDisposed)
{
return;
}
ulong size = 0;
foreach (var entry in entries)
{
size += entry.Write(OutputStream);
}
await WriteEndRecordAsync(size, CancellationToken.None).ConfigureAwait(false);
isDisposed = true;
}
protected override void Dispose(bool isDisposing)
{
if (isDisposing)
if (isDisposing && OutputStream is not null)
{
ulong size = 0;
foreach (var entry in entries)
@@ -83,9 +61,8 @@ public class ZipWriter : AbstractWriter
base.Dispose(isDisposing);
}
private static ZipCompressionMethod ToZipCompressionMethod(CompressionType compressionType)
{
return compressionType switch
private static ZipCompressionMethod ToZipCompressionMethod(CompressionType compressionType) =>
compressionType switch
{
CompressionType.None => ZipCompressionMethod.None,
CompressionType.Deflate => ZipCompressionMethod.Deflate,
@@ -94,7 +71,6 @@ public class ZipWriter : AbstractWriter
CompressionType.PPMd => ZipCompressionMethod.PPMd,
_ => throw new InvalidFormatException("Invalid compression method: " + compressionType)
};
}
public override void Write(string entryPath, Stream source, DateTime? modificationTime) =>
Write(
@@ -109,34 +85,6 @@ public class ZipWriter : AbstractWriter
source.TransferTo(output);
}
public override async ValueTask WriteAsync(
string entryPath,
Stream source,
DateTime? modificationTime,
CancellationToken cancellationToken
) =>
await WriteAsync(
entryPath,
source,
new ZipWriterEntryOptions() { ModificationDateTime = modificationTime },
cancellationToken
);
public async ValueTask WriteAsync(
string entryPath,
Stream source,
ZipWriterEntryOptions zipWriterEntryOptions,
CancellationToken cancellationToken
)
{
await using var output = await WriteToStreamAsync(
entryPath,
zipWriterEntryOptions,
cancellationToken
);
await source.CopyToAsync(output, cancellationToken);
}
public Stream WriteToStream(string entryPath, ZipWriterEntryOptions options)
{
var compression = ToZipCompressionMethod(options.CompressionType ?? compressionType);
@@ -166,49 +114,7 @@ public class ZipWriter : AbstractWriter
streamPosition += headersize;
return new ZipWritingStream(
this,
OutputStream,
entry,
compression,
options.DeflateCompressionLevel ?? compressionLevel
);
}
public async ValueTask<Stream> WriteToStreamAsync(
string entryPath,
ZipWriterEntryOptions options,
CancellationToken cancellationToken
)
{
var compression = ToZipCompressionMethod(options.CompressionType ?? compressionType);
entryPath = NormalizeFilename(entryPath);
options.ModificationDateTime ??= DateTime.Now;
options.EntryComment ??= string.Empty;
var entry = new ZipCentralDirectoryEntry(
compression,
entryPath,
(ulong)streamPosition,
WriterOptions.ArchiveEncoding
)
{
Comment = options.EntryComment,
ModificationTime = options.ModificationDateTime
};
// Use the archive default setting for zip64 and allow overrides
var useZip64 = isZip64;
if (options.EnableZip64.HasValue)
{
useZip64 = options.EnableZip64.Value;
}
var headersize = (uint)
await WriteHeaderAsync(entryPath, options, entry, useZip64, cancellationToken)
.ConfigureAwait(false);
streamPosition += headersize;
return new ZipWritingStream(
this,
OutputStream,
OutputStream.NotNull(),
entry,
compression,
options.DeflateCompressionLevel ?? compressionLevel
@@ -315,105 +221,6 @@ public class ZipWriter : AbstractWriter
return 6 + 2 + 2 + 4 + 12 + 2 + 2 + encodedFilename.Length + extralength;
}
private async ValueTask<int> WriteHeaderAsync(
string filename,
ZipWriterEntryOptions zipWriterEntryOptions,
ZipCentralDirectoryEntry entry,
bool useZip64,
CancellationToken cancellationToken
)
{
// We err on the side of caution until the zip specification clarifies how to support this
if (!OutputStream.CanSeek && useZip64)
{
throw new NotSupportedException(
"Zip64 extensions are not supported on non-seekable streams"
);
}
var explicitZipCompressionInfo = ToZipCompressionMethod(
zipWriterEntryOptions.CompressionType ?? compressionType
);
var encodedFilename = WriterOptions.ArchiveEncoding.Encode(filename);
var intBuf = ArrayPool<byte>.Shared.Rent(4);
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, ZipHeaderFactory.ENTRY_HEADER_BYTES);
await OutputStream.WriteAsync(intBuf, 0, 4, cancellationToken).ConfigureAwait(false);
if (explicitZipCompressionInfo == ZipCompressionMethod.Deflate)
{
if (OutputStream.CanSeek && useZip64)
{
await OutputStream
.WriteAsync([45, 0], 0, 2, cancellationToken)
.ConfigureAwait(false); //smallest allowed version for zip64
}
else
{
await OutputStream
.WriteAsync([20, 0], 0, 2, cancellationToken)
.ConfigureAwait(false); //older version which is more compatible
}
}
else
{
await OutputStream.WriteAsync([63, 0], 0, 2, cancellationToken).ConfigureAwait(false); //version says we used PPMd or LZMA
}
var flags = Equals(WriterOptions.ArchiveEncoding.GetEncoding(), Encoding.UTF8)
? HeaderFlags.Efs
: 0;
if (!OutputStream.CanSeek)
{
flags |= HeaderFlags.UsePostDataDescriptor;
if (explicitZipCompressionInfo == ZipCompressionMethod.LZMA)
{
flags |= HeaderFlags.Bit1; // eos marker
}
}
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)flags);
await OutputStream.WriteAsync(intBuf, 0, 2, cancellationToken).ConfigureAwait(false);
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)explicitZipCompressionInfo);
await OutputStream.WriteAsync(intBuf, 0, 2, cancellationToken).ConfigureAwait(false); // zipping method
BinaryPrimitives.WriteUInt32LittleEndian(
intBuf,
zipWriterEntryOptions.ModificationDateTime.DateTimeToDosTime()
);
await OutputStream.WriteAsync(intBuf, 0, 4, cancellationToken).ConfigureAwait(false);
// zipping date and time
await OutputStream
.WriteAsync([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], cancellationToken)
.ConfigureAwait(false);
// unused CRC, un/compressed size, updated later
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)encodedFilename.Length);
await OutputStream.WriteAsync(intBuf, 0, 2, cancellationToken).ConfigureAwait(false); // filename length
var extralength = 0;
if (OutputStream.CanSeek && useZip64)
{
extralength = 2 + 2 + 8 + 8;
}
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)extralength);
await OutputStream.WriteAsync(intBuf, 0, 2, cancellationToken).ConfigureAwait(false); // extra length
await OutputStream.WriteAsync(encodedFilename, cancellationToken).ConfigureAwait(false);
if (extralength != 0)
{
await OutputStream
.WriteAsync(new byte[extralength], cancellationToken)
.ConfigureAwait(false); // reserve space for zip64 data
entry.Zip64HeaderOffset = (ushort)(6 + 2 + 2 + 4 + 12 + 2 + 2 + encodedFilename.Length);
}
ArrayPool<byte>.Shared.Return(intBuf);
return 6 + 2 + 2 + 4 + 12 + 2 + 2 + encodedFilename.Length + extralength;
}
private void WriteFooter(uint crc, uint compressed, uint uncompressed)
{
Span<byte> intBuf = stackalloc byte[4];
@@ -425,23 +232,6 @@ public class ZipWriter : AbstractWriter
OutputStream.Write(intBuf);
}
private async ValueTask WriteFooterAsync(
uint crc,
uint compressed,
uint uncompressed,
CancellationToken cancellationToken
)
{
var intBuf = ArrayPool<byte>.Shared.Rent(4);
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, crc);
await OutputStream.WriteAsync(intBuf, cancellationToken).ConfigureAwait(false);
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, compressed);
await OutputStream.WriteAsync(intBuf, cancellationToken).ConfigureAwait(false);
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, uncompressed);
await OutputStream.WriteAsync(intBuf, cancellationToken).ConfigureAwait(false);
ArrayPool<byte>.Shared.Return(intBuf);
}
private void WriteEndRecord(ulong size)
{
var zip64EndOfCentralDirectoryNeeded =
@@ -512,82 +302,6 @@ public class ZipWriter : AbstractWriter
OutputStream.Write(encodedComment, 0, encodedComment.Length);
}
private async ValueTask WriteEndRecordAsync(ulong size, CancellationToken cancellationToken)
{
var zip64EndOfCentralDirectoryNeeded =
entries.Count > ushort.MaxValue
|| streamPosition >= uint.MaxValue
|| size >= uint.MaxValue;
var sizevalue = size >= uint.MaxValue ? uint.MaxValue : (uint)size;
var streampositionvalue =
streamPosition >= uint.MaxValue ? uint.MaxValue : (uint)streamPosition;
var intBuf = ArrayPool<byte>.Shared.Rent(8);
if (zip64EndOfCentralDirectoryNeeded)
{
var recordlen = 2 + 2 + 4 + 4 + 8 + 8 + 8 + 8;
// Write zip64 end of central directory record
await OutputStream
.WriteAsync(ZIP64eND_OFdIRECTORY, cancellationToken)
.ConfigureAwait(false);
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)recordlen);
await OutputStream.WriteAsync(intBuf, 0, 8, cancellationToken).ConfigureAwait(false); // Size of zip64 end of central directory record
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 45);
await OutputStream.WriteAsync(intBuf, 0, 2, cancellationToken).ConfigureAwait(false); // Made by
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 45);
await OutputStream.WriteAsync(intBuf, 0, 2, cancellationToken).ConfigureAwait(false); // Version needed
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, 0);
await OutputStream.WriteAsync(intBuf, 0, 4, cancellationToken).ConfigureAwait(false); // Disk number
await OutputStream.WriteAsync(intBuf, 0, 4, cancellationToken).ConfigureAwait(false); // Central dir disk
// TODO: entries.Count is int, so max 2^31 files
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)entries.Count);
await OutputStream.WriteAsync(intBuf, 0, 8, cancellationToken).ConfigureAwait(false); // Entries in this disk
await OutputStream.WriteAsync(intBuf, 0, 8, cancellationToken).ConfigureAwait(false); // Total entries
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, size);
await OutputStream.WriteAsync(intBuf, 0, 8, cancellationToken).ConfigureAwait(false); // Central Directory size
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)streamPosition);
await OutputStream.WriteAsync(intBuf, 0, 8, cancellationToken).ConfigureAwait(false); // Disk offset
// Write zip64 end of central directory locator
OutputStream.Write(stackalloc byte[] { 80, 75, 6, 7 });
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, 0);
await OutputStream.WriteAsync(intBuf, 0, 4, cancellationToken).ConfigureAwait(false); // Entry disk
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)streamPosition + size);
await OutputStream.WriteAsync(intBuf, 0, 8, cancellationToken).ConfigureAwait(false); // Offset to the zip64 central directory
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, 1);
await OutputStream.WriteAsync(intBuf, 0, 4, cancellationToken).ConfigureAwait(false); // Number of disks
streamPosition += 4 + 8 + recordlen + (4 + 4 + 8 + 4);
}
// Write normal end of central directory record
OutputStream.Write(END_OFdIRECTORY);
BinaryPrimitives.WriteUInt16LittleEndian(
intBuf,
(ushort)(entries.Count < 0xFFFF ? entries.Count : 0xFFFF)
);
await OutputStream.WriteAsync(intBuf, 0, 2, cancellationToken).ConfigureAwait(false);
await OutputStream.WriteAsync(intBuf, 0, 2, cancellationToken).ConfigureAwait(false);
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, sizevalue);
await OutputStream.WriteAsync(intBuf, 0, 4, cancellationToken).ConfigureAwait(false);
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, streampositionvalue);
await OutputStream.WriteAsync(intBuf, 0, 4, cancellationToken).ConfigureAwait(false);
var encodedComment = WriterOptions.ArchiveEncoding.Encode(zipComment);
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)encodedComment.Length);
await OutputStream.WriteAsync(intBuf, 0, 2, cancellationToken).ConfigureAwait(false);
await OutputStream
.WriteAsync(encodedComment, 0, encodedComment.Length, cancellationToken)
.ConfigureAwait(false);
ArrayPool<byte>.Shared.Return(intBuf);
}
#region Nested type: ZipWritingStream
internal class ZipWritingStream : Stream
@@ -614,6 +328,7 @@ public class ZipWriter : AbstractWriter
CompressionLevel compressionLevel
)
{
this.writer = writer;
this.originalStream = originalStream;
this.writer = writer;
this.entry = entry;
@@ -681,131 +396,6 @@ public class ZipWriter : AbstractWriter
}
}
public override async ValueTask DisposeAsync()
{
if (isDisposed)
{
return;
}
isDisposed = true;
await writeStream.DisposeAsync();
if (limitsExceeded)
{
// We have written invalid data into the archive,
// so we destroy it now, instead of allowing the user to continue
// with a defunct archive
await originalStream.DisposeAsync();
return;
}
entry.Crc = (uint)crc.Crc32Result;
entry.Compressed = counting!.Count;
entry.Decompressed = decompressed;
var zip64 = entry.Compressed >= uint.MaxValue || entry.Decompressed >= uint.MaxValue;
var compressedvalue = zip64 ? uint.MaxValue : (uint)counting.Count;
var decompressedvalue = zip64 ? uint.MaxValue : (uint)entry.Decompressed;
if (originalStream.CanSeek)
{
originalStream.Position = (long)(entry.HeaderOffset + 6);
originalStream.WriteByte(0);
if (counting.Count == 0 && entry.Decompressed == 0)
{
// set compression to STORED for zero byte files (no compression data)
originalStream.Position = (long)(entry.HeaderOffset + 8);
originalStream.WriteByte(0);
originalStream.WriteByte(0);
}
originalStream.Position = (long)(entry.HeaderOffset + 14);
await writer.WriteFooterAsync(
entry.Crc,
compressedvalue,
decompressedvalue,
CancellationToken.None
);
// Ideally, we should not throw from Dispose()
// We should not get here as the Write call checks the limits
if (zip64 && entry.Zip64HeaderOffset == 0)
{
throw new NotSupportedException(
"Attempted to write a stream that is larger than 4GiB without setting the zip64 option"
);
}
// If we have pre-allocated space for zip64 data,
// fill it out, even if it is not required
if (entry.Zip64HeaderOffset != 0)
{
originalStream.Position = (long)(entry.HeaderOffset + entry.Zip64HeaderOffset);
var intBuf = ArrayPool<byte>.Shared.Rent(8);
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 0x0001);
await originalStream
.WriteAsync(intBuf, 0, 2, CancellationToken.None)
.ConfigureAwait(false);
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 8 + 8);
await originalStream
.WriteAsync(intBuf, 0, 2, CancellationToken.None)
.ConfigureAwait(false);
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, entry.Decompressed);
await originalStream
.WriteAsync(intBuf, CancellationToken.None)
.ConfigureAwait(false);
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, entry.Compressed);
await originalStream
.WriteAsync(intBuf, CancellationToken.None)
.ConfigureAwait(false);
ArrayPool<byte>.Shared.Return(intBuf);
}
originalStream.Position = writer.streamPosition + (long)entry.Compressed;
writer.streamPosition += (long)entry.Compressed;
}
else
{
// We have a streaming archive, so we should add a post-data-descriptor,
// but we cannot as it does not hold the zip64 values
// Throwing an exception until the zip specification is clarified
// Ideally, we should not throw from Dispose()
// We should not get here as the Write call checks the limits
if (zip64)
{
throw new NotSupportedException(
"Streams larger than 4GiB are not supported for non-seekable streams"
);
}
var intBuf = ArrayPool<byte>.Shared.Rent(4);
BinaryPrimitives.WriteUInt32LittleEndian(
intBuf,
ZipHeaderFactory.POST_DATA_DESCRIPTOR
);
await originalStream
.WriteAsync(intBuf, CancellationToken.None)
.ConfigureAwait(false);
await writer
.WriteFooterAsync(
entry.Crc,
compressedvalue,
decompressedvalue,
CancellationToken.None
)
.ConfigureAwait(false);
writer.streamPosition += (long)entry.Compressed + 16;
ArrayPool<byte>.Shared.Return(intBuf);
}
writer.entries.Add(entry);
}
protected override void Dispose(bool disposing)
{
if (isDisposed)
@@ -829,13 +419,14 @@ public class ZipWriter : AbstractWriter
return;
}
var countingCount = counting?.Count ?? 0;
entry.Crc = (uint)crc.Crc32Result;
entry.Compressed = counting!.Count;
entry.Compressed = countingCount;
entry.Decompressed = decompressed;
var zip64 =
entry.Compressed >= uint.MaxValue || entry.Decompressed >= uint.MaxValue;
var compressedvalue = zip64 ? uint.MaxValue : (uint)counting.Count;
var compressedvalue = zip64 ? uint.MaxValue : (uint)countingCount;
var decompressedvalue = zip64 ? uint.MaxValue : (uint)entry.Decompressed;
if (originalStream.CanSeek)
@@ -843,7 +434,7 @@ public class ZipWriter : AbstractWriter
originalStream.Position = (long)(entry.HeaderOffset + 6);
originalStream.WriteByte(0);
if (counting.Count == 0 && entry.Decompressed == 0)
if (countingCount == 0 && entry.Decompressed == 0)
{
// set compression to STORED for zero byte files (no compression data)
originalStream.Position = (long)(entry.HeaderOffset + 8);
@@ -930,11 +521,12 @@ public class ZipWriter : AbstractWriter
// if we can prevent the writes from happening
if (entry.Zip64HeaderOffset == 0)
{
var countingCount = counting?.Count ?? 0;
// Pre-check, the counting.Count is not exact, as we do not know the size before having actually compressed it
if (
limitsExceeded
|| ((decompressed + (uint)count) > uint.MaxValue)
|| (counting!.Count + (uint)count) > uint.MaxValue
|| (countingCount + (uint)count) > uint.MaxValue
)
{
throw new NotSupportedException(
@@ -949,8 +541,9 @@ public class ZipWriter : AbstractWriter
if (entry.Zip64HeaderOffset == 0)
{
var countingCount = counting?.Count ?? 0;
// Post-check, this is accurate
if ((decompressed > uint.MaxValue) || counting!.Count > uint.MaxValue)
if ((decompressed > uint.MaxValue) || countingCount > uint.MaxValue)
{
// We have written the data, so the archive is now broken
// Throwing the exception here, allows us to avoid
@@ -967,4 +560,3 @@ public class ZipWriter : AbstractWriter
#endregion Nested type: ZipWritingStream
}
#endif

View File

@@ -0,0 +1,338 @@
{
"version": 2,
"dependencies": {
".NETFramework,Version=v4.6.2": {
"Microsoft.Bcl.AsyncInterfaces": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "3WA9q9yVqJp222P3x1wYIGDAkpjAku0TMUaaQV22g6L67AI0LdOIrVS7Ht2vJfLHGSPVuqN94vIr15qn+HEkHw==",
"dependencies": {
"System.Threading.Tasks.Extensions": "4.5.4"
}
},
"Microsoft.NETFramework.ReferenceAssemblies": {
"type": "Direct",
"requested": "[1.0.3, )",
"resolved": "1.0.3",
"contentHash": "vUc9Npcs14QsyOD01tnv/m8sQUnGTGOw1BCmKcv77LBJY7OxhJ+zJF7UD/sCL3lYNFuqmQEVlkfS4Quif6FyYg==",
"dependencies": {
"Microsoft.NETFramework.ReferenceAssemblies.net462": "1.0.3"
}
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==",
"dependencies": {
"Microsoft.Build.Tasks.Git": "8.0.0",
"Microsoft.SourceLink.Common": "8.0.0"
}
},
"System.Memory": {
"type": "Direct",
"requested": "[4.5.5, )",
"resolved": "4.5.5",
"contentHash": "XIWiDvKPXaTveaB7HVganDlOCRoj03l+jrwNvcge/t8vhGYKvqV+dMv6G4SAX2NoNmN0wZfVPTAlFwZcZvVOUw==",
"dependencies": {
"System.Buffers": "4.5.1",
"System.Numerics.Vectors": "4.5.0",
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
}
},
"System.Text.Encoding.CodePages": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "OZIsVplFGaVY90G2SbpgU7EnCoOO5pw1t4ic21dBF3/1omrJFpAGoNAVpPyMVOC90/hvgkGG3VFqR13YgZMQfg==",
"dependencies": {
"System.Memory": "4.5.5",
"System.Runtime.CompilerServices.Unsafe": "6.0.0"
}
},
"ZstdSharp.Port": {
"type": "Direct",
"requested": "[0.8.1, )",
"resolved": "0.8.1",
"contentHash": "19tNz33kn2EkyViFXuxfVn338UJaRmkwBphVqP2dVJIYQUQgFrgG5h061mxkRRg1Ax6r+6WOj1FxaFZ5qaWqqg==",
"dependencies": {
"Microsoft.Bcl.AsyncInterfaces": "5.0.0",
"System.Memory": "4.5.5"
}
},
"Microsoft.Build.Tasks.Git": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
},
"Microsoft.NETFramework.ReferenceAssemblies.net462": {
"type": "Transitive",
"resolved": "1.0.3",
"contentHash": "IzAV30z22ESCeQfxP29oVf4qEo8fBGXLXSU6oacv/9Iqe6PzgHDKCaWfwMBak7bSJQM0F5boXWoZS+kChztRIQ=="
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
},
"System.Buffers": {
"type": "Transitive",
"resolved": "4.5.1",
"contentHash": "Rw7ijyl1qqRS0YQD/WycNst8hUUMgrMH4FCn1nNm27M4VxchZ1js3fVjQaANHO5f3sN4isvP4a+Met9Y4YomAg=="
},
"System.Numerics.Vectors": {
"type": "Transitive",
"resolved": "4.5.0",
"contentHash": "QQTlPTl06J/iiDbJCiepZ4H//BVraReU4O4EoRw1U02H5TLUIT7xn3GnDp9AXPSlJUDyFs4uWjWafNX6WrAojQ=="
},
"System.Runtime.CompilerServices.Unsafe": {
"type": "Transitive",
"resolved": "6.0.0",
"contentHash": "/iUeP3tq1S0XdNNoMz5C9twLSrM/TH+qElHkXWaPvuNOt+99G75NrV0OS2EqHx5wMN7popYjpc8oTjC1y16DLg=="
},
"System.Threading.Tasks.Extensions": {
"type": "Transitive",
"resolved": "4.5.4",
"contentHash": "zteT+G8xuGu6mS+mzDzYXbzS7rd3K6Fjb9RiZlYlJPam2/hU7JCBZBVEcywNuR+oZ1ncTvc/cq0faRr3P01OVg==",
"dependencies": {
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
}
}
},
".NETStandard,Version=v2.0": {
"Microsoft.Bcl.AsyncInterfaces": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "3WA9q9yVqJp222P3x1wYIGDAkpjAku0TMUaaQV22g6L67AI0LdOIrVS7Ht2vJfLHGSPVuqN94vIr15qn+HEkHw==",
"dependencies": {
"System.Threading.Tasks.Extensions": "4.5.4"
}
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==",
"dependencies": {
"Microsoft.Build.Tasks.Git": "8.0.0",
"Microsoft.SourceLink.Common": "8.0.0"
}
},
"NETStandard.Library": {
"type": "Direct",
"requested": "[2.0.3, )",
"resolved": "2.0.3",
"contentHash": "st47PosZSHrjECdjeIzZQbzivYBJFv6P2nv4cj2ypdI204DO+vZ7l5raGMiX4eXMJ53RfOIg+/s4DHVZ54Nu2A==",
"dependencies": {
"Microsoft.NETCore.Platforms": "1.1.0"
}
},
"System.Memory": {
"type": "Direct",
"requested": "[4.5.5, )",
"resolved": "4.5.5",
"contentHash": "XIWiDvKPXaTveaB7HVganDlOCRoj03l+jrwNvcge/t8vhGYKvqV+dMv6G4SAX2NoNmN0wZfVPTAlFwZcZvVOUw==",
"dependencies": {
"System.Buffers": "4.5.1",
"System.Numerics.Vectors": "4.4.0",
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
}
},
"System.Text.Encoding.CodePages": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "OZIsVplFGaVY90G2SbpgU7EnCoOO5pw1t4ic21dBF3/1omrJFpAGoNAVpPyMVOC90/hvgkGG3VFqR13YgZMQfg==",
"dependencies": {
"System.Memory": "4.5.5",
"System.Runtime.CompilerServices.Unsafe": "6.0.0"
}
},
"ZstdSharp.Port": {
"type": "Direct",
"requested": "[0.8.1, )",
"resolved": "0.8.1",
"contentHash": "19tNz33kn2EkyViFXuxfVn338UJaRmkwBphVqP2dVJIYQUQgFrgG5h061mxkRRg1Ax6r+6WOj1FxaFZ5qaWqqg==",
"dependencies": {
"Microsoft.Bcl.AsyncInterfaces": "5.0.0",
"System.Memory": "4.5.5"
}
},
"Microsoft.Build.Tasks.Git": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
},
"Microsoft.NETCore.Platforms": {
"type": "Transitive",
"resolved": "1.1.0",
"contentHash": "kz0PEW2lhqygehI/d6XsPCQzD7ff7gUJaVGPVETX611eadGsA3A877GdSlU0LRVMCTH/+P3o2iDTak+S08V2+A=="
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
},
"System.Buffers": {
"type": "Transitive",
"resolved": "4.5.1",
"contentHash": "Rw7ijyl1qqRS0YQD/WycNst8hUUMgrMH4FCn1nNm27M4VxchZ1js3fVjQaANHO5f3sN4isvP4a+Met9Y4YomAg=="
},
"System.Numerics.Vectors": {
"type": "Transitive",
"resolved": "4.4.0",
"contentHash": "UiLzLW+Lw6HLed1Hcg+8jSRttrbuXv7DANVj0DkL9g6EnnzbL75EB7EWsw5uRbhxd/4YdG8li5XizGWepmG3PQ=="
},
"System.Runtime.CompilerServices.Unsafe": {
"type": "Transitive",
"resolved": "6.0.0",
"contentHash": "/iUeP3tq1S0XdNNoMz5C9twLSrM/TH+qElHkXWaPvuNOt+99G75NrV0OS2EqHx5wMN7popYjpc8oTjC1y16DLg=="
},
"System.Threading.Tasks.Extensions": {
"type": "Transitive",
"resolved": "4.5.4",
"contentHash": "zteT+G8xuGu6mS+mzDzYXbzS7rd3K6Fjb9RiZlYlJPam2/hU7JCBZBVEcywNuR+oZ1ncTvc/cq0faRr3P01OVg==",
"dependencies": {
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
}
}
},
".NETStandard,Version=v2.1": {
"Microsoft.Bcl.AsyncInterfaces": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "3WA9q9yVqJp222P3x1wYIGDAkpjAku0TMUaaQV22g6L67AI0LdOIrVS7Ht2vJfLHGSPVuqN94vIr15qn+HEkHw=="
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==",
"dependencies": {
"Microsoft.Build.Tasks.Git": "8.0.0",
"Microsoft.SourceLink.Common": "8.0.0"
}
},
"System.Text.Encoding.CodePages": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "OZIsVplFGaVY90G2SbpgU7EnCoOO5pw1t4ic21dBF3/1omrJFpAGoNAVpPyMVOC90/hvgkGG3VFqR13YgZMQfg==",
"dependencies": {
"System.Memory": "4.5.5",
"System.Runtime.CompilerServices.Unsafe": "6.0.0"
}
},
"ZstdSharp.Port": {
"type": "Direct",
"requested": "[0.8.1, )",
"resolved": "0.8.1",
"contentHash": "19tNz33kn2EkyViFXuxfVn338UJaRmkwBphVqP2dVJIYQUQgFrgG5h061mxkRRg1Ax6r+6WOj1FxaFZ5qaWqqg==",
"dependencies": {
"System.Runtime.CompilerServices.Unsafe": "6.0.0"
}
},
"Microsoft.Build.Tasks.Git": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
},
"System.Buffers": {
"type": "Transitive",
"resolved": "4.5.1",
"contentHash": "Rw7ijyl1qqRS0YQD/WycNst8hUUMgrMH4FCn1nNm27M4VxchZ1js3fVjQaANHO5f3sN4isvP4a+Met9Y4YomAg=="
},
"System.Numerics.Vectors": {
"type": "Transitive",
"resolved": "4.4.0",
"contentHash": "UiLzLW+Lw6HLed1Hcg+8jSRttrbuXv7DANVj0DkL9g6EnnzbL75EB7EWsw5uRbhxd/4YdG8li5XizGWepmG3PQ=="
},
"System.Runtime.CompilerServices.Unsafe": {
"type": "Transitive",
"resolved": "6.0.0",
"contentHash": "/iUeP3tq1S0XdNNoMz5C9twLSrM/TH+qElHkXWaPvuNOt+99G75NrV0OS2EqHx5wMN7popYjpc8oTjC1y16DLg=="
},
"System.Memory": {
"type": "CentralTransitive",
"requested": "[4.5.5, )",
"resolved": "4.5.5",
"contentHash": "XIWiDvKPXaTveaB7HVganDlOCRoj03l+jrwNvcge/t8vhGYKvqV+dMv6G4SAX2NoNmN0wZfVPTAlFwZcZvVOUw==",
"dependencies": {
"System.Buffers": "4.5.1",
"System.Numerics.Vectors": "4.4.0",
"System.Runtime.CompilerServices.Unsafe": "4.5.3"
}
}
},
"net6.0": {
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==",
"dependencies": {
"Microsoft.Build.Tasks.Git": "8.0.0",
"Microsoft.SourceLink.Common": "8.0.0"
}
},
"ZstdSharp.Port": {
"type": "Direct",
"requested": "[0.8.1, )",
"resolved": "0.8.1",
"contentHash": "19tNz33kn2EkyViFXuxfVn338UJaRmkwBphVqP2dVJIYQUQgFrgG5h061mxkRRg1Ax6r+6WOj1FxaFZ5qaWqqg=="
},
"Microsoft.Build.Tasks.Git": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
}
},
"net8.0": {
"Microsoft.NET.ILLink.Tasks": {
"type": "Direct",
"requested": "[8.0.7, )",
"resolved": "8.0.7",
"contentHash": "iI52ptEKby2ymQ6B7h4TWbFmm85T4VvLgc/HvS45Yr3lgi4IIFbQtjON3bQbX/Vc94jXNSLvrDOp5Kh7SJyFYQ=="
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==",
"dependencies": {
"Microsoft.Build.Tasks.Git": "8.0.0",
"Microsoft.SourceLink.Common": "8.0.0"
}
},
"ZstdSharp.Port": {
"type": "Direct",
"requested": "[0.8.1, )",
"resolved": "0.8.1",
"contentHash": "19tNz33kn2EkyViFXuxfVn338UJaRmkwBphVqP2dVJIYQUQgFrgG5h061mxkRRg1Ax6r+6WOj1FxaFZ5qaWqqg=="
},
"Microsoft.Build.Tasks.Git": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
}
}
}
}

View File

@@ -73,27 +73,45 @@ public class ArchiveTests : ReaderTests
}
}
protected void ArchiveStreamRead(string testArchive, ReaderOptions? readerOptions = null)
protected void ArchiveStreamRead(string testArchive, ReaderOptions? readerOptions = null) =>
ArchiveStreamRead(ArchiveFactory.AutoFactory, testArchive, readerOptions);
protected void ArchiveStreamRead(
IArchiveFactory archiveFactory,
string testArchive,
ReaderOptions? readerOptions = null
)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
ArchiveStreamRead(readerOptions, testArchive);
ArchiveStreamRead(archiveFactory, readerOptions, testArchive);
}
protected void ArchiveStreamRead(
ReaderOptions? readerOptions = null,
params string[] testArchives
) => ArchiveStreamRead(ArchiveFactory.AutoFactory, readerOptions, testArchives);
protected void ArchiveStreamRead(
IArchiveFactory archiveFactory,
ReaderOptions? readerOptions = null,
params string[] testArchives
) =>
ArchiveStreamRead(
archiveFactory,
readerOptions,
testArchives.Select(x => Path.Combine(TEST_ARCHIVES_PATH, x))
);
protected void ArchiveStreamRead(ReaderOptions? readerOptions, IEnumerable<string> testArchives)
protected void ArchiveStreamRead(
IArchiveFactory archiveFactory,
ReaderOptions? readerOptions,
IEnumerable<string> testArchives
)
{
foreach (var path in testArchives)
{
using (var stream = NonDisposingStream.Create(File.OpenRead(path), true))
using (var archive = ArchiveFactory.Open(stream, readerOptions))
using (var archive = archiveFactory.Open(stream, readerOptions))
{
try
{
@@ -218,10 +236,14 @@ public class ArchiveTests : ReaderTests
}
}
protected void ArchiveFileRead(string testArchive, ReaderOptions? readerOptions = null)
protected void ArchiveFileRead(
IArchiveFactory archiveFactory,
string testArchive,
ReaderOptions? readerOptions = null
)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
using (var archive = ArchiveFactory.Open(testArchive, readerOptions))
using (var archive = archiveFactory.Open(new FileInfo(testArchive), readerOptions))
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
@@ -234,18 +256,19 @@ public class ArchiveTests : ReaderTests
VerifyFiles();
}
protected void ArchiveFileRead(string testArchive, ReaderOptions? readerOptions = null) =>
ArchiveFileRead(ArchiveFactory.AutoFactory, testArchive, readerOptions);
protected void ArchiveFileSkip(
string testArchive,
string fileOrder,
ReaderOptions? readerOptions = null
)
{
#if !NETFRAMEWORK
if (!OperatingSystem.IsWindows())
if (!Environment.OSVersion.IsWindows())
{
fileOrder = fileOrder.Replace('\\', '/');
}
#endif
var expected = new Stack<string>(fileOrder.Split(' '));
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
using var archive = ArchiveFactory.Open(testArchive, readerOptions);

View File

@@ -0,0 +1,21 @@
using System;
using System.IO;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.GZip;
using Xunit;
namespace SharpCompress.Test.BZip2;
public class BZip2ReaderTests : ReaderTests
{
[Fact]
public void BZip2_Reader_Factory()
{
Stream stream = new MemoryStream(
new byte[] { 0x42, 0x5a, 0x68, 0x34, 0x31, 0x41, 0x59, 0x26, 0x53, 0x59, 0x35 }
);
Assert.Throws(typeof(InvalidOperationException), () => ReaderFactory.Open(stream));
}
}

View File

@@ -4,6 +4,7 @@ using System.Linq;
using SharpCompress.Archives;
using SharpCompress.Archives.GZip;
using SharpCompress.Archives.Tar;
using SharpCompress.Common;
using Xunit;
namespace SharpCompress.Test.GZip;
@@ -19,7 +20,7 @@ public class GZipArchiveTests : ArchiveTests
using (var archive = ArchiveFactory.Open(stream))
{
var entry = archive.Entries.First();
entry.WriteToFile(Path.Combine(SCRATCH_FILES_PATH, entry.Key));
entry.WriteToFile(Path.Combine(SCRATCH_FILES_PATH, entry.Key.NotNull()));
var size = entry.Size;
var scratch = new FileInfo(Path.Combine(SCRATCH_FILES_PATH, "Tar.tar"));
@@ -41,7 +42,7 @@ public class GZipArchiveTests : ArchiveTests
using (var archive = GZipArchive.Open(stream))
{
var entry = archive.Entries.First();
entry.WriteToFile(Path.Combine(SCRATCH_FILES_PATH, entry.Key));
entry.WriteToFile(Path.Combine(SCRATCH_FILES_PATH, entry.Key.NotNull()));
var size = entry.Size;
var scratch = new FileInfo(Path.Combine(SCRATCH_FILES_PATH, "Tar.tar"));
@@ -94,6 +95,7 @@ public class GZipArchiveTests : ArchiveTests
using (var entryStream = archiveEntry.OpenEntryStream())
{
var result = TarArchive.IsTarFile(entryStream);
Assert.True(result);
}
Assert.Equal(size, tarStream.Length);
using (var entryStream = archiveEntry.OpenEntryStream())
@@ -105,7 +107,7 @@ public class GZipArchiveTests : ArchiveTests
}
[Fact]
public void TestGzCrcWithMostSignificaltBitNotNegative()
public void TestGzCrcWithMostSignificantBitNotNegative()
{
using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"));
using var archive = GZipArchive.Open(stream);
@@ -115,4 +117,12 @@ public class GZipArchiveTests : ArchiveTests
Assert.InRange(entry.Crc, 0L, 0xFFFFFFFFL);
}
}
[Fact]
public void TestGzArchiveTypeGzip()
{
using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"));
using var archive = GZipArchive.Open(stream);
Assert.Equal(archive.Type, ArchiveType.GZip);
}
}

View File

@@ -0,0 +1,11 @@
using System;
namespace SharpCompress.Test;
public static class OperatingSystemExtensions
{
public static bool IsWindows(this OperatingSystem os) =>
os.Platform == PlatformID.Win32NT
|| os.Platform == PlatformID.Win32Windows
|| os.Platform == PlatformID.Win32S;
}

View File

@@ -209,7 +209,7 @@ public class RarReaderTests : ReaderTests
{
Assert.Equal(CompressionType.Rar, reader.Entry.CompressionType);
using var entryStream = reader.OpenEntryStream();
var file = Path.GetFileName(reader.Entry.Key);
var file = Path.GetFileName(reader.Entry.Key).NotNull();
var folder =
Path.GetDirectoryName(reader.Entry.Key)
?? throw new ArgumentNullException();
@@ -293,7 +293,7 @@ public class RarReaderTests : ReaderTests
using var reader = ReaderFactory.Open(stream, new ReaderOptions { LookForHeader = true });
while (reader.MoveToNextEntry())
{
if (reader.Entry.Key.Contains("jpg"))
if (reader.Entry.Key.NotNull().Contains("jpg"))
{
Assert.Equal(CompressionType.Rar, reader.Entry.CompressionType);
reader.WriteEntryToDirectory(
@@ -316,7 +316,7 @@ public class RarReaderTests : ReaderTests
using var reader = ReaderFactory.Open(stream, new ReaderOptions { LookForHeader = true });
while (reader.MoveToNextEntry())
{
if (reader.Entry.Key.Contains("jpg"))
if (reader.Entry.Key.NotNull().Contains("jpg"))
{
Assert.Equal(CompressionType.Rar, reader.Entry.CompressionType);
reader.WriteEntryToDirectory(

Some files were not shown because too many files have changed in this diff Show More