Compare commits

...

137 Commits

Author SHA1 Message Date
Adam Hathcock
862fa50fcb Don't use SkipLocalsInit 2021-11-29 09:29:40 +00:00
Adam Hathcock
7b87924172 Add zstd classes for initial work 2021-11-28 10:11:54 +00:00
Adam Hathcock
d9d7ea8ec5 Add zstandard compression 2021-11-28 09:52:09 +00:00
Adam Hathcock
8d17d09455 Merge pull request #624 from adamhathcock/issue-617
Add test and probable fix for Issue 617
2021-11-22 09:20:15 +00:00
Adam Hathcock
05208ccd9b Add test and probable fix for Issue 617 2021-11-22 08:40:40 +00:00
Adam Hathcock
a1e7c0068d Merge pull request #622 from adamhathcock/net461-tests
Net461 tests
2021-10-02 15:32:20 +01:00
Adam Hathcock
e6bec19946 Mark for 0.30 2021-10-02 15:29:22 +01:00
Adam Hathcock
ec2be2869f Fix whitespace from dotnet format 2021-10-02 15:29:03 +01:00
Adam Hathcock
ce5432ed73 Fix tests for multi-targeting 2021-10-02 15:25:43 +01:00
Adam Hathcock
b6e0ad89ce Remove duplicated artifact step 2021-10-02 15:21:05 +01:00
Adam Hathcock
2745bfa19b Minor SDK update 2021-10-02 15:19:51 +01:00
Adam Hathcock
3cdc4b38a6 Test 461 on github actions 2021-10-02 15:19:13 +01:00
Adam Hathcock
fc1ca808d7 Merge pull request #621 from inthemedium/master
Add net461 target to clean up issues with system.* nuget dependencies
2021-10-02 15:08:18 +01:00
Jeff Tyson
6983e66037 Fix preprocessor condition 2021-10-01 16:34:00 +00:00
Jeff Tyson
01f7336d09 Based on docs, the target should be net461 2021-09-29 22:04:47 +00:00
Jeff Tyson
1561bba538 Add net462 target to clean up issues with system.* nuget dependencies 2021-09-29 21:55:11 +00:00
Adam Hathcock
3ecf8a5e0c Merge pull request #616 from amosonn/patch-1
Fix for chunked read for ZLibBaseStream
2021-09-27 09:14:58 +01:00
Adam Hathcock
e2095fc416 Merge branch 'master' into patch-1 2021-09-27 09:08:58 +01:00
Amos Onn
8398d40106 Fix #615 2021-09-14 22:02:18 +02:00
Amos Onn
134fa8892f Test for bug #615 2021-09-14 21:55:05 +02:00
Adam Hathcock
ea5c8dc063 Merge pull request #614 from adamhathcock/ensure-dest-dir-exists
Ensure destination directory exists.
2021-09-12 08:56:42 +01:00
Adam Hathcock
0209d00164 Minor updates and prep for 0.29 2021-09-12 08:52:00 +01:00
Adam Hathcock
a8d065dc9e Ensure destination directory exists 2021-09-12 08:47:30 +01:00
Adam Hathcock
7bd9711ade Merge pull request #610 from cyr/master
Bugfix for TarWriter - too much padding in large files
2021-09-12 08:43:20 +01:00
cyr
61802eadb4 Merge branch 'adamhathcock:master' into master 2021-09-12 09:37:07 +02:00
Adam Hathcock
b425659058 Merge pull request #611 from Thunderstr1k3/fix-zipheader-seeking
Allowing to seek empty zip files
2021-09-12 08:28:05 +01:00
Christian
3e32e3d7b1 Allowing to seek empty zip files 2021-09-02 13:54:32 +02:00
cyr
1b661c9df1 Fixed bug where large (int32+ file size) adds an additional 512 bytes of padding in tar files. 2021-08-27 22:38:04 +02:00
Adam Hathcock
54fc26b93d Update build and mark for 0.28.3 2021-06-04 13:43:35 +01:00
Adam Hathcock
161f99bbad Merge pull request #601 from salvois/master
Write ZIP64 End of Central Directory only if needed.
2021-06-04 13:22:01 +01:00
Adam Hathcock
c012db0776 Merge branch 'master' into master 2021-06-04 13:17:13 +01:00
Adam Hathcock
8ee257d299 Merge pull request #592 from adamhathcock/memory-downgrade
Downgrade System.Memory to fix buffer version issue
2021-06-04 13:16:40 +01:00
Adam Hathcock
f9522107c3 Merge branch 'master' into memory-downgrade 2021-06-04 13:16:34 +01:00
Adam Hathcock
e07046a37a Merge pull request #596 from DannyBoyk/issue_595_conditionally_read_zip64_extra
Conditionally parse Zip64 extra field based on specification
2021-06-04 13:07:08 +01:00
Salvatore Isaja
ad6d0d9ae8 Write ZIP64 End of Central Directory only if needed. 2021-05-23 21:10:35 +02:00
Daniel Nash
fdc33e91bd Conditionally parse Zip64 extra field based on specification
The Zip64 extra field should look for values based on the corresponding
values in the local entry header.

Fixes adamhathcock/sharpcompress#595
2021-04-26 14:58:10 -04:00
Adam Hathcock
a34f5a855c Mark for 0.28.2 2021-04-25 09:29:56 +01:00
Adam Hathcock
6474741af1 Merge pull request #593 from adamhathcock/fix-pkware-encryption
ReadFully used by pkware encryption didn’t like spans
2021-04-25 09:29:02 +01:00
Adam Hathcock
c10bd840c5 ReadFully used by pkware encryption didn’t like spans 2021-04-25 09:25:51 +01:00
Adam Hathcock
e6dded826b Downgrade System.Memory to fix buffer version issue 2021-04-24 09:16:46 +01:00
Adam Hathcock
8a022c4b18 Update FORMATS.md
remove LZipArchive/Reader/Writer mention
2021-03-28 08:58:11 +01:00
Adam Hathcock
cfef228afc Merge pull request #579 from Looooong/fix/do-not-place-extention-classes-in-common-namespace
Do not place extension classes in common namespace
2021-03-18 13:52:40 +00:00
Nguyễn Đức Long
237ff9f055 Do not place extension classes in common namespace 2021-03-18 20:44:04 +07:00
Adam Hathcock
020f862814 Bug fix for recursive call introduced in 0.28 2021-02-18 08:31:50 +00:00
Adam Hathcock
fa6107200d Merge pull request #572 from Erior/feature/521
Not so elegant perhaps for checking 7z encryption
2021-02-16 08:05:08 +00:00
Adam Hathcock
eb81f972c4 Merge branch 'master' into feature/521 2021-02-16 08:01:32 +00:00
Lars Vahlenberg
93c1ff396e Not so elegant perhaps 2021-02-14 16:29:01 +01:00
Adam Hathcock
403baf05a6 Mark for 0.28 2021-02-14 13:07:35 +00:00
Adam Hathcock
a51b56339a Fix complete entry check for RAR files. 2021-02-14 13:00:43 +00:00
Adam Hathcock
f48a6d47dc Merge pull request #571 from Erior/feature/540
Proposal fixing Extra bytes written when setting zip64
2021-02-14 12:54:17 +00:00
Adam Hathcock
5b52463e4c Merge pull request #570 from Erior/feature/555
Proposal for handling Zip with long comment
2021-02-14 12:52:42 +00:00
Adam Hathcock
6f08bb72d8 Merge pull request #569 from BrendanGrant/improve_how_missing_parts_are_handled
Improve how missing parts are handled
2021-02-14 12:49:49 +00:00
Lars Vahlenberg
045093f453 Linux is case sensitive with file names 2021-02-14 10:26:26 +01:00
Lars Vahlenberg
566c49ce53 Proposal
Zip64 requires version 4.5
Number of disks is 4 bytes and not 8
2021-02-14 02:42:32 +01:00
Lars Vahlenberg
d1d2758ee0 Proposal for handling Zip with long comment 2021-02-13 23:57:03 +01:00
Brendan Grant
5b86c40d5b Properly detect if RAR is complete at the end or not 2021-02-13 13:34:57 -06:00
Brendan Grant
53393e744e Supporting reading contents of incomplete files 2021-02-13 13:33:43 -06:00
Adam Hathcock
2dd17e3882 Be explicit about zip64 extra field sizes. Formatting 2021-02-13 07:05:53 +00:00
Adam Hathcock
c4f7433584 Merge pull request #567 from Nanook/master
Zip64 Header and Size fix
2021-02-13 06:58:41 +00:00
Adam Hathcock
9405a7cf4b Merge pull request #568 from Bond-009/stackalloc
Use stackallocs where possible/sensible
2021-02-13 06:39:32 +00:00
Bond_009
cd677440ce Use stackallocs where possible/sensible 2021-02-12 20:20:15 +01:00
Craig Greenhill
c06f4bc5a8 Zip64 Header and Size fix 2021-02-11 09:37:59 +00:00
Adam Hathcock
4a7337b223 Merge pull request #563 from adamhathcock/add-reader-test-gzip
Fix Rewindable stream Length and add GZip Reader tests
2021-01-13 15:13:34 +00:00
Adam Hathcock
1d8afb817e Bump version 2021-01-13 14:41:25 +00:00
Adam Hathcock
0f06c3d934 Fix rewindable stream to expose length 2021-01-13 14:40:36 +00:00
Adam Hathcock
9d5cb8d119 Add GZip Reader tests 2021-01-13 10:42:59 +00:00
Adam Hathcock
a28d686eb9 Fix relevant package references 2021-01-11 12:01:17 +00:00
Adam Hathcock
ac525a8ec2 Merge branch 'master' of github.com:adamhathcock/sharpcompress 2021-01-11 10:01:49 +00:00
Adam Hathcock
52c44befa2 Merge pull request #560 from adamhathcock/gzip-fixes
Expose Last Modified time on GZipStream. Add CRC and Size to GZipEntries on Archive
2021-01-11 08:57:19 +00:00
Adam Hathcock
c64251c341 Mark for 0.27 2021-01-09 14:04:44 +00:00
Adam Hathcock
bdc57d3c33 Merge pull request #559 from adamhathcock/net5
Use Net5, NetCoreApp3.1, NetStandard2.1, NetStandard2.0 only
2021-01-09 13:43:37 +00:00
Adam Hathcock
7edc437df2 formatting 2021-01-09 13:40:57 +00:00
Adam Hathcock
57e4395e7d Merge branch 'master' into net5
# Conflicts:
#	build/Program.cs
#	src/SharpCompress/Common/Zip/ZipFilePart.cs
2021-01-09 13:40:09 +00:00
Adam Hathcock
ee17dca9e5 Fix formatting 2021-01-09 13:36:30 +00:00
Adam Hathcock
e9f3add5b9 Merge branch 'master' into gzip-fixes 2021-01-09 13:35:52 +00:00
Adam Hathcock
faf1a9f7e4 Merge pull request #561 from adamhathcock/format
Use dotnet format to ensure some kind of code style
2021-01-09 13:35:26 +00:00
Adam Hathcock
5357bd07c7 Let dotnet format do its thing 2021-01-09 13:33:34 +00:00
Adam Hathcock
8c0e2cbd25 Use dotnet format 2021-01-09 13:32:14 +00:00
Adam Hathcock
674f3b4f28 Merge branch 'master' into gzip-fixes 2021-01-09 13:25:55 +00:00
Adam Hathcock
6e42e00974 Merge pull request #485 from adamhathcock/issue-256
Create and using PauseEntryRebuilding for adding large numbers of ent…
2021-01-09 13:23:52 +00:00
Adam Hathcock
8598885258 Read trailer for GZip for CRC and uncompressed size 2021-01-09 13:22:06 +00:00
Adam Hathcock
669e40d53c Merge branch 'master' into issue-256 2021-01-09 13:01:16 +00:00
Adam Hathcock
1adcce6c62 Expose Last Modified time on GZipStream 2021-01-09 12:53:13 +00:00
Adam Hathcock
147be6e6e1 Use Net5, NetCoreApp3.1, NetStandard2.1, NetStandard2.0 only 2021-01-09 10:34:49 +00:00
Adam Hathcock
5879999094 Merge pull request #551 from carbon/alder32
Use hardware accelerated Adler32 impl
2020-11-19 08:21:31 +00:00
Jason Nelson
477a30cf5b Use hardware accelerated Adler32 impl 2020-11-18 11:21:29 -08:00
Adam Hathcock
2fec03e1ac Merge pull request #550 from carbon/cq
Improve Code Quality 3
2020-11-18 18:32:53 +00:00
Jason Nelson
9a17449a02 Format NewSubHeaderType 2020-11-18 09:44:13 -08:00
Jason Nelson
087a6aad8c Cross target .NETCOREAPP3.1 and react to new nullablity annotations 2020-11-18 09:43:08 -08:00
Jason Nelson
e243a8e88f Format AbstractArchive 2020-11-18 09:31:39 -08:00
Jason Nelson
b57df8026a Use pattern matching 2020-11-18 09:29:38 -08:00
Jason Nelson
a1d45b44cd Format ArchiveFactory 2020-11-18 09:28:24 -08:00
Jason Nelson
e47e1d220a Format AesDecoderStream 2020-11-18 09:25:38 -08:00
Jason Nelson
0129a933df Remove NETSTANDARD1_3 symbol 2020-11-18 09:23:50 -08:00
Jason Nelson
fa241bb0d7 Inline variable declarations 2020-11-18 09:21:45 -08:00
Jason Nelson
d8804ae108 Improve conditional logic to prepare to add .NETCOREAPP target 2020-11-18 09:19:21 -08:00
Jason Nelson
8090d269e7 Add polyfills for string.EndsWith(char) && string.Contains(char) 2020-11-18 09:16:53 -08:00
Jason Nelson
b0101f20c5 Eliminate culture specific StartsWith comparisons 2020-11-18 09:12:01 -08:00
Jason Nelson
dd48e4299a Simplify .NET framework code exclusions, bump min .NET framework version to 4.6.1 2020-11-18 09:07:30 -08:00
Jason Nelson
c61ee0c24f Update deps 2020-11-18 09:02:11 -08:00
Jason Nelson
9576867c34 Enable C# 9 2020-11-18 09:01:35 -08:00
Adam Hathcock
4426a24298 Merge pull request #549 from adamhathcock/update-deps
Update dependencies
2020-11-03 08:47:58 +00:00
Adam Hathcock
3b43c1e413 Update dependencies 2020-11-03 08:45:10 +00:00
Adam Hathcock
aa6575c8f9 Merge pull request #541 from avao/master
UT and Fix for: Index out of range exception from gzip #532
2020-10-19 12:33:31 +01:00
avao
0268713960 UT and Fix for: Index out of range exception from gzip #532 2020-10-13 19:58:11 +01:00
Adam Hathcock
f36167d425 Merge pull request #531 from carbon/master
Improve CQ3
2020-08-01 06:25:09 +01:00
Jason Nelson
33ffcb9308 Use Array.Empty<byte> 2020-07-31 17:00:46 -07:00
Jason Nelson
a649c25a91 Eliminate two allocations in HuffmanTree 2020-07-31 16:58:21 -07:00
Jason Nelson
fa1e773960 Eliminate two allocations in Crc32 2020-07-31 16:55:07 -07:00
Jason Nelson
62f7238796 Make CMethodId readonly 2020-07-31 16:49:34 -07:00
Jason Nelson
d4ccf73340 Embed FAST_ENCODER_TREE_STRUCTURE_DATA 2020-07-31 16:47:05 -07:00
Jason Nelson
5ddb0f96bc Use switch expressions 2020-07-31 16:37:56 -07:00
Jason Nelson
75a6db8f4c Eliminate three allocations in HbMakeCodeLengths 2020-07-31 16:33:00 -07:00
Jason Nelson
ae5635319b Eliminate header bytes allocation 2020-07-31 16:30:26 -07:00
Jason Nelson
98ed3080d0 Eliminate three allocations 2020-07-31 16:30:09 -07:00
Jason Nelson
c618eacad4 Optimize RijndaelEngine 2020-07-31 16:22:44 -07:00
Jason Nelson
3b11e6ef97 Eliminate two allocations 2020-07-31 16:10:59 -07:00
Jason Nelson
40af9359db Polyfill and use Stream.Read(Span<byte> buffer) 2020-07-31 16:08:38 -07:00
Jason Nelson
d6bf9dae42 Eliminate allocation 2020-07-31 16:01:09 -07:00
Adam Hathcock
13917941ff Merge pull request #530 from carbon/master
Enable test coverage for net461 and fix regression
2020-07-31 18:39:40 +01:00
Jason Nelson
28f04329ae Merge branch 'master' of https://github.com/carbon/sharpcompress 2020-07-31 10:12:37 -07:00
Jason Nelson
404a6b231d Fix .NET 461 failures 2020-07-31 10:12:34 -07:00
Jason Nelson
184596da3c Merge branch 'master' into master 2020-07-31 11:37:45 -05:00
Jason Nelson
f00f393687 Disable failing net461 tests 2020-07-31 09:30:20 -07:00
Jason Nelson
cbbfb89619 Add failure notes 2020-07-31 09:29:06 -07:00
Jason Nelson
6a5cf11dd0 Fix net461 bug 2020-07-31 09:27:41 -07:00
Jason Nelson
fc1d0a0464 Run tests against net461 2020-07-31 09:27:32 -07:00
Adam Hathcock
74af1759eb Merge pull request #529 from carbon/master
Improve code quality v2
2020-07-31 06:55:35 +01:00
Jason Nelson
ee3162ad71 Fix return 2020-07-30 17:49:29 -07:00
Jason Nelson
4357165163 Add Read/Write overrides to NonDisposingStream 2020-07-30 17:36:03 -07:00
Jason Nelson
6973436b94 Add and use Stream.Write(ReadOnlySpan<byte> buffer) polyfill 2020-07-30 17:29:33 -07:00
Jason Nelson
7750ed7106 Finish spanification of RijndaelEngine 2020-07-30 17:01:13 -07:00
Jason Nelson
773158e9d8 Seal LZipStream 2020-07-30 16:57:30 -07:00
Jason Nelson
4db615597d Refactor ExtraData and enable nullable 2020-07-30 16:48:22 -07:00
Jason Nelson
6bdf2365fc Inline variable declarations 2020-07-30 16:45:38 -07:00
Adam Hathcock
3b2e273832 Merge branch 'master' into issue-256 2019-10-10 09:27:46 +01:00
Adam Hathcock
43c839eb89 Create and using PauseEntryRebuilding for adding large numbers of entries 2019-10-09 09:55:16 +01:00
387 changed files with 48070 additions and 5674 deletions

.config/dotnet-tools.json Normal file

@@ -0,0 +1,12 @@
{
"version": 1,
"isRoot": true,
"tools": {
"dotnet-format": {
"version": "4.1.131201",
"commands": [
"dotnet-format"
]
}
}
}


@@ -12,13 +12,9 @@ jobs:
- uses: actions/checkout@v1
- uses: actions/setup-dotnet@v1
with:
dotnet-version: 3.1.302
dotnet-version: 5.0.401
- run: dotnet run -p build/build.csproj
- uses: actions/upload-artifact@v2
with:
name: ${{ matrix.os }}-sharpcompress.nupkg
path: artifacts/*
- uses: actions/upload-artifact@v2
with:
name: ${{ matrix.os }}-sharpcompress.snupkg
path: artifacts/*


@@ -19,7 +19,6 @@
| Tar.XZ | LZMA2 | Decompress | TarArchive | TarReader | TarWriter (3) |
| GZip (single file) | DEFLATE | Both | GZipArchive | GZipReader | GZipWriter |
| 7Zip (4) | LZMA, LZMA2, BZip2, PPMd, BCJ, BCJ2, Deflate | Decompress | SevenZipArchive | N/A | N/A |
| LZip (single file) (5) | LZip (LZMA) | Both | LZipArchive | LZipReader | LZipWriter |
1. SOLID Rars are only supported in the RarReader API.
2. Zip format supports pkware and WinzipAES encryption. However, encrypted LZMA is not supported. Zip64 reading/writing is supported but only with seekable streams as the Zip spec doesn't support Zip64 data in post data descriptors. Deflate64 is only supported for reading.


@@ -1,6 +1,6 @@
# SharpCompress
SharpCompress is a compression library in pure C# for .NET Standard 2.0, 2.1 and NET 4.6 that can unrar, un7zip, unzip, untar unbzip2, ungzip, unlzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip are implemented.
SharpCompress is a compression library in pure C# for .NET Standard 2.0, 2.1, .NET Core 3.1 and .NET 5.0 that can unrar, un7zip, unzip, untar unbzip2, ungzip, unlzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip/lzip are implemented.
The major feature is support for non-seekable streams so large files can be processed on the fly (i.e. download stream).
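A minimal sketch of the random-access API described above, assembled only from members that appear in the diffs below (ArchiveFactory.Open, IArchiveEntry.WriteToDirectory, ExtractionOptions); the archive and output paths are illustrative:

using System.IO;
using SharpCompress.Archives;
using SharpCompress.Common;

// Open any supported archive with random access and extract every file entry.
using (var archive = ArchiveFactory.Open("archive.zip"))
{
    foreach (var entry in archive.Entries)
    {
        if (!entry.IsDirectory)
        {
            entry.WriteToDirectory("output", new ExtractionOptions
            {
                ExtractFullPath = true, // recreate the stored directory structure
                Overwrite = true
            });
        }
    }
}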


@@ -9,10 +9,11 @@ using static SimpleExec.Command;
class Program
{
private const string Clean = "clean";
private const string Format = "format";
private const string Build = "build";
private const string Test = "test";
private const string Publish = "publish";
static void Main(string[] args)
{
Target(Clean,
@@ -39,30 +40,36 @@ class Program
}
});
Target(Build, ForEach("net46", "netstandard2.0", "netstandard2.1"),
Target(Format, () =>
{
Run("dotnet", "tool restore");
Run("dotnet", "format --check");
});
Target(Build, DependsOn(Format),
framework =>
{
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows) && framework == "net46")
{
return;
}
Run("dotnet", "build src/SharpCompress/SharpCompress.csproj -c Release");
});
Run("dotnet", "build src/SharpCompress/SharpCompress.csproj -c Release");
});
Target(Test, DependsOn(Build), ForEach("netcoreapp3.1"),
framework =>
{
IEnumerable<string> GetFiles(string d)
{
return Glob.Files(".", d);
}
Target(Test, DependsOn(Build), ForEach("net5.0", "net461"),
framework =>
{
IEnumerable<string> GetFiles(string d)
{
return Glob.Files(".", d);
}
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows) && framework == "net461")
{
return;
}
foreach (var file in GetFiles("**/*.Test.csproj"))
{
Run("dotnet", $"test {file} -c Release -f {framework}");
}
});
foreach (var file in GetFiles("**/*.Test.csproj"))
{
Run("dotnet", $"test {file} -c Release -f {framework}");
}
});
Target(Publish, DependsOn(Test),
() =>
{
@@ -70,7 +77,7 @@ class Program
});
Target("default", DependsOn(Publish), () => Console.WriteLine("Done!"));
RunTargetsAndExit(args);
}
}
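As wired above, Build depends on Format, Test on Build, Publish on Test, and "default" on Publish, so one invocation of the command already used in the workflow file runs the whole chain; naming a target (standard Bullseye argument handling, assumed here) runs only that target and its dependencies:

dotnet run -p build/build.csproj
dotnet run -p build/build.csproj -- test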


@@ -2,13 +2,13 @@
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>netcoreapp3.1</TargetFramework>
<TargetFramework>net5.0</TargetFramework>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Bullseye" Version="3.3.0" />
<PackageReference Include="Glob" Version="1.1.7" />
<PackageReference Include="SimpleExec" Version="6.2.0" />
<PackageReference Include="Bullseye" Version="3.8.0" />
<PackageReference Include="Glob" Version="1.1.8" />
<PackageReference Include="SimpleExec" Version="8.0.0" />
</ItemGroup>
</Project>


@@ -1,5 +1,6 @@
{
"sdk": {
"version": "3.1.302"
"version": "5.0.300",
"rollForward": "latestFeature"
}
}


@@ -0,0 +1,280 @@
// Copyright (c) Six Labors and contributors.
// Licensed under the GNU Affero General Public License, Version 3.
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
#if !NETSTANDARD2_0 && !NETSTANDARD2_1 && !NETFRAMEWORK
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
#endif
namespace SharpCompress.Algorithms
{
/// <summary>
/// Calculates the 32 bit Adler checksum of a given buffer according to
/// RFC 1950. ZLIB Compressed Data Format Specification version 3.3)
/// </summary>
internal static class Adler32
{
/// <summary>
/// The default initial seed value of a Adler32 checksum calculation.
/// </summary>
public const uint SeedValue = 1U;
#if !NETSTANDARD2_0 && !NETSTANDARD2_1 && !NETFRAMEWORK
private const int MinBufferSize = 64;
#endif
// Largest prime smaller than 65536
private const uint BASE = 65521;
// NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1
private const uint NMAX = 5552;
/// <summary>
/// Calculates the Adler32 checksum with the bytes taken from the span.
/// </summary>
/// <param name="buffer">The readonly span of bytes.</param>
/// <returns>The <see cref="uint"/>.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static uint Calculate(ReadOnlySpan<byte> buffer)
{
return Calculate(SeedValue, buffer);
}
/// <summary>
/// Calculates the Adler32 checksum with the bytes taken from the span and seed.
/// </summary>
/// <param name="adler">The input Adler32 value.</param>
/// <param name="buffer">The readonly span of bytes.</param>
/// <returns>The <see cref="uint"/>.</returns>
public static uint Calculate(uint adler, ReadOnlySpan<byte> buffer)
{
#if !NETSTANDARD2_0 && !NETSTANDARD2_1 && !NETFRAMEWORK
if (Sse3.IsSupported && buffer.Length >= MinBufferSize)
{
return CalculateSse(adler, buffer);
}
return CalculateScalar(adler, buffer);
#else
return CalculateScalar(adler, buffer);
#endif
}
// Based on https://github.com/chromium/chromium/blob/master/third_party/zlib/adler32_simd.c
#if !NETSTANDARD2_0 && !NETSTANDARD2_1 && !NETFRAMEWORK
private static unsafe uint CalculateSse(uint adler, ReadOnlySpan<byte> buffer)
{
uint s1 = adler & 0xFFFF;
uint s2 = (adler >> 16) & 0xFFFF;
// Process the data in blocks.
const int BLOCK_SIZE = 1 << 5;
uint length = (uint)buffer.Length;
uint blocks = length / BLOCK_SIZE;
length -= blocks * BLOCK_SIZE;
int index = 0;
fixed (byte* bufferPtr = &buffer[0])
{
index += (int)blocks * BLOCK_SIZE;
var localBufferPtr = bufferPtr;
// _mm_setr_epi8 on x86
var tap1 = Vector128.Create(32, 31, 30, 29, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 17);
var tap2 = Vector128.Create(16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1);
Vector128<byte> zero = Vector128<byte>.Zero;
var ones = Vector128.Create((short)1);
while (blocks > 0)
{
uint n = NMAX / BLOCK_SIZE; /* The NMAX constraint. */
if (n > blocks)
{
n = blocks;
}
blocks -= n;
// Process n blocks of data. At most NMAX data bytes can be
// processed before s2 must be reduced modulo BASE.
Vector128<int> v_ps = Vector128.CreateScalar(s1 * n).AsInt32();
Vector128<int> v_s2 = Vector128.CreateScalar(s2).AsInt32();
Vector128<int> v_s1 = Vector128<int>.Zero;
do
{
// Load 32 input bytes.
Vector128<byte> bytes1 = Sse3.LoadDquVector128(localBufferPtr);
Vector128<byte> bytes2 = Sse3.LoadDquVector128(localBufferPtr + 16);
// Add previous block byte sum to v_ps.
v_ps = Sse2.Add(v_ps, v_s1);
// Horizontally add the bytes for s1, multiply-adds the
// bytes by [ 32, 31, 30, ... ] for s2.
v_s1 = Sse2.Add(v_s1, Sse2.SumAbsoluteDifferences(bytes1, zero).AsInt32());
Vector128<short> mad1 = Ssse3.MultiplyAddAdjacent(bytes1, tap1);
v_s2 = Sse2.Add(v_s2, Sse2.MultiplyAddAdjacent(mad1, ones));
v_s1 = Sse2.Add(v_s1, Sse2.SumAbsoluteDifferences(bytes2, zero).AsInt32());
Vector128<short> mad2 = Ssse3.MultiplyAddAdjacent(bytes2, tap2);
v_s2 = Sse2.Add(v_s2, Sse2.MultiplyAddAdjacent(mad2, ones));
localBufferPtr += BLOCK_SIZE;
}
while (--n > 0);
v_s2 = Sse2.Add(v_s2, Sse2.ShiftLeftLogical(v_ps, 5));
// Sum epi32 ints v_s1(s2) and accumulate in s1(s2).
const byte S2301 = 0b1011_0001; // A B C D -> B A D C
const byte S1032 = 0b0100_1110; // A B C D -> C D A B
v_s1 = Sse2.Add(v_s1, Sse2.Shuffle(v_s1, S2301));
v_s1 = Sse2.Add(v_s1, Sse2.Shuffle(v_s1, S1032));
s1 += (uint)v_s1.ToScalar();
v_s2 = Sse2.Add(v_s2, Sse2.Shuffle(v_s2, S2301));
v_s2 = Sse2.Add(v_s2, Sse2.Shuffle(v_s2, S1032));
s2 = (uint)v_s2.ToScalar();
// Reduce.
s1 %= BASE;
s2 %= BASE;
}
}
ref byte bufferRef = ref MemoryMarshal.GetReference(buffer);
if (length > 0)
{
if (length >= 16)
{
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
length -= 16;
}
while (length-- > 0)
{
s2 += s1 += Unsafe.Add(ref bufferRef, index++);
}
if (s1 >= BASE)
{
s1 -= BASE;
}
s2 %= BASE;
}
return s1 | (s2 << 16);
}
#endif
private static uint CalculateScalar(uint adler, ReadOnlySpan<byte> buffer)
{
uint s1 = adler & 0xFFFF;
uint s2 = (adler >> 16) & 0xFFFF;
uint k;
ref byte bufferRef = ref MemoryMarshal.GetReference<byte>(buffer);
uint length = (uint)buffer.Length;
int index = 0;
while (length > 0)
{
k = length < NMAX ? length : NMAX;
length -= k;
while (k >= 16)
{
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
k -= 16;
}
if (k != 0)
{
do
{
s1 += Unsafe.Add(ref bufferRef, index++);
s2 += s1;
}
while (--k != 0);
}
s1 %= BASE;
s2 %= BASE;
}
return (s2 << 16) | s1;
}
}
}
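In short, the routine above keeps two running sums over the input bytes: s1 starts at 1 and adds each byte; s2 starts at 0 and adds the value of s1 after every byte; both are reduced modulo BASE = 65521 (the largest prime below 2^16), and the checksum is (s2 << 16) | s1. NMAX = 5552 is the longest run of 0xFF bytes that can be summed before a modulo reduction is needed to avoid 32-bit overflow. A small worked sketch (the class is internal, so a call site like this compiles only inside SharpCompress or a friend assembly such as the test project):

using System.Text;
using SharpCompress.Algorithms;

// "Wikipedia" is a common worked example: s1 = 0x0398, s2 = 0x11E6.
uint adler = Adler32.Calculate(Encoding.ASCII.GetBytes("Wikipedia"));
// adler == 0x11E60398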


@@ -81,29 +81,29 @@ namespace SharpCompress.Archives
/// <summary>
/// Returns an ReadOnlyCollection of all the RarArchiveEntries across the one or many parts of the RarArchive.
/// </summary>
public virtual ICollection<TEntry> Entries { get { return lazyEntries; } }
public virtual ICollection<TEntry> Entries => lazyEntries;
/// <summary>
/// Returns an ReadOnlyCollection of all the RarArchiveVolumes across the one or many parts of the RarArchive.
/// </summary>
public ICollection<TVolume> Volumes { get { return lazyVolumes; } }
public ICollection<TVolume> Volumes => lazyVolumes;
/// <summary>
/// The total size of the files compressed in the archive.
/// </summary>
public virtual long TotalSize { get { return Entries.Aggregate(0L, (total, cf) => total + cf.CompressedSize); } }
public virtual long TotalSize => Entries.Aggregate(0L, (total, cf) => total + cf.CompressedSize);
/// <summary>
/// The total size of the files as uncompressed in the archive.
/// </summary>
public virtual long TotalUncompressSize { get { return Entries.Aggregate(0L, (total, cf) => total + cf.Size); } }
public virtual long TotalUncompressSize => Entries.Aggregate(0L, (total, cf) => total + cf.Size);
protected abstract IEnumerable<TVolume> LoadVolumes(IEnumerable<Stream> streams);
protected abstract IEnumerable<TEntry> LoadEntries(IEnumerable<TVolume> volumes);
IEnumerable<IArchiveEntry> IArchive.Entries { get { return Entries.Cast<IArchiveEntry>(); } }
IEnumerable<IArchiveEntry> IArchive.Entries => Entries.Cast<IArchiveEntry>();
IEnumerable<IVolume> IArchive.Volumes { get { return lazyVolumes.Cast<IVolume>(); } }
IEnumerable<IVolume> IArchive.Volumes => lazyVolumes.Cast<IVolume>();
public virtual void Dispose()
{
@@ -132,9 +132,9 @@ namespace SharpCompress.Archives
void IExtractionListener.FireFilePartExtractionBegin(string name, long size, long compressedSize)
{
FilePartExtractionBegin?.Invoke(this, new FilePartExtractionBeginEventArgs(
compressedSize : compressedSize,
size : size,
name : name
compressedSize: compressedSize,
size: size,
name: name
));
}
@@ -160,7 +160,7 @@ namespace SharpCompress.Archives
/// <summary>
/// Archive is SOLID (this means the Archive saved bytes by reusing information which helps for archives containing many small files).
/// </summary>
public virtual bool IsSolid { get { return false; } }
public virtual bool IsSolid => false;
/// <summary>
/// The archive can find all the parts of the archive needed to fully extract the archive. This forces the parsing of the entire archive.


@@ -12,11 +12,28 @@ namespace SharpCompress.Archives
where TEntry : IArchiveEntry
where TVolume : IVolume
{
private class RebuildPauseDisposable : IDisposable
{
private readonly AbstractWritableArchive<TEntry, TVolume> archive;
public RebuildPauseDisposable(AbstractWritableArchive<TEntry, TVolume> archive)
{
this.archive = archive;
archive.pauseRebuilding = true;
}
public void Dispose()
{
archive.pauseRebuilding = false;
archive.RebuildModifiedCollection();
}
}
private readonly List<TEntry> newEntries = new List<TEntry>();
private readonly List<TEntry> removedEntries = new List<TEntry>();
private readonly List<TEntry> modifiedEntries = new List<TEntry>();
private bool hasModifications;
private bool pauseRebuilding;
internal AbstractWritableArchive(ArchiveType type)
: base(type)
@@ -45,8 +62,17 @@ namespace SharpCompress.Archives
}
}
public IDisposable PauseEntryRebuilding()
{
return new RebuildPauseDisposable(this);
}
private void RebuildModifiedCollection()
{
if (pauseRebuilding)
{
return;
}
hasModifications = true;
newEntries.RemoveAll(v => removedEntries.Contains(v));
modifiedEntries.Clear();
@@ -83,8 +109,7 @@ namespace SharpCompress.Archives
public TEntry AddEntry(string key, Stream source, bool closeStream,
long size = 0, DateTime? modified = null)
{
if (key.StartsWith("/")
|| key.StartsWith("\\"))
if (key.Length > 0 && key[0] is '/' or '\\')
{
key = key.Substring(1);
}
@@ -103,7 +128,7 @@ namespace SharpCompress.Archives
foreach (var path in Entries.Select(x => x.Key))
{
var p = path.Replace('/', '\\');
if (p.StartsWith("\\"))
if (p.Length > 0 && p[0] == '\\')
{
p = p.Substring(1);
}


@@ -10,7 +10,7 @@ using SharpCompress.Readers;
namespace SharpCompress.Archives
{
public class ArchiveFactory
public static class ArchiveFactory
{
/// <summary>
/// Opens an Archive for random access
@@ -25,7 +25,7 @@ namespace SharpCompress.Archives
{
throw new ArgumentException("Stream should be readable and seekable");
}
readerOptions = readerOptions ?? new ReaderOptions();
readerOptions ??= new ReaderOptions();
if (ZipArchive.IsZipFile(stream, null))
{
stream.Seek(0, SeekOrigin.Begin);
@@ -60,25 +60,13 @@ namespace SharpCompress.Archives
public static IWritableArchive Create(ArchiveType type)
{
switch (type)
return type switch
{
case ArchiveType.Zip:
{
return ZipArchive.Create();
}
case ArchiveType.Tar:
{
return TarArchive.Create();
}
case ArchiveType.GZip:
{
return GZipArchive.Create();
}
default:
{
throw new NotSupportedException("Cannot create Archives of type: " + type);
}
}
ArchiveType.Zip => ZipArchive.Create(),
ArchiveType.Tar => TarArchive.Create(),
ArchiveType.GZip => GZipArchive.Create(),
_ => throw new NotSupportedException("Cannot create Archives of type: " + type)
};
}
/// <summary>
@@ -100,35 +88,34 @@ namespace SharpCompress.Archives
public static IArchive Open(FileInfo fileInfo, ReaderOptions? options = null)
{
fileInfo.CheckNotNull(nameof(fileInfo));
options = options ?? new ReaderOptions { LeaveStreamOpen = false };
using (var stream = fileInfo.OpenRead())
options ??= new ReaderOptions { LeaveStreamOpen = false };
using var stream = fileInfo.OpenRead();
if (ZipArchive.IsZipFile(stream, null))
{
if (ZipArchive.IsZipFile(stream, null))
{
return ZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (SevenZipArchive.IsSevenZipFile(stream))
{
return SevenZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (GZipArchive.IsGZipFile(stream))
{
return GZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (RarArchive.IsRarFile(stream, options))
{
return RarArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (TarArchive.IsTarFile(stream))
{
return TarArchive.Open(fileInfo, options);
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip");
return ZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (SevenZipArchive.IsSevenZipFile(stream))
{
return SevenZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (GZipArchive.IsGZipFile(stream))
{
return GZipArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (RarArchive.IsRarFile(stream, options))
{
return RarArchive.Open(fileInfo, options);
}
stream.Seek(0, SeekOrigin.Begin);
if (TarArchive.IsTarFile(stream))
{
return TarArchive.Open(fileInfo, options);
}
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Archive Formats: Zip, GZip, Tar, Rar, 7Zip");
}
/// <summary>
@@ -137,12 +124,10 @@ namespace SharpCompress.Archives
public static void WriteToDirectory(string sourceArchive, string destinationDirectory,
ExtractionOptions? options = null)
{
using (IArchive archive = Open(sourceArchive))
using IArchive archive = Open(sourceArchive);
foreach (IArchiveEntry entry in archive.Entries)
{
foreach (IArchiveEntry entry in archive.Entries)
{
entry.WriteToDirectory(destinationDirectory, options);
}
entry.WriteToDirectory(destinationDirectory, options);
}
}
}
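The switch expression above is the writable entry point; a minimal sketch under the same assumptions as the diffs (AddAllFromDirectory is the extension shown further down, and the paths and compression choice are illustrative):

using System.IO;
using SharpCompress.Archives;
using SharpCompress.Common;
using SharpCompress.Writers;

// Create an empty zip archive, add a directory tree, and save it to disk.
using (var archive = ArchiveFactory.Create(ArchiveType.Zip))
using (var output = File.OpenWrite("backup.zip"))
{
    archive.AddAllFromDirectory("input");
    archive.SaveTo(output, new WriterOptions(CompressionType.Deflate));
}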


@@ -77,10 +77,9 @@ namespace SharpCompress.Archives.GZip
{
return false;
}
using (Stream stream = fileInfo.OpenRead())
{
return IsGZipFile(stream);
}
using Stream stream = fileInfo.OpenRead();
return IsGZipFile(stream);
}
public void SaveTo(string filePath)
@@ -99,7 +98,7 @@ namespace SharpCompress.Archives.GZip
public static bool IsGZipFile(Stream stream)
{
// read the header on the first read
byte[] header = new byte[10];
Span<byte> header = stackalloc byte[10];
// workitem 8501: handle edge case (decompress empty stream)
if (!stream.ReadFully(header))


@@ -31,7 +31,7 @@ namespace SharpCompress.Archives
}
streamListener.FireEntryExtractionEnd(archiveEntry);
}
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
@@ -45,11 +45,11 @@ namespace SharpCompress.Archives
/// <summary>
/// Extract to specific file
/// </summary>
public static void WriteToFile(this IArchiveEntry entry,
public static void WriteToFile(this IArchiveEntry entry,
string destinationFileName,
ExtractionOptions? options = null)
{
ExtractionMethods.WriteEntryToFile(entry, destinationFileName, options,
(x, fm) =>
{


@@ -11,5 +11,11 @@ namespace SharpCompress.Archives
IArchiveEntry AddEntry(string key, Stream source, bool closeStream, long size = 0, DateTime? modified = null);
void SaveTo(Stream stream, WriterOptions options);
/// <summary>
/// Use this to pause entry rebuilding when adding large collections of entries. Dispose when complete. A using statement is recommended.
/// </summary>
/// <returns>IDisposeable to resume entry rebuilding</returns>
IDisposable PauseEntryRebuilding();
}
}
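A minimal sketch of the pattern the new doc comment recommends, mirroring the AddAllFromDirectory change below (writableArchive and files are illustrative):

// Suspend entry rebuilding while adding a large batch; Dispose() resumes
// rebuilding and rebuilds the modified-entry collection once.
using (writableArchive.PauseEntryRebuilding())
{
    foreach (var path in files)
    {
        var fileInfo = new FileInfo(path);
        writableArchive.AddEntry(fileInfo.Name, fileInfo.OpenRead(), true,
                                 fileInfo.Length, fileInfo.LastWriteTime);
    }
}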


@@ -35,11 +35,14 @@ namespace SharpCompress.Archives
this IWritableArchive writableArchive,
string filePath, string searchPattern = "*.*", SearchOption searchOption = SearchOption.AllDirectories)
{
foreach (var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption))
using (writableArchive.PauseEntryRebuilding())
{
var fileInfo = new FileInfo(path);
writableArchive.AddEntry(path.Substring(filePath.Length), fileInfo.OpenRead(), true, fileInfo.Length,
fileInfo.LastWriteTime);
foreach (var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption))
{
var fileInfo = new FileInfo(path);
writableArchive.AddEntry(path.Substring(filePath.Length), fileInfo.OpenRead(), true, fileInfo.Length,
fileInfo.LastWriteTime);
}
}
}
public static IArchiveEntry AddEntry(this IWritableArchive writableArchive, string key, FileInfo fileInfo)


@@ -10,7 +10,8 @@ using SharpCompress.Readers.Rar;
namespace SharpCompress.Archives.Rar
{
public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
public class
RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
{
internal Lazy<IRarUnpack> UnpackV2017 { get; } = new Lazy<IRarUnpack>(() => new SharpCompress.Compressors.Rar.UnpackV2017.Unpack());
internal Lazy<IRarUnpack> UnpackV1 { get; } = new Lazy<IRarUnpack>(() => new SharpCompress.Compressors.Rar.UnpackV1.Unpack());
@@ -42,7 +43,7 @@ namespace SharpCompress.Archives.Rar
protected override IEnumerable<RarArchiveEntry> LoadEntries(IEnumerable<RarVolume> volumes)
{
return RarArchiveEntryFactory.GetEntries(this, volumes);
return RarArchiveEntryFactory.GetEntries(this, volumes, ReaderOptions);
}
protected override IEnumerable<RarVolume> LoadVolumes(IEnumerable<Stream> streams)
@@ -120,7 +121,7 @@ namespace SharpCompress.Archives.Rar
return IsRarFile(stream);
}
}
public static bool IsRarFile(Stream stream, ReaderOptions? options = null)
{
try


@@ -6,6 +6,7 @@ using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Compressors.Rar;
using SharpCompress.Readers;
namespace SharpCompress.Archives.Rar
{
@@ -13,11 +14,13 @@ namespace SharpCompress.Archives.Rar
{
private readonly ICollection<RarFilePart> parts;
private readonly RarArchive archive;
private readonly ReaderOptions readerOptions;
internal RarArchiveEntry(RarArchive archive, IEnumerable<RarFilePart> parts)
internal RarArchiveEntry(RarArchive archive, IEnumerable<RarFilePart> parts, ReaderOptions readerOptions)
{
this.parts = parts.ToList();
this.archive = archive;
this.readerOptions = readerOptions;
}
public override CompressionType CompressionType => CompressionType.Rar;
@@ -61,21 +64,22 @@ namespace SharpCompress.Archives.Rar
{
return new RarStream(archive.UnpackV1.Value, FileHeader, new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive));
}
return new RarStream(archive.UnpackV2017.Value, FileHeader, new MultiVolumeReadOnlyStream(Parts.Cast<RarFilePart>(), archive));
}
public bool IsComplete
{
get
{
return parts.Select(fp => fp.FileHeader).Any(fh => !fh.IsSplitAfter);
}
public bool IsComplete
{
get
{
var headers = parts.Select(x => x.FileHeader);
return !headers.First().IsSplitBefore && !headers.Last().IsSplitAfter;
}
}
private void CheckIncomplete()
{
if (!IsComplete)
if (!readerOptions.DisableCheckIncomplete && !IsComplete)
{
throw new IncompleteArchiveException("ArchiveEntry is incomplete and cannot perform this operation.");
}


@@ -1,5 +1,6 @@
using System.Collections.Generic;
using SharpCompress.Common.Rar;
using SharpCompress.Readers;
namespace SharpCompress.Archives.Rar
{
@@ -36,11 +37,12 @@ namespace SharpCompress.Archives.Rar
}
internal static IEnumerable<RarArchiveEntry> GetEntries(RarArchive archive,
IEnumerable<RarVolume> rarParts)
IEnumerable<RarVolume> rarParts,
ReaderOptions readerOptions)
{
foreach (var groupedParts in GetMatchedFileParts(rarParts))
{
yield return new RarArchiveEntry(archive, groupedParts);
yield return new RarArchiveEntry(archive, groupedParts, readerOptions);
}
}
}


@@ -23,7 +23,7 @@ namespace SharpCompress.Archives.Rar
yield return part;
}
}
internal static IEnumerable<RarVolume> GetParts(FileInfo fileInfo, ReaderOptions options)
{
FileInfoRarArchiveVolume part = new FileInfoRarArchiveVolume(fileInfo, options);
@@ -68,7 +68,7 @@ namespace SharpCompress.Archives.Rar
// .rar, .r00, .r01, ...
string extension = currentFileInfo.Extension;
StringBuilder buffer = new StringBuilder(currentFileInfo.FullName.Length);
var buffer = new StringBuilder(currentFileInfo.FullName.Length);
buffer.Append(currentFileInfo.FullName.Substring(0,
currentFileInfo.FullName.Length - extension.Length));
if (string.Compare(extension, ".rar", StringComparison.OrdinalIgnoreCase) == 0)
@@ -77,8 +77,7 @@ namespace SharpCompress.Archives.Rar
}
else
{
int num = 0;
if (int.TryParse(extension.Substring(2, 2), out num))
if (int.TryParse(extension.Substring(2, 2), out int num))
{
num++;
buffer.Append(".r");
@@ -111,12 +110,11 @@ namespace SharpCompress.Archives.Rar
}
StringBuilder buffer = new StringBuilder(currentFileInfo.FullName.Length);
buffer.Append(currentFileInfo.FullName, 0, startIndex);
int num = 0;
string numString = currentFileInfo.FullName.Substring(startIndex + 5,
currentFileInfo.FullName.IndexOf('.', startIndex + 5) -
startIndex - 5);
buffer.Append(".part");
if (int.TryParse(numString, out num))
if (int.TryParse(numString, out int num))
{
num++;
for (int i = 0; i < numString.Length - num.ToString().Length; i++)


@@ -131,7 +131,7 @@ namespace SharpCompress.Archives.SevenZip
}
}
private static ReadOnlySpan<byte> SIGNATURE => new byte[] {(byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C};
private static ReadOnlySpan<byte> SIGNATURE => new byte[] { (byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C };
private static bool SignatureMatch(Stream stream)
{


@@ -79,7 +79,7 @@ namespace SharpCompress.Archives.Tar
}
return false;
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>


@@ -24,7 +24,7 @@ namespace SharpCompress.Archives.Zip
/// if the compression method is set to deflate
/// </summary>
public CompressionLevel DeflateCompressionLevel { get; set; }
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
@@ -57,7 +57,7 @@ namespace SharpCompress.Archives.Zip
stream.CheckNotNull(nameof(stream));
return new ZipArchive(stream, readerOptions ?? new ReaderOptions());
}
public static bool IsZipFile(string filePath, string? password = null)
{
return IsZipFile(new FileInfo(filePath), password);
@@ -80,8 +80,7 @@ namespace SharpCompress.Archives.Zip
StreamingZipHeaderFactory headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding());
try
{
ZipHeader header =
headerFactory.ReadStreamHeader(stream).FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
ZipHeader? header = headerFactory.ReadStreamHeader(stream).FirstOrDefault(x => x.ZipHeaderType != ZipHeaderType.Split);
if (header is null)
{
return false;
@@ -97,7 +96,7 @@ namespace SharpCompress.Archives.Zip
return false;
}
}
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>


@@ -5,7 +5,7 @@ using System.Runtime.CompilerServices;
[assembly: AssemblyTitle("SharpCompress")]
[assembly: AssemblyProduct("SharpCompress")]
[assembly: InternalsVisibleTo("SharpCompress.Test" + SharpCompress.AssemblyInfo.PublicKeySuffix)]
[assembly: CLSCompliant(true)]
[assembly: CLSCompliant(false)]
namespace SharpCompress
{


@@ -36,7 +36,7 @@ namespace SharpCompress.Common
Password = password;
}
#if NETSTANDARD1_3 || NETSTANDARD2_0 || NETSTANDARD2_1
#if !NETFRAMEWORK
static ArchiveEncoding()
{
Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);


@@ -6,6 +6,7 @@
Zip,
Tar,
SevenZip,
GZip
GZip,
ZStandard
}
}


@@ -14,6 +14,7 @@
LZip,
Xz,
Unknown,
Deflate64
Deflate64,
ZStandard,
}
}


@@ -47,7 +47,8 @@ namespace SharpCompress.Common
public override bool CanWrite => false;
public override void Flush() {
public override void Flush()
{
}
public override long Length => _stream.Length;


@@ -8,30 +8,39 @@ namespace SharpCompress.Common
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteEntryToDirectory(IEntry entry,
public static void WriteEntryToDirectory(IEntry entry,
string destinationDirectory,
ExtractionOptions? options,
ExtractionOptions? options,
Action<string, ExtractionOptions?> write)
{
string destinationFileName;
string file = Path.GetFileName(entry.Key);
string fullDestinationDirectoryPath = Path.GetFullPath(destinationDirectory);
options ??= new ExtractionOptions()
{
Overwrite = true
};
//check for trailing slash.
if (fullDestinationDirectoryPath[fullDestinationDirectoryPath.Length - 1] != Path.DirectorySeparatorChar)
{
fullDestinationDirectoryPath += Path.DirectorySeparatorChar;
}
if (!Directory.Exists(fullDestinationDirectoryPath))
{
throw new ExtractionException($"Directory does not exist to extract to: {fullDestinationDirectoryPath}");
}
options ??= new ExtractionOptions()
{
Overwrite = true
};
string file = Path.GetFileName(entry.Key);
if (options.ExtractFullPath)
{
string folder = Path.GetDirectoryName(entry.Key);
string destdir = Path.GetFullPath(
Path.Combine(fullDestinationDirectoryPath, folder)
);
string folder = Path.GetDirectoryName(entry.Key)!;
string destdir = Path.GetFullPath(Path.Combine(fullDestinationDirectoryPath, folder));
if (!Directory.Exists(destdir))
{
if (!destdir.StartsWith(fullDestinationDirectoryPath))
if (!destdir.StartsWith(fullDestinationDirectoryPath, StringComparison.Ordinal))
{
throw new ExtractionException("Entry is trying to create a directory outside of the destination directory.");
}
@@ -41,7 +50,7 @@ namespace SharpCompress.Common
destinationFileName = Path.Combine(destdir, file);
}
else
{
{
destinationFileName = Path.Combine(fullDestinationDirectoryPath, file);
}
@@ -50,7 +59,7 @@ namespace SharpCompress.Common
{
destinationFileName = Path.GetFullPath(destinationFileName);
if (!destinationFileName.StartsWith(fullDestinationDirectoryPath))
if (!destinationFileName.StartsWith(fullDestinationDirectoryPath, StringComparison.Ordinal))
{
throw new ExtractionException("Entry is trying to write a file outside of the destination directory.");
}
@@ -61,7 +70,7 @@ namespace SharpCompress.Common
Directory.CreateDirectory(destinationFileName);
}
}
public static void WriteEntryToFile(IEntry entry, string destinationFileName,
ExtractionOptions? options,
Action<string, FileMode> openAndWrite)
@@ -78,9 +87,9 @@ namespace SharpCompress.Common
{
FileMode fm = FileMode.Create;
options ??= new ExtractionOptions()
{
Overwrite = true
};
{
Overwrite = true
};
if (!options.Overwrite)
{


@@ -7,7 +7,7 @@ namespace SharpCompress.Common
/// <summary>
/// overwrite target if it exists
/// </summary>
public bool Overwrite {get; set; }
public bool Overwrite { get; set; }
/// <summary>
/// extract with internal directory structure


@@ -15,7 +15,7 @@ namespace SharpCompress.Common.GZip
public override CompressionType CompressionType => CompressionType.GZip;
public override long Crc => 0;
public override long Crc => _filePart.Crc ?? 0;
public override string Key => _filePart.FilePartName;
@@ -23,7 +23,7 @@ namespace SharpCompress.Common.GZip
public override long CompressedSize => 0;
public override long Size => 0;
public override long Size => _filePart.UncompressedSize ?? 0;
public override DateTime? LastModifiedTime => _filePart.DateModified;


@@ -8,7 +8,7 @@ using SharpCompress.Compressors.Deflate;
namespace SharpCompress.Common.GZip
{
internal class GZipFilePart : FilePart
internal sealed class GZipFilePart : FilePart
{
private string? _name;
private readonly Stream _stream;
@@ -16,14 +16,23 @@ namespace SharpCompress.Common.GZip
internal GZipFilePart(Stream stream, ArchiveEncoding archiveEncoding)
: base(archiveEncoding)
{
ReadAndValidateGzipHeader(stream);
EntryStartPosition = stream.Position;
_stream = stream;
ReadAndValidateGzipHeader();
if (stream.CanSeek)
{
long position = stream.Position;
stream.Position = stream.Length - 8;
ReadTrailer();
stream.Position = position;
}
EntryStartPosition = stream.Position;
}
internal long EntryStartPosition { get; }
internal DateTime? DateModified { get; private set; }
internal int? Crc { get; private set; }
internal int? UncompressedSize { get; private set; }
internal override string FilePartName => _name!;
@@ -37,11 +46,21 @@ namespace SharpCompress.Common.GZip
return _stream;
}
private void ReadAndValidateGzipHeader(Stream stream)
private void ReadTrailer()
{
// Read and potentially verify the GZIP trailer: CRC32 and size mod 2^32
Span<byte> trailer = stackalloc byte[8];
int n = _stream.Read(trailer);
Crc = BinaryPrimitives.ReadInt32LittleEndian(trailer);
UncompressedSize = BinaryPrimitives.ReadInt32LittleEndian(trailer.Slice(4));
}
private void ReadAndValidateGzipHeader()
{
// read the header on the first read
byte[] header = new byte[10];
int n = stream.Read(header, 0, header.Length);
Span<byte> header = stackalloc byte[10];
int n = _stream.Read(header);
// workitem 8501: handle edge case (decompress empty stream)
if (n == 0)
@@ -59,17 +78,17 @@ namespace SharpCompress.Common.GZip
throw new ZlibException("Bad GZIP header.");
}
int timet = BinaryPrimitives.ReadInt32LittleEndian(header.AsSpan(4));
int timet = BinaryPrimitives.ReadInt32LittleEndian(header.Slice(4));
DateModified = TarHeader.EPOCH.AddSeconds(timet);
if ((header[3] & 0x04) == 0x04)
{
// read and discard extra field
n = stream.Read(header, 0, 2); // 2-byte length field
n = _stream.Read(header.Slice(0, 2)); // 2-byte length field
Int16 extraLength = (Int16)(header[0] + header[1] * 256);
short extraLength = (short)(header[0] + header[1] * 256);
byte[] extra = new byte[extraLength];
if (!stream.ReadFully(extra))
if (!_stream.ReadFully(extra))
{
throw new ZlibException("Unexpected end-of-file reading GZIP header.");
}
@@ -77,27 +96,27 @@ namespace SharpCompress.Common.GZip
}
if ((header[3] & 0x08) == 0x08)
{
_name = ReadZeroTerminatedString(stream);
_name = ReadZeroTerminatedString(_stream);
}
if ((header[3] & 0x10) == 0x010)
{
ReadZeroTerminatedString(stream);
ReadZeroTerminatedString(_stream);
}
if ((header[3] & 0x02) == 0x02)
{
stream.ReadByte(); // CRC16, ignore
_stream.ReadByte(); // CRC16, ignore
}
}
private string ReadZeroTerminatedString(Stream stream)
{
byte[] buf1 = new byte[1];
Span<byte> buf1 = stackalloc byte[1];
var list = new List<byte>();
bool done = false;
do
{
// workitem 7740
int n = stream.Read(buf1, 0, 1);
int n = stream.Read(buf1);
if (n != 1)
{
throw new ZlibException("Unexpected EOF reading GZIP header.");


@@ -4,8 +4,8 @@ namespace SharpCompress.Common.Rar.Headers
{
internal class AvHeader : RarHeader
{
public AvHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Av)
public AvHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Av)
{
if (IsRar5)
{


@@ -6,7 +6,7 @@ namespace SharpCompress.Common.Rar.Headers
{
internal class ArchiveCryptHeader : RarHeader
{
private const int CRYPT_VERSION = 0; // Supported encryption version.
private const int SIZE_SALT50 = 16;
private const int SIZE_SALT30 = 8;
@@ -15,14 +15,14 @@ namespace SharpCompress.Common.Rar.Headers
private const int SIZE_PSWCHECK_CSUM = 4;
private const int CRYPT5_KDF_LG2_COUNT = 15; // LOG2 of PDKDF2 iteration count.
private const int CRYPT5_KDF_LG2_COUNT_MAX = 24; // LOG2 of maximum accepted iteration count.
private bool _usePswCheck;
private uint _lg2Count; // Log2 of PBKDF2 repetition count.
private byte[] _salt;
private byte[] _pswCheck;
private byte[] _pswCheckCsm;
public ArchiveCryptHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Crypt)
{
@@ -35,12 +35,12 @@ namespace SharpCompress.Common.Rar.Headers
{
//error?
return;
}
}
var encryptionFlags = reader.ReadRarVIntUInt32();
_usePswCheck = FlagUtility.HasFlag(encryptionFlags, EncryptionFlagsV5.CHFL_CRYPT_PSWCHECK);
_lg2Count = reader.ReadRarVIntByte(1);
//UsePswCheck = HasHeaderFlag(EncryptionFlagsV5.CHFL_CRYPT_PSWCHECK);
if (_lg2Count > CRYPT5_KDF_LG2_COUNT_MAX)
{


@@ -4,14 +4,14 @@ namespace SharpCompress.Common.Rar.Headers
{
internal sealed class ArchiveHeader : RarHeader
{
public ArchiveHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Archive)
public ArchiveHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Archive)
{
}
protected override void ReadFinish(MarkingBinaryReader reader)
{
if (IsRar5)
if (IsRar5)
{
Flags = reader.ReadRarVIntUInt16();
if (HasFlag(ArchiveFlagsV5.HAS_VOLUME_NUMBER))
@@ -22,8 +22,8 @@ namespace SharpCompress.Common.Rar.Headers
//if (ExtraSize != 0) {
// ReadLocator(reader);
//}
}
else
}
else
{
Flags = HeaderFlags;
HighPosAv = reader.ReadInt16();
@@ -35,7 +35,8 @@ namespace SharpCompress.Common.Rar.Headers
}
}
private void ReadLocator(MarkingBinaryReader reader) {
private void ReadLocator(MarkingBinaryReader reader)
{
var size = reader.ReadRarVIntUInt16();
var type = reader.ReadRarVIntUInt16();
if (type != 1)
@@ -47,18 +48,20 @@ namespace SharpCompress.Common.Rar.Headers
const ushort hasQuickOpenOffset = 0x01;
const ushort hasRecoveryOffset = 0x02;
ulong quickOpenOffset = 0;
if ((flags & hasQuickOpenOffset) == hasQuickOpenOffset) {
if ((flags & hasQuickOpenOffset) == hasQuickOpenOffset)
{
quickOpenOffset = reader.ReadRarVInt();
}
ulong recoveryOffset = 0;
if ((flags & hasRecoveryOffset) == hasRecoveryOffset) {
if ((flags & hasRecoveryOffset) == hasRecoveryOffset)
{
recoveryOffset = reader.ReadRarVInt();
}
}
private ushort Flags { get; set; }
private ushort Flags { get; set; }
private bool HasFlag(ushort flag)
private bool HasFlag(ushort flag)
{
return (Flags & flag) == flag;
}


@@ -5,8 +5,8 @@ namespace SharpCompress.Common.Rar.Headers
internal class CommentHeader : RarHeader
{
protected CommentHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Comment)
{
: base(header, reader, HeaderType.Comment)
{
if (IsRar5)
{
throw new InvalidFormatException("unexpected rar5 record");


@@ -4,14 +4,14 @@ namespace SharpCompress.Common.Rar.Headers
{
internal class EndArchiveHeader : RarHeader
{
public EndArchiveHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.EndArchive)
public EndArchiveHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.EndArchive)
{
}
protected override void ReadFinish(MarkingBinaryReader reader)
{
if (IsRar5)
if (IsRar5)
{
Flags = reader.ReadRarVIntUInt16();
}
@@ -31,7 +31,7 @@ namespace SharpCompress.Common.Rar.Headers
private ushort Flags { get; set; }
private bool HasFlag(ushort flag)
private bool HasFlag(ushort flag)
{
return (Flags & flag) == flag;
}


@@ -21,18 +21,18 @@ namespace SharpCompress.Common.Rar.Headers
{
private uint _fileCrc;
public FileHeader(RarHeader header, RarCrcBinaryReader reader, HeaderType headerType)
: base(header, reader, headerType)
public FileHeader(RarHeader header, RarCrcBinaryReader reader, HeaderType headerType)
: base(header, reader, headerType)
{
}
protected override void ReadFinish(MarkingBinaryReader reader)
protected override void ReadFinish(MarkingBinaryReader reader)
{
if (IsRar5)
if (IsRar5)
{
ReadFromReaderV5(reader);
}
else
}
else
{
ReadFromReaderV4(reader);
}
@@ -49,11 +49,13 @@ namespace SharpCompress.Common.Rar.Headers
FileAttributes = reader.ReadRarVIntUInt32();
if (HasFlag(FileFlagsV5.HAS_MOD_TIME)) {
if (HasFlag(FileFlagsV5.HAS_MOD_TIME))
{
FileLastModifiedTime = Utility.UnixTimeToDateTime(reader.ReadUInt32());
}
if (HasFlag(FileFlagsV5.HAS_CRC32)) {
if (HasFlag(FileFlagsV5.HAS_CRC32))
{
FileCrc = reader.ReadUInt32();
}
@@ -65,7 +67,7 @@ namespace SharpCompress.Common.Rar.Headers
// but it was already used in RAR 1.5 and Unpack needs to distinguish
// them.
CompressionAlgorithm = (byte)((compressionInfo & 0x3f) + 50);
// 7th bit (0x0040) defines the solid flag. If it is set, RAR continues to use the compression dictionary left after processing preceding files.
// It can be set only for file headers and is never set for service headers.
IsSolid = (compressionInfo & 0x40) == 0x40;
@@ -74,7 +76,7 @@ namespace SharpCompress.Common.Rar.Headers
CompressionMethod = (byte)((compressionInfo >> 7) & 0x7);
// Bits 11 - 14 (0x3c00) define the minimum size of dictionary size required to extract data. Value 0 means 128 KB, 1 - 256 KB, ..., 14 - 2048 MB, 15 - 4096 MB.
WindowSize = IsDirectory ? 0 : ((size_t)0x20000) << ((compressionInfo>>10) & 0xf);
WindowSize = IsDirectory ? 0 : ((size_t)0x20000) << ((compressionInfo >> 10) & 0xf);
HostOs = reader.ReadRarVIntByte();
@@ -101,18 +103,20 @@ namespace SharpCompress.Common.Rar.Headers
FileName = ConvertPathV5(Encoding.UTF8.GetString(b, 0, b.Length));
// extra size seems to be redudant since we know the total header size
if (ExtraSize != RemainingHeaderBytes(reader))
if (ExtraSize != RemainingHeaderBytes(reader))
{
throw new InvalidFormatException("rar5 header size / extra size inconsistency");
}
isEncryptedRar5 = false;
while (RemainingHeaderBytes(reader) > 0) {
while (RemainingHeaderBytes(reader) > 0)
{
var size = reader.ReadRarVIntUInt16();
int n = RemainingHeaderBytes(reader);
var type = reader.ReadRarVIntUInt16();
switch (type) {
switch (type)
{
//TODO
case 1: // file encryption
{
@@ -120,7 +124,7 @@ namespace SharpCompress.Common.Rar.Headers
//var version = reader.ReadRarVIntByte();
//if (version != 0) throw new InvalidFormatException("unknown encryption algorithm " + version);
}
}
break;
// case 2: // file hash
// {
@@ -131,38 +135,41 @@ namespace SharpCompress.Common.Rar.Headers
{
ushort flags = reader.ReadRarVIntUInt16();
var isWindowsTime = (flags & 1) == 0;
if ((flags & 0x2) == 0x2) {
if ((flags & 0x2) == 0x2)
{
FileLastModifiedTime = ReadExtendedTimeV5(reader, isWindowsTime);
}
if ((flags & 0x4) == 0x4) {
if ((flags & 0x4) == 0x4)
{
FileCreatedTime = ReadExtendedTimeV5(reader, isWindowsTime);
}
if ((flags & 0x8) == 0x8)
{
FileLastAccessedTime = ReadExtendedTimeV5(reader, isWindowsTime);
}
}
break;
//TODO
// case 4: // file version
// {
//
// }
// break;
// case 5: // file system redirection
// {
//
// }
// break;
// case 6: // unix owner
// {
//
// }
// break;
// case 7: // service data
// {
//
// }
// break;
default:
// skip unknown record types to allow new record types to be added in the future
@@ -171,25 +178,26 @@ namespace SharpCompress.Common.Rar.Headers
// drain any trailing bytes of extra record
int did = n - RemainingHeaderBytes(reader);
int drain = size - did;
if (drain > 0)
{
reader.ReadBytes(drain);
}
}
if (AdditionalDataSize != 0)
{
CompressedSize = AdditionalDataSize;
}
}
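
The loop above walks vint-prefixed (size, type) records and drains whatever a record's parser did not consume. A minimal stand-alone sketch of that pattern over a single-record buffer; the helper names are ours, not the library's, and `ReadVInt` follows the RAR5 rule of 7 data bits per byte with the high bit as a continuation flag.

```csharp
using System;
using System.IO;

static class ExtraAreaDemo
{
    static ulong ReadVInt(BinaryReader r)
    {
        ulong value = 0;
        for (int shift = 0; shift < 64; shift += 7)
        {
            byte b = r.ReadByte();
            value |= (ulong)(b & 0x7f) << shift;
            if ((b & 0x80) == 0) break; // high bit clear: last byte
        }
        return value;
    }

    static void Main()
    {
        // one record: size = 3 (type byte + 2 payload bytes), type = 2, payload AA BB
        var data = new byte[] { 0x03, 0x02, 0xAA, 0xBB };
        using var reader = new BinaryReader(new MemoryStream(data));
        while (reader.BaseStream.Position < reader.BaseStream.Length)
        {
            ulong size = ReadVInt(reader);
            long afterSize = reader.BaseStream.Position;
            ulong type = ReadVInt(reader);
            Console.WriteLine($"record type {type}, size {size}");
            // drain any trailing bytes of the record, as the loop above does
            long consumed = reader.BaseStream.Position - afterSize;
            reader.BaseStream.Position += (long)size - consumed;
        }
    }
}
```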
private static DateTime ReadExtendedTimeV5(MarkingBinaryReader reader, bool isWindowsTime)
{
if (isWindowsTime)
{
return DateTime.FromFileTime(reader.ReadInt64());
}
else
{
return Utility.UnixTimeToDateTime(reader.ReadUInt32());
}
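
For reference, the two encodings ReadExtendedTimeV5 switches between are Windows FILETIME (8 bytes of 100-nanosecond ticks since 1601-01-01 UTC) and 32-bit Unix time (seconds since 1970-01-01 UTC). A tiny illustration of both conversions:

```csharp
using System;

static class TimeDemo
{
    static readonly DateTime UnixEpoch = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);

    static void Main()
    {
        long fileTime = DateTime.UtcNow.ToFileTimeUtc();
        uint unixTime = (uint)(DateTime.UtcNow - UnixEpoch).TotalSeconds;

        Console.WriteLine(DateTime.FromFileTimeUtc(fileTime)); // round-trips exactly
        Console.WriteLine(UnixEpoch.AddSeconds(unixTime));     // same instant, 1 s resolution
    }
}
```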
@@ -201,7 +209,7 @@ namespace SharpCompress.Common.Rar.Headers
{
// replace embedded \\ with valid filename char
return path.Replace('\\', '-').Replace('/', '\\');
}
return path;
}
@@ -376,20 +384,22 @@ namespace SharpCompress.Common.Rar.Headers
private ushort Flags { get; set; }
private bool HasFlag(ushort flag)
{
return (Flags & flag) == flag;
}
internal uint FileCrc
{
get
{
if (IsRar5 && !HasFlag(FileFlagsV5.HAS_CRC32))
{
//!!! rar5:
throw new InvalidOperationException("TODO rar5");
}
return _fileCrc;
}
private set => _fileCrc = value;
}
@@ -409,7 +419,7 @@ namespace SharpCompress.Common.Rar.Headers
//case 29: // rar 3.x compression
//case 50: // RAR 5.0 compression algorithm.
internal byte CompressionAlgorithm { get; private set; }
public bool IsSolid { get; private set; }
// unused for UnpackV1 implementation (limitation)
@@ -427,13 +437,14 @@ namespace SharpCompress.Common.Rar.Headers
internal long DataStartPosition { get; set; }
public Stream PackedStream { get; set; }
public bool IsSplitBefore => IsRar5 ? HasHeaderFlag(HeaderFlagsV5.SPLIT_BEFORE) : HasFlag(FileFlagsV4.SPLIT_BEFORE);
public bool IsSplitAfter => IsRar5 ? HasHeaderFlag(HeaderFlagsV5.SPLIT_AFTER) : HasFlag(FileFlagsV4.SPLIT_AFTER);
public bool IsDirectory => HasFlag(IsRar5 ? FileFlagsV5.DIRECTORY : FileFlagsV4.DIRECTORY);
private bool isEncryptedRar5 = false;
public bool IsEncrypted => IsRar5 ? isEncryptedRar5 : HasFlag(FileFlagsV4.PASSWORD);
internal DateTime? FileLastModifiedTime { get; private set; }
internal DateTime? FileCreatedTime { get; private set; }

View File

@@ -42,10 +42,10 @@ namespace SharpCompress.Common.Rar.Headers
}
internal static class EncryptionFlagsV5
{
// RAR 5.0 archive encryption header specific flags.
public const uint CHFL_CRYPT_PSWCHECK = 0x01; // Password check data is present.
public const uint FHEXTRA_CRYPT_PSWCHECK = 0x01; // Password check data is present.
public const uint FHEXTRA_CRYPT_HASHMAC = 0x02;
}

View File

@@ -1,6 +1,6 @@
namespace SharpCompress.Common.Rar.Headers
{
internal interface IRarHeader
{
HeaderType HeaderType { get; }
}

View File

@@ -11,39 +11,39 @@ namespace SharpCompress.Common.Rar.Headers
public bool IsRar5 { get; }
private MarkHeader(bool isRar5)
{
IsRar5 = isRar5;
}
public HeaderType HeaderType => HeaderType.Mark;
private static byte GetByte(Stream stream)
{
var b = stream.ReadByte();
if (b != -1)
{
return (byte)b;
}
throw new EndOfStreamException();
}
public static MarkHeader Read(Stream stream, bool leaveStreamOpen, bool lookForHeader)
{
int maxScanIndex = lookForHeader ? MAX_SFX_SIZE : 0;
try
{
int start = -1;
var b = GetByte(stream); start++;
while (start <= maxScanIndex)
{
// Rar old signature: 52 45 7E 5E
// Rar4 signature: 52 61 72 21 1A 07 00
// Rar5 signature: 52 61 72 21 1A 07 01 00
if (b == 0x52)
{
b = GetByte(stream); start++;
if (b == 0x61)
{
b = GetByte(stream); start++;
if (b != 0x72)
@@ -70,7 +70,7 @@ namespace SharpCompress.Common.Rar.Headers
}
b = GetByte(stream); start++;
if (b == 1)
{
b = GetByte(stream); start++;
if (b != 0)
@@ -79,13 +79,13 @@ namespace SharpCompress.Common.Rar.Headers
}
return new MarkHeader(true); // Rar5
}
else if (b == 0)
{
return new MarkHeader(false); // Rar4
}
}
}
else if (b == 0x45)
{
b = GetByte(stream); start++;
if (b != 0x7e)
@@ -100,9 +100,9 @@ namespace SharpCompress.Common.Rar.Headers
}
throw new InvalidFormatException("Rar format version pre-4 is unsupported.");
}
}
else
{
b = GetByte(stream); start++;
}
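
The byte-by-byte probe above is equivalent to matching the three documented signatures against a buffer. A compact sketch (illustrative only; the real reader scans a Stream one byte at a time and tolerates SFX stub prefixes up to MAX_SFX_SIZE):

```csharp
using System;

static class MarkDemo
{
    // Rar4 signature: 52 61 72 21 1A 07 00; Rar5 adds a 01 before the final 00.
    static readonly byte[] Rar4 = { 0x52, 0x61, 0x72, 0x21, 0x1A, 0x07, 0x00 };
    static readonly byte[] Rar5 = { 0x52, 0x61, 0x72, 0x21, 0x1A, 0x07, 0x01, 0x00 };

    static bool StartsWith(ReadOnlySpan<byte> data, ReadOnlySpan<byte> sig) =>
        data.Length >= sig.Length && data.Slice(0, sig.Length).SequenceEqual(sig);

    static void Main()
    {
        byte[] header = { 0x52, 0x61, 0x72, 0x21, 0x1A, 0x07, 0x01, 0x00, 0x33 };
        Console.WriteLine(StartsWith(header, Rar5) ? "Rar5"
                        : StartsWith(header, Rar4) ? "Rar4" : "unknown");
    }
}
```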

View File

@@ -2,23 +2,23 @@
namespace SharpCompress.Common.Rar.Headers
{
internal sealed class NewSubHeaderType : IEquatable<NewSubHeaderType>
{
internal static readonly NewSubHeaderType SUBHEAD_TYPE_CMT = new('C', 'M', 'T');
//internal static final NewSubHeaderType SUBHEAD_TYPE_ACL = new (new byte[]{'A','C','L'});
//internal static final NewSubHeaderType SUBHEAD_TYPE_STREAM = new (new byte[]{'S','T','M'});
//internal static final NewSubHeaderType SUBHEAD_TYPE_UOWNER = new (new byte[]{'U','O','W'});
//internal static final NewSubHeaderType SUBHEAD_TYPE_AV = new (new byte[]{'A','V'});
internal static readonly NewSubHeaderType SUBHEAD_TYPE_RR = new('R', 'R');
//internal static final NewSubHeaderType SUBHEAD_TYPE_OS2EA = new (new byte[]{'E','A','2'});
//internal static final NewSubHeaderType SUBHEAD_TYPE_BEOSEA = new (new byte[]{'E','A','B','E'});
private readonly byte[] _bytes;
@@ -37,19 +37,13 @@ namespace SharpCompress.Common.Rar.Headers
{
return false;
}
return _bytes.AsSpan().SequenceEqual(bytes);
}
public bool Equals(NewSubHeaderType? other)
{
return other is not null && Equals(other._bytes);
}
}
}
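
The removed loop and the new span call are behaviorally identical; SequenceEqual simply gets the runtime's vectorized fast paths. A tiny example of the pattern:

```csharp
using System;

static class SpanEqualDemo
{
    static void Main()
    {
        byte[] a = { (byte)'C', (byte)'M', (byte)'T' };
        byte[] b = { (byte)'C', (byte)'M', (byte)'T' };
        // element-by-element comparison, same result as the hand-rolled loop
        Console.WriteLine(a.AsSpan().SequenceEqual(b)); // True
    }
}
```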

View File

@@ -5,8 +5,8 @@ namespace SharpCompress.Common.Rar.Headers
// ProtectHeader is part of the Recovery Record feature
internal sealed class ProtectHeader : RarHeader
{
public ProtectHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Protect)
{
if (IsRar5)
{

View File

@@ -23,12 +23,12 @@ namespace SharpCompress.Common.Rar.Headers
}
}
private RarHeader(RarCrcBinaryReader reader, bool isRar5, ArchiveEncoding archiveEncoding)
{
_headerType = HeaderType.Null;
_isRar5 = isRar5;
ArchiveEncoding = archiveEncoding;
if (IsRar5)
{
HeaderCrc = reader.ReadUInt32();
reader.ResetCrc();
@@ -45,7 +45,9 @@ namespace SharpCompress.Common.Rar.Headers
{
AdditionalDataSize = (long)reader.ReadRarVInt();
}
}
else
{
reader.Mark();
HeaderCrc = reader.ReadUInt16();
reader.ResetCrc();
@@ -59,7 +61,8 @@ namespace SharpCompress.Common.Rar.Headers
}
}
protected RarHeader(RarHeader header, RarCrcBinaryReader reader, HeaderType headerType)
{
_headerType = headerType;
_isRar5 = header.IsRar5;
HeaderCrc = header.HeaderCrc;
@@ -80,7 +83,8 @@ namespace SharpCompress.Common.Rar.Headers
VerifyHeaderCrc(reader.GetCrc32());
}
protected int RemainingHeaderBytes(MarkingBinaryReader reader)
{
return checked(HeaderSize - (int)reader.CurrentReadByteCount);
}
@@ -108,7 +112,7 @@ namespace SharpCompress.Common.Rar.Headers
protected ushort HeaderFlags { get; }
protected bool HasHeaderFlag(ushort flag)
{
return (HeaderFlags & flag) == flag;
}

View File

@@ -41,11 +41,11 @@ namespace SharpCompress.Common.Rar.Headers
private RarHeader? TryReadNextHeader(Stream stream)
{
RarCrcBinaryReader reader;
if (!IsEncrypted)
{
reader = new RarCrcBinaryReader(stream);
}
else
{
if (Options.Password is null)
{
@@ -65,7 +65,7 @@ namespace SharpCompress.Common.Rar.Headers
case HeaderCodeV.RAR4_ARCHIVE_HEADER:
{
var ah = new ArchiveHeader(header, reader);
if (ah.IsEncrypted == true)
{
//!!! rar5 we don't know yet
IsEncrypted = true;
@@ -150,11 +150,11 @@ namespace SharpCompress.Common.Rar.Headers
return new EndArchiveHeader(header, reader);
}
case HeaderCodeV.RAR5_ARCHIVE_ENCRYPTION_HEADER:
{
var ch = new ArchiveCryptHeader(header, reader);
IsEncrypted = true;
return ch;
}
default:
{
throw new InvalidFormatException("Unknown Rar Header: " + header.HeaderCode);
@@ -162,21 +162,26 @@ namespace SharpCompress.Common.Rar.Headers
}
}
private void SkipData(FileHeader fh, RarCrcBinaryReader reader)
{
switch (StreamingMode)
{
case StreamingMode.Seekable:
{
fh.DataStartPosition = reader.BaseStream.Position;
reader.BaseStream.Position += fh.CompressedSize;
}
break;
case StreamingMode.Streaming:
{
//skip the data because it's useless?
reader.BaseStream.Skip(fh.CompressedSize);
}
break;
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
}
}
}
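
In the Streaming branch above, Position cannot be adjusted, so a skip has to read and discard. A stand-alone equivalent of such a skip helper, as a sketch (the library's actual Stream.Skip extension may be implemented differently):

```csharp
using System;
using System.IO;

static class SkipDemo
{
    public static void Skip(Stream stream, long count)
    {
        byte[] buffer = new byte[81920];
        while (count > 0)
        {
            // read into a scratch buffer and throw the bytes away
            int read = stream.Read(buffer, 0, (int)Math.Min(buffer.Length, count));
            if (read <= 0)
            {
                throw new EndOfStreamException();
            }
            count -= read;
        }
    }
}
```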

View File

@@ -5,8 +5,8 @@ namespace SharpCompress.Common.Rar.Headers
internal class SignHeader : RarHeader
{
protected SignHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Sign)
{
if (IsRar5)
{
throw new InvalidFormatException("unexpected rar5 record");

View File

@@ -50,11 +50,11 @@ namespace SharpCompress.Common.Rar
if (sizeToRead > 0)
{
int alignedSize = sizeToRead + ((~sizeToRead + 1) & 0xf);
Span<byte> cipherText = stackalloc byte[RarRijndael.CRYPTO_BLOCK_SIZE];
for (int i = 0; i < alignedSize / 16; i++)
{
//long ax = System.currentTimeMillis();
_actualStream.Read(cipherText);
var readBytes = _rijndael.ProcessBlock(cipherText);
foreach (var readByte in readBytes)

View File

@@ -11,7 +11,7 @@ namespace SharpCompress.Common.Rar
/// As the V2017 port isn't complete, add this check to use the legacy Rar code.
/// </summary>
internal bool IsRarV3 => FileHeader.CompressionAlgorithm == 29 || FileHeader.CompressionAlgorithm == 36;
/// <summary>
/// The File's 32 bit CRC Hash
/// </summary>

View File

@@ -27,13 +27,13 @@ namespace SharpCompress.Common.Rar
_rijndael = new RijndaelEngine();
_aesInitializationVector = new byte[CRYPTO_BLOCK_SIZE];
int rawLength = 2 * _password.Length;
byte[] rawPassword = new byte[rawLength + 8];
byte[] passwordBytes = Encoding.UTF8.GetBytes(_password);
for (int i = 0; i < _password.Length; i++)
{
rawPassword[i * 2] = passwordBytes[i];
rawPassword[i * 2 + 1] = 0;
}
for (int i = 0; i < _salt.Length; i++)
{
@@ -68,11 +68,11 @@ namespace SharpCompress.Common.Rar
{
for (int j = 0; j < 4; j++)
{
aesKey[i * 4 + j] = (byte)
(((digest[i * 4] * 0x1000000) & 0xff000000 |
(uint)((digest[i * 4 + 1] * 0x10000) & 0xff0000) |
(uint)((digest[i * 4 + 2] * 0x100) & 0xff00) |
(uint)(digest[i * 4 + 3] & 0xff)) >> (j * 8));
}
}
@@ -87,11 +87,11 @@ namespace SharpCompress.Common.Rar
return rijndael;
}
public byte[] ProcessBlock(ReadOnlySpan<byte> cipherText)
{
Span<byte> plainText = stackalloc byte[CRYPTO_BLOCK_SIZE]; // 16 bytes
byte[] decryptedBytes = new byte[CRYPTO_BLOCK_SIZE];
_rijndael.ProcessBlock(cipherText, plainText);
for (int j = 0; j < CRYPTO_BLOCK_SIZE; j++)
{

View File

@@ -39,20 +39,20 @@ namespace SharpCompress.Common.Rar
switch (header.HeaderType)
{
case HeaderType.Mark:
{
lastMarkHeader = (MarkHeader)header;
}
break;
case HeaderType.Archive:
{
ArchiveHeader = (ArchiveHeader)header;
}
break;
case HeaderType.File:
{
var fh = (FileHeader)header;
yield return CreateFilePart(lastMarkHeader!, fh);
}
break;
}
}

View File

@@ -89,7 +89,7 @@ namespace SharpCompress.Common.SevenZip
{
// v3.13 incorrectly worked with empty folders
// v4.07: Loop for skipping empty folders
for (; ; )
{
if (folderIndex >= _folders.Count)
{

View File

@@ -90,7 +90,7 @@ namespace SharpCompress.Common.SevenZip
private void WaitAttribute(BlockType attribute)
{
for (; ; )
{
BlockType? type = ReadId();
if (type == attribute)
@@ -452,7 +452,7 @@ namespace SharpCompress.Common.SevenZip
#endif
BlockType? type;
for (; ; )
{
type = ReadId();
if (type == BlockType.End)
@@ -508,7 +508,7 @@ namespace SharpCompress.Common.SevenZip
int index = 0;
for (int i = 0; i < numFolders; i++)
{
var f = new CFolder { _firstPackStreamId = index };
folders.Add(f);
GetNextFolderItem(f);
index += f._packStreams.Count;
@@ -539,7 +539,7 @@ namespace SharpCompress.Common.SevenZip
#endif
}
for (; ; )
{
BlockType? type = ReadId();
if (type == BlockType.End)
@@ -580,7 +580,7 @@ namespace SharpCompress.Common.SevenZip
numUnpackStreamsInFolders = null;
BlockType? type;
for (; ; )
{
type = ReadId();
if (type == BlockType.NumUnpackStream)
@@ -602,7 +602,7 @@ namespace SharpCompress.Common.SevenZip
#endif
continue;
}
if (type is BlockType.Crc or BlockType.Size)
{
break;
}
@@ -672,7 +672,7 @@ namespace SharpCompress.Common.SevenZip
digests = null;
for (; ; )
{
if (type == BlockType.Crc)
{
@@ -755,7 +755,7 @@ namespace SharpCompress.Common.SevenZip
unpackSizes = null;
digests = null;
for (; ; )
{
switch (ReadId())
{
@@ -791,22 +791,14 @@ namespace SharpCompress.Common.SevenZip
#endif
try
{
ReadStreamsInfo(null,
out long dataStartPos,
out List<long> packSizes,
out List<uint?> packCrCs,
out List<CFolder> folders,
out List<int> numUnpackStreamsInFolders,
out List<long> unpackSizes,
out List<uint?> digests);
dataStartPos += baseOffset;
@@ -934,7 +926,7 @@ namespace SharpCompress.Common.SevenZip
BitVector antiFileVector = null;
int numEmptyStreams = 0;
for (; ; )
{
type = ReadId();
if (type == BlockType.End)
@@ -969,7 +961,7 @@ namespace SharpCompress.Common.SevenZip
#if DEBUG
Log.Write("WinAttributes:");
#endif
ReadAttributeVector(dataVector, numFiles, delegate (int i, uint? attr)
{
// Some third party implementations established an unofficial extension
// of the 7z archive format by placing posix file attributes in the high
@@ -1057,7 +1049,7 @@ namespace SharpCompress.Common.SevenZip
#if DEBUG
Log.Write("StartPos:");
#endif
ReadNumberVector(dataVector, numFiles, delegate (int i, long? startPos)
{
db._files[i].StartPos = startPos;
#if DEBUG
@@ -1072,7 +1064,7 @@ namespace SharpCompress.Common.SevenZip
#if DEBUG
Log.Write("CTime:");
#endif
ReadDateTimeVector(dataVector, numFiles, delegate (int i, DateTime? time)
{
db._files[i].CTime = time;
#if DEBUG
@@ -1087,7 +1079,7 @@ namespace SharpCompress.Common.SevenZip
#if DEBUG
Log.Write("ATime:");
#endif
ReadDateTimeVector(dataVector, numFiles, delegate (int i, DateTime? time)
{
db._files[i].ATime = time;
#if DEBUG
@@ -1102,7 +1094,7 @@ namespace SharpCompress.Common.SevenZip
#if DEBUG
Log.Write("MTime:");
#endif
ReadDateTimeVector(dataVector, numFiles, delegate (int i, DateTime? time)
{
db._files[i].MTime = time;
#if DEBUG
@@ -1445,8 +1437,7 @@ namespace SharpCompress.Common.SevenZip
private Stream GetCachedDecoderStream(ArchiveDatabase db, int folderIndex)
{
if (!_cachedStreams.TryGetValue(folderIndex, out Stream s))
{
CFolder folderInfo = db._folders[folderIndex];
int packStreamIndex = db._folders[folderIndex]._firstPackStreamId;
@@ -1494,7 +1485,7 @@ namespace SharpCompress.Common.SevenZip
int numItems = allFilesMode
? db._files.Count
: indices.Length;
if (numItems == 0)
{
return;
@@ -1526,6 +1517,7 @@ namespace SharpCompress.Common.SevenZip
}
}
byte[] buffer = null;
foreach (CExtractFolderInfo efi in extractFolderInfoVector)
{
int startIndex;
@@ -1562,8 +1554,8 @@ namespace SharpCompress.Common.SevenZip
Stream s = DecoderStreamHelper.CreateDecoderStream(_stream, folderStartPackPos, packSizes,
folderInfo, db.PasswordProvider);
buffer ??= new byte[4 << 10];
for (; ; )
{
int processed = s.Read(buffer, 0, buffer.Length);
if (processed == 0)

View File

@@ -1,6 +1,6 @@
namespace SharpCompress.Common.SevenZip
{
internal readonly struct CMethodId
{
public const ulong K_COPY_ID = 0;
public const ulong K_LZMA_ID = 0x030101;
@@ -24,9 +24,9 @@
return _id.GetHashCode();
}
public override bool Equals(object? obj)
{
return obj is CMethodId other && Equals(other);
}
public bool Equals(CMethodId other)

View File

@@ -161,7 +161,7 @@ namespace SharpCompress.Common.SevenZip
{
int ending = Offset;
for (; ; )
{
if (ending + 2 > _ending)
{

View File

@@ -32,7 +32,7 @@ namespace SharpCompress.Common.SevenZip
public override DateTime? ArchivedTime => null;
public override bool IsEncrypted => FilePart.IsEncrypted;
public override bool IsDirectory => FilePart.Header.IsDir;

View File

@@ -84,23 +84,25 @@ namespace SharpCompress.Common.SevenZip
{
var coder = Folder!._coders.First();
switch (coder._methodId._id)
{
case K_LZMA:
case K_LZMA2:
{
return CompressionType.LZMA;
}
case K_PPMD:
{
return CompressionType.PPMd;
}
case K_B_ZIP2:
{
return CompressionType.BZip2;
}
default:
throw new NotImplementedException();
}
}
internal bool IsEncrypted => Folder!._coders.FindIndex(c => c._methodId._id == CMethodId.K_AES_ID) != -1;
}
}

View File

@@ -7,7 +7,7 @@ using System.Text;
namespace SharpCompress.Common.Tar.Headers
{
internal sealed class TarHeader
{
internal static readonly DateTime EPOCH = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
@@ -97,7 +97,7 @@ namespace SharpCompress.Common.Tar.Headers
{
numPaddingBytes = BLOCK_SIZE;
}
output.Write(stackalloc byte[numPaddingBytes]);
}
internal bool Read(BinaryReader reader)
@@ -260,10 +260,16 @@ namespace SharpCompress.Common.Tar.Headers
return Convert.ToInt64(s);
}
private static readonly byte[] eightSpaces = {
(byte)' ', (byte)' ', (byte)' ', (byte)' ',
(byte)' ', (byte)' ', (byte)' ', (byte)' '
};
internal static int RecalculateChecksum(byte[] buf)
{
// Set default value for checksum. That is 8 spaces.
eightSpaces.CopyTo(buf, 148);
// Calculate checksum
int headerChecksum = 0;
@@ -276,7 +282,7 @@ namespace SharpCompress.Common.Tar.Headers
internal static int RecalculateAltChecksum(byte[] buf)
{
eightSpaces.CopyTo(buf, 148);
int headerChecksum = 0;
foreach (byte b in buf)
{
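
The tar checksum convention behind eightSpaces: sum all 512 header bytes while the 8-byte checksum field at offset 148 counts as ASCII spaces (it cannot contain its own value while being summed). A reference restatement of RecalculateChecksum, illustrative only:

```csharp
using System;

static class TarChecksumDemo
{
    static int Checksum(byte[] header)
    {
        int sum = 0;
        for (int i = 0; i < header.Length; i++)
        {
            // bytes 148..155 are the checksum field itself; count them as ' '
            sum += (i >= 148 && i < 156) ? (byte)' ' : header[i];
        }
        return sum;
    }

    static void Main()
    {
        var header = new byte[512];
        Console.WriteLine(Checksum(header)); // 8 * 32 = 256 for an all-zero header
    }
}
```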

View File

@@ -0,0 +1,50 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.GZip;
namespace SharpCompress.Common.ZStandard
{
public class ZStandardEntry : Entry
{
private readonly ZStandardFilePart _filePart;
internal ZStandardEntry(ZStandardFilePart filePart)
{
_filePart = filePart;
}
public override CompressionType CompressionType => CompressionType.GZip;
public override long Crc => _filePart.Crc ?? 0;
public override string Key => _filePart.FilePartName;
public override string? LinkTarget => null;
public override long CompressedSize => 0;
public override long Size => _filePart.UncompressedSize ?? 0;
public override DateTime? LastModifiedTime => _filePart.DateModified;
public override DateTime? CreatedTime => null;
public override DateTime? LastAccessedTime => null;
public override DateTime? ArchivedTime => null;
public override bool IsEncrypted => false;
public override bool IsDirectory => false;
public override bool IsSplitAfter => false;
internal override IEnumerable<FilePart> Parts => _filePart.AsEnumerable<FilePart>();
internal static IEnumerable<GZipEntry> GetEntries(Stream stream, OptionsBase options)
{
yield return new GZipEntry(new GZipFilePart(stream, options.ArchiveEncoding));
}
}
}

View File

@@ -0,0 +1,37 @@
using System;
using System.IO;
using ZstdSharp;
namespace SharpCompress.Common.ZStandard
{
internal sealed class ZStandardFilePart : FilePart
{
private string _name = "";
private readonly Stream _stream;
internal ZStandardFilePart(Stream stream, ArchiveEncoding archiveEncoding)
: base(archiveEncoding)
{
_stream = stream;
EntryStartPosition = stream.Position;
}
internal long EntryStartPosition { get; }
internal DateTime? DateModified { get; private set; }
internal int? Crc { get; private set; }
internal int? UncompressedSize { get; private set; }
internal override string FilePartName => _name!;
internal override Stream GetCompressedStream()
{
return new DecompressionStream(_stream);
}
internal override Stream GetRawStream()
{
return _stream;
}
}
}
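
GetCompressedStream defers all zstd framing to ZstdSharp. Consuming the part then looks like the following sketch (assumes the ZstdSharp package referenced above; the file names are illustrative):

```csharp
using System.IO;
using ZstdSharp;

static class ZstdDemo
{
    static void Main()
    {
        using var input = File.OpenRead("example.zst");
        // wrap the raw stream; reads come back decompressed
        using var decompressor = new DecompressionStream(input);
        using var output = File.Create("example.bin");
        decompressor.CopyTo(output);
    }
}
```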

View File

@@ -0,0 +1,23 @@
using System.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common.ZStandard
{
public class ZStandardVolume : Volume
{
public ZStandardVolume(Stream stream, ReaderOptions options)
: base(stream, options)
{
}
public ZStandardVolume(FileInfo fileInfo, ReaderOptions options)
: base(fileInfo.OpenRead(), options)
{
options.LeaveStreamOpen = false;
}
public override bool IsFirstVolume => true;
public override bool IsMultiVolume => true;
}
}

View File

@@ -32,7 +32,7 @@ namespace SharpCompress.Common.Zip.Headers
byte[] name = reader.ReadBytes(nameLength);
byte[] extra = reader.ReadBytes(extraLength);
byte[] comment = reader.ReadBytes(commentLength);
// According to .ZIP File Format Specification
//
// For example: https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT
@@ -40,7 +40,7 @@ namespace SharpCompress.Common.Zip.Headers
// Bit 11: Language encoding flag (EFS). If this bit is set,
// the filename and comment fields for this file
// MUST be encoded using UTF-8. (see APPENDIX D)
if (Flags.HasFlag(HeaderFlags.Efs))
{
Name = ArchiveEncoding.DecodeUTF8(name);
@@ -63,6 +63,8 @@ namespace SharpCompress.Common.Zip.Headers
var zip64ExtraData = Extra.OfType<Zip64ExtendedInformationExtraField>().FirstOrDefault();
if (zip64ExtraData != null)
{
zip64ExtraData.Process(UncompressedSize, CompressedSize, RelativeOffsetOfEntryHeader, DiskNumberStart);
if (CompressedSize == uint.MaxValue)
{
CompressedSize = zip64ExtraData.CompressedSize;

View File

@@ -24,7 +24,7 @@ namespace SharpCompress.Common.Zip.Headers
ushort extraLength = reader.ReadUInt16();
byte[] name = reader.ReadBytes(nameLength);
byte[] extra = reader.ReadBytes(extraLength);
// According to .ZIP File Format Specification
//
// For example: https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT
@@ -32,7 +32,7 @@ namespace SharpCompress.Common.Zip.Headers
// Bit 11: Language encoding flag (EFS). If this bit is set,
// the filename and comment fields for this file
// MUST be encoded using UTF-8. (see APPENDIX D)
if (Flags.HasFlag(HeaderFlags.Efs))
{
Name = ArchiveEncoding.DecodeUTF8(name);
@@ -41,7 +41,7 @@ namespace SharpCompress.Common.Zip.Headers
{
Name = ArchiveEncoding.Decode(name);
}
LoadExtra(extra);
var unicodePathExtra = Extra.FirstOrDefault(u => u.Type == ExtraDataType.UnicodePathExtraField);
@@ -53,6 +53,8 @@ namespace SharpCompress.Common.Zip.Headers
var zip64ExtraData = Extra.OfType<Zip64ExtendedInformationExtraField>().FirstOrDefault();
if (zip64ExtraData != null)
{
zip64ExtraData.Process(UncompressedSize, CompressedSize, 0, 0);
if (CompressedSize == uint.MaxValue)
{
CompressedSize = zip64ExtraData.CompressedSize;

View File

@@ -1,6 +1,4 @@
using System;
using System.Buffers.Binary;
using System.Text;
@@ -20,13 +18,25 @@ namespace SharpCompress.Common.Zip.Headers
internal class ExtraData
{
public ExtraData(ExtraDataType type, ushort length, byte[] dataBytes)
{
Type = type;
Length = length;
DataBytes = dataBytes;
}
internal ExtraDataType Type { get; }
internal ushort Length { get; }
internal byte[] DataBytes { get; }
}
internal sealed class ExtraUnicodePathExtraField : ExtraData
{
public ExtraUnicodePathExtraField(ExtraDataType type, ushort length, byte[] dataBytes)
: base(type, length, dataBytes)
{
}
internal byte Version => DataBytes[0];
internal byte[] NameCrc32
@@ -51,70 +61,79 @@ namespace SharpCompress.Common.Zip.Headers
}
}
internal sealed class Zip64ExtendedInformationExtraField : ExtraData
{
public Zip64ExtendedInformationExtraField(ExtraDataType type, ushort length, byte[] dataBytes)
: base(type, length, dataBytes)
{
}
// From the spec, values are only in the extradata if the standard
// value is set to 0xFFFFFFFF (or 0xFFFF for the Disk Start Number).
// Values, if present, must appear in the following order:
// - Original Size
// - Compressed Size
// - Relative Header Offset
// - Disk Start Number
public void Process(long uncompressedFileSize, long compressedFileSize, long relativeHeaderOffset, ushort diskNumber)
{
var bytesRequired = ((uncompressedFileSize == uint.MaxValue) ? 8 : 0)
+ ((compressedFileSize == uint.MaxValue) ? 8 : 0)
+ ((relativeHeaderOffset == uint.MaxValue) ? 8 : 0)
+ ((diskNumber == ushort.MaxValue) ? 4 : 0);
var currentIndex = 0;
if (bytesRequired > DataBytes.Length)
{
throw new ArchiveException("Zip64 extended information extra field is not large enough for the required information");
}
if (uncompressedFileSize == uint.MaxValue)
{
UncompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(currentIndex));
currentIndex += 8;
}
if (compressedFileSize == uint.MaxValue)
{
CompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(currentIndex));
currentIndex += 8;
}
if (relativeHeaderOffset == uint.MaxValue)
{
RelativeOffsetOfEntryHeader = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(currentIndex));
currentIndex += 8;
}
if (diskNumber == ushort.MaxValue)
{
VolumeNumber = BinaryPrimitives.ReadUInt32LittleEndian(DataBytes.AsSpan(currentIndex));
}
}
/// <summary>
/// Uncompressed file size. Only valid after <see cref="Process(long, long, long, ushort)"/> has been called and if the
/// original entry header had a corresponding 0xFFFFFFFF value.
/// </summary>
public long UncompressedSize { get; private set; }
/// <summary>
/// Compressed file size. Only valid after <see cref="Process(long, long, long, ushort)"/> has been called and if the
/// original entry header had a corresponding 0xFFFFFFFF value.
/// </summary>
public long CompressedSize { get; private set; }
/// <summary>
/// Relative offset of the entry header. Only valid after <see cref="Process(long, long, long, ushort)"/> has been called and if the
/// original entry header had a corresponding 0xFFFFFFFF value.
/// </summary>
public long RelativeOffsetOfEntryHeader { get; private set; }
/// <summary>
/// Volume number. Only valid after <see cref="Process(long, long, long, ushort)"/> has been called and if the
/// original entry header had a corresponding 0xFFFF value.
/// </summary>
public uint VolumeNumber { get; private set; }
}
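
Caller-side, the pattern is: pass the possibly saturated 32/16-bit header values to Process, then replace any sentinel values with the 64-bit ones parsed here, exactly as the directory and local entry headers earlier in this diff do. A fragment-style sketch (types simplified; assumes the Zip64ExtendedInformationExtraField class above):

```csharp
// Hypothetical helper showing the sentinel-replacement convention.
static void ApplyZip64(Zip64ExtendedInformationExtraField zip64,
                       ref long compressedSize, ref long uncompressedSize,
                       ref long headerOffset, ref ushort diskNumber)
{
    zip64.Process(uncompressedSize, compressedSize, headerOffset, diskNumber);
    if (compressedSize == uint.MaxValue) compressedSize = zip64.CompressedSize;
    if (uncompressedSize == uint.MaxValue) uncompressedSize = zip64.UncompressedSize;
    if (headerOffset == uint.MaxValue) headerOffset = zip64.RelativeOffsetOfEntryHeader;
    if (diskNumber == ushort.MaxValue) diskNumber = (ushort)zip64.VolumeNumber;
}
```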
@@ -122,30 +141,12 @@ namespace SharpCompress.Common.Zip.Headers
{
internal static ExtraData Create(ExtraDataType type, ushort length, byte[] extraData)
{
return type switch
{
ExtraDataType.UnicodePathExtraField => new ExtraUnicodePathExtraField(type, length, extraData),
ExtraDataType.Zip64ExtendedInformationExtraField => new Zip64ExtendedInformationExtraField(type, length, extraData),
_ => new ExtraData(type, length, extraData)
};
}
}
}

View File

@@ -20,7 +20,7 @@ namespace SharpCompress.Common.Zip.Headers
{
get
{
if (Name.EndsWith('/'))
{
return true;
}
@@ -28,7 +28,7 @@ namespace SharpCompress.Common.Zip.Headers
//.NET Framework 4.5 : System.IO.Compression::CreateFromDirectory() probably writes backslashes to headers
return CompressedSize == 0
&& UncompressedSize == 0
&& Name.EndsWith('\\');
}
}
@@ -105,6 +105,6 @@ namespace SharpCompress.Common.Zip.Headers
internal ZipFilePart Part { get; set; }
internal bool IsZip64 => CompressedSize >= uint.MaxValue;
}
}

View File

@@ -7,7 +7,7 @@ namespace SharpCompress.Common.Zip
internal class PkwareTraditionalEncryptionData
{
private static readonly CRC32 CRC32 = new CRC32();
private readonly UInt32[] _keys = { 0x12345678, 0x23456789, 0x34567890 };
private readonly ArchiveEncoding _archiveEncoding;
private PkwareTraditionalEncryptionData(string password, ArchiveEncoding archiveEncoding)

View File

@@ -8,7 +8,10 @@ namespace SharpCompress.Common.Zip
{
internal sealed class SeekableZipHeaderFactory : ZipHeaderFactory
{
private const int MINIMUM_EOCD_LENGTH = 22;
private const int ZIP64_EOCD_LENGTH = 20;
// Comment may be within 64kb + structure 22 bytes
private const int MAX_SEARCH_LENGTH_FOR_EOCD = 65557;
private bool _zip64;
internal SeekableZipHeaderFactory(string? password, ArchiveEncoding archiveEncoding)
@@ -20,14 +23,24 @@ namespace SharpCompress.Common.Zip
{
var reader = new BinaryReader(stream);
SeekBackToHeader(stream, reader);
var eocd_location = stream.Position;
var entry = new DirectoryEndHeader();
entry.Read(reader);
if (entry.IsZip64)
{
_zip64 = true;
// ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR should be before the EOCD
stream.Seek(eocd_location - ZIP64_EOCD_LENGTH - 4, SeekOrigin.Begin);
uint zip64_locator = reader.ReadUInt32();
if (zip64_locator != ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR)
{
throw new ArchiveException("Failed to locate the Zip64 Directory Locator");
}
var zip64Locator = new Zip64DirectoryEndLocatorHeader();
zip64Locator.Read(reader);
@@ -73,27 +86,50 @@ namespace SharpCompress.Common.Zip
}
}
private static bool IsMatch(byte[] haystack, int position, byte[] needle)
{
for (int i = 0; i < needle.Length; i++)
{
if (haystack[position + i] != needle[i])
{
return false;
}
}
return true;
}
private static void SeekBackToHeader(Stream stream, BinaryReader reader)
{
// Minimum EOCD length
if (stream.Length < MINIMUM_EOCD_LENGTH)
{
throw new ArchiveException("Could not find Zip file Directory at the end of the file. File may be corrupted.");
}
int len = stream.Length < MAX_SEARCH_LENGTH_FOR_EOCD ? (int)stream.Length : MAX_SEARCH_LENGTH_FOR_EOCD;
// We search for the marker in reverse to find the first occurrence
byte[] needle = { 0x06, 0x05, 0x4b, 0x50 };
stream.Seek(-len, SeekOrigin.End);
byte[] seek = reader.ReadBytes(len);
// Search in reverse
Array.Reverse(seek);
// don't exclude the minimum eocd region, otherwise you fail to locate the header in empty zip files
var max_search_area = len; // - MINIMUM_EOCD_LENGTH;
for (int pos_from_end = 0; pos_from_end < max_search_area; ++pos_from_end)
{
if (IsMatch(seek, pos_from_end, needle))
{
stream.Seek(-pos_from_end, SeekOrigin.End);
return;
}
}
throw new ArchiveException("Failed to locate the Zip Header");
}
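
The 65557-byte window above is not arbitrary: the End Of Central Directory record is at least 22 bytes, and the trailing ZIP comment it carries has a ushort length, so at most 65535 bytes can follow it. A one-line check of that arithmetic:

```csharp
using System;

static class EocdMathDemo
{
    static void Main()
    {
        const int MinimumEocd = 22;                    // fixed EOCD fields
        const int MaxCommentLength = ushort.MaxValue;  // 65535-byte comment cap
        Console.WriteLine(MinimumEocd + MaxCommentLength); // 65557
    }
}
```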
internal LocalEntryHeader GetLocalHeader(Stream stream, DirectoryEntryHeader directoryEntryHeader)

View File

@@ -42,7 +42,7 @@ namespace SharpCompress.Common.Zip
if (Header.HasData && !Skipped)
{
_decompressionStream ??= GetCompressedStream();
_decompressionStream.Skip();
if (_decompressionStream is DeflateStream deflateStream)

View File

@@ -49,7 +49,10 @@ namespace SharpCompress.Common.Zip
_lastEntryHeader = null;
uint headerBytes = reader.ReadUInt32();
header = ReadHeader(headerBytes, reader);
if (header is null)
{
yield break;
}
//entry could be zero bytes so we need to know that.
if (header.ZipHeaderType == ZipHeaderType.LocalEntry)
@@ -57,11 +60,11 @@ namespace SharpCompress.Common.Zip
var local_header = ((LocalEntryHeader)header);
// If we have CompressedSize, there is data to be read
if (local_header.CompressedSize > 0)
{
header.HasData = true;
} // Check if zip is streaming ( Length is 0 and is declared in PostDataDescriptor )
else if (local_header.Flags.HasFlag(HeaderFlags.UsePostDataDescriptor))
{
bool isRecording = rewindableStream.IsRecording;
if (!isRecording)

View File

@@ -75,7 +75,7 @@ namespace SharpCompress.Common.Zip
if (disposing)
{
//read out last 10 auth bytes
Span<byte> ten = stackalloc byte[10];
_stream.ReadFully(ten);
_stream.Dispose();
}

View File

@@ -33,8 +33,10 @@ namespace SharpCompress.Common.Zip
private int KeySizeInBytes
{
get
{
return KeyLengthInBytes(_keySize);
}
}
internal static int KeyLengthInBytes(WinzipAesKeySize keySize)

View File

@@ -27,33 +27,33 @@ namespace SharpCompress.Common.Zip
switch (_filePart.Header.CompressionMethod)
{
case ZipCompressionMethod.BZip2:
{
return CompressionType.BZip2;
}
case ZipCompressionMethod.Deflate:
{
return CompressionType.Deflate;
}
case ZipCompressionMethod.Deflate64:
{
return CompressionType.Deflate64;
}
case ZipCompressionMethod.LZMA:
{
return CompressionType.LZMA;
}
case ZipCompressionMethod.PPMd:
{
return CompressionType.PPMd;
}
case ZipCompressionMethod.None:
{
return CompressionType.None;
}
default:
{
return CompressionType.Unknown;
}
}
}
}

View File

@@ -60,72 +60,72 @@ namespace SharpCompress.Common.Zip
switch (method)
{
case ZipCompressionMethod.None:
{
return stream;
}
case ZipCompressionMethod.Deflate:
{
return new DeflateStream(stream, CompressionMode.Decompress);
}
case ZipCompressionMethod.Deflate64:
{
return new Deflate64Stream(stream, CompressionMode.Decompress);
}
case ZipCompressionMethod.BZip2:
{
return new BZip2Stream(stream, CompressionMode.Decompress, false);
}
case ZipCompressionMethod.LZMA:
{
if (FlagUtility.HasFlag(Header.Flags, HeaderFlags.Encrypted))
{
throw new NotSupportedException("LZMA with pkware encryption.");
}
var reader = new BinaryReader(stream);
reader.ReadUInt16(); //LZMA version
var props = new byte[reader.ReadUInt16()];
reader.Read(props, 0, props.Length);
return new LzmaStream(props, stream,
Header.CompressedSize > 0 ? Header.CompressedSize - 4 - props.Length : -1,
FlagUtility.HasFlag(Header.Flags, HeaderFlags.Bit1)
? -1
: (long)Header.UncompressedSize);
}
case ZipCompressionMethod.PPMd:
{
Span<byte> props = stackalloc byte[2];
stream.ReadFully(props);
return new PpmdStream(new PpmdProperties(props), stream, false);
}
case ZipCompressionMethod.WinzipAes:
{
ExtraData? data = Header.Extra.SingleOrDefault(x => x.Type == ExtraDataType.WinZipAes);
if (data is null)
{
throw new InvalidFormatException("No Winzip AES extra data found.");
}
if (data.Length != 7)
{
throw new InvalidFormatException("Winzip data length is not 7.");
}
ushort compressedMethod = BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes);
if (compressedMethod != 0x01 && compressedMethod != 0x02)
{
throw new InvalidFormatException("Unexpected vendor version number for WinZip AES metadata");
}
ushort vendorId = BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes.AsSpan(2));
if (vendorId != 0x4541)
{
throw new InvalidFormatException("Unexpected vendor ID for WinZip AES metadata");
}
return CreateDecompressionStream(stream, (ZipCompressionMethod)BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes.AsSpan(5)));
}
default:
{
throw new NotSupportedException("CompressionMethod: " + Header.CompressionMethod);
}
}
}
@@ -159,23 +159,23 @@ namespace SharpCompress.Common.Zip
case ZipCompressionMethod.BZip2:
case ZipCompressionMethod.LZMA:
case ZipCompressionMethod.PPMd:
{
return new PkwareTraditionalCryptoStream(plainStream, Header.ComposeEncryptionData(plainStream), CryptoMode.Decrypt);
}
case ZipCompressionMethod.WinzipAes:
{
if (Header.WinzipAesEncryptionData != null)
{
return new WinzipAesCryptoStream(plainStream, Header.WinzipAesEncryptionData, Header.CompressedSize - 10);
}
return plainStream;
}
default:
{
throw new ArgumentOutOfRangeException();
}
}
}

View File

@@ -35,20 +35,20 @@ namespace SharpCompress.Common.Zip
switch (headerBytes)
{
case ENTRY_HEADER_BYTES:
{
var entryHeader = new LocalEntryHeader(_archiveEncoding);
entryHeader.Read(reader);
LoadHeader(entryHeader, reader.BaseStream);
_lastEntryHeader = entryHeader;
return entryHeader;
}
case DIRECTORY_START_HEADER_BYTES:
{
var entry = new DirectoryEntryHeader(_archiveEncoding);
entry.Read(reader);
return entry;
}
case POST_DATA_DESCRIPTOR:
{
if (FlagUtility.HasFlag(_lastEntryHeader!.Flags, HeaderFlags.UsePostDataDescriptor))
@@ -129,7 +129,7 @@ namespace SharpCompress.Common.Zip
if (entryHeader.CompressionMethod == ZipCompressionMethod.WinzipAes)
{
ExtraData? data = entryHeader.Extra.SingleOrDefault(x => x.Type == ExtraDataType.WinZipAes);
if (data != null)
{
var keySize = (WinzipAesKeySize)data.DataBytes[4];

View File

@@ -52,32 +52,24 @@ namespace SharpCompress.Compressors.ADC
private static int GetChunkSize(byte byt)
{
return GetChunkType(byt) switch
{
PLAIN => (byt & 0x7F) + 1,
TWO_BYTE => ((byt & 0x3F) >> 2) + 3,
THREE_BYTE => (byt & 0x3F) + 4,
_ => -1,
};
}
private static int GetOffset(ReadOnlySpan<byte> chunk)
{
return GetChunkType(chunk[0]) switch
{
PLAIN => 0,
TWO_BYTE => ((chunk[0] & 0x03) << 8) + chunk[1],
THREE_BYTE => (chunk[1] << 8) + chunk[2],
_ => -1,
};
}
/// <summary>

View File

@@ -1,8 +1,9 @@
using System;
using System.IO;
namespace SharpCompress.Compressors.BZip2
{
public sealed class BZip2Stream : Stream
{
private readonly Stream stream;
private bool isDisposed;
@@ -82,6 +83,20 @@ namespace SharpCompress.Compressors.BZip2
stream.SetLength(value);
}
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override int Read(Span<byte> buffer)
{
return stream.Read(buffer);
}
public override void Write(ReadOnlySpan<byte> buffer)
{
stream.Write(buffer);
}
#endif
public override void Write(byte[] buffer, int offset, int count)
{
stream.Write(buffer, offset, count);
@@ -108,4 +123,4 @@ namespace SharpCompress.Compressors.BZip2
return true;
}
}
}
}

View File

@@ -1,5 +1,6 @@
using System;
using System.IO;
/*
@@ -835,7 +836,7 @@ namespace SharpCompress.Compressors.BZip2
private void SetupBlock()
{
Span<int> cftab = stackalloc int[257];
char ch;
cftab[0] = 0;
@@ -854,7 +855,6 @@ namespace SharpCompress.Compressors.BZip2
tt[cftab[ch]] = i;
cftab[ch]++;
}
tPos = tt[origPtr];

View File

@@ -1,4 +1,5 @@
using System;
using System.IO;
/*
* Copyright 2001,2004-2005 The Apache Software Foundation
@@ -88,9 +89,9 @@ namespace SharpCompress.Compressors.BZip2
int nNodes, nHeap, n1, n2, i, j, k;
bool tooLong;
Span<int> heap = stackalloc int[BZip2Constants.MAX_ALPHA_SIZE + 2]; // 1040 bytes
Span<int> weight = stackalloc int[BZip2Constants.MAX_ALPHA_SIZE * 2]; // 1040 bytes
Span<int> parent = stackalloc int[BZip2Constants.MAX_ALPHA_SIZE * 2]; // 1040 bytes
for (i = 0; i < alphaSize; i++)
{
@@ -603,14 +604,7 @@ namespace SharpCompress.Compressors.BZip2
while (bsLive > 0)
{
int ch = (bsBuff >> 24);
bsStream.WriteByte((byte)ch); // write 8-bit
bsBuff <<= 8;
bsLive -= 8;
bytesOut++;
@@ -622,14 +616,7 @@ namespace SharpCompress.Compressors.BZip2
while (bsLive >= 8)
{
int ch = (bsBuff >> 24);
bsStream.WriteByte((byte)ch); // write 8-bit
bsBuff <<= 8;
bsLive -= 8;
bytesOut++;
@@ -1328,8 +1315,8 @@ namespace SharpCompress.Compressors.BZip2
private void MainSort()
{
int i, j, ss, sb;
Span<int> runningOrder = stackalloc int[256];
Span<int> copy = stackalloc int[256];
bool[] bigDone = new bool[256];
int c1, c2;
int numQSorted;

View File

@@ -189,7 +189,7 @@ namespace SharpCompress.Compressors.Deflate
// pre-initialize the crc table for speed of lookup.
private uint gf2_matrix_times(ReadOnlySpan<uint> matrix, uint vec)
{
uint sum = 0;
int i = 0;
@@ -205,7 +205,7 @@ namespace SharpCompress.Compressors.Deflate
return sum;
}
private void gf2_matrix_square(Span<uint> square, Span<uint> mat)
{
for (int i = 0; i < 32; i++)
{
@@ -225,8 +225,8 @@ namespace SharpCompress.Compressors.Deflate
/// <param name="length">the length of data the CRC value was calculated on</param>
public void Combine(int crc, int length)
{
Span<uint> even = stackalloc uint[32]; // even-power-of-two zeros operator
Span<uint> odd = stackalloc uint[32]; // odd-power-of-two zeros operator
if (length == 0)
{

View File

@@ -70,6 +70,8 @@
using System;
using SharpCompress.Algorithms;
namespace SharpCompress.Compressors.Deflate
{
internal sealed partial class DeflateManager
@@ -1685,7 +1687,7 @@ namespace SharpCompress.Compressors.Deflate
Rfc1950BytesEmitted = false;
status = (WantRfc1950HeaderBytes) ? INIT_STATE : BUSY_STATE;
_codec._adler32 = 1;
last_flush = (int)FlushType.None;
@@ -1763,7 +1765,7 @@ namespace SharpCompress.Compressors.Deflate
throw new ZlibException("Stream error.");
}
_codec._adler32 = Adler32.Calculate(_codec._adler32, dictionary);
if (length < MIN_MATCH)
{
@@ -1850,12 +1852,12 @@ namespace SharpCompress.Compressors.Deflate
////putShortMSB((int)(SharedUtils.URShift(_codec._Adler32, 16)));
//putShortMSB((int)((UInt64)_codec._Adler32 >> 16));
//putShortMSB((int)(_codec._Adler32 & 0xffff));
pending[pendingCount++] = (byte)((_codec._adler32 & 0xFF000000) >> 24);
pending[pendingCount++] = (byte)((_codec._adler32 & 0x00FF0000) >> 16);
pending[pendingCount++] = (byte)((_codec._adler32 & 0x0000FF00) >> 8);
pending[pendingCount++] = (byte)(_codec._adler32 & 0x000000FF);
}
_codec._adler32 = 1;
}
// Flush as much pending output as possible
@@ -1968,10 +1970,10 @@ namespace SharpCompress.Compressors.Deflate
}
// Write the zlib trailer (adler32)
pending[pendingCount++] = (byte)((_codec._adler32 & 0xFF000000) >> 24);
pending[pendingCount++] = (byte)((_codec._adler32 & 0x00FF0000) >> 16);
pending[pendingCount++] = (byte)((_codec._adler32 & 0x0000FF00) >> 8);
pending[pendingCount++] = (byte)(_codec._adler32 & 0x000000FF);
//putShortMSB((int)(SharedUtils.URShift(_codec._Adler32, 16)));
//putShortMSB((int)(_codec._Adler32 & 0xffff));

View File

@@ -37,10 +37,9 @@ namespace SharpCompress.Compressors.Deflate
{
internal static readonly DateTime UNIX_EPOCH = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
private string? _comment;
private string? _fileName;
private DateTime? _lastModified;
internal ZlibBaseStream BaseStream;
private bool _disposed;
@@ -274,6 +273,7 @@ namespace SharpCompress.Compressors.Deflate
_firstReadDone = true;
FileName = BaseStream._GzipFileName;
Comment = BaseStream._GzipComment;
LastModified = BaseStream._GzipMtime;
}
return n;
}
@@ -358,6 +358,20 @@ namespace SharpCompress.Compressors.Deflate
}
}
public DateTime? LastModified
{
get => _lastModified;
set
{
if (_disposed)
{
throw new ObjectDisposedException(nameof(GZipStream));
}
_lastModified = value;
}
}
public string? FileName
{
get => _fileName;
@@ -372,28 +386,19 @@ namespace SharpCompress.Compressors.Deflate
{
return;
}
if (_fileName.Contains('/'))
{
_fileName = _fileName.Replace('/', '\\');
}
if (_fileName.EndsWith('\\'))
{
throw new InvalidOperationException("Illegal filename");
}
if (_fileName.Contains('\\'))
{
// trim any leading path
_fileName = Path.GetFileName(_fileName);
}
}
}
@@ -407,8 +412,8 @@ namespace SharpCompress.Compressors.Deflate
byte[]? filenameBytes = (FileName is null) ? null
: _encoding.GetBytes(FileName);
int cbLength = commentBytes?.Length + 1 ?? 0;
int fnLength = filenameBytes?.Length + 1 ?? 0;
int bufferLength = 10 + cbLength + fnLength;
var header = new byte[bufferLength];
@@ -434,7 +439,7 @@ namespace SharpCompress.Compressors.Deflate
header[i++] = flag;
// mtime
if (LastModified is null)
{
LastModified = DateTime.Now;
}
@@ -456,7 +461,7 @@ namespace SharpCompress.Compressors.Deflate
// filename
if (fnLength != 0)
{
Array.Copy(filenameBytes!, 0, header, i, fnLength - 1);
i += fnLength - 1;
header[i++] = 0; // terminate
}
@@ -464,7 +469,7 @@ namespace SharpCompress.Compressors.Deflate
// comment
if (cbLength != 0)
{
Array.Copy(commentBytes!, 0, header, i, cbLength - 1);
i += cbLength - 1;
header[i++] = 0; // terminate
}
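
For orientation, the fixed 10-byte member header this code assembles is the RFC 1952 layout: magic 1F 8B, CM = 8 (deflate), a flag byte with FNAME (0x08) and FCOMMENT (0x10) bits, a 4-byte little-endian Unix mtime, then XFL and OS. A stand-alone sketch; the field offsets follow the spec, not necessarily the library's internal buffer bookkeeping.

```csharp
using System;

static class GzipHeaderDemo
{
    static byte[] BuildHeader(bool hasName, bool hasComment, uint unixMtime)
    {
        var header = new byte[10];
        header[0] = 0x1f;             // ID1
        header[1] = 0x8b;             // ID2
        header[2] = 8;                // CM = deflate
        byte flag = 0;
        if (hasName) flag |= 0x08;    // FNAME follows the fixed header
        if (hasComment) flag |= 0x10; // FCOMMENT follows the name
        header[3] = flag;
        header[4] = (byte)(unixMtime & 0xFF);         // MTIME, little-endian
        header[5] = (byte)((unixMtime >> 8) & 0xFF);
        header[6] = (byte)((unixMtime >> 16) & 0xFF);
        header[7] = (byte)((unixMtime >> 24) & 0xFF);
        header[8] = 0;                // XFL
        header[9] = 0xFF;             // OS = unknown
        return header;
    }

    static void Main() =>
        Console.WriteLine(BitConverter.ToString(BuildHeader(true, false, 1_600_000_000)));
}
```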

View File

@@ -65,6 +65,8 @@
using System;
using SharpCompress.Algorithms;
namespace SharpCompress.Compressors.Deflate
{
internal sealed class InflateBlocks
@@ -72,7 +74,7 @@ namespace SharpCompress.Compressors.Deflate
private const int MANY = 1440;
// Table for deflate from PKZIP's appnote.txt.
internal static readonly int[] border = { 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 };
internal ZlibCodec _codec; // pointer back to this zlib stream
internal int[] bb = new int[1]; // bit length tree depth
@@ -118,7 +120,7 @@ namespace SharpCompress.Compressors.Deflate
if (checkfn != null)
{
_codec._adler32 = check = 1;
}
return oldCheck;
}
@@ -559,35 +561,35 @@ namespace SharpCompress.Compressors.Deflate
}
tb[0] = -1;
{
var bl = new[] { 9 }; // must be <= 9 for lookahead assumptions
var bd = new[] { 6 }; // must be <= 9 for lookahead assumptions
var tl = new int[1];
var td = new int[1];
t = table;
t = inftree.inflate_trees_dynamic(257 + (t & 0x1f), 1 + ((t >> 5) & 0x1f), blens, bl, bd, tl,
td, hufts, _codec);
if (t != ZlibConstants.Z_OK)
{
if (t == ZlibConstants.Z_DATA_ERROR)
{
blens = null;
mode = InflateBlockMode.BAD;
}
r = t;
bitb = b;
bitk = k;
_codec.AvailableBytesIn = n;
_codec.TotalBytesIn += p - _codec.NextIn;
_codec.NextIn = p;
writeAt = q;
return Flush(r);
}
codes.Init(bl[0], bd[0], hufts, tl[0], hufts, td[0]);
}
mode = InflateBlockMode.CODES;
goto case InflateBlockMode.CODES;
@@ -739,7 +741,7 @@ namespace SharpCompress.Compressors.Deflate
// update check information
if (checkfn != null)
{
_codec._Adler32 = check = Adler.Adler32(check, window, readAt, nBytes);
_codec._adler32 = check = Adler32.Calculate(check, window.AsSpan(readAt, nBytes));
}
// copy as far as end of window
@@ -1565,7 +1567,7 @@ namespace SharpCompress.Compressors.Deflate
private const int PRESET_DICT = 0x20;
private const int Z_DEFLATED = 8;
private static readonly byte[] mark = {0, 0, 0xff, 0xff};
private static readonly byte[] mark = { 0, 0, 0xff, 0xff };
internal ZlibCodec _codec; // pointer back to this zlib stream
internal InflateBlocks blocks; // current inflate_blocks state
@@ -1764,7 +1766,7 @@ namespace SharpCompress.Compressors.Deflate
_codec.AvailableBytesIn--;
_codec.TotalBytesIn++;
expectedCheck += (uint)(_codec.InputBuffer[_codec.NextIn++] & 0x000000ff);
_codec._Adler32 = expectedCheck;
_codec._adler32 = expectedCheck;
mode = InflateManagerMode.DICT0;
return ZlibConstants.Z_NEED_DICT;
@@ -1879,12 +1881,12 @@ namespace SharpCompress.Compressors.Deflate
throw new ZlibException("Stream error.");
}
if (Adler.Adler32(1, dictionary, 0, dictionary.Length) != _codec._Adler32)
if (Adler32.Calculate(1, dictionary) != _codec._adler32)
{
return ZlibConstants.Z_DATA_ERROR;
}
_codec._Adler32 = Adler.Adler32(0, null, 0, 0);
_codec._adler32 = 1;
if (length >= (1 << wbits))
{

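Editor's note: the `_codec._adler32 = ... = 1` replacements above rely on Adler-32's definition: the checksum starts with s1 = 1 and s2 = 0, so the checksum of no data is (s2 << 16) | s1 = 1, which is exactly what the removed Adler.Adler32(0, null, 0, 0) returned for a null buffer. A hypothetical one-liner using the Adler32 sketch shown after the removed class further down:

uint fresh = Adler32Sketch.Calculate(1, ReadOnlySpan<byte>.Empty); // == 1
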
View File

@@ -374,7 +374,7 @@ namespace SharpCompress.Compressors.Deflate
};
// extra bits for each bit length code
internal static readonly int[] extra_blbits = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 7};
internal static readonly int[] extra_blbits = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 7 };
internal static readonly StaticTree Literals;
internal static readonly StaticTree Distances;
@@ -405,88 +405,4 @@ namespace SharpCompress.Compressors.Deflate
BitLengths = new StaticTree(null, extra_blbits, 0, InternalConstants.BL_CODES, InternalConstants.MAX_BL_BITS);
}
}
/// <summary>
/// Computes an Adler-32 checksum.
/// </summary>
/// <remarks>
/// The Adler checksum is similar to a CRC checksum, but faster to compute, though less
/// reliable. It is used in producing RFC1950 compressed streams. The Adler checksum
/// is a required part of the "ZLIB" standard. Applications will almost never need to
/// use this class directly.
/// </remarks>
internal sealed class Adler
{
// largest prime smaller than 65536
private static readonly uint BASE = 65521U;
// NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1
private static readonly int NMAX = 5552;
internal static uint Adler32(uint adler, byte[]? buf, int index, int len)
{
if (buf is null)
{
return 1;
}
uint s1 = adler & 0xffffU;
uint s2 = (adler >> 16) & 0xffffU;
while (len > 0)
{
int k = len < NMAX ? len : NMAX;
len -= k;
while (k >= 16)
{
//s1 += (buf[index++] & 0xff); s2 += s1;
s1 += buf[index++];
s2 += s1;
s1 += buf[index++];
s2 += s1;
s1 += buf[index++];
s2 += s1;
s1 += buf[index++];
s2 += s1;
s1 += buf[index++];
s2 += s1;
s1 += buf[index++];
s2 += s1;
s1 += buf[index++];
s2 += s1;
s1 += buf[index++];
s2 += s1;
s1 += buf[index++];
s2 += s1;
s1 += buf[index++];
s2 += s1;
s1 += buf[index++];
s2 += s1;
s1 += buf[index++];
s2 += s1;
s1 += buf[index++];
s2 += s1;
s1 += buf[index++];
s2 += s1;
s1 += buf[index++];
s2 += s1;
s1 += buf[index++];
s2 += s1;
k -= 16;
}
if (k != 0)
{
do
{
s1 += buf[index++];
s2 += s1;
}
while (--k != 0);
}
s1 %= BASE;
s2 %= BASE;
}
return (s2 << 16) | s1;
}
}
}
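
Editor's note: the Adler class removed above is superseded by Adler32.Calculate in SharpCompress.Algorithms, which this diff does not show. A minimal span-based sketch of the same algorithm, consistent with the call sites (Calculate(check, window.AsSpan(readAt, nBytes)) and Calculate(1, dictionary)); the real implementation may well be vectorized:

using System;

internal static class Adler32Sketch
{
    private const uint BASE = 65521; // largest prime below 65536
    private const int NMAX = 5552;   // max bytes before the sums can overflow a uint

    public static uint Calculate(uint adler, ReadOnlySpan<byte> buffer)
    {
        uint s1 = adler & 0xffff;
        uint s2 = (adler >> 16) & 0xffff;
        while (!buffer.IsEmpty)
        {
            // Defer the expensive modulo until just before overflow is possible.
            int k = Math.Min(buffer.Length, NMAX);
            foreach (byte b in buffer.Slice(0, k))
            {
                s1 += b;
                s2 += s1;
            }
            s1 %= BASE;
            s2 %= BASE;
            buffer = buffer.Slice(k);
        }
        return (s2 << 16) | s1;
    }
}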

View File

@@ -228,12 +228,12 @@ namespace SharpCompress.Compressors.Deflate
if (_wantCompress)
{
// Emit the GZIP trailer: CRC32 and size mod 2^32
byte[] intBuf = new byte[4];
Span<byte> intBuf = stackalloc byte[4];
BinaryPrimitives.WriteInt32LittleEndian(intBuf, crc.Crc32Result);
_stream.Write(intBuf, 0, 4);
int c2 = (Int32)(crc.TotalBytesRead & 0x00000000FFFFFFFF);
_stream.Write(intBuf);
int c2 = (int)(crc.TotalBytesRead & 0x00000000FFFFFFFF);
BinaryPrimitives.WriteInt32LittleEndian(intBuf, c2);
_stream.Write(intBuf, 0, 4);
_stream.Write(intBuf);
}
else
{
@@ -256,17 +256,15 @@ namespace SharpCompress.Compressors.Deflate
}
// Read and potentially verify the GZIP trailer: CRC32 and size mod 2^32
byte[] trailer = new byte[8];
Span<byte> trailer = stackalloc byte[8];
// workitem 8679
if (_z.AvailableBytesIn != 8)
{
// Make sure we have read to the end of the stream
Array.Copy(_z.InputBuffer, _z.NextIn, trailer, 0, _z.AvailableBytesIn);
_z.InputBuffer.AsSpan(_z.NextIn, _z.AvailableBytesIn).CopyTo(trailer);
int bytesNeeded = 8 - _z.AvailableBytesIn;
int bytesRead = _stream.Read(trailer,
_z.AvailableBytesIn,
bytesNeeded);
int bytesRead = _stream.Read(trailer.Slice(_z.AvailableBytesIn, bytesNeeded));
if (bytesNeeded != bytesRead)
{
throw new ZlibException(String.Format(
@@ -276,12 +274,12 @@ namespace SharpCompress.Compressors.Deflate
}
else
{
Array.Copy(_z.InputBuffer, _z.NextIn, trailer, 0, trailer.Length);
_z.InputBuffer.AsSpan(_z.NextIn, trailer.Length).CopyTo(trailer);
}
Int32 crc32_expected = BinaryPrimitives.ReadInt32LittleEndian(trailer);
Int32 crc32_actual = crc.Crc32Result;
Int32 isize_expected = BinaryPrimitives.ReadInt32LittleEndian(trailer.AsSpan(4));
Int32 isize_expected = BinaryPrimitives.ReadInt32LittleEndian(trailer.Slice(4));
Int32 isize_actual = (Int32)(_z.TotalBytesOut & 0x00000000FFFFFFFF);
if (crc32_actual != crc32_expected)
@@ -413,8 +411,8 @@ namespace SharpCompress.Compressors.Deflate
int totalBytesRead = 0;
// read the header on the first read
byte[] header = new byte[10];
int n = _stream.Read(header, 0, header.Length);
Span<byte> header = stackalloc byte[10];
int n = _stream.Read(header);
// workitem 8501: handle edge case (decompress empty stream)
if (n == 0)
@@ -432,16 +430,16 @@ namespace SharpCompress.Compressors.Deflate
throw new ZlibException("Bad GZIP header.");
}
Int32 timet = BinaryPrimitives.ReadInt32LittleEndian(header.AsSpan(4));
int timet = BinaryPrimitives.ReadInt32LittleEndian(header.Slice(4));
_GzipMtime = TarHeader.EPOCH.AddSeconds(timet);
totalBytesRead += n;
if ((header[3] & 0x04) == 0x04)
{
// read and discard extra field
n = _stream.Read(header, 0, 2); // 2-byte length field
n = _stream.Read(header.Slice(0, 2)); // 2-byte length field
totalBytesRead += n;
Int16 extraLength = (Int16)(header[0] + header[1] * 256);
short extraLength = (short)(header[0] + header[1] * 256);
byte[] extra = new byte[extraLength];
n = _stream.Read(extra, 0, extra.Length);
if (n != extraLength)
@@ -504,13 +502,37 @@ namespace SharpCompress.Compressors.Deflate
throw new ZlibException("Cannot Read after Writing.");
}
int rc = 0;
// set up the output of the deflate/inflate codec:
_z.OutputBuffer = buffer;
_z.NextOut = offset;
_z.AvailableBytesOut = count;
if (count == 0)
{
return 0;
}
if (nomoreinput && _wantCompress)
{
return 0; // workitem 8557
// no more input data available; therefore we flush to
// try to complete the read
rc = _z.Deflate(FlushType.Finish);
if (rc != ZlibConstants.Z_OK && rc != ZlibConstants.Z_STREAM_END)
{
throw new ZlibException(String.Format("Deflating: rc={0} msg={1}", rc, _z.Message));
}
rc = (count - _z.AvailableBytesOut);
// calculate CRC after reading
if (crc != null)
{
crc.SlurpBlock(buffer, offset, rc);
}
return rc;
}
if (buffer is null)
{
@@ -529,13 +551,6 @@ namespace SharpCompress.Compressors.Deflate
throw new ArgumentOutOfRangeException(nameof(count));
}
int rc = 0;
// set up the output of the deflate/inflate codec:
_z.OutputBuffer = buffer;
_z.NextOut = offset;
_z.AvailableBytesOut = count;
// This is necessary in case _workingBuffer has been resized. (new byte[])
// (The first reference to _workingBuffer goes through the private accessor which
// may initialize it.)
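
Editor's note: the reworked Read above sets up the codec's output window first and then, when compression-side input is exhausted (nomoreinput && _wantCompress), drains the deflater with FlushType.Finish instead of short-circuiting to 0 as the old workitem 8557 code did. A hedged sketch of that drain-on-finish pattern, using only names visible in this diff:

// Sketch, not the library's method: ask the codec to Finish and report
// how many bytes it still produced into the caller's buffer.
static int DrainDeflate(ZlibCodec z, byte[] output)
{
    z.OutputBuffer = output;
    z.NextOut = 0;
    z.AvailableBytesOut = output.Length;
    int rc = z.Deflate(FlushType.Finish);
    if (rc != ZlibConstants.Z_OK && rc != ZlibConstants.Z_STREAM_END)
    {
        throw new ZlibException(string.Format("Deflating: rc={0} msg={1}", rc, z.Message));
    }
    return output.Length - z.AvailableBytesOut; // bytes actually produced
}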

View File

@@ -137,7 +137,7 @@ namespace SharpCompress.Compressors.Deflate
internal DeflateManager dstate;
internal InflateManager istate;
internal uint _Adler32;
internal uint _adler32;
/// <summary>
/// The compression level to use in this codec. Useful only in compression mode.
@@ -173,7 +173,7 @@ namespace SharpCompress.Compressors.Deflate
/// <summary>
/// The Adler32 checksum on the data transferred through the codec so far. You probably don't need to look at this.
/// </summary>
public int Adler32 => (int)_Adler32;
public int Adler32 => (int)_adler32;
/// <summary>
/// Create a ZlibCodec.
@@ -737,7 +737,7 @@ namespace SharpCompress.Compressors.Deflate
if (dstate.WantRfc1950HeaderBytes)
{
_Adler32 = Adler.Adler32(_Adler32, InputBuffer, NextIn, len);
_adler32 = Algorithms.Adler32.Calculate(_adler32, InputBuffer.AsSpan(NextIn, len));
}
Array.Copy(InputBuffer, NextIn, buf, start, len);
NextIn += len;

View File

@@ -111,7 +111,7 @@ namespace SharpCompress.Compressors.Deflate
/// <summary>
/// The size of the working buffer used in the ZlibCodec class. Defaults to 8192 bytes.
/// </summary>
#if NETCF
#if NETCF
public const int WorkingBufferSizeDefault = 8192;
#else
public const int WorkingBufferSizeDefault = 16384;

View File

@@ -2,6 +2,7 @@
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Diagnostics;
namespace SharpCompress.Compressors.Deflate64
@@ -10,7 +11,7 @@ namespace SharpCompress.Compressors.Deflate64
{
// static information for encoding, DO NOT MODIFY
internal static readonly byte[] FAST_ENCODER_TREE_STRUCTURE_DATA =
internal static ReadOnlySpan<byte> FAST_ENCODER_TREE_STRUCTURE_DATA => new byte[]
{
0xec,0xbd,0x07,0x60,0x1c,0x49,0x96,0x25,0x26,0x2f,0x6d,0xca,
0x7b,0x7f,0x4a,0xf5,0x4a,0xd7,0xe0,0x74,0xa1,0x08,0x80,0x60,
@@ -23,7 +24,7 @@ namespace SharpCompress.Compressors.Deflate64
0x1f,0x3f
};
internal static readonly byte[] B_FINAL_FAST_ENCODER_TREE_STRUCTURE_DATA =
internal static ReadOnlySpan<byte> B_FINAL_FAST_ENCODER_TREE_STRUCTURE_DATA => new byte[]
{
0xed,0xbd,0x07,0x60,0x1c,0x49,0x96,0x25,0x26,0x2f,0x6d,0xca,
0x7b,0x7f,0x4a,0xf5,0x4a,0xd7,0xe0,0x74,0xa1,0x08,0x80,0x60,

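Editor's note: changing these `static readonly byte[]` fields to `static ReadOnlySpan<byte>` properties returning a constant `new byte[] { ... }` is a recognized C# pattern: for constant byte data the compiler emits the bytes into the assembly's data section, and each access yields an allocation-free span over that data instead of allocating and copying an array at type initialization. A tiny illustration with hypothetical names:

using System;

static class StaticDataDemo
{
    // Compiles to a span over data baked into the assembly; no heap allocation.
    static ReadOnlySpan<byte> Magic => new byte[] { 0x1f, 0x8b, 0x08 };

    static bool StartsWithMagic(ReadOnlySpan<byte> input) =>
        input.Length >= Magic.Length && input.Slice(0, Magic.Length).SequenceEqual(Magic);
}
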
View File

@@ -2,6 +2,7 @@
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Diagnostics;
using System.IO;
@@ -118,14 +119,14 @@ namespace SharpCompress.Compressors.Deflate64
// This algorithm is described in standard RFC 1951
private uint[] CalculateHuffmanCode()
{
uint[] bitLengthCount = new uint[17];
Span<uint> bitLengthCount = stackalloc uint[17];
foreach (int codeLength in _codeLengthArray)
{
bitLengthCount[codeLength]++;
}
bitLengthCount[0] = 0; // clear count for length 0
uint[] nextCode = new uint[17];
Span<uint> nextCode = stackalloc uint[17];
uint tempCode = 0;
for (int bits = 1; bits <= 16; bits++)
{

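Editor's note: CalculateHuffmanCode implements the canonical-code construction of RFC 1951 section 3.2.2: count the codes of each bit length, then derive the smallest code for each length. The stackalloc change only moves the two 17-entry scratch arrays off the heap. A compact standalone sketch of the numbering step:

using System;

static class CanonicalHuffman
{
    // codeLengths[i] is the bit length (1..16) of symbol i, or 0 if unused.
    // Returns the canonical code for each symbol, per RFC 1951 3.2.2.
    public static uint[] AssignCodes(ReadOnlySpan<int> codeLengths)
    {
        Span<uint> bitLengthCount = stackalloc uint[17];
        foreach (int len in codeLengths)
        {
            bitLengthCount[len]++;
        }
        bitLengthCount[0] = 0; // length 0 means "symbol not present"

        Span<uint> nextCode = stackalloc uint[17];
        uint code = 0;
        for (int bits = 1; bits <= 16; bits++)
        {
            code = (code + bitLengthCount[bits - 1]) << 1;
            nextCode[bits] = code; // smallest code of this length
        }

        var codes = new uint[codeLengths.Length];
        for (int i = 0; i < codeLengths.Length; i++)
        {
            if (codeLengths[i] != 0)
            {
                codes[i] = nextCode[codeLengths[i]]++;
            }
        }
        return codes;
    }
}
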
View File

@@ -114,7 +114,7 @@ namespace SharpCompress.Compressors.Deflate64
private void Reset()
{
_state = //_hasFormatReader ?
//InflaterState.ReadingHeader : // start by reading Header info
//InflaterState.ReadingHeader : // start by reading Header info
InflaterState.ReadingBFinal; // start by reading BFinal bit
}
@@ -294,7 +294,7 @@ namespace SharpCompress.Compressors.Deflate64
//if (_hasFormatReader)
// _state = InflaterState.StartReadingFooter;
//else
_state = InflaterState.Done;
_state = InflaterState.Done;
}
return result;
}

View File

@@ -38,14 +38,14 @@ namespace SharpCompress.Compressors.Deflate64
public void WriteLengthDistance(int length, int distance)
{
Debug.Assert((_bytesUsed + length) <= WINDOW_SIZE, "No Enough space");
// move backwards distance bytes in the output stream,
// and copy length bytes from this position to the output stream.
_bytesUsed += length;
int copyStart = (_end - distance) & WINDOW_MASK; // start position for coping.
int border = WINDOW_SIZE - length;
if (copyStart <= border && _end < border)
if (copyStart <= border && _end < border)
{
if (length <= distance)
{

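Editor's note: WriteLengthDistance implements the LZ77 back-reference copy: step back `distance` bytes in the circular window (hence the & WINDOW_MASK) and copy `length` bytes forward. The `copyStart <= border && _end < border` test decides when a straight block copy is safe; otherwise the copy must go byte by byte, since the source may re-read bytes just written. A simplified sketch of the general case:

// Simplified circular-window back-reference (window length is a power of two).
// Note: a negative (end - distance) still masks correctly in C#, because
// bitwise AND operates on the two's-complement bits.
static void CopyBackReference(byte[] window, ref int end, int distance, int length)
{
    int mask = window.Length - 1;
    for (int i = 0; i < length; i++)
    {
        window[end & mask] = window[(end - distance) & mask];
        end++;
    }
}
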
View File

@@ -4,9 +4,9 @@ namespace SharpCompress.Compressors.Filters
{
internal class BCJFilter : Filter
{
private static readonly bool[] MASK_TO_ALLOWED_STATUS = {true, true, true, false, true, false, false, false};
private static readonly bool[] MASK_TO_ALLOWED_STATUS = { true, true, true, false, true, false, false, false };
private static readonly int[] MASK_TO_BIT_NUMBER = {0, 1, 2, 2, 3, 3, 3, 3};
private static readonly int[] MASK_TO_BIT_NUMBER = { 0, 1, 2, 2, 3, 3, 3, 3 };
private int _pos;
private int _prevMask;

View File

@@ -6,10 +6,8 @@ using SharpCompress.Compressors.LZMA.Utilites;
namespace SharpCompress.Compressors.LZMA
{
internal class AesDecoderStream : DecoderStream2
internal sealed class AesDecoderStream : DecoderStream2
{
#region Variables
private readonly Stream mStream;
private readonly ICryptoTransform mDecoder;
private readonly byte[] mBuffer;
@@ -20,26 +18,24 @@ namespace SharpCompress.Compressors.LZMA
private int mUnderflow;
private bool isDisposed;
#endregion
#region Stream Methods
public AesDecoderStream(Stream input, byte[] info, IPasswordProvider pass, long limit)
{
mStream = input;
mLimit = limit;
if (((uint) input.Length & 15) != 0)
if (((uint)input.Length & 15) != 0)
{
throw new NotSupportedException("AES decoder does not support padding.");
}
int numCyclesPower;
byte[] salt, seed;
Init(info, out numCyclesPower, out salt, out seed);
Init(info, out int numCyclesPower, out byte[] salt, out byte[] seed);
byte[] password = Encoding.Unicode.GetBytes(pass.CryptoGetTextPassword());
byte[] key = InitKey(numCyclesPower, salt, password);
byte[]? key = InitKey(numCyclesPower, salt, password);
if (key == null)
{
throw new InvalidOperationException("Initialized with null key");
}
using (var aes = Aes.Create())
{
@@ -72,21 +68,9 @@ namespace SharpCompress.Compressors.LZMA
}
}
public override long Position
{
get
{
return mWritten;
}
}
public override long Position => mWritten;
public override long Length
{
get
{
return mLimit;
}
}
public override long Length => mLimit;
public override int Read(byte[] buffer, int offset, int count)
{
@@ -127,7 +111,7 @@ namespace SharpCompress.Compressors.LZMA
// the stream length is not a multiple of the block size.
if (count > mLimit - mWritten)
{
count = (int) (mLimit - mWritten);
count = (int)(mLimit - mWritten);
}
// We cannot transform less than 16 bytes into the target buffer,
@@ -150,8 +134,6 @@ namespace SharpCompress.Compressors.LZMA
return processed;
}
#endregion
#region Private Methods
private void Init(byte[] info, out int numCyclesPower, out byte[] salt, out byte[] iv)
@@ -199,7 +181,7 @@ namespace SharpCompress.Compressors.LZMA
}
}
private byte[] InitKey(int mNumCyclesPower, byte[] salt, byte[] pass)
private byte[]? InitKey(int mNumCyclesPower, byte[] salt, byte[] pass)
{
if (mNumCyclesPower == 0x3F)
{
@@ -220,54 +202,50 @@ namespace SharpCompress.Compressors.LZMA
}
else
{
#if NETSTANDARD1_3 || NETSTANDARD2_0
using (IncrementalHash sha = IncrementalHash.CreateHash(HashAlgorithmName.SHA256))
#if NETSTANDARD2_0
using IncrementalHash sha = IncrementalHash.CreateHash(HashAlgorithmName.SHA256);
byte[] counter = new byte[8];
long numRounds = 1L << mNumCyclesPower;
for (long round = 0; round < numRounds; round++)
{
byte[] counter = new byte[8];
long numRounds = 1L << mNumCyclesPower;
for (long round = 0; round < numRounds; round++)
{
sha.AppendData(salt, 0, salt.Length);
sha.AppendData(pass, 0, pass.Length);
sha.AppendData(counter, 0, 8);
sha.AppendData(salt, 0, salt.Length);
sha.AppendData(pass, 0, pass.Length);
sha.AppendData(counter, 0, 8);
// This mirrors the counter so we don't have to convert long to byte[] each round.
// (It also ensures the counter is little endian, which BitConverter does not.)
for (int i = 0; i < 8; i++)
// This mirrors the counter so we don't have to convert long to byte[] each round.
// (It also ensures the counter is little endian, which BitConverter does not.)
for (int i = 0; i < 8; i++)
{
if (++counter[i] != 0)
{
if (++counter[i] != 0)
{
break;
}
break;
}
}
return sha.GetHashAndReset();
}
return sha.GetHashAndReset();
#else
using (var sha = SHA256.Create())
using var sha = SHA256.Create();
byte[] counter = new byte[8];
long numRounds = 1L << mNumCyclesPower;
for (long round = 0; round < numRounds; round++)
{
byte[] counter = new byte[8];
long numRounds = 1L << mNumCyclesPower;
for (long round = 0; round < numRounds; round++)
{
sha.TransformBlock(salt, 0, salt.Length, null, 0);
sha.TransformBlock(pass, 0, pass.Length, null, 0);
sha.TransformBlock(counter, 0, 8, null, 0);
sha.TransformBlock(salt, 0, salt.Length, null, 0);
sha.TransformBlock(pass, 0, pass.Length, null, 0);
sha.TransformBlock(counter, 0, 8, null, 0);
// This mirrors the counter so we don't have to convert long to byte[] each round.
// (It also ensures the counter is little endian, which BitConverter does not.)
for (int i = 0; i < 8; i++)
// This mirrors the counter so we don't have to convert long to byte[] each round.
// (It also ensures the counter is little endian, which BitConverter does not.)
for (int i = 0; i < 8; i++)
{
if (++counter[i] != 0)
{
if (++counter[i] != 0)
{
break;
}
break;
}
}
sha.TransformFinalBlock(counter, 0, 0);
return sha.Hash;
}
sha.TransformFinalBlock(counter, 0, 0);
return sha.Hash;
#endif
}
}
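
Editor's note: the restructured key-derivation loop hashes salt + password + an 8-byte round counter 2^numCyclesPower times. The inner for loop is a manual little-endian increment, which avoids converting a long to a byte[] every round and, as the comment says, fixes the endianness that BitConverter would not. Isolated:

// Increment an 8-byte little-endian counter in place.
// Stops at the first byte that does not wrap around to zero.
static void IncrementLittleEndian(byte[] counter)
{
    for (int i = 0; i < counter.Length; i++)
    {
        if (++counter[i] != 0)
        {
            break;
        }
    }
}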

View File

@@ -215,7 +215,7 @@ namespace SharpCompress.Compressors.LZMA
byte prevByte = 0;
uint processedBytes = 0;
for (;;)
for (; ; )
{
byte b = 0;
uint i;

View File

@@ -14,7 +14,7 @@ namespace SharpCompress.Compressors.LZMA
/// <summary>
/// Stream supporting the LZIP format, as documented at http://www.nongnu.org/lzip/manual/lzip_manual.html
/// </summary>
public class LZipStream : Stream
public sealed class LZipStream : Stream
{
private readonly Stream _stream;
private readonly CountingWritableSubStream? _countingWritableSubStream;
@@ -59,16 +59,16 @@ namespace SharpCompress.Compressors.LZMA
crc32Stream.Dispose();
var compressedCount = _countingWritableSubStream!.Count;
byte[] intBuf = new byte[8];
Span<byte> intBuf = stackalloc byte[8];
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, crc32Stream.Crc);
_countingWritableSubStream.Write(intBuf, 0, 4);
_countingWritableSubStream.Write(intBuf.Slice(0, 4));
BinaryPrimitives.WriteInt64LittleEndian(intBuf, _writeCount);
_countingWritableSubStream.Write(intBuf, 0, 8);
_countingWritableSubStream.Write(intBuf);
//total with headers
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, compressedCount + 6 + 20);
_countingWritableSubStream.Write(intBuf, 0, 8);
_countingWritableSubStream.Write(intBuf);
}
_finished = true;
}
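
Editor's note: for reference, the 20-byte lzip member trailer being written here, per the lzip manual linked above, is: CRC-32 of the uncompressed data (4 bytes), uncompressed size (8 bytes), and total member size including the 6-byte header and the trailer itself (8 bytes), all little-endian; hence the `compressedCount + 6 + 20`. A hedged sketch of the same layout as one buffer:

using System;
using System.Buffers.Binary;

static class LzipTrailerSketch
{
    public static byte[] Build(uint crc32, long uncompressedSize, ulong compressedPayloadSize)
    {
        var trailer = new byte[20];
        BinaryPrimitives.WriteUInt32LittleEndian(trailer.AsSpan(0, 4), crc32);
        BinaryPrimitives.WriteInt64LittleEndian(trailer.AsSpan(4, 8), uncompressedSize);
        // Member size counts the 6-byte header and the 20-byte trailer itself.
        BinaryPrimitives.WriteUInt64LittleEndian(trailer.AsSpan(12, 8), compressedPayloadSize + 6 + 20);
        return trailer;
    }
}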
@@ -117,6 +117,23 @@ namespace SharpCompress.Compressors.LZMA
public override void SetLength(long value) => throw new NotImplementedException();
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override int Read(Span<byte> buffer)
{
return _stream.Read(buffer);
}
public override void Write(ReadOnlySpan<byte> buffer)
{
_stream.Write(buffer);
_writeCount += buffer.Length;
}
#endif
public override void Write(byte[] buffer, int offset, int count)
{
_stream.Write(buffer, offset, count);
@@ -151,9 +168,10 @@ namespace SharpCompress.Compressors.LZMA
{
throw new ArgumentNullException(nameof(stream));
}
// Read the header
byte[] header = new byte[6];
int n = stream.Read(header, 0, header.Length);
Span<byte> header = stackalloc byte[6];
int n = stream.Read(header);
// TODO: Handle reading only part of the header?

View File

@@ -766,7 +766,7 @@ namespace SharpCompress.Compressors.LZMA
{
offs += 2;
}
for (;; len++)
for (; ; len++)
{
UInt32 distance = _matchDistances[offs + 1];
UInt32 curAndLenPrice = normalMatchPrice + GetPosLenPrice(distance, len, posState);
@@ -1107,7 +1107,7 @@ namespace SharpCompress.Compressors.LZMA
offs += 2;
}
for (UInt32 lenTest = startLen;; lenTest++)
for (UInt32 lenTest = startLen; ; lenTest++)
{
UInt32 curBack = _matchDistances[offs + 1];
UInt32 curAndLenPrice = normalMatchPrice + GetPosLenPrice(curBack, lenTest, posState);
@@ -1665,129 +1665,129 @@ namespace SharpCompress.Compressors.LZMA
switch (propIDs[i])
{
case CoderPropId.NumFastBytes:
{
if (!(prop is Int32))
{
throw new InvalidParamException();
}
Int32 numFastBytes = (Int32)prop;
if (numFastBytes < 5 || numFastBytes > Base.K_MATCH_MAX_LEN)
{
throw new InvalidParamException();
}
_numFastBytes = (UInt32)numFastBytes;
break;
}
case CoderPropId.Algorithm:
{
/*
if (!(prop is Int32))
throw new InvalidParamException();
Int32 maximize = (Int32)prop;
_fastMode = (maximize == 0);
_maxMode = (maximize >= 2);
*/
break;
}
case CoderPropId.MatchFinder:
{
if (!(prop is String))
{
throw new InvalidParamException();
}
EMatchFinderType matchFinderIndexPrev = _matchFinderType;
int m = FindMatchFinder(((string)prop).ToUpper());
if (m < 0)
{
throw new InvalidParamException();
}
_matchFinderType = (EMatchFinderType)m;
if (_matchFinder != null && matchFinderIndexPrev != _matchFinderType)
{
_dictionarySizePrev = 0xFFFFFFFF;
_matchFinder = null;
}
break;
}
case CoderPropId.DictionarySize:
{
const int kDicLogSizeMaxCompress = 30;
if (!(prop is Int32))
{
throw new InvalidParamException();
}
;
Int32 dictionarySize = (Int32)prop;
if (dictionarySize < (UInt32)(1 << Base.K_DIC_LOG_SIZE_MIN) ||
dictionarySize > (UInt32)(1 << kDicLogSizeMaxCompress))
{
throw new InvalidParamException();
}
_dictionarySize = (UInt32)dictionarySize;
int dicLogSize;
for (dicLogSize = 0; dicLogSize < (UInt32)kDicLogSizeMaxCompress; dicLogSize++)
{
if (dictionarySize <= ((UInt32)(1) << dicLogSize))
if (!(prop is Int32))
{
break;
throw new InvalidParamException();
}
Int32 numFastBytes = (Int32)prop;
if (numFastBytes < 5 || numFastBytes > Base.K_MATCH_MAX_LEN)
{
throw new InvalidParamException();
}
_numFastBytes = (UInt32)numFastBytes;
break;
}
_distTableSize = (UInt32)dicLogSize * 2;
break;
}
case CoderPropId.PosStateBits:
{
if (!(prop is Int32))
case CoderPropId.Algorithm:
{
throw new InvalidParamException();
/*
if (!(prop is Int32))
throw new InvalidParamException();
Int32 maximize = (Int32)prop;
_fastMode = (maximize == 0);
_maxMode = (maximize >= 2);
*/
break;
}
Int32 v = (Int32)prop;
if (v < 0 || v > (UInt32)Base.K_NUM_POS_STATES_BITS_ENCODING_MAX)
case CoderPropId.MatchFinder:
{
throw new InvalidParamException();
if (!(prop is String))
{
throw new InvalidParamException();
}
EMatchFinderType matchFinderIndexPrev = _matchFinderType;
int m = FindMatchFinder(((string)prop).ToUpper());
if (m < 0)
{
throw new InvalidParamException();
}
_matchFinderType = (EMatchFinderType)m;
if (_matchFinder != null && matchFinderIndexPrev != _matchFinderType)
{
_dictionarySizePrev = 0xFFFFFFFF;
_matchFinder = null;
}
break;
}
_posStateBits = v;
_posStateMask = (((UInt32)1) << _posStateBits) - 1;
break;
}
case CoderPropId.LitPosBits:
{
if (!(prop is Int32))
case CoderPropId.DictionarySize:
{
throw new InvalidParamException();
}
Int32 v = (Int32)prop;
if (v < 0 || v > Base.K_NUM_LIT_POS_STATES_BITS_ENCODING_MAX)
{
throw new InvalidParamException();
}
_numLiteralPosStateBits = v;
break;
}
case CoderPropId.LitContextBits:
{
if (!(prop is Int32))
{
throw new InvalidParamException();
}
Int32 v = (Int32)prop;
if (v < 0 || v > Base.K_NUM_LIT_CONTEXT_BITS_MAX)
{
throw new InvalidParamException();
}
const int kDicLogSizeMaxCompress = 30;
if (!(prop is Int32))
{
throw new InvalidParamException();
}
;
_numLiteralContextBits = v;
break;
}
case CoderPropId.EndMarker:
{
if (!(prop is Boolean))
{
throw new InvalidParamException();
Int32 dictionarySize = (Int32)prop;
if (dictionarySize < (UInt32)(1 << Base.K_DIC_LOG_SIZE_MIN) ||
dictionarySize > (UInt32)(1 << kDicLogSizeMaxCompress))
{
throw new InvalidParamException();
}
_dictionarySize = (UInt32)dictionarySize;
int dicLogSize;
for (dicLogSize = 0; dicLogSize < (UInt32)kDicLogSizeMaxCompress; dicLogSize++)
{
if (dictionarySize <= ((UInt32)(1) << dicLogSize))
{
break;
}
}
_distTableSize = (UInt32)dicLogSize * 2;
break;
}
case CoderPropId.PosStateBits:
{
if (!(prop is Int32))
{
throw new InvalidParamException();
}
Int32 v = (Int32)prop;
if (v < 0 || v > (UInt32)Base.K_NUM_POS_STATES_BITS_ENCODING_MAX)
{
throw new InvalidParamException();
}
_posStateBits = v;
_posStateMask = (((UInt32)1) << _posStateBits) - 1;
break;
}
case CoderPropId.LitPosBits:
{
if (!(prop is Int32))
{
throw new InvalidParamException();
}
Int32 v = (Int32)prop;
if (v < 0 || v > Base.K_NUM_LIT_POS_STATES_BITS_ENCODING_MAX)
{
throw new InvalidParamException();
}
_numLiteralPosStateBits = v;
break;
}
case CoderPropId.LitContextBits:
{
if (!(prop is Int32))
{
throw new InvalidParamException();
}
Int32 v = (Int32)prop;
if (v < 0 || v > Base.K_NUM_LIT_CONTEXT_BITS_MAX)
{
throw new InvalidParamException();
}
;
_numLiteralContextBits = v;
break;
}
case CoderPropId.EndMarker:
{
if (!(prop is Boolean))
{
throw new InvalidParamException();
}
SetWriteEndMarkerMode((Boolean)prop);
break;
}
SetWriteEndMarkerMode((Boolean)prop);
break;
}
default:
throw new InvalidParamException();
}
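
Editor's note: the re-indented switch above validates encoder properties passed in as boxed values. A hypothetical call site, with ranges as enforced by the cases above; the enclosing method is presumably the encoder's SetCoderProperties, as in the LZMA SDK this code derives from:

var ids = new[]
{
    CoderPropId.DictionarySize, // Int32, 1 << K_DIC_LOG_SIZE_MIN .. 1 << 30
    CoderPropId.NumFastBytes,   // Int32, 5 .. Base.K_MATCH_MAX_LEN
    CoderPropId.PosStateBits,   // Int32, 0 .. K_NUM_POS_STATES_BITS_ENCODING_MAX
    CoderPropId.EndMarker       // Boolean
};
object[] values = { 1 << 20, 64, 2, true };
encoder.SetCoderProperties(ids, values);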

View File

@@ -92,7 +92,7 @@ namespace SharpCompress.Compressors.PPMd.H
private readonly int[][] _binSumm = new int[128][]; // binary SEE-contexts
private static readonly int[] INIT_BIN_ESC = {0x3CDD, 0x1F3F, 0x59BF, 0x48F3, 0x64A1, 0x5ABC, 0x6632, 0x6051};
private static readonly int[] INIT_BIN_ESC = { 0x3CDD, 0x1F3F, 0x59BF, 0x48F3, 0x64A1, 0x5ABC, 0x6632, 0x6051 };
// Temp fields
//UPGRADE_NOTE: Final was removed from the declaration of 'tempState1 '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
@@ -846,7 +846,7 @@ namespace SharpCompress.Compressors.PPMd.H
_charMask[rs.Symbol] = 0;
_prevSuccess = 0;
}
for (;;)
for (; ; )
{
State s = _tempState1.Initialize(Heap);
int i;

View File

@@ -61,7 +61,7 @@ namespace SharpCompress.Compressors.PPMd.H
private int _suffix; // pointer ppmcontext
//UPGRADE_NOTE: Final was removed from the declaration of 'ExpEscape'. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
public static readonly int[] EXP_ESCAPE = {25, 14, 9, 7, 5, 5, 4, 4, 4, 3, 3, 3, 2, 2, 2, 2};
public static readonly int[] EXP_ESCAPE = { 25, 14, 9, 7, 5, 5, 4, 4, 4, 3, 3, 3, 2, 2, 2, 2 };
// Temp fields
//UPGRADE_NOTE: Final was removed from the declaration of 'tempState1 '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"

View File

@@ -56,7 +56,7 @@ namespace SharpCompress.Compressors.PPMd.I1
0x6051
};
private static ReadOnlySpan<byte> EXPONENTIAL_ESCAPES => new byte[] {25, 14, 9, 7, 5, 5, 4, 4, 4, 3, 3, 3, 2, 2, 2, 2};
private static ReadOnlySpan<byte> EXPONENTIAL_ESCAPES => new byte[] { 25, 14, 9, 7, 5, 5, 4, 4, 4, 3, 3, 3, 2, 2, 2, 2 };
#region Public Methods
@@ -222,7 +222,7 @@ namespace SharpCompress.Compressors.PPMd.I1
_coder.RangeEncoderNormalize(target);
}
StopEncoding:
StopEncoding:
_coder.RangeEncoderFlush(target);
}
@@ -321,7 +321,7 @@ namespace SharpCompress.Compressors.PPMd.I1
_coder.RangeDecoderNormalize(source);
}
StopDecoding:
StopDecoding:
return total;
}
@@ -573,7 +573,7 @@ namespace SharpCompress.Compressors.PPMd.I1
_maximumContext = foundStateSuccessor;
return;
RestartModel:
RestartModel:
RestoreModel(currentContext, minimumContext, foundStateSuccessor);
}
@@ -633,7 +633,7 @@ namespace SharpCompress.Compressors.PPMd.I1
(byte)(((context.Suffix.NumberStatistics == 0) ? 1 : 0) & ((state.Frequency < 24) ? 1 : 0));
}
LoopEntry:
LoopEntry:
if (state.Successor != upBranch)
{
context = state.Successor;
@@ -643,7 +643,7 @@ namespace SharpCompress.Compressors.PPMd.I1
}
while (context.Suffix != PpmContext.ZERO);
NoLoop:
NoLoop:
if (stateIndex == 0)
{
return context;
@@ -767,7 +767,7 @@ namespace SharpCompress.Compressors.PPMd.I1
state.Frequency += (byte)((state.Frequency < 32) ? 1 : 0);
}
LoopEntry:
LoopEntry:
if (state.Successor != PpmContext.ZERO)
{
break;

View File

@@ -363,7 +363,7 @@ namespace SharpCompress.Compressors.PPMd.I1
_numberMasked = context.NumberStatistics;
return;
SymbolFound:
SymbolFound:
_coder._lowCount = lowCount;
lowCount += state.Frequency;
_coder._highCount = lowCount;

View File

@@ -31,7 +31,11 @@ namespace SharpCompress.Compressors.PPMd
public PpmdVersion Version { get; } = PpmdVersion.I1;
internal ModelRestorationMethod RestorationMethod { get; }
public PpmdProperties(byte[] properties)
public PpmdProperties(byte[] properties) : this(properties.AsSpan())
{
}
public PpmdProperties(ReadOnlySpan<byte> properties)
{
if (properties.Length == 2)
{
@@ -43,7 +47,7 @@ namespace SharpCompress.Compressors.PPMd
else if (properties.Length == 5)
{
Version = PpmdVersion.H7Z;
AllocatorSize = BinaryPrimitives.ReadInt32LittleEndian(properties.AsSpan(1));
AllocatorSize = BinaryPrimitives.ReadInt32LittleEndian(properties.Slice(1));
ModelOrder = properties[0];
}
}
@@ -57,7 +61,7 @@ namespace SharpCompress.Compressors.PPMd
if (Version == PpmdVersion.I1)
{
_allocator ??= new Allocator();
_allocator.Start(_allocatorSize);
}
}
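
Editor's note: per the hunks above, PpmdProperties accepts either a 2-byte blob or the 5-byte blob 7-Zip stores: model order in byte 0 and the allocator size as a little-endian Int32 in bytes 1-4. A sketch of building the 5-byte form with hypothetical values:

using System;
using System.Buffers.Binary;
using SharpCompress.Compressors.PPMd;

byte[] props = new byte[5];
props[0] = 6;                                                        // model order
BinaryPrimitives.WriteInt32LittleEndian(props.AsSpan(1), 16 << 20);  // 16 MiB allocator
var ppmd = new PpmdProperties(props);                                // parses as PpmdVersion.H7Z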

View File

@@ -6,8 +6,9 @@ namespace SharpCompress.Compressors.Rar
{
private static readonly uint[] crcTab;
public static uint CheckCrc(uint startCrc, byte b) {
return (crcTab[((int) ((int) startCrc ^ (int) b)) & 0xff] ^ (startCrc >> 8));
public static uint CheckCrc(uint startCrc, byte b)
{
return (crcTab[((int)((int)startCrc ^ (int)b)) & 0xff] ^ (startCrc >> 8));
}
public static uint CheckCrc(uint startCrc, byte[] data, int offset, int count)

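Editor's note: CheckCrc is a standard table-driven, reflected CRC step: XOR the next byte into the low 8 bits, index the table with that, and shift right 8 bits. The crcTab initialization is not shown in this diff; for the usual reflected CRC-32 polynomial 0xEDB88320 it would look roughly like:

// Hedged sketch of a reflected CRC-32 table (polynomial 0xEDB88320).
static uint[] BuildCrcTable()
{
    var table = new uint[256];
    for (uint i = 0; i < 256; i++)
    {
        uint c = i;
        for (int j = 0; j < 8; j++)
        {
            c = (c & 1) != 0 ? 0xEDB88320u ^ (c >> 1) : c >> 1;
        }
        table[i] = c;
    }
    return table;
}
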
View File

@@ -36,7 +36,8 @@ namespace SharpCompress.Compressors.Rar
protected override void Dispose(bool disposing)
{
if (!isDisposed) {
if (!isDisposed)
{
isDisposed = true;
base.Dispose(disposing);
readStream.Dispose();

Some files were not shown because too many files have changed in this diff.