Compare commits

..

84 Commits

Author SHA1 Message Date
Adam Hathcock
f298ad3322 more reverts 2024-03-29 16:05:20 +00:00
Adam Hathcock
69872dd9e7 split files 2024-03-29 16:00:05 +00:00
Adam Hathcock
92174f49ae revert naming changes 2024-03-29 15:53:11 +00:00
Adam Hathcock
c39a155c8f Merge branch 'master' into async-2
# Conflicts:
#	src/SharpCompress/Writers/Zip/ZipWriter.cs
2024-03-29 15:27:43 +00:00
Adam Hathcock
fb73d8c0a7 Merge pull request #819 from TwanVanDongen/master
Support added for TAR LZW compression (Unix 'compress' resulting in .Z files)
2024-03-25 08:41:34 +00:00
Twan van Dongen
f2b0368078 CSharpier reformat missed 2024-03-24 16:29:29 +01:00
Twan van Dongen
02301ecf6d Support added for TAR LZW compression (Unix 'compress' resulting in .Z files) 2024-03-24 16:23:25 +01:00
Adam Hathcock
bcb61ee3e4 Merge pull request #817 from btomblinson/master
#809 Add README.md to csproj for NuGet
2024-03-18 08:34:20 +00:00
btomblinson
6a824429d0 #809 Add README.md to csproj for NuGet 2024-03-16 22:52:36 -06:00
Adam Hathcock
6a52f9097f Merge pull request #815 from adamhathcock/code-clean-up
Code clean up
2024-03-14 16:01:34 +00:00
Adam Hathcock
3fa85fc516 Merge branch 'master' into code-clean-up 2024-03-14 15:58:29 +00:00
Adam Hathcock
498d132d8a Merge pull request #816 from coderb/pullreq-rar-memusage
rar5 improve memory usage
2024-03-14 15:58:15 +00:00
root
b6340f1458 rar5 improve memory usage
use ArrayPool for stream buffer
  use stackalloc for methods on file decompression code path
2024-03-14 11:50:45 -04:00
Adam Hathcock
4afc7ae2e4 use complete namespace 2024-03-14 13:07:40 +00:00
Adam Hathcock
95975a4c33 even more clean up 2024-03-14 09:07:21 +00:00
Adam Hathcock
198a0673a2 more clean up 2024-03-14 09:00:44 +00:00
Adam Hathcock
94d1503c64 more clean up 2024-03-14 08:57:16 +00:00
Adam Hathcock
5f13e245f0 more clean up on tests 2024-03-14 08:53:08 +00:00
Adam Hathcock
2715ae645d use var 2024-03-14 08:38:12 +00:00
Adam Hathcock
0299232cb5 just using rider to clean up 2024-03-14 08:37:17 +00:00
Adam Hathcock
93e181cfd9 update csharpier 2024-03-14 08:29:30 +00:00
Adam Hathcock
8072eb1212 Merge pull request #814 from coderb/pullreq-rar5-redir
rar5 read FHEXTRA_REDIR and expose via RarEntry
2024-03-14 08:26:06 +00:00
root
226ce340f2 rar5 read FHEXTRA_REDIR and expose via RarEntry
NOTE: api user should skip entries where RarEntry.IsRedir is true and not call OpenEntryStream()
2024-03-14 04:17:31 -04:00
Adam Hathcock
e5944cf72c add writer support for async 2024-03-12 15:40:29 +00:00
Adam Hathcock
ab5535eba3 Merge pull request #807 from TwanVanDongen/master
Support for decompressing Zip Shrink (Method:1)
2024-01-29 08:27:32 +00:00
Adam Hathcock
8da2499495 Merge pull request #805 from DannyBoyk/804_Fix_ZIP_Decryption
Zip: Use last modified time from basic header when validating zip decryption
2024-01-29 08:26:17 +00:00
Twan van Dongen
c057ffb153 Reformatted using CSharpier 2024-01-27 18:59:56 +01:00
Twan van Dongen
fe13d29549 Merge branch 'master' of https://github.com/TwanVanDongen/sharpcompress 2024-01-27 18:31:13 +01:00
Twan van Dongen
225aaab4f4 Support for decompressing Zip Shrink (method:1) added 2024-01-27 18:28:46 +01:00
Daniel Nash
14c973558b Zip: Use last modified time from basic header when validating zip decryption
The last modified time used for zip decryption validation must be the
one from the basic header. If UnixTimeExtraFields are present, the
previous implementation was attempting to verify against that value
instead.
Fixed #804
2024-01-26 10:54:41 -05:00
Adam Hathcock
f515ff36b6 Mark for 0.36.0 2024-01-15 08:21:36 +00:00
Adam Hathcock
ed57cfd2f9 Merge pull request #803 from DannyBoyk/802_Add_UnixTimeExtraField_Support_Zips
Add support for the UnixTimeExtraField in Zip files
2024-01-15 08:19:29 +00:00
Daniel Nash
d69559e9c7 Add support for the UnixTimeExtraField in Zip files
Fixes #802
2024-01-12 09:34:13 -05:00
Adam Hathcock
396717efd1 Merge pull request #799 from Erior/feature/DataDescriptorStream-fix-report-size-position
Fix reporting size / position
2024-01-08 09:18:34 +00:00
Adam Hathcock
284fa24464 Merge pull request #800 from Erior/feature/Expose-file-attributes-for-rar-entries
Expose file attributes for rar
2024-01-08 09:17:53 +00:00
Adam Hathcock
0a20b9179a Merge pull request #798 from Erior/feature/Fix-crash-when-not-setting-password-for-rar5
Set Empty string for Rar5 password as default
2024-01-08 09:17:07 +00:00
Adam Hathcock
a0d5037885 Merge pull request #801 from Erior/feature/771
Issue 771, remove throw on flush for readonly streams
2024-01-08 09:13:39 +00:00
Lars Vahlenberg
4477833b1d Issue 771, remove throw on flush for readonly streams 2024-01-06 00:14:34 +01:00
Lars Vahlenberg
e0a5ed4bdb Expose file attributes 2024-01-05 09:50:58 +01:00
Lars Vahlenberg
46d4b26eba Fix testing under Linux 2024-01-05 00:35:24 +01:00
Lars Vahlenberg
f7c6edf849 Fix reporting size / position 2024-01-04 23:33:39 +01:00
Lars Vahlenberg
6c157def4b set empty string if password not set 2024-01-04 21:16:07 +01:00
Adam Hathcock
741712f89f Merge pull request #794 from Erior/feature/rar5-blake2
Feature/rar5 blake2
2024-01-03 08:34:54 +00:00
Lars Vahlenberg
4f749da628 Merge branch 'develop' into feature/rar5-blake2 2024-01-02 21:26:51 +01:00
Lars Vahlenberg
8b02795d69 CSharpier 2024-01-02 21:25:40 +01:00
Lars Vahlenberg
f8a0069a5d Calc checksum when encrypted is not working for RAR5, disable for now 2024-01-02 21:18:49 +01:00
Lars Vahlenberg
388bbe047e Blake2 Archive test OK 2024-01-02 20:46:55 +01:00
Adam Hathcock
2d4ce30e58 Merge pull request #792 from DannyBoyk/791_Correct_EOCD_ZipWriter
ZipWriter: Write correct EOCD record when more than 65,535 files
2023-12-27 08:55:02 +00:00
Daniel Nash
d4fb17cf66 ZipWriter: Write correct EOCD record when more than 65,535 files
0xFFFF will be written to the EOCD to signal to use the ZIP64
CentralDirectory record when the number of files is 65,535 or more.
Fixes #791
2023-12-22 11:26:01 -05:00
Adam Hathcock
372a2c8375 Mark for 0.35.0 2023-12-18 09:59:46 +00:00
Adam Hathcock
8f27121f21 Merge pull request #789 from adamhathcock/dotnet8
Dotnet8
2023-12-18 09:57:50 +00:00
Adam Hathcock
b986bf675f just remove readme 2023-12-18 09:31:33 +00:00
Adam Hathcock
80718a461b fix readme? 2023-12-18 09:24:00 +00:00
Adam Hathcock
2d14ecf58b add readme 2023-12-18 09:20:44 +00:00
Adam Hathcock
32aa9877c0 remove caching 2023-12-18 09:16:02 +00:00
Adam Hathcock
cee3a9c11d Revert "add lock files"
This reverts commit 30a31de45b.
2023-12-18 09:15:26 +00:00
Adam Hathcock
b78643f2d8 update upload artifact 2023-12-18 09:15:15 +00:00
Adam Hathcock
30a31de45b add lock files 2023-12-18 09:13:14 +00:00
Adam Hathcock
e4c4db534c build for dotnet 8 2023-12-18 09:09:31 +00:00
Adam Hathcock
4f7a0d3ad0 CI to dotnet 8 2023-12-18 09:08:06 +00:00
Adam Hathcock
ea3a96eead update and rerun csharpier 2023-12-18 09:04:04 +00:00
Adam Hathcock
c0e01ac132 Use dotnet 8 and update deps 2023-12-18 09:01:54 +00:00
Adam Hathcock
28ea50bca4 Merge pull request #788 from Erior/develop
RAR5 decryption support
2023-12-18 08:51:25 +00:00
Lars Vahlenberg
619e44b30f CSharpier fixes 2023-12-16 03:08:51 +01:00
Lars Vahlenberg
d678275dee Implement RAR5 decryption 2023-12-16 02:53:09 +01:00
Adam Hathcock
08eed53595 Merge pull request #787 from adamhathcock/dependabot/github_actions/actions/setup-dotnet-4
Bump actions/setup-dotnet from 3 to 4
2023-12-11 10:09:28 +00:00
dependabot[bot]
ff40f7d262 Bump actions/setup-dotnet from 3 to 4
Bumps [actions/setup-dotnet](https://github.com/actions/setup-dotnet) from 3 to 4.
- [Release notes](https://github.com/actions/setup-dotnet/releases)
- [Commits](https://github.com/actions/setup-dotnet/compare/v3...v4)

---
updated-dependencies:
- dependency-name: actions/setup-dotnet
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-12-11 09:35:30 +00:00
Adam Hathcock
3c1ae51dae Merge pull request #786 from Erior/feature/Issue-774
LZMA EOS marker detection
2023-12-11 08:46:20 +00:00
Adam Hathcock
8a59fc9aaf Merge pull request #785 from Erior/feature/Issue-782
Handle tar files generated with tar -H oldgnu that has large uid/gid values
2023-12-11 08:44:54 +00:00
Adam Hathcock
b7ea9dd841 Merge pull request #784 from Erior/feature/rar-comment
Dont crash on reading rar5 comment #783
2023-12-11 08:44:01 +00:00
Lars Vahlenberg
0320db6b4a LZMA EOS marker detection 2023-12-09 13:41:35 +01:00
Lars Vahlenberg
18c7f58093 Handle tar files generated with tar -H oldgnu that has large uid/gid values 2023-12-04 22:35:11 +01:00
Lars Vahlenberg
7f6f7b1436 Resharpier fix 2023-12-04 20:28:16 +01:00
Lars Vahlenberg
ca49176b97 Dont crash on reading rar5 comment 2023-12-04 20:19:11 +01:00
Adam Hathcock
67be0cd9d7 Mark for 0.34.2 2023-11-15 11:32:51 +00:00
Adam Hathcock
902fadef83 Merge pull request #780 from caesay/cs/revert-disable-strongname
Revert change disabling strong name signing in 92df1ec
2023-11-15 11:22:02 +00:00
Adam Hathcock
2777b6411f Merge branch 'master' into cs/revert-disable-strongname 2023-11-15 11:18:30 +00:00
Adam Hathcock
e3235d7f04 Merge pull request #781 from adamhathcock/fix-formatting
Update csharpier and fix formatting
2023-11-15 11:18:04 +00:00
Adam Hathcock
dc89c8858e comment out more C++ bits 2023-11-15 11:14:39 +00:00
Adam Hathcock
d28a278d63 Comment out flag to allow formatting 2023-11-15 11:10:05 +00:00
Adam Hathcock
7080c2abd0 Update csharpier and fix formatting 2023-11-15 11:05:30 +00:00
Caelan Sayler
43f86bcab8 Revert change disabling strong name signing in 92df1ec 2023-11-14 16:34:58 +00:00
Adam Hathcock
7d9c875c4d Merge pull request #778 from LANCommander/throw-cancelled-exception
Throw ReaderCancelledException on reader cancelled
2023-11-13 08:34:15 +00:00
Pat Hartl
ed4099eb12 Throw ReaderCancelledException on reader cancelled 2023-11-10 23:36:14 -06:00
186 changed files with 4996 additions and 4133 deletions

View File

@@ -3,7 +3,7 @@
"isRoot": true,
"tools": {
"csharpier": {
"version": "0.25.0",
"version": "0.27.3",
"commands": [
"dotnet-csharpier"
]

View File

@@ -15,18 +15,11 @@ jobs:
steps:
- uses: actions/checkout@v4
- uses: actions/setup-dotnet@v3
- uses: actions/setup-dotnet@v4
with:
dotnet-version: 7.0.x
- name: NuGet Caching
uses: actions/cache@v3
with:
path: ~/.nuget/packages
key: ${{ runner.os }}-nuget-${{ hashFiles('packages.lock.json', '*/packages.lock.json') }}
restore-keys: |
${{ runner.os }}-nuget-
dotnet-version: 8.0.x
- run: dotnet run --project build/build.csproj
- uses: actions/upload-artifact@v3
- uses: actions/upload-artifact@v4
with:
name: ${{ matrix.os }}-sharpcompress.nupkg
path: artifacts/*

View File

@@ -36,7 +36,7 @@ I'm always looking for help or ideas. Please submit code or email with ideas. Un
## TODOs (always lots)
* RAR 5 decryption support
* RAR 5 decryption crc check support
* 7Zip writing
* Zip64 (Need writing and extend Reading)
* Multi-volume Zip support.

View File

@@ -61,7 +61,7 @@ Target(
Target(
Test,
DependsOn(Build),
ForEach("net7.0", "net462"),
ForEach("net8.0", "net462"),
framework =>
{
IEnumerable<string> GetFiles(string d)

View File

@@ -1,6 +1,6 @@
{
"sdk": {
"version": "7.0.101",
"version": "8.0.100",
"rollForward": "latestFeature"
}
}

View File

@@ -62,7 +62,7 @@ internal static class Adler32 // From https://github.com/SixLabors/ImageSharp/bl
public static int ReduceSum(Vector256<int> accumulator)
{
// Add upper lane to lower lane.
Vector128<int> vsum = Sse2.Add(accumulator.GetLower(), accumulator.GetUpper());
var vsum = Sse2.Add(accumulator.GetLower(), accumulator.GetUpper());
// Add odd to even.
vsum = Sse2.Add(vsum, Sse2.Shuffle(vsum, 0b_11_11_01_01));
@@ -81,7 +81,7 @@ internal static class Adler32 // From https://github.com/SixLabors/ImageSharp/bl
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static int EvenReduceSum(Vector256<int> accumulator)
{
Vector128<int> vsum = Sse2.Add(accumulator.GetLower(), accumulator.GetUpper()); // add upper lane to lower lane
var vsum = Sse2.Add(accumulator.GetLower(), accumulator.GetUpper()); // add upper lane to lower lane
vsum = Sse2.Add(vsum, Sse2.Shuffle(vsum, 0b_11_10_11_10)); // add high to low
// Vector128<int>.ToScalar() isn't optimized pre-net5.0 https://github.com/dotnet/runtime/pull/37882
@@ -189,29 +189,29 @@ internal static class Adler32 // From https://github.com/SixLabors/ImageSharp/bl
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
private static unsafe uint CalculateSse(uint adler, ReadOnlySpan<byte> buffer)
{
uint s1 = adler & 0xFFFF;
uint s2 = (adler >> 16) & 0xFFFF;
var s1 = adler & 0xFFFF;
var s2 = (adler >> 16) & 0xFFFF;
// Process the data in blocks.
uint length = (uint)buffer.Length;
uint blocks = length / BlockSize;
var length = (uint)buffer.Length;
var blocks = length / BlockSize;
length -= blocks * BlockSize;
fixed (byte* bufferPtr = &MemoryMarshal.GetReference(buffer))
{
fixed (byte* tapPtr = &MemoryMarshal.GetReference(Tap1Tap2))
{
byte* localBufferPtr = bufferPtr;
var localBufferPtr = bufferPtr;
// _mm_setr_epi8 on x86
Vector128<sbyte> tap1 = Sse2.LoadVector128((sbyte*)tapPtr);
Vector128<sbyte> tap2 = Sse2.LoadVector128((sbyte*)(tapPtr + 0x10));
Vector128<byte> zero = Vector128<byte>.Zero;
var tap1 = Sse2.LoadVector128((sbyte*)tapPtr);
var tap2 = Sse2.LoadVector128((sbyte*)(tapPtr + 0x10));
var zero = Vector128<byte>.Zero;
var ones = Vector128.Create((short)1);
while (blocks > 0)
{
uint n = NMAX / BlockSize; /* The NMAX constraint. */
var n = NMAX / BlockSize; /* The NMAX constraint. */
if (n > blocks)
{
n = blocks;
@@ -221,15 +221,15 @@ internal static class Adler32 // From https://github.com/SixLabors/ImageSharp/bl
// Process n blocks of data. At most NMAX data bytes can be
// processed before s2 must be reduced modulo BASE.
Vector128<uint> v_ps = Vector128.CreateScalar(s1 * n);
Vector128<uint> v_s2 = Vector128.CreateScalar(s2);
Vector128<uint> v_s1 = Vector128<uint>.Zero;
var v_ps = Vector128.CreateScalar(s1 * n);
var v_s2 = Vector128.CreateScalar(s2);
var v_s1 = Vector128<uint>.Zero;
do
{
// Load 32 input bytes.
Vector128<byte> bytes1 = Sse3.LoadDquVector128(localBufferPtr);
Vector128<byte> bytes2 = Sse3.LoadDquVector128(localBufferPtr + 0x10);
var bytes1 = Sse3.LoadDquVector128(localBufferPtr);
var bytes2 = Sse3.LoadDquVector128(localBufferPtr + 0x10);
// Add previous block byte sum to v_ps.
v_ps = Sse2.Add(v_ps, v_s1);
@@ -237,11 +237,11 @@ internal static class Adler32 // From https://github.com/SixLabors/ImageSharp/bl
// Horizontally add the bytes for s1, multiply-adds the
// bytes by [ 32, 31, 30, ... ] for s2.
v_s1 = Sse2.Add(v_s1, Sse2.SumAbsoluteDifferences(bytes1, zero).AsUInt32());
Vector128<short> mad1 = Ssse3.MultiplyAddAdjacent(bytes1, tap1);
var mad1 = Ssse3.MultiplyAddAdjacent(bytes1, tap1);
v_s2 = Sse2.Add(v_s2, Sse2.MultiplyAddAdjacent(mad1, ones).AsUInt32());
v_s1 = Sse2.Add(v_s1, Sse2.SumAbsoluteDifferences(bytes2, zero).AsUInt32());
Vector128<short> mad2 = Ssse3.MultiplyAddAdjacent(bytes2, tap2);
var mad2 = Ssse3.MultiplyAddAdjacent(bytes2, tap2);
v_s2 = Sse2.Add(v_s2, Sse2.MultiplyAddAdjacent(mad2, ones).AsUInt32());
localBufferPtr += BlockSize;
@@ -281,15 +281,15 @@ internal static class Adler32 // From https://github.com/SixLabors/ImageSharp/bl
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
public static unsafe uint CalculateAvx2(uint adler, ReadOnlySpan<byte> buffer)
{
uint s1 = adler & 0xFFFF;
uint s2 = (adler >> 16) & 0xFFFF;
uint length = (uint)buffer.Length;
var s1 = adler & 0xFFFF;
var s2 = (adler >> 16) & 0xFFFF;
var length = (uint)buffer.Length;
fixed (byte* bufferPtr = &MemoryMarshal.GetReference(buffer))
{
byte* localBufferPtr = bufferPtr;
var localBufferPtr = bufferPtr;
Vector256<byte> zero = Vector256<byte>.Zero;
var zero = Vector256<byte>.Zero;
var dot3v = Vector256.Create((short)1);
var dot2v = Vector256.Create(
32,
@@ -333,29 +333,29 @@ internal static class Adler32 // From https://github.com/SixLabors/ImageSharp/bl
while (length >= 32)
{
int k = length < NMAX ? (int)length : (int)NMAX;
var k = length < NMAX ? (int)length : (int)NMAX;
k -= k % 32;
length -= (uint)k;
Vector256<uint> vs10 = vs1;
Vector256<uint> vs3 = Vector256<uint>.Zero;
var vs10 = vs1;
var vs3 = Vector256<uint>.Zero;
while (k >= 32)
{
// Load 32 input bytes.
Vector256<byte> block = Avx.LoadVector256(localBufferPtr);
var block = Avx.LoadVector256(localBufferPtr);
// Sum of abs diff, resulting in 2 x int32's
Vector256<ushort> vs1sad = Avx2.SumAbsoluteDifferences(block, zero);
var vs1sad = Avx2.SumAbsoluteDifferences(block, zero);
vs1 = Avx2.Add(vs1, vs1sad.AsUInt32());
vs3 = Avx2.Add(vs3, vs10);
// sum 32 uint8s to 16 shorts.
Vector256<short> vshortsum2 = Avx2.MultiplyAddAdjacent(block, dot2v);
var vshortsum2 = Avx2.MultiplyAddAdjacent(block, dot2v);
// sum 16 shorts to 8 uint32s.
Vector256<int> vsum2 = Avx2.MultiplyAddAdjacent(vshortsum2, dot3v);
var vsum2 = Avx2.MultiplyAddAdjacent(vshortsum2, dot3v);
vs2 = Avx2.Add(vsum2.AsUInt32(), vs2);
vs10 = vs1;
@@ -434,14 +434,14 @@ internal static class Adler32 // From https://github.com/SixLabors/ImageSharp/bl
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
private static unsafe uint CalculateScalar(uint adler, ReadOnlySpan<byte> buffer)
{
uint s1 = adler & 0xFFFF;
uint s2 = (adler >> 16) & 0xFFFF;
var s1 = adler & 0xFFFF;
var s2 = (adler >> 16) & 0xFFFF;
uint k;
fixed (byte* bufferPtr = buffer)
{
var localBufferPtr = bufferPtr;
uint length = (uint)buffer.Length;
var length = (uint)buffer.Length;
while (length > 0)
{

View File

@@ -31,10 +31,10 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
}
}
private readonly List<TEntry> newEntries = new List<TEntry>();
private readonly List<TEntry> removedEntries = new List<TEntry>();
private readonly List<TEntry> newEntries = new();
private readonly List<TEntry> removedEntries = new();
private readonly List<TEntry> modifiedEntries = new List<TEntry>();
private readonly List<TEntry> modifiedEntries = new();
private bool hasModifications;
private bool pauseRebuilding;

View File

@@ -25,8 +25,8 @@ public static class ArchiveFactory
public static IWritableArchive Create(ArchiveType type)
{
var factory = Factory.Factories
.OfType<IWriteableArchiveFactory>()
var factory = Factory
.Factories.OfType<IWriteableArchiveFactory>()
.FirstOrDefault(item => item.KnownArchiveType == type);
if (factory != null)

View File

@@ -11,7 +11,7 @@ internal abstract class ArchiveVolumeFactory
FileInfo? item = null;
//split 001, 002 ...
Match m = Regex.Match(part1.Name, @"^(.*\.)([0-9]+)$", RegexOptions.IgnoreCase);
var m = Regex.Match(part1.Name, @"^(.*\.)([0-9]+)$", RegexOptions.IgnoreCase);
if (m.Success)
item = new FileInfo(
Path.Combine(

View File

@@ -94,7 +94,7 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
);
}
public static GZipArchive Create() => new GZipArchive();
public static GZipArchive Create() => new();
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.

View File

@@ -1,3 +1,4 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
@@ -14,9 +15,8 @@ namespace SharpCompress.Archives.Rar;
public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
{
internal Lazy<IRarUnpack> UnpackV2017 { get; } =
new Lazy<IRarUnpack>(() => new Compressors.Rar.UnpackV2017.Unpack());
internal Lazy<IRarUnpack> UnpackV1 { get; } =
new Lazy<IRarUnpack>(() => new Compressors.Rar.UnpackV1.Unpack());
new(() => new Compressors.Rar.UnpackV2017.Unpack());
internal Lazy<IRarUnpack> UnpackV1 { get; } = new(() => new Compressors.Rar.UnpackV1.Unpack());
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
@@ -40,9 +40,11 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
streams[1].Position = 0;
SrcStream.Position = 0;
return srcStream.Streams.Select(
a => new StreamRarArchiveVolume(a, ReaderOptions, idx++)
);
return srcStream.Streams.Select(a => new StreamRarArchiveVolume(
a,
ReaderOptions,
idx++
));
}
else //split mode or single file
{

View File

@@ -1,3 +1,4 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
@@ -40,7 +41,10 @@ public class RarArchiveEntry : RarEntry, IArchiveEntry
get
{
CheckIncomplete();
return parts.Select(fp => fp.FileHeader).Single(fh => !fh.IsSplitAfter).FileCrc;
return BitConverter.ToUInt32(
parts.Select(fp => fp.FileHeader).Single(fh => !fh.IsSplitAfter).FileCrc,
0
);
}
}

View File

@@ -11,7 +11,7 @@ internal static class RarArchiveVolumeFactory
FileInfo? item = null;
//new style rar - ..part1 | /part01 | part001 ....
Match m = Regex.Match(part1.Name, @"^(.*\.part)([0-9]+)(\.rar)$", RegexOptions.IgnoreCase);
var m = Regex.Match(part1.Name, @"^(.*\.part)([0-9]+)(\.rar)$", RegexOptions.IgnoreCase);
if (m.Success)
item = new FileInfo(
Path.Combine(

View File

@@ -25,10 +25,19 @@ internal class SeekableFilePart : RarFilePart
internal override Stream GetCompressedStream()
{
stream.Position = FileHeader.DataStartPosition;
if (FileHeader.R4Salt != null)
{
return new RarCryptoWrapper(stream, password!, FileHeader.R4Salt);
var cryptKey = new CryptKey3(password!);
return new RarCryptoWrapper(stream, FileHeader.R4Salt, cryptKey);
}
if (FileHeader.Rar5CryptoInfo != null)
{
var cryptKey = new CryptKey5(password!, FileHeader.Rar5CryptoInfo);
return new RarCryptoWrapper(stream, FileHeader.Rar5CryptoInfo.Salt, cryptKey);
}
return stream;
}

View File

@@ -195,7 +195,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
}
}
public static TarArchive Create() => new TarArchive();
public static TarArchive Create() => new();
protected override TarArchiveEntry CreateEntryInternal(
string filePath,

View File

@@ -294,7 +294,7 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
bool closeStream
) => new ZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream);
public static ZipArchive Create() => new ZipArchive();
public static ZipArchive Create() => new();
protected override IReader CreateReaderForSolidExtraction()
{

View File

@@ -12,7 +12,7 @@ internal static class ZipArchiveVolumeFactory
//load files with zip/zipx first. Swapped to end once loaded in ZipArchive
//new style .zip, z01.. | .zipx, zx01 - if the numbers go beyond 99 then they use 100 ...1000 etc
Match m = Regex.Match(part1.Name, @"^(.*\.)(zipx?|zx?[0-9]+)$", RegexOptions.IgnoreCase);
var m = Regex.Match(part1.Name, @"^(.*\.)(zipx?|zx?[0-9]+)$", RegexOptions.IgnoreCase);
if (m.Success)
item = new FileInfo(
Path.Combine(

View File

@@ -0,0 +1,33 @@
using System.Buffers;
namespace SharpCompress;
internal static class BufferPool
{
/// <summary>
/// gets a buffer from the pool
/// </summary>
/// <param name="bufferSize">size of the buffer</param>
/// <returns>the buffer</returns>
public static byte[] Rent(int bufferSize)
{
#if NETCOREAPP || NETSTANDARD2_1_OR_GREATER
return ArrayPool<byte>.Shared.Rent(bufferSize);
#else
return new byte[bufferSize];
#endif
}
/// <summary>
/// returns a buffer to the pool
/// </summary>
/// <param name="buffer">the buffer to return</param>
public static void Return(byte[] buffer)
{
#if NETCOREAPP || NETSTANDARD2_1_OR_GREATER
ArrayPool<byte>.Shared.Return(buffer);
#else
// no-op
#endif
}
}

View File

@@ -14,5 +14,7 @@ public enum CompressionType
LZip,
Xz,
Unknown,
Deflate64
Deflate64,
Shrink,
Lzw
}

View File

@@ -16,7 +16,7 @@ internal static class ExtractionMethods
)
{
string destinationFileName;
string fullDestinationDirectoryPath = Path.GetFullPath(destinationDirectory);
var fullDestinationDirectoryPath = Path.GetFullPath(destinationDirectory);
//check for trailing slash.
if (
@@ -36,11 +36,11 @@ internal static class ExtractionMethods
options ??= new ExtractionOptions() { Overwrite = true };
string file = Path.GetFileName(entry.Key);
var file = Path.GetFileName(entry.Key);
if (options.ExtractFullPath)
{
string folder = Path.GetDirectoryName(entry.Key)!;
string destdir = Path.GetFullPath(Path.Combine(fullDestinationDirectoryPath, folder));
var folder = Path.GetDirectoryName(entry.Key)!;
var destdir = Path.GetFullPath(Path.Combine(fullDestinationDirectoryPath, folder));
if (!Directory.Exists(destdir))
{
@@ -102,7 +102,7 @@ internal static class ExtractionMethods
}
else
{
FileMode fm = FileMode.Create;
var fm = FileMode.Create;
options ??= new ExtractionOptions() { Overwrite = true };
if (!options.Overwrite)

View File

@@ -7,5 +7,5 @@ public class OptionsBase
/// </summary>
public bool LeaveStreamOpen { get; set; } = true;
public ArchiveEncoding ArchiveEncoding { get; set; } = new ArchiveEncoding();
public ArchiveEncoding ArchiveEncoding { get; set; } = new();
}

View File

@@ -1,12 +0,0 @@
using System;
namespace SharpCompress.Common;
public class PasswordProtectedException : ExtractionException
{
public PasswordProtectedException(string message)
: base(message) { }
public PasswordProtectedException(string message, Exception inner)
: base(message, inner) { }
}

View File

@@ -0,0 +1,84 @@
#nullable disable
using System.Security.Cryptography;
using System.Text;
using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Common.Rar;
internal class CryptKey3 : ICryptKey
{
const int AES_128 = 128;
private string _password;
public CryptKey3(string password) => _password = password ?? "";
public ICryptoTransform Transformer(byte[] salt)
{
var aesIV = new byte[EncryptionConstV5.SIZE_INITV];
var rawLength = 2 * _password.Length;
var rawPassword = new byte[rawLength + EncryptionConstV5.SIZE_SALT30];
var passwordBytes = Encoding.UTF8.GetBytes(_password);
for (var i = 0; i < _password.Length; i++)
{
rawPassword[i * 2] = passwordBytes[i];
rawPassword[(i * 2) + 1] = 0;
}
for (var i = 0; i < salt.Length; i++)
{
rawPassword[i + rawLength] = salt[i];
}
var msgDigest = SHA1.Create();
const int noOfRounds = (1 << 18);
const int iblock = 3;
byte[] digest;
var data = new byte[(rawPassword.Length + iblock) * noOfRounds];
//TODO slow code below, find ways to optimize
for (var i = 0; i < noOfRounds; i++)
{
rawPassword.CopyTo(data, i * (rawPassword.Length + iblock));
data[(i * (rawPassword.Length + iblock)) + rawPassword.Length + 0] = (byte)i;
data[(i * (rawPassword.Length + iblock)) + rawPassword.Length + 1] = (byte)(i >> 8);
data[(i * (rawPassword.Length + iblock)) + rawPassword.Length + 2] = (byte)(i >> 16);
if (i % (noOfRounds / EncryptionConstV5.SIZE_INITV) == 0)
{
digest = msgDigest.ComputeHash(data, 0, (i + 1) * (rawPassword.Length + iblock));
aesIV[i / (noOfRounds / EncryptionConstV5.SIZE_INITV)] = digest[19];
}
}
digest = msgDigest.ComputeHash(data);
//slow code ends
var aesKey = new byte[EncryptionConstV5.SIZE_INITV];
for (var i = 0; i < 4; i++)
{
for (var j = 0; j < 4; j++)
{
aesKey[(i * 4) + j] = (byte)(
(
((digest[i * 4] * 0x1000000) & 0xff000000)
| (uint)((digest[(i * 4) + 1] * 0x10000) & 0xff0000)
| (uint)((digest[(i * 4) + 2] * 0x100) & 0xff00)
| (uint)(digest[(i * 4) + 3] & 0xff)
) >> (j * 8)
);
}
}
var aes = Aes.Create();
aes.KeySize = AES_128;
aes.Mode = CipherMode.CBC;
aes.Padding = PaddingMode.None;
aes.Key = aesKey;
aes.IV = aesIV;
return aes.CreateDecryptor();
}
}

View File

@@ -0,0 +1,95 @@
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Common.Rar;
internal class CryptKey5 : ICryptKey
{
const int AES_256 = 256;
const int DERIVED_KEY_LENGTH = 0x10;
const int SHA256_DIGEST_SIZE = 32;
private string _password;
private Rar5CryptoInfo _cryptoInfo;
private byte[] _pswCheck = { };
private byte[] _hashKey = { };
public CryptKey5(string password, Rar5CryptoInfo rar5CryptoInfo)
{
_password = password ?? "";
_cryptoInfo = rar5CryptoInfo;
}
public byte[] PswCheck => _pswCheck;
public byte[] HashKey => _hashKey;
private static List<byte[]> GenerateRarPBKDF2Key(
string password,
byte[] salt,
int iterations,
int keyLength
)
{
using var hmac = new HMACSHA256(Encoding.UTF8.GetBytes(password));
var block = hmac.ComputeHash(salt);
var finalHash = (byte[])block.Clone();
var loop = new int[] { iterations, 17, 17 };
var res = new List<byte[]> { };
for (var x = 0; x < 3; x++)
{
for (var i = 1; i < loop[x]; i++)
{
block = hmac.ComputeHash(block);
for (var j = 0; j < finalHash.Length; j++)
{
finalHash[j] ^= block[j];
}
}
res.Add((byte[])finalHash.Clone());
}
return res;
}
public ICryptoTransform Transformer(byte[] salt)
{
var iterations = (1 << _cryptoInfo.LG2Count); // Adjust the number of iterations as needed
var salt_rar5 = salt.Concat(new byte[] { 0, 0, 0, 1 });
var derivedKey = GenerateRarPBKDF2Key(
_password,
salt_rar5.ToArray(),
iterations,
DERIVED_KEY_LENGTH
);
_hashKey = derivedKey[1];
_pswCheck = new byte[EncryptionConstV5.SIZE_PSWCHECK];
for (var i = 0; i < SHA256_DIGEST_SIZE; i++)
{
_pswCheck[i % EncryptionConstV5.SIZE_PSWCHECK] ^= derivedKey[2][i];
}
if (_cryptoInfo.UsePswCheck && !_cryptoInfo.PswCheck.SequenceEqual(_pswCheck))
{
throw new CryptographicException("The password did not match.");
}
var aes = Aes.Create();
aes.KeySize = AES_256;
aes.Mode = CipherMode.CBC;
aes.Padding = PaddingMode.None;
aes.Key = derivedKey[0];
aes.IV = _cryptoInfo.InitV;
return aes.CreateDecryptor();
}
}

View File

@@ -1,50 +1,19 @@
#nullable disable
using System;
using System.Security.Cryptography;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers;
internal class ArchiveCryptHeader : RarHeader
{
private const int CRYPT_VERSION = 0; // Supported encryption version.
private const int SIZE_SALT50 = 16;
private const int SIZE_PSWCHECK = 8;
private const int SIZE_PSWCHECK_CSUM = 4;
private const int CRYPT5_KDF_LG2_COUNT_MAX = 24; // LOG2 of maximum accepted iteration count.
private bool _usePswCheck;
private uint _lg2Count; // Log2 of PBKDF2 repetition count.
private byte[] _salt;
private byte[] _pswCheck;
private byte[] _pswCheckCsm;
public ArchiveCryptHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Crypt) { }
protected override void ReadFinish(MarkingBinaryReader reader)
{
var cryptVersion = reader.ReadRarVIntUInt32();
if (cryptVersion > CRYPT_VERSION)
{
//error?
return;
}
var encryptionFlags = reader.ReadRarVIntUInt32();
_usePswCheck = FlagUtility.HasFlag(encryptionFlags, EncryptionFlagsV5.CHFL_CRYPT_PSWCHECK);
_lg2Count = reader.ReadRarVIntByte(1);
public Rar5CryptoInfo CryptInfo = new();
//UsePswCheck = HasHeaderFlag(EncryptionFlagsV5.CHFL_CRYPT_PSWCHECK);
if (_lg2Count > CRYPT5_KDF_LG2_COUNT_MAX)
{
//error?
return;
}
_salt = reader.ReadBytes(SIZE_SALT50);
if (_usePswCheck)
{
_pswCheck = reader.ReadBytes(SIZE_PSWCHECK);
_pswCheckCsm = reader.ReadBytes(SIZE_PSWCHECK_CSUM);
}
}
protected override void ReadFinish(MarkingBinaryReader reader) =>
CryptInfo = new Rar5CryptoInfo(reader, false);
}

View File

@@ -1,5 +1,11 @@
#nullable disable
using System;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using SharpCompress.IO;
#if !Rar2017_64bit
using size_t = System.UInt32;
#else
@@ -8,16 +14,11 @@ using nuint = System.UInt64;
using size_t = System.UInt64;
#endif
using SharpCompress.IO;
using System;
using System.IO;
using System.Text;
namespace SharpCompress.Common.Rar.Headers;
internal class FileHeader : RarHeader
{
private uint _fileCrc;
private byte[] _hash;
public FileHeader(RarHeader header, RarCrcBinaryReader reader, HeaderType headerType)
: base(header, reader, headerType) { }
@@ -52,7 +53,7 @@ internal class FileHeader : RarHeader
if (HasFlag(FileFlagsV5.HAS_CRC32))
{
FileCrc = reader.ReadUInt32();
FileCrc = reader.ReadBytes(4);
}
var compressionInfo = reader.ReadRarVIntUInt16();
@@ -104,7 +105,13 @@ internal class FileHeader : RarHeader
throw new InvalidFormatException("rar5 header size / extra size inconsistency");
}
isEncryptedRar5 = false;
const ushort FHEXTRA_CRYPT = 0x01;
const ushort FHEXTRA_HASH = 0x02;
const ushort FHEXTRA_HTIME = 0x03;
// const ushort FHEXTRA_VERSION = 0x04;
const ushort FHEXTRA_REDIR = 0x05;
// const ushort FHEXTRA_UOWNER = 0x06;
// const ushort FHEXTRA_SUBDATA = 0x07;
while (RemainingHeaderBytes(reader) > 0)
{
@@ -113,22 +120,32 @@ internal class FileHeader : RarHeader
var type = reader.ReadRarVIntUInt16();
switch (type)
{
//TODO
case 1: // file encryption
case FHEXTRA_CRYPT: // file encryption
{
isEncryptedRar5 = true;
Rar5CryptoInfo = new Rar5CryptoInfo(reader, true);
//var version = reader.ReadRarVIntByte();
//if (version != 0) throw new InvalidFormatException("unknown encryption algorithm " + version);
if (Rar5CryptoInfo.PswCheck.All(singleByte => singleByte == 0))
{
Rar5CryptoInfo = null;
}
}
break;
// case 2: // file hash
// {
//
// }
// break;
case 3: // file time
case FHEXTRA_HASH:
{
const uint FHEXTRA_HASH_BLAKE2 = 0x0;
// const uint HASH_BLAKE2 = 0x03;
const int BLAKE2_DIGEST_SIZE = 0x20;
if ((uint)reader.ReadRarVInt() == FHEXTRA_HASH_BLAKE2)
{
// var hash = HASH_BLAKE2;
_hash = reader.ReadBytes(BLAKE2_DIGEST_SIZE);
}
// enum HASH_TYPE {HASH_NONE,HASH_RAR14,HASH_CRC32,HASH_BLAKE2};
}
break;
case FHEXTRA_HTIME: // file time
{
var flags = reader.ReadRarVIntUInt16();
@@ -148,22 +165,28 @@ internal class FileHeader : RarHeader
}
break;
//TODO
// case 4: // file version
// case FHEXTRA_VERSION: // file version
// {
//
// }
// break;
// case 5: // file system redirection
case FHEXTRA_REDIR: // file system redirection
{
RedirType = reader.ReadRarVIntByte();
RedirFlags = reader.ReadRarVIntByte();
var nn = reader.ReadRarVIntUInt16();
var bb = reader.ReadBytes(nn);
RedirTargetName = ConvertPathV5(Encoding.UTF8.GetString(bb, 0, bb.Length));
}
break;
//TODO
// case FHEXTRA_UOWNER: // unix owner
// {
//
// }
// break;
// case 6: // unix owner
// {
//
// }
// break;
// case 7: // service data
// case FHEXTRA_SUBDATA: // service data
// {
//
// }
@@ -171,6 +194,7 @@ internal class FileHeader : RarHeader
default:
// skip unknown record types to allow new record types to be added in the future
//Console.WriteLine($"unhandled rar header field type {type}");
break;
}
// drain any trailing bytes of extra record
@@ -222,7 +246,7 @@ internal class FileHeader : RarHeader
HostOs = reader.ReadByte();
FileCrc = reader.ReadUInt32();
FileCrc = reader.ReadBytes(4);
FileLastModifiedTime = Utility.DosDateToDateTime(reader.ReadUInt32());
@@ -255,7 +279,6 @@ internal class FileHeader : RarHeader
var fileNameBytes = reader.ReadBytes(nameSize);
const int saltSize = 8;
const int newLhdSize = 32;
switch (HeaderCode)
@@ -293,7 +316,7 @@ internal class FileHeader : RarHeader
var datasize = HeaderSize - newLhdSize - nameSize;
if (HasFlag(FileFlagsV4.SALT))
{
datasize -= saltSize;
datasize -= EncryptionConstV5.SIZE_SALT30;
}
if (datasize > 0)
{
@@ -314,7 +337,7 @@ internal class FileHeader : RarHeader
if (HasFlag(FileFlagsV4.SALT))
{
R4Salt = reader.ReadBytes(saltSize);
R4Salt = reader.ReadBytes(EncryptionConstV5.SIZE_SALT30);
}
if (HasFlag(FileFlagsV4.EXT_TIME))
{
@@ -395,18 +418,10 @@ internal class FileHeader : RarHeader
private bool HasFlag(ushort flag) => (Flags & flag) == flag;
internal uint FileCrc
internal byte[] FileCrc
{
get
{
if (IsRar5 && !HasFlag(FileFlagsV5.HAS_CRC32))
{
//!!! rar5:
throw new InvalidOperationException("TODO rar5");
}
return _fileCrc;
}
private set => _fileCrc = value;
get => _hash;
private set => _hash = value;
}
// 0 - storing
@@ -428,11 +443,17 @@ internal class FileHeader : RarHeader
public bool IsSolid { get; private set; }
public byte RedirType { get; private set; }
public bool IsRedir => RedirType != 0;
public byte RedirFlags { get; private set; }
public bool IsRedirDirectory => (RedirFlags & RedirFlagV5.DIRECTORY) != 0;
public string RedirTargetName { get; private set; }
// unused for UnpackV1 implementation (limitation)
internal size_t WindowSize { get; private set; }
internal byte[] R4Salt { get; private set; }
internal Rar5CryptoInfo Rar5CryptoInfo { get; private set; }
private byte HostOs { get; set; }
internal uint FileAttributes { get; private set; }
internal long CompressedSize { get; private set; }
@@ -450,8 +471,7 @@ internal class FileHeader : RarHeader
public bool IsDirectory => HasFlag(IsRar5 ? FileFlagsV5.DIRECTORY : FileFlagsV4.DIRECTORY);
private bool isEncryptedRar5 = false;
public bool IsEncrypted => IsRar5 ? isEncryptedRar5 : HasFlag(FileFlagsV4.PASSWORD);
public bool IsEncrypted => IsRar5 ? Rar5CryptoInfo != null : HasFlag(FileFlagsV4.PASSWORD);
internal DateTime? FileLastModifiedTime { get; private set; }

View File

@@ -50,6 +50,17 @@ internal static class EncryptionFlagsV5
public const uint FHEXTRA_CRYPT_HASHMAC = 0x02;
}
internal static class EncryptionConstV5
{
public const int VERSION = 0;
public const uint CRYPT5_KDF_LG2_COUNT_MAX = 0x24;
public const int SIZE_SALT30 = 0x08;
public const int SIZE_SALT50 = 0x10;
public const int SIZE_INITV = 0x10;
public const int SIZE_PSWCHECK = 0x08;
public const int SIZE_PSWCHECK_CSUM = 0x04;
}
internal static class HeaderFlagsV5
{
public const ushort HAS_EXTRA = 0x0001;
@@ -146,3 +157,17 @@ internal static class EndArchiveFlagsV5
{
public const ushort HAS_NEXT_VOLUME = 0x0001;
}
internal static class RedirTypeV5
{
public const byte UNIX_SYMLINK = 0x0001;
public const byte WIN_SYMLINK = 0x0002;
public const byte WIN_JUNCTION = 0x0003;
public const byte HARD_LINK = 0x0004;
public const byte FILE_COPY = 0x0005;
}
internal static class RedirFlagV5
{
public const byte DIRECTORY = 0x0001;
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.IO;
using SharpCompress.IO;

View File

@@ -1,5 +1,7 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common.Rar;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -9,6 +11,8 @@ public class RarHeaderFactory
{
private bool _isRar5;
private Rar5CryptoInfo? _cryptInfo;
public RarHeaderFactory(StreamingMode mode, ReaderOptions options)
{
StreamingMode = mode;
@@ -53,7 +57,19 @@ public class RarHeaderFactory
"Encrypted Rar archive has no password specified."
);
}
reader = new RarCryptoBinaryReader(stream, Options.Password);
if (_isRar5 && _cryptInfo != null)
{
_cryptInfo.ReadInitV(new MarkingBinaryReader(stream));
var _headerKey = new CryptKey5(Options.Password!, _cryptInfo);
reader = new RarCryptoBinaryReader(stream, _headerKey, _cryptInfo.Salt);
}
else
{
var key = new CryptKey3(Options.Password);
reader = new RarCryptoBinaryReader(stream, key);
}
}
var header = RarHeader.TryReadBase(reader, _isRar5, Options.ArchiveEncoding);
@@ -105,7 +121,14 @@ public class RarHeaderFactory
case HeaderCodeV.RAR5_SERVICE_HEADER:
{
var fh = new FileHeader(header, reader, HeaderType.Service);
SkipData(fh, reader);
if (fh.FileName == "CMT")
{
fh.PackedStream = new ReadOnlySubStream(reader.BaseStream, fh.CompressedSize);
}
else
{
SkipData(fh, reader);
}
return fh;
}
@@ -133,7 +156,7 @@ public class RarHeaderFactory
{
var ms = new ReadOnlySubStream(reader.BaseStream, fh.CompressedSize);
if (fh.R4Salt is null)
if (fh.R4Salt is null && fh.Rar5CryptoInfo is null)
{
fh.PackedStream = ms;
}
@@ -141,8 +164,10 @@ public class RarHeaderFactory
{
fh.PackedStream = new RarCryptoWrapper(
ms,
Options.Password!,
fh.R4Salt
fh.R4Salt is null ? fh.Rar5CryptoInfo.Salt : fh.R4Salt,
fh.R4Salt is null
? new CryptKey5(Options.Password!, fh.Rar5CryptoInfo)
: new CryptKey3(Options.Password!)
);
}
}
@@ -161,9 +186,11 @@ public class RarHeaderFactory
}
case HeaderCodeV.RAR5_ARCHIVE_ENCRYPTION_HEADER:
{
var ch = new ArchiveCryptHeader(header, reader);
var cryptoHeader = new ArchiveCryptHeader(header, reader);
IsEncrypted = true;
return ch;
_cryptInfo = cryptoHeader.CryptInfo;
return cryptoHeader;
}
default:
{

View File

@@ -0,0 +1,8 @@
using System.Security.Cryptography;
namespace SharpCompress.Common.Rar;
internal interface ICryptKey
{
ICryptoTransform Transformer(byte[] salt);
}

View File

@@ -0,0 +1,57 @@
using System;
using System.Security.Cryptography;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
namespace SharpCompress.Common.Rar;
internal class Rar5CryptoInfo
{
public Rar5CryptoInfo() { }
public Rar5CryptoInfo(MarkingBinaryReader reader, bool readInitV)
{
var cryptVersion = reader.ReadRarVIntUInt32();
if (cryptVersion > EncryptionConstV5.VERSION)
{
throw new CryptographicException($"Unsupported crypto version of {cryptVersion}");
}
var encryptionFlags = reader.ReadRarVIntUInt32();
UsePswCheck = FlagUtility.HasFlag(encryptionFlags, EncryptionFlagsV5.CHFL_CRYPT_PSWCHECK);
LG2Count = reader.ReadRarVIntByte(1);
if (LG2Count > EncryptionConstV5.CRYPT5_KDF_LG2_COUNT_MAX)
{
throw new CryptographicException($"Unsupported LG2 count of {LG2Count}.");
}
Salt = reader.ReadBytes(EncryptionConstV5.SIZE_SALT50);
if (readInitV) // File header needs to read IV here
{
ReadInitV(reader);
}
if (UsePswCheck)
{
PswCheck = reader.ReadBytes(EncryptionConstV5.SIZE_PSWCHECK);
var _pswCheckCsm = reader.ReadBytes(EncryptionConstV5.SIZE_PSWCHECK_CSUM);
var sha = SHA256.Create();
UsePswCheck = sha.ComputeHash(PswCheck).AsSpan().StartsWith(_pswCheckCsm.AsSpan());
}
}
public void ReadInitV(MarkingBinaryReader reader) =>
InitV = reader.ReadBytes(EncryptionConstV5.SIZE_INITV);
public bool UsePswCheck = false;
public int LG2Count = 0;
public byte[] InitV = { };
public byte[] Salt = { };
public byte[] PswCheck = { };
}

View File

@@ -1,29 +1,29 @@
using System.Collections.Generic;
#nullable disable
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Crypto;
namespace SharpCompress.Common.Rar;
internal sealed class RarCryptoBinaryReader : RarCrcBinaryReader
{
private RarRijndael _rijndael;
private byte[] _salt;
private readonly string _password;
private readonly Queue<byte> _data = new Queue<byte>();
private BlockTransformer _rijndael;
private readonly Queue<byte> _data = new();
private long _readCount;
public RarCryptoBinaryReader(Stream stream, string password)
public RarCryptoBinaryReader(Stream stream, ICryptKey cryptKey)
: base(stream)
{
_password = password;
// coderb: not sure why this was being done at this logical point
//SkipQueue();
var salt = ReadBytes(8);
_salt = salt;
_rijndael = RarRijndael.InitializeFrom(_password, salt);
var salt = base.ReadBytes(EncryptionConstV5.SIZE_SALT30);
_readCount += EncryptionConstV5.SIZE_SALT30;
_rijndael = new BlockTransformer(cryptKey.Transformer(salt));
}
public RarCryptoBinaryReader(Stream stream, ICryptKey cryptKey, byte[] salt)
: base(stream) => _rijndael = new BlockTransformer(cryptKey.Transformer(salt));
// track read count ourselves rather than using the underlying stream since we buffer
public override long CurrentReadByteCount
{
@@ -36,29 +36,9 @@ internal sealed class RarCryptoBinaryReader : RarCrcBinaryReader
public override void Mark() => _readCount = 0;
private bool UseEncryption => _salt != null;
public override byte ReadByte() => ReadAndDecryptBytes(1)[0];
public override byte ReadByte()
{
if (UseEncryption)
{
return ReadAndDecryptBytes(1)[0];
}
_readCount++;
return base.ReadByte();
}
public override byte[] ReadBytes(int count)
{
if (UseEncryption)
{
return ReadAndDecryptBytes(count);
}
_readCount += count;
return base.ReadBytes(count);
}
public override byte[] ReadBytes(int count) => ReadAndDecryptBytes(count);
private byte[] ReadAndDecryptBytes(int count)
{

View File

@@ -1,37 +1,30 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Crypto;
namespace SharpCompress.Common.Rar;
internal sealed class RarCryptoWrapper : Stream
{
private readonly Stream _actualStream;
private readonly byte[] _salt;
private RarRijndael _rijndael;
private readonly Queue<byte> _data = new Queue<byte>();
private BlockTransformer _rijndael;
private readonly Queue<byte> _data = new();
public RarCryptoWrapper(Stream actualStream, string password, byte[] salt)
public RarCryptoWrapper(Stream actualStream, byte[] salt, ICryptKey key)
{
_actualStream = actualStream;
_salt = salt;
_rijndael = RarRijndael.InitializeFrom(password ?? "", salt);
_rijndael = new BlockTransformer(key.Transformer(salt));
}
public override void Flush() => throw new NotSupportedException();
public override void Flush() { }
public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
public override void SetLength(long value) => throw new NotSupportedException();
public override int Read(byte[] buffer, int offset, int count)
{
if (_salt is null)
{
return _actualStream.Read(buffer, offset, count);
}
return ReadAndDecrypt(buffer, offset, count);
}
public override int Read(byte[] buffer, int offset, int count) =>
ReadAndDecrypt(buffer, offset, count);
public int ReadAndDecrypt(byte[] buffer, int offset, int count)
{
@@ -41,7 +34,7 @@ internal sealed class RarCryptoWrapper : Stream
if (sizeToRead > 0)
{
var alignedSize = sizeToRead + ((~sizeToRead + 1) & 0xf);
Span<byte> cipherText = stackalloc byte[RarRijndael.CRYPTO_BLOCK_SIZE];
Span<byte> cipherText = stackalloc byte[16];
for (var i = 0; i < alignedSize / 16; i++)
{
//long ax = System.currentTimeMillis();

View File

@@ -20,7 +20,7 @@ public abstract class RarEntry : Entry
/// <summary>
/// The File's 32 bit CRC Hash
/// </summary>
public override long Crc => FileHeader.FileCrc;
public override long Crc => BitConverter.ToUInt32(FileHeader.FileCrc, 0);
/// <summary>
/// The path of the file internal to the Rar Archive.
@@ -55,12 +55,21 @@ public abstract class RarEntry : Entry
public override bool IsEncrypted => FileHeader.IsEncrypted;
/// <summary>
/// Entry is password protected and encrypted and cannot be extracted.
/// Entry Windows file attributes
/// </summary>
public override int? Attrib => (int)FileHeader.FileAttributes;
/// <summary>
/// Entry is a directory
/// </summary>
public override bool IsDirectory => FileHeader.IsDirectory;
public override bool IsSplitAfter => FileHeader.IsSplitAfter;
public bool IsRedir => FileHeader.IsRedir;
public string RedirTargetName => FileHeader.RedirTargetName;
public override string ToString() =>
string.Format(
"Entry Path: {0} Compressed Size: {1} Uncompressed Size: {2} CRC: {3}",

View File

@@ -1,114 +0,0 @@
#nullable disable
using System;
using System.Security.Cryptography;
using System.Text;
using SharpCompress.Crypto;
namespace SharpCompress.Common.Rar;
internal class RarRijndael : IDisposable
{
internal const int CRYPTO_BLOCK_SIZE = 16;
private readonly string _password;
private readonly byte[] _salt;
private byte[] _aesInitializationVector;
private RijndaelEngine _rijndael;
private RarRijndael(string password, byte[] salt)
{
_password = password;
_salt = salt;
}
private void Initialize()
{
_rijndael = new RijndaelEngine();
_aesInitializationVector = new byte[CRYPTO_BLOCK_SIZE];
var rawLength = 2 * _password.Length;
var rawPassword = new byte[rawLength + 8];
var passwordBytes = Encoding.UTF8.GetBytes(_password);
for (var i = 0; i < _password.Length; i++)
{
rawPassword[i * 2] = passwordBytes[i];
rawPassword[(i * 2) + 1] = 0;
}
for (var i = 0; i < _salt.Length; i++)
{
rawPassword[i + rawLength] = _salt[i];
}
const int noOfRounds = (1 << 18);
const int iblock = 3;
byte[] digest;
var data = new byte[(rawPassword.Length + iblock) * noOfRounds];
//TODO slow code below, find ways to optimize
for (var i = 0; i < noOfRounds; i++)
{
rawPassword.CopyTo(data, i * (rawPassword.Length + iblock));
data[(i * (rawPassword.Length + iblock)) + rawPassword.Length + 0] = (byte)i;
data[(i * (rawPassword.Length + iblock)) + rawPassword.Length + 1] = (byte)(i >> 8);
data[(i * (rawPassword.Length + iblock)) + rawPassword.Length + 2] = (byte)(
i >> CRYPTO_BLOCK_SIZE
);
if (i % (noOfRounds / CRYPTO_BLOCK_SIZE) == 0)
{
digest = SHA1.Create()
.ComputeHash(data, 0, (i + 1) * (rawPassword.Length + iblock));
_aesInitializationVector[i / (noOfRounds / CRYPTO_BLOCK_SIZE)] = digest[19];
}
}
digest = SHA1.Create().ComputeHash(data);
//slow code ends
var aesKey = new byte[CRYPTO_BLOCK_SIZE];
for (var i = 0; i < 4; i++)
{
for (var j = 0; j < 4; j++)
{
aesKey[(i * 4) + j] = (byte)(
(
((digest[i * 4] * 0x1000000) & 0xff000000)
| (uint)((digest[(i * 4) + 1] * 0x10000) & 0xff0000)
| (uint)((digest[(i * 4) + 2] * 0x100) & 0xff00)
| (uint)(digest[(i * 4) + 3] & 0xff)
) >> (j * 8)
);
}
}
_rijndael.Init(false, new KeyParameter(aesKey));
}
public static RarRijndael InitializeFrom(string password, byte[] salt)
{
var rijndael = new RarRijndael(password, salt);
rijndael.Initialize();
return rijndael;
}
public byte[] ProcessBlock(ReadOnlySpan<byte> cipherText)
{
Span<byte> plainText = stackalloc byte[CRYPTO_BLOCK_SIZE]; // 16 bytes
var decryptedBytes = new byte[CRYPTO_BLOCK_SIZE];
_rijndael.ProcessBlock(cipherText, plainText);
for (var j = 0; j < CRYPTO_BLOCK_SIZE; j++)
{
decryptedBytes[j] = (byte)(plainText[j] ^ _aesInitializationVector[j % 16]); //32:114, 33:101
}
for (var j = 0; j < _aesInitializationVector.Length; j++)
{
_aesInitializationVector[j] = cipherText[j];
}
return decryptedBytes;
}
public void Dispose() { }
}

View File

@@ -2,6 +2,7 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -70,11 +71,7 @@ public abstract class RarVolume : Volume
var part = CreateFilePart(lastMarkHeader!, fh);
var buffer = new byte[fh.CompressedSize];
part.GetCompressedStream().Read(buffer, 0, buffer.Length);
Comment = System.Text.Encoding.UTF8.GetString(
buffer,
0,
buffer.Length - 1
);
Comment = Encoding.UTF8.GetString(buffer, 0, buffer.Length - 1);
}
}
break;

View File

@@ -0,0 +1,12 @@
using System;
namespace SharpCompress.Common;
public class ReaderCancelledException : Exception
{
public ReaderCancelledException(string message)
: base(message) { }
public ReaderCancelledException(string message, Exception inner)
: base(message, inner) { }
}

View File

@@ -15,15 +15,15 @@ internal class ArchiveDatabase
internal long _startPositionAfterHeader;
internal long _dataStartPosition;
internal List<long> _packSizes = new List<long>();
internal List<uint?> _packCrCs = new List<uint?>();
internal List<CFolder> _folders = new List<CFolder>();
internal List<long> _packSizes = new();
internal List<uint?> _packCrCs = new();
internal List<CFolder> _folders = new();
internal List<int> _numUnpackStreamsVector;
internal List<CFileItem> _files = new List<CFileItem>();
internal List<CFileItem> _files = new();
internal List<long> _packStreamStartPositions = new List<long>();
internal List<int> _folderStartFileIndex = new List<int>();
internal List<int> _fileIndexToFolderIndexMap = new List<int>();
internal List<long> _packStreamStartPositions = new();
internal List<int> _folderStartFileIndex = new();
internal List<int> _fileIndexToFolderIndexMap = new();
internal IPasswordProvider PasswordProvider { get; }

View File

@@ -14,13 +14,13 @@ namespace SharpCompress.Common.SevenZip;
internal class ArchiveReader
{
internal Stream _stream;
internal Stack<DataReader> _readerStack = new Stack<DataReader>();
internal Stack<DataReader> _readerStack = new();
internal DataReader _currentReader;
internal long _streamOrigin;
internal long _streamEnding;
internal byte[] _header;
private readonly Dictionary<int, Stream> _cachedStreams = new Dictionary<int, Stream>();
private readonly Dictionary<int, Stream> _cachedStreams = new();
internal void AddByteStream(byte[] buffer, int offset, int length)
{
@@ -1359,7 +1359,7 @@ internal class ArchiveReader
{
internal int _fileIndex;
internal int _folderIndex;
internal List<bool> _extractStatuses = new List<bool>();
internal List<bool> _extractStatuses = new();
internal CExtractFolderInfo(int fileIndex, int folderIndex)
{
@@ -1393,7 +1393,7 @@ internal class ArchiveReader
public override bool CanWrite => false;
public override void Flush() => throw new NotSupportedException();
public override void Flush() { }
public override long Length => throw new NotSupportedException();

View File

@@ -6,11 +6,11 @@ namespace SharpCompress.Common.SevenZip;
internal class CFolder
{
internal List<CCoderInfo> _coders = new List<CCoderInfo>();
internal List<CBindPair> _bindPairs = new List<CBindPair>();
internal List<int> _packStreams = new List<int>();
internal List<CCoderInfo> _coders = new();
internal List<CBindPair> _bindPairs = new();
internal List<int> _packStreams = new();
internal int _firstPackStreamId;
internal List<long> _unpackSizes = new List<long>();
internal List<long> _unpackSizes = new();
internal uint? _unpackCrc;
internal bool UnpackCrcDefined => _unpackCrc != null;

View File

@@ -7,10 +7,10 @@ internal readonly struct CMethodId
public const ulong K_LZMA2_ID = 0x21;
public const ulong K_AES_ID = 0x06F10701;
public static readonly CMethodId K_COPY = new CMethodId(K_COPY_ID);
public static readonly CMethodId K_LZMA = new CMethodId(K_LZMA_ID);
public static readonly CMethodId K_LZMA2 = new CMethodId(K_LZMA2_ID);
public static readonly CMethodId K_AES = new CMethodId(K_AES_ID);
public static readonly CMethodId K_COPY = new(K_COPY_ID);
public static readonly CMethodId K_LZMA = new(K_LZMA_ID);
public static readonly CMethodId K_LZMA2 = new(K_LZMA2_ID);
public static readonly CMethodId K_AES = new(K_AES_ID);
public readonly ulong _id;

View File

@@ -1,4 +1,4 @@
#nullable disable
#nullable disable
using System;
using System.Buffers.Binary;
@@ -9,7 +9,7 @@ namespace SharpCompress.Common.Tar.Headers;
internal sealed class TarHeader
{
internal static readonly DateTime EPOCH = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
internal static readonly DateTime EPOCH = new(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
public TarHeader(ArchiveEncoding archiveEncoding) => ArchiveEncoding = archiveEncoding;
@@ -132,8 +132,8 @@ internal sealed class TarHeader
Mode |= 0b1_000_000_000;
}
UserId = ReadAsciiInt64Base8(buffer, 108, 7);
GroupId = ReadAsciiInt64Base8(buffer, 116, 7);
UserId = ReadAsciiInt64Base8oldGnu(buffer, 108, 7);
GroupId = ReadAsciiInt64Base8oldGnu(buffer, 116, 7);
var unixTimeStamp = ReadAsciiInt64Base8(buffer, 136, 11);
LastModifiedTime = EPOCH.AddSeconds(unixTimeStamp).ToLocalTime();
@@ -249,6 +249,24 @@ internal sealed class TarHeader
return Convert.ToInt64(s, 8);
}
private static long ReadAsciiInt64Base8oldGnu(byte[] buffer, int offset, int count)
{
if (buffer[offset] == 0x80 && buffer[offset + 1] == 0x00)
{
return buffer[offset + 4] << 24
| buffer[offset + 5] << 16
| buffer[offset + 6] << 8
| buffer[offset + 7];
}
var s = Encoding.UTF8.GetString(buffer, offset, count).TrimNulls();
if (string.IsNullOrEmpty(s))
{
return 0;
}
return Convert.ToInt64(s, 8);
}
private static long ReadAsciiInt64(byte[] buffer, int offset, int count)
{
var s = Encoding.UTF8.GetString(buffer, offset, count).TrimNulls();

View File

@@ -1,6 +1,6 @@
using SharpCompress.IO;
using System;
using System.IO;
using SharpCompress.IO;
namespace SharpCompress.Common.Tar;
@@ -47,7 +47,7 @@ internal class TarReadOnlySubStream : NonDisposingStream
public override bool CanWrite => false;
public override void Flush() => throw new NotSupportedException();
public override void Flush() { }
public override long Length => throw new NotSupportedException();
@@ -63,7 +63,7 @@ internal class TarReadOnlySubStream : NonDisposingStream
{
count = (int)BytesLeftToRead;
}
int read = Stream.Read(buffer, offset, count);
var read = Stream.Read(buffer, offset, count);
if (read > 0)
{
BytesLeftToRead -= read;
@@ -78,7 +78,7 @@ internal class TarReadOnlySubStream : NonDisposingStream
{
return -1;
}
int value = Stream.ReadByte();
var value = Stream.ReadByte();
if (value != -1)
{
--BytesLeftToRead;

View File

@@ -14,8 +14,8 @@ internal class DirectoryEntryHeader : ZipFileEntry
VersionNeededToExtract = reader.ReadUInt16();
Flags = (HeaderFlags)reader.ReadUInt16();
CompressionMethod = (ZipCompressionMethod)reader.ReadUInt16();
LastModifiedTime = reader.ReadUInt16();
LastModifiedDate = reader.ReadUInt16();
OriginalLastModifiedTime = LastModifiedTime = reader.ReadUInt16();
OriginalLastModifiedDate = LastModifiedDate = reader.ReadUInt16();
Crc = reader.ReadUInt32();
CompressedSize = reader.ReadUInt32();
UncompressedSize = reader.ReadUInt32();
@@ -52,8 +52,8 @@ internal class DirectoryEntryHeader : ZipFileEntry
LoadExtra(extra);
var unicodePathExtra = Extra.FirstOrDefault(
u => u.Type == ExtraDataType.UnicodePathExtraField
var unicodePathExtra = Extra.FirstOrDefault(u =>
u.Type == ExtraDataType.UnicodePathExtraField
);
if (unicodePathExtra != null && ArchiveEncoding.Forced == null)
{
@@ -85,6 +85,36 @@ internal class DirectoryEntryHeader : ZipFileEntry
RelativeOffsetOfEntryHeader = zip64ExtraData.RelativeOffsetOfEntryHeader;
}
}
var unixTimeExtra = Extra.FirstOrDefault(u => u.Type == ExtraDataType.UnixTimeExtraField);
if (unixTimeExtra is not null)
{
// Tuple order is last modified time, last access time, and creation time.
var unixTimeTuple = ((UnixTimeExtraField)unixTimeExtra).UnicodeTimes;
if (unixTimeTuple.Item1.HasValue)
{
var dosTime = Utility.DateTimeToDosTime(unixTimeTuple.Item1.Value);
LastModifiedDate = (ushort)(dosTime >> 16);
LastModifiedTime = (ushort)(dosTime & 0x0FFFF);
}
else if (unixTimeTuple.Item2.HasValue)
{
var dosTime = Utility.DateTimeToDosTime(unixTimeTuple.Item2.Value);
LastModifiedDate = (ushort)(dosTime >> 16);
LastModifiedTime = (ushort)(dosTime & 0x0FFFF);
}
else if (unixTimeTuple.Item3.HasValue)
{
var dosTime = Utility.DateTimeToDosTime(unixTimeTuple.Item3.Value);
LastModifiedDate = (ushort)(dosTime >> 16);
LastModifiedTime = (ushort)(dosTime & 0x0FFFF);
}
}
}
internal ushort Version { get; private set; }

View File

@@ -13,8 +13,8 @@ internal class LocalEntryHeader : ZipFileEntry
Version = reader.ReadUInt16();
Flags = (HeaderFlags)reader.ReadUInt16();
CompressionMethod = (ZipCompressionMethod)reader.ReadUInt16();
LastModifiedTime = reader.ReadUInt16();
LastModifiedDate = reader.ReadUInt16();
OriginalLastModifiedTime = LastModifiedTime = reader.ReadUInt16();
OriginalLastModifiedDate = LastModifiedDate = reader.ReadUInt16();
Crc = reader.ReadUInt32();
CompressedSize = reader.ReadUInt32();
UncompressedSize = reader.ReadUInt32();
@@ -42,8 +42,8 @@ internal class LocalEntryHeader : ZipFileEntry
LoadExtra(extra);
var unicodePathExtra = Extra.FirstOrDefault(
u => u.Type == ExtraDataType.UnicodePathExtraField
var unicodePathExtra = Extra.FirstOrDefault(u =>
u.Type == ExtraDataType.UnicodePathExtraField
);
if (unicodePathExtra != null && ArchiveEncoding.Forced == null)
{
@@ -64,6 +64,36 @@ internal class LocalEntryHeader : ZipFileEntry
UncompressedSize = zip64ExtraData.UncompressedSize;
}
}
var unixTimeExtra = Extra.FirstOrDefault(u => u.Type == ExtraDataType.UnixTimeExtraField);
if (unixTimeExtra is not null)
{
// Tuple order is last modified time, last access time, and creation time.
var unixTimeTuple = ((UnixTimeExtraField)unixTimeExtra).UnicodeTimes;
if (unixTimeTuple.Item1.HasValue)
{
var dosTime = Utility.DateTimeToDosTime(unixTimeTuple.Item1.Value);
LastModifiedDate = (ushort)(dosTime >> 16);
LastModifiedTime = (ushort)(dosTime & 0x0FFFF);
}
else if (unixTimeTuple.Item2.HasValue)
{
var dosTime = Utility.DateTimeToDosTime(unixTimeTuple.Item2.Value);
LastModifiedDate = (ushort)(dosTime >> 16);
LastModifiedTime = (ushort)(dosTime & 0x0FFFF);
}
else if (unixTimeTuple.Item3.HasValue)
{
var dosTime = Utility.DateTimeToDosTime(unixTimeTuple.Item3.Value);
LastModifiedDate = (ushort)(dosTime >> 16);
LastModifiedTime = (ushort)(dosTime & 0x0FFFF);
}
}
}
internal ushort Version { get; private set; }

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Buffers.Binary;
using System.Text;
@@ -13,7 +13,8 @@ internal enum ExtraDataType : ushort
// Third Party Mappings
// -Info-ZIP Unicode Path Extra Field
UnicodePathExtraField = 0x7075,
Zip64ExtendedInformationExtraField = 0x0001
Zip64ExtendedInformationExtraField = 0x0001,
UnixTimeExtraField = 0x5455
}
internal class ExtraData
@@ -145,6 +146,84 @@ internal sealed class Zip64ExtendedInformationExtraField : ExtraData
public uint VolumeNumber { get; private set; }
}
internal sealed class UnixTimeExtraField : ExtraData
{
public UnixTimeExtraField(ExtraDataType type, ushort length, byte[] dataBytes)
: base(type, length, dataBytes) { }
/// <summary>
/// The unix modified time, last access time, and creation time, if set.
/// </summary>
/// <remarks>Must return Tuple explicitly due to net462 support.</remarks>
internal Tuple<DateTime?, DateTime?, DateTime?> UnicodeTimes
{
get
{
// There has to be at least 5 byte for there to be a timestamp.
// 1 byte for flags and 4 bytes for a timestamp.
if (DataBytes is null || DataBytes.Length < 5)
{
return Tuple.Create<DateTime?, DateTime?, DateTime?>(null, null, null);
}
var flags = DataBytes[0];
var isModifiedTimeSpecified = (flags & 0x01) == 1;
var isLastAccessTimeSpecified = (flags & 0x02) == 1;
var isCreationTimeSpecified = (flags & 0x04) == 1;
var currentIndex = 1;
DateTime? modifiedTime = null;
DateTime? lastAccessTime = null;
DateTime? creationTime = null;
if (isModifiedTimeSpecified)
{
var modifiedEpochTime = BinaryPrimitives.ReadInt32LittleEndian(
DataBytes.AsSpan(currentIndex, 4)
);
currentIndex += 4;
modifiedTime = DateTimeOffset.FromUnixTimeSeconds(modifiedEpochTime).UtcDateTime;
}
if (isLastAccessTimeSpecified)
{
if (currentIndex + 4 > DataBytes.Length)
{
throw new ArchiveException("Invalid UnicodeExtraTime field");
}
var lastAccessEpochTime = BinaryPrimitives.ReadInt32LittleEndian(
DataBytes.AsSpan(currentIndex, 4)
);
currentIndex += 4;
lastAccessTime = DateTimeOffset
.FromUnixTimeSeconds(lastAccessEpochTime)
.UtcDateTime;
}
if (isCreationTimeSpecified)
{
if (currentIndex + 4 > DataBytes.Length)
{
throw new ArchiveException("Invalid UnicodeExtraTime field");
}
var creationTimeEpochTime = BinaryPrimitives.ReadInt32LittleEndian(
DataBytes.AsSpan(currentIndex, 4)
);
currentIndex += 4;
creationTime = DateTimeOffset
.FromUnixTimeSeconds(creationTimeEpochTime)
.UtcDateTime;
}
return Tuple.Create(modifiedTime, lastAccessTime, creationTime);
}
}
}
internal static class LocalEntryHeaderExtraFactory
{
internal static ExtraData Create(ExtraDataType type, ushort length, byte[] extraData) =>
@@ -154,6 +233,7 @@ internal static class LocalEntryHeaderExtraFactory
=> new ExtraUnicodePathExtraField(type, length, extraData),
ExtraDataType.Zip64ExtendedInformationExtraField
=> new Zip64ExtendedInformationExtraField(type, length, extraData),
ExtraDataType.UnixTimeExtraField => new UnixTimeExtraField(type, length, extraData),
_ => new ExtraData(type, length, extraData)
};
}

View File

@@ -67,8 +67,26 @@ internal abstract class ZipFileEntry : ZipHeader
internal WinzipAesEncryptionData WinzipAesEncryptionData { get; set; }
/// <summary>
/// The last modified date as read from the Local or Central Directory header.
/// </summary>
internal ushort OriginalLastModifiedDate { get; set; }
/// <summary>
/// The last modified date from the UnixTimeExtraField, if present, or the
/// Local or Cental Directory header, if not.
/// </summary>
internal ushort LastModifiedDate { get; set; }
/// <summary>
/// The last modified time as read from the Local or Central Directory header.
/// </summary>
internal ushort OriginalLastModifiedTime { get; set; }
/// <summary>
/// The last modified time from the UnixTimeExtraField, if present, or the
/// Local or Cental Directory header, if not.
/// </summary>
internal ushort LastModifiedTime { get; set; }
internal uint Crc { get; set; }

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.IO;
namespace SharpCompress.Common.Zip;
@@ -87,10 +87,7 @@ internal class PkwareTraditionalCryptoStream : Stream
_stream.Write(encrypted, 0, encrypted.Length);
}
public override void Flush()
{
//throw new NotSupportedException();
}
public override void Flush() { }
public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();

View File

@@ -6,7 +6,7 @@ namespace SharpCompress.Common.Zip;
internal class PkwareTraditionalEncryptionData
{
private static readonly CRC32 CRC32 = new CRC32();
private static readonly CRC32 CRC32 = new();
private readonly uint[] _keys = { 0x12345678, 0x23456789, 0x34567890 };
private readonly ArchiveEncoding _archiveEncoding;
@@ -39,7 +39,7 @@ internal class PkwareTraditionalEncryptionData
{
throw new CryptographicException("The password did not match.");
}
if (plainTextHeader[11] != (byte)((header.LastModifiedTime >> 8) & 0xff))
if (plainTextHeader[11] != (byte)((header.OriginalLastModifiedTime >> 8) & 0xff))
{
throw new CryptographicException("The password did not match.");
}

View File

@@ -15,10 +15,7 @@ internal class StreamingZipHeaderFactory : ZipHeaderFactory
ArchiveEncoding archiveEncoding,
IEnumerable<ZipEntry>? entries
)
: base(StreamingMode.Streaming, password, archiveEncoding)
{
_entries = entries;
}
: base(StreamingMode.Streaming, password, archiveEncoding) => _entries = entries;
internal IEnumerable<ZipHeader> ReadStreamHeader(Stream stream)
{
@@ -97,13 +94,12 @@ internal class StreamingZipHeaderFactory : ZipHeaderFactory
if (header.ZipHeaderType == ZipHeaderType.LocalEntry)
{
var local_header = ((LocalEntryHeader)header);
var dir_header = _entries?.FirstOrDefault(
entry =>
entry.Key == local_header.Name
&& local_header.CompressedSize == 0
&& local_header.UncompressedSize == 0
&& local_header.Crc == 0
&& local_header.IsDirectory == false
var dir_header = _entries?.FirstOrDefault(entry =>
entry.Key == local_header.Name
&& local_header.CompressedSize == 0
&& local_header.UncompressedSize == 0
&& local_header.Crc == 0
&& local_header.IsDirectory == false
);
if (dir_header != null)

View File

@@ -73,7 +73,7 @@ internal class WinzipAesCryptoStream : Stream
}
}
public override void Flush() => throw new NotSupportedException();
public override void Flush() { }
public override int Read(byte[] buffer, int offset, int count)
{

View File

@@ -48,15 +48,15 @@ internal class WinzipAesEncryptionData
private void Initialize()
{
#if NET7_0
#if NETFRAMEWORK || NETSTANDARD2_0
var rfc2898 = new Rfc2898DeriveBytes(_password, _salt, RFC2898_ITERATIONS);
#else
var rfc2898 = new Rfc2898DeriveBytes(
_password,
_salt,
RFC2898_ITERATIONS,
HashAlgorithmName.SHA1
);
#else
var rfc2898 = new Rfc2898DeriveBytes(_password, _salt, RFC2898_ITERATIONS);
#endif
KeyBytes = rfc2898.GetBytes(KeySizeInBytes); // 16 or 24 or 32 ???

View File

@@ -3,6 +3,7 @@ namespace SharpCompress.Common.Zip;
internal enum ZipCompressionMethod
{
None = 0,
Shrink = 1,
Deflate = 8,
Deflate64 = 9,
BZip2 = 12,

View File

@@ -22,43 +22,18 @@ public class ZipEntry : Entry
}
}
public override CompressionType CompressionType
{
get
public override CompressionType CompressionType =>
_filePart.Header.CompressionMethod switch
{
switch (_filePart.Header.CompressionMethod)
{
case ZipCompressionMethod.BZip2:
{
return CompressionType.BZip2;
}
case ZipCompressionMethod.Deflate:
{
return CompressionType.Deflate;
}
case ZipCompressionMethod.Deflate64:
{
return CompressionType.Deflate64;
}
case ZipCompressionMethod.LZMA:
{
return CompressionType.LZMA;
}
case ZipCompressionMethod.PPMd:
{
return CompressionType.PPMd;
}
case ZipCompressionMethod.None:
{
return CompressionType.None;
}
default:
{
return CompressionType.Unknown;
}
}
}
}
ZipCompressionMethod.BZip2 => CompressionType.BZip2,
ZipCompressionMethod.Deflate => CompressionType.Deflate,
ZipCompressionMethod.Deflate64 => CompressionType.Deflate64,
ZipCompressionMethod.LZMA => CompressionType.LZMA,
ZipCompressionMethod.PPMd => CompressionType.PPMd,
ZipCompressionMethod.None => CompressionType.None,
ZipCompressionMethod.Shrink => CompressionType.Shrink,
_ => CompressionType.Unknown
};
public override long Crc => _filePart.Header.Crc;

View File

@@ -9,6 +9,7 @@ using SharpCompress.Compressors.Deflate;
using SharpCompress.Compressors.Deflate64;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.PPMd;
using SharpCompress.Compressors.Shrink;
using SharpCompress.Compressors.Xz;
using SharpCompress.IO;
using ZstdSharp;
@@ -79,6 +80,15 @@ internal abstract class ZipFilePart : FilePart
return new DataDescriptorStream(stream);
}
case ZipCompressionMethod.Shrink:
{
return new ShrinkStream(
stream,
CompressionMode.Decompress,
Header.CompressedSize,
Header.UncompressedSize
);
}
case ZipCompressionMethod.Deflate:
{
return new DeflateStream(stream, CompressionMode.Decompress);
@@ -192,6 +202,7 @@ internal abstract class ZipFilePart : FilePart
switch (Header.CompressionMethod)
{
case ZipCompressionMethod.None:
case ZipCompressionMethod.Shrink:
case ZipCompressionMethod.Deflate:
case ZipCompressionMethod.Deflate64:
case ZipCompressionMethod.BZip2:

View File

@@ -142,8 +142,8 @@ internal class ZipHeaderFactory
if (entryHeader.CompressionMethod == ZipCompressionMethod.WinzipAes)
{
var data = entryHeader.Extra.SingleOrDefault(
x => x.Type == ExtraDataType.WinZipAes
var data = entryHeader.Extra.SingleOrDefault(x =>
x.Type == ExtraDataType.WinZipAes
);
if (data != null)
{

View File

@@ -87,7 +87,7 @@ internal class CBZip2InputStream : Stream
private int bsBuff;
private int bsLive;
private readonly CRC mCrc = new CRC();
private readonly CRC mCrc = new();
private readonly bool[] inUse = new bool[256];
private int nInUse;

View File

@@ -284,7 +284,7 @@ internal sealed class CBZip2OutputStream : Stream
private int bytesOut;
private int bsBuff;
private int bsLive;
private readonly CRC mCrc = new CRC();
private readonly CRC mCrc = new();
private readonly bool[] inUse = new bool[256];
private int nInUse;

View File

@@ -69,7 +69,6 @@
// -----------------------------------------------------------------------
using System;
using SharpCompress.Algorithms;
namespace SharpCompress.Compressors.Deflate;
@@ -343,9 +342,9 @@ internal sealed partial class DeflateManager
private readonly short[] dyn_dtree; // distance tree
private readonly short[] bl_tree; // Huffman tree for bit lengths
private readonly Tree treeLiterals = new Tree(); // desc for literal tree
private readonly Tree treeDistances = new Tree(); // desc for distance tree
private readonly Tree treeBitLengths = new Tree(); // desc for bit length tree
private readonly Tree treeLiterals = new(); // desc for literal tree
private readonly Tree treeDistances = new(); // desc for distance tree
private readonly Tree treeBitLengths = new(); // desc for bit length tree
// number of codes at each bit length for an optimal tree
private readonly short[] bl_count = new short[InternalConstants.MAX_BITS + 1];
@@ -1788,21 +1787,14 @@ internal sealed partial class DeflateManager
return status == BUSY_STATE ? ZlibConstants.Z_DATA_ERROR : ZlibConstants.Z_OK;
}
private void SetDeflater()
{
switch (config.Flavor)
private void SetDeflater() =>
DeflateFunction = config.Flavor switch
{
case DeflateFlavor.Store:
DeflateFunction = DeflateNone;
break;
case DeflateFlavor.Fast:
DeflateFunction = DeflateFast;
break;
case DeflateFlavor.Slow:
DeflateFunction = DeflateSlow;
break;
}
}
DeflateFlavor.Store => DeflateNone,
DeflateFlavor.Fast => DeflateFast,
DeflateFlavor.Slow => DeflateSlow,
_ => DeflateFunction
};
internal int SetParams(CompressionLevel level, CompressionStrategy strategy)
{
@@ -1959,7 +1951,9 @@ internal sealed partial class DeflateManager
// returning Z_STREAM_END instead of Z_BUFF_ERROR.
}
else if (
_codec.AvailableBytesIn == 0 && (int)flush <= old_flush && flush != FlushType.Finish
_codec.AvailableBytesIn == 0
&& (int)flush <= old_flush
&& flush != FlushType.Finish
)
{
// workitem 8557

View File

@@ -366,9 +366,5 @@ public class DeflateStream : Stream
#endregion
public MemoryStream InputBuffer =>
new MemoryStream(
_baseStream._z.InputBuffer,
_baseStream._z.NextIn,
_baseStream._z.AvailableBytesIn
);
new(_baseStream._z.InputBuffer, _baseStream._z.NextIn, _baseStream._z.AvailableBytesIn);
}

View File

@@ -35,15 +35,7 @@ namespace SharpCompress.Compressors.Deflate;
public class GZipStream : Stream
{
internal static readonly DateTime UNIX_EPOCH = new DateTime(
1970,
1,
1,
0,
0,
0,
DateTimeKind.Utc
);
internal static readonly DateTime UNIX_EPOCH = new(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
private string? _comment;
private string? _fileName;

View File

@@ -64,7 +64,6 @@
// -----------------------------------------------------------------------
using System;
using SharpCompress.Algorithms;
namespace SharpCompress.Compressors.Deflate;
@@ -106,11 +105,11 @@ internal sealed class InflateBlocks
internal int[] blens; // bit lengths of codes
internal uint check; // check on output
internal object checkfn; // check function
internal InflateCodes codes = new InflateCodes(); // if CODES, current state
internal InflateCodes codes = new(); // if CODES, current state
internal int end; // one byte after sliding window
internal int[] hufts; // single malloc for tree space
internal int index; // index into blens (or border)
internal InfTree inftree = new InfTree();
internal InfTree inftree = new();
internal int last; // true if this block is the last block
internal int left; // if STORED, bytes left to copy
private InflateBlockMode mode; // current inflate_block mode

View File

@@ -30,8 +30,8 @@ using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Tar.Headers;
using System.Text;
using SharpCompress.Common.Tar.Headers;
namespace SharpCompress.Compressors.Deflate;
@@ -102,7 +102,7 @@ internal class ZlibBaseStream : Stream
{
if (_z is null)
{
bool wantRfc1950Header = (_flavor == ZlibStreamFlavor.ZLIB);
var wantRfc1950Header = (_flavor == ZlibStreamFlavor.ZLIB);
_z = new ZlibCodec();
if (_compressionMode == CompressionMode.Decompress)
{
@@ -147,13 +147,13 @@ internal class ZlibBaseStream : Stream
z.InputBuffer = buffer;
_z.NextIn = offset;
_z.AvailableBytesIn = count;
bool done = false;
var done = false;
do
{
_z.OutputBuffer = workingBuffer;
_z.NextOut = 0;
_z.AvailableBytesOut = _workingBuffer.Length;
int rc = (_wantCompress) ? _z.Deflate(_flushMode) : _z.Inflate(_flushMode);
var rc = (_wantCompress) ? _z.Deflate(_flushMode) : _z.Inflate(_flushMode);
if (rc != ZlibConstants.Z_OK && rc != ZlibConstants.Z_STREAM_END)
{
throw new ZlibException((_wantCompress ? "de" : "in") + "flating: " + _z.Message);
@@ -181,18 +181,18 @@ internal class ZlibBaseStream : Stream
if (_streamMode == StreamMode.Writer)
{
bool done = false;
var done = false;
do
{
_z.OutputBuffer = workingBuffer;
_z.NextOut = 0;
_z.AvailableBytesOut = _workingBuffer.Length;
int rc =
var rc =
(_wantCompress) ? _z.Deflate(FlushType.Finish) : _z.Inflate(FlushType.Finish);
if (rc != ZlibConstants.Z_STREAM_END && rc != ZlibConstants.Z_OK)
{
string verb = (_wantCompress ? "de" : "in") + "flating";
var verb = (_wantCompress ? "de" : "in") + "flating";
if (_z.Message is null)
{
throw new ZlibException(String.Format("{0}: (rc = {1})", verb, rc));
@@ -225,7 +225,7 @@ internal class ZlibBaseStream : Stream
Span<byte> intBuf = stackalloc byte[4];
BinaryPrimitives.WriteInt32LittleEndian(intBuf, crc.Crc32Result);
_stream.Write(intBuf);
int c2 = (int)(crc.TotalBytesRead & 0x00000000FFFFFFFF);
var c2 = (int)(crc.TotalBytesRead & 0x00000000FFFFFFFF);
BinaryPrimitives.WriteInt32LittleEndian(intBuf, c2);
_stream.Write(intBuf);
}
@@ -256,8 +256,8 @@ internal class ZlibBaseStream : Stream
{
// Make sure we have read to the end of the stream
_z.InputBuffer.AsSpan(_z.NextIn, _z.AvailableBytesIn).CopyTo(trailer);
int bytesNeeded = 8 - _z.AvailableBytesIn;
int bytesRead = _stream.Read(
var bytesNeeded = 8 - _z.AvailableBytesIn;
var bytesRead = _stream.Read(
trailer.Slice(_z.AvailableBytesIn, bytesNeeded)
);
if (bytesNeeded != bytesRead)
@@ -275,10 +275,10 @@ internal class ZlibBaseStream : Stream
_z.InputBuffer.AsSpan(_z.NextIn, trailer.Length).CopyTo(trailer);
}
Int32 crc32_expected = BinaryPrimitives.ReadInt32LittleEndian(trailer);
Int32 crc32_actual = crc.Crc32Result;
Int32 isize_expected = BinaryPrimitives.ReadInt32LittleEndian(trailer.Slice(4));
Int32 isize_actual = (Int32)(_z.TotalBytesOut & 0x00000000FFFFFFFF);
var crc32_expected = BinaryPrimitives.ReadInt32LittleEndian(trailer);
var crc32_actual = crc.Crc32Result;
var isize_expected = BinaryPrimitives.ReadInt32LittleEndian(trailer.Slice(4));
var isize_actual = (Int32)(_z.TotalBytesOut & 0x00000000FFFFFFFF);
if (crc32_actual != crc32_expected)
{
@@ -380,11 +380,11 @@ internal class ZlibBaseStream : Stream
private string ReadZeroTerminatedString()
{
var list = new List<byte>();
bool done = false;
var done = false;
do
{
// workitem 7740
int n = _stream.Read(_buf1, 0, 1);
var n = _stream.Read(_buf1, 0, 1);
if (n != 1)
{
throw new ZlibException("Unexpected EOF reading GZIP header.");
@@ -398,17 +398,17 @@ internal class ZlibBaseStream : Stream
list.Add(_buf1[0]);
}
} while (!done);
byte[] buffer = list.ToArray();
var buffer = list.ToArray();
return _encoding.GetString(buffer, 0, buffer.Length);
}
private int _ReadAndValidateGzipHeader()
{
int totalBytesRead = 0;
var totalBytesRead = 0;
// read the header on the first read
Span<byte> header = stackalloc byte[10];
int n = _stream.Read(header);
var n = _stream.Read(header);
// workitem 8501: handle edge case (decompress empty stream)
if (n == 0)
@@ -426,7 +426,7 @@ internal class ZlibBaseStream : Stream
throw new ZlibException("Bad GZIP header.");
}
int timet = BinaryPrimitives.ReadInt32LittleEndian(header.Slice(4));
var timet = BinaryPrimitives.ReadInt32LittleEndian(header.Slice(4));
_GzipMtime = TarHeader.EPOCH.AddSeconds(timet);
totalBytesRead += n;
if ((header[3] & 0x04) == 0x04)
@@ -435,8 +435,8 @@ internal class ZlibBaseStream : Stream
n = _stream.Read(header.Slice(0, 2)); // 2-byte length field
totalBytesRead += n;
short extraLength = (short)(header[0] + header[1] * 256);
byte[] extra = new byte[extraLength];
var extraLength = (short)(header[0] + header[1] * 256);
var extra = new byte[extraLength];
n = _stream.Read(extra, 0, extra.Length);
if (n != extraLength)
{
@@ -498,7 +498,7 @@ internal class ZlibBaseStream : Stream
throw new ZlibException("Cannot Read after Writing.");
}
int rc = 0;
var rc = 0;
// set up the output of the deflate/inflate codec:
_z.OutputBuffer = buffer;

View File

@@ -4,11 +4,11 @@
#nullable disable
using SharpCompress.Common.Zip;
using System;
using System.Diagnostics;
using System.IO;
using System.Runtime.CompilerServices;
using SharpCompress.Common.Zip;
namespace SharpCompress.Compressors.Deflate64;
@@ -118,8 +118,8 @@ public sealed class Deflate64Stream : Stream
EnsureNotDisposed();
int bytesRead;
int currentOffset = offset;
int remainingCount = count;
var currentOffset = offset;
var remainingCount = count;
while (true)
{
@@ -142,7 +142,7 @@ public sealed class Deflate64Stream : Stream
break;
}
int bytes = _stream.Read(_buffer, 0, _buffer.Length);
var bytes = _stream.Read(_buffer, 0, _buffer.Length);
if (bytes <= 0)
{
break;

View File

@@ -22,7 +22,7 @@ internal sealed class DeflateInput
Debug.Assert(StartIndex + Count <= Buffer.Length, "Input buffer is in invalid state!");
}
internal InputState DumpState() => new InputState(Count, StartIndex);
internal InputState DumpState() => new(Count, StartIndex);
internal void RestoreState(InputState state)
{

View File

@@ -42,11 +42,9 @@ internal sealed class HuffmanTree
private readonly int _tableMask;
// huffman tree for static block
public static HuffmanTree StaticLiteralLengthTree { get; } =
new HuffmanTree(GetStaticLiteralTreeLength());
public static HuffmanTree StaticLiteralLengthTree { get; } = new(GetStaticLiteralTreeLength());
public static HuffmanTree StaticDistanceTree { get; } =
new HuffmanTree(GetStaticDistanceTreeLength());
public static HuffmanTree StaticDistanceTree { get; } = new(GetStaticDistanceTreeLength());
public HuffmanTree(byte[] codeLengths)
{

View File

@@ -243,8 +243,8 @@ internal sealed class InflaterManaged
private void Reset() =>
_state = //_hasFormatReader ?
//InflaterState.ReadingHeader : // start by reading Header info
InflaterState.ReadingBFinal; // start by reading BFinal bit
//InflaterState.ReadingHeader : // start by reading Header info
InflaterState.ReadingBFinal; // start by reading BFinal bit
public void SetInput(byte[] inputBytes, int offset, int length) =>
_input.SetInput(inputBytes, offset, length); // append the bytes

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.IO;
namespace SharpCompress.Compressors.Filters;
@@ -79,7 +79,7 @@ internal class BCJ2Filter : Stream
public override bool CanWrite => false;
public override void Flush() => throw new NotSupportedException();
public override void Flush() { }
public override long Length => _baseStream.Length + _data1.Length + _data2.Length;

View File

@@ -18,7 +18,7 @@ internal class BCJFilterARM : Filter
{
if ((buffer[i + 3] & 0xFF) == 0xEB)
{
int src =
var src =
((buffer[i + 2] & 0xFF) << 16)
| ((buffer[i + 1] & 0xFF) << 8)
| (buffer[i] & 0xFF);

View File

@@ -18,7 +18,7 @@ internal class BCJFilterARMT : Filter
{
if ((buffer[i + 1] & 0xF8) == 0xF0 && (buffer[i + 3] & 0xF8) == 0xF8)
{
int src =
var src =
((buffer[i + 1] & 0x07) << 19)
| ((buffer[i] & 0xFF) << 11)
| ((buffer[i + 3] & 0x07) << 8)

View File

@@ -52,29 +52,29 @@ internal class BCJFilterIA64 : Filter
for (i = offset; i <= end; i += 16)
{
int instrTemplate = buffer[i] & 0x1F;
int mask = BRANCH_TABLE[instrTemplate];
var instrTemplate = buffer[i] & 0x1F;
var mask = BRANCH_TABLE[instrTemplate];
for (int slot = 0, bitPos = 5; slot < 3; ++slot, bitPos += 41)
{
if (((mask >>> slot) & 1) == 0)
continue;
int bytePos = bitPos >>> 3;
int bitRes = bitPos & 7;
var bytePos = bitPos >>> 3;
var bitRes = bitPos & 7;
long instr = 0;
for (int j = 0; j < 6; ++j)
for (var j = 0; j < 6; ++j)
{
instr |= (buffer[i + bytePos + j] & 0xFFL) << (8 * j);
}
long instrNorm = instr >>> bitRes;
var instrNorm = instr >>> bitRes;
if (((instrNorm >>> 37) & 0x0F) != 0x05 || ((instrNorm >>> 9) & 0x07) != 0x00)
continue;
int src = (int)((instrNorm >>> 13) & 0x0FFFFF);
var src = (int)((instrNorm >>> 13) & 0x0FFFFF);
src |= ((int)(instrNorm >>> 36) & 1) << 20;
src <<= 4;
@@ -93,7 +93,7 @@ internal class BCJFilterIA64 : Filter
instr &= (1 << bitRes) - 1;
instr |= instrNorm << bitRes;
for (int j = 0; j < 6; ++j)
for (var j = 0; j < 6; ++j)
{
buffer[i + bytePos + j] = (byte)(instr >>> (8 * j));
}

View File

@@ -18,7 +18,7 @@ internal class BCJFilterPPC : Filter
{
if ((buffer[i] & 0xFC) == 0x48 && (buffer[i + 3] & 0x03) == 0x01)
{
int src =
var src =
((buffer[i] & 0x03) << 24)
| ((buffer[i + 1] & 0xFF) << 16)
| ((buffer[i + 2] & 0xFF) << 8)

View File

@@ -21,7 +21,7 @@ internal class BCJFilterSPARC : Filter
|| (buffer[i] == 0x7F && (buffer[i + 1] & 0xC0) == 0xC0)
)
{
int src =
var src =
((buffer[i] & 0xFF) << 24)
| ((buffer[i + 1] & 0xFF) << 16)
| ((buffer[i + 2] & 0xFF) << 8)

View File

@@ -24,40 +24,27 @@ public sealed class BranchExecFilter
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static bool X86TestByte(byte b)
{
return b == 0x00 || b == 0xFF;
}
private static bool X86TestByte(byte b) => b == 0x00 || b == 0xFF;
//Replaced X86Converter with bcj_x86() - https://github.com/torvalds/linux/blob/master/lib/xz/xz_dec_bcj.c
//This was to fix an issue decoding a Test zip made with WinZip (that 7zip was also able to read).
//The previous version of the code would corrupt 2 bytes in the Test.exe at 0x6CF9 (3D6D - should be 4000) - Test zip: WinZip27.Xz.zipx
public static void X86Converter(byte[] buf, uint ip, ref uint state)
{
bool[] mask_to_allowed_status = new[]
{
true,
true,
true,
false,
true,
false,
false,
false
};
var mask_to_allowed_status = new[] { true, true, true, false, true, false, false, false };
byte[] mask_to_bit_num = new byte[] { 0, 1, 2, 2, 3, 3, 3, 3 };
var mask_to_bit_num = new byte[] { 0, 1, 2, 2, 3, 3, 3, 3 };
int i;
int prev_pos = -1;
uint prev_mask = state & 7;
var prev_pos = -1;
var prev_mask = state & 7;
uint src;
uint dest;
uint j;
byte b;
uint pos = ip;
var pos = ip;
uint size = (uint)buf.Length;
var size = (uint)buf.Length;
if (size <= 4)
return;

View File

@@ -23,9 +23,9 @@ namespace SharpCompress.Compressors.Filters
protected override int Transform(byte[] buffer, int offset, int count)
{
int end = offset + count;
var end = offset + count;
for (int i = offset; i < end; i++)
for (var i = offset; i < end; i++)
{
buffer[i] += _history[(_distance + _position--) & DISTANCE_MASK];
_history[_position & DISTANCE_MASK] = buffer[i];

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.IO;
namespace SharpCompress.Compressors.Filters;
@@ -40,7 +40,7 @@ internal abstract class Filter : Stream
public override bool CanWrite => _isEncoder;
public override void Flush() => throw new NotSupportedException();
public override void Flush() { }
public override long Length => _baseStream.Length;

View File

@@ -35,10 +35,10 @@ internal sealed class AesDecoderStream : DecoderStream2
throw new NotSupportedException("AES decoder does not support padding.");
}
Init(info, out int numCyclesPower, out byte[] salt, out byte[] seed);
Init(info, out var numCyclesPower, out var salt, out var seed);
byte[] password = Encoding.Unicode.GetBytes(pass.CryptoGetTextPassword());
byte[]? key = InitKey(numCyclesPower, salt, password);
var password = Encoding.Unicode.GetBytes(pass.CryptoGetTextPassword());
var key = InitKey(numCyclesPower, salt, password);
if (key == null)
{
throw new InvalidOperationException("Initialized with null key");
@@ -100,7 +100,7 @@ internal sealed class AesDecoderStream : DecoderStream2
do
{
int read = mStream.Read(mBuffer, mEnding, mBuffer.Length - mEnding);
var read = mStream.Read(mBuffer, mEnding, mBuffer.Length - mEnding);
if (read == 0)
{
// We are not done decoding and have less than 16 bytes.
@@ -133,7 +133,7 @@ internal sealed class AesDecoderStream : DecoderStream2
}
// Otherwise we transform directly into the target buffer.
int processed = mDecoder.TransformBlock(mBuffer, mOffset, count & ~15, buffer, offset);
var processed = mDecoder.TransformBlock(mBuffer, mOffset, count & ~15, buffer, offset);
mOffset += processed;
mWritten += processed;
return processed;
@@ -143,7 +143,7 @@ internal sealed class AesDecoderStream : DecoderStream2
private void Init(byte[] info, out int numCyclesPower, out byte[] salt, out byte[] iv)
{
byte bt = info[0];
var bt = info[0];
numCyclesPower = bt & 0x3F;
if ((bt & 0xC0) == 0)
@@ -153,14 +153,14 @@ internal sealed class AesDecoderStream : DecoderStream2
return;
}
int saltSize = (bt >> 7) & 1;
int ivSize = (bt >> 6) & 1;
var saltSize = (bt >> 7) & 1;
var ivSize = (bt >> 6) & 1;
if (info.Length == 1)
{
throw new InvalidOperationException();
}
byte bt2 = info[1];
var bt2 = info[1];
saltSize += (bt2 >> 4);
ivSize += (bt2 & 15);
if (info.Length < 2 + saltSize + ivSize)
@@ -169,13 +169,13 @@ internal sealed class AesDecoderStream : DecoderStream2
}
salt = new byte[saltSize];
for (int i = 0; i < saltSize; i++)
for (var i = 0; i < saltSize; i++)
{
salt[i] = info[i + 2];
}
iv = new byte[16];
for (int i = 0; i < ivSize; i++)
for (var i = 0; i < ivSize; i++)
{
iv[i] = info[i + saltSize + 2];
}
@@ -198,7 +198,7 @@ internal sealed class AesDecoderStream : DecoderStream2
key[pos] = salt[pos];
}
for (int i = 0; i < pass.Length && pos < 32; i++)
for (var i = 0; i < pass.Length && pos < 32; i++)
{
key[pos++] = pass[i];
}
@@ -208,9 +208,9 @@ internal sealed class AesDecoderStream : DecoderStream2
else
{
#if NETSTANDARD2_0
using IncrementalHash sha = IncrementalHash.CreateHash(HashAlgorithmName.SHA256);
byte[] counter = new byte[8];
long numRounds = 1L << mNumCyclesPower;
using var sha = IncrementalHash.CreateHash(HashAlgorithmName.SHA256);
var counter = new byte[8];
var numRounds = 1L << mNumCyclesPower;
for (long round = 0; round < numRounds; round++)
{
sha.AppendData(salt, 0, salt.Length);
@@ -219,7 +219,7 @@ internal sealed class AesDecoderStream : DecoderStream2
// This mirrors the counter so we don't have to convert long to byte[] each round.
// (It also ensures the counter is little endian, which BitConverter does not.)
for (int i = 0; i < 8; i++)
for (var i = 0; i < 8; i++)
{
if (++counter[i] != 0)
{
@@ -230,8 +230,8 @@ internal sealed class AesDecoderStream : DecoderStream2
return sha.GetHashAndReset();
#else
using var sha = SHA256.Create();
byte[] counter = new byte[8];
long numRounds = 1L << mNumCyclesPower;
var counter = new byte[8];
var numRounds = 1L << mNumCyclesPower;
for (long round = 0; round < numRounds; round++)
{
sha.TransformBlock(salt, 0, salt.Length, null, 0);
@@ -240,7 +240,7 @@ internal sealed class AesDecoderStream : DecoderStream2
// This mirrors the counter so we don't have to convert long to byte[] each round.
// (It also ensures the counter is little endian, which BitConverter does not.)
for (int i = 0; i < 8; i++)
for (var i = 0; i < 8; i++)
{
if (++counter[i] != 0)
{
@@ -261,7 +261,7 @@ internal sealed class AesDecoderStream : DecoderStream2
// Just transform as much as possible so we can feed from it as long as possible.
if (mUnderflow == 0)
{
int blockSize = (mEnding - mOffset) & ~15;
var blockSize = (mEnding - mOffset) & ~15;
mUnderflow = mDecoder.TransformBlock(mBuffer, mOffset, blockSize, mBuffer, mOffset);
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.IO;
using SharpCompress.Common.SevenZip;
using SharpCompress.Compressors.LZMA.Utilites;
@@ -14,7 +14,7 @@ internal abstract class DecoderStream2 : Stream
public override bool CanWrite => false;
public override void Flush() => throw new NotSupportedException();
public override void Flush() { }
public override long Length => throw new NotSupportedException();

View File

@@ -6,7 +6,7 @@ namespace SharpCompress.Compressors.LZMA;
internal static class Log
{
private static readonly Stack<string> INDENT = new Stack<string>();
private static readonly Stack<string> INDENT = new();
private static bool NEEDS_INDENT = true;
static Log() => INDENT.Push("");

View File

@@ -11,11 +11,11 @@ public class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Stream
{
private class LenDecoder
{
private BitDecoder _choice = new BitDecoder();
private BitDecoder _choice2 = new BitDecoder();
private BitDecoder _choice = new();
private BitDecoder _choice2 = new();
private readonly BitTreeDecoder[] _lowCoder = new BitTreeDecoder[Base.K_NUM_POS_STATES_MAX];
private readonly BitTreeDecoder[] _midCoder = new BitTreeDecoder[Base.K_NUM_POS_STATES_MAX];
private BitTreeDecoder _highCoder = new BitTreeDecoder(Base.K_NUM_HIGH_LEN_BITS);
private BitTreeDecoder _highCoder = new(Base.K_NUM_HIGH_LEN_BITS);
private uint _numPosStates;
public void Create(uint numPosStates)
@@ -173,18 +173,18 @@ public class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Stream
Base.K_NUM_FULL_DISTANCES - Base.K_END_POS_MODEL_INDEX
];
private BitTreeDecoder _posAlignDecoder = new BitTreeDecoder(Base.K_NUM_ALIGN_BITS);
private BitTreeDecoder _posAlignDecoder = new(Base.K_NUM_ALIGN_BITS);
private readonly LenDecoder _lenDecoder = new LenDecoder();
private readonly LenDecoder _repLenDecoder = new LenDecoder();
private readonly LenDecoder _lenDecoder = new();
private readonly LenDecoder _repLenDecoder = new();
private readonly LiteralDecoder _literalDecoder = new LiteralDecoder();
private readonly LiteralDecoder _literalDecoder = new();
private int _dictionarySize;
private uint _posStateMask;
private Base.State _state = new Base.State();
private Base.State _state = new();
private uint _rep0,
_rep1,
_rep2,
@@ -318,9 +318,8 @@ public class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Stream
{
var posState = (uint)outWindow._total & _posStateMask;
if (
_isMatchDecoders[
(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState
].Decode(rangeDecoder) == 0
_isMatchDecoders[(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState]
.Decode(rangeDecoder) == 0
)
{
byte b;
@@ -355,7 +354,8 @@ public class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Stream
if (
_isRep0LongDecoders[
(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState
].Decode(rangeDecoder) == 0
]
.Decode(rangeDecoder) == 0
)
{
_state.UpdateShortRep();

View File

@@ -61,7 +61,7 @@ internal class Encoder : ICoder, ISetCoderProperties, IWriteCoderProperties
return (uint)(G_FAST_POS[pos >> 26] + 52);
}
private Base.State _state = new Base.State();
private Base.State _state = new();
private byte _previousByte;
private readonly uint[] _repDistances = new uint[Base.K_NUM_REP_DISTANCES];
@@ -191,15 +191,15 @@ internal class Encoder : ICoder, ISetCoderProperties, IWriteCoderProperties
private class LenEncoder
{
private BitEncoder _choice = new BitEncoder();
private BitEncoder _choice2 = new BitEncoder();
private BitEncoder _choice = new();
private BitEncoder _choice2 = new();
private readonly BitTreeEncoder[] _lowCoder = new BitTreeEncoder[
Base.K_NUM_POS_STATES_ENCODING_MAX
];
private readonly BitTreeEncoder[] _midCoder = new BitTreeEncoder[
Base.K_NUM_POS_STATES_ENCODING_MAX
];
private BitTreeEncoder _highCoder = new BitTreeEncoder(Base.K_NUM_HIGH_LEN_BITS);
private BitTreeEncoder _highCoder = new(Base.K_NUM_HIGH_LEN_BITS);
public LenEncoder()
{
@@ -359,7 +359,7 @@ internal class Encoder : ICoder, ISetCoderProperties, IWriteCoderProperties
private readonly Optimal[] _optimum = new Optimal[K_NUM_OPTS];
private BinTree _matchFinder;
private readonly RangeCoder.Encoder _rangeEncoder = new RangeCoder.Encoder();
private readonly RangeCoder.Encoder _rangeEncoder = new();
private readonly BitEncoder[] _isMatch = new BitEncoder[
Base.K_NUM_STATES << Base.K_NUM_POS_STATES_BITS_MAX
@@ -382,12 +382,12 @@ internal class Encoder : ICoder, ISetCoderProperties, IWriteCoderProperties
Base.K_NUM_FULL_DISTANCES - Base.K_END_POS_MODEL_INDEX
];
private BitTreeEncoder _posAlignEncoder = new BitTreeEncoder(Base.K_NUM_ALIGN_BITS);
private BitTreeEncoder _posAlignEncoder = new(Base.K_NUM_ALIGN_BITS);
private readonly LenPriceTableEncoder _lenEncoder = new LenPriceTableEncoder();
private readonly LenPriceTableEncoder _repMatchLenEncoder = new LenPriceTableEncoder();
private readonly LenPriceTableEncoder _lenEncoder = new();
private readonly LenPriceTableEncoder _repMatchLenEncoder = new();
private readonly LiteralEncoder _literalEncoder = new LiteralEncoder();
private readonly LiteralEncoder _literalEncoder = new();
private readonly uint[] _matchDistances = new uint[(Base.K_MATCH_MAX_LEN * 2) + 2];
@@ -553,9 +553,8 @@ internal class Encoder : ICoder, ISetCoderProperties, IWriteCoderProperties
if (repIndex == 0)
{
price = _isRepG0[state._index].GetPrice0();
price += _isRep0Long[
(state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState
].GetPrice1();
price += _isRep0Long[(state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState]
.GetPrice1();
}
else
{
@@ -713,9 +712,8 @@ internal class Encoder : ICoder, ISetCoderProperties, IWriteCoderProperties
.GetPrice(!_state.IsCharState(), matchByte, currentByte);
_optimum[1].MakeAsChar();
var matchPrice = _isMatch[
(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState
].GetPrice1();
var matchPrice = _isMatch[(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState]
.GetPrice1();
var repMatchPrice = matchPrice + _isRep[_state._index].GetPrice1();
if (matchByte == currentByte)
@@ -995,9 +993,8 @@ internal class Encoder : ICoder, ISetCoderProperties, IWriteCoderProperties
var posStateNext = (position + 1) & _posStateMask;
var nextRepMatchPrice =
curAnd1Price
+ _isMatch[
(state2._index << Base.K_NUM_POS_STATES_BITS_MAX) + posStateNext
].GetPrice1()
+ _isMatch[(state2._index << Base.K_NUM_POS_STATES_BITS_MAX) + posStateNext]
.GetPrice1()
+ _isRep[state2._index].GetPrice1();
{
var offset = cur + 1 + lenTest2;
@@ -1069,7 +1066,8 @@ internal class Encoder : ICoder, ISetCoderProperties, IWriteCoderProperties
+ GetRepPrice(repIndex, lenTest, state, posState)
+ _isMatch[
(state2._index << Base.K_NUM_POS_STATES_BITS_MAX) + posStateNext
].GetPrice0()
]
.GetPrice0()
+ _literalEncoder
.GetSubCoder(
position + lenTest,
@@ -1088,7 +1086,8 @@ internal class Encoder : ICoder, ISetCoderProperties, IWriteCoderProperties
curAndLenCharPrice
+ _isMatch[
(state2._index << Base.K_NUM_POS_STATES_BITS_MAX) + posStateNext
].GetPrice1();
]
.GetPrice1();
var nextRepMatchPrice = nextMatchPrice + _isRep[state2._index].GetPrice1();
// for(; lenTest2 >= 2; lenTest2--)
@@ -1174,7 +1173,8 @@ internal class Encoder : ICoder, ISetCoderProperties, IWriteCoderProperties
+ _isMatch[
(state2._index << Base.K_NUM_POS_STATES_BITS_MAX)
+ posStateNext
].GetPrice0()
]
.GetPrice0()
+ _literalEncoder
.GetSubCoder(
position + lenTest,
@@ -1194,7 +1194,8 @@ internal class Encoder : ICoder, ISetCoderProperties, IWriteCoderProperties
+ _isMatch[
(state2._index << Base.K_NUM_POS_STATES_BITS_MAX)
+ posStateNext
].GetPrice1();
]
.GetPrice1();
var nextRepMatchPrice =
nextMatchPrice + _isRep[state2._index].GetPrice1();
@@ -1243,10 +1244,8 @@ internal class Encoder : ICoder, ISetCoderProperties, IWriteCoderProperties
return;
}
_isMatch[(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState].Encode(
_rangeEncoder,
1
);
_isMatch[(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState]
.Encode(_rangeEncoder, 1);
_isRep[_state._index].Encode(_rangeEncoder, 0);
_state.UpdateMatch();
var len = Base.K_MATCH_MIN_LEN;
@@ -1321,10 +1320,8 @@ internal class Encoder : ICoder, ISetCoderProperties, IWriteCoderProperties
// it's not used
ReadMatchDistances(out var len, out var numDistancePairs);
var posState = (uint)(_nowPos64) & _posStateMask;
_isMatch[(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState].Encode(
_rangeEncoder,
0
);
_isMatch[(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState]
.Encode(_rangeEncoder, 0);
_state.UpdateChar();
var curByte = _matchFinder.GetIndexByte((int)(0 - _additionalOffset));
_literalEncoder
@@ -1730,7 +1727,7 @@ internal class Encoder : ICoder, ISetCoderProperties, IWriteCoderProperties
ReadOnlySpan<object> properties
)
{
for (int i = 0; i < properties.Length; i++)
for (var i = 0; i < properties.Length; i++)
{
var prop = properties[i];
switch (propIDs[i])

View File

@@ -14,8 +14,8 @@ public class LzmaStream : Stream
private readonly long _outputSize;
private readonly int _dictionarySize;
private readonly OutWindow _outWindow = new OutWindow();
private readonly RangeCoder.Decoder _rangeDecoder = new RangeCoder.Decoder();
private readonly OutWindow _outWindow = new();
private readonly RangeCoder.Decoder _rangeDecoder = new();
private Decoder _decoder;
private long _position;
@@ -207,14 +207,23 @@ public class LzmaStream : Stream
if (_availableBytes == 0 && !_uncompressedChunk)
{
_rangeDecoder.ReleaseStream();
// Check range corruption scenario
if (
!_rangeDecoder.IsFinished
|| (_rangeDecoderLimit >= 0 && _rangeDecoder._total != _rangeDecoderLimit)
)
{
throw new DataErrorException();
// Stream might have End Of Stream marker
_outWindow.SetLimit(toProcess + 1);
if (!_decoder.Code(_dictionarySize, _outWindow, _rangeDecoder))
{
_rangeDecoder.ReleaseStream();
throw new DataErrorException();
}
}
_rangeDecoder.ReleaseStream();
_inputPosition += _rangeDecoder._total;
if (_outWindow.HasPending)
{

View File

@@ -0,0 +1,65 @@
namespace SharpCompress.Compressors.Lzw
{
    /// <summary>
    /// Constants describing the header and code stream of the LZW (LZC /
    /// Unix 'compress', .Z file) format.
    /// </summary>
    [System.Diagnostics.CodeAnalysis.SuppressMessage(
        "Naming",
        "CA1707:Identifiers should not contain underscores",
        Justification = "kept for backwards compatibility"
    )]
    public sealed class LzwConstants
    {
        /// <summary>
        /// Magic number found at start of LZW header: 0x1f 0x9d
        /// </summary>
        public const int MAGIC = 0x1f9d;

        /// <summary>
        /// Maximum number of bits per code supported by this implementation.
        /// </summary>
        public const int MAX_BITS = 16;

        /* 3rd header byte:
         * bit 0..4 Number of compression bits
         * bit 5    Extended header
         * bit 6    Free
         * bit 7    Block mode
         */

        /// <summary>
        /// Mask for 'number of compression bits' in the third header byte.
        /// </summary>
        public const int BIT_MASK = 0x1f;

        /// <summary>
        /// Indicates the presence of a fourth header byte (bit 5).
        /// </summary>
        public const int EXTENDED_MASK = 0x20;

        //public const int FREE_MASK = 0x40;

        /// <summary>
        /// Reserved bits (bits 5 and 6); streams with any of these set are rejected.
        /// </summary>
        public const int RESERVED_MASK = 0x60;

        /// <summary>
        /// Block compression: if table is full and compression rate is dropping,
        /// clear the dictionary (bit 7).
        /// </summary>
        public const int BLOCK_MODE_MASK = 0x80;

        /// <summary>
        /// LZW file header size (in bytes): two magic bytes plus one flags byte.
        /// </summary>
        public const int HDR_SIZE = 3;

        /// <summary>
        /// Initial number of bits per code.
        /// </summary>
        public const int INIT_BITS = 9;

        // Constants-only holder: private constructor prevents instantiation.
        private LzwConstants() { }
    }
}

View File

@@ -0,0 +1,597 @@
using System;
using System.IO;
using SharpCompress.Common;
namespace SharpCompress.Compressors.Lzw
{
/// <summary>
/// This filter stream is used to decompress a LZW format stream.
/// Specifically, a stream that uses the LZC compression method.
/// This file format is usually associated with the .Z file extension.
///
/// See http://en.wikipedia.org/wiki/Compress
/// See http://wiki.wxwidgets.org/Development:_Z_File_Format
///
/// The file header consists of 3 (or optionally 4) bytes. The first two bytes
/// contain the magic marker "0x1f 0x9d", followed by a byte of flags.
///
/// Based on Java code by Ronald Tschalar, which in turn was based on the unlzw.c
/// code in the gzip package.
/// </summary>
/// <example> This sample shows how to unzip a compressed file
/// <code>
/// using System;
/// using System.IO;
///
/// using ICSharpCode.SharpZipLib.Core;
/// using ICSharpCode.SharpZipLib.LZW;
///
/// class MainClass
/// {
/// public static void Main(string[] args)
/// {
/// using (Stream inStream = new LzwInputStream(File.OpenRead(args[0])))
/// using (FileStream outStream = File.Create(Path.GetFileNameWithoutExtension(args[0]))) {
/// byte[] buffer = new byte[4096];
/// StreamUtils.Copy(inStream, outStream, buffer);
/// // OR
/// inStream.Read(buffer, 0, buffer.Length);
/// // now do something with the buffer
/// }
/// }
/// }
/// </code>
/// </example>
public class LzwStream : Stream
{
/// <summary>
/// Probes whether <paramref name="stream"/> starts with a valid LZW (.Z) header.
/// Note that this consumes <see cref="LzwConstants.HDR_SIZE"/> bytes from the
/// stream without seeking back.
/// </summary>
/// <param name="stream">The stream to probe.</param>
/// <returns>True when a valid LZW magic marker was read; false otherwise.</returns>
public static bool IsLzwStream(Stream stream)
{
    try
    {
        var hdr = new byte[LzwConstants.HDR_SIZE];

        // Stream.Read may return fewer bytes than requested and returns 0 at
        // end-of-stream — never a negative value — so the previous `result < 0`
        // check could never fire and short reads went undetected. Loop until
        // the header buffer is full or the stream ends.
        var total = 0;
        while (total < hdr.Length)
        {
            var read = stream.Read(hdr, total, hdr.Length - total);
            if (read <= 0)
            {
                throw new IncompleteArchiveException("Failed to read LZW header");
            }
            total += read;
        }

        // Check the magic marker
        if (hdr[0] != (LzwConstants.MAGIC >> 8) || hdr[1] != (LzwConstants.MAGIC & 0xff))
        {
            throw new IncompleteArchiveException(
                String.Format(
                    "Wrong LZW header. Magic bytes don't match. 0x{0:x2} 0x{1:x2}",
                    hdr[0],
                    hdr[1]
                )
            );
        }
    }
    catch (Exception)
    {
        return false;
    }
    return true;
}
/// <summary>
/// Gets or sets a flag indicating ownership of the underlying stream.
/// When true, <see cref="Stream.Dispose()"/> also closes the underlying stream.
/// </summary>
/// <remarks>The default value is false.</remarks>
public bool IsStreamOwner { get; set; } = false;

/// <summary>
/// Creates a stream that decompresses LZW (.Z) data read from
/// <paramref name="baseInputStream"/>. The header is parsed lazily on the
/// first read.
/// </summary>
/// <param name="baseInputStream">
/// The stream to read compressed data from (baseInputStream LZW format)
/// </param>
public LzwStream(Stream baseInputStream)
{
    this.baseInputStream = baseInputStream;
}

/// <summary>
/// Reads a single decompressed byte.
/// </summary>
/// <returns>The byte read (0-255), or -1 at end of stream.</returns>
public override int ReadByte()
{
    var read = Read(one, 0, 1);
    return read == 1 ? one[0] & 0xff : -1;
}
/// <summary>
/// Reads decompressed data into the provided buffer byte array.
/// </summary>
/// <param name="buffer">
/// The array to read and decompress data into
/// </param>
/// <param name="offset">
/// The offset indicating where the data should be placed
/// </param>
/// <param name="count">
/// The number of bytes to decompress
/// </param>
/// <returns>The number of bytes read. Zero signals the end of stream</returns>
public override int Read(byte[] buffer, int offset, int count)
{
    if (!headerParsed)
        ParseHeader();

    if (eof)
        return 0;

    int start = offset;

    /* Using local copies of various variables speeds things up by as
     * much as 30% in Java! Performance not tested in C#.
     * The decoder state lives in fields; it is copied into locals here and
     * written back to the fields at every exit point below.
     */
    int[] lTabPrefix = tabPrefix;
    byte[] lTabSuffix = tabSuffix;
    byte[] lStack = stack;
    int lNBits = nBits;
    int lMaxCode = maxCode;
    int lMaxMaxCode = maxMaxCode;
    int lBitMask = bitMask;
    int lOldCode = oldCode;
    byte lFinChar = finChar;
    int lStackP = stackP;
    int lFreeEnt = freeEnt;
    byte[] lData = data;
    int lBitPos = bitPos;

    // empty stack if stuff still left over from a previous call
    int sSize = lStack.Length - lStackP;
    if (sSize > 0)
    {
        int num = (sSize >= count) ? count : sSize;
        Array.Copy(lStack, lStackP, buffer, offset, num);
        offset += num;
        count -= num;
        lStackP += num;
    }

    if (count == 0)
    {
        stackP = lStackP;
        return offset - start;
    }

    // loop, filling local buffer until enough data has been decompressed.
    // `goto MainLoop` below re-enters here after a code-width change or a
    // CLEAR code, because either event invalidates `bitIn`.
    MainLoop:
    do
    {
        if (end < EXTRA)
        {
            Fill();
        }

        // Number of readable bits in the input buffer: while input keeps
        // arriving, only consume whole lNBits-sized codes; at end of input,
        // allow reading the final partial region.
        int bitIn = (got > 0) ? (end - end % lNBits) << 3 : (end << 3) - (lNBits - 1);

        while (lBitPos < bitIn)
        {
            #region A
            // handle 1-byte reads correctly: save state and return once the
            // caller's buffer has been filled
            if (count == 0)
            {
                nBits = lNBits;
                maxCode = lMaxCode;
                maxMaxCode = lMaxMaxCode;
                bitMask = lBitMask;
                oldCode = lOldCode;
                finChar = lFinChar;
                stackP = lStackP;
                freeEnt = lFreeEnt;
                bitPos = lBitPos;
                return offset - start;
            }

            // check for code-width expansion: once the table outgrows the
            // current code width, round the bit position up to the next
            // lNBits-byte group boundary and widen the codes by one bit
            if (lFreeEnt > lMaxCode)
            {
                int nBytes = lNBits << 3;
                lBitPos = (lBitPos - 1) + nBytes - (lBitPos - 1 + nBytes) % nBytes;

                lNBits++;
                lMaxCode = (lNBits == maxBits) ? lMaxMaxCode : (1 << lNBits) - 1;
                lBitMask = (1 << lNBits) - 1;
                lBitPos = ResetBuf(lBitPos);
                goto MainLoop;
            }
            #endregion A

            #region B
            // read next code: little-endian 24-bit window shifted down to the
            // current bit offset, masked to the current code width
            int pos = lBitPos >> 3;
            int code =
                (
                    (
                        (lData[pos] & 0xFF)
                        | ((lData[pos + 1] & 0xFF) << 8)
                        | ((lData[pos + 2] & 0xFF) << 16)
                    ) >> (lBitPos & 0x7)
                ) & lBitMask;
            lBitPos += lNBits;

            // handle first iteration: the first code must be a literal byte
            if (lOldCode == -1)
            {
                if (code >= 256)
                    throw new IncompleteArchiveException(
                        "corrupt input: " + code + " > 255"
                    );

                lFinChar = (byte)(lOldCode = code);
                buffer[offset++] = lFinChar;
                count--;
                continue;
            }

            // handle CLEAR code: reset the string table and drop back to the
            // initial code width
            if (code == TBL_CLEAR && blockMode)
            {
                Array.Copy(zeros, 0, lTabPrefix, 0, zeros.Length);
                lFreeEnt = TBL_FIRST - 1;

                int nBytes = lNBits << 3;
                lBitPos = (lBitPos - 1) + nBytes - (lBitPos - 1 + nBytes) % nBytes;
                lNBits = LzwConstants.INIT_BITS;
                lMaxCode = (1 << lNBits) - 1;
                lBitMask = lMaxCode;

                // Code tables reset
                lBitPos = ResetBuf(lBitPos);
                goto MainLoop;
            }
            #endregion B

            #region C
            // setup
            int inCode = code;
            lStackP = lStack.Length;

            // Handle KwK case: the code references the entry about to be
            // created, so its last character is the previous final character
            if (code >= lFreeEnt)
            {
                if (code > lFreeEnt)
                {
                    throw new IncompleteArchiveException(
                        "corrupt input: code=" + code + ", freeEnt=" + lFreeEnt
                    );
                }

                lStack[--lStackP] = lFinChar;
                code = lOldCode;
            }

            // Generate output characters in reverse order
            while (code >= 256)
            {
                lStack[--lStackP] = lTabSuffix[code];
                code = lTabPrefix[code];
            }

            lFinChar = lTabSuffix[code];
            buffer[offset++] = lFinChar;
            count--;

            // And put them out in forward order
            sSize = lStack.Length - lStackP;
            int num = (sSize >= count) ? count : sSize;
            Array.Copy(lStack, lStackP, buffer, offset, num);
            offset += num;
            count -= num;
            lStackP += num;
            #endregion C

            #region D
            // generate new entry in table
            if (lFreeEnt < lMaxMaxCode)
            {
                lTabPrefix[lFreeEnt] = lOldCode;
                lTabSuffix[lFreeEnt] = lFinChar;
                lFreeEnt++;
            }

            // Remember previous code
            lOldCode = inCode;

            // if output buffer full, then return.
            // NOTE: this save path omits maxMaxCode — safe, since
            // lMaxMaxCode is never written after ParseHeader.
            if (count == 0)
            {
                nBits = lNBits;
                maxCode = lMaxCode;
                bitMask = lBitMask;
                oldCode = lOldCode;
                finChar = lFinChar;
                stackP = lStackP;
                freeEnt = lFreeEnt;
                bitPos = lBitPos;
                return offset - start;
            }
            #endregion D
        } // while

        lBitPos = ResetBuf(lBitPos);
    } while (got > 0); // do..while

    // end of input: save state, mark EOF, and return whatever was produced
    nBits = lNBits;
    maxCode = lMaxCode;
    bitMask = lBitMask;
    oldCode = lOldCode;
    finChar = lFinChar;
    stackP = lStackP;
    freeEnt = lFreeEnt;
    bitPos = lBitPos;
    eof = true;
    return offset - start;
}
/// <summary>
/// Moves the unread data in the buffer to the beginning and resets
/// the pointers.
/// </summary>
/// <param name="bitPosition">The current bit position within <c>data</c>.</param>
/// <returns>The new bit position, which is always 0.</returns>
private int ResetBuf(int bitPosition)
{
    int pos = bitPosition >> 3;
    Array.Copy(data, pos, data, 0, end - pos);
    end -= pos;
    return 0;
}

// Appends more compressed input to `data` and records the amount read in
// `got` (0 at end of stream).
// NOTE(review): reads at most data.Length - 1 - end bytes, leaving the final
// byte of the buffer unused — presumably headroom for the 3-byte window read
// in Read(); confirm before changing.
private void Fill()
{
    got = baseInputStream.Read(data, end, data.Length - 1 - end);
    if (got > 0)
    {
        end += got;
    }
}
/// <summary>
/// Reads and validates the 3-byte LZC header (magic marker plus flags byte)
/// and initializes the decoder tables and state.
/// </summary>
/// <exception cref="IncompleteArchiveException">The header is truncated or the magic bytes are wrong.</exception>
/// <exception cref="ArchiveException">The header declares an unsupported bit count or reserved flags.</exception>
private void ParseHeader()
{
    headerParsed = true;

    var hdr = new byte[LzwConstants.HDR_SIZE];

    // Stream.Read may return fewer bytes than requested and returns 0 at
    // end-of-stream — never a negative value — so the previous `result < 0`
    // check could never fire and truncated headers were misdiagnosed later.
    // Loop until the header is complete.
    var total = 0;
    while (total < hdr.Length)
    {
        var read = baseInputStream.Read(hdr, total, hdr.Length - total);
        if (read <= 0)
        {
            throw new IncompleteArchiveException("Failed to read LZW header");
        }
        total += read;
    }

    // Check the magic marker
    if (hdr[0] != (LzwConstants.MAGIC >> 8) || hdr[1] != (LzwConstants.MAGIC & 0xff))
    {
        throw new IncompleteArchiveException(
            String.Format(
                "Wrong LZW header. Magic bytes don't match. 0x{0:x2} 0x{1:x2}",
                hdr[0],
                hdr[1]
            )
        );
    }

    // Check the 3rd header byte
    blockMode = (hdr[2] & LzwConstants.BLOCK_MODE_MASK) > 0;
    maxBits = hdr[2] & LzwConstants.BIT_MASK;

    if (maxBits > LzwConstants.MAX_BITS)
    {
        throw new ArchiveException(
            "Stream compressed with "
                + maxBits
                + " bits, but decompression can only handle "
                + LzwConstants.MAX_BITS
                + " bits."
        );
    }

    // A bit count below the initial code width cannot come from a valid
    // compressor; without this check, corrupt input crashed with an
    // IndexOutOfRangeException in the table-seeding loop below because the
    // tables would be smaller than 256 entries.
    if (maxBits < LzwConstants.INIT_BITS)
    {
        throw new ArchiveException(
            "Stream compressed with " + maxBits + " bits, which is invalid."
        );
    }

    if ((hdr[2] & LzwConstants.RESERVED_MASK) > 0)
    {
        throw new ArchiveException("Unsupported bits set in the header.");
    }

    // Initialize variables
    maxMaxCode = 1 << maxBits;
    nBits = LzwConstants.INIT_BITS;
    maxCode = (1 << nBits) - 1;
    bitMask = maxCode;
    oldCode = -1;
    finChar = 0;
    freeEnt = blockMode ? TBL_FIRST : 256;

    tabPrefix = new int[1 << maxBits];
    tabSuffix = new byte[1 << maxBits];
    stack = new byte[1 << maxBits];
    stackP = stack.Length;

    // Seed the suffix table with the 256 single-byte codes.
    for (var idx = 255; idx >= 0; idx--)
    {
        tabSuffix[idx] = (byte)idx;
    }
}
#region Stream Overrides

/// <summary>
/// Gets a value indicating whether the current stream supports reading.
/// </summary>
public override bool CanRead => baseInputStream.CanRead;

/// <summary>
/// Always false: seeking is not supported by this stream.
/// </summary>
public override bool CanSeek => false;

/// <summary>
/// Always false: this stream is not writeable.
/// </summary>
public override bool CanWrite => false;

/// <summary>
/// A value representing the length of the stream in bytes.
/// NOTE(review): this returns the byte count of the most recent buffer fill,
/// not the total decompressed length — confirm callers expect this.
/// </summary>
public override long Length => got;

/// <summary>
/// The current position within the underlying stream.
/// Throws a NotSupportedException when attempting to set the position.
/// </summary>
/// <exception cref="NotSupportedException">Attempting to set the position</exception>
public override long Position
{
    get => baseInputStream.Position;
    set => throw new NotSupportedException("InflaterInputStream Position not supported");
}

/// <summary>
/// Flushes the baseInputStream.
/// </summary>
public override void Flush() => baseInputStream.Flush();

/// <summary>
/// Sets the position within the current stream; always throws.
/// </summary>
/// <param name="offset">The relative offset to seek to.</param>
/// <param name="origin">The <see cref="SeekOrigin"/> defining where to seek from.</param>
/// <returns>Never returns.</returns>
/// <exception cref="NotSupportedException">Any access</exception>
public override long Seek(long offset, SeekOrigin origin) =>
    throw new NotSupportedException("Seek not supported");

/// <summary>
/// Sets the length of the current stream; always throws.
/// </summary>
/// <param name="value">The new length value for the stream.</param>
/// <exception cref="NotSupportedException">Any access</exception>
public override void SetLength(long value) =>
    throw new NotSupportedException("InflaterInputStream SetLength not supported");

/// <summary>
/// Writes a sequence of bytes to the stream; always throws.
/// </summary>
/// <param name="buffer">The buffer containing data to write.</param>
/// <param name="offset">The offset of the first byte to write.</param>
/// <param name="count">The number of bytes to write.</param>
/// <exception cref="NotSupportedException">Any access</exception>
public override void Write(byte[] buffer, int offset, int count) =>
    throw new NotSupportedException("InflaterInputStream Write not supported");

/// <summary>
/// Writes one byte to the current stream; always throws.
/// </summary>
/// <param name="value">The byte to write.</param>
/// <exception cref="NotSupportedException">Any access</exception>
public override void WriteByte(byte value) =>
    throw new NotSupportedException("InflaterInputStream WriteByte not supported");

/// <summary>
/// Closes the input stream. When <see cref="IsStreamOwner"/> is true,
/// the underlying stream is also closed. Safe to call multiple times.
/// </summary>
protected override void Dispose(bool disposing)
{
    if (isClosed)
    {
        return;
    }
    isClosed = true;
    if (IsStreamOwner)
    {
        baseInputStream.Dispose();
    }
}

#endregion Stream Overrides
#region Instance Fields

// The compressed input stream; only assigned in the constructor, so readonly.
private readonly Stream baseInputStream;

/// <summary>
/// Flag indicating whether this instance has been closed or not.
/// </summary>
private bool isClosed;

// one-byte scratch buffer used by ReadByte
private readonly byte[] one = new byte[1];
private bool headerParsed;

// string table stuff
private const int TBL_CLEAR = 0x100;
private const int TBL_FIRST = TBL_CLEAR + 1;

// code tables; resized to 1 << maxBits by ParseHeader.
// Array.Empty<T>() avoids the per-instance zero-length allocations of new T[0].
private int[] tabPrefix = Array.Empty<int>();
private byte[] tabSuffix = Array.Empty<byte>();
private readonly int[] zeros = new int[256]; // source for zeroing tabPrefix on CLEAR
private byte[] stack = Array.Empty<byte>();

// various state
private bool blockMode;
private int nBits;
private int maxBits;
private int maxMaxCode;
private int maxCode;
private int bitMask;
private int oldCode;
private byte finChar;
private int stackP;
private int freeEnt;

// input buffer
private readonly byte[] data = new byte[1024 * 8];
private int bitPos;
private int end;
private int got;
private bool eof;
private const int EXTRA = 64;

#endregion Instance Fields
}
}

View File

@@ -22,7 +22,7 @@ internal class ModelPpm
}
}
public SubAllocator SubAlloc { get; } = new SubAllocator();
public SubAllocator SubAlloc { get; } = new();
public virtual See2Context DummySee2Cont => _dummySee2Cont;
@@ -137,34 +137,34 @@ internal class ModelPpm
// Temp fields
//UPGRADE_NOTE: Final was removed from the declaration of 'tempState1 '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
private readonly State _tempState1 = new State(null);
private readonly State _tempState1 = new(null);
//UPGRADE_NOTE: Final was removed from the declaration of 'tempState2 '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
private readonly State _tempState2 = new State(null);
private readonly State _tempState2 = new(null);
//UPGRADE_NOTE: Final was removed from the declaration of 'tempState3 '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
private readonly State _tempState3 = new State(null);
private readonly State _tempState3 = new(null);
//UPGRADE_NOTE: Final was removed from the declaration of 'tempState4 '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
private readonly State _tempState4 = new State(null);
private readonly State _tempState4 = new(null);
//UPGRADE_NOTE: Final was removed from the declaration of 'tempStateRef1 '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
private readonly StateRef _tempStateRef1 = new StateRef();
private readonly StateRef _tempStateRef1 = new();
//UPGRADE_NOTE: Final was removed from the declaration of 'tempStateRef2 '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
private readonly StateRef _tempStateRef2 = new StateRef();
private readonly StateRef _tempStateRef2 = new();
//UPGRADE_NOTE: Final was removed from the declaration of 'tempPPMContext1 '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
private readonly PpmContext _tempPpmContext1 = new PpmContext(null);
private readonly PpmContext _tempPpmContext1 = new(null);
//UPGRADE_NOTE: Final was removed from the declaration of 'tempPPMContext2 '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
private readonly PpmContext _tempPpmContext2 = new PpmContext(null);
private readonly PpmContext _tempPpmContext2 = new(null);
//UPGRADE_NOTE: Final was removed from the declaration of 'tempPPMContext3 '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
private readonly PpmContext _tempPpmContext3 = new PpmContext(null);
private readonly PpmContext _tempPpmContext3 = new(null);
//UPGRADE_NOTE: Final was removed from the declaration of 'tempPPMContext4 '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
private readonly PpmContext _tempPpmContext4 = new PpmContext(null);
private readonly PpmContext _tempPpmContext4 = new(null);
//UPGRADE_NOTE: Final was removed from the declaration of 'ps '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
private readonly int[] _ps = new int[MAX_O];

View File

@@ -64,19 +64,19 @@ internal class PpmContext : Pointer
// Temp fields
//UPGRADE_NOTE: Final was removed from the declaration of 'tempState1 '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
private readonly State _tempState1 = new State(null);
private readonly State _tempState1 = new(null);
//UPGRADE_NOTE: Final was removed from the declaration of 'tempState2 '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
private readonly State _tempState2 = new State(null);
private readonly State _tempState2 = new(null);
//UPGRADE_NOTE: Final was removed from the declaration of 'tempState3 '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
private readonly State _tempState3 = new State(null);
private readonly State _tempState3 = new(null);
//UPGRADE_NOTE: Final was removed from the declaration of 'tempState4 '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
private readonly State _tempState4 = new State(null);
private readonly State _tempState4 = new(null);
//UPGRADE_NOTE: Final was removed from the declaration of 'tempState5 '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
private readonly State _tempState5 = new State(null);
private readonly State _tempState5 = new(null);
private PpmContext _tempPpmContext;
//UPGRADE_NOTE: Final was removed from the declaration of 'ps '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"

View File

@@ -395,10 +395,8 @@ internal class Allocator
unitCountDifference -= unitCount;
}
_memoryNodes[UNITS_TO_INDEX[unitCountDifference - 1]].Insert(
newPointer,
unitCountDifference
);
_memoryNodes[UNITS_TO_INDEX[unitCountDifference - 1]]
.Insert(newPointer, unitCountDifference);
}
private void GlueFreeBlocks()
@@ -457,10 +455,11 @@ internal class Allocator
if (INDEX_TO_UNITS[index] != unitCount)
{
var unitCountDifference = unitCount - INDEX_TO_UNITS[--index];
_memoryNodes[unitCountDifference - 1].Insert(
memoryNode0 + (unitCount - unitCountDifference),
unitCountDifference
);
_memoryNodes[unitCountDifference - 1]
.Insert(
memoryNode0 + (unitCount - unitCountDifference),
unitCountDifference
);
}
_memoryNodes[index].Insert(memoryNode0, INDEX_TO_UNITS[index]);

View File

@@ -27,7 +27,7 @@ internal struct MemoryNode
{
public uint _address;
public byte[] _memory;
public static readonly MemoryNode ZERO = new MemoryNode(0, null);
public static readonly MemoryNode ZERO = new(0, null);
public const int SIZE = 12;
/// <summary>
@@ -64,7 +64,7 @@ internal struct MemoryNode
public MemoryNode Next
{
get =>
new MemoryNode(
new(
_memory[_address + 4]
| (((uint)_memory[_address + 5]) << 8)
| (((uint)_memory[_address + 6]) << 16)
@@ -150,7 +150,7 @@ internal struct MemoryNode
/// <param name="pointer"></param>
/// <returns></returns>
public static implicit operator MemoryNode(Pointer pointer) =>
new MemoryNode(pointer._address, pointer._memory);
new(pointer._address, pointer._memory);
/// <summary>
/// Allow pointer-like addition on a memory node.

View File

@@ -866,7 +866,8 @@ internal partial class Model
);
}
else if (
(currentContext.SummaryFrequency += 4) > 128 + (4 * currentContext.NumberStatistics)
(currentContext.SummaryFrequency += 4)
> 128 + (4 * currentContext.NumberStatistics)
)
{
Refresh((uint)((currentContext.NumberStatistics + 2) >> 1), true, currentContext);

View File

@@ -22,7 +22,7 @@ internal struct Pointer
{
public uint _address;
public byte[] _memory;
public static readonly Pointer ZERO = new Pointer(0, null);
public static readonly Pointer ZERO = new(0, null);
public const int SIZE = 1;
/// <summary>
@@ -69,7 +69,7 @@ internal struct Pointer
/// <param name="memoryNode"></param>
/// <returns></returns>
public static implicit operator Pointer(MemoryNode memoryNode) =>
new Pointer(memoryNode._address, memoryNode._memory);
new(memoryNode._address, memoryNode._memory);
/// <summary>
/// Allow a <see cref="Model.PpmContext"/> to be implicitly converted to a <see cref="Pointer"/>.
@@ -77,15 +77,14 @@ internal struct Pointer
/// <param name="context"></param>
/// <returns></returns>
public static implicit operator Pointer(Model.PpmContext context) =>
new Pointer(context._address, context._memory);
new(context._address, context._memory);
/// <summary>
/// Allow a <see cref="PpmState"/> to be implicitly converted to a <see cref="Pointer"/>.
/// </summary>
/// <param name="state"></param>
/// <returns></returns>
public static implicit operator Pointer(PpmState state) =>
new Pointer(state._address, state._memory);
public static implicit operator Pointer(PpmState state) => new(state._address, state._memory);
/// <summary>
/// Increase the address of a pointer by the given number of bytes.

View File

@@ -21,7 +21,7 @@ internal partial class Model
{
public uint _address;
public byte[] _memory;
public static readonly PpmContext ZERO = new PpmContext(0, null);
public static readonly PpmContext ZERO = new(0, null);
public const int SIZE = 12;
/// <summary>
@@ -70,7 +70,7 @@ internal partial class Model
public PpmState Statistics
{
get =>
new PpmState(
new(
_memory[_address + 4]
| (((uint)_memory[_address + 5]) << 8)
| (((uint)_memory[_address + 6]) << 16)
@@ -92,7 +92,7 @@ internal partial class Model
public PpmContext Suffix
{
get =>
new PpmContext(
new(
_memory[_address + 8]
| (((uint)_memory[_address + 9]) << 8)
| (((uint)_memory[_address + 10]) << 16)
@@ -133,7 +133,7 @@ internal partial class Model
/// </para>
/// </remarks>
/// <returns></returns>
public PpmState FirstState => new PpmState(_address + 2, _memory);
public PpmState FirstState => new(_address + 2, _memory);
/// <summary>
/// Gets or sets the symbol of the first PPM state. This is provided for convenience. The same
@@ -164,7 +164,7 @@ internal partial class Model
public PpmContext FirstStateSuccessor
{
get =>
new PpmContext(
new(
_memory[_address + 4]
| (((uint)_memory[_address + 5]) << 8)
| (((uint)_memory[_address + 6]) << 16)
@@ -186,7 +186,7 @@ internal partial class Model
/// <param name="pointer"></param>
/// <returns></returns>
public static implicit operator PpmContext(Pointer pointer) =>
new PpmContext(pointer._address, pointer._memory);
new(pointer._address, pointer._memory);
/// <summary>
/// Allow pointer-like addition on a PPM context.

View File

@@ -19,7 +19,7 @@ internal struct PpmState
{
public uint _address;
public byte[] _memory;
public static readonly PpmState ZERO = new PpmState(0, null);
public static readonly PpmState ZERO = new(0, null);
public const int SIZE = 6;
/// <summary>
@@ -55,7 +55,7 @@ internal struct PpmState
public Model.PpmContext Successor
{
get =>
new Model.PpmContext(
new(
_memory[_address + 2]
| (((uint)_memory[_address + 3]) << 8)
| (((uint)_memory[_address + 4]) << 16)
@@ -77,7 +77,7 @@ internal struct PpmState
/// </summary>
/// <param name="offset"></param>
/// <returns></returns>
public PpmState this[int offset] => new PpmState((uint)(_address + (offset * SIZE)), _memory);
public PpmState this[int offset] => new((uint)(_address + (offset * SIZE)), _memory);
/// <summary>
/// Allow a pointer to be implicitly converted to a PPM state.
@@ -85,7 +85,7 @@ internal struct PpmState
/// <param name="pointer"></param>
/// <returns></returns>
public static implicit operator PpmState(Pointer pointer) =>
new PpmState(pointer._address, pointer._memory);
new(pointer._address, pointer._memory);
/// <summary>
/// Allow pointer-like addition on a PPM state.

View File

@@ -128,9 +128,9 @@ internal sealed class MultiVolumeReadOnlyStream : Stream
public override bool CanWrite => false;
public uint CurrentCrc { get; private set; }
public byte[] CurrentCrc { get; private set; }
public override void Flush() => throw new NotSupportedException();
public override void Flush() { }
public override long Length => throw new NotSupportedException();

View File

@@ -0,0 +1,305 @@
using System;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using SharpCompress.Common;
using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Compressors.Rar;
internal class RarBLAKE2spStream : RarStream
{
private readonly MultiVolumeReadOnlyStream readStream;

// Set when the file header reports encryption.
// NOTE(review): presumably hash verification is skipped for encrypted
// entries — confirm where this flag is consumed.
private readonly bool disableCRCCheck;

// BLAKE2s parameters (RFC 7693)
const uint BLAKE2S_NUM_ROUNDS = 10;
const uint BLAKE2S_FINAL_FLAG = (~(uint)0); // value of f0 while compressing the final block
const int BLAKE2S_BLOCK_SIZE = 64;
const int BLAKE2S_DIGEST_SIZE = 32;
const int BLAKE2SP_PARALLEL_DEGREE = 8; // BLAKE2sp runs 8 interleaved lanes
const uint BLAKE2S_INIT_IV_SIZE = 8;

// BLAKE2s initialization vector (RFC 7693)
static readonly UInt32[] k_BLAKE2S_IV =
{
    0x6A09E667U,
    0xBB67AE85U,
    0x3C6EF372U,
    0xA54FF53AU,
    0x510E527FU,
    0x9B05688CU,
    0x1F83D9ABU,
    0x5BE0CD19U
};

// Message word permutation schedule, one row per round (RFC 7693)
static readonly byte[][] k_BLAKE2S_Sigma =
{
    new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },
    new byte[] { 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 },
    new byte[] { 11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4 },
    new byte[] { 7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8 },
    new byte[] { 9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13 },
    new byte[] { 2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9 },
    new byte[] { 12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11 },
    new byte[] { 13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10 },
    new byte[] { 6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5 },
    new byte[] { 10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0 },
};
/// <summary>
/// State for a single BLAKE2s lane: chaining value, message byte counter,
/// finalization flags, and the partially filled input block.
/// </summary>
internal class BLAKE2S
{
    internal UInt32[] h; // chaining value (8 words)
    internal UInt32[] t; // 64-bit message byte counter split into two 32-bit words
    internal UInt32[] f; // finalization flags f0/f1
    internal byte[] b; // 64-byte input block buffer
    internal int bufferPosition; // number of buffered bytes in b
    internal UInt32 lastNodeFlag; // f1 value applied when finalizing a last tree node

    public BLAKE2S()
    {
        h = new uint[BLAKE2S_INIT_IV_SIZE];
        t = new uint[2];
        f = new uint[2];
        b = new byte[BLAKE2S_BLOCK_SIZE];
        // The private `dummy` scratch array was allocated on every
        // construction but never read or written anywhere, so it was removed.
    }
};
/// <summary>
/// Parallel BLAKE2sp state: eight independent BLAKE2s lanes plus the
/// position within the current input block.
/// </summary>
internal class BLAKE2SP
{
    internal BLAKE2S[] S;
    internal int bufferPosition;

    public BLAKE2SP()
    {
        S = new BLAKE2S[BLAKE2SP_PARALLEL_DEGREE];
        for (var lane = 0; lane < BLAKE2SP_PARALLEL_DEGREE; lane++)
        {
            S[lane] = new BLAKE2S();
        }
    }
};
// Running BLAKE2sp state for the data seen so far.
BLAKE2SP _blake2sp;

// Initialized from the file header's stored hash.
// NOTE(review): presumably replaced by the computed digest on finalization
// (see Final) so that GetCrc can be compared against the header — confirm
// against the rest of this class.
byte[] _hash = { };

public RarBLAKE2spStream(
    IRarUnpack unpack,
    FileHeader fileHeader,
    MultiVolumeReadOnlyStream readStream
)
    : base(unpack, fileHeader, readStream)
{
    this.readStream = readStream;
    // Encrypted entries disable hash verification.
    disableCRCCheck = fileHeader.IsEncrypted;
    _hash = fileHeader.FileCrc;
    _blake2sp = new BLAKE2SP();
    ResetCrc();
}

// Returns the current hash bytes (initially the header's stored value).
public byte[] GetCrc() => _hash;

// Re-initializes a single BLAKE2s lane: chaining value back to the IV,
// counters, flags and buffered-byte count back to zero.
internal void ResetCrc(BLAKE2S hash)
{
    for (UInt32 j = 0; j < BLAKE2S_INIT_IV_SIZE; j++)
    {
        hash.h[j] = k_BLAKE2S_IV[j];
    }
    hash.t[0] = 0;
    hash.t[1] = 0;
    hash.f[0] = 0;
    hash.f[1] = 0;
    hash.bufferPosition = 0;
    hash.lastNodeFlag = 0;
}
// BLAKE2s mixing function G (RFC 7693): mixes the two message words selected
// by `sigma` into the four state words a, b, c, d. The shift pairs implement
// the BLAKE2s right-rotations by 16, 12, 8 and 7 bits.
internal void G(
    ref UInt32[] m,
    ref byte[] sigma,
    int i,
    ref UInt32 a,
    ref UInt32 b,
    ref UInt32 c,
    ref UInt32 d
)
{
    a += b + m[sigma[2 * i]];
    d ^= a;
    d = (d >> 16) | (d << 16); // rotr32(d, 16)
    c += d;
    b ^= c;
    b = (b >> 12) | (b << 20); // rotr32(b, 12)
    a += b + m[sigma[2 * i + 1]];
    d ^= a;
    d = (d >> 8) | (d << 24); // rotr32(d, 8)
    c += d;
    b ^= c;
    b = (b >> 7) | (b << 25); // rotr32(b, 7)
}

// BLAKE2s compression function: folds the 64-byte block in hash.b into the
// chaining value hash.h using the counters and flags in hash.t / hash.f.
internal void Compress(BLAKE2S hash)
{
    var m = new UInt32[16];
    var v = new UInt32[16];

    // Load the message block as 16 words.
    // NOTE(review): BitConverter uses the platform byte order; BLAKE2s
    // requires little-endian — confirm whether big-endian targets matter.
    for (var i = 0; i < 16; i++)
    {
        m[i] = BitConverter.ToUInt32(hash.b, i * 4);
    }

    // Working state: chaining value in v[0..7], IV xor counters/flags in v[8..15].
    for (var i = 0; i < 8; i++)
    {
        v[i] = hash.h[i];
    }
    v[8] = k_BLAKE2S_IV[0];
    v[9] = k_BLAKE2S_IV[1];
    v[10] = k_BLAKE2S_IV[2];
    v[11] = k_BLAKE2S_IV[3];
    v[12] = hash.t[0] ^ k_BLAKE2S_IV[4];
    v[13] = hash.t[1] ^ k_BLAKE2S_IV[5];
    v[14] = hash.f[0] ^ k_BLAKE2S_IV[6];
    v[15] = hash.f[1] ^ k_BLAKE2S_IV[7];

    // 10 rounds: four column mixes followed by four diagonal mixes.
    for (var r = 0; r < BLAKE2S_NUM_ROUNDS; r++)
    {
        ref byte[] sigma = ref k_BLAKE2S_Sigma[r];
        G(ref m, ref sigma, 0, ref v[0], ref v[4], ref v[8], ref v[12]);
        G(ref m, ref sigma, 1, ref v[1], ref v[5], ref v[9], ref v[13]);
        G(ref m, ref sigma, 2, ref v[2], ref v[6], ref v[10], ref v[14]);
        G(ref m, ref sigma, 3, ref v[3], ref v[7], ref v[11], ref v[15]);
        G(ref m, ref sigma, 4, ref v[0], ref v[5], ref v[10], ref v[15]);
        G(ref m, ref sigma, 5, ref v[1], ref v[6], ref v[11], ref v[12]);
        G(ref m, ref sigma, 6, ref v[2], ref v[7], ref v[8], ref v[13]);
        G(ref m, ref sigma, 7, ref v[3], ref v[4], ref v[9], ref v[14]);
    }

    // Feed-forward: xor both halves of the working state into the chaining value.
    for (var i = 0; i < 8; i++)
    {
        hash.h[i] ^= v[i] ^ v[i + 8];
    }
}
/// <summary>
/// Feeds <paramref name="size"/> bytes of <paramref name="data"/> into one
/// BLAKE2s lane, compressing each time the 64-byte block buffer fills; a
/// partial final block stays buffered for the next call or Final.
/// </summary>
internal void Update(BLAKE2S hash, ReadOnlySpan<byte> data, int size)
{
    var offset = 0;
    while (size != 0)
    {
        var buffered = hash.bufferPosition;
        var capacity = BLAKE2S_BLOCK_SIZE - buffered;

        if (size <= capacity)
        {
            // Everything fits in the buffer; it is compressed once full.
            data.Slice(offset, size).CopyTo(new Span<byte>(hash.b, buffered, size));
            hash.bufferPosition += size;
            return;
        }

        // Fill the buffer, advance the 64-bit byte counter and compress.
        data.Slice(offset, capacity).CopyTo(new Span<byte>(hash.b, buffered, capacity));
        hash.t[0] += BLAKE2S_BLOCK_SIZE;
        if (hash.t[0] < BLAKE2S_BLOCK_SIZE)
        {
            hash.t[1]++; // carry into the high counter word
        }
        Compress(hash);
        hash.bufferPosition = 0;
        offset += capacity;
        size -= capacity;
    }
}
/// <summary>
/// Finalizes one BLAKE2s lane: counts the buffered bytes, zero-pads the last
/// block, sets the finalization flags, compresses once more and returns the
/// 32-byte digest (h[0..7] serialized via BitConverter).
/// </summary>
internal byte[] Final(BLAKE2S hash)
{
    hash.t[0] += (uint)hash.bufferPosition;
    if (hash.t[0] < hash.bufferPosition)
    {
        hash.t[1]++; // carry into the high counter word
    }
    hash.f[0] = BLAKE2S_FINAL_FLAG;
    hash.f[1] = hash.lastNodeFlag;
    // Zero the unused tail of the block buffer before the last compression.
    Array.Clear(hash.b, hash.bufferPosition, BLAKE2S_BLOCK_SIZE - hash.bufferPosition);
    Compress(hash);

    // Serialize the chaining value directly (the original went through a
    // MemoryStream; byte output is identical).
    var digest = new byte[32];
    for (var i = 0; i < 8; i++)
    {
        BitConverter.GetBytes(hash.h[i]).CopyTo(digest, i * 4);
    }
    return digest;
}
/// <summary>
/// Resets the full BLAKE2sp state: every lane is re-initialized with the
/// tree-mode parameters XORed into its IV, and the last lane is flagged as
/// the final tree node.
/// </summary>
public void ResetCrc()
{
    _blake2sp.bufferPosition = 0;
    for (UInt32 i = 0; i < BLAKE2SP_PARALLEL_DEGREE; i++)
    {
        var lane = _blake2sp.S[i];
        lane.bufferPosition = 0;
        ResetCrc(lane);
        // Leaf parameters: digest length, parallel degree, depth.
        lane.h[0] ^= (BLAKE2S_DIGEST_SIZE | BLAKE2SP_PARALLEL_DEGREE << 16 | 2 << 24);
        lane.h[2] ^= i; // node offset = lane index
        lane.h[3] ^= (BLAKE2S_DIGEST_SIZE << 24); // inner hash length
    }
    _blake2sp.S[BLAKE2SP_PARALLEL_DEGREE - 1].lastNodeFlag = BLAKE2S_FINAL_FLAG;
}
/// <summary>
/// Distributes input across the BLAKE2sp lanes in 64-byte slices,
/// round-robin, tracking the interleave position in hash.bufferPosition.
/// (Removed a leftover commented-out call that hashed the wrong slice, and
/// fixed the "reminder" typo in the local names.)
/// </summary>
internal void Update(BLAKE2SP hash, ReadOnlySpan<byte> data, int size)
{
    var offset = 0;
    var pos = hash.bufferPosition;
    while (size != 0)
    {
        // Lane currently being filled and room left in its 64-byte slice.
        var index = pos / BLAKE2S_BLOCK_SIZE;
        var remainder = BLAKE2S_BLOCK_SIZE - (pos & (BLAKE2S_BLOCK_SIZE - 1));
        if (remainder > size)
        {
            remainder = size;
        }
        Update(hash.S[index], data.Slice(offset, remainder), remainder);
        size -= remainder;
        offset += remainder;
        pos += remainder;
        // Mask wraps pos back to lane 0 after the last lane's slice.
        pos &= (BLAKE2S_BLOCK_SIZE * (BLAKE2SP_PARALLEL_DEGREE - 1));
    }
    hash.bufferPosition = pos;
}
/// <summary>
/// Finalizes the BLAKE2sp tree: finalizes each lane of
/// <paramref name="hash"/>, feeds the lane digests into a root BLAKE2s node
/// and returns the root's 32-byte digest.
/// </summary>
internal byte[] Final(BLAKE2SP hash)
{
    var root = new BLAKE2S();
    ResetCrc(root);
    // Root parameters: digest length, parallel degree, depth.
    root.h[0] ^= (BLAKE2S_DIGEST_SIZE | BLAKE2SP_PARALLEL_DEGREE << 16 | 2 << 24);
    root.h[3] ^= (1 << 16 | BLAKE2S_DIGEST_SIZE << 24); // node depth 1, inner hash length
    root.lastNodeFlag = BLAKE2S_FINAL_FLAG;
    for (var i = 0; i < BLAKE2SP_PARALLEL_DEGREE; i++)
    {
        // FIX: finalize the lanes of the state passed in, not the field
        // _blake2sp — the parameter was silently ignored (identical only
        // because the sole caller passes _blake2sp).
        var digest = Final(hash.S[i]);
        Update(root, digest, BLAKE2S_DIGEST_SIZE);
    }
    return Final(root);
}
public override int Read(byte[] buffer, int offset, int count)
{
var result = base.Read(buffer, offset, count);
if (result != 0)
{
Update(_blake2sp, new ReadOnlySpan<byte>(buffer, offset, result), result);
}
else
{
_hash = Final(_blake2sp);
if (!disableCRCCheck && !(GetCrc().SequenceEqual(readStream.CurrentCrc)) && count != 0)
{
// NOTE: we use the last FileHeader in a multipart volume to check CRC
throw new InvalidFormatException("file crc mismatch");
}
}
return result;
}
}

View File

@@ -1,3 +1,4 @@
using System;
using SharpCompress.Common;
using SharpCompress.Common.Rar.Headers;
@@ -7,6 +8,7 @@ internal class RarCrcStream : RarStream
{
private readonly MultiVolumeReadOnlyStream readStream;
private uint currentCrc;
private readonly bool disableCRC;
public RarCrcStream(
IRarUnpack unpack,
@@ -16,6 +18,7 @@ internal class RarCrcStream : RarStream
: base(unpack, fileHeader, readStream)
{
this.readStream = readStream;
disableCRC = fileHeader.IsEncrypted;
ResetCrc();
}
@@ -30,7 +33,11 @@ internal class RarCrcStream : RarStream
{
currentCrc = RarCRC.CheckCrc(currentCrc, buffer, offset, result);
}
else if (GetCrc() != readStream.CurrentCrc && count != 0)
else if (
!disableCRC
&& GetCrc() != BitConverter.ToUInt32(readStream.CurrentCrc, 0)
&& count != 0
)
{
// NOTE: we use the last FileHeader in a multipart volume to check CRC
throw new InvalidFormatException("file crc mismatch");

View File

@@ -14,7 +14,7 @@ internal class RarStream : Stream
private bool fetch;
private byte[] tmpBuffer = new byte[65536];
private byte[] tmpBuffer = BufferPool.Rent(65536);
private int tmpOffset;
private int tmpCount;
@@ -40,6 +40,11 @@ internal class RarStream : Stream
{
if (!isDisposed)
{
if (disposing)
{
BufferPool.Return(this.tmpBuffer);
this.tmpBuffer = null;
}
isDisposed = true;
base.Dispose(disposing);
readStream.Dispose();
@@ -86,6 +91,13 @@ internal class RarStream : Stream
fetch = false;
}
_position += outTotal;
if (count > 0 && outTotal == 0 && _position != Length)
{
// sanity check, eg if we try to decompress a redir entry
throw new InvalidOperationException(
$"unpacked file size does not match header: expected {Length} found {_position}"
);
}
return outTotal;
}
@@ -111,16 +123,7 @@ internal class RarStream : Stream
}
if (count > 0)
{
if (tmpBuffer.Length < tmpCount + count)
{
var newBuffer = new byte[
tmpBuffer.Length * 2 > tmpCount + count
? tmpBuffer.Length * 2
: tmpCount + count
];
Buffer.BlockCopy(tmpBuffer, 0, newBuffer, 0, tmpCount);
tmpBuffer = newBuffer;
}
EnsureBufferCapacity(count);
Buffer.BlockCopy(buffer, offset, tmpBuffer, tmpCount, count);
tmpCount += count;
tmpOffset = 0;
@@ -131,4 +134,20 @@ internal class RarStream : Stream
unpack.Suspended = false;
}
}
private void EnsureBufferCapacity(int count)
{
if (this.tmpBuffer.Length < this.tmpCount + count)
{
var newLength =
this.tmpBuffer.Length * 2 > this.tmpCount + count
? this.tmpBuffer.Length * 2
: this.tmpCount + count;
var newBuffer = BufferPool.Rent(newLength);
Buffer.BlockCopy(this.tmpBuffer, 0, newBuffer, 0, this.tmpCount);
var oldBuffer = this.tmpBuffer;
this.tmpBuffer = newBuffer;
BufferPool.Return(oldBuffer);
}
}
}

View File

@@ -52,19 +52,19 @@ internal sealed partial class Unpack : BitInput, IRarUnpack
public int PpmEscChar { get; set; }
private readonly ModelPpm ppm = new ModelPpm();
private readonly ModelPpm ppm = new();
private readonly RarVM rarVM = new RarVM();
private readonly RarVM rarVM = new();
// Filters code, one entry per filter
private readonly List<UnpackFilter> filters = new List<UnpackFilter>();
private readonly List<UnpackFilter> filters = new();
// Filters stack, several entrances of same filter are possible
private readonly List<UnpackFilter> prgStack = new List<UnpackFilter>();
private readonly List<UnpackFilter> prgStack = new();
// lengths of preceding blocks, one length per filter. Used to reduce size
// required to write block length if lengths are repeating
private readonly List<int> oldFilterLengths = new List<int>();
private readonly List<int> oldFilterLengths = new();
private int lastFilter;

View File

@@ -25,15 +25,15 @@ internal partial class Unpack
private readonly AudioVariables[] AudV = new AudioVariables[4];
private readonly LitDecode LD = new LitDecode();
private readonly LitDecode LD = new();
private readonly DistDecode DD = new DistDecode();
private readonly DistDecode DD = new();
private readonly LowDistDecode LDD = new LowDistDecode();
private readonly LowDistDecode LDD = new();
private readonly RepDecode RD = new RepDecode();
private readonly RepDecode RD = new();
private readonly BitDecode BD = new BitDecode();
private readonly BitDecode BD = new();
private static readonly int[] LDecode =
{

View File

@@ -1,5 +1,4 @@
using System;
using SharpCompress.Compressors.Rar.VM;
namespace SharpCompress.Compressors.Rar.UnpackV1;

View File

@@ -1,5 +1,6 @@
#nullable disable
using System;
#if !Rar2017_64bit
using size_t = System.UInt32;
#else
@@ -7,7 +8,6 @@ using nint = System.Int64;
using nuint = System.UInt64;
using size_t = System.UInt64;
#endif
using System;
namespace SharpCompress.Compressors.Rar.UnpackV2017;

Some files were not shown because too many files have changed in this diff Show More