Compare commits


30 Commits

Author SHA1 Message Date
Adam Hathcock
f298ad3322 more reverts 2024-03-29 16:05:20 +00:00
Adam Hathcock
69872dd9e7 split files 2024-03-29 16:00:05 +00:00
Adam Hathcock
92174f49ae revert naming changes 2024-03-29 15:53:11 +00:00
Adam Hathcock
c39a155c8f Merge branch 'master' into async-2
# Conflicts:
#	src/SharpCompress/Writers/Zip/ZipWriter.cs
2024-03-29 15:27:43 +00:00
Adam Hathcock
fb73d8c0a7 Merge pull request #819 from TwanVanDongen/master
Support added for TAR LZW compression (Unix 'compress' resulting in .…
2024-03-25 08:41:34 +00:00
Twan van Dongen
f2b0368078 CSharpier reformat missed 2024-03-24 16:29:29 +01:00
Twan van Dongen
02301ecf6d Support added for TAR LZW compression (Unix 'compress' resulting in .Z files) 2024-03-24 16:23:25 +01:00
Adam Hathcock
bcb61ee3e4 Merge pull request #817 from btomblinson/master
#809 Add README.md to csproj for NuGet
2024-03-18 08:34:20 +00:00
btomblinson
6a824429d0 #809 Add README.md to csproj for NuGet 2024-03-16 22:52:36 -06:00
Adam Hathcock
6a52f9097f Merge pull request #815 from adamhathcock/code-clean-up
Code clean up
2024-03-14 16:01:34 +00:00
Adam Hathcock
3fa85fc516 Merge branch 'master' into code-clean-up 2024-03-14 15:58:29 +00:00
Adam Hathcock
498d132d8a Merge pull request #816 from coderb/pullreq-rar-memusage
rar5 improve memory usage
2024-03-14 15:58:15 +00:00
root
b6340f1458 rar5 improve memory usage
use ArrayPool for stream buffer
use stackalloc for methods on file decompression code path
2024-03-14 11:50:45 -04:00
Adam Hathcock
4afc7ae2e4 use complete namespace 2024-03-14 13:07:40 +00:00
Adam Hathcock
95975a4c33 even more clean up 2024-03-14 09:07:21 +00:00
Adam Hathcock
198a0673a2 more clean up 2024-03-14 09:00:44 +00:00
Adam Hathcock
94d1503c64 more clean up 2024-03-14 08:57:16 +00:00
Adam Hathcock
5f13e245f0 more clean up on tests 2024-03-14 08:53:08 +00:00
Adam Hathcock
2715ae645d use var 2024-03-14 08:38:12 +00:00
Adam Hathcock
0299232cb5 just using rider to clean up 2024-03-14 08:37:17 +00:00
Adam Hathcock
93e181cfd9 update csharpier 2024-03-14 08:29:30 +00:00
Adam Hathcock
8072eb1212 Merge pull request #814 from coderb/pullreq-rar5-redir
rar5 read FHEXTRA_REDIR and expose via RarEntry
2024-03-14 08:26:06 +00:00
root
226ce340f2 rar5 read FHEXTRA_REDIR and expose via RarEntry
NOTE: api user should skip entries where RarEntry.IsRedir is true and not call OpenEntryStream()
2024-03-14 04:17:31 -04:00
Adam Hathcock
e5944cf72c add writer support for async 2024-03-12 15:40:29 +00:00
Adam Hathcock
ab5535eba3 Merge pull request #807 from TwanVanDongen/master
Support for decompressing Zip Shrink (Method:1)
2024-01-29 08:27:32 +00:00
Adam Hathcock
8da2499495 Merge pull request #805 from DannyBoyk/804_Fix_ZIP_Decryption
Zip: Use last modified time from basic header when validating zip decryption
2024-01-29 08:26:17 +00:00
Twan van Dongen
c057ffb153 Reformatted using CSharpier 2024-01-27 18:59:56 +01:00
Twan van Dongen
fe13d29549 Merge branch 'master' of https://github.com/TwanVanDongen/sharpcompress 2024-01-27 18:31:13 +01:00
Twan van Dongen
225aaab4f4 Support for decompressing Zip Shrink (method:1) added 2024-01-27 18:28:46 +01:00
Daniel Nash
14c973558b Zip: Use last modified time from basic header when validating zip decryption
The last modified time used for zip decryption validation must be the
one from the basic header. If UnixTimeExtraFields are present, the
previous implementation was attempting to verify against that value
instead.
Fixed #804
2024-01-26 10:54:41 -05:00
147 changed files with 3757 additions and 1833 deletions

View File

@@ -3,7 +3,7 @@
"isRoot": true,
"tools": {
"csharpier": {
"version": "0.26.7",
"version": "0.27.3",
"commands": [
"dotnet-csharpier"
]

View File

@@ -62,7 +62,7 @@ internal static class Adler32 // From https://github.com/SixLabors/ImageSharp/bl
public static int ReduceSum(Vector256<int> accumulator)
{
// Add upper lane to lower lane.
Vector128<int> vsum = Sse2.Add(accumulator.GetLower(), accumulator.GetUpper());
var vsum = Sse2.Add(accumulator.GetLower(), accumulator.GetUpper());
// Add odd to even.
vsum = Sse2.Add(vsum, Sse2.Shuffle(vsum, 0b_11_11_01_01));
@@ -81,7 +81,7 @@ internal static class Adler32 // From https://github.com/SixLabors/ImageSharp/bl
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static int EvenReduceSum(Vector256<int> accumulator)
{
Vector128<int> vsum = Sse2.Add(accumulator.GetLower(), accumulator.GetUpper()); // add upper lane to lower lane
var vsum = Sse2.Add(accumulator.GetLower(), accumulator.GetUpper()); // add upper lane to lower lane
vsum = Sse2.Add(vsum, Sse2.Shuffle(vsum, 0b_11_10_11_10)); // add high to low
// Vector128<int>.ToScalar() isn't optimized pre-net5.0 https://github.com/dotnet/runtime/pull/37882
@@ -189,29 +189,29 @@ internal static class Adler32 // From https://github.com/SixLabors/ImageSharp/bl
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
private static unsafe uint CalculateSse(uint adler, ReadOnlySpan<byte> buffer)
{
uint s1 = adler & 0xFFFF;
uint s2 = (adler >> 16) & 0xFFFF;
var s1 = adler & 0xFFFF;
var s2 = (adler >> 16) & 0xFFFF;
// Process the data in blocks.
uint length = (uint)buffer.Length;
uint blocks = length / BlockSize;
var length = (uint)buffer.Length;
var blocks = length / BlockSize;
length -= blocks * BlockSize;
fixed (byte* bufferPtr = &MemoryMarshal.GetReference(buffer))
{
fixed (byte* tapPtr = &MemoryMarshal.GetReference(Tap1Tap2))
{
byte* localBufferPtr = bufferPtr;
var localBufferPtr = bufferPtr;
// _mm_setr_epi8 on x86
Vector128<sbyte> tap1 = Sse2.LoadVector128((sbyte*)tapPtr);
Vector128<sbyte> tap2 = Sse2.LoadVector128((sbyte*)(tapPtr + 0x10));
Vector128<byte> zero = Vector128<byte>.Zero;
var tap1 = Sse2.LoadVector128((sbyte*)tapPtr);
var tap2 = Sse2.LoadVector128((sbyte*)(tapPtr + 0x10));
var zero = Vector128<byte>.Zero;
var ones = Vector128.Create((short)1);
while (blocks > 0)
{
uint n = NMAX / BlockSize; /* The NMAX constraint. */
var n = NMAX / BlockSize; /* The NMAX constraint. */
if (n > blocks)
{
n = blocks;
@@ -221,15 +221,15 @@ internal static class Adler32 // From https://github.com/SixLabors/ImageSharp/bl
// Process n blocks of data. At most NMAX data bytes can be
// processed before s2 must be reduced modulo BASE.
Vector128<uint> v_ps = Vector128.CreateScalar(s1 * n);
Vector128<uint> v_s2 = Vector128.CreateScalar(s2);
Vector128<uint> v_s1 = Vector128<uint>.Zero;
var v_ps = Vector128.CreateScalar(s1 * n);
var v_s2 = Vector128.CreateScalar(s2);
var v_s1 = Vector128<uint>.Zero;
do
{
// Load 32 input bytes.
Vector128<byte> bytes1 = Sse3.LoadDquVector128(localBufferPtr);
Vector128<byte> bytes2 = Sse3.LoadDquVector128(localBufferPtr + 0x10);
var bytes1 = Sse3.LoadDquVector128(localBufferPtr);
var bytes2 = Sse3.LoadDquVector128(localBufferPtr + 0x10);
// Add previous block byte sum to v_ps.
v_ps = Sse2.Add(v_ps, v_s1);
@@ -237,11 +237,11 @@ internal static class Adler32 // From https://github.com/SixLabors/ImageSharp/bl
// Horizontally add the bytes for s1, multiply-adds the
// bytes by [ 32, 31, 30, ... ] for s2.
v_s1 = Sse2.Add(v_s1, Sse2.SumAbsoluteDifferences(bytes1, zero).AsUInt32());
Vector128<short> mad1 = Ssse3.MultiplyAddAdjacent(bytes1, tap1);
var mad1 = Ssse3.MultiplyAddAdjacent(bytes1, tap1);
v_s2 = Sse2.Add(v_s2, Sse2.MultiplyAddAdjacent(mad1, ones).AsUInt32());
v_s1 = Sse2.Add(v_s1, Sse2.SumAbsoluteDifferences(bytes2, zero).AsUInt32());
Vector128<short> mad2 = Ssse3.MultiplyAddAdjacent(bytes2, tap2);
var mad2 = Ssse3.MultiplyAddAdjacent(bytes2, tap2);
v_s2 = Sse2.Add(v_s2, Sse2.MultiplyAddAdjacent(mad2, ones).AsUInt32());
localBufferPtr += BlockSize;
@@ -281,15 +281,15 @@ internal static class Adler32 // From https://github.com/SixLabors/ImageSharp/bl
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
public static unsafe uint CalculateAvx2(uint adler, ReadOnlySpan<byte> buffer)
{
uint s1 = adler & 0xFFFF;
uint s2 = (adler >> 16) & 0xFFFF;
uint length = (uint)buffer.Length;
var s1 = adler & 0xFFFF;
var s2 = (adler >> 16) & 0xFFFF;
var length = (uint)buffer.Length;
fixed (byte* bufferPtr = &MemoryMarshal.GetReference(buffer))
{
byte* localBufferPtr = bufferPtr;
var localBufferPtr = bufferPtr;
Vector256<byte> zero = Vector256<byte>.Zero;
var zero = Vector256<byte>.Zero;
var dot3v = Vector256.Create((short)1);
var dot2v = Vector256.Create(
32,
@@ -333,29 +333,29 @@ internal static class Adler32 // From https://github.com/SixLabors/ImageSharp/bl
while (length >= 32)
{
int k = length < NMAX ? (int)length : (int)NMAX;
var k = length < NMAX ? (int)length : (int)NMAX;
k -= k % 32;
length -= (uint)k;
Vector256<uint> vs10 = vs1;
Vector256<uint> vs3 = Vector256<uint>.Zero;
var vs10 = vs1;
var vs3 = Vector256<uint>.Zero;
while (k >= 32)
{
// Load 32 input bytes.
Vector256<byte> block = Avx.LoadVector256(localBufferPtr);
var block = Avx.LoadVector256(localBufferPtr);
// Sum of abs diff, resulting in 2 x int32's
Vector256<ushort> vs1sad = Avx2.SumAbsoluteDifferences(block, zero);
var vs1sad = Avx2.SumAbsoluteDifferences(block, zero);
vs1 = Avx2.Add(vs1, vs1sad.AsUInt32());
vs3 = Avx2.Add(vs3, vs10);
// sum 32 uint8s to 16 shorts.
Vector256<short> vshortsum2 = Avx2.MultiplyAddAdjacent(block, dot2v);
var vshortsum2 = Avx2.MultiplyAddAdjacent(block, dot2v);
// sum 16 shorts to 8 uint32s.
Vector256<int> vsum2 = Avx2.MultiplyAddAdjacent(vshortsum2, dot3v);
var vsum2 = Avx2.MultiplyAddAdjacent(vshortsum2, dot3v);
vs2 = Avx2.Add(vsum2.AsUInt32(), vs2);
vs10 = vs1;
@@ -434,14 +434,14 @@ internal static class Adler32 // From https://github.com/SixLabors/ImageSharp/bl
[MethodImpl(InliningOptions.HotPath | InliningOptions.ShortMethod)]
private static unsafe uint CalculateScalar(uint adler, ReadOnlySpan<byte> buffer)
{
uint s1 = adler & 0xFFFF;
uint s2 = (adler >> 16) & 0xFFFF;
var s1 = adler & 0xFFFF;
var s2 = (adler >> 16) & 0xFFFF;
uint k;
fixed (byte* bufferPtr = buffer)
{
var localBufferPtr = bufferPtr;
uint length = (uint)buffer.Length;
var length = (uint)buffer.Length;
while (length > 0)
{
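
For reference, the s1/s2 pair that both the SSE and AVX2 paths accumulate is the standard Adler-32 recurrence: s1 is the running sum of input bytes, s2 is the running sum of s1, and both are reduced modulo 65521 at least every NMAX bytes so the 32-bit accumulators cannot overflow. A minimal scalar sketch of that recurrence, not taken from this changeset (names assumed):

using System;

internal static class Adler32Sketch
{
    private const uint MOD = 65521;  // largest prime below 2^16
    private const int NMAX = 5552;   // max bytes before s2 could overflow a uint

    public static uint Compute(uint adler, ReadOnlySpan<byte> buffer)
    {
        var s1 = adler & 0xFFFF;
        var s2 = (adler >> 16) & 0xFFFF;
        var i = 0;
        while (i < buffer.Length)
        {
            var end = Math.Min(buffer.Length, i + NMAX);
            for (; i < end; i++)
            {
                s1 += buffer[i];  // sum of bytes
                s2 += s1;         // sum of the running sums
            }
            s1 %= MOD;
            s2 %= MOD;
        }
        return (s2 << 16) | s1;
    }
}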

View File

@@ -31,10 +31,10 @@ public abstract class AbstractWritableArchive<TEntry, TVolume>
}
}
private readonly List<TEntry> newEntries = new List<TEntry>();
private readonly List<TEntry> removedEntries = new List<TEntry>();
private readonly List<TEntry> newEntries = new();
private readonly List<TEntry> removedEntries = new();
private readonly List<TEntry> modifiedEntries = new List<TEntry>();
private readonly List<TEntry> modifiedEntries = new();
private bool hasModifications;
private bool pauseRebuilding;

View File

@@ -11,7 +11,7 @@ internal abstract class ArchiveVolumeFactory
FileInfo? item = null;
//split 001, 002 ...
Match m = Regex.Match(part1.Name, @"^(.*\.)([0-9]+)$", RegexOptions.IgnoreCase);
var m = Regex.Match(part1.Name, @"^(.*\.)([0-9]+)$", RegexOptions.IgnoreCase);
if (m.Success)
item = new FileInfo(
Path.Combine(

View File

@@ -94,7 +94,7 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
);
}
public static GZipArchive Create() => new GZipArchive();
public static GZipArchive Create() => new();
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.

View File

@@ -1,3 +1,4 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
@@ -14,9 +15,8 @@ namespace SharpCompress.Archives.Rar;
public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
{
internal Lazy<IRarUnpack> UnpackV2017 { get; } =
new Lazy<IRarUnpack>(() => new Compressors.Rar.UnpackV2017.Unpack());
internal Lazy<IRarUnpack> UnpackV1 { get; } =
new Lazy<IRarUnpack>(() => new Compressors.Rar.UnpackV1.Unpack());
new(() => new Compressors.Rar.UnpackV2017.Unpack());
internal Lazy<IRarUnpack> UnpackV1 { get; } = new(() => new Compressors.Rar.UnpackV1.Unpack());
/// <summary>
/// Constructor with a SourceStream able to handle FileInfo and Streams.
@@ -40,9 +40,11 @@ public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
streams[1].Position = 0;
SrcStream.Position = 0;
return srcStream.Streams.Select(
a => new StreamRarArchiveVolume(a, ReaderOptions, idx++)
);
return srcStream.Streams.Select(a => new StreamRarArchiveVolume(
a,
ReaderOptions,
idx++
));
}
else //split mode or single file
{

View File

@@ -11,7 +11,7 @@ internal static class RarArchiveVolumeFactory
FileInfo? item = null;
//new style rar - ..part1 | /part01 | part001 ....
Match m = Regex.Match(part1.Name, @"^(.*\.part)([0-9]+)(\.rar)$", RegexOptions.IgnoreCase);
var m = Regex.Match(part1.Name, @"^(.*\.part)([0-9]+)(\.rar)$", RegexOptions.IgnoreCase);
if (m.Success)
item = new FileInfo(
Path.Combine(

View File

@@ -195,7 +195,7 @@ public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
}
}
public static TarArchive Create() => new TarArchive();
public static TarArchive Create() => new();
protected override TarArchiveEntry CreateEntryInternal(
string filePath,

View File

@@ -294,7 +294,7 @@ public class ZipArchive : AbstractWritableArchive<ZipArchiveEntry, ZipVolume>
bool closeStream
) => new ZipWritableArchiveEntry(this, source, filePath, size, modified, closeStream);
public static ZipArchive Create() => new ZipArchive();
public static ZipArchive Create() => new();
protected override IReader CreateReaderForSolidExtraction()
{

View File

@@ -12,7 +12,7 @@ internal static class ZipArchiveVolumeFactory
//load files with zip/zipx first. Swapped to end once loaded in ZipArchive
//new style .zip, z01.. | .zipx, zx01 - if the numbers go beyond 99 then they use 100 ...1000 etc
Match m = Regex.Match(part1.Name, @"^(.*\.)(zipx?|zx?[0-9]+)$", RegexOptions.IgnoreCase);
var m = Regex.Match(part1.Name, @"^(.*\.)(zipx?|zx?[0-9]+)$", RegexOptions.IgnoreCase);
if (m.Success)
item = new FileInfo(
Path.Combine(

View File

@@ -0,0 +1,33 @@
using System.Buffers;
namespace SharpCompress;
internal static class BufferPool
{
/// <summary>
/// gets a buffer from the pool
/// </summary>
/// <param name="bufferSize">size of the buffer</param>
/// <returns>the buffer</returns>
public static byte[] Rent(int bufferSize)
{
#if NETCOREAPP || NETSTANDARD2_1_OR_GREATER
return ArrayPool<byte>.Shared.Rent(bufferSize);
#else
return new byte[bufferSize];
#endif
}
/// <summary>
/// returns a buffer to the pool
/// </summary>
/// <param name="buffer">the buffer to return</param>
public static void Return(byte[] buffer)
{
#if NETCOREAPP || NETSTANDARD2_1_OR_GREATER
ArrayPool<byte>.Shared.Return(buffer);
#else
// no-op
#endif
}
}
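
A minimal usage sketch for this helper (stream names and the 80 KiB size are illustrative, not from this changeset): rent, use, and return in a finally block so pooled arrays are handed back on the targets where ArrayPool is available.

var buffer = BufferPool.Rent(81920);
try
{
    int read;
    while ((read = source.Read(buffer, 0, buffer.Length)) > 0)
    {
        destination.Write(buffer, 0, read);
    }
}
finally
{
    // Rented arrays can be larger than requested and may hold stale data;
    // returning them is what makes pooling pay off.
    BufferPool.Return(buffer);
}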

View File

@@ -14,5 +14,7 @@ public enum CompressionType
LZip,
Xz,
Unknown,
Deflate64
Deflate64,
Shrink,
Lzw
}
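
The new Shrink and Lzw members let callers see which entries came from zip Shrink (method 1) or Unix compress (.Z). A hedged sketch of reading a .tar.Z through the reader API, assuming the container detection added in #819 (file and directory names are illustrative):

using System;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Readers;

using var stream = File.OpenRead("backup.tar.Z");
using var reader = ReaderFactory.Open(stream);
while (reader.MoveToNextEntry())
{
    if (reader.Entry.IsDirectory)
    {
        continue;
    }
    // Entries from the LZW-compressed container are expected to report CompressionType.Lzw (assumption).
    Console.WriteLine($"{reader.Entry.Key}: {reader.Entry.CompressionType}");
    reader.WriteEntryToDirectory(
        "output",
        new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
    );
}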

View File

@@ -16,7 +16,7 @@ internal static class ExtractionMethods
)
{
string destinationFileName;
string fullDestinationDirectoryPath = Path.GetFullPath(destinationDirectory);
var fullDestinationDirectoryPath = Path.GetFullPath(destinationDirectory);
//check for trailing slash.
if (
@@ -36,11 +36,11 @@ internal static class ExtractionMethods
options ??= new ExtractionOptions() { Overwrite = true };
string file = Path.GetFileName(entry.Key);
var file = Path.GetFileName(entry.Key);
if (options.ExtractFullPath)
{
string folder = Path.GetDirectoryName(entry.Key)!;
string destdir = Path.GetFullPath(Path.Combine(fullDestinationDirectoryPath, folder));
var folder = Path.GetDirectoryName(entry.Key)!;
var destdir = Path.GetFullPath(Path.Combine(fullDestinationDirectoryPath, folder));
if (!Directory.Exists(destdir))
{
@@ -102,7 +102,7 @@ internal static class ExtractionMethods
}
else
{
FileMode fm = FileMode.Create;
var fm = FileMode.Create;
options ??= new ExtractionOptions() { Overwrite = true };
if (!options.Overwrite)

View File

@@ -7,5 +7,5 @@ public class OptionsBase
/// </summary>
public bool LeaveStreamOpen { get; set; } = true;
public ArchiveEncoding ArchiveEncoding { get; set; } = new ArchiveEncoding();
public ArchiveEncoding ArchiveEncoding { get; set; } = new();
}

View File

@@ -1,12 +0,0 @@
using System;
namespace SharpCompress.Common;
public class PasswordProtectedException : ExtractionException
{
public PasswordProtectedException(string message)
: base(message) { }
public PasswordProtectedException(string message, Exception inner)
: base(message, inner) { }
}

View File

@@ -12,10 +12,7 @@ internal class CryptKey3 : ICryptKey
private string _password;
public CryptKey3(string password)
{
_password = password ?? "";
}
public CryptKey3(string password) => _password = password ?? "";
public ICryptoTransform Transformer(byte[] salt)
{

View File

@@ -34,35 +34,33 @@ internal class CryptKey5 : ICryptKey
int keyLength
)
{
using (HMACSHA256 hmac = new HMACSHA256(Encoding.UTF8.GetBytes(password)))
using var hmac = new HMACSHA256(Encoding.UTF8.GetBytes(password));
var block = hmac.ComputeHash(salt);
var finalHash = (byte[])block.Clone();
var loop = new int[] { iterations, 17, 17 };
var res = new List<byte[]> { };
for (var x = 0; x < 3; x++)
{
byte[] block = hmac.ComputeHash(salt);
byte[] finalHash = (byte[])block.Clone();
var loop = new int[] { iterations, 17, 17 };
var res = new List<byte[]> { };
for (int x = 0; x < 3; x++)
for (var i = 1; i < loop[x]; i++)
{
for (int i = 1; i < loop[x]; i++)
block = hmac.ComputeHash(block);
for (var j = 0; j < finalHash.Length; j++)
{
block = hmac.ComputeHash(block);
for (int j = 0; j < finalHash.Length; j++)
{
finalHash[j] ^= block[j];
}
finalHash[j] ^= block[j];
}
res.Add((byte[])finalHash.Clone());
}
return res;
res.Add((byte[])finalHash.Clone());
}
return res;
}
public ICryptoTransform Transformer(byte[] salt)
{
int iterations = (1 << _cryptoInfo.LG2Count); // Adjust the number of iterations as needed
var iterations = (1 << _cryptoInfo.LG2Count); // Adjust the number of iterations as needed
var salt_rar5 = salt.Concat(new byte[] { 0, 0, 0, 1 });
var derivedKey = GenerateRarPBKDF2Key(
@@ -76,7 +74,7 @@ internal class CryptKey5 : ICryptKey
_pswCheck = new byte[EncryptionConstV5.SIZE_PSWCHECK];
for (int i = 0; i < SHA256_DIGEST_SIZE; i++)
for (var i = 0; i < SHA256_DIGEST_SIZE; i++)
{
_pswCheck[i % EncryptionConstV5.SIZE_PSWCHECK] ^= derivedKey[2][i];
}
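
The loop above is the RAR5 key schedule: HMAC-SHA256 keyed with the password is chained over the salt for 2^LG2Count rounds to produce the AES key, then 17 and 17 more rounds for the hash key and the password-check material (result[2] feeds _pswCheck above). A compact, hedged reconstruction of that derivation, with method and variable names assumed:

using System.Collections.Generic;
using System.Security.Cryptography;
using System.Text;

static List<byte[]> DeriveRar5Keys(string password, byte[] salt, int iterations)
{
    using var hmac = new HMACSHA256(Encoding.UTF8.GetBytes(password));
    var block = hmac.ComputeHash(salt);
    var finalHash = (byte[])block.Clone();
    var rounds = new[] { iterations, 17, 17 };   // iterations == 1 << LG2Count
    var result = new List<byte[]>();
    foreach (var count in rounds)
    {
        for (var i = 1; i < count; i++)
        {
            block = hmac.ComputeHash(block);
            for (var j = 0; j < finalHash.Length; j++)
            {
                finalHash[j] ^= block[j];        // fold every intermediate hash into the result
            }
        }
        result.Add((byte[])finalHash.Clone());   // [0] AES key, [1] hash key, [2] password check
    }
    return result;
}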

View File

@@ -12,10 +12,8 @@ internal class ArchiveCryptHeader : RarHeader
public ArchiveCryptHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Crypt) { }
public Rar5CryptoInfo CryptInfo = new Rar5CryptoInfo();
public Rar5CryptoInfo CryptInfo = new();
protected override void ReadFinish(MarkingBinaryReader reader)
{
protected override void ReadFinish(MarkingBinaryReader reader) =>
CryptInfo = new Rar5CryptoInfo(reader, false);
}
}

View File

@@ -109,7 +109,7 @@ internal class FileHeader : RarHeader
const ushort FHEXTRA_HASH = 0x02;
const ushort FHEXTRA_HTIME = 0x03;
// const ushort FHEXTRA_VERSION = 0x04;
// const ushort FHEXTRA_REDIR = 0x05;
const ushort FHEXTRA_REDIR = 0x05;
// const ushort FHEXTRA_UOWNER = 0x06;
// const ushort FHEXTRA_SUBDATA = 0x07;
@@ -120,7 +120,6 @@ internal class FileHeader : RarHeader
var type = reader.ReadRarVIntUInt16();
switch (type)
{
//TODO
case FHEXTRA_CRYPT: // file encryption
{
@@ -171,11 +170,17 @@ internal class FileHeader : RarHeader
//
// }
// break;
// case FHEXTRA_REDIR: // file system redirection
// {
//
// }
// break;
case FHEXTRA_REDIR: // file system redirection
{
RedirType = reader.ReadRarVIntByte();
RedirFlags = reader.ReadRarVIntByte();
var nn = reader.ReadRarVIntUInt16();
var bb = reader.ReadBytes(nn);
RedirTargetName = ConvertPathV5(Encoding.UTF8.GetString(bb, 0, bb.Length));
}
break;
//TODO
// case FHEXTRA_UOWNER: // unix owner
// {
//
@@ -189,6 +194,7 @@ internal class FileHeader : RarHeader
default:
// skip unknown record types to allow new record types to be added in the future
//Console.WriteLine($"unhandled rar header field type {type}");
break;
}
// drain any trailing bytes of extra record
@@ -437,6 +443,12 @@ internal class FileHeader : RarHeader
public bool IsSolid { get; private set; }
public byte RedirType { get; private set; }
public bool IsRedir => RedirType != 0;
public byte RedirFlags { get; private set; }
public bool IsRedirDirectory => (RedirFlags & RedirFlagV5.DIRECTORY) != 0;
public string RedirTargetName { get; private set; }
// unused for UnpackV1 implementation (limitation)
internal size_t WindowSize { get; private set; }

View File

@@ -157,3 +157,17 @@ internal static class EndArchiveFlagsV5
{
public const ushort HAS_NEXT_VOLUME = 0x0001;
}
internal static class RedirTypeV5
{
public const byte UNIX_SYMLINK = 0x0001;
public const byte WIN_SYMLINK = 0x0002;
public const byte WIN_JUNCTION = 0x0003;
public const byte HARD_LINK = 0x0004;
public const byte FILE_COPY = 0x0005;
}
internal static class RedirFlagV5
{
public const byte DIRECTORY = 0x0001;
}
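
These redirection records surface on RarEntry as IsRedir and RedirTargetName (see the RarEntry change further down); per the note in #814, consumers should skip such entries rather than opening their streams. A hedged usage sketch (archive path illustrative):

using System;
using SharpCompress.Archives.Rar;

using var archive = RarArchive.Open("example.rar");
foreach (var entry in archive.Entries)
{
    if (entry.IsRedir)
    {
        // Redirection entries (symlinks, junctions, hard links, file copies) carry a
        // target instead of data, so OpenEntryStream() is not called on them.
        Console.WriteLine($"skipping {entry.Key} -> {entry.RedirTargetName}");
        continue;
    }
    using var entryStream = entry.OpenEntryStream();
    // ... consume entryStream ...
}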

View File

@@ -42,10 +42,8 @@ internal class Rar5CryptoInfo
}
}
public void ReadInitV(MarkingBinaryReader reader)
{
public void ReadInitV(MarkingBinaryReader reader) =>
InitV = reader.ReadBytes(EncryptionConstV5.SIZE_INITV);
}
public bool UsePswCheck = false;

View File

@@ -10,7 +10,7 @@ namespace SharpCompress.Common.Rar;
internal sealed class RarCryptoBinaryReader : RarCrcBinaryReader
{
private BlockTransformer _rijndael;
private readonly Queue<byte> _data = new Queue<byte>();
private readonly Queue<byte> _data = new();
private long _readCount;
public RarCryptoBinaryReader(Stream stream, ICryptKey cryptKey)
@@ -22,10 +22,7 @@ internal sealed class RarCryptoBinaryReader : RarCrcBinaryReader
}
public RarCryptoBinaryReader(Stream stream, ICryptKey cryptKey, byte[] salt)
: base(stream)
{
_rijndael = new BlockTransformer(cryptKey.Transformer(salt));
}
: base(stream) => _rijndael = new BlockTransformer(cryptKey.Transformer(salt));
// track read count ourselves rather than using the underlying stream since we buffer
public override long CurrentReadByteCount
@@ -39,15 +36,9 @@ internal sealed class RarCryptoBinaryReader : RarCrcBinaryReader
public override void Mark() => _readCount = 0;
public override byte ReadByte()
{
return ReadAndDecryptBytes(1)[0];
}
public override byte ReadByte() => ReadAndDecryptBytes(1)[0];
public override byte[] ReadBytes(int count)
{
return ReadAndDecryptBytes(count);
}
public override byte[] ReadBytes(int count) => ReadAndDecryptBytes(count);
private byte[] ReadAndDecryptBytes(int count)
{

View File

@@ -9,7 +9,7 @@ internal sealed class RarCryptoWrapper : Stream
{
private readonly Stream _actualStream;
private BlockTransformer _rijndael;
private readonly Queue<byte> _data = new Queue<byte>();
private readonly Queue<byte> _data = new();
public RarCryptoWrapper(Stream actualStream, byte[] salt, ICryptKey key)
{
@@ -23,10 +23,8 @@ internal sealed class RarCryptoWrapper : Stream
public override void SetLength(long value) => throw new NotSupportedException();
public override int Read(byte[] buffer, int offset, int count)
{
return ReadAndDecrypt(buffer, offset, count);
}
public override int Read(byte[] buffer, int offset, int count) =>
ReadAndDecrypt(buffer, offset, count);
public int ReadAndDecrypt(byte[] buffer, int offset, int count)
{

View File

@@ -66,6 +66,10 @@ public abstract class RarEntry : Entry
public override bool IsSplitAfter => FileHeader.IsSplitAfter;
public bool IsRedir => FileHeader.IsRedir;
public string RedirTargetName => FileHeader.RedirTargetName;
public override string ToString() =>
string.Format(
"Entry Path: {0} Compressed Size: {1} Uncompressed Size: {2} CRC: {3}",

View File

@@ -15,15 +15,15 @@ internal class ArchiveDatabase
internal long _startPositionAfterHeader;
internal long _dataStartPosition;
internal List<long> _packSizes = new List<long>();
internal List<uint?> _packCrCs = new List<uint?>();
internal List<CFolder> _folders = new List<CFolder>();
internal List<long> _packSizes = new();
internal List<uint?> _packCrCs = new();
internal List<CFolder> _folders = new();
internal List<int> _numUnpackStreamsVector;
internal List<CFileItem> _files = new List<CFileItem>();
internal List<CFileItem> _files = new();
internal List<long> _packStreamStartPositions = new List<long>();
internal List<int> _folderStartFileIndex = new List<int>();
internal List<int> _fileIndexToFolderIndexMap = new List<int>();
internal List<long> _packStreamStartPositions = new();
internal List<int> _folderStartFileIndex = new();
internal List<int> _fileIndexToFolderIndexMap = new();
internal IPasswordProvider PasswordProvider { get; }

View File

@@ -14,13 +14,13 @@ namespace SharpCompress.Common.SevenZip;
internal class ArchiveReader
{
internal Stream _stream;
internal Stack<DataReader> _readerStack = new Stack<DataReader>();
internal Stack<DataReader> _readerStack = new();
internal DataReader _currentReader;
internal long _streamOrigin;
internal long _streamEnding;
internal byte[] _header;
private readonly Dictionary<int, Stream> _cachedStreams = new Dictionary<int, Stream>();
private readonly Dictionary<int, Stream> _cachedStreams = new();
internal void AddByteStream(byte[] buffer, int offset, int length)
{
@@ -1359,7 +1359,7 @@ internal class ArchiveReader
{
internal int _fileIndex;
internal int _folderIndex;
internal List<bool> _extractStatuses = new List<bool>();
internal List<bool> _extractStatuses = new();
internal CExtractFolderInfo(int fileIndex, int folderIndex)
{

View File

@@ -6,11 +6,11 @@ namespace SharpCompress.Common.SevenZip;
internal class CFolder
{
internal List<CCoderInfo> _coders = new List<CCoderInfo>();
internal List<CBindPair> _bindPairs = new List<CBindPair>();
internal List<int> _packStreams = new List<int>();
internal List<CCoderInfo> _coders = new();
internal List<CBindPair> _bindPairs = new();
internal List<int> _packStreams = new();
internal int _firstPackStreamId;
internal List<long> _unpackSizes = new List<long>();
internal List<long> _unpackSizes = new();
internal uint? _unpackCrc;
internal bool UnpackCrcDefined => _unpackCrc != null;

View File

@@ -7,10 +7,10 @@ internal readonly struct CMethodId
public const ulong K_LZMA2_ID = 0x21;
public const ulong K_AES_ID = 0x06F10701;
public static readonly CMethodId K_COPY = new CMethodId(K_COPY_ID);
public static readonly CMethodId K_LZMA = new CMethodId(K_LZMA_ID);
public static readonly CMethodId K_LZMA2 = new CMethodId(K_LZMA2_ID);
public static readonly CMethodId K_AES = new CMethodId(K_AES_ID);
public static readonly CMethodId K_COPY = new(K_COPY_ID);
public static readonly CMethodId K_LZMA = new(K_LZMA_ID);
public static readonly CMethodId K_LZMA2 = new(K_LZMA2_ID);
public static readonly CMethodId K_AES = new(K_AES_ID);
public readonly ulong _id;

View File

@@ -9,7 +9,7 @@ namespace SharpCompress.Common.Tar.Headers;
internal sealed class TarHeader
{
internal static readonly DateTime EPOCH = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
internal static readonly DateTime EPOCH = new(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
public TarHeader(ArchiveEncoding archiveEncoding) => ArchiveEncoding = archiveEncoding;

View File

@@ -63,7 +63,7 @@ internal class TarReadOnlySubStream : NonDisposingStream
{
count = (int)BytesLeftToRead;
}
int read = Stream.Read(buffer, offset, count);
var read = Stream.Read(buffer, offset, count);
if (read > 0)
{
BytesLeftToRead -= read;
@@ -78,7 +78,7 @@ internal class TarReadOnlySubStream : NonDisposingStream
{
return -1;
}
int value = Stream.ReadByte();
var value = Stream.ReadByte();
if (value != -1)
{
--BytesLeftToRead;

View File

@@ -14,8 +14,8 @@ internal class DirectoryEntryHeader : ZipFileEntry
VersionNeededToExtract = reader.ReadUInt16();
Flags = (HeaderFlags)reader.ReadUInt16();
CompressionMethod = (ZipCompressionMethod)reader.ReadUInt16();
LastModifiedTime = reader.ReadUInt16();
LastModifiedDate = reader.ReadUInt16();
OriginalLastModifiedTime = LastModifiedTime = reader.ReadUInt16();
OriginalLastModifiedDate = LastModifiedDate = reader.ReadUInt16();
Crc = reader.ReadUInt32();
CompressedSize = reader.ReadUInt32();
UncompressedSize = reader.ReadUInt32();
@@ -52,8 +52,8 @@ internal class DirectoryEntryHeader : ZipFileEntry
LoadExtra(extra);
var unicodePathExtra = Extra.FirstOrDefault(
u => u.Type == ExtraDataType.UnicodePathExtraField
var unicodePathExtra = Extra.FirstOrDefault(u =>
u.Type == ExtraDataType.UnicodePathExtraField
);
if (unicodePathExtra != null && ArchiveEncoding.Forced == null)
{

View File

@@ -13,8 +13,8 @@ internal class LocalEntryHeader : ZipFileEntry
Version = reader.ReadUInt16();
Flags = (HeaderFlags)reader.ReadUInt16();
CompressionMethod = (ZipCompressionMethod)reader.ReadUInt16();
LastModifiedTime = reader.ReadUInt16();
LastModifiedDate = reader.ReadUInt16();
OriginalLastModifiedTime = LastModifiedTime = reader.ReadUInt16();
OriginalLastModifiedDate = LastModifiedDate = reader.ReadUInt16();
Crc = reader.ReadUInt32();
CompressedSize = reader.ReadUInt32();
UncompressedSize = reader.ReadUInt32();
@@ -42,8 +42,8 @@ internal class LocalEntryHeader : ZipFileEntry
LoadExtra(extra);
var unicodePathExtra = Extra.FirstOrDefault(
u => u.Type == ExtraDataType.UnicodePathExtraField
var unicodePathExtra = Extra.FirstOrDefault(u =>
u.Type == ExtraDataType.UnicodePathExtraField
);
if (unicodePathExtra != null && ArchiveEncoding.Forced == null)
{

View File

@@ -67,8 +67,26 @@ internal abstract class ZipFileEntry : ZipHeader
internal WinzipAesEncryptionData WinzipAesEncryptionData { get; set; }
/// <summary>
/// The last modified date as read from the Local or Central Directory header.
/// </summary>
internal ushort OriginalLastModifiedDate { get; set; }
/// <summary>
/// The last modified date from the UnixTimeExtraField, if present, or the
/// Local or Central Directory header, if not.
/// </summary>
internal ushort LastModifiedDate { get; set; }
/// <summary>
/// The last modified time as read from the Local or Central Directory header.
/// </summary>
internal ushort OriginalLastModifiedTime { get; set; }
/// <summary>
/// The last modified time from the UnixTimeExtraField, if present, or the
/// Local or Central Directory header, if not.
/// </summary>
internal ushort LastModifiedTime { get; set; }
internal uint Crc { get; set; }

View File

@@ -6,7 +6,7 @@ namespace SharpCompress.Common.Zip;
internal class PkwareTraditionalEncryptionData
{
private static readonly CRC32 CRC32 = new CRC32();
private static readonly CRC32 CRC32 = new();
private readonly uint[] _keys = { 0x12345678, 0x23456789, 0x34567890 };
private readonly ArchiveEncoding _archiveEncoding;
@@ -39,7 +39,7 @@ internal class PkwareTraditionalEncryptionData
{
throw new CryptographicException("The password did not match.");
}
if (plainTextHeader[11] != (byte)((header.LastModifiedTime >> 8) & 0xff))
if (plainTextHeader[11] != (byte)((header.OriginalLastModifiedTime >> 8) & 0xff))
{
throw new CryptographicException("The password did not match.");
}
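
This is the substance of #805: after decrypting the 12-byte ZipCrypto header, its last byte must match the high byte of the DOS last-modified time from the basic header (the CRC high byte is used instead when no data descriptor is present, per the ZIP APPNOTE). Comparing against a time pulled from a UnixTimeExtraField makes correct passwords look wrong. A hedged illustration of the time-based check, with parameter names assumed:

using System.Security.Cryptography;

// plainTextHeader is the decrypted 12-byte ZipCrypto header; originalDosTime is the
// LastModifiedTime read from the local/central header itself (OriginalLastModifiedTime above).
static void VerifyCheckByte(byte[] plainTextHeader, ushort originalDosTime)
{
    if (plainTextHeader[11] != (byte)((originalDosTime >> 8) & 0xFF))
    {
        throw new CryptographicException("The password did not match.");
    }
}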

View File

@@ -15,10 +15,7 @@ internal class StreamingZipHeaderFactory : ZipHeaderFactory
ArchiveEncoding archiveEncoding,
IEnumerable<ZipEntry>? entries
)
: base(StreamingMode.Streaming, password, archiveEncoding)
{
_entries = entries;
}
: base(StreamingMode.Streaming, password, archiveEncoding) => _entries = entries;
internal IEnumerable<ZipHeader> ReadStreamHeader(Stream stream)
{
@@ -97,13 +94,12 @@ internal class StreamingZipHeaderFactory : ZipHeaderFactory
if (header.ZipHeaderType == ZipHeaderType.LocalEntry)
{
var local_header = ((LocalEntryHeader)header);
var dir_header = _entries?.FirstOrDefault(
entry =>
entry.Key == local_header.Name
&& local_header.CompressedSize == 0
&& local_header.UncompressedSize == 0
&& local_header.Crc == 0
&& local_header.IsDirectory == false
var dir_header = _entries?.FirstOrDefault(entry =>
entry.Key == local_header.Name
&& local_header.CompressedSize == 0
&& local_header.UncompressedSize == 0
&& local_header.Crc == 0
&& local_header.IsDirectory == false
);
if (dir_header != null)

View File

@@ -3,6 +3,7 @@ namespace SharpCompress.Common.Zip;
internal enum ZipCompressionMethod
{
None = 0,
Shrink = 1,
Deflate = 8,
Deflate64 = 9,
BZip2 = 12,

View File

@@ -22,43 +22,18 @@ public class ZipEntry : Entry
}
}
public override CompressionType CompressionType
{
get
public override CompressionType CompressionType =>
_filePart.Header.CompressionMethod switch
{
switch (_filePart.Header.CompressionMethod)
{
case ZipCompressionMethod.BZip2:
{
return CompressionType.BZip2;
}
case ZipCompressionMethod.Deflate:
{
return CompressionType.Deflate;
}
case ZipCompressionMethod.Deflate64:
{
return CompressionType.Deflate64;
}
case ZipCompressionMethod.LZMA:
{
return CompressionType.LZMA;
}
case ZipCompressionMethod.PPMd:
{
return CompressionType.PPMd;
}
case ZipCompressionMethod.None:
{
return CompressionType.None;
}
default:
{
return CompressionType.Unknown;
}
}
}
}
ZipCompressionMethod.BZip2 => CompressionType.BZip2,
ZipCompressionMethod.Deflate => CompressionType.Deflate,
ZipCompressionMethod.Deflate64 => CompressionType.Deflate64,
ZipCompressionMethod.LZMA => CompressionType.LZMA,
ZipCompressionMethod.PPMd => CompressionType.PPMd,
ZipCompressionMethod.None => CompressionType.None,
ZipCompressionMethod.Shrink => CompressionType.Shrink,
_ => CompressionType.Unknown
};
public override long Crc => _filePart.Header.Crc;
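
With Shrink mapped here and wired into ZipFilePart further down, legacy PKZIP "shrunk" entries can be pulled out like any other method. A hedged extraction sketch (file and directory names illustrative):

using System.Linq;
using SharpCompress.Archives;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;

using var archive = ZipArchive.Open("legacy.zip");
foreach (var entry in archive.Entries.Where(e => !e.IsDirectory))
{
    if (entry.CompressionType == CompressionType.Shrink)
    {
        // Method 1 (Shrink) entries now decompress through ShrinkStream.
        entry.WriteToDirectory(
            "output",
            new ExtractionOptions { ExtractFullPath = true, Overwrite = true }
        );
    }
}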

View File

@@ -9,6 +9,7 @@ using SharpCompress.Compressors.Deflate;
using SharpCompress.Compressors.Deflate64;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.PPMd;
using SharpCompress.Compressors.Shrink;
using SharpCompress.Compressors.Xz;
using SharpCompress.IO;
using ZstdSharp;
@@ -79,6 +80,15 @@ internal abstract class ZipFilePart : FilePart
return new DataDescriptorStream(stream);
}
case ZipCompressionMethod.Shrink:
{
return new ShrinkStream(
stream,
CompressionMode.Decompress,
Header.CompressedSize,
Header.UncompressedSize
);
}
case ZipCompressionMethod.Deflate:
{
return new DeflateStream(stream, CompressionMode.Decompress);
@@ -192,6 +202,7 @@ internal abstract class ZipFilePart : FilePart
switch (Header.CompressionMethod)
{
case ZipCompressionMethod.None:
case ZipCompressionMethod.Shrink:
case ZipCompressionMethod.Deflate:
case ZipCompressionMethod.Deflate64:
case ZipCompressionMethod.BZip2:

View File

@@ -142,8 +142,8 @@ internal class ZipHeaderFactory
if (entryHeader.CompressionMethod == ZipCompressionMethod.WinzipAes)
{
var data = entryHeader.Extra.SingleOrDefault(
x => x.Type == ExtraDataType.WinZipAes
var data = entryHeader.Extra.SingleOrDefault(x =>
x.Type == ExtraDataType.WinZipAes
);
if (data != null)
{

View File

@@ -87,7 +87,7 @@ internal class CBZip2InputStream : Stream
private int bsBuff;
private int bsLive;
private readonly CRC mCrc = new CRC();
private readonly CRC mCrc = new();
private readonly bool[] inUse = new bool[256];
private int nInUse;

View File

@@ -284,7 +284,7 @@ internal sealed class CBZip2OutputStream : Stream
private int bytesOut;
private int bsBuff;
private int bsLive;
private readonly CRC mCrc = new CRC();
private readonly CRC mCrc = new();
private readonly bool[] inUse = new bool[256];
private int nInUse;

View File

@@ -342,9 +342,9 @@ internal sealed partial class DeflateManager
private readonly short[] dyn_dtree; // distance tree
private readonly short[] bl_tree; // Huffman tree for bit lengths
private readonly Tree treeLiterals = new Tree(); // desc for literal tree
private readonly Tree treeDistances = new Tree(); // desc for distance tree
private readonly Tree treeBitLengths = new Tree(); // desc for bit length tree
private readonly Tree treeLiterals = new(); // desc for literal tree
private readonly Tree treeDistances = new(); // desc for distance tree
private readonly Tree treeBitLengths = new(); // desc for bit length tree
// number of codes at each bit length for an optimal tree
private readonly short[] bl_count = new short[InternalConstants.MAX_BITS + 1];
@@ -1787,21 +1787,14 @@ internal sealed partial class DeflateManager
return status == BUSY_STATE ? ZlibConstants.Z_DATA_ERROR : ZlibConstants.Z_OK;
}
private void SetDeflater()
{
switch (config.Flavor)
private void SetDeflater() =>
DeflateFunction = config.Flavor switch
{
case DeflateFlavor.Store:
DeflateFunction = DeflateNone;
break;
case DeflateFlavor.Fast:
DeflateFunction = DeflateFast;
break;
case DeflateFlavor.Slow:
DeflateFunction = DeflateSlow;
break;
}
}
DeflateFlavor.Store => DeflateNone,
DeflateFlavor.Fast => DeflateFast,
DeflateFlavor.Slow => DeflateSlow,
_ => DeflateFunction
};
internal int SetParams(CompressionLevel level, CompressionStrategy strategy)
{

View File

@@ -366,9 +366,5 @@ public class DeflateStream : Stream
#endregion
public MemoryStream InputBuffer =>
new MemoryStream(
_baseStream._z.InputBuffer,
_baseStream._z.NextIn,
_baseStream._z.AvailableBytesIn
);
new(_baseStream._z.InputBuffer, _baseStream._z.NextIn, _baseStream._z.AvailableBytesIn);
}

View File

@@ -35,15 +35,7 @@ namespace SharpCompress.Compressors.Deflate;
public class GZipStream : Stream
{
internal static readonly DateTime UNIX_EPOCH = new DateTime(
1970,
1,
1,
0,
0,
0,
DateTimeKind.Utc
);
internal static readonly DateTime UNIX_EPOCH = new(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
private string? _comment;
private string? _fileName;

View File

@@ -105,11 +105,11 @@ internal sealed class InflateBlocks
internal int[] blens; // bit lengths of codes
internal uint check; // check on output
internal object checkfn; // check function
internal InflateCodes codes = new InflateCodes(); // if CODES, current state
internal InflateCodes codes = new(); // if CODES, current state
internal int end; // one byte after sliding window
internal int[] hufts; // single malloc for tree space
internal int index; // index into blens (or border)
internal InfTree inftree = new InfTree();
internal InfTree inftree = new();
internal int last; // true if this block is the last block
internal int left; // if STORED, bytes left to copy
private InflateBlockMode mode; // current inflate_block mode

View File

@@ -102,7 +102,7 @@ internal class ZlibBaseStream : Stream
{
if (_z is null)
{
bool wantRfc1950Header = (_flavor == ZlibStreamFlavor.ZLIB);
var wantRfc1950Header = (_flavor == ZlibStreamFlavor.ZLIB);
_z = new ZlibCodec();
if (_compressionMode == CompressionMode.Decompress)
{
@@ -147,13 +147,13 @@ internal class ZlibBaseStream : Stream
z.InputBuffer = buffer;
_z.NextIn = offset;
_z.AvailableBytesIn = count;
bool done = false;
var done = false;
do
{
_z.OutputBuffer = workingBuffer;
_z.NextOut = 0;
_z.AvailableBytesOut = _workingBuffer.Length;
int rc = (_wantCompress) ? _z.Deflate(_flushMode) : _z.Inflate(_flushMode);
var rc = (_wantCompress) ? _z.Deflate(_flushMode) : _z.Inflate(_flushMode);
if (rc != ZlibConstants.Z_OK && rc != ZlibConstants.Z_STREAM_END)
{
throw new ZlibException((_wantCompress ? "de" : "in") + "flating: " + _z.Message);
@@ -181,18 +181,18 @@ internal class ZlibBaseStream : Stream
if (_streamMode == StreamMode.Writer)
{
bool done = false;
var done = false;
do
{
_z.OutputBuffer = workingBuffer;
_z.NextOut = 0;
_z.AvailableBytesOut = _workingBuffer.Length;
int rc =
var rc =
(_wantCompress) ? _z.Deflate(FlushType.Finish) : _z.Inflate(FlushType.Finish);
if (rc != ZlibConstants.Z_STREAM_END && rc != ZlibConstants.Z_OK)
{
string verb = (_wantCompress ? "de" : "in") + "flating";
var verb = (_wantCompress ? "de" : "in") + "flating";
if (_z.Message is null)
{
throw new ZlibException(String.Format("{0}: (rc = {1})", verb, rc));
@@ -225,7 +225,7 @@ internal class ZlibBaseStream : Stream
Span<byte> intBuf = stackalloc byte[4];
BinaryPrimitives.WriteInt32LittleEndian(intBuf, crc.Crc32Result);
_stream.Write(intBuf);
int c2 = (int)(crc.TotalBytesRead & 0x00000000FFFFFFFF);
var c2 = (int)(crc.TotalBytesRead & 0x00000000FFFFFFFF);
BinaryPrimitives.WriteInt32LittleEndian(intBuf, c2);
_stream.Write(intBuf);
}
@@ -256,8 +256,8 @@ internal class ZlibBaseStream : Stream
{
// Make sure we have read to the end of the stream
_z.InputBuffer.AsSpan(_z.NextIn, _z.AvailableBytesIn).CopyTo(trailer);
int bytesNeeded = 8 - _z.AvailableBytesIn;
int bytesRead = _stream.Read(
var bytesNeeded = 8 - _z.AvailableBytesIn;
var bytesRead = _stream.Read(
trailer.Slice(_z.AvailableBytesIn, bytesNeeded)
);
if (bytesNeeded != bytesRead)
@@ -275,10 +275,10 @@ internal class ZlibBaseStream : Stream
_z.InputBuffer.AsSpan(_z.NextIn, trailer.Length).CopyTo(trailer);
}
Int32 crc32_expected = BinaryPrimitives.ReadInt32LittleEndian(trailer);
Int32 crc32_actual = crc.Crc32Result;
Int32 isize_expected = BinaryPrimitives.ReadInt32LittleEndian(trailer.Slice(4));
Int32 isize_actual = (Int32)(_z.TotalBytesOut & 0x00000000FFFFFFFF);
var crc32_expected = BinaryPrimitives.ReadInt32LittleEndian(trailer);
var crc32_actual = crc.Crc32Result;
var isize_expected = BinaryPrimitives.ReadInt32LittleEndian(trailer.Slice(4));
var isize_actual = (Int32)(_z.TotalBytesOut & 0x00000000FFFFFFFF);
if (crc32_actual != crc32_expected)
{
@@ -380,11 +380,11 @@ internal class ZlibBaseStream : Stream
private string ReadZeroTerminatedString()
{
var list = new List<byte>();
bool done = false;
var done = false;
do
{
// workitem 7740
int n = _stream.Read(_buf1, 0, 1);
var n = _stream.Read(_buf1, 0, 1);
if (n != 1)
{
throw new ZlibException("Unexpected EOF reading GZIP header.");
@@ -398,17 +398,17 @@ internal class ZlibBaseStream : Stream
list.Add(_buf1[0]);
}
} while (!done);
byte[] buffer = list.ToArray();
var buffer = list.ToArray();
return _encoding.GetString(buffer, 0, buffer.Length);
}
private int _ReadAndValidateGzipHeader()
{
int totalBytesRead = 0;
var totalBytesRead = 0;
// read the header on the first read
Span<byte> header = stackalloc byte[10];
int n = _stream.Read(header);
var n = _stream.Read(header);
// workitem 8501: handle edge case (decompress empty stream)
if (n == 0)
@@ -426,7 +426,7 @@ internal class ZlibBaseStream : Stream
throw new ZlibException("Bad GZIP header.");
}
int timet = BinaryPrimitives.ReadInt32LittleEndian(header.Slice(4));
var timet = BinaryPrimitives.ReadInt32LittleEndian(header.Slice(4));
_GzipMtime = TarHeader.EPOCH.AddSeconds(timet);
totalBytesRead += n;
if ((header[3] & 0x04) == 0x04)
@@ -435,8 +435,8 @@ internal class ZlibBaseStream : Stream
n = _stream.Read(header.Slice(0, 2)); // 2-byte length field
totalBytesRead += n;
short extraLength = (short)(header[0] + header[1] * 256);
byte[] extra = new byte[extraLength];
var extraLength = (short)(header[0] + header[1] * 256);
var extra = new byte[extraLength];
n = _stream.Read(extra, 0, extra.Length);
if (n != extraLength)
{
@@ -498,7 +498,7 @@ internal class ZlibBaseStream : Stream
throw new ZlibException("Cannot Read after Writing.");
}
int rc = 0;
var rc = 0;
// set up the output of the deflate/inflate codec:
_z.OutputBuffer = buffer;

View File

@@ -118,8 +118,8 @@ public sealed class Deflate64Stream : Stream
EnsureNotDisposed();
int bytesRead;
int currentOffset = offset;
int remainingCount = count;
var currentOffset = offset;
var remainingCount = count;
while (true)
{
@@ -142,7 +142,7 @@ public sealed class Deflate64Stream : Stream
break;
}
int bytes = _stream.Read(_buffer, 0, _buffer.Length);
var bytes = _stream.Read(_buffer, 0, _buffer.Length);
if (bytes <= 0)
{
break;

View File

@@ -22,7 +22,7 @@ internal sealed class DeflateInput
Debug.Assert(StartIndex + Count <= Buffer.Length, "Input buffer is in invalid state!");
}
internal InputState DumpState() => new InputState(Count, StartIndex);
internal InputState DumpState() => new(Count, StartIndex);
internal void RestoreState(InputState state)
{

View File

@@ -42,11 +42,9 @@ internal sealed class HuffmanTree
private readonly int _tableMask;
// huffman tree for static block
public static HuffmanTree StaticLiteralLengthTree { get; } =
new HuffmanTree(GetStaticLiteralTreeLength());
public static HuffmanTree StaticLiteralLengthTree { get; } = new(GetStaticLiteralTreeLength());
public static HuffmanTree StaticDistanceTree { get; } =
new HuffmanTree(GetStaticDistanceTreeLength());
public static HuffmanTree StaticDistanceTree { get; } = new(GetStaticDistanceTreeLength());
public HuffmanTree(byte[] codeLengths)
{

View File

@@ -243,8 +243,8 @@ internal sealed class InflaterManaged
private void Reset() =>
_state = //_hasFormatReader ?
//InflaterState.ReadingHeader : // start by reading Header info
InflaterState.ReadingBFinal; // start by reading BFinal bit
//InflaterState.ReadingHeader : // start by reading Header info
InflaterState.ReadingBFinal; // start by reading BFinal bit
public void SetInput(byte[] inputBytes, int offset, int length) =>
_input.SetInput(inputBytes, offset, length); // append the bytes

View File

@@ -18,7 +18,7 @@ internal class BCJFilterARM : Filter
{
if ((buffer[i + 3] & 0xFF) == 0xEB)
{
int src =
var src =
((buffer[i + 2] & 0xFF) << 16)
| ((buffer[i + 1] & 0xFF) << 8)
| (buffer[i] & 0xFF);

View File

@@ -18,7 +18,7 @@ internal class BCJFilterARMT : Filter
{
if ((buffer[i + 1] & 0xF8) == 0xF0 && (buffer[i + 3] & 0xF8) == 0xF8)
{
int src =
var src =
((buffer[i + 1] & 0x07) << 19)
| ((buffer[i] & 0xFF) << 11)
| ((buffer[i + 3] & 0x07) << 8)

View File

@@ -52,29 +52,29 @@ internal class BCJFilterIA64 : Filter
for (i = offset; i <= end; i += 16)
{
int instrTemplate = buffer[i] & 0x1F;
int mask = BRANCH_TABLE[instrTemplate];
var instrTemplate = buffer[i] & 0x1F;
var mask = BRANCH_TABLE[instrTemplate];
for (int slot = 0, bitPos = 5; slot < 3; ++slot, bitPos += 41)
{
if (((mask >>> slot) & 1) == 0)
continue;
int bytePos = bitPos >>> 3;
int bitRes = bitPos & 7;
var bytePos = bitPos >>> 3;
var bitRes = bitPos & 7;
long instr = 0;
for (int j = 0; j < 6; ++j)
for (var j = 0; j < 6; ++j)
{
instr |= (buffer[i + bytePos + j] & 0xFFL) << (8 * j);
}
long instrNorm = instr >>> bitRes;
var instrNorm = instr >>> bitRes;
if (((instrNorm >>> 37) & 0x0F) != 0x05 || ((instrNorm >>> 9) & 0x07) != 0x00)
continue;
int src = (int)((instrNorm >>> 13) & 0x0FFFFF);
var src = (int)((instrNorm >>> 13) & 0x0FFFFF);
src |= ((int)(instrNorm >>> 36) & 1) << 20;
src <<= 4;
@@ -93,7 +93,7 @@ internal class BCJFilterIA64 : Filter
instr &= (1 << bitRes) - 1;
instr |= instrNorm << bitRes;
for (int j = 0; j < 6; ++j)
for (var j = 0; j < 6; ++j)
{
buffer[i + bytePos + j] = (byte)(instr >>> (8 * j));
}

View File

@@ -18,7 +18,7 @@ internal class BCJFilterPPC : Filter
{
if ((buffer[i] & 0xFC) == 0x48 && (buffer[i + 3] & 0x03) == 0x01)
{
int src =
var src =
((buffer[i] & 0x03) << 24)
| ((buffer[i + 1] & 0xFF) << 16)
| ((buffer[i + 2] & 0xFF) << 8)

View File

@@ -21,7 +21,7 @@ internal class BCJFilterSPARC : Filter
|| (buffer[i] == 0x7F && (buffer[i + 1] & 0xC0) == 0xC0)
)
{
int src =
var src =
((buffer[i] & 0xFF) << 24)
| ((buffer[i + 1] & 0xFF) << 16)
| ((buffer[i + 2] & 0xFF) << 8)

View File

@@ -24,40 +24,27 @@ public sealed class BranchExecFilter
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static bool X86TestByte(byte b)
{
return b == 0x00 || b == 0xFF;
}
private static bool X86TestByte(byte b) => b == 0x00 || b == 0xFF;
//Replaced X86Converter with bcj_x86() - https://github.com/torvalds/linux/blob/master/lib/xz/xz_dec_bcj.c
//This was to fix an issue decoding a Test zip made with WinZip (that 7zip was also able to read).
//The previous version of the code would corrupt 2 bytes in the Test.exe at 0x6CF9 (3D6D - should be 4000) - Test zip: WinZip27.Xz.zipx
public static void X86Converter(byte[] buf, uint ip, ref uint state)
{
bool[] mask_to_allowed_status = new[]
{
true,
true,
true,
false,
true,
false,
false,
false
};
var mask_to_allowed_status = new[] { true, true, true, false, true, false, false, false };
byte[] mask_to_bit_num = new byte[] { 0, 1, 2, 2, 3, 3, 3, 3 };
var mask_to_bit_num = new byte[] { 0, 1, 2, 2, 3, 3, 3, 3 };
int i;
int prev_pos = -1;
uint prev_mask = state & 7;
var prev_pos = -1;
var prev_mask = state & 7;
uint src;
uint dest;
uint j;
byte b;
uint pos = ip;
var pos = ip;
uint size = (uint)buf.Length;
var size = (uint)buf.Length;
if (size <= 4)
return;

View File

@@ -23,9 +23,9 @@ namespace SharpCompress.Compressors.Filters
protected override int Transform(byte[] buffer, int offset, int count)
{
int end = offset + count;
var end = offset + count;
for (int i = offset; i < end; i++)
for (var i = offset; i < end; i++)
{
buffer[i] += _history[(_distance + _position--) & DISTANCE_MASK];
_history[_position & DISTANCE_MASK] = buffer[i];
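
For context, this delta filter reconstructs each output byte by adding the already-decoded byte sitting distance positions earlier; the ring buffer only exists so that state survives across Transform calls. A self-contained sketch of the same idea when the whole stream fits in one buffer (assumption):

// Minimal in-place delta decode, equivalent in spirit to the filter above
// for a single contiguous buffer.
static void DeltaDecode(byte[] buffer, int distance)
{
    for (var i = distance; i < buffer.Length; i++)
    {
        buffer[i] += buffer[i - distance];
    }
}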

View File

@@ -35,10 +35,10 @@ internal sealed class AesDecoderStream : DecoderStream2
throw new NotSupportedException("AES decoder does not support padding.");
}
Init(info, out int numCyclesPower, out byte[] salt, out byte[] seed);
Init(info, out var numCyclesPower, out var salt, out var seed);
byte[] password = Encoding.Unicode.GetBytes(pass.CryptoGetTextPassword());
byte[]? key = InitKey(numCyclesPower, salt, password);
var password = Encoding.Unicode.GetBytes(pass.CryptoGetTextPassword());
var key = InitKey(numCyclesPower, salt, password);
if (key == null)
{
throw new InvalidOperationException("Initialized with null key");
@@ -100,7 +100,7 @@ internal sealed class AesDecoderStream : DecoderStream2
do
{
int read = mStream.Read(mBuffer, mEnding, mBuffer.Length - mEnding);
var read = mStream.Read(mBuffer, mEnding, mBuffer.Length - mEnding);
if (read == 0)
{
// We are not done decoding and have less than 16 bytes.
@@ -133,7 +133,7 @@ internal sealed class AesDecoderStream : DecoderStream2
}
// Otherwise we transform directly into the target buffer.
int processed = mDecoder.TransformBlock(mBuffer, mOffset, count & ~15, buffer, offset);
var processed = mDecoder.TransformBlock(mBuffer, mOffset, count & ~15, buffer, offset);
mOffset += processed;
mWritten += processed;
return processed;
@@ -143,7 +143,7 @@ internal sealed class AesDecoderStream : DecoderStream2
private void Init(byte[] info, out int numCyclesPower, out byte[] salt, out byte[] iv)
{
byte bt = info[0];
var bt = info[0];
numCyclesPower = bt & 0x3F;
if ((bt & 0xC0) == 0)
@@ -153,14 +153,14 @@ internal sealed class AesDecoderStream : DecoderStream2
return;
}
int saltSize = (bt >> 7) & 1;
int ivSize = (bt >> 6) & 1;
var saltSize = (bt >> 7) & 1;
var ivSize = (bt >> 6) & 1;
if (info.Length == 1)
{
throw new InvalidOperationException();
}
byte bt2 = info[1];
var bt2 = info[1];
saltSize += (bt2 >> 4);
ivSize += (bt2 & 15);
if (info.Length < 2 + saltSize + ivSize)
@@ -169,13 +169,13 @@ internal sealed class AesDecoderStream : DecoderStream2
}
salt = new byte[saltSize];
for (int i = 0; i < saltSize; i++)
for (var i = 0; i < saltSize; i++)
{
salt[i] = info[i + 2];
}
iv = new byte[16];
for (int i = 0; i < ivSize; i++)
for (var i = 0; i < ivSize; i++)
{
iv[i] = info[i + saltSize + 2];
}
@@ -198,7 +198,7 @@ internal sealed class AesDecoderStream : DecoderStream2
key[pos] = salt[pos];
}
for (int i = 0; i < pass.Length && pos < 32; i++)
for (var i = 0; i < pass.Length && pos < 32; i++)
{
key[pos++] = pass[i];
}
@@ -208,9 +208,9 @@ internal sealed class AesDecoderStream : DecoderStream2
else
{
#if NETSTANDARD2_0
using IncrementalHash sha = IncrementalHash.CreateHash(HashAlgorithmName.SHA256);
byte[] counter = new byte[8];
long numRounds = 1L << mNumCyclesPower;
using var sha = IncrementalHash.CreateHash(HashAlgorithmName.SHA256);
var counter = new byte[8];
var numRounds = 1L << mNumCyclesPower;
for (long round = 0; round < numRounds; round++)
{
sha.AppendData(salt, 0, salt.Length);
@@ -219,7 +219,7 @@ internal sealed class AesDecoderStream : DecoderStream2
// This mirrors the counter so we don't have to convert long to byte[] each round.
// (It also ensures the counter is little endian, which BitConverter does not.)
for (int i = 0; i < 8; i++)
for (var i = 0; i < 8; i++)
{
if (++counter[i] != 0)
{
@@ -230,8 +230,8 @@ internal sealed class AesDecoderStream : DecoderStream2
return sha.GetHashAndReset();
#else
using var sha = SHA256.Create();
byte[] counter = new byte[8];
long numRounds = 1L << mNumCyclesPower;
var counter = new byte[8];
var numRounds = 1L << mNumCyclesPower;
for (long round = 0; round < numRounds; round++)
{
sha.TransformBlock(salt, 0, salt.Length, null, 0);
@@ -240,7 +240,7 @@ internal sealed class AesDecoderStream : DecoderStream2
// This mirrors the counter so we don't have to convert long to byte[] each round.
// (It also ensures the counter is little endian, which BitConverter does not.)
for (int i = 0; i < 8; i++)
for (var i = 0; i < 8; i++)
{
if (++counter[i] != 0)
{
@@ -261,7 +261,7 @@ internal sealed class AesDecoderStream : DecoderStream2
// Just transform as much as possible so we can feed from it as long as possible.
if (mUnderflow == 0)
{
int blockSize = (mEnding - mOffset) & ~15;
var blockSize = (mEnding - mOffset) & ~15;
mUnderflow = mDecoder.TransformBlock(mBuffer, mOffset, blockSize, mBuffer, mOffset);
}
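
The key setup in this decoder hashes salt, the UTF-16 password bytes, and an 8-byte little-endian round counter into a single SHA-256 state for 2^numCyclesPower rounds, which is the 7z convention; when the cycle count is 0x3F the salt and password bytes are copied into the key directly (the non-hashing branch shown above). A hedged reconstruction of the hashing branch:

using System.Security.Cryptography;

static byte[] DeriveSevenZipAesKey(byte[] salt, byte[] passwordUtf16, int numCyclesPower)
{
    using var sha = IncrementalHash.CreateHash(HashAlgorithmName.SHA256);
    var counter = new byte[8];                  // mirrored little-endian round counter
    var numRounds = 1L << numCyclesPower;
    for (long round = 0; round < numRounds; round++)
    {
        sha.AppendData(salt);
        sha.AppendData(passwordUtf16);
        sha.AppendData(counter);
        for (var i = 0; i < 8; i++)             // increment without BitConverter, as in the code above
        {
            if (++counter[i] != 0)
            {
                break;
            }
        }
    }
    return sha.GetHashAndReset();               // the 32-byte AES-256 key
}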

View File

@@ -6,7 +6,7 @@ namespace SharpCompress.Compressors.LZMA;
internal static class Log
{
private static readonly Stack<string> INDENT = new Stack<string>();
private static readonly Stack<string> INDENT = new();
private static bool NEEDS_INDENT = true;
static Log() => INDENT.Push("");

View File

@@ -11,11 +11,11 @@ public class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Stream
{
private class LenDecoder
{
private BitDecoder _choice = new BitDecoder();
private BitDecoder _choice2 = new BitDecoder();
private BitDecoder _choice = new();
private BitDecoder _choice2 = new();
private readonly BitTreeDecoder[] _lowCoder = new BitTreeDecoder[Base.K_NUM_POS_STATES_MAX];
private readonly BitTreeDecoder[] _midCoder = new BitTreeDecoder[Base.K_NUM_POS_STATES_MAX];
private BitTreeDecoder _highCoder = new BitTreeDecoder(Base.K_NUM_HIGH_LEN_BITS);
private BitTreeDecoder _highCoder = new(Base.K_NUM_HIGH_LEN_BITS);
private uint _numPosStates;
public void Create(uint numPosStates)
@@ -173,18 +173,18 @@ public class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Stream
Base.K_NUM_FULL_DISTANCES - Base.K_END_POS_MODEL_INDEX
];
private BitTreeDecoder _posAlignDecoder = new BitTreeDecoder(Base.K_NUM_ALIGN_BITS);
private BitTreeDecoder _posAlignDecoder = new(Base.K_NUM_ALIGN_BITS);
private readonly LenDecoder _lenDecoder = new LenDecoder();
private readonly LenDecoder _repLenDecoder = new LenDecoder();
private readonly LenDecoder _lenDecoder = new();
private readonly LenDecoder _repLenDecoder = new();
private readonly LiteralDecoder _literalDecoder = new LiteralDecoder();
private readonly LiteralDecoder _literalDecoder = new();
private int _dictionarySize;
private uint _posStateMask;
private Base.State _state = new Base.State();
private Base.State _state = new();
private uint _rep0,
_rep1,
_rep2,
@@ -318,9 +318,8 @@ public class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Stream
{
var posState = (uint)outWindow._total & _posStateMask;
if (
_isMatchDecoders[
(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState
].Decode(rangeDecoder) == 0
_isMatchDecoders[(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState]
.Decode(rangeDecoder) == 0
)
{
byte b;
@@ -355,7 +354,8 @@ public class Decoder : ICoder, ISetDecoderProperties // ,System.IO.Stream
if (
_isRep0LongDecoders[
(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState
].Decode(rangeDecoder) == 0
]
.Decode(rangeDecoder) == 0
)
{
_state.UpdateShortRep();

View File

@@ -61,7 +61,7 @@ internal class Encoder : ICoder, ISetCoderProperties, IWriteCoderProperties
return (uint)(G_FAST_POS[pos >> 26] + 52);
}
private Base.State _state = new Base.State();
private Base.State _state = new();
private byte _previousByte;
private readonly uint[] _repDistances = new uint[Base.K_NUM_REP_DISTANCES];
@@ -191,15 +191,15 @@ internal class Encoder : ICoder, ISetCoderProperties, IWriteCoderProperties
private class LenEncoder
{
private BitEncoder _choice = new BitEncoder();
private BitEncoder _choice2 = new BitEncoder();
private BitEncoder _choice = new();
private BitEncoder _choice2 = new();
private readonly BitTreeEncoder[] _lowCoder = new BitTreeEncoder[
Base.K_NUM_POS_STATES_ENCODING_MAX
];
private readonly BitTreeEncoder[] _midCoder = new BitTreeEncoder[
Base.K_NUM_POS_STATES_ENCODING_MAX
];
private BitTreeEncoder _highCoder = new BitTreeEncoder(Base.K_NUM_HIGH_LEN_BITS);
private BitTreeEncoder _highCoder = new(Base.K_NUM_HIGH_LEN_BITS);
public LenEncoder()
{
@@ -359,7 +359,7 @@ internal class Encoder : ICoder, ISetCoderProperties, IWriteCoderProperties
private readonly Optimal[] _optimum = new Optimal[K_NUM_OPTS];
private BinTree _matchFinder;
private readonly RangeCoder.Encoder _rangeEncoder = new RangeCoder.Encoder();
private readonly RangeCoder.Encoder _rangeEncoder = new();
private readonly BitEncoder[] _isMatch = new BitEncoder[
Base.K_NUM_STATES << Base.K_NUM_POS_STATES_BITS_MAX
@@ -382,12 +382,12 @@ internal class Encoder : ICoder, ISetCoderProperties, IWriteCoderProperties
Base.K_NUM_FULL_DISTANCES - Base.K_END_POS_MODEL_INDEX
];
private BitTreeEncoder _posAlignEncoder = new BitTreeEncoder(Base.K_NUM_ALIGN_BITS);
private BitTreeEncoder _posAlignEncoder = new(Base.K_NUM_ALIGN_BITS);
private readonly LenPriceTableEncoder _lenEncoder = new LenPriceTableEncoder();
private readonly LenPriceTableEncoder _repMatchLenEncoder = new LenPriceTableEncoder();
private readonly LenPriceTableEncoder _lenEncoder = new();
private readonly LenPriceTableEncoder _repMatchLenEncoder = new();
private readonly LiteralEncoder _literalEncoder = new LiteralEncoder();
private readonly LiteralEncoder _literalEncoder = new();
private readonly uint[] _matchDistances = new uint[(Base.K_MATCH_MAX_LEN * 2) + 2];
@@ -553,9 +553,8 @@ internal class Encoder : ICoder, ISetCoderProperties, IWriteCoderProperties
if (repIndex == 0)
{
price = _isRepG0[state._index].GetPrice0();
price += _isRep0Long[
(state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState
].GetPrice1();
price += _isRep0Long[(state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState]
.GetPrice1();
}
else
{
@@ -713,9 +712,8 @@ internal class Encoder : ICoder, ISetCoderProperties, IWriteCoderProperties
.GetPrice(!_state.IsCharState(), matchByte, currentByte);
_optimum[1].MakeAsChar();
var matchPrice = _isMatch[
(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState
].GetPrice1();
var matchPrice = _isMatch[(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState]
.GetPrice1();
var repMatchPrice = matchPrice + _isRep[_state._index].GetPrice1();
if (matchByte == currentByte)
@@ -995,9 +993,8 @@ internal class Encoder : ICoder, ISetCoderProperties, IWriteCoderProperties
var posStateNext = (position + 1) & _posStateMask;
var nextRepMatchPrice =
curAnd1Price
+ _isMatch[
(state2._index << Base.K_NUM_POS_STATES_BITS_MAX) + posStateNext
].GetPrice1()
+ _isMatch[(state2._index << Base.K_NUM_POS_STATES_BITS_MAX) + posStateNext]
.GetPrice1()
+ _isRep[state2._index].GetPrice1();
{
var offset = cur + 1 + lenTest2;
@@ -1069,7 +1066,8 @@ internal class Encoder : ICoder, ISetCoderProperties, IWriteCoderProperties
+ GetRepPrice(repIndex, lenTest, state, posState)
+ _isMatch[
(state2._index << Base.K_NUM_POS_STATES_BITS_MAX) + posStateNext
].GetPrice0()
]
.GetPrice0()
+ _literalEncoder
.GetSubCoder(
position + lenTest,
@@ -1088,7 +1086,8 @@ internal class Encoder : ICoder, ISetCoderProperties, IWriteCoderProperties
curAndLenCharPrice
+ _isMatch[
(state2._index << Base.K_NUM_POS_STATES_BITS_MAX) + posStateNext
].GetPrice1();
]
.GetPrice1();
var nextRepMatchPrice = nextMatchPrice + _isRep[state2._index].GetPrice1();
// for(; lenTest2 >= 2; lenTest2--)
@@ -1174,7 +1173,8 @@ internal class Encoder : ICoder, ISetCoderProperties, IWriteCoderProperties
+ _isMatch[
(state2._index << Base.K_NUM_POS_STATES_BITS_MAX)
+ posStateNext
].GetPrice0()
]
.GetPrice0()
+ _literalEncoder
.GetSubCoder(
position + lenTest,
@@ -1194,7 +1194,8 @@ internal class Encoder : ICoder, ISetCoderProperties, IWriteCoderProperties
+ _isMatch[
(state2._index << Base.K_NUM_POS_STATES_BITS_MAX)
+ posStateNext
].GetPrice1();
]
.GetPrice1();
var nextRepMatchPrice =
nextMatchPrice + _isRep[state2._index].GetPrice1();
@@ -1243,10 +1244,8 @@ internal class Encoder : ICoder, ISetCoderProperties, IWriteCoderProperties
return;
}
_isMatch[(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState].Encode(
_rangeEncoder,
1
);
_isMatch[(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState]
.Encode(_rangeEncoder, 1);
_isRep[_state._index].Encode(_rangeEncoder, 0);
_state.UpdateMatch();
var len = Base.K_MATCH_MIN_LEN;
@@ -1321,10 +1320,8 @@ internal class Encoder : ICoder, ISetCoderProperties, IWriteCoderProperties
// it's not used
ReadMatchDistances(out var len, out var numDistancePairs);
var posState = (uint)(_nowPos64) & _posStateMask;
_isMatch[(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState].Encode(
_rangeEncoder,
0
);
_isMatch[(_state._index << Base.K_NUM_POS_STATES_BITS_MAX) + posState]
.Encode(_rangeEncoder, 0);
_state.UpdateChar();
var curByte = _matchFinder.GetIndexByte((int)(0 - _additionalOffset));
_literalEncoder
@@ -1730,7 +1727,7 @@ internal class Encoder : ICoder, ISetCoderProperties, IWriteCoderProperties
ReadOnlySpan<object> properties
)
{
for (int i = 0; i < properties.Length; i++)
for (var i = 0; i < properties.Length; i++)
{
var prop = properties[i];
switch (propIDs[i])

View File

@@ -14,8 +14,8 @@ public class LzmaStream : Stream
private readonly long _outputSize;
private readonly int _dictionarySize;
private readonly OutWindow _outWindow = new OutWindow();
private readonly RangeCoder.Decoder _rangeDecoder = new RangeCoder.Decoder();
private readonly OutWindow _outWindow = new();
private readonly RangeCoder.Decoder _rangeDecoder = new();
private Decoder _decoder;
private long _position;

View File

@@ -0,0 +1,65 @@
namespace SharpCompress.Compressors.Lzw
{
/// <summary>
/// This class contains constants used for LZW
/// </summary>
[System.Diagnostics.CodeAnalysis.SuppressMessage(
"Naming",
"CA1707:Identifiers should not contain underscores",
Justification = "kept for backwards compatibility"
)]
public sealed class LzwConstants
{
/// <summary>
/// Magic number found at start of LZW header: 0x1f 0x9d
/// </summary>
public const int MAGIC = 0x1f9d;
/// <summary>
/// Maximum number of bits per code
/// </summary>
public const int MAX_BITS = 16;
/* 3rd header byte:
* bit 0..4 Number of compression bits
* bit 5 Extended header
* bit 6 Free
* bit 7 Block mode
*/
/// <summary>
/// Mask for 'number of compression bits'
/// </summary>
public const int BIT_MASK = 0x1f;
/// <summary>
/// Indicates the presence of a fourth header byte
/// </summary>
public const int EXTENDED_MASK = 0x20;
//public const int FREE_MASK = 0x40;
/// <summary>
/// Reserved bits
/// </summary>
public const int RESERVED_MASK = 0x60;
/// <summary>
/// Block compression: if table is full and compression rate is dropping,
/// clear the dictionary.
/// </summary>
public const int BLOCK_MODE_MASK = 0x80;
/// <summary>
/// LZW file header size (in bytes)
/// </summary>
public const int HDR_SIZE = 3;
/// <summary>
/// Initial number of bits per code
/// </summary>
public const int INIT_BITS = 9;
private LzwConstants() { }
}
}
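The constants above describe the layout of the third header byte of a .Z stream. A minimal, hypothetical sketch of decoding that byte with them (the helper name and return shape are not part of the library):

using SharpCompress.Compressors.Lzw;

internal static class LzwHeaderByte
{
    // Splits the third header byte into its documented fields.
    public static (int maxBits, bool blockMode, bool extendedHeader) Parse(byte flags)
    {
        var maxBits = flags & LzwConstants.BIT_MASK;                    // bits 0..4
        var extendedHeader = (flags & LzwConstants.EXTENDED_MASK) != 0; // bit 5
        var blockMode = (flags & LzwConstants.BLOCK_MODE_MASK) != 0;    // bit 7
        return (maxBits, blockMode, extendedHeader);
    }
}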

View File

@@ -0,0 +1,597 @@
using System;
using System.IO;
using SharpCompress.Common;
namespace SharpCompress.Compressors.Lzw
{
/// <summary>
    /// This filter stream is used to decompress an LZW format stream.
/// Specifically, a stream that uses the LZC compression method.
/// This file format is usually associated with the .Z file extension.
///
/// See http://en.wikipedia.org/wiki/Compress
/// See http://wiki.wxwidgets.org/Development:_Z_File_Format
///
/// The file header consists of 3 (or optionally 4) bytes. The first two bytes
/// contain the magic marker "0x1f 0x9d", followed by a byte of flags.
///
/// Based on Java code by Ronald Tschalar, which in turn was based on the unlzw.c
/// code in the gzip package.
/// </summary>
    /// <example> This sample shows how to decompress an LZW-compressed (.Z) file
/// <code>
/// using System;
/// using System.IO;
///
/// using ICSharpCode.SharpZipLib.Core;
/// using ICSharpCode.SharpZipLib.LZW;
///
/// class MainClass
/// {
/// public static void Main(string[] args)
/// {
/// using (Stream inStream = new LzwInputStream(File.OpenRead(args[0])))
/// using (FileStream outStream = File.Create(Path.GetFileNameWithoutExtension(args[0]))) {
/// byte[] buffer = new byte[4096];
/// StreamUtils.Copy(inStream, outStream, buffer);
/// // OR
/// inStream.Read(buffer, 0, buffer.Length);
/// // now do something with the buffer
/// }
/// }
/// }
/// </code>
/// </example>
public class LzwStream : Stream
{
public static bool IsLzwStream(Stream stream)
{
try
{
byte[] hdr = new byte[LzwConstants.HDR_SIZE];
int result = stream.Read(hdr, 0, hdr.Length);
// Check the magic marker
if (result < 0)
throw new IncompleteArchiveException("Failed to read LZW header");
if (hdr[0] != (LzwConstants.MAGIC >> 8) || hdr[1] != (LzwConstants.MAGIC & 0xff))
{
throw new IncompleteArchiveException(
String.Format(
"Wrong LZW header. Magic bytes don't match. 0x{0:x2} 0x{1:x2}",
hdr[0],
hdr[1]
)
);
}
}
catch (Exception)
{
return false;
}
return true;
}
/// <summary>
/// Gets or sets a flag indicating ownership of underlying stream.
/// When the flag is true <see cref="Stream.Dispose()" /> will close the underlying stream also.
/// </summary>
        /// <remarks>The default value is false.</remarks>
public bool IsStreamOwner { get; set; } = false;
/// <summary>
/// Creates a LzwInputStream
/// </summary>
/// <param name="baseInputStream">
/// The stream to read compressed data from (baseInputStream LZW format)
/// </param>
public LzwStream(Stream baseInputStream)
{
this.baseInputStream = baseInputStream;
}
/// <summary>
/// See <see cref="System.IO.Stream.ReadByte"/>
/// </summary>
/// <returns></returns>
public override int ReadByte()
{
int b = Read(one, 0, 1);
if (b == 1)
return (one[0] & 0xff);
return -1;
}
/// <summary>
/// Reads decompressed data into the provided buffer byte array
/// </summary>
/// <param name ="buffer">
/// The array to read and decompress data into
/// </param>
/// <param name ="offset">
/// The offset indicating where the data should be placed
/// </param>
/// <param name ="count">
/// The number of bytes to decompress
/// </param>
/// <returns>The number of bytes read. Zero signals the end of stream</returns>
public override int Read(byte[] buffer, int offset, int count)
{
if (!headerParsed)
ParseHeader();
if (eof)
return 0;
int start = offset;
/* Using local copies of various variables speeds things up by as
* much as 30% in Java! Performance not tested in C#.
*/
int[] lTabPrefix = tabPrefix;
byte[] lTabSuffix = tabSuffix;
byte[] lStack = stack;
int lNBits = nBits;
int lMaxCode = maxCode;
int lMaxMaxCode = maxMaxCode;
int lBitMask = bitMask;
int lOldCode = oldCode;
byte lFinChar = finChar;
int lStackP = stackP;
int lFreeEnt = freeEnt;
byte[] lData = data;
int lBitPos = bitPos;
// empty stack if stuff still left
int sSize = lStack.Length - lStackP;
if (sSize > 0)
{
int num = (sSize >= count) ? count : sSize;
Array.Copy(lStack, lStackP, buffer, offset, num);
offset += num;
count -= num;
lStackP += num;
}
if (count == 0)
{
stackP = lStackP;
return offset - start;
}
// loop, filling local buffer until enough data has been decompressed
MainLoop:
do
{
if (end < EXTRA)
{
Fill();
}
int bitIn = (got > 0) ? (end - end % lNBits) << 3 : (end << 3) - (lNBits - 1);
while (lBitPos < bitIn)
{
#region A
// handle 1-byte reads correctly
if (count == 0)
{
nBits = lNBits;
maxCode = lMaxCode;
maxMaxCode = lMaxMaxCode;
bitMask = lBitMask;
oldCode = lOldCode;
finChar = lFinChar;
stackP = lStackP;
freeEnt = lFreeEnt;
bitPos = lBitPos;
return offset - start;
}
// check for code-width expansion
if (lFreeEnt > lMaxCode)
{
int nBytes = lNBits << 3;
lBitPos = (lBitPos - 1) + nBytes - (lBitPos - 1 + nBytes) % nBytes;
lNBits++;
lMaxCode = (lNBits == maxBits) ? lMaxMaxCode : (1 << lNBits) - 1;
lBitMask = (1 << lNBits) - 1;
lBitPos = ResetBuf(lBitPos);
goto MainLoop;
}
#endregion A
#region B
// read next code
int pos = lBitPos >> 3;
int code =
(
(
(lData[pos] & 0xFF)
| ((lData[pos + 1] & 0xFF) << 8)
| ((lData[pos + 2] & 0xFF) << 16)
) >> (lBitPos & 0x7)
) & lBitMask;
lBitPos += lNBits;
// handle first iteration
if (lOldCode == -1)
{
if (code >= 256)
throw new IncompleteArchiveException(
"corrupt input: " + code + " > 255"
);
lFinChar = (byte)(lOldCode = code);
buffer[offset++] = lFinChar;
count--;
continue;
}
// handle CLEAR code
if (code == TBL_CLEAR && blockMode)
{
Array.Copy(zeros, 0, lTabPrefix, 0, zeros.Length);
lFreeEnt = TBL_FIRST - 1;
int nBytes = lNBits << 3;
lBitPos = (lBitPos - 1) + nBytes - (lBitPos - 1 + nBytes) % nBytes;
lNBits = LzwConstants.INIT_BITS;
lMaxCode = (1 << lNBits) - 1;
lBitMask = lMaxCode;
// Code tables reset
lBitPos = ResetBuf(lBitPos);
goto MainLoop;
}
#endregion B
#region C
// setup
int inCode = code;
lStackP = lStack.Length;
// Handle KwK case
if (code >= lFreeEnt)
{
if (code > lFreeEnt)
{
throw new IncompleteArchiveException(
"corrupt input: code=" + code + ", freeEnt=" + lFreeEnt
);
}
lStack[--lStackP] = lFinChar;
code = lOldCode;
}
// Generate output characters in reverse order
while (code >= 256)
{
lStack[--lStackP] = lTabSuffix[code];
code = lTabPrefix[code];
}
lFinChar = lTabSuffix[code];
buffer[offset++] = lFinChar;
count--;
// And put them out in forward order
sSize = lStack.Length - lStackP;
int num = (sSize >= count) ? count : sSize;
Array.Copy(lStack, lStackP, buffer, offset, num);
offset += num;
count -= num;
lStackP += num;
#endregion C
#region D
// generate new entry in table
if (lFreeEnt < lMaxMaxCode)
{
lTabPrefix[lFreeEnt] = lOldCode;
lTabSuffix[lFreeEnt] = lFinChar;
lFreeEnt++;
}
// Remember previous code
lOldCode = inCode;
// if output buffer full, then return
if (count == 0)
{
nBits = lNBits;
maxCode = lMaxCode;
bitMask = lBitMask;
oldCode = lOldCode;
finChar = lFinChar;
stackP = lStackP;
freeEnt = lFreeEnt;
bitPos = lBitPos;
return offset - start;
}
#endregion D
} // while
lBitPos = ResetBuf(lBitPos);
} while (got > 0); // do..while
nBits = lNBits;
maxCode = lMaxCode;
bitMask = lBitMask;
oldCode = lOldCode;
finChar = lFinChar;
stackP = lStackP;
freeEnt = lFreeEnt;
bitPos = lBitPos;
eof = true;
return offset - start;
}
/// <summary>
/// Moves the unread data in the buffer to the beginning and resets
/// the pointers.
/// </summary>
/// <param name="bitPosition"></param>
/// <returns></returns>
private int ResetBuf(int bitPosition)
{
int pos = bitPosition >> 3;
Array.Copy(data, pos, data, 0, end - pos);
end -= pos;
return 0;
}
private void Fill()
{
got = baseInputStream.Read(data, end, data.Length - 1 - end);
if (got > 0)
{
end += got;
}
}
private void ParseHeader()
{
headerParsed = true;
byte[] hdr = new byte[LzwConstants.HDR_SIZE];
int result = baseInputStream.Read(hdr, 0, hdr.Length);
// Check the magic marker
if (result < 0)
throw new IncompleteArchiveException("Failed to read LZW header");
if (hdr[0] != (LzwConstants.MAGIC >> 8) || hdr[1] != (LzwConstants.MAGIC & 0xff))
{
throw new IncompleteArchiveException(
String.Format(
"Wrong LZW header. Magic bytes don't match. 0x{0:x2} 0x{1:x2}",
hdr[0],
hdr[1]
)
);
}
// Check the 3rd header byte
blockMode = (hdr[2] & LzwConstants.BLOCK_MODE_MASK) > 0;
maxBits = hdr[2] & LzwConstants.BIT_MASK;
if (maxBits > LzwConstants.MAX_BITS)
{
throw new ArchiveException(
"Stream compressed with "
+ maxBits
+ " bits, but decompression can only handle "
+ LzwConstants.MAX_BITS
+ " bits."
);
}
if ((hdr[2] & LzwConstants.RESERVED_MASK) > 0)
{
throw new ArchiveException("Unsupported bits set in the header.");
}
// Initialize variables
maxMaxCode = 1 << maxBits;
nBits = LzwConstants.INIT_BITS;
maxCode = (1 << nBits) - 1;
bitMask = maxCode;
oldCode = -1;
finChar = 0;
freeEnt = blockMode ? TBL_FIRST : 256;
tabPrefix = new int[1 << maxBits];
tabSuffix = new byte[1 << maxBits];
stack = new byte[1 << maxBits];
stackP = stack.Length;
for (int idx = 255; idx >= 0; idx--)
tabSuffix[idx] = (byte)idx;
}
#region Stream Overrides
/// <summary>
/// Gets a value indicating whether the current stream supports reading
/// </summary>
public override bool CanRead
{
get { return baseInputStream.CanRead; }
}
/// <summary>
/// Gets a value of false indicating seeking is not supported for this stream.
/// </summary>
public override bool CanSeek
{
get { return false; }
}
/// <summary>
/// Gets a value of false indicating that this stream is not writeable.
/// </summary>
public override bool CanWrite
{
get { return false; }
}
/// <summary>
/// A value representing the length of the stream in bytes.
/// </summary>
public override long Length
{
get { return got; }
}
/// <summary>
/// The current position within the stream.
/// Throws a NotSupportedException when attempting to set the position
/// </summary>
/// <exception cref="NotSupportedException">Attempting to set the position</exception>
public override long Position
{
get { return baseInputStream.Position; }
set { throw new NotSupportedException("InflaterInputStream Position not supported"); }
}
/// <summary>
/// Flushes the baseInputStream
/// </summary>
public override void Flush()
{
baseInputStream.Flush();
}
/// <summary>
/// Sets the position within the current stream
/// Always throws a NotSupportedException
/// </summary>
/// <param name="offset">The relative offset to seek to.</param>
/// <param name="origin">The <see cref="SeekOrigin"/> defining where to seek from.</param>
/// <returns>The new position in the stream.</returns>
/// <exception cref="NotSupportedException">Any access</exception>
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotSupportedException("Seek not supported");
}
/// <summary>
/// Set the length of the current stream
/// Always throws a NotSupportedException
/// </summary>
/// <param name="value">The new length value for the stream.</param>
/// <exception cref="NotSupportedException">Any access</exception>
public override void SetLength(long value)
{
throw new NotSupportedException("InflaterInputStream SetLength not supported");
}
/// <summary>
/// Writes a sequence of bytes to stream and advances the current position
/// This method always throws a NotSupportedException
/// </summary>
/// <param name="buffer">The buffer containing data to write.</param>
/// <param name="offset">The offset of the first byte to write.</param>
/// <param name="count">The number of bytes to write.</param>
/// <exception cref="NotSupportedException">Any access</exception>
public override void Write(byte[] buffer, int offset, int count)
{
throw new NotSupportedException("InflaterInputStream Write not supported");
}
/// <summary>
/// Writes one byte to the current stream and advances the current position
/// Always throws a NotSupportedException
/// </summary>
/// <param name="value">The byte to write.</param>
/// <exception cref="NotSupportedException">Any access</exception>
public override void WriteByte(byte value)
{
throw new NotSupportedException("InflaterInputStream WriteByte not supported");
}
/// <summary>
/// Closes the input stream. When <see cref="IsStreamOwner"></see>
/// is true the underlying stream is also closed.
/// </summary>
protected override void Dispose(bool disposing)
{
if (!isClosed)
{
isClosed = true;
if (IsStreamOwner)
{
baseInputStream.Dispose();
}
}
}
#endregion Stream Overrides
#region Instance Fields
private Stream baseInputStream;
/// <summary>
        /// Flag indicating whether this instance has been closed or not.
/// </summary>
private bool isClosed;
private readonly byte[] one = new byte[1];
private bool headerParsed;
// string table stuff
private const int TBL_CLEAR = 0x100;
private const int TBL_FIRST = TBL_CLEAR + 1;
private int[] tabPrefix = new int[0]; //
private byte[] tabSuffix = new byte[0]; //
private readonly int[] zeros = new int[256];
private byte[] stack = new byte[0]; //
// various state
private bool blockMode;
private int nBits;
private int maxBits;
private int maxMaxCode;
private int maxCode;
private int bitMask;
private int oldCode;
private byte finChar;
private int stackP;
private int freeEnt;
// input buffer
private readonly byte[] data = new byte[1024 * 8];
private int bitPos;
private int end;
private int got;
private bool eof;
private const int EXTRA = 64;
#endregion Instance Fields
}
}
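The XML example in the class documentation above still carries the SharpZipLib names it was ported from. In SharpCompress the equivalent usage would look roughly like the sketch below; the file names are placeholders, and LzwStream is read-only so it can only sit on the input side:

using System.IO;
using SharpCompress.Compressors.Lzw;

// Decompress a Unix 'compress' (.Z) file to a plain file.
using (Stream input = new LzwStream(File.OpenRead("archive.tar.Z")))
using (Stream output = File.Create("archive.tar"))
{
    input.CopyTo(output); // CopyTo drives Read(), which runs the LZW decoder to end of stream.
}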

View File

@@ -22,7 +22,7 @@ internal class ModelPpm
}
}
public SubAllocator SubAlloc { get; } = new SubAllocator();
public SubAllocator SubAlloc { get; } = new();
public virtual See2Context DummySee2Cont => _dummySee2Cont;
@@ -137,34 +137,34 @@ internal class ModelPpm
// Temp fields
//UPGRADE_NOTE: Final was removed from the declaration of 'tempState1 '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
private readonly State _tempState1 = new State(null);
private readonly State _tempState1 = new(null);
//UPGRADE_NOTE: Final was removed from the declaration of 'tempState2 '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
private readonly State _tempState2 = new State(null);
private readonly State _tempState2 = new(null);
//UPGRADE_NOTE: Final was removed from the declaration of 'tempState3 '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
private readonly State _tempState3 = new State(null);
private readonly State _tempState3 = new(null);
//UPGRADE_NOTE: Final was removed from the declaration of 'tempState4 '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
private readonly State _tempState4 = new State(null);
private readonly State _tempState4 = new(null);
//UPGRADE_NOTE: Final was removed from the declaration of 'tempStateRef1 '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
private readonly StateRef _tempStateRef1 = new StateRef();
private readonly StateRef _tempStateRef1 = new();
//UPGRADE_NOTE: Final was removed from the declaration of 'tempStateRef2 '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
private readonly StateRef _tempStateRef2 = new StateRef();
private readonly StateRef _tempStateRef2 = new();
//UPGRADE_NOTE: Final was removed from the declaration of 'tempPPMContext1 '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
private readonly PpmContext _tempPpmContext1 = new PpmContext(null);
private readonly PpmContext _tempPpmContext1 = new(null);
//UPGRADE_NOTE: Final was removed from the declaration of 'tempPPMContext2 '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
private readonly PpmContext _tempPpmContext2 = new PpmContext(null);
private readonly PpmContext _tempPpmContext2 = new(null);
//UPGRADE_NOTE: Final was removed from the declaration of 'tempPPMContext3 '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
private readonly PpmContext _tempPpmContext3 = new PpmContext(null);
private readonly PpmContext _tempPpmContext3 = new(null);
//UPGRADE_NOTE: Final was removed from the declaration of 'tempPPMContext4 '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
private readonly PpmContext _tempPpmContext4 = new PpmContext(null);
private readonly PpmContext _tempPpmContext4 = new(null);
//UPGRADE_NOTE: Final was removed from the declaration of 'ps '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
private readonly int[] _ps = new int[MAX_O];

View File

@@ -64,19 +64,19 @@ internal class PpmContext : Pointer
// Temp fields
//UPGRADE_NOTE: Final was removed from the declaration of 'tempState1 '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
private readonly State _tempState1 = new State(null);
private readonly State _tempState1 = new(null);
//UPGRADE_NOTE: Final was removed from the declaration of 'tempState2 '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
private readonly State _tempState2 = new State(null);
private readonly State _tempState2 = new(null);
//UPGRADE_NOTE: Final was removed from the declaration of 'tempState3 '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
private readonly State _tempState3 = new State(null);
private readonly State _tempState3 = new(null);
//UPGRADE_NOTE: Final was removed from the declaration of 'tempState4 '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
private readonly State _tempState4 = new State(null);
private readonly State _tempState4 = new(null);
//UPGRADE_NOTE: Final was removed from the declaration of 'tempState5 '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
private readonly State _tempState5 = new State(null);
private readonly State _tempState5 = new(null);
private PpmContext _tempPpmContext;
//UPGRADE_NOTE: Final was removed from the declaration of 'ps '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"

View File

@@ -395,10 +395,8 @@ internal class Allocator
unitCountDifference -= unitCount;
}
_memoryNodes[UNITS_TO_INDEX[unitCountDifference - 1]].Insert(
newPointer,
unitCountDifference
);
_memoryNodes[UNITS_TO_INDEX[unitCountDifference - 1]]
.Insert(newPointer, unitCountDifference);
}
private void GlueFreeBlocks()
@@ -457,10 +455,11 @@ internal class Allocator
if (INDEX_TO_UNITS[index] != unitCount)
{
var unitCountDifference = unitCount - INDEX_TO_UNITS[--index];
_memoryNodes[unitCountDifference - 1].Insert(
memoryNode0 + (unitCount - unitCountDifference),
unitCountDifference
);
_memoryNodes[unitCountDifference - 1]
.Insert(
memoryNode0 + (unitCount - unitCountDifference),
unitCountDifference
);
}
_memoryNodes[index].Insert(memoryNode0, INDEX_TO_UNITS[index]);

View File

@@ -27,7 +27,7 @@ internal struct MemoryNode
{
public uint _address;
public byte[] _memory;
public static readonly MemoryNode ZERO = new MemoryNode(0, null);
public static readonly MemoryNode ZERO = new(0, null);
public const int SIZE = 12;
/// <summary>
@@ -64,7 +64,7 @@ internal struct MemoryNode
public MemoryNode Next
{
get =>
new MemoryNode(
new(
_memory[_address + 4]
| (((uint)_memory[_address + 5]) << 8)
| (((uint)_memory[_address + 6]) << 16)
@@ -150,7 +150,7 @@ internal struct MemoryNode
/// <param name="pointer"></param>
/// <returns></returns>
public static implicit operator MemoryNode(Pointer pointer) =>
new MemoryNode(pointer._address, pointer._memory);
new(pointer._address, pointer._memory);
/// <summary>
/// Allow pointer-like addition on a memory node.

View File

@@ -22,7 +22,7 @@ internal struct Pointer
{
public uint _address;
public byte[] _memory;
public static readonly Pointer ZERO = new Pointer(0, null);
public static readonly Pointer ZERO = new(0, null);
public const int SIZE = 1;
/// <summary>
@@ -69,7 +69,7 @@ internal struct Pointer
/// <param name="memoryNode"></param>
/// <returns></returns>
public static implicit operator Pointer(MemoryNode memoryNode) =>
new Pointer(memoryNode._address, memoryNode._memory);
new(memoryNode._address, memoryNode._memory);
/// <summary>
/// Allow a <see cref="Model.PpmContext"/> to be implicitly converted to a <see cref="Pointer"/>.
@@ -77,15 +77,14 @@ internal struct Pointer
/// <param name="context"></param>
/// <returns></returns>
public static implicit operator Pointer(Model.PpmContext context) =>
new Pointer(context._address, context._memory);
new(context._address, context._memory);
/// <summary>
/// Allow a <see cref="PpmState"/> to be implicitly converted to a <see cref="Pointer"/>.
/// </summary>
/// <param name="state"></param>
/// <returns></returns>
public static implicit operator Pointer(PpmState state) =>
new Pointer(state._address, state._memory);
public static implicit operator Pointer(PpmState state) => new(state._address, state._memory);
/// <summary>
/// Increase the address of a pointer by the given number of bytes.

View File

@@ -21,7 +21,7 @@ internal partial class Model
{
public uint _address;
public byte[] _memory;
public static readonly PpmContext ZERO = new PpmContext(0, null);
public static readonly PpmContext ZERO = new(0, null);
public const int SIZE = 12;
/// <summary>
@@ -70,7 +70,7 @@ internal partial class Model
public PpmState Statistics
{
get =>
new PpmState(
new(
_memory[_address + 4]
| (((uint)_memory[_address + 5]) << 8)
| (((uint)_memory[_address + 6]) << 16)
@@ -92,7 +92,7 @@ internal partial class Model
public PpmContext Suffix
{
get =>
new PpmContext(
new(
_memory[_address + 8]
| (((uint)_memory[_address + 9]) << 8)
| (((uint)_memory[_address + 10]) << 16)
@@ -133,7 +133,7 @@ internal partial class Model
/// </para>
/// </remarks>
/// <returns></returns>
public PpmState FirstState => new PpmState(_address + 2, _memory);
public PpmState FirstState => new(_address + 2, _memory);
/// <summary>
/// Gets or sets the symbol of the first PPM state. This is provided for convenience. The same
@@ -164,7 +164,7 @@ internal partial class Model
public PpmContext FirstStateSuccessor
{
get =>
new PpmContext(
new(
_memory[_address + 4]
| (((uint)_memory[_address + 5]) << 8)
| (((uint)_memory[_address + 6]) << 16)
@@ -186,7 +186,7 @@ internal partial class Model
/// <param name="pointer"></param>
/// <returns></returns>
public static implicit operator PpmContext(Pointer pointer) =>
new PpmContext(pointer._address, pointer._memory);
new(pointer._address, pointer._memory);
/// <summary>
/// Allow pointer-like addition on a PPM context.

View File

@@ -19,7 +19,7 @@ internal struct PpmState
{
public uint _address;
public byte[] _memory;
public static readonly PpmState ZERO = new PpmState(0, null);
public static readonly PpmState ZERO = new(0, null);
public const int SIZE = 6;
/// <summary>
@@ -55,7 +55,7 @@ internal struct PpmState
public Model.PpmContext Successor
{
get =>
new Model.PpmContext(
new(
_memory[_address + 2]
| (((uint)_memory[_address + 3]) << 8)
| (((uint)_memory[_address + 4]) << 16)
@@ -77,7 +77,7 @@ internal struct PpmState
/// </summary>
/// <param name="offset"></param>
/// <returns></returns>
public PpmState this[int offset] => new PpmState((uint)(_address + (offset * SIZE)), _memory);
public PpmState this[int offset] => new((uint)(_address + (offset * SIZE)), _memory);
/// <summary>
/// Allow a pointer to be implicitly converted to a PPM state.
@@ -85,7 +85,7 @@ internal struct PpmState
/// <param name="pointer"></param>
/// <returns></returns>
public static implicit operator PpmState(Pointer pointer) =>
new PpmState(pointer._address, pointer._memory);
new(pointer._address, pointer._memory);
/// <summary>
/// Allow pointer-like addition on a PPM state.

View File

@@ -73,7 +73,7 @@ internal class RarBLAKE2spStream : RarStream
public BLAKE2SP()
{
S = new BLAKE2S[BLAKE2SP_PARALLEL_DEGREE];
for (int i = 0; i < S.Length; i++)
for (var i = 0; i < S.Length; i++)
{
S[i] = new BLAKE2S();
}
@@ -141,15 +141,15 @@ internal class RarBLAKE2spStream : RarStream
internal void Compress(BLAKE2S hash)
{
UInt32[] m = new UInt32[16];
UInt32[] v = new UInt32[16];
var m = new UInt32[16];
var v = new UInt32[16];
for (int i = 0; i < 16; i++)
for (var i = 0; i < 16; i++)
{
m[i] = BitConverter.ToUInt32(hash.b, i * 4);
}
for (int i = 0; i < 8; i++)
for (var i = 0; i < 8; i++)
{
v[i] = hash.h[i];
}
@@ -164,7 +164,7 @@ internal class RarBLAKE2spStream : RarStream
v[14] = hash.f[0] ^ k_BLAKE2S_IV[6];
v[15] = hash.f[1] ^ k_BLAKE2S_IV[7];
for (int r = 0; r < BLAKE2S_NUM_ROUNDS; r++)
for (var r = 0; r < BLAKE2S_NUM_ROUNDS; r++)
{
ref byte[] sigma = ref k_BLAKE2S_Sigma[r];
@@ -178,7 +178,7 @@ internal class RarBLAKE2spStream : RarStream
G(ref m, ref sigma, 7, ref v[3], ref v[4], ref v[9], ref v[14]);
}
for (int i = 0; i < 8; i++)
for (var i = 0; i < 8; i++)
{
hash.h[i] ^= v[i] ^ v[i + 8];
}
@@ -186,7 +186,7 @@ internal class RarBLAKE2spStream : RarStream
internal void Update(BLAKE2S hash, ReadOnlySpan<byte> data, int size)
{
int i = 0;
var i = 0;
while (size != 0)
{
var pos = hash.bufferPosition;
@@ -219,7 +219,7 @@ internal class RarBLAKE2spStream : RarStream
var mem = new MemoryStream();
for (int i = 0; i < 8; i++)
for (var i = 0; i < 8; i++)
{
mem.Write(BitConverter.GetBytes(hash.h[i]), 0, 4);
}
@@ -245,7 +245,7 @@ internal class RarBLAKE2spStream : RarStream
internal void Update(BLAKE2SP hash, ReadOnlySpan<byte> data, int size)
{
int i = 0;
var i = 0;
var pos = hash.bufferPosition;
while (size != 0)
{
@@ -274,7 +274,7 @@ internal class RarBLAKE2spStream : RarStream
h.h[3] ^= (1 << 16 | BLAKE2S_DIGEST_SIZE << 24);
h.lastNodeFlag = BLAKE2S_FINAL_FLAG;
for (int i = 0; i < BLAKE2SP_PARALLEL_DEGREE; i++)
for (var i = 0; i < BLAKE2SP_PARALLEL_DEGREE; i++)
{
var digest = Final(_blake2sp.S[i]);
Update(h, digest, BLAKE2S_DIGEST_SIZE);

View File

@@ -14,7 +14,7 @@ internal class RarStream : Stream
private bool fetch;
private byte[] tmpBuffer = new byte[65536];
private byte[] tmpBuffer = BufferPool.Rent(65536);
private int tmpOffset;
private int tmpCount;
@@ -40,6 +40,11 @@ internal class RarStream : Stream
{
if (!isDisposed)
{
if (disposing)
{
BufferPool.Return(this.tmpBuffer);
this.tmpBuffer = null;
}
isDisposed = true;
base.Dispose(disposing);
readStream.Dispose();
@@ -86,6 +91,13 @@ internal class RarStream : Stream
fetch = false;
}
_position += outTotal;
if (count > 0 && outTotal == 0 && _position != Length)
{
                // sanity check, e.g. if we try to decompress a redir entry
throw new InvalidOperationException(
$"unpacked file size does not match header: expected {Length} found {_position}"
);
}
return outTotal;
}
@@ -111,16 +123,7 @@ internal class RarStream : Stream
}
if (count > 0)
{
if (tmpBuffer.Length < tmpCount + count)
{
var newBuffer = new byte[
tmpBuffer.Length * 2 > tmpCount + count
? tmpBuffer.Length * 2
: tmpCount + count
];
Buffer.BlockCopy(tmpBuffer, 0, newBuffer, 0, tmpCount);
tmpBuffer = newBuffer;
}
EnsureBufferCapacity(count);
Buffer.BlockCopy(buffer, offset, tmpBuffer, tmpCount, count);
tmpCount += count;
tmpOffset = 0;
@@ -131,4 +134,20 @@ internal class RarStream : Stream
unpack.Suspended = false;
}
}
private void EnsureBufferCapacity(int count)
{
if (this.tmpBuffer.Length < this.tmpCount + count)
{
var newLength =
this.tmpBuffer.Length * 2 > this.tmpCount + count
? this.tmpBuffer.Length * 2
: this.tmpCount + count;
var newBuffer = BufferPool.Rent(newLength);
Buffer.BlockCopy(this.tmpBuffer, 0, newBuffer, 0, this.tmpCount);
var oldBuffer = this.tmpBuffer;
this.tmpBuffer = newBuffer;
BufferPool.Return(oldBuffer);
}
}
}
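EnsureBufferCapacity above grows the temporary buffer through the pooled allocator instead of allocating a fresh array each time. The general rent-copy-return pattern it follows is sketched below with System.Buffers.ArrayPool; SharpCompress routes this through its internal BufferPool helper, so the code here only illustrates the idea:

using System;
using System.Buffers;

internal static class PooledGrowth
{
    // Grows 'buffer' to at least 'needed' bytes while preserving the first 'used' bytes.
    public static void Grow(ref byte[] buffer, int used, int needed)
    {
        if (buffer.Length >= needed)
        {
            return;
        }
        var newBuffer = ArrayPool<byte>.Shared.Rent(Math.Max(buffer.Length * 2, needed));
        Buffer.BlockCopy(buffer, 0, newBuffer, 0, used);
        ArrayPool<byte>.Shared.Return(buffer); // hand the old array back to the pool
        buffer = newBuffer;
    }
}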

View File

@@ -52,19 +52,19 @@ internal sealed partial class Unpack : BitInput, IRarUnpack
public int PpmEscChar { get; set; }
private readonly ModelPpm ppm = new ModelPpm();
private readonly ModelPpm ppm = new();
private readonly RarVM rarVM = new RarVM();
private readonly RarVM rarVM = new();
// Filters code, one entry per filter
private readonly List<UnpackFilter> filters = new List<UnpackFilter>();
private readonly List<UnpackFilter> filters = new();
// Filters stack, several entrances of same filter are possible
private readonly List<UnpackFilter> prgStack = new List<UnpackFilter>();
private readonly List<UnpackFilter> prgStack = new();
// lengths of preceding blocks, one length per filter. Used to reduce size
// required to write block length if lengths are repeating
private readonly List<int> oldFilterLengths = new List<int>();
private readonly List<int> oldFilterLengths = new();
private int lastFilter;

View File

@@ -25,15 +25,15 @@ internal partial class Unpack
private readonly AudioVariables[] AudV = new AudioVariables[4];
private readonly LitDecode LD = new LitDecode();
private readonly LitDecode LD = new();
private readonly DistDecode DD = new DistDecode();
private readonly DistDecode DD = new();
private readonly LowDistDecode LDD = new LowDistDecode();
private readonly LowDistDecode LDD = new();
private readonly RepDecode RD = new RepDecode();
private readonly RepDecode RD = new();
private readonly BitDecode BD = new BitDecode();
private readonly BitDecode BD = new();
private static readonly int[] LDecode =
{

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.IO;
using SharpCompress.Common.Rar.Headers;
#if !Rar2017_64bit
@@ -67,15 +67,15 @@ internal partial class Unpack : IRarUnpack
private void UnstoreFile()
{
var b = new byte[0x10000];
Span<byte> b = stackalloc byte[(int)Math.Min(0x10000, DestUnpSize)];
do
{
var n = readStream.Read(b, 0, (int)Math.Min(b.Length, DestUnpSize));
var n = readStream.Read(b);
if (n == 0)
{
break;
}
writeStream.Write(b, 0, n);
writeStream.Write(b.Slice(0, n));
DestUnpSize -= n;
} while (!Suspended);
}

View File

@@ -373,8 +373,8 @@ internal partial class Unpack
private bool ReadTables20()
{
var BitLength = new byte[BC20];
var Table = new byte[MC20 * 4];
Span<byte> BitLength = stackalloc byte[checked((int)BC20)];
Span<byte> Table = stackalloc byte[checked((int)MC20 * 4)];
if (Inp.InAddr > ReadTop - 25)
{
if (!UnpReadBuf())
@@ -410,13 +410,13 @@ internal partial class Unpack
TableSize = NC20 + DC20 + RC20;
}
for (uint I = 0; I < BC20; I++)
for (int I = 0; I < checked((int)BC20); I++)
{
BitLength[I] = (byte)(Inp.getbits() >> 12);
Inp.addbits(4);
}
MakeDecodeTables(BitLength, 0, BlockTables.BD, BC20);
for (uint I = 0; I < TableSize; )
for (int I = 0; I < checked((int)TableSize); )
{
if (Inp.InAddr > ReadTop - 5)
{
@@ -487,8 +487,7 @@ internal partial class Unpack
MakeDecodeTables(Table, (int)NC20, BlockTables.DD, DC20);
MakeDecodeTables(Table, (int)(NC20 + DC20), BlockTables.RD, RC20);
}
//x memcpy(UnpOldTable20,Table,sizeof(UnpOldTable20));
Array.Copy(Table, UnpOldTable20, UnpOldTable20.Length);
Table.CopyTo(this.UnpOldTable20);
return true;
}
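The rar5 memory hunks above replace short-lived heap arrays with stack-allocated spans and use Span<T>.CopyTo in place of Array.Copy. A minimal sketch of that pattern, assuming a small, bounded table size (stackalloc is only safe for sizes known to be modest):

using System;

internal static class StackTableExample
{
    public static void FillAndPersist(byte[] persistentTable)
    {
        // Scratch table lives on the stack: no GC allocation, released on return.
        Span<byte> table = stackalloc byte[persistentTable.Length];
        for (var i = 0; i < table.Length; i++)
        {
            table[i] = (byte)(i & 0x0f);
        }
        // Span<T>.CopyTo replaces Array.Copy when copying into an array or another span.
        table.CopyTo(persistentTable);
    }
}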

View File

@@ -1,4 +1,4 @@
#nullable disable
#nullable disable
using System;
using static SharpCompress.Compressors.Rar.UnpackV2017.PackDef;
@@ -752,8 +752,8 @@ internal partial class Unpack
}
}
var BitLength = new byte[BC];
for (uint I = 0; I < BC; I++)
Span<byte> BitLength = stackalloc byte[checked((int)BC)];
for (int I = 0; I < BC; I++)
{
uint Length = (byte)(Inp.fgetbits() >> 12);
Inp.faddbits(4);
@@ -784,9 +784,9 @@ internal partial class Unpack
MakeDecodeTables(BitLength, 0, Tables.BD, BC);
var Table = new byte[HUFF_TABLE_SIZE];
const uint TableSize = HUFF_TABLE_SIZE;
for (uint I = 0; I < TableSize; )
Span<byte> Table = stackalloc byte[checked((int)HUFF_TABLE_SIZE)];
const int TableSize = checked((int)HUFF_TABLE_SIZE);
for (int I = 0; I < TableSize; )
{
if (!Inp.ExternalBuffer && Inp.InAddr > ReadTop - 5)
{

View File

@@ -259,7 +259,7 @@ internal sealed partial class Unpack : BitInput
// LengthTable contains the length in bits for every element of alphabet.
// Dec is the structure to decode Huffman code.
// Size is the size of the length table and of the DecodeNum field in the Dec structure.
private void MakeDecodeTables(byte[] LengthTable, int offset, DecodeTable Dec, uint Size)
private void MakeDecodeTables(Span<byte> LengthTable, int offset, DecodeTable Dec, uint Size)
{
// Size of alphabet and DecodePos array.
Dec.MaxNum = Size;
@@ -269,7 +269,7 @@ internal sealed partial class Unpack : BitInput
//memset(LengthCount,0,sizeof(LengthCount));
for (size_t I = 0; I < Size; I++)
{
LengthCount[LengthTable[offset + I] & 0xf]++;
LengthCount[LengthTable[checked((int)(offset + I))] & 0xf]++;
}
// We must not calculate the number of zero length codes.
@@ -318,7 +318,7 @@ internal sealed partial class Unpack : BitInput
for (uint I = 0; I < Size; I++)
{
// Get the current bit length.
var _CurBitLength = (byte)(LengthTable[offset + I] & 0xf);
var _CurBitLength = (byte)(LengthTable[checked((int)(offset + I))] & 0xf);
if (_CurBitLength != 0)
{

View File

@@ -269,7 +269,7 @@ internal partial class Unpack
private byte[] FilterDstMemory = Array.Empty<byte>();
// Filters code, one entry per filter.
private readonly List<UnpackFilter> Filters = new List<UnpackFilter>();
private readonly List<UnpackFilter> Filters = new();
private readonly uint[] OldDist = new uint[4];
private uint OldDistPtr;
@@ -297,7 +297,7 @@ internal partial class Unpack
private byte[] Window;
private readonly FragmentedWindow FragWindow = new FragmentedWindow();
private readonly FragmentedWindow FragWindow = new();
private bool Fragmented;
private int64 DestUnpSize;
@@ -393,18 +393,18 @@ internal partial class Unpack
// Buffer to read VM filters code. We moved it here from AddVMCode
// function to reduce time spent in BitInput constructor.
private readonly BitInput VMCodeInp = new BitInput(true);
private readonly BitInput VMCodeInp = new(true);
// Filters code, one entry per filter.
private readonly List<UnpackFilter30> Filters30 = new List<UnpackFilter30>();
private readonly List<UnpackFilter30> Filters30 = new();
// Filters stack, several entrances of same filter are possible.
private readonly List<UnpackFilter30> PrgStack = new List<UnpackFilter30>();
private readonly List<UnpackFilter30> PrgStack = new();
// Lengths of preceding data blocks, one length of one last block
// for every filter. Used to reduce the size required to write
// the data block length if lengths are repeating.
private readonly List<int> OldFilterLengths = new List<int>();
private readonly List<int> OldFilterLengths = new();
/*#if RarV2017_RAR_SMP
// More than 8 threads are unlikely to provide a noticeable gain

View File

@@ -1131,13 +1131,13 @@ internal sealed class RarVM : BitInput
{
VMStandardFilterSignature[] stdList =
{
new VMStandardFilterSignature(53, 0xad576887, VMStandardFilters.VMSF_E8),
new VMStandardFilterSignature(57, 0x3cd7e57e, VMStandardFilters.VMSF_E8E9),
new VMStandardFilterSignature(120, 0x3769893f, VMStandardFilters.VMSF_ITANIUM),
new VMStandardFilterSignature(29, 0x0e06077d, VMStandardFilters.VMSF_DELTA),
new VMStandardFilterSignature(149, 0x1c2c5dc8, VMStandardFilters.VMSF_RGB),
new VMStandardFilterSignature(216, 0xbc85e701, VMStandardFilters.VMSF_AUDIO),
new VMStandardFilterSignature(40, 0x46b9c560, VMStandardFilters.VMSF_UPCASE)
new(53, 0xad576887, VMStandardFilters.VMSF_E8),
new(57, 0x3cd7e57e, VMStandardFilters.VMSF_E8E9),
new(120, 0x3769893f, VMStandardFilters.VMSF_ITANIUM),
new(29, 0x0e06077d, VMStandardFilters.VMSF_DELTA),
new(149, 0x1c2c5dc8, VMStandardFilters.VMSF_RGB),
new(216, 0xbc85e701, VMStandardFilters.VMSF_AUDIO),
new(40, 0x46b9c560, VMStandardFilters.VMSF_UPCASE)
};
var CodeCRC = RarCRC.CheckCrc(0xffffffff, code, 0, code.Length) ^ 0xffffffff;
for (var i = 0; i < stdList.Length; i++)

View File

@@ -4,13 +4,13 @@ namespace SharpCompress.Compressors.Rar.VM;
internal class VMPreparedProgram
{
internal List<VMPreparedCommand> Commands = new List<VMPreparedCommand>();
internal List<VMPreparedCommand> AltCommands = new List<VMPreparedCommand>();
internal List<VMPreparedCommand> Commands = new();
internal List<VMPreparedCommand> AltCommands = new();
public int CommandCount { get; set; }
internal List<byte> GlobalData = new List<byte>();
internal List<byte> StaticData = new List<byte>();
internal List<byte> GlobalData = new();
internal List<byte> StaticData = new();
// static data contained in DB operators
internal int[] InitR = new int[7];

View File

@@ -0,0 +1,85 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace SharpCompress.Compressors.Shrink
{
internal class BitStream
{
private byte[] _src;
private int _srcLen;
private int _byteIdx;
private int _bitIdx;
private int _bitsLeft;
private ulong _bitBuffer;
private static uint[] _maskBits = new uint[17]
{
0U,
1U,
3U,
7U,
15U,
31U,
63U,
(uint)sbyte.MaxValue,
(uint)byte.MaxValue,
511U,
1023U,
2047U,
4095U,
8191U,
16383U,
(uint)short.MaxValue,
(uint)ushort.MaxValue
};
public BitStream(byte[] src, int srcLen)
{
_src = src;
_srcLen = srcLen;
_byteIdx = 0;
_bitIdx = 0;
}
public int BytesRead => (_byteIdx << 3) + _bitIdx;
private int NextByte()
{
if (_byteIdx >= _srcLen)
{
return 0;
}
return _src[_byteIdx++];
}
public int NextBits(int nbits)
{
var result = 0;
if (nbits > _bitsLeft)
{
int num;
while (_bitsLeft <= 24 && (num = NextByte()) != 1234)
{
_bitBuffer |= (ulong)num << _bitsLeft;
_bitsLeft += 8;
}
}
result = (int)((long)_bitBuffer & (long)_maskBits[nbits]);
_bitBuffer >>= nbits;
_bitsLeft -= nbits;
return result;
}
public bool Advance(int count)
{
if (_byteIdx > _srcLen)
{
return false;
}
return true;
}
}
}
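A rough usage sketch for the bit reader above. BitStream is internal to SharpCompress, so this is only illustrative; the buffer contents are placeholders, and the 9-bit starting width matches LzwConstants.INIT_BITS:

using SharpCompress.Compressors.Shrink;

var src = new byte[] { 0x41, 0x02, 0x00, 0x00 }; // placeholder compressed bytes
var bits = new BitStream(src, src.Length);

var codeSize = 9;                    // shrink streams start with 9-bit codes
var code = bits.NextBits(codeSize);  // pull the next 9-bit code out of the bit buffer
if (!bits.Advance(codeSize))         // returns false once the reader has run past the input
{
    // end of input
}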

View File

@@ -0,0 +1,431 @@
using System;
namespace SharpCompress.Compressors.Shrink
{
public class HwUnshrink
{
private const int MIN_CODE_SIZE = 9;
private const int MAX_CODE_SIZE = 13;
private const ushort MAX_CODE = (ushort)((1U << MAX_CODE_SIZE) - 1);
private const ushort INVALID_CODE = ushort.MaxValue;
private const ushort CONTROL_CODE = 256;
private const ushort INC_CODE_SIZE = 1;
private const ushort PARTIAL_CLEAR = 2;
private const int HASH_BITS = MAX_CODE_SIZE + 1; // For a load factor of 0.5.
private const int HASHTAB_SIZE = 1 << HASH_BITS;
private const ushort UNKNOWN_LEN = ushort.MaxValue;
private struct CodeTabEntry
{
public int prefixCode; // INVALID_CODE means the entry is invalid.
public byte extByte;
public ushort len;
public int lastDstPos;
}
private static void CodeTabInit(CodeTabEntry[] codeTab)
{
for (var i = 0; i <= byte.MaxValue; i++)
{
codeTab[i].prefixCode = (ushort)i;
codeTab[i].extByte = (byte)i;
codeTab[i].len = 1;
}
for (var i = byte.MaxValue + 1; i <= MAX_CODE; i++)
{
codeTab[i].prefixCode = INVALID_CODE;
}
}
private static void UnshrinkPartialClear(CodeTabEntry[] codeTab, ref CodeQueue queue)
{
var isPrefix = new bool[MAX_CODE + 1];
int codeQueueSize;
// Scan for codes that have been used as a prefix.
for (var i = CONTROL_CODE + 1; i <= MAX_CODE; i++)
{
if (codeTab[i].prefixCode != INVALID_CODE)
{
isPrefix[codeTab[i].prefixCode] = true;
}
}
// Clear "non-prefix" codes in the table; populate the code queue.
codeQueueSize = 0;
for (var i = CONTROL_CODE + 1; i <= MAX_CODE; i++)
{
if (!isPrefix[i])
{
codeTab[i].prefixCode = INVALID_CODE;
queue.codes[codeQueueSize++] = (ushort)i;
}
}
queue.codes[codeQueueSize] = INVALID_CODE; // End-of-queue marker.
queue.nextIdx = 0;
}
private static bool ReadCode(
BitStream stream,
ref int codeSize,
CodeTabEntry[] codeTab,
ref CodeQueue queue,
out int nextCode
)
{
int code,
controlCode;
code = (int)stream.NextBits(codeSize);
if (!stream.Advance(codeSize))
{
nextCode = INVALID_CODE;
return false;
}
// Handle regular codes (the common case).
if (code != CONTROL_CODE)
{
nextCode = code;
return true;
}
// Handle control codes.
controlCode = (ushort)stream.NextBits(codeSize);
if (!stream.Advance(codeSize))
{
nextCode = INVALID_CODE;
return true;
}
if (controlCode == INC_CODE_SIZE && codeSize < MAX_CODE_SIZE)
{
codeSize++;
return ReadCode(stream, ref codeSize, codeTab, ref queue, out nextCode);
}
if (controlCode == PARTIAL_CLEAR)
{
UnshrinkPartialClear(codeTab, ref queue);
return ReadCode(stream, ref codeSize, codeTab, ref queue, out nextCode);
}
nextCode = INVALID_CODE;
return true;
}
private static void CopyFromPrevPos(byte[] dst, int prevPos, int dstPos, int len)
{
if (dstPos + len > dst.Length)
{
// Not enough room in dst for the sloppy copy below.
Array.Copy(dst, prevPos, dst, dstPos, len);
return;
}
if (prevPos + len > dstPos)
{
// Benign one-byte overlap possible in the KwKwK case.
//assert(prevPos + len == dstPos + 1);
//assert(dst[prevPos] == dst[prevPos + len - 1]);
}
Buffer.BlockCopy(dst, prevPos, dst, dstPos, len);
}
private static UnshrnkStatus OutputCode(
int code,
byte[] dst,
int dstPos,
int dstCap,
int prevCode,
CodeTabEntry[] codeTab,
ref CodeQueue queue,
out byte firstByte,
out int len
)
{
int prefixCode;
//assert(code <= MAX_CODE && code != CONTROL_CODE);
//assert(dstPos < dstCap);
firstByte = 0;
if (code <= byte.MaxValue)
{
// Output literal byte.
firstByte = (byte)code;
len = 1;
dst[dstPos] = (byte)code;
return UnshrnkStatus.Ok;
}
if (codeTab[code].prefixCode == INVALID_CODE || codeTab[code].prefixCode == code)
{
// Reject invalid codes. Self-referential codes may exist in the table but cannot be used.
firstByte = 0;
len = 0;
return UnshrnkStatus.Error;
}
if (codeTab[code].len != UNKNOWN_LEN)
{
// Output string with known length (the common case).
if (dstCap - dstPos < codeTab[code].len)
{
firstByte = 0;
len = 0;
return UnshrnkStatus.Full;
}
CopyFromPrevPos(dst, codeTab[code].lastDstPos, dstPos, codeTab[code].len);
firstByte = dst[dstPos];
len = codeTab[code].len;
return UnshrnkStatus.Ok;
}
// Output a string of unknown length.
//assert(codeTab[code].len == UNKNOWN_LEN);
prefixCode = codeTab[code].prefixCode;
// assert(prefixCode > CONTROL_CODE);
if (prefixCode == queue.codes[queue.nextIdx])
{
// The prefix code hasn't been added yet, but we were just about to: the KwKwK case.
//assert(codeTab[prevCode].prefixCode != INVALID_CODE);
codeTab[prefixCode].prefixCode = prevCode;
codeTab[prefixCode].extByte = firstByte;
codeTab[prefixCode].len = (ushort)(codeTab[prevCode].len + 1);
codeTab[prefixCode].lastDstPos = codeTab[prevCode].lastDstPos;
dst[dstPos] = firstByte;
}
else if (codeTab[prefixCode].prefixCode == INVALID_CODE)
{
// The prefix code is still invalid.
firstByte = 0;
len = 0;
return UnshrnkStatus.Error;
}
// Output the prefix string, then the extension byte.
len = codeTab[prefixCode].len + 1;
if (dstCap - dstPos < len)
{
firstByte = 0;
len = 0;
return UnshrnkStatus.Full;
}
CopyFromPrevPos(dst, codeTab[prefixCode].lastDstPos, dstPos, codeTab[prefixCode].len);
dst[dstPos + len - 1] = codeTab[code].extByte;
firstByte = dst[dstPos];
// Update the code table now that the string has a length and pos.
//assert(prevCode != code);
codeTab[code].len = (ushort)len;
codeTab[code].lastDstPos = dstPos;
return UnshrnkStatus.Ok;
}
public static UnshrnkStatus Unshrink(
byte[] src,
int srcLen,
out int srcUsed,
byte[] dst,
int dstCap,
out int dstUsed
)
{
var codeTab = new CodeTabEntry[HASHTAB_SIZE];
var queue = new CodeQueue();
var stream = new BitStream(src, srcLen);
int codeSize,
dstPos,
len;
int currCode,
prevCode,
newCode;
byte firstByte;
CodeTabInit(codeTab);
CodeQueueInit(ref queue);
codeSize = MIN_CODE_SIZE;
dstPos = 0;
// Handle the first code separately since there is no previous code.
if (!ReadCode(stream, ref codeSize, codeTab, ref queue, out currCode))
{
srcUsed = stream.BytesRead;
dstUsed = 0;
return UnshrnkStatus.Ok;
}
//assert(currCode != CONTROL_CODE);
if (currCode > byte.MaxValue)
{
srcUsed = stream.BytesRead;
dstUsed = 0;
return UnshrnkStatus.Error; // The first code must be a literal.
}
if (dstPos == dstCap)
{
srcUsed = stream.BytesRead;
dstUsed = 0;
return UnshrnkStatus.Full;
}
firstByte = (byte)currCode;
dst[dstPos] = (byte)currCode;
codeTab[currCode].lastDstPos = dstPos;
dstPos++;
prevCode = currCode;
while (ReadCode(stream, ref codeSize, codeTab, ref queue, out currCode))
{
if (currCode == INVALID_CODE)
{
srcUsed = stream.BytesRead;
dstUsed = 0;
return UnshrnkStatus.Error;
}
if (dstPos == dstCap)
{
srcUsed = stream.BytesRead;
dstUsed = 0;
return UnshrnkStatus.Full;
}
// Handle KwKwK: next code used before being added.
if (currCode == queue.codes[queue.nextIdx])
{
if (codeTab[prevCode].prefixCode == INVALID_CODE)
{
// The previous code is no longer valid.
srcUsed = stream.BytesRead;
dstUsed = 0;
return UnshrnkStatus.Error;
}
// Extend the previous code with its first byte.
//assert(currCode != prevCode);
codeTab[currCode].prefixCode = prevCode;
codeTab[currCode].extByte = firstByte;
codeTab[currCode].len = (ushort)(codeTab[prevCode].len + 1);
codeTab[currCode].lastDstPos = codeTab[prevCode].lastDstPos;
//assert(dstPos < dstCap);
dst[dstPos] = firstByte;
}
// Output the string represented by the current code.
var status = OutputCode(
currCode,
dst,
dstPos,
dstCap,
prevCode,
codeTab,
ref queue,
out firstByte,
out len
);
if (status != UnshrnkStatus.Ok)
{
srcUsed = stream.BytesRead;
dstUsed = 0;
return status;
}
// Verify that the output matches walking the prefixes.
var c = currCode;
for (var i = 0; i < len; i++)
{
// assert(codeTab[c].len == len - i);
//assert(codeTab[c].extByte == dst[dstPos + len - i - 1]);
c = codeTab[c].prefixCode;
}
// Add a new code to the string table if there's room.
// The string is the previous code's string extended with the first byte of the current code's string.
newCode = CodeQueueRemoveNext(ref queue);
if (newCode != INVALID_CODE)
{
//assert(codeTab[prevCode].lastDstPos < dstPos);
codeTab[newCode].prefixCode = prevCode;
codeTab[newCode].extByte = firstByte;
codeTab[newCode].len = (ushort)(codeTab[prevCode].len + 1);
codeTab[newCode].lastDstPos = codeTab[prevCode].lastDstPos;
if (codeTab[prevCode].prefixCode == INVALID_CODE)
{
// prevCode was invalidated in a partial clearing. Until that code is re-used, the
// string represented by newCode is indeterminate.
codeTab[newCode].len = UNKNOWN_LEN;
}
// If prevCode was invalidated in a partial clearing, it's possible that newCode == prevCode,
// in which case it will never be used or cleared.
}
codeTab[currCode].lastDstPos = dstPos;
dstPos += len;
prevCode = currCode;
}
srcUsed = stream.BytesRead;
dstUsed = dstPos;
return UnshrnkStatus.Ok;
}
public enum UnshrnkStatus
{
Ok,
Full,
Error
}
private struct CodeQueue
{
public int nextIdx;
public ushort[] codes;
}
private static void CodeQueueInit(ref CodeQueue q)
{
int codeQueueSize;
ushort code;
codeQueueSize = 0;
q.codes = new ushort[MAX_CODE - CONTROL_CODE + 2];
for (code = CONTROL_CODE + 1; code <= MAX_CODE; code++)
{
q.codes[codeQueueSize++] = code;
}
//assert(codeQueueSize < q.codes.Length);
q.codes[codeQueueSize] = INVALID_CODE; // End-of-queue marker.
q.nextIdx = 0;
}
private static ushort CodeQueueNext(ref CodeQueue q) =>
//assert(q.nextIdx < q.codes.Length);
q.codes[q.nextIdx];
private static ushort CodeQueueRemoveNext(ref CodeQueue q)
{
var code = CodeQueueNext(ref q);
if (code != INVALID_CODE)
{
q.nextIdx++;
}
return code;
}
}
}
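A small driver for the decoder above. The input path and output capacity are assumptions (in practice both come from the zip entry headers), but the call shape matches the Unshrink signature in the diff:

using System.IO;
using SharpCompress.Compressors.Shrink;

var compressed = File.ReadAllBytes("entry.shrunk"); // raw method-1 data (placeholder path)
var output = new byte[64 * 1024];                   // assumed uncompressed size from the zip header

var status = HwUnshrink.Unshrink(
    compressed,
    compressed.Length,
    out var srcUsed,   // input consumed, as reported by the decoder
    output,
    output.Length,
    out var dstUsed    // bytes written to 'output'
);
// UnshrnkStatus.Full means 'output' was too small; UnshrnkStatus.Error means corrupt input.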

View File

@@ -0,0 +1,89 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace SharpCompress.Compressors.Shrink;
internal class ShrinkStream : Stream
{
private Stream inStream;
private CompressionMode _compressionMode;
private ulong _compressedSize;
private long _uncompressedSize;
private byte[] _byteOut;
private long _outBytesCount;
public ShrinkStream(
Stream stream,
CompressionMode compressionMode,
long compressedSize,
long uncompressedSize
)
{
inStream = stream;
_compressionMode = compressionMode;
_compressedSize = (ulong)compressedSize;
_uncompressedSize = uncompressedSize;
_byteOut = new byte[_uncompressedSize];
_outBytesCount = 0L;
}
public override bool CanRead => true;
public override bool CanSeek => true;
public override bool CanWrite => false;
public override long Length => _uncompressedSize;
public override long Position
{
get => _outBytesCount;
set => throw new NotImplementedException();
}
public override void Flush() => throw new NotImplementedException();
public override int Read(byte[] buffer, int offset, int count)
{
if (inStream.Position == (long)_compressedSize)
{
return 0;
}
var src = new byte[_compressedSize];
inStream.Read(src, offset, (int)_compressedSize);
var srcUsed = 0;
var dstUsed = 0;
HwUnshrink.Unshrink(
src,
(int)_compressedSize,
out srcUsed,
_byteOut,
(int)_uncompressedSize,
out dstUsed
);
_outBytesCount = _byteOut.Length;
for (var index = 0; index < _outBytesCount; ++index)
{
buffer[offset + index] = _byteOut[index];
}
var tmp = _outBytesCount;
_outBytesCount = 0;
return (int)tmp;
}
public override long Seek(long offset, SeekOrigin origin) =>
throw new NotImplementedException();
public override void SetLength(long value) => throw new NotImplementedException();
public override void Write(byte[] buffer, int offset, int count) =>
throw new NotImplementedException();
}
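ShrinkStream is internal and is normally constructed by the zip reader when it meets compression method 1, so the following is only a hypothetical sketch; the path is a placeholder, the sizes would really come from the zip local file header, and the CompressionMode namespace is assumed to be SharpCompress.Compressors:

using System.IO;
using SharpCompress.Compressors;        // CompressionMode (assumed namespace)
using SharpCompress.Compressors.Shrink;

using var entryData = File.OpenRead("entry.shrunk"); // raw method-1 bytes (placeholder path)
long compressedSize = entryData.Length;              // normally taken from the local file header
long uncompressedSize = 64 * 1024;                   // likewise from the header

var shrink = new ShrinkStream(entryData, CompressionMode.Decompress, compressedSize, uncompressedSize);
var buffer = new byte[uncompressedSize];
var produced = shrink.Read(buffer, 0, buffer.Length); // decompresses the whole entry in one call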

View File

@@ -14,7 +14,7 @@ public sealed class XZBlock : XZReadOnlyStream
public int BlockHeaderSize => (_blockHeaderSizeByte + 1) * 4;
public ulong? CompressedSize { get; private set; }
public ulong? UncompressedSize { get; private set; }
public Stack<BlockFilter> Filters { get; private set; } = new Stack<BlockFilter>();
public Stack<BlockFilter> Filters { get; private set; } = new();
public bool HeaderIsLoaded { get; private set; }
private CheckType _checkType;
private readonly int _checkSize;

View File

@@ -13,7 +13,7 @@ public class XZIndex
private readonly BinaryReader _reader;
public long StreamStartPosition { get; private set; }
public ulong NumberOfRecords { get; private set; }
public List<XZIndexRecord> Records { get; } = new List<XZIndexRecord>();
public List<XZIndexRecord> Records { get; } = new();
private readonly bool _indexMarkerAlreadyVerified;

View File

@@ -5,25 +5,12 @@ using System.Security.Cryptography;
namespace SharpCompress.Crypto;
internal class BlockTransformer : IDisposable
internal class BlockTransformer(ICryptoTransform transformer) : IDisposable
{
private ICryptoTransform _transform;
public BlockTransformer(ICryptoTransform transformer)
{
_transform = transformer;
}
public byte[] ProcessBlock(ReadOnlySpan<byte> cipherText)
{
var decryptedBytes = new byte[cipherText.Length];
var bytes = _transform.TransformBlock(
cipherText.ToArray(),
0,
cipherText.Length,
decryptedBytes,
0
);
transformer.TransformBlock(cipherText.ToArray(), 0, cipherText.Length, decryptedBytes, 0);
return decryptedBytes;
}
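BlockTransformer here (and Crc32Stream below) are converted to C# 12 primary constructors: the constructor parameter is captured directly and the explicit backing field disappears. A minimal sketch of the same transformation on an arbitrary, made-up class:

using System.IO;

// Before: explicit backing field plus constructor boilerplate.
internal class Checksummer
{
    private readonly Stream _stream;
    public Checksummer(Stream stream) => _stream = stream;
    public long Length => _stream.Length;
}

// After (C# 12): the primary-constructor parameter is captured where it is used.
internal class ChecksummerPrimary(Stream stream)
{
    public long Length => stream.Length;
}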

View File

@@ -6,27 +6,17 @@ using System.IO;
namespace SharpCompress.Crypto;
[CLSCompliant(false)]
public sealed class Crc32Stream : Stream
public sealed class Crc32Stream(Stream stream, uint polynomial, uint seed) : Stream
{
public const uint DefaultPolynomial = 0xedb88320u;
public const uint DefaultSeed = 0xffffffffu;
public const uint DEFAULT_POLYNOMIAL = 0xedb88320u;
public const uint DEFAULT_SEED = 0xffffffffu;
private static uint[] defaultTable;
private static uint[] _defaultTable;
private readonly uint[] table;
private uint hash;
private readonly Stream stream;
private readonly uint[] _table = InitializeTable(polynomial);
public Crc32Stream(Stream stream)
: this(stream, DefaultPolynomial, DefaultSeed) { }
public Crc32Stream(Stream stream, uint polynomial, uint seed)
{
this.stream = stream;
table = InitializeTable(polynomial);
hash = seed;
}
: this(stream, DEFAULT_POLYNOMIAL, DEFAULT_SEED) { }
public Stream WrappedStream => stream;
@@ -45,20 +35,20 @@ public sealed class Crc32Stream : Stream
{
stream.Write(buffer);
hash = CalculateCrc(table, hash, buffer);
seed = CalculateCrc(_table, seed, buffer);
}
#endif
public override void Write(byte[] buffer, int offset, int count)
{
stream.Write(buffer, offset, count);
hash = CalculateCrc(table, hash, buffer.AsSpan(offset, count));
seed = CalculateCrc(_table, seed, buffer.AsSpan(offset, count));
}
public override void WriteByte(byte value)
{
stream.WriteByte(value);
hash = CalculateCrc(table, hash, value);
seed = CalculateCrc(_table, seed, value);
}
public override bool CanRead => stream.CanRead;
@@ -71,21 +61,21 @@ public sealed class Crc32Stream : Stream
set => throw new NotSupportedException();
}
public uint Crc => ~hash;
public uint Crc => ~seed;
public static uint Compute(byte[] buffer) => Compute(DefaultSeed, buffer);
public static uint Compute(byte[] buffer) => Compute(DEFAULT_SEED, buffer);
public static uint Compute(uint seed, byte[] buffer) =>
Compute(DefaultPolynomial, seed, buffer);
Compute(DEFAULT_POLYNOMIAL, seed, buffer);
public static uint Compute(uint polynomial, uint seed, ReadOnlySpan<byte> buffer) =>
~CalculateCrc(InitializeTable(polynomial), seed, buffer);
private static uint[] InitializeTable(uint polynomial)
{
if (polynomial == DefaultPolynomial && defaultTable != null)
if (polynomial == DEFAULT_POLYNOMIAL && _defaultTable != null)
{
return defaultTable;
return _defaultTable;
}
var createTable = new uint[256];
@@ -107,9 +97,9 @@ public sealed class Crc32Stream : Stream
createTable[i] = entry;
}
if (polynomial == DefaultPolynomial)
if (polynomial == DEFAULT_POLYNOMIAL)
{
defaultTable = createTable;
_defaultTable = createTable;
}
return createTable;
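
The static Compute helpers shown above give a one-shot CRC-32 without wrapping a stream; a minimal sketch using the standard "123456789" test vector:

using System.Text;
using SharpCompress.Crypto;

// Compute(byte[]) uses the default polynomial and seed declared above.
var data = Encoding.ASCII.GetBytes("123456789");
var crc = Crc32Stream.Compute(data); // 0xCBF43926, the standard CRC-32 check value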

@@ -1,14 +0,0 @@
using System;
namespace SharpCompress.Crypto;
public class CryptoException : Exception
{
public CryptoException() { }
public CryptoException(string message)
: base(message) { }
public CryptoException(string message, Exception exception)
: base(message, exception) { }
}

@@ -1,24 +0,0 @@
using System;
namespace SharpCompress.Crypto;
public class DataLengthException : CryptoException
{
/**
* base constructor.
*/
public DataLengthException() { }
/**
* create a DataLengthException with the given message.
*
* @param message the message to be carried with the exception.
*/
public DataLengthException(string message)
: base(message) { }
public DataLengthException(string message, Exception exception)
: base(message, exception) { }
}

@@ -1,33 +0,0 @@
using System;
namespace SharpCompress.Crypto;
/// <remarks>Base interface for a symmetric key block cipher.</remarks>
public interface IBlockCipher
{
/// <summary>The name of the algorithm this cipher implements.</summary>
string AlgorithmName { get; }
/// <summary>Initialise the cipher.</summary>
/// <param name="forEncryption">Initialise for encryption if true, for decryption if false.</param>
/// <param name="parameters">The key or other data required by the cipher.</param>
void Init(bool forEncryption, ICipherParameters parameters);
/// <returns>The block size for this cipher, in bytes.</returns>
int GetBlockSize();
/// <summary>Indicates whether this cipher can handle partial blocks.</summary>
bool IsPartialBlockOkay { get; }
/// <summary>Process a block.</summary>
/// <param name="inBuf">The input buffer.</param>
/// <param name="outBuf">The output buffer.</param>
/// <exception cref="DataLengthException">If input block is wrong size, or outBuf too small.</exception>
/// <returns>The number of bytes processed and produced.</returns>
int ProcessBlock(ReadOnlySpan<byte> inBuf, Span<byte> outBuf);
/// <summary>
/// Reset the cipher to the same state as it was after the last init (if there was one).
/// </summary>
void Reset();
}

@@ -1,3 +0,0 @@
namespace SharpCompress.Crypto;
public interface ICipherParameters { }

@@ -1,39 +0,0 @@
using System;
namespace SharpCompress.Crypto;
public class KeyParameter : ICipherParameters
{
private readonly byte[] key;
public KeyParameter(byte[] key)
{
if (key is null)
{
throw new ArgumentNullException(nameof(key));
}
this.key = (byte[])key.Clone();
}
public KeyParameter(byte[] key, int keyOff, int keyLen)
{
if (key is null)
{
throw new ArgumentNullException(nameof(key));
}
if (keyOff < 0 || keyOff > key.Length)
{
throw new ArgumentOutOfRangeException(nameof(keyOff));
}
if (keyLen < 0 || (keyOff + keyLen) > key.Length)
{
throw new ArgumentOutOfRangeException(nameof(keyLen));
}
this.key = new byte[keyLen];
Array.Copy(key, keyOff, this.key, 0, keyLen);
}
public byte[] GetKey() => (byte[])key.Clone();
}

@@ -19,7 +19,7 @@ public abstract class Factory : IFactory
RegisterFactory(new TarFactory());
}
private static readonly HashSet<Factory> _factories = new HashSet<Factory>();
private static readonly HashSet<Factory> _factories = new();
/// <summary>
/// Gets the collection of registered <see cref="IFactory"/>.

@@ -6,6 +6,7 @@ using SharpCompress.Common;
using SharpCompress.Compressors;
using SharpCompress.Compressors.BZip2;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.Lzw;
using SharpCompress.Compressors.Xz;
using SharpCompress.IO;
using SharpCompress.Readers;
@@ -160,6 +161,19 @@ public class TarFactory
}
}
rewindableStream.Rewind(false);
if (LzwStream.IsLzwStream(rewindableStream))
{
rewindableStream.Rewind(false);
var testStream = new LzwStream(rewindableStream);
if (TarArchive.IsTarFile(testStream))
{
rewindableStream.Rewind(true);
reader = new TarReader(rewindableStream, options, CompressionType.Lzw);
return true;
}
}
return false;
}
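
With the probe above, a tar wrapped in a Unix 'compress' (LZW, .Z) stream is detected by the generic reader factory; a minimal sketch of consuming such an archive, with a hypothetical file name:

using System.IO;
using SharpCompress.Readers;

// Hypothetical .tar.Z archive; the factory rewinds, tests for an LZW stream that
// contains a tar, and returns a TarReader with CompressionType.Lzw.
using var stream = File.OpenRead("archive.tar.Z");
using var reader = ReaderFactory.Open(stream);
while (reader.MoveToNextEntry())
{
    if (reader.Entry.IsDirectory)
    {
        continue;
    }
    using var entryStream = reader.OpenEntryStream();
    using var output = File.Create(Path.GetFileName(reader.Entry.Key));
    entryStream.CopyTo(output);
}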

@@ -3,22 +3,14 @@ using System.IO;
namespace SharpCompress.IO;
internal class BufferedSubStream : NonDisposingStream
internal class BufferedSubStream(Stream stream, long origin, long bytesToRead)
: NonDisposingStream(stream, throwOnDispose: false)
{
private long position;
private int cacheOffset;
private int cacheLength;
private readonly byte[] cache;
private int _cacheOffset;
private int _cacheLength;
private readonly byte[] _cache = new byte[32 << 10];
public BufferedSubStream(Stream stream, long origin, long bytesToRead)
: base(stream, throwOnDispose: false)
{
position = origin;
BytesLeftToRead = bytesToRead;
cache = new byte[32 << 10];
}
private long BytesLeftToRead { get; set; }
private long BytesLeftToRead { get; set; } = bytesToRead;
public override bool CanRead => true;
@@ -45,22 +37,22 @@ internal class BufferedSubStream : NonDisposingStream
if (count > 0)
{
if (cacheLength == 0)
if (_cacheLength == 0)
{
cacheOffset = 0;
Stream.Position = position;
cacheLength = Stream.Read(cache, 0, cache.Length);
position += cacheLength;
_cacheOffset = 0;
Stream.Position = origin;
_cacheLength = Stream.Read(_cache, 0, _cache.Length);
origin += _cacheLength;
}
if (count > cacheLength)
if (count > _cacheLength)
{
count = cacheLength;
count = _cacheLength;
}
Buffer.BlockCopy(cache, cacheOffset, buffer, offset, count);
cacheOffset += count;
cacheLength -= count;
Buffer.BlockCopy(_cache, _cacheOffset, buffer, offset, count);
_cacheOffset += count;
_cacheLength -= count;
BytesLeftToRead -= count;
}
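
A subtle point in the refactor above: primary-constructor parameters are captured as mutable state, so origin now doubles as the advancing read position that the old position field used to track. A minimal sketch of that behaviour, with an illustrative class name:

// A primary-constructor parameter can be reassigned from members, just like a
// private field; each call advances the captured value.
public class Counter(int start)
{
    public int Next() => start++;
}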

@@ -8,18 +8,18 @@ public class DataDescriptorStream : Stream
{
private readonly Stream _stream;
private long _start;
private int _search_position;
private int _searchPosition;
private bool _isDisposed;
private bool _done;
private static byte[] DataDescriptorMarker = new byte[] { 0x50, 0x4b, 0x07, 0x08 };
private static long DataDescriptorSize = 24;
private static byte[] _dataDescriptorMarker = new byte[] { 0x50, 0x4b, 0x07, 0x08 };
private static long _dataDescriptorSize = 24;
public DataDescriptorStream(Stream stream)
{
_stream = stream;
_start = _stream.Position;
_search_position = 0;
_searchPosition = 0;
_done = false;
}
@@ -60,20 +60,20 @@ public class DataDescriptorStream : Stream
var br = new BinaryReader(stream);
br.ReadUInt32();
br.ReadUInt32(); // CRC32 can be checked if we calculate it
var compressed_size = br.ReadUInt32();
var uncompressed_size = br.ReadUInt32();
var uncompressed_64bit = br.ReadInt64();
var compressedSize = br.ReadUInt32();
var uncompressedSize = br.ReadUInt32();
var uncompressed64Bit = br.ReadInt64();
stream.Position -= DataDescriptorSize;
stream.Position -= _dataDescriptorSize;
var test_64bit = ((long)uncompressed_size << 32) | compressed_size;
var test64Bit = ((long)uncompressedSize << 32) | compressedSize;
if (test_64bit == size && test_64bit == uncompressed_64bit)
if (test64Bit == size && test64Bit == uncompressed64Bit)
{
return true;
}
if (compressed_size == size && compressed_size == uncompressed_size)
if (compressedSize == size && compressedSize == uncompressedSize)
{
return true;
}
@@ -88,24 +88,24 @@ public class DataDescriptorStream : Stream
return 0;
}
int read = _stream.Read(buffer, offset, count);
var read = _stream.Read(buffer, offset, count);
for (int i = 0; i < read; i++)
for (var i = 0; i < read; i++)
{
if (buffer[offset + i] == DataDescriptorMarker[_search_position])
if (buffer[offset + i] == _dataDescriptorMarker[_searchPosition])
{
_search_position++;
_searchPosition++;
if (_search_position == 4)
if (_searchPosition == 4)
{
_search_position = 0;
_searchPosition = 0;
if (read - i > DataDescriptorSize)
if (read - i > _dataDescriptorSize)
{
var check = new MemoryStream(
buffer,
offset + i - 3,
(int)DataDescriptorSize
(int)_dataDescriptorSize
);
_done = validate_data_descriptor(
check,
@@ -131,15 +131,15 @@ public class DataDescriptorStream : Stream
}
else
{
_search_position = 0;
_searchPosition = 0;
}
}
if (_search_position > 0)
if (_searchPosition > 0)
{
read -= _search_position;
_stream.Position -= _search_position;
_search_position = 0;
read -= _searchPosition;
_stream.Position -= _searchPosition;
_searchPosition = 0;
}
return read;
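
For context on the renamed marker above: the four bytes are the little-endian encoding of the optional zip data-descriptor signature 0x08074b50, which Read scans for before validating the descriptor; restated as a standalone snippet:

// 0x08074b50 stored little-endian: the byte sequence the scanner matches.
byte[] dataDescriptorMarker = { 0x50, 0x4b, 0x07, 0x08 };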

@@ -5,13 +5,13 @@ namespace SharpCompress.IO;
internal class ListeningStream : Stream
{
private long currentEntryTotalReadBytes;
private readonly IExtractionListener listener;
private long _currentEntryTotalReadBytes;
private readonly IExtractionListener _listener;
public ListeningStream(IExtractionListener listener, Stream stream)
{
Stream = stream;
this.listener = listener;
this._listener = listener;
}
protected override void Dispose(bool disposing)
@@ -44,8 +44,8 @@ internal class ListeningStream : Stream
public override int Read(byte[] buffer, int offset, int count)
{
var read = Stream.Read(buffer, offset, count);
currentEntryTotalReadBytes += read;
listener.FireCompressedBytesRead(currentEntryTotalReadBytes, currentEntryTotalReadBytes);
_currentEntryTotalReadBytes += read;
_listener.FireCompressedBytesRead(_currentEntryTotalReadBytes, _currentEntryTotalReadBytes);
return read;
}
@@ -57,8 +57,8 @@ internal class ListeningStream : Stream
return -1;
}
++currentEntryTotalReadBytes;
listener.FireCompressedBytesRead(currentEntryTotalReadBytes, currentEntryTotalReadBytes);
++_currentEntryTotalReadBytes;
_listener.FireCompressedBytesRead(_currentEntryTotalReadBytes, _currentEntryTotalReadBytes);
return value;
}

@@ -72,8 +72,8 @@ internal class ReadOnlySubStream : NonDisposingStream
#if !NETFRAMEWORK && !NETSTANDARD2_0
public override int Read(Span<byte> buffer)
{
var slice_len = BytesLeftToRead < buffer.Length ? BytesLeftToRead : buffer.Length;
var read = Stream.Read(buffer.Slice(0, (int)slice_len));
var sliceLen = BytesLeftToRead < buffer.Length ? BytesLeftToRead : buffer.Length;
var read = Stream.Read(buffer.Slice(0, (int)sliceLen));
if (read > 0)
{
BytesLeftToRead -= read;

Some files were not shown because too many files have changed in this diff.