Compare commits


4 Commits

Author         SHA1         Message                             Date
Adam Hathcock  d6a6085d75   Update docker build                 2019-10-22 17:28:09 +01:00
Adam Hathcock  b5a897819d   Fix pack                            2019-10-22 17:26:08 +01:00
Adam Hathcock  9e842ee8ec   Fix cake build                      2019-10-22 17:23:58 +01:00
Adam Hathcock  a04a0a5912   Use RecyclableMemoryStreamManager   2019-10-22 17:13:44 +01:00
116 changed files with 1976 additions and 1381 deletions

.circleci/config.yml  Normal file  (16 changed lines)
View File

@@ -0,0 +1,16 @@
version: 2
jobs:
build:
docker:
- image: mcr.microsoft.com/dotnet/core/sdk:2.2-alpine
steps:
- checkout
- run:
name: Install Cake
command: |
dotnet tool install -g Cake.Tool
echo 'export PATH=$PATH:/root/.dotnet/tools' >> $BASH_ENV
source $BASH_ENV
- run:
name: Build
command: dotnet cake build.cake

View File

@@ -1,17 +0,0 @@
name: SharpCompress
on: [push]
jobs:
build:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [windows-latest, ubuntu-latest]
steps:
- uses: actions/checkout@v1
- uses: actions/setup-dotnet@v1
with:
dotnet-version: 3.1.202
- name: Run the Cake script
uses: ecampidoglio/cake-action@master

View File

@@ -1,14 +1,14 @@
# SharpCompress
SharpCompress is a compression library in pure C# for .NET Standard 1.3 and 2.0 that can unrar, un7zip, unzip, untar unbzip2 and ungzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip are implemented.
SharpCompress is a compression library in pure C# for .NET Standard 1.4 and 2.0 and .NET 4.6 that can unrar, un7zip, unzip, untar unbzip2 and ungzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip are implemented.
The major feature is support for non-seekable streams so large files can be processed on the fly (i.e. download stream).
AppVeyor Build -
[![Build status](https://ci.appveyor.com/api/projects/status/voxg971oemmvxh1e/branch/master?svg=true)](https://ci.appveyor.com/project/adamhathcock/sharpcompress/branch/master)
GitHub Actions Build -
[![GitHubActions](https://github.com/adamhathcock/sharpcompress/workflows/SharpCompress/badge.svg)](https://circleci.com/gh/adamhathcock/sharpcompress)
Circle CI Build -
[![CircleCI](https://circleci.com/gh/adamhathcock/sharpcompress.svg?style=svg)](https://circleci.com/gh/adamhathcock/sharpcompress)
## Need Help?

View File

@@ -126,7 +126,4 @@
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EAddAccessorOwnerDeclarationBracesMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002ECSharpPlaceAttributeOnSameLineMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateBlankLinesAroundFieldToBlankLinesAroundProperty/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateThisQualifierSettings/@EntryIndexedValue">True</s:Boolean>
<s:String x:Key="/Default/Environment/UnitTesting/UnitTestSessionStore/Sessions/=6af8f80e_002D9fdd_002D4223_002D8e02_002D473db916f9b2/@EntryIndexedValue">&lt;SessionState ContinuousTestingIsOn="False" ContinuousTestingMode="0" FrameworkVersion="{x:Null}" IsLocked="False" Name="All tests from Solution" PlatformMonoPreference="{x:Null}" PlatformType="{x:Null}" xmlns="urn:schemas-jetbrains-com:jetbrains-ut-session" xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"&gt;
&lt;Solution /&gt;
&lt;/SessionState&gt;</s:String></wpf:ResourceDictionary>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateThisQualifierSettings/@EntryIndexedValue">True</s:Boolean></wpf:ResourceDictionary>

View File

@@ -1,5 +1,5 @@
version: '{build}'
image: Visual Studio 2019
image: Visual Studio 2017
pull_requests:
do_not_increment_build_number: true

View File

@@ -11,32 +11,23 @@ Task("Build")
.IsDependentOn("Restore")
.Does(() =>
{
var settings = new DotNetCoreBuildSettings
{
Framework = "netstandard1.4",
Configuration = "Release",
NoRestore = true
};
DotNetCoreBuild("./src/SharpCompress/SharpCompress.csproj", settings);
if (IsRunningOnWindows())
{
MSBuild("./sharpcompress.sln", c =>
{
c.SetConfiguration("Release")
.SetVerbosity(Verbosity.Minimal)
.UseToolVersion(MSBuildToolVersion.VS2019);
});
}
else
{
var settings = new DotNetCoreBuildSettings
{
Framework = "netstandard1.3",
Configuration = "Release",
NoRestore = true
};
DotNetCoreBuild("./src/SharpCompress/SharpCompress.csproj", settings);
settings.Framework = "netstandard2.0";
DotNetCoreBuild("./src/SharpCompress/SharpCompress.csproj", settings);
settings.Framework = "netstandard2.1";
settings.Framework = "net46";
DotNetCoreBuild("./src/SharpCompress/SharpCompress.csproj", settings);
}
settings.Framework = "netstandard2.0";
DotNetCoreBuild("./src/SharpCompress/SharpCompress.csproj", settings);
});
Task("Test")
@@ -49,7 +40,7 @@ Task("Test")
var settings = new DotNetCoreTestSettings
{
Configuration = "Release",
Framework = "netcoreapp3.1"
Framework = "netcoreapp2.2"
};
DotNetCoreTest(file.ToString(), settings);
}
@@ -58,16 +49,17 @@ Task("Test")
Task("Pack")
.IsDependentOn("Build")
.Does(() =>
{
{
if (IsRunningOnWindows())
{
MSBuild("src/SharpCompress/SharpCompress.csproj", c => c
.SetConfiguration("Release")
.SetVerbosity(Verbosity.Minimal)
.UseToolVersion(MSBuildToolVersion.VS2019)
.WithProperty("NoBuild", "true")
.WithTarget("Pack"));
}
var settings = new DotNetCorePackSettings
{
Configuration = "Release",
NoBuild = true
};
DotNetCorePack("src/SharpCompress/SharpCompress.csproj", settings);
}
else
{
Information("Skipping Pack as this is not Windows");

View File

@@ -23,7 +23,7 @@ namespace SharpCompress.Archives
protected ReaderOptions ReaderOptions { get; }
private bool disposed;
internal AbstractArchive(ArchiveType type, FileInfo fileInfo, ReaderOptions readerOptions)
{
Type = type;
@@ -140,12 +140,12 @@ namespace SharpCompress.Archives
/// <summary>
/// Use this method to extract all entries in an archive in order.
/// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
/// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
/// extracted sequentially for the best performance.
///
///
/// This method will load all entry information from the archive.
///
/// WARNING: this will reuse the underlying stream for the archive. Errors may
///
/// WARNING: this will reuse the underlying stream for the archive. Errors may
/// occur if this is used at the same time as other extraction methods on this instance.
/// </summary>
/// <returns></returns>
@@ -174,4 +174,4 @@ namespace SharpCompress.Archives
}
}
}
}
}

View File

@@ -142,4 +142,4 @@ namespace SharpCompress.Archives
modifiedEntries.Cast<Entry>().ForEach(x => x.Close());
}
}
}
}

View File

@@ -129,13 +129,13 @@ namespace SharpCompress.Archives.SevenZip
}
}
private static ReadOnlySpan<byte> SIGNATURE => new byte[] {(byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C};
private static readonly byte[] SIGNATURE = {(byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C};
private static bool SignatureMatch(Stream stream)
{
BinaryReader reader = new BinaryReader(stream);
ReadOnlySpan<byte> signatureBytes = reader.ReadBytes(6);
return signatureBytes.SequenceEqual(SIGNATURE);
byte[] signatureBytes = reader.ReadBytes(6);
return signatureBytes.BinaryEquals(SIGNATURE);
}
protected override IReader CreateReaderForSolidExtraction()
@@ -201,7 +201,7 @@ namespace SharpCompress.Archives.SevenZip
return CreateEntryStream(new ReadOnlySubStream(currentStream, currentItem.Size));
}
}
private class PasswordProvider : IPasswordProvider
{
private readonly string _password;
@@ -209,6 +209,7 @@ namespace SharpCompress.Archives.SevenZip
public PasswordProvider(string password)
{
_password = password;
}
public string CryptoGetTextPassword()
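The SIGNATURE change in this hunk swaps between a byte[] field and a static ReadOnlySpan<byte> property; the property form lets the compiler serve the constant bytes straight from the assembly's data section with no per-call allocation, and the span overload of SequenceEqual compares them directly. A minimal, self-contained sketch (type and method names here are illustrative):

using System;
using System.IO;

static class SevenZipSignature
{
    // Emitted as static data by the compiler; no array is allocated when this is read.
    private static ReadOnlySpan<byte> Signature => new byte[] { (byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C };

    public static bool Matches(Stream stream)
    {
        byte[] buffer = new byte[6];
        int read = stream.Read(buffer, 0, buffer.Length);
        // A short read cannot match the 6-byte signature.
        return read == buffer.Length && buffer.AsSpan().SequenceEqual(Signature);
    }
}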

View File

@@ -138,7 +138,7 @@ namespace SharpCompress.Archives.Tar
using (var entryStream = entry.OpenEntryStream())
{
using (var memoryStream = new MemoryStream())
using (var memoryStream = Utility.RECYCLABLE_MEMORY_STREAM_MANAGER.GetStream())
{
entryStream.TransferTo(memoryStream);
memoryStream.Position = 0;
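The Utility.RECYCLABLE_MEMORY_STREAM_MANAGER calls introduced throughout this comparison come from the Microsoft.IO.RecyclableMemoryStream package, which pools the byte buffers behind MemoryStream-compatible streams so repeated extraction does not churn the large object heap. The Utility declaration itself is not part of this diff; a plausible minimal sketch:

using System.IO;
using Microsoft.IO; // Microsoft.IO.RecyclableMemoryStream NuGet package

internal static class Utility
{
    // A single shared manager so pooled buffers are reused across the library.
    public static readonly RecyclableMemoryStreamManager RECYCLABLE_MEMORY_STREAM_MANAGER =
        new RecyclableMemoryStreamManager();
}

// Usage is drop-in for MemoryStream; Dispose returns the buffers to the pool.
// using (MemoryStream buffered = Utility.RECYCLABLE_MEMORY_STREAM_MANAGER.GetStream())
// {
//     entryStream.CopyTo(buffered);
//     buffered.Position = 0;
// }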

View File

@@ -5,6 +5,7 @@ using System.Runtime.CompilerServices;
[assembly: AssemblyTitle("SharpCompress")]
[assembly: AssemblyProduct("SharpCompress")]
[assembly: InternalsVisibleTo("SharpCompress.Test" + SharpCompress.AssemblyInfo.PublicKeySuffix)]
[assembly: InternalsVisibleTo("SharpCompress.Test.Portable" + SharpCompress.AssemblyInfo.PublicKeySuffix)]
[assembly: CLSCompliant(true)]
namespace SharpCompress

View File

@@ -32,12 +32,10 @@ namespace SharpCompress.Common
Password = Encoding.GetEncoding(437);
}
#if NETSTANDARD1_3 || NETSTANDARD2_0 || NETSTANDARD2_1
static ArchiveEncoding()
{
Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
}
#endif
public string Decode(byte[] bytes)
{
@@ -69,4 +67,4 @@ namespace SharpCompress.Common
return CustomDecoder ?? ((bytes, index, count) => GetEncoding().GetString(bytes, index, count));
}
}
}
}
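The ArchiveEncoding hunk above hinges on Encoding.GetEncoding(437): on .NET Core and .NET Standard that code page is only available after registering the provider from the System.Text.Encoding.CodePages package, which is what the conditional static constructor does. A minimal sketch:

using System.Text;

static class Cp437Example
{
    static Cp437Example()
    {
        // Without this, Encoding.GetEncoding(437) throws NotSupportedException on .NET Core.
        Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
    }

    public static string DecodeLegacyName(byte[] bytes)
    {
        return Encoding.GetEncoding(437).GetString(bytes);
    }
}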

View File

@@ -1,6 +1,4 @@
using System;
namespace SharpCompress.Common
namespace SharpCompress.Common
{
public class ExtractionOptions
{
@@ -31,10 +29,6 @@ namespace SharpCompress.Common
/// </summary>
public delegate void SymbolicLinkWriterDelegate(string sourcePath, string targetPath);
public SymbolicLinkWriterDelegate WriteSymbolicLink =
(sourcePath, targetPath) =>
{
Console.WriteLine($"Could not write symlink {sourcePath} -> {targetPath}, for more information please see https://github.com/dotnet/runtime/issues/24271");
};
public SymbolicLinkWriterDelegate WriteSymbolicLink;
}
}

View File

@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
namespace SharpCompress.Common.GZip
{

View File

@@ -1,10 +1,11 @@
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Converters;
using System.Text;
namespace SharpCompress.Common.GZip
{
@@ -59,7 +60,7 @@ namespace SharpCompress.Common.GZip
throw new ZlibException("Bad GZIP header.");
}
int timet = BinaryPrimitives.ReadInt32LittleEndian(header.AsSpan(4));
Int32 timet = DataConverter.LittleEndian.GetInt32(header, 4);
DateModified = TarHeader.EPOCH.AddSeconds(timet);
if ((header[3] & 0x04) == 0x04)
{
@@ -68,7 +69,7 @@ namespace SharpCompress.Common.GZip
Int16 extraLength = (Int16)(header[0] + header[1] * 256);
byte[] extra = new byte[extraLength];
if (!stream.ReadFully(extra))
{
throw new ZlibException("Unexpected end-of-file reading GZIP header.");
@@ -116,4 +117,4 @@ namespace SharpCompress.Common.GZip
return ArchiveEncoding.Decode(buffer);
}
}
}
}
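Both APIs in the GZipFilePart hunk read the same field: bytes 4-7 of the GZIP header hold MTIME, a little-endian Unix timestamp. A minimal sketch of the span-based form, assuming the header array already contains the 10 fixed header bytes:

using System;
using System.Buffers.Binary;

static class GzipHeaderExample
{
    // MTIME occupies header[4..8] as a little-endian seconds-since-epoch value.
    public static DateTime ReadMtime(byte[] header) =>
        new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc)
            .AddSeconds(BinaryPrimitives.ReadInt32LittleEndian(header.AsSpan(4)));
}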

View File

@@ -1,4 +1,5 @@
using System.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common
{

View File

@@ -7,10 +7,8 @@ namespace SharpCompress.Common.Rar.Headers
public AvHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Av)
{
if (IsRar5)
{
if (IsRar5)
throw new InvalidFormatException("unexpected rar5 record");
}
}
protected override void ReadFinish(MarkingBinaryReader reader)

View File

@@ -38,11 +38,7 @@ namespace SharpCompress.Common.Rar.Headers
private void ReadLocator(MarkingBinaryReader reader) {
var size = reader.ReadRarVIntUInt16();
var type = reader.ReadRarVIntUInt16();
if (type != 1)
{
throw new InvalidFormatException("expected locator record");
}
if (type != 1) throw new InvalidFormatException("expected locator record");
var flags = reader.ReadRarVIntUInt16();
const ushort hasQuickOpenOffset = 0x01;
const ushort hasRecoveryOffset = 0x02;

View File

@@ -7,10 +7,7 @@ namespace SharpCompress.Common.Rar.Headers
protected CommentHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Comment)
{
if (IsRar5)
{
throw new InvalidFormatException("unexpected rar5 record");
}
if (IsRar5) throw new InvalidFormatException("unexpected rar5 record");
}
protected override void ReadFinish(MarkingBinaryReader reader)

View File

@@ -46,38 +46,19 @@ namespace SharpCompress.Common.Rar.Headers
if (b == 0x61)
{
b = GetByte(stream); start++;
if (b != 0x72)
{
continue;
}
if (b != 0x72) continue;
b = GetByte(stream); start++;
if (b != 0x21)
{
continue;
}
if (b != 0x21) continue;
b = GetByte(stream); start++;
if (b != 0x1a)
{
continue;
}
if (b != 0x1a) continue;
b = GetByte(stream); start++;
if (b != 0x07)
{
continue;
}
if (b != 0x07) continue;
b = GetByte(stream); start++;
if (b == 1)
{
b = GetByte(stream); start++;
if (b != 0)
{
continue;
}
if (b != 0) continue;
return new MarkHeader(true); // Rar5
}
else if (b == 0)
@@ -88,17 +69,9 @@ namespace SharpCompress.Common.Rar.Headers
else if (b == 0x45)
{
b = GetByte(stream); start++;
if (b != 0x7e)
{
continue;
}
if (b != 0x7e) continue;
b = GetByte(stream); start++;
if (b != 0x5e)
{
continue;
}
if (b != 0x5e) continue;
throw new InvalidFormatException("Rar format version pre-4 is unsupported.");
}
}
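The MarkHeader scan above matches the RAR marker block one byte at a time (the leading 'R' test sits just before this hunk): 52 61 72 21 1A 07 00 identifies RAR 4.x, 52 61 72 21 1A 07 01 00 identifies RAR 5.x, and a 52 45 7E 5E prefix marks the unsupported pre-4 format. A compact sketch of the same check done against a buffered prefix (names are illustrative):

using System;

static class RarMarker
{
    private static ReadOnlySpan<byte> Rar4 => new byte[] { 0x52, 0x61, 0x72, 0x21, 0x1A, 0x07, 0x00 };
    private static ReadOnlySpan<byte> Rar5 => new byte[] { 0x52, 0x61, 0x72, 0x21, 0x1A, 0x07, 0x01, 0x00 };

    // Returns 5 for a RAR5 marker, 4 for RAR4, 0 for no match at the start of the buffer.
    public static int Detect(ReadOnlySpan<byte> prefix)
    {
        if (prefix.Length >= Rar5.Length && prefix.StartsWith(Rar5)) return 5;
        if (prefix.Length >= Rar4.Length && prefix.StartsWith(Rar4)) return 4;
        return 0;
    }
}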

View File

@@ -8,10 +8,7 @@ namespace SharpCompress.Common.Rar.Headers
public ProtectHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Protect)
{
if (IsRar5)
{
throw new InvalidFormatException("unexpected rar5 record");
}
if (IsRar5) throw new InvalidFormatException("unexpected rar5 record");
}
protected override void ReadFinish(MarkingBinaryReader reader)

View File

@@ -1,3 +1,4 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.IO;

View File

@@ -7,10 +7,7 @@ namespace SharpCompress.Common.Rar.Headers
protected SignHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Sign)
{
if (IsRar5)
{
throw new InvalidFormatException("unexpected rar5 record");
}
if (IsRar5) throw new InvalidFormatException("unexpected rar5 record");
}
protected override void ReadFinish(MarkingBinaryReader reader)

View File

@@ -81,9 +81,7 @@ namespace SharpCompress.Common.Rar
byte[] cipherText = ReadBytesNoCrc(16);
var readBytes = _rijndael.ProcessBlock(cipherText);
foreach (var readByte in readBytes)
{
_data.Enqueue(readByte);
}
}
}

View File

@@ -50,23 +50,20 @@ namespace SharpCompress.Common.Rar
if (sizeToRead > 0)
{
int alignedSize = sizeToRead + ((~sizeToRead + 1) & 0xf);
byte[] cipherText = new byte[RarRijndael.CRYPTO_BLOCK_SIZE];
for (int i = 0; i < alignedSize / 16; i++)
{
//long ax = System.currentTimeMillis();
byte[] cipherText = new byte[RarRijndael.CRYPTO_BLOCK_SIZE];
_actualStream.Read(cipherText, 0, RarRijndael.CRYPTO_BLOCK_SIZE);
var readBytes = _rijndael.ProcessBlock(cipherText);
foreach (var readByte in readBytes)
{
_data.Enqueue(readByte);
}
}
for (int i = 0; i < count; i++)
{
buffer[offset + i] = _data.Dequeue();
}
}
return count;
}
@@ -96,4 +93,4 @@ namespace SharpCompress.Common.Rar
base.Dispose(disposing);
}
}
}
}

View File

@@ -1,4 +1,5 @@
using System;
using System.Collections.Generic;
using System.Security.Cryptography;
using System.Text;
using SharpCompress.Crypto;
@@ -88,20 +89,19 @@ namespace SharpCompress.Common.Rar
public byte[] ProcessBlock(byte[] cipherText)
{
var plainText = new byte[CRYPTO_BLOCK_SIZE];
byte[] decryptedBytes = new byte[CRYPTO_BLOCK_SIZE];
var decryptedBytes = new List<byte>();
_rijndael.ProcessBlock(cipherText, 0, plainText, 0);
for (int j = 0; j < CRYPTO_BLOCK_SIZE; j++)
for (int j = 0; j < plainText.Length; j++)
{
decryptedBytes[j] = (byte)(plainText[j] ^ _aesInitializationVector[j % 16]); //32:114, 33:101
decryptedBytes.Add((byte) (plainText[j] ^ _aesInitializationVector[j%16])); //32:114, 33:101
}
for (int j = 0; j < _aesInitializationVector.Length; j++)
{
_aesInitializationVector[j] = cipherText[j];
}
return decryptedBytes;
return decryptedBytes.ToArray();
}
public void Dispose()
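ProcessBlock in the hunk above is hand-rolled CBC decryption: each 16-byte block is decrypted, XORed with the current IV, and the raw ciphertext then becomes the IV for the next block. A minimal sketch of just that chaining step; rawDecryptBlock stands in for the underlying Rijndael engine and is hypothetical:

using System;

static class RarCbc
{
    // rawDecryptBlock: the cipher's raw (unchained) 16-byte block decrypt, supplied by the caller.
    public static byte[] DecryptBlock(Func<byte[], byte[]> rawDecryptBlock, byte[] cipherText, byte[] iv)
    {
        byte[] plain = rawDecryptBlock(cipherText);
        var output = new byte[16];
        for (int i = 0; i < 16; i++)
        {
            output[i] = (byte)(plain[i] ^ iv[i]);  // undo the XOR applied at encryption time
        }
        Array.Copy(cipherText, iv, 16);            // this block's ciphertext becomes the next IV
        return output;
    }
}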

View File

@@ -22,7 +22,7 @@ namespace SharpCompress.Common.SevenZip
internal List<long> _packStreamStartPositions = new List<long>();
internal List<int> _folderStartFileIndex = new List<int>();
internal List<int> _fileIndexToFolderIndexMap = new List<int>();
internal IPasswordProvider PasswordProvider { get; }
public ArchiveDatabase(IPasswordProvider passwordProvider)
@@ -152,14 +152,13 @@ namespace SharpCompress.Common.SevenZip
{
int packStreamIndex = folder._firstPackStreamId;
long folderStartPackPos = GetFolderStreamPos(folder, 0);
int count = folder._packStreams.Count;
long[] packSizes = new long[count];
for (int j = 0; j < count; j++)
List<long> packSizes = new List<long>();
for (int j = 0; j < folder._packStreams.Count; j++)
{
packSizes[j] = _packSizes[packStreamIndex + j];
packSizes.Add(_packSizes[packStreamIndex + j]);
}
return DecoderStreamHelper.CreateDecoderStream(stream, folderStartPackPos, packSizes, folder, pw);
return DecoderStreamHelper.CreateDecoderStream(stream, folderStartPackPos, packSizes.ToArray(), folder, pw);
}
private long GetFolderPackStreamSize(int folderIndex, int streamIndex)
@@ -180,4 +179,4 @@ namespace SharpCompress.Common.SevenZip
return 0;
}
}
}
}

View File

@@ -1396,7 +1396,7 @@ namespace SharpCompress.Common.SevenZip
}
else
{
_stream = new MemoryStream();
_stream = Utility.RECYCLABLE_MEMORY_STREAM_MANAGER.GetStream();
}
_rem = _db._files[index].Size;
}
@@ -1449,14 +1449,13 @@ namespace SharpCompress.Common.SevenZip
CFolder folderInfo = db._folders[folderIndex];
int packStreamIndex = db._folders[folderIndex]._firstPackStreamId;
long folderStartPackPos = db.GetFolderStreamPos(folderInfo, 0);
var count = folderInfo._packStreams.Count;
long[] packSizes = new long[count];
for (int j = 0; j < count; j++)
List<long> packSizes = new List<long>();
for (int j = 0; j < folderInfo._packStreams.Count; j++)
{
packSizes[j] = db._packSizes[packStreamIndex + j];
packSizes.Add(db._packSizes[packStreamIndex + j]);
}
s = DecoderStreamHelper.CreateDecoderStream(_stream, folderStartPackPos, packSizes, folderInfo,
s = DecoderStreamHelper.CreateDecoderStream(_stream, folderStartPackPos, packSizes.ToArray(), folderInfo,
db.PasswordProvider);
_cachedStreams.Add(folderIndex, s);
}
@@ -1554,16 +1553,15 @@ namespace SharpCompress.Common.SevenZip
int packStreamIndex = db._folders[folderIndex]._firstPackStreamId;
long folderStartPackPos = db.GetFolderStreamPos(folderInfo, 0);
var count = folderInfo._packStreams.Count;
long[] packSizes = new long[count];
for (int j = 0; j < count; j++)
List<long> packSizes = new List<long>();
for (int j = 0; j < folderInfo._packStreams.Count; j++)
{
packSizes[j] = db._packSizes[packStreamIndex + j];
packSizes.Add(db._packSizes[packStreamIndex + j]);
}
// TODO: If the decoding fails the last file may be extracted incompletely. Delete it?
Stream s = DecoderStreamHelper.CreateDecoderStream(_stream, folderStartPackPos, packSizes,
Stream s = DecoderStreamHelper.CreateDecoderStream(_stream, folderStartPackPos, packSizes.ToArray(),
folderInfo, db.PasswordProvider);
byte[] buffer = new byte[4 << 10];
for (;;)

View File

@@ -1,4 +1,5 @@
using System.IO;
using SharpCompress.Archives;
using SharpCompress.Readers;
namespace SharpCompress.Common.SevenZip

View File

@@ -1,7 +1,7 @@
using System;
using System.Buffers.Binary;
using System.IO;
using System.Text;
using SharpCompress.Converters;
namespace SharpCompress.Common.Tar.Headers
{
@@ -49,7 +49,7 @@ namespace SharpCompress.Common.Tar.Headers
}
else
{
WriteStringBytes(ArchiveEncoding.Encode(Name), buffer, 100);
WriteStringBytes(ArchiveEncoding.Encode(Name), buffer, 0, 100);
WriteOctalBytes(Size, buffer, 124, 12);
var time = (long)(LastModifiedTime.ToUniversalTime() - EPOCH).TotalSeconds;
WriteOctalBytes(time, buffer, 136, 12);
@@ -57,10 +57,11 @@ namespace SharpCompress.Common.Tar.Headers
if (Size >= 0x1FFFFFFFF)
{
Span<byte> bytes12 = stackalloc byte[12];
BinaryPrimitives.WriteInt64BigEndian(bytes12.Slice(4), Size);
byte[] bytes = DataConverter.BigEndian.GetBytes(Size);
var bytes12 = new byte[12];
bytes.CopyTo(bytes12, 12 - bytes.Length);
bytes12[0] |= 0x80;
bytes12.CopyTo(buffer.AsSpan(124));
bytes12.CopyTo(buffer, 124);
}
}
@@ -175,9 +176,8 @@ namespace SharpCompress.Common.Tar.Headers
{
if ((buffer[124] & 0x80) == 0x80) // if size in binary
{
return BinaryPrimitives.ReadInt64BigEndian(buffer.AsSpan(0x80));
return DataConverter.BigEndian.GetInt64(buffer, 0x80);
}
return ReadAsciiInt64Base8(buffer, 124, 11);
}
@@ -192,11 +192,15 @@ namespace SharpCompress.Common.Tar.Headers
return buffer;
}
private static void WriteStringBytes(ReadOnlySpan<byte> name, Span<byte> buffer, int length)
private static void WriteStringBytes(byte[] name, byte[] buffer, int offset, int length)
{
name.CopyTo(buffer);
int i = Math.Min(length, name.Length);
buffer.Slice(i, length - i).Clear();
Buffer.BlockCopy(name, 0, buffer, offset, i);
// if Span<byte>.Fill can be used, it is more efficient
for (; i < length; ++i)
{
buffer[offset + i] = 0;
}
}
private static void WriteStringBytes(string name, byte[] buffer, int offset, int length)
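The TarHeader hunk writes over-large sizes with the GNU "base-256" extension: when the value will not fit in the 11-digit octal field, it is stored big-endian across the 12-byte size field at offset 124 with the top bit of the first byte set, which is what the (buffer[124] & 0x80) check reads back. A minimal sketch of the span-based write, assuming a standard 512-byte header block:

using System;
using System.Buffers.Binary;

static class TarSizeField
{
    public static void WriteBinarySize(long size, byte[] header512)
    {
        Span<byte> field = stackalloc byte[12];
        // Big-endian value in the low 8 bytes; the leading 4 bytes stay zero.
        BinaryPrimitives.WriteInt64BigEndian(field.Slice(4), size);
        field[0] |= 0x80;                       // flag: size is binary, not octal
        field.CopyTo(header512.AsSpan(124));    // the size field starts at offset 124
    }
}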

View File

@@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
using System.Text;
namespace SharpCompress.Common.Tar
{

View File

@@ -2,6 +2,7 @@
using System.IO;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
using System.Text;
namespace SharpCompress.Common.Tar
{

View File

@@ -1,5 +1,6 @@
using System.IO;
using System.Linq;
using System.Text;
namespace SharpCompress.Common.Zip.Headers
{

View File

@@ -1,4 +1,5 @@
using System.IO;
using System;
using System.IO;
namespace SharpCompress.Common.Zip.Headers
{

View File

@@ -1,5 +1,6 @@
using System.IO;
using System.Linq;
using System.Text;
namespace SharpCompress.Common.Zip.Headers
{

View File

@@ -1,6 +1,6 @@
using System;
using System.Buffers.Binary;
using System.Text;
using SharpCompress.Converters;
namespace SharpCompress.Common.Zip.Headers
{
@@ -76,34 +76,34 @@ namespace SharpCompress.Common.Zip.Headers
switch (DataBytes.Length)
{
case 4:
VolumeNumber = BinaryPrimitives.ReadUInt32LittleEndian(DataBytes);
VolumeNumber = DataConverter.LittleEndian.GetUInt32(DataBytes, 0);
return;
case 8:
RelativeOffsetOfEntryHeader = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
RelativeOffsetOfEntryHeader = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 0);
return;
case 12:
RelativeOffsetOfEntryHeader = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
VolumeNumber = BinaryPrimitives.ReadUInt32LittleEndian(DataBytes.AsSpan(8));
RelativeOffsetOfEntryHeader = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 0);
VolumeNumber = DataConverter.LittleEndian.GetUInt32(DataBytes, 8);
return;
case 16:
UncompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
CompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(8));
UncompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 0);
CompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 8);
return;
case 20:
UncompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
CompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(8));
VolumeNumber = BinaryPrimitives.ReadUInt32LittleEndian(DataBytes.AsSpan(16));
UncompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 0);
CompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 8);
VolumeNumber = DataConverter.LittleEndian.GetUInt32(DataBytes, 16);
return;
case 24:
UncompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
CompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(8));
RelativeOffsetOfEntryHeader = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(16));
UncompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 0);
CompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 8);
RelativeOffsetOfEntryHeader = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 16);
return;
case 28:
UncompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
CompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(8));
RelativeOffsetOfEntryHeader = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(16));
VolumeNumber = BinaryPrimitives.ReadUInt32LittleEndian(DataBytes.AsSpan(24));
UncompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 0);
CompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 8);
RelativeOffsetOfEntryHeader = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 16);
VolumeNumber = DataConverter.LittleEndian.GetUInt32(DataBytes, 24);
return;
default:
throw new ArchiveException("Unexpected size of of Zip64 extended information extra field");
@@ -132,7 +132,7 @@ namespace SharpCompress.Common.Zip.Headers
case ExtraDataType.Zip64ExtendedInformationExtraField:
return new Zip64ExtendedInformationExtraField
(
type,
type,
length,
extraData
);
@@ -146,4 +146,4 @@ namespace SharpCompress.Common.Zip.Headers
}
}
}
}
}

View File

@@ -1,4 +1,5 @@
using System.IO;
using System;
using System.IO;
namespace SharpCompress.Common.Zip.Headers
{

View File

@@ -1,7 +1,8 @@
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.Text;
using SharpCompress.Converters;
namespace SharpCompress.Common.Zip.Headers
{
@@ -29,7 +30,7 @@ namespace SharpCompress.Common.Zip.Headers
&& Name.EndsWith("\\");
}
}
internal Stream PackedStream { get; set; }
internal ArchiveEncoding ArchiveEncoding { get; }
@@ -64,7 +65,7 @@ namespace SharpCompress.Common.Zip.Headers
return encryptionData;
}
internal WinzipAesEncryptionData WinzipAesEncryptionData { get; set; }
internal ushort LastModifiedDate { get; set; }
@@ -77,13 +78,13 @@ namespace SharpCompress.Common.Zip.Headers
{
for (int i = 0; i < extra.Length - 4;)
{
ExtraDataType type = (ExtraDataType)BinaryPrimitives.ReadUInt16LittleEndian(extra.AsSpan(i));
ExtraDataType type = (ExtraDataType)DataConverter.LittleEndian.GetUInt16(extra, i);
if (!Enum.IsDefined(typeof(ExtraDataType), type))
{
type = ExtraDataType.NotImplementedExtraData;
}
ushort length = BinaryPrimitives.ReadUInt16LittleEndian(extra.AsSpan(i + 2));
ushort length = DataConverter.LittleEndian.GetUInt16(extra, i + 2);
// 7zip has this same kind of check to ignore extras blocks that don't conform to the standard 2-byte ID, 2-byte length, N-byte value.
// CPP/7Zip/Zip/ZipIn.cpp: CInArchive::ReadExtra

View File

@@ -1,4 +1,5 @@
using System;
using System.Text;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors.Deflate;

View File

@@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
using System.Text;
namespace SharpCompress.Common.Zip
{
@@ -34,9 +35,7 @@ namespace SharpCompress.Common.Zip
stream.Seek(zip64Locator.RelativeOffsetOfTheEndOfDirectoryRecord, SeekOrigin.Begin);
uint zip64Signature = reader.ReadUInt32();
if (zip64Signature != ZIP64_END_OF_CENTRAL_DIRECTORY)
{
throw new ArchiveException("Failed to locate the Zip64 Header");
}
var zip64Entry = new Zip64DirectoryEndHeader();
zip64Entry.Read(reader);
@@ -56,9 +55,7 @@ namespace SharpCompress.Common.Zip
position = stream.Position;
if (nextHeader == null)
{
yield break;
}
if (nextHeader is DirectoryEntryHeader entryHeader)
{

View File

@@ -2,6 +2,7 @@
using System.IO;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
using System.Text;
namespace SharpCompress.Common.Zip
{
@@ -53,30 +54,14 @@ namespace SharpCompress.Common.Zip
//entry could be zero bytes so we need to know that.
if (header.ZipHeaderType == ZipHeaderType.LocalEntry)
{
var local_header = ((LocalEntryHeader)header);
// If we have CompressedSize, there is data to be read
if( local_header.CompressedSize > 0 )
bool isRecording = rewindableStream.IsRecording;
if (!isRecording)
{
header.HasData = true;
} // Check if zip is streaming ( Length is 0 and is declared in PostDataDescriptor )
else if( local_header.Flags.HasFlag(HeaderFlags.UsePostDataDescriptor) )
{
bool isRecording = rewindableStream.IsRecording;
if (!isRecording)
{
rewindableStream.StartRecording();
}
uint nextHeaderBytes = reader.ReadUInt32();
// Check if next data is PostDataDescriptor, streamed file with 0 length
header.HasData = !IsHeader(nextHeaderBytes);
rewindableStream.Rewind(!isRecording);
}
else // We are not streaming and compressed size is 0, we have no data
{
header.HasData = false;
rewindableStream.StartRecording();
}
uint nextHeaderBytes = reader.ReadUInt32();
header.HasData = !IsHeader(nextHeaderBytes);
rewindableStream.Rewind(!isRecording);
}
yield return header;
}

View File

@@ -1,7 +1,7 @@
using System;
using System.Buffers.Binary;
using System.IO;
using System.Security.Cryptography;
using SharpCompress.Converters;
namespace SharpCompress.Common.Zip
{
@@ -118,7 +118,7 @@ namespace SharpCompress.Common.Zip
: bytesRemaining;
// update the counter
BinaryPrimitives.WriteInt32LittleEndian(_counter, _nonce++);
DataConverter.LittleEndian.PutBytes(_counter, 0, _nonce++);
// Determine if this is the final block
if ((bytesToRead == bytesRemaining) && (_totalBytesLeftToRead == 0))

View File

@@ -1,6 +1,6 @@
using System;
using System.Buffers.Binary;
using System.Security.Cryptography;
using SharpCompress.Converters;
namespace SharpCompress.Common.Zip
{
@@ -62,10 +62,10 @@ namespace SharpCompress.Common.Zip
IvBytes = rfc2898.GetBytes(KeySizeInBytes);
_generatedVerifyValue = rfc2898.GetBytes(2);
short verify = BinaryPrimitives.ReadInt16LittleEndian(_passwordVerifyValue);
short verify = DataConverter.LittleEndian.GetInt16(_passwordVerifyValue, 0);
if (_password != null)
{
short generated = BinaryPrimitives.ReadInt16LittleEndian(_generatedVerifyValue);
short generated = DataConverter.LittleEndian.GetInt16(_generatedVerifyValue, 0);
if (verify != generated)
{
throw new InvalidFormatException("bad password");

View File

@@ -1,5 +1,4 @@
using System;
using System.Buffers.Binary;
using System.IO;
using System.Linq;
using SharpCompress.Common.Zip.Headers;
@@ -9,6 +8,7 @@ using SharpCompress.Compressors.Deflate;
using SharpCompress.Compressors.Deflate64;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.PPMd;
using SharpCompress.Converters;
using SharpCompress.IO;
namespace SharpCompress.Common.Zip
@@ -108,19 +108,19 @@ namespace SharpCompress.Common.Zip
{
throw new InvalidFormatException("Winzip data length is not 7.");
}
ushort compressedMethod = BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes);
ushort compressedMethod = DataConverter.LittleEndian.GetUInt16(data.DataBytes, 0);
if (compressedMethod != 0x01 && compressedMethod != 0x02)
{
throw new InvalidFormatException("Unexpected vendor version number for WinZip AES metadata");
}
ushort vendorId = BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes.AsSpan(2));
ushort vendorId = DataConverter.LittleEndian.GetUInt16(data.DataBytes, 2);
if (vendorId != 0x4541)
{
throw new InvalidFormatException("Unexpected vendor ID for WinZip AES metadata");
}
return CreateDecompressionStream(stream, (ZipCompressionMethod)BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes.AsSpan(5)));
return CreateDecompressionStream(stream, (ZipCompressionMethod)DataConverter.LittleEndian.GetUInt16(data.DataBytes, 5));
}
default:
{
@@ -142,7 +142,7 @@ namespace SharpCompress.Common.Zip
&& FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor))
|| Header.IsZip64)
{
plainStream = new NonDisposingStream(plainStream); //make sure AES doesn't close
plainStream = new NonDisposingStream(plainStream); //make sure AES doesn't close
}
else
{
@@ -182,4 +182,4 @@ namespace SharpCompress.Common.Zip
return plainStream;
}
}
}
}

View File

@@ -3,6 +3,7 @@ using System.IO;
using System.Linq;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
using System.Text;
namespace SharpCompress.Common.Zip
{

View File

@@ -65,16 +65,16 @@ namespace SharpCompress.Compressors.ADC
}
}
private static int GetOffset(ReadOnlySpan<byte> chunk)
private static int GetOffset(byte[] chunk, int position)
{
switch (GetChunkType(chunk[0]))
switch (GetChunkType(chunk[position]))
{
case PLAIN:
return 0;
case TWO_BYTE:
return ((chunk[0] & 0x03) << 8) + chunk[1];
return ((chunk[position] & 0x03) << 8) + chunk[position + 1];
case THREE_BYTE:
return (chunk[1] << 8) + chunk[2];
return (chunk[position + 1] << 8) + chunk[position + 2];
default:
return -1;
}
@@ -116,7 +116,7 @@ namespace SharpCompress.Compressors.ADC
byte[] buffer = new byte[bufferSize];
int outPosition = 0;
bool full = false;
Span<byte> temp = stackalloc byte[3];
MemoryStream tempMs;
while (position < input.Length)
{
@@ -142,10 +142,11 @@ namespace SharpCompress.Compressors.ADC
position += chunkSize + 1;
break;
case TWO_BYTE:
tempMs = Utility.RECYCLABLE_MEMORY_STREAM_MANAGER.GetStream();
chunkSize = GetChunkSize((byte)readByte);
temp[0] = (byte)readByte;
temp[1] = (byte)input.ReadByte();
offset = GetOffset(temp);
tempMs.WriteByte((byte)readByte);
tempMs.WriteByte((byte)input.ReadByte());
offset = GetOffset(tempMs.ToArray(), 0);
if (outPosition + chunkSize > bufferSize)
{
full = true;
@@ -172,11 +173,12 @@ namespace SharpCompress.Compressors.ADC
}
break;
case THREE_BYTE:
tempMs = Utility.RECYCLABLE_MEMORY_STREAM_MANAGER.GetStream();
chunkSize = GetChunkSize((byte)readByte);
temp[0] = (byte)readByte;
temp[1] = (byte)input.ReadByte();
temp[2] = (byte)input.ReadByte();
offset = GetOffset(temp);
tempMs.WriteByte((byte)readByte);
tempMs.WriteByte((byte)input.ReadByte());
tempMs.WriteByte((byte)input.ReadByte());
offset = GetOffset(tempMs.ToArray(), 0);
if (outPosition + chunkSize > bufferSize)
{
full = true;
@@ -211,8 +213,8 @@ namespace SharpCompress.Compressors.ADC
}
output = new byte[outPosition];
Array.Copy(buffer, output, outPosition);
Array.Copy(buffer, 0, output, 0, outPosition);
return position - start;
}
}
}
}

View File

@@ -27,8 +27,9 @@
// ------------------------------------------------------------------
using System;
using System.Buffers.Binary;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Converters;
using System.Text;
namespace SharpCompress.Compressors.Deflate
@@ -372,16 +373,17 @@ namespace SharpCompress.Compressors.Deflate
{
return;
}
if (_fileName.Contains("/"))
if (_fileName.IndexOf("/") != -1)
{
_fileName = _fileName.Replace('/', '\\');
_fileName = _fileName.Replace("/", "\\");
}
if (_fileName.EndsWith("\\"))
{
throw new InvalidOperationException("Illegal filename");
}
if (_fileName.Contains("\\"))
var index = _fileName.IndexOf("\\");
if (index != -1)
{
// trim any leading path
int length = _fileName.Length;
@@ -440,7 +442,7 @@ namespace SharpCompress.Compressors.Deflate
}
TimeSpan delta = LastModified.Value - UNIX_EPOCH;
var timet = (Int32)delta.TotalSeconds;
BinaryPrimitives.WriteInt32LittleEndian(header.AsSpan(i), timet);
DataConverter.LittleEndian.PutBytes(header, i, timet);
i += 4;
// xflg
@@ -474,4 +476,4 @@ namespace SharpCompress.Compressors.Deflate
return header.Length; // bytes written
}
}
}
}

View File

@@ -25,10 +25,11 @@
// ------------------------------------------------------------------
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Converters;
using System.Text;
namespace SharpCompress.Compressors.Deflate
@@ -243,12 +244,10 @@ namespace SharpCompress.Compressors.Deflate
if (_wantCompress)
{
// Emit the GZIP trailer: CRC32 and size mod 2^32
byte[] intBuf = new byte[4];
BinaryPrimitives.WriteInt32LittleEndian(intBuf, crc.Crc32Result);
_stream.Write(intBuf, 0, 4);
int c1 = crc.Crc32Result;
_stream.Write(DataConverter.LittleEndian.GetBytes(c1), 0, 4);
int c2 = (Int32)(crc.TotalBytesRead & 0x00000000FFFFFFFF);
BinaryPrimitives.WriteInt32LittleEndian(intBuf, c2);
_stream.Write(intBuf, 0, 4);
_stream.Write(DataConverter.LittleEndian.GetBytes(c2), 0, 4);
}
else
{
@@ -294,9 +293,9 @@ namespace SharpCompress.Compressors.Deflate
Array.Copy(_z.InputBuffer, _z.NextIn, trailer, 0, trailer.Length);
}
Int32 crc32_expected = BinaryPrimitives.ReadInt32LittleEndian(trailer);
Int32 crc32_expected = DataConverter.LittleEndian.GetInt32(trailer, 0);
Int32 crc32_actual = crc.Crc32Result;
Int32 isize_expected = BinaryPrimitives.ReadInt32LittleEndian(trailer.AsSpan(4));
Int32 isize_expected = DataConverter.LittleEndian.GetInt32(trailer, 4);
Int32 isize_actual = (Int32)(_z.TotalBytesOut & 0x00000000FFFFFFFF);
if (crc32_actual != crc32_expected)
@@ -447,7 +446,7 @@ namespace SharpCompress.Compressors.Deflate
throw new ZlibException("Bad GZIP header.");
}
Int32 timet = BinaryPrimitives.ReadInt32LittleEndian(header.AsSpan(4));
Int32 timet = DataConverter.LittleEndian.GetInt32(header, 4);
_GzipMtime = TarHeader.EPOCH.AddSeconds(timet);
totalBytesRead += n;
if ((header[3] & 0x04) == 0x04)
@@ -648,4 +647,4 @@ namespace SharpCompress.Compressors.Deflate
Undefined
}
}
}
}

View File

@@ -3,6 +3,7 @@
// See the LICENSE file in the project root for more information.
using SharpCompress.Common.Zip;
using SharpCompress.Compressors.Deflate;
using System;
using System.Diagnostics;
using System.IO;
@@ -22,19 +23,11 @@ namespace SharpCompress.Compressors.Deflate64
public Deflate64Stream(Stream stream, CompressionMode mode)
{
if (stream == null)
{
throw new ArgumentNullException(nameof(stream));
}
if (mode != CompressionMode.Decompress)
{
throw new NotImplementedException("Deflate64: this implementation only supports decompression");
}
if (!stream.CanRead)
{
throw new ArgumentException("Deflate64: input stream is not readable", nameof(stream));
}
InitializeInflater(stream, ZipCompressionMethod.Deflate64);
}
@@ -47,9 +40,7 @@ namespace SharpCompress.Compressors.Deflate64
Debug.Assert(stream != null);
Debug.Assert(method == ZipCompressionMethod.Deflate || method == ZipCompressionMethod.Deflate64);
if (!stream.CanRead)
{
throw new ArgumentException("Deflate64: input stream is not readable", nameof(stream));
}
_inflater = new InflaterManaged(method == ZipCompressionMethod.Deflate64);
@@ -161,32 +152,22 @@ namespace SharpCompress.Compressors.Deflate64
private void ValidateParameters(byte[] array, int offset, int count)
{
if (array == null)
{
throw new ArgumentNullException(nameof(array));
}
if (offset < 0)
{
throw new ArgumentOutOfRangeException(nameof(offset));
}
if (count < 0)
{
throw new ArgumentOutOfRangeException(nameof(count));
}
if (array.Length - offset < count)
{
throw new ArgumentException("Deflate64: invalid offset/count combination");
}
}
private void EnsureNotDisposed()
{
if (_stream == null)
{
ThrowStreamClosedException();
}
}
[MethodImpl(MethodImplOptions.NoInlining)]
@@ -198,9 +179,7 @@ namespace SharpCompress.Compressors.Deflate64
private void EnsureDecompressionMode()
{
if (_mode != CompressionMode.Decompress)
{
ThrowCannotReadFromDeflateManagedStreamException();
}
}
[MethodImpl(MethodImplOptions.NoInlining)]
@@ -212,9 +191,7 @@ namespace SharpCompress.Compressors.Deflate64
private void EnsureCompressionMode()
{
if (_mode != CompressionMode.Compress)
{
ThrowCannotWriteToDeflateManagedStreamException();
}
}
[MethodImpl(MethodImplOptions.NoInlining)]
@@ -232,14 +209,10 @@ namespace SharpCompress.Compressors.Deflate64
private void PurgeBuffers(bool disposing)
{
if (!disposing)
{
return;
}
if (_stream == null)
{
return;
}
Flush();
}

View File

@@ -208,9 +208,7 @@ namespace SharpCompress.Compressors.Deflate64
for (code = 0; code < 16; code++)
{
for (int n = 0; n < (1 << EXTRA_DISTANCE_BITS[code]); n++)
{
result[dist++] = (byte)code;
}
}
dist >>= 7; // from now on, all distances are divided by 128
@@ -218,9 +216,7 @@ namespace SharpCompress.Compressors.Deflate64
for (; code < NUM_DIST_BASE_CODES; code++)
{
for (int n = 0; n < (1 << (EXTRA_DISTANCE_BITS[code] - 7)); n++)
{
result[256 + dist++] = (byte)code;
}
}
return result;

View File

@@ -82,24 +82,16 @@ namespace SharpCompress.Compressors.Deflate64
{
byte[] literalTreeLength = new byte[MAX_LITERAL_TREE_ELEMENTS];
for (int i = 0; i <= 143; i++)
{
literalTreeLength[i] = 8;
}
for (int i = 144; i <= 255; i++)
{
literalTreeLength[i] = 9;
}
for (int i = 256; i <= 279; i++)
{
literalTreeLength[i] = 7;
}
for (int i = 280; i <= 287; i++)
{
literalTreeLength[i] = 8;
}
return literalTreeLength;
}
@@ -285,14 +277,9 @@ namespace SharpCompress.Compressors.Deflate64
{
symbol = -symbol;
if ((bitBuffer & mask) == 0)
{
symbol = _left[symbol];
}
else
{
symbol = _right[symbol];
}
mask <<= 1;
} while (symbol < 0);
}

View File

@@ -37,7 +37,7 @@ namespace SharpCompress.Compressors.Deflate64
// const tables used in decoding:
// Extra bits for length code 257 - 285.
private static ReadOnlySpan<byte> S_EXTRA_LENGTH_BITS => new byte[]
private static readonly byte[] S_EXTRA_LENGTH_BITS =
{ 0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,16 };
// The base length for length code 257 - 285.
@@ -51,9 +51,9 @@ namespace SharpCompress.Compressors.Deflate64
{ 1,2,3,4,5,7,9,13,17,25,33,49,65,97,129,193,257,385,513,769,1025,1537,2049,3073,4097,6145,8193,12289,16385,24577,32769,49153 };
// code lengths for code length alphabet is stored in following order
private static ReadOnlySpan<byte> S_CODE_ORDER => new byte[] { 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 };
private static readonly byte[] S_CODE_ORDER = { 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 };
private static ReadOnlySpan<byte> S_STATIC_DISTANCE_TREE_TABLE => new byte[]
private static readonly byte[] S_STATIC_DISTANCE_TREE_TABLE =
{
0x00,0x10,0x08,0x18,0x04,0x14,0x0c,0x1c,0x02,0x12,0x0a,0x1a,
0x06,0x16,0x0e,0x1e,0x01,0x11,0x09,0x19,0x05,0x15,0x0d,0x1d,
@@ -220,9 +220,7 @@ namespace SharpCompress.Compressors.Deflate64
// reading bfinal bit
// Need 1 bit
if (!_input.EnsureBitsAvailable(1))
{
return false;
}
_bfinal = _input.GetBits(1);
_state = InflaterState.ReadingBType;
@@ -720,7 +718,7 @@ namespace SharpCompress.Compressors.Deflate64
byte[] distanceTreeCodeLength = new byte[HuffmanTree.MAX_DIST_TREE_ELEMENTS];
// Create literal and distance tables
Array.Copy(_codeList, literalTreeCodeLength, _literalLengthCodeCount);
Array.Copy(_codeList, 0, literalTreeCodeLength, 0, _literalLengthCodeCount);
Array.Copy(_codeList, _literalLengthCodeCount, distanceTreeCodeLength, 0, _distanceCodeCount);
// Make sure there is an end-of-block code, otherwise how could we ever end?

View File

@@ -30,9 +30,7 @@ namespace SharpCompress.Compressors.LZMA
mLimit = limit;
if (((uint) input.Length & 15) != 0)
{
throw new NotSupportedException("AES decoder does not support padding.");
}
int numCyclesPower;
byte[] salt, seed;
@@ -92,14 +90,10 @@ namespace SharpCompress.Compressors.LZMA
{
if (count == 0
|| mWritten == mLimit)
{
return 0;
}
if (mUnderflow > 0)
{
return HandleUnderflow(buffer, offset, count);
}
// Need at least 16 bytes to proceed.
if (mEnding - mOffset < 16)
@@ -126,22 +120,16 @@ namespace SharpCompress.Compressors.LZMA
// Currently this is handled by forcing an underflow if
// the stream length is not a multiple of the block size.
if (count > mLimit - mWritten)
{
count = (int) (mLimit - mWritten);
}
// We cannot transform less than 16 bytes into the target buffer,
// but we also cannot return zero, so we need to handle this.
// We transform the data locally and use our own buffer as cache.
if (count < 16)
{
return HandleUnderflow(buffer, offset, count);
}
if (count > mEnding - mOffset)
{
count = mEnding - mOffset;
}
// Otherwise we transform directly into the target buffer.
int processed = mDecoder.TransformBlock(mBuffer, mOffset, count & ~15, buffer, offset);
@@ -169,34 +157,24 @@ namespace SharpCompress.Compressors.LZMA
int saltSize = (bt >> 7) & 1;
int ivSize = (bt >> 6) & 1;
if (info.Length == 1)
{
throw new InvalidOperationException();
}
byte bt2 = info[1];
saltSize += (bt2 >> 4);
ivSize += (bt2 & 15);
if (info.Length < 2 + saltSize + ivSize)
{
throw new InvalidOperationException();
}
salt = new byte[saltSize];
for (int i = 0; i < saltSize; i++)
{
salt[i] = info[i + 2];
}
iv = new byte[16];
for (int i = 0; i < ivSize; i++)
{
iv[i] = info[i + saltSize + 2];
}
if (numCyclesPower > 24)
{
throw new NotSupportedException();
}
}
private byte[] InitKey(int mNumCyclesPower, byte[] salt, byte[] pass)
@@ -207,20 +185,15 @@ namespace SharpCompress.Compressors.LZMA
int pos;
for (pos = 0; pos < salt.Length; pos++)
{
key[pos] = salt[pos];
}
for (int i = 0; i < pass.Length && pos < 32; i++)
{
key[pos++] = pass[i];
}
return key;
}
else
{
#if NETSTANDARD1_3 || NETSTANDARD2_0
#if NETSTANDARD1_4 || NETSTANDARD2_0
using (IncrementalHash sha = IncrementalHash.CreateHash(HashAlgorithmName.SHA256))
{
byte[] counter = new byte[8];
@@ -234,12 +207,8 @@ namespace SharpCompress.Compressors.LZMA
// This mirrors the counter so we don't have to convert long to byte[] each round.
// (It also ensures the counter is little endian, which BitConverter does not.)
for (int i = 0; i < 8; i++)
{
if (++counter[i] != 0)
{
break;
}
}
}
return sha.GetHashAndReset();
}
@@ -257,12 +226,8 @@ namespace SharpCompress.Compressors.LZMA
// This mirrors the counter so we don't have to convert long to byte[] each round.
// (It also ensures the counter is little endian, which BitConverter does not.)
for (int i = 0; i < 8; i++)
{
if (++counter[i] != 0)
{
break;
}
}
}
sha.TransformFinalBlock(counter, 0, 0);
@@ -283,9 +248,7 @@ namespace SharpCompress.Compressors.LZMA
}
if (count > mUnderflow)
{
count = mUnderflow;
}
Buffer.BlockCopy(mBuffer, mOffset, buffer, offset, count);
mWritten += count;

View File

@@ -1,6 +1,6 @@
using System;
using System.Buffers.Binary;
using System.IO;
using SharpCompress.Converters;
using SharpCompress.Crypto;
using SharpCompress.IO;
@@ -58,17 +58,16 @@ namespace SharpCompress.Compressors.LZMA
crc32Stream.WrappedStream.Dispose();
crc32Stream.Dispose();
var compressedCount = _countingWritableSubStream.Count;
var bytes = DataConverter.LittleEndian.GetBytes(crc32Stream.Crc);
_countingWritableSubStream.Write(bytes, 0, bytes.Length);
byte[] intBuf = new byte[8];
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, crc32Stream.Crc);
_countingWritableSubStream.Write(intBuf, 0, 4);
BinaryPrimitives.WriteInt64LittleEndian(intBuf, _writeCount);
_countingWritableSubStream.Write(intBuf, 0, 8);
bytes = DataConverter.LittleEndian.GetBytes(_writeCount);
_countingWritableSubStream.Write(bytes, 0, bytes.Length);
//total with headers
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, compressedCount + 6 + 20);
_countingWritableSubStream.Write(intBuf, 0, 8);
bytes = DataConverter.LittleEndian.GetBytes(compressedCount + 6 + 20);
_countingWritableSubStream.Write(bytes, 0, bytes.Length);
}
_finished = true;
}
@@ -102,7 +101,7 @@ namespace SharpCompress.Compressors.LZMA
{
_stream.Flush();
}
// TODO: Both Length and Position are sometimes feasible, but would require
// reading the output length when we initialize.
public override long Length => throw new NotImplementedException();

View File

@@ -996,7 +996,7 @@ namespace SharpCompress.Compressors.LZMA
}
}
UInt32 startLen = 2; // speed optimization
UInt32 startLen = 2; // speed optimization
for (UInt32 repIndex = 0; repIndex < Base.K_NUM_REP_DISTANCES; repIndex++)
{
@@ -1571,17 +1571,12 @@ namespace SharpCompress.Compressors.LZMA
public void WriteCoderProperties(Stream outStream)
{
WriteCoderProperties(_properties);
outStream.Write(_properties, 0, K_PROP_SIZE);
}
public void WriteCoderProperties(Span<byte> span)
{
span[0] = (byte)((_posStateBits * 5 + _numLiteralPosStateBits) * 9 + _numLiteralContextBits);
_properties[0] = (Byte)((_posStateBits * 5 + _numLiteralPosStateBits) * 9 + _numLiteralContextBits);
for (int i = 0; i < 4; i++)
{
span[1 + i] = (byte)((_dictionarySize >> (8 * i)) & 0xFF);
_properties[1 + i] = (Byte)((_dictionarySize >> (8 * i)) & 0xFF);
}
outStream.Write(_properties, 0, K_PROP_SIZE);
}
private readonly UInt32[] _tempPrices = new UInt32[Base.K_NUM_FULL_DISTANCES];
@@ -1799,4 +1794,4 @@ namespace SharpCompress.Compressors.LZMA
_trainSize = trainSize;
}
}
}
}
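WriteCoderProperties in the Encoder hunk above emits the standard 5-byte LZMA properties block: one byte packing lc/lp/pb as (pb * 5 + lp) * 9 + lc, followed by the dictionary size as a 32-bit little-endian value. A minimal sketch of building that block (parameter names are illustrative):

static class LzmaProps
{
    // lc = literal context bits, lp = literal position bits, pb = position state bits.
    public static byte[] Encode(int lc, int lp, int pb, uint dictionarySize)
    {
        var props = new byte[5];
        props[0] = (byte)((pb * 5 + lp) * 9 + lc);
        for (int i = 0; i < 4; i++)
        {
            props[1 + i] = (byte)((dictionarySize >> (8 * i)) & 0xFF);  // little-endian dictionary size
        }
        return props;
    }
}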

View File

@@ -1,7 +1,7 @@
using System;
using System.Buffers.Binary;
using System.IO;
using SharpCompress.Compressors.LZMA.LZ;
using SharpCompress.Converters;
namespace SharpCompress.Compressors.LZMA
{
@@ -56,7 +56,7 @@ namespace SharpCompress.Compressors.LZMA
if (!isLzma2)
{
_dictionarySize = BinaryPrimitives.ReadInt32LittleEndian(properties.AsSpan(1));
_dictionarySize = DataConverter.LittleEndian.GetInt32(properties, 1);
_outWindow.Create(_dictionarySize);
if (presetDictionary != null)
{
@@ -107,9 +107,9 @@ namespace SharpCompress.Compressors.LZMA
_encoder = new Encoder();
_encoder.SetCoderProperties(properties._propIDs, properties._properties);
byte[] prop = new byte[5];
_encoder.WriteCoderProperties(prop);
Properties = prop;
MemoryStream propStream = new MemoryStream(5);
_encoder.WriteCoderProperties(propStream);
Properties = propStream.ToArray();
_encoder.SetStreams(null, outputStream, -1, -1);
if (presetDictionary != null)
@@ -315,4 +315,4 @@ namespace SharpCompress.Compressors.LZMA
public byte[] Properties { get; } = new byte[5];
}
}
}

View File

@@ -1,6 +1,6 @@
using System;
using System.Buffers.Binary;
using System.Text;
using SharpCompress.Converters;
namespace SharpCompress.Compressors.PPMd.H
{
@@ -19,11 +19,7 @@ namespace SharpCompress.Compressors.PPMd.H
{
}
internal int SummFreq
{
get => BinaryPrimitives.ReadInt16LittleEndian(Memory.AsSpan(Address)) & 0xffff;
set => BinaryPrimitives.WriteInt16LittleEndian(Memory.AsSpan(Address), (short)value);
}
internal int SummFreq { get => DataConverter.LittleEndian.GetInt16(Memory, Address) & 0xffff; set => DataConverter.LittleEndian.PutBytes(Memory, Address, (short)value); }
internal FreqData Initialize(byte[] mem)
{
@@ -32,12 +28,14 @@ namespace SharpCompress.Compressors.PPMd.H
internal void IncrementSummFreq(int dSummFreq)
{
SummFreq += (short)dSummFreq;
short summFreq = DataConverter.LittleEndian.GetInt16(Memory, Address);
summFreq += (short)dSummFreq;
DataConverter.LittleEndian.PutBytes(Memory, Address, summFreq);
}
internal int GetStats()
{
return BinaryPrimitives.ReadInt32LittleEndian(Memory.AsSpan(Address + 2));
return DataConverter.LittleEndian.GetInt32(Memory, Address + 2);
}
internal virtual void SetStats(State state)
@@ -47,7 +45,7 @@ namespace SharpCompress.Compressors.PPMd.H
internal void SetStats(int state)
{
BinaryPrimitives.WriteInt32LittleEndian(Memory.AsSpan(Address + 2), state);
DataConverter.LittleEndian.PutBytes(Memory, Address + 2, state);
}
public override String ToString()
@@ -66,4 +64,4 @@ namespace SharpCompress.Compressors.PPMd.H
return buffer.ToString();
}
}
}
}

View File

@@ -137,7 +137,7 @@ namespace SharpCompress.Compressors.PPMd.H
private void RestartModelRare()
{
new Span<int>(_charMask).Clear();
Utility.Fill(_charMask, 0);
SubAlloc.InitSubAllocator();
_initRl = -(_maxOrder < 12 ? _maxOrder : 12) - 1;
int addr = SubAlloc.AllocContext();
@@ -228,7 +228,7 @@ namespace SharpCompress.Compressors.PPMd.H
private void ClearMask()
{
_escCount = 1;
new Span<int>(_charMask).Clear();
Utility.Fill(_charMask, 0);
}
internal bool DecodeInit(IRarUnpack unpackRead, int escChar)
@@ -912,4 +912,4 @@ namespace SharpCompress.Compressors.PPMd.H
}
}
}
}
}

View File

@@ -1,6 +1,6 @@
using System;
using System.Buffers.Binary;
using System.Text;
using SharpCompress.Converters;
namespace SharpCompress.Compressors.PPMd.H
{
@@ -22,7 +22,7 @@ namespace SharpCompress.Compressors.PPMd.H
{
if (Memory != null)
{
_numStats = BinaryPrimitives.ReadInt16LittleEndian(Memory.AsSpan(Address)) & 0xffff;
_numStats = DataConverter.LittleEndian.GetInt16(Memory, Address) & 0xffff;
}
return _numStats;
}
@@ -32,7 +32,7 @@ namespace SharpCompress.Compressors.PPMd.H
_numStats = value & 0xffff;
if (Memory != null)
{
BinaryPrimitives.WriteInt16LittleEndian(Memory.AsSpan(Address), (short)value);
DataConverter.LittleEndian.PutBytes(Memory, Address, (short)value);
}
}
}
@@ -109,7 +109,7 @@ namespace SharpCompress.Compressors.PPMd.H
{
if (Memory != null)
{
_suffix = BinaryPrimitives.ReadInt32LittleEndian(Memory.AsSpan(Address + 8));
_suffix = DataConverter.LittleEndian.GetInt32(Memory, Address + 8);
}
return _suffix;
}
@@ -124,7 +124,7 @@ namespace SharpCompress.Compressors.PPMd.H
_suffix = suffix;
if (Memory != null)
{
BinaryPrimitives.WriteInt32LittleEndian(Memory.AsSpan(Address + 8), suffix);
DataConverter.LittleEndian.PutBytes(Memory, Address + 8, suffix);
}
}
@@ -307,7 +307,7 @@ namespace SharpCompress.Compressors.PPMd.H
// byte[] bytes = model.getSubAlloc().getHeap();
// int p1 = state1.Address;
// int p2 = state2.Address;
//
//
// for (int i = 0; i < StatePtr.size; i++) {
// byte temp = bytes[p1+i];
// bytes[p1+i] = bytes[p2+i];
@@ -564,4 +564,4 @@ namespace SharpCompress.Compressors.PPMd.H
UNION_SIZE = Math.Max(FreqData.SIZE, State.SIZE);
}
}
}
}

View File

@@ -1,5 +1,4 @@
using System;
using System.Buffers.Binary;
using SharpCompress.Converters;
namespace SharpCompress.Compressors.PPMd.H
{
@@ -22,7 +21,7 @@ namespace SharpCompress.Compressors.PPMd.H
{
if (Memory != null)
{
_stamp = BinaryPrimitives.ReadInt16LittleEndian(Memory.AsSpan(Address)) & 0xffff;
_stamp = DataConverter.LittleEndian.GetInt16(Memory, Address) & 0xffff;
}
return _stamp;
}
@@ -32,7 +31,7 @@ namespace SharpCompress.Compressors.PPMd.H
_stamp = value;
if (Memory != null)
{
BinaryPrimitives.WriteInt16LittleEndian(Memory.AsSpan(Address), (short)value);
DataConverter.LittleEndian.PutBytes(Memory, Address, (short)value);
}
}
}
@@ -64,7 +63,7 @@ namespace SharpCompress.Compressors.PPMd.H
{
if (Memory != null)
{
_next = BinaryPrimitives.ReadInt32LittleEndian(Memory.AsSpan(Address + 4));
_next = DataConverter.LittleEndian.GetInt32(Memory, Address + 4);
}
return _next;
}
@@ -79,7 +78,7 @@ namespace SharpCompress.Compressors.PPMd.H
_next = next;
if (Memory != null)
{
BinaryPrimitives.WriteInt32LittleEndian(Memory.AsSpan(Address + 4), next);
DataConverter.LittleEndian.PutBytes(Memory, Address + 4, next);
}
}
@@ -87,7 +86,7 @@ namespace SharpCompress.Compressors.PPMd.H
{
if (Memory != null)
{
_nu = BinaryPrimitives.ReadInt16LittleEndian(Memory.AsSpan(Address + 2)) & 0xffff;
_nu = DataConverter.LittleEndian.GetInt16(Memory, Address + 2) & 0xffff;
}
return _nu;
}
@@ -97,7 +96,7 @@ namespace SharpCompress.Compressors.PPMd.H
_nu = nu & 0xffff;
if (Memory != null)
{
BinaryPrimitives.WriteInt16LittleEndian(Memory.AsSpan(Address + 2), (short)nu);
DataConverter.LittleEndian.PutBytes(Memory, Address + 2, (short)nu);
}
}
@@ -105,7 +104,7 @@ namespace SharpCompress.Compressors.PPMd.H
{
if (Memory != null)
{
_prev = BinaryPrimitives.ReadInt32LittleEndian(Memory.AsSpan(Address + 8));
_prev = DataConverter.LittleEndian.GetInt32(Memory, Address + 8);
}
return _prev;
}
@@ -120,8 +119,8 @@ namespace SharpCompress.Compressors.PPMd.H
_prev = prev;
if (Memory != null)
{
BinaryPrimitives.WriteInt32LittleEndian(Memory.AsSpan(Address + 8), prev);
DataConverter.LittleEndian.PutBytes(Memory, Address + 8, prev);
}
}
}
}
}

View File

@@ -1,6 +1,5 @@
using System;
using System.Buffers.Binary;
using System.Text;
using SharpCompress.Converters;
namespace SharpCompress.Compressors.PPMd.H
{
@@ -19,7 +18,7 @@ namespace SharpCompress.Compressors.PPMd.H
{
if (Memory != null)
{
_next = BinaryPrimitives.ReadInt32LittleEndian(Memory.AsSpan(Address));
_next = DataConverter.LittleEndian.GetInt32(Memory, Address);
}
return _next;
}
@@ -34,7 +33,7 @@ namespace SharpCompress.Compressors.PPMd.H
_next = next;
if (Memory != null)
{
BinaryPrimitives.WriteInt32LittleEndian(Memory.AsSpan(Address), next);
DataConverter.LittleEndian.PutBytes(Memory, Address, next);
}
}
@@ -52,4 +51,4 @@ namespace SharpCompress.Compressors.PPMd.H
return buffer.ToString();
}
}
}
}

View File

@@ -1,6 +1,6 @@
using System;
using System.Buffers.Binary;
using System.Text;
using SharpCompress.Converters;
namespace SharpCompress.Compressors.PPMd.H
{
@@ -29,7 +29,7 @@ namespace SharpCompress.Compressors.PPMd.H
internal int GetSuccessor()
{
return BinaryPrimitives.ReadInt32LittleEndian(Memory.AsSpan(Address + 2));
return DataConverter.LittleEndian.GetInt32(Memory, Address + 2);
}
internal void SetSuccessor(PpmContext successor)
@@ -39,7 +39,7 @@ namespace SharpCompress.Compressors.PPMd.H
internal void SetSuccessor(int successor)
{
BinaryPrimitives.WriteInt32LittleEndian(Memory.AsSpan(Address + 2), successor);
DataConverter.LittleEndian.PutBytes(Memory, Address + 2, successor);
}
internal void SetValues(StateRef state)
@@ -95,4 +95,4 @@ namespace SharpCompress.Compressors.PPMd.H
return buffer.ToString();
}
}
}
}

View File

@@ -166,7 +166,7 @@ namespace SharpCompress.Compressors.PPMd.H
_freeListPos = _heapStart + allocSize;
//UPGRADE_ISSUE: The following fragment of code could not be parsed and was not converted. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1156'"
//assert(realAllocSize - tempMemBlockPos == RarMemBlock.size): realAllocSize
//+ + tempMemBlockPos + + RarMemBlock.size;
// Init freeList
@@ -360,7 +360,7 @@ namespace SharpCompress.Compressors.PPMd.H
public virtual void InitSubAllocator()
{
int i, k;
new Span<byte>(_heap, _freeListPos, SizeOfFreeList()).Clear();
Utility.Fill(_heap, _freeListPos, _freeListPos + SizeOfFreeList(), (byte)0);
_pText = _heapStart;
@@ -448,4 +448,4 @@ namespace SharpCompress.Compressors.PPMd.H
UNIT_SIZE = Math.Max(PpmContext.SIZE, RarMemBlock.SIZE);
}
}
}
}

View File

@@ -58,7 +58,7 @@ namespace SharpCompress.Compressors.PPMd.I1
0x6051
};
private static ReadOnlySpan<byte> EXPONENTIAL_ESCAPES => new byte[] {25, 14, 9, 7, 5, 5, 4, 4, 4, 3, 3, 3, 2, 2, 2, 2};
private static readonly byte[] EXPONENTIAL_ESCAPES = {25, 14, 9, 7, 5, 5, 4, 4, 4, 3, 3, 3, 2, 2, 2, 2};
#region Public Methods
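
This hunk trades a readonly byte[] field for a static ReadOnlySpan<byte> property. A short sketch of the pattern with a made-up table; for byte data the C# compiler serves the span straight from the assembly's static data, so the property form avoids keeping (or re-creating) a heap array:

using System;

internal static class StaticSpanSketch
{
    // Compiler-recognized shape: a ReadOnlySpan<byte> returned from a new byte[] literal
    // is read from the assembly's static data, so no heap array is created per call.
    private static ReadOnlySpan<byte> EscapeTable => new byte[] { 25, 14, 9, 7, 5, 5, 4, 4 };

    // The field form allocates once but always lives on the managed heap.
    private static readonly byte[] EscapeTableArray = { 25, 14, 9, 7, 5, 5, 4, 4 };

    internal static void Main()
    {
        Console.WriteLine(EscapeTable[0]);      // 25
        Console.WriteLine(EscapeTableArray[0]); // 25
    }
}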

View File

@@ -1,6 +1,5 @@
using System;
using System.Buffers.Binary;
using SharpCompress.Compressors.PPMd.I1;
using SharpCompress.Converters;
namespace SharpCompress.Compressors.PPMd
{
@@ -26,7 +25,7 @@ namespace SharpCompress.Compressors.PPMd
ModelOrder = modelOrder;
RestorationMethod = modelRestorationMethod;
}
public int ModelOrder { get; }
public PpmdVersion Version { get; } = PpmdVersion.I1;
internal ModelRestorationMethod RestorationMethod { get; }
@@ -35,7 +34,7 @@ namespace SharpCompress.Compressors.PPMd
{
if (properties.Length == 2)
{
ushort props = BinaryPrimitives.ReadUInt16LittleEndian(properties);
ushort props = DataConverter.LittleEndian.GetUInt16(properties, 0);
AllocatorSize = (((props >> 4) & 0xff) + 1) << 20;
ModelOrder = (props & 0x0f) + 1;
RestorationMethod = (ModelRestorationMethod)(props >> 12);
@@ -43,7 +42,7 @@ namespace SharpCompress.Compressors.PPMd
else if (properties.Length == 5)
{
Version = PpmdVersion.H7Z;
AllocatorSize = BinaryPrimitives.ReadInt32LittleEndian(properties.AsSpan(1));
AllocatorSize = DataConverter.LittleEndian.GetInt32(properties, 1);
ModelOrder = properties[0];
}
}
@@ -65,16 +64,8 @@ namespace SharpCompress.Compressors.PPMd
}
}
public byte[] Properties
{
get
{
byte[] bytes = new byte[2];
BinaryPrimitives.WriteUInt16LittleEndian(
bytes,
(ushort)((ModelOrder - 1) + (((AllocatorSize >> 20) - 1) << 4) + ((ushort)RestorationMethod << 12)));
return bytes;
}
}
public byte[] Properties => DataConverter.LittleEndian.GetBytes(
(ushort)
((ModelOrder - 1) + (((AllocatorSize >> 20) - 1) << 4) + ((ushort)RestorationMethod << 12)));
}
}
}
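
The constructor branch and the rewritten Properties getter above pack model order, allocator size and restoration method into a single little-endian UInt16. A small round-trip sketch of that packing, using the exact bit layout from the hunk; the sample values are arbitrary:

using System;
using System.Buffers.Binary;

internal static class PpmdPropsSketch
{
    internal static void Main()
    {
        int modelOrder = 6;
        int allocatorSize = 16 << 20;   // 16 MiB
        int restorationMethod = 0;

        // Pack: low nibble = order-1, next 8 bits = (MiB-1), top 4 bits = restoration method.
        ushort props = (ushort)((modelOrder - 1)
                                + (((allocatorSize >> 20) - 1) << 4)
                                + (restorationMethod << 12));
        byte[] bytes = new byte[2];
        BinaryPrimitives.WriteUInt16LittleEndian(bytes, props);

        // Unpack, mirroring the two-byte branch of the constructor above.
        ushort read = BinaryPrimitives.ReadUInt16LittleEndian(bytes);
        int order = (read & 0x0f) + 1;
        int alloc = (((read >> 4) & 0xff) + 1) << 20;
        int method = read >> 12;

        Console.WriteLine(order == modelOrder && alloc == allocatorSize && method == restorationMethod); // True
    }
}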

View File

@@ -78,7 +78,10 @@ namespace SharpCompress.Compressors.PPMd
{
if (_compress)
{
_model.EncodeBlock(_stream, new MemoryStream(), true);
using (var stream = Utility.RECYCLABLE_MEMORY_STREAM_MANAGER.GetStream())
{
_model.EncodeBlock(_stream, stream, true);
}
}
}
base.Dispose(isDisposing);
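
This Dispose hunk swaps a throwaway new MemoryStream() for a stream rented from a shared RecyclableMemoryStreamManager (held on a Utility field elsewhere in the change). A minimal sketch of the Microsoft.IO.RecyclableMemoryStream usage, with a locally created manager standing in for that shared instance:

using System;
using Microsoft.IO;

internal static class RecyclableStreamSketch
{
    // In SharpCompress the manager is a single shared instance; creating one per use defeats pooling.
    private static readonly RecyclableMemoryStreamManager Manager = new RecyclableMemoryStreamManager();

    internal static void Main()
    {
        using (var stream = Manager.GetStream()) // rents a pooled buffer instead of allocating
        {
            stream.WriteByte(0x42);
            Console.WriteLine(stream.Length);    // 1
        }                                        // Dispose returns the buffer to the pool
    }
}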

View File

@@ -1,3 +1,4 @@
using System.IO;
using SharpCompress.Common;
using SharpCompress.Common.Rar.Headers;

View File

@@ -32,9 +32,9 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
}
}
public bool Suspended {
get => suspended;
set => suspended = value;
}
public int Char
@@ -139,12 +139,12 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
case 36: // alternative hash
Unpack29(fileHeader.IsSolid);
break;
case 50: // rar 5.x compression
Unpack5(fileHeader.IsSolid);
break;
default:
throw new InvalidFormatException("unknown rar compression version " + fileHeader.CompressionAlgorithm);
}
}
@@ -729,13 +729,13 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
if (!solid)
{
tablesRead = false;
new Span<int>(oldDist).Clear(); // memset(oldDist,0,sizeof(OldDist));
Utility.Fill(oldDist, 0); // memset(oldDist,0,sizeof(OldDist));
oldDistPtr = 0;
lastDist = 0;
lastLength = 0;
new Span<byte>(unpOldTable).Clear(); // memset(UnpOldTable,0,sizeof(UnpOldTable));
Utility.Fill(unpOldTable, (byte)0); // memset(UnpOldTable,0,sizeof(UnpOldTable));
unpPtr = 0;
wrPtr = 0;
@@ -837,7 +837,7 @@ WriteBorder=Math.Min(MaxWinSize,UNPACK_MAX_WRITE)&MaxWinMask;
if ((bitField & 0x4000) == 0)
{
new Span<byte>(unpOldTable).Clear(); // memset(UnpOldTable,0,sizeof(UnpOldTable));
Utility.Fill(unpOldTable, (byte)0); // memset(UnpOldTable,0,sizeof(UnpOldTable));
}
AddBits(2);
@@ -1109,7 +1109,7 @@ WriteBorder=Math.Min(MaxWinSize,UNPACK_MAX_WRITE)&MaxWinMask;
oldFilterLengths[FiltPos] = StackFilter.BlockLength;
// memset(StackFilter->Prg.InitR,0,sizeof(StackFilter->Prg.InitR));
new Span<int>(StackFilter.Program.InitR).Clear();
Utility.Fill(StackFilter.Program.InitR, 0);
StackFilter.Program.InitR[3] = RarVM.VM_GLOBALMEMADDR; // StackFilter->Prg.InitR[3]=VM_GLOBALMEMADDR;
StackFilter.Program.InitR[4] = StackFilter.BlockLength;
@@ -1267,4 +1267,4 @@ WriteBorder=Math.Min(MaxWinSize,UNPACK_MAX_WRITE)&MaxWinMask;
}
}
}
}
}
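
Several hunks in this file exchange the Utility.Fill helper for new Span<T>(array).Clear(), the span counterpart of the memset calls quoted in the comments. A short sketch of zeroing an array, or a slice of one, both for the zero case (Clear) and an arbitrary byte pattern (Fill); the arrays are illustrative:

using System;

internal static class ClearSketch
{
    internal static void Main()
    {
        int[] oldDist = { 1, 2, 3, 4 };

        // Span form: zeroes the whole array, like memset(oldDist, 0, sizeof(oldDist)).
        new Span<int>(oldDist).Clear();

        byte[] table = new byte[8];
        table[2] = 0xff;
        // A slice can be cleared the same way; Fill(value) handles non-zero patterns.
        new Span<byte>(table, 0, 4).Clear();
        new Span<byte>(table, 4, 4).Fill((byte)0x20);

        Console.WriteLine(string.Join(",", oldDist));    // 0,0,0,0
        Console.WriteLine(BitConverter.ToString(table)); // 00-00-00-00-20-20-20-20
    }
}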

View File

@@ -3,7 +3,7 @@
* Original author: Edmund Wagner
* Creation date: 21.06.2007
*
* the unrar licence applies to all junrar source and binary distributions
* you are not allowed to use this source to re-create the RAR compression algorithm
*/
@@ -652,9 +652,9 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
ChSetC[I] = ((~I + 1) & 0xff) << 8;
}
new Span<int>(NToPl).Clear(); // memset(NToPl,0,sizeof(NToPl));
new Span<int>(NToPlB).Clear(); // memset(NToPlB,0,sizeof(NToPlB));
new Span<int>(NToPlC).Clear(); // memset(NToPlC,0,sizeof(NToPlC));
Utility.Fill(NToPl, 0); // memset(NToPl,0,sizeof(NToPl));
Utility.Fill(NToPlB, 0); // memset(NToPlB,0,sizeof(NToPlB));
Utility.Fill(NToPlC, 0); // memset(NToPlC,0,sizeof(NToPlC));
corrHuff(ChSetB, NToPlB);
}
@@ -670,7 +670,7 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
// & ~0xff) | I;
}
}
new Span<int>(NumToPlace).Clear(); // memset(NumToPlace,0,sizeof(NToPl));
Utility.Fill(NumToPlace, 0); // memset(NumToPlace,0,sizeof(NToPl));
for (I = 6; I >= 0; I--)
{
NumToPlace[I] = (7 - I) * 32;
@@ -717,4 +717,4 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
wrPtr = unpPtr;
}
}
}
}

View File

@@ -3,7 +3,7 @@
* Original author: Edmund Wagner
* Creation date: 21.06.2007
*
* the unrar licence applies to all junrar source and binary distributions
* you are not allowed to use this source to re-create the RAR compression algorithm
*/
@@ -38,7 +38,7 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
56, 64, 80, 96, 112, 128, 160, 192, 224
};
private static ReadOnlySpan<byte> LBits => new byte[]
private static readonly byte[] LBits =
{
0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4,
4, 5, 5, 5, 5
@@ -263,7 +263,7 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
if (0 == (BitField & 0x4000))
{
// memset(UnpOldTable20,0,sizeof(UnpOldTable20));
new Span<byte>(UnpOldTable20).Clear();
Utility.Fill(UnpOldTable20, (byte)0);
}
AddBits(2);
@@ -371,7 +371,7 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
AudV[3] = new AudioVariables();
// memset(UnpOldTable20,0,sizeof(UnpOldTable20));
new Span<byte>(UnpOldTable20).Clear();
Utility.Fill(UnpOldTable20, (byte)0);
}
}
@@ -521,4 +521,4 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
return ((byte)Ch);
}
}
}
}

View File

@@ -2,6 +2,8 @@
using System;
using System.Collections.Generic;
using SharpCompress.Compressors.Rar.UnpackV1.Decode;
using SharpCompress.Compressors.Rar.VM;
using size_t=System.UInt32;
using UnpackBlockHeader = SharpCompress.Compressors.Rar.UnpackV1;
@@ -137,18 +139,14 @@ public bool TablePresent;
{
UnpInitData(Solid);
if (!UnpReadBuf())
{
return;
}
return;
// Check TablesRead5 to be sure that we read tables at least once
// regardless of current block header TablePresent flag.
// So we can safely use these tables below.
if (!ReadBlockHeader() ||
!ReadTables() || !TablesRead5)
{
return;
}
return;
}
while (true)
@@ -171,24 +169,17 @@ public bool TablePresent;
break;
}
if (!ReadBlockHeader() || !ReadTables())
{
return;
}
}
if (FileDone || !UnpReadBuf())
{
break;
}
}
if (((WriteBorder-UnpPtr) & MaxWinMask)<PackDef.MAX_LZ_MATCH+3 && WriteBorder!=UnpPtr)
{
UnpWriteBuf();
if (WrittenFileSize>DestUnpSize)
{
return;
}
if (Suspended)
{
FileExtracted=false;
@@ -252,9 +243,7 @@ public bool TablePresent;
{
Length++;
if (Distance>0x40000)
{
Length++;
}
}
}
@@ -270,10 +259,7 @@ public bool TablePresent;
{
UnpackFilter Filter = new UnpackFilter();
if (!ReadFilter(Filter) || !AddFilter(Filter))
{
break;
}
continue;
}
if (MainSlot==257)
@@ -283,10 +269,7 @@ public bool TablePresent;
// FragWindow.CopyString(LastLength,OldDist[0],UnpPtr,MaxWinMask);
// else
//CopyString(LastLength,OldDist[0]);
{
CopyString(LastLength,OldDistN(0));
}
CopyString(LastLength,OldDistN(0));
continue;
}
if (MainSlot<262)
@@ -298,10 +281,7 @@ public bool TablePresent;
//for (uint I=DistNum;I>0;I--)
for (int I=DistNum;I>0;I--)
//OldDistN[I]=OldDistN(I-1);
{
SetOldDistN(I, OldDistN(I-1));
}
//OldDistN[0]=Distance;
SetOldDistN(0, Distance);
@@ -336,19 +316,13 @@ public bool TablePresent;
private bool ReadFilter(UnpackFilter Filter)
{
if (!Inp.ExternalBuffer && Inp.InAddr>ReadTop-16)
{
if (!UnpReadBuf())
{
return false;
}
}
Filter.uBlockStart=ReadFilterData();
Filter.uBlockLength=ReadFilterData();
if (Filter.BlockLength>MAX_FILTER_BLOCK_SIZE)
{
Filter.BlockLength=0;
}
//Filter.Type=Inp.fgetbits()>>13;
Filter.Type=(byte)(Inp.fgetbits()>>13);
@@ -370,9 +344,7 @@ public bool TablePresent;
{
UnpWriteBuf(); // Write data, apply and flush filters.
if (Filters.Count>=MAX_UNPACK_FILTERS)
{
InitFilters(); // Still too many filters, prevent excessive memory use.
}
}
// If distance to filter start is that large that due to circular dictionary
@@ -389,10 +361,7 @@ public bool TablePresent;
{
int DataSize=ReadTop-Inp.InAddr; // Data left to process.
if (DataSize<0)
{
return false;
}
BlockHeader.BlockSize-=Inp.InAddr-BlockHeader.BlockStart;
if (Inp.InAddr>MAX_SIZE/2)
{
@@ -404,33 +373,21 @@ public bool TablePresent;
// to make it zero.
if (DataSize>0)
//memmove(Inp.InBuf,Inp.InBuf+Inp.InAddr,DataSize);
{
Array.Copy(InBuf, inAddr, InBuf, 0, DataSize);
}
// TODO: perf
//Buffer.BlockCopy(InBuf, inAddr, InBuf, 0, DataSize);
Inp.InAddr=0;
ReadTop=DataSize;
}
else
{
DataSize=ReadTop;
}
int ReadCode=0;
if (MAX_SIZE!=DataSize)
//ReadCode=UnpIO->UnpRead(Inp.InBuf+DataSize,BitInput.MAX_SIZE-DataSize);
{
ReadCode = readStream.Read(InBuf, DataSize, MAX_SIZE-DataSize);
}
if (ReadCode>0) // Can be also -1.
{
ReadTop+=ReadCode;
}
ReadBorder=ReadTop-30;
BlockHeader.BlockStart=Inp.InAddr;
if (BlockHeader.BlockSize!=-1) // '-1' means not defined yet.
@@ -717,9 +674,7 @@ public bool TablePresent;
private void UnpInitData50(bool Solid)
{
if (!Solid)
{
TablesRead5=false;
}
}
private bool ReadBlockHeader()
@@ -727,13 +682,8 @@ public bool TablePresent;
Header.HeaderSize=0;
if (!Inp.ExternalBuffer && Inp.InAddr>ReadTop-7)
{
if (!UnpReadBuf())
{
return false;
}
}
//Inp.faddbits((8-Inp.InBit)&7);
Inp.faddbits((uint)((8-Inp.InBit)&7));
@@ -743,9 +693,7 @@ public bool TablePresent;
uint ByteCount=(uint)(((BlockFlags>>3)&3)+1); // Block size byte count.
if (ByteCount==4)
{
return false;
}
//Header.HeaderSize=2+ByteCount;
Header.HeaderSize=(int)(2+ByteCount);
@@ -767,9 +715,7 @@ public bool TablePresent;
Header.BlockSize=BlockSize;
byte CheckSum=(byte)(0x5a^BlockFlags^BlockSize^(BlockSize>>8)^(BlockSize>>16));
if (CheckSum!=SavedCheckSum)
{
return false;
}
Header.BlockStart=Inp.InAddr;
ReadBorder=Math.Min(ReadBorder,Header.BlockStart+Header.BlockSize-1);
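
The block header parsing above ends by validating a one-byte checksum built from the flags byte and the three block-size bytes. A tiny sketch of that check using the exact XOR formula from the hunk; the sample flag and size values are arbitrary:

using System;

internal static class BlockHeaderChecksumSketch
{
    // Mirrors: CheckSum = 0x5a ^ BlockFlags ^ BlockSize ^ (BlockSize>>8) ^ (BlockSize>>16)
    internal static byte Checksum(byte blockFlags, uint blockSize)
        => (byte)(0x5a ^ blockFlags ^ blockSize ^ (blockSize >> 8) ^ (blockSize >> 16));

    internal static void Main()
    {
        byte flags = 0x0b;
        uint size = 0x012345;
        byte stored = Checksum(flags, size);                 // what the archive would carry
        Console.WriteLine(stored == Checksum(flags, size));     // True: header accepted
        Console.WriteLine(stored == Checksum(flags, size + 1)); // False: corrupt size detected
    }
}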

View File

@@ -1,5 +1,4 @@
using System;
using SharpCompress.Compressors.Rar.VM;
namespace SharpCompress.Compressors.Rar.UnpackV1
{
@@ -187,7 +186,9 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
int i;
long M, N;
new Span<int>(dec.DecodeNum).Clear(); // memset(Dec->DecodeNum,0,Size*sizeof(*Dec->DecodeNum));
Utility.Fill(lenCount, 0); // memset(LenCount,0,sizeof(LenCount));
Utility.Fill(dec.DecodeNum, 0); // memset(Dec->DecodeNum,0,Size*sizeof(*Dec->DecodeNum));
for (i = 0; i < size; i++)
{
@@ -216,4 +217,4 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
dec.MaxNum = size;
}
}
}
}

View File

@@ -30,9 +30,7 @@ public BitInput(bool AllocBuffer)
//memset(InBuf,0,BufSize);
}
else
{
InBuf=null;
}
InBuf=null;
}

View File

@@ -31,13 +31,11 @@ public FragmentedWindow()
private void Reset()
{
for (uint I=0;I<Mem.Length;I++)
{
if (Mem[I]!=null)
{
//free(Mem[I]);
Mem[I]=null;
}
}
}
@@ -62,18 +60,13 @@ public void Init(size_t WinSize)
{
NewMem=new byte[Size];
if (NewMem!=null)
{
break;
}
Size-=Size/32;
}
if (NewMem==null)
//throw std::bad_alloc();
{
throw new InvalidOperationException();
}
// Clean the window to generate the same output when unpacking corrupt
// RAR files, which may access to unused areas of sliding dictionary.
// sharpcompress: don't need this, freshly allocated above
@@ -86,27 +79,17 @@ public void Init(size_t WinSize)
}
if (TotalSize<WinSize) // Not found enough free blocks.
//throw std::bad_alloc();
{
throw new InvalidOperationException();
}
}
public byte this[size_t Item] {
get {
if (Item<MemSize[0])
{
return Mem[0][Item];
}
for (uint I=1;I<MemSize.Length;I++)
{
if (Item<MemSize[I])
{
return Mem[I][Item-MemSize[I-1]];
}
}
return Mem[0][0]; // Must never happen;
}
set {
@@ -115,13 +98,10 @@ set {
return;
}
for (uint I=1;I<MemSize.Length;I++)
{
if (Item<MemSize[I]) {
Mem[I][Item-MemSize[I-1]] = value;
return;
}
}
}
Mem[0][0] = value; // Must never happen;
}
}
@@ -158,22 +138,15 @@ public void CopyString(uint Length,uint Distance,ref size_t UnpPtr,size_t MaxWin
public void CopyData(byte[] Dest, size_t destOffset, size_t WinPos,size_t Size)
{
for (size_t I=0;I<Size;I++)
{
Dest[destOffset+I]=this[WinPos+I];
}
}
public size_t GetBlockSize(size_t StartPos,size_t RequiredSize)
{
for (uint I=0;I<MemSize.Length;I++)
{
if (StartPos<MemSize[I])
{
return Math.Min(MemSize[I]-StartPos,RequiredSize);
}
}
return 0; // Must never be here.
}

View File

@@ -12,6 +12,7 @@ using int64 = System.Int64;
using System;
using System.IO;
using SharpCompress.Common.Rar.Headers;
using static SharpCompress.Compressors.Rar.UnpackV2017.PackDef;
namespace SharpCompress.Compressors.Rar.UnpackV2017
{

View File

@@ -10,6 +10,10 @@ using size_t = System.UInt64;
using int64 = System.Int64;
using uint32 = System.UInt32;
using System;
using System.Collections.Generic;
using System.Text;
namespace SharpCompress.Compressors.Rar.UnpackV2017
{
internal partial class Unpack

View File

@@ -1,5 +1,4 @@
using System;
using static SharpCompress.Compressors.Rar.UnpackV2017.Unpack.Unpack15Local;
namespace SharpCompress.Compressors.Rar.UnpackV2017
{
@@ -62,10 +61,7 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
UnpPtr=0;
}
else
{
UnpPtr=WrPtr;
}
--DestUnpSize;
if (DestUnpSize>=0)
{
@@ -78,15 +74,9 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
UnpPtr&=MaxWinMask;
if (Inp.InAddr>ReadTop-30 && !UnpReadBuf())
{
break;
}
if (((WrPtr-UnpPtr) & MaxWinMask)<270 && WrPtr!=UnpPtr)
{
UnpWriteBuf20();
}
if (StMode != 0)
{
HuffDecode();
@@ -103,13 +93,9 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
{
FlagBuf<<=1;
if (Nlzb > Nhfb)
{
LongLZ();
}
else
{
HuffDecode();
}
}
else
{
@@ -123,13 +109,9 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
{
FlagBuf<<=1;
if (Nlzb > Nhfb)
{
HuffDecode();
}
else
{
LongLZ();
}
}
else
{
@@ -185,25 +167,15 @@ internal static class Unpack15Local {
if (AvrLn1<37)
{
for (Length=0;;Length++)
{
if (((BitField^ShortXor1[Length]) & (~(0xff>>(int)GetShortLen1(Length))))==0)
{
break;
}
}
Inp.faddbits(GetShortLen1(Length));
}
else
{
for (Length=0;;Length++)
{
if (((BitField^ShortXor2[Length]) & (~(0xff>>(int)GetShortLen2(Length))))==0)
{
break;
}
}
Inp.faddbits(GetShortLen2(Length));
}
@@ -237,14 +209,9 @@ internal static class Unpack15Local {
return;
}
if (Distance > 256)
{
Length++;
}
if (Distance >= MaxDist3)
{
Length++;
}
OldDist[OldDistPtr++]=Distance;
OldDistPtr = OldDistPtr & 3;
@@ -292,14 +259,10 @@ internal static class Unpack15Local {
uint BitField=Inp.fgetbits();
if (AvrLn2 >= 122)
{
Length=DecodeNum(BitField,STARTL2,DecL2,PosL2);
}
else
if (AvrLn2 >= 64)
{
Length=DecodeNum(BitField,STARTL1,DecL1,PosL1);
}
else
if (BitField < 0x100)
{
@@ -309,10 +272,7 @@ internal static class Unpack15Local {
else
{
for (Length=0;((BitField<<(int)Length)&0x8000)==0;Length++)
{
;
}
Inp.faddbits(Length+1);
}
@@ -321,18 +281,12 @@ internal static class Unpack15Local {
BitField=Inp.fgetbits();
if (AvrPlcB > 0x28ff)
{
DistancePlace=DecodeNum(BitField,STARTHF2,DecHf2,PosHf2);
}
else
if (AvrPlcB > 0x6ff)
{
DistancePlace=DecodeNum(BitField,STARTHF1,DecHf1,PosHf1);
}
else
{
DistancePlace=DecodeNum(BitField,STARTHF0,DecHf0,PosHf0);
}
AvrPlcB += DistancePlace;
AvrPlcB -= AvrPlcB >> 8;
@@ -341,13 +295,9 @@ internal static class Unpack15Local {
Distance = ChSetB[DistancePlace & 0xff];
NewDistancePlace = NToPlB[Distance++ & 0xff]++;
if ((Distance & 0xff) != 0)
{
CorrHuff(ChSetB,NToPlB);
}
else
{
break;
}
}
ChSetB[DistancePlace & 0xff]=ChSetB[NewDistancePlace];
@@ -358,39 +308,23 @@ internal static class Unpack15Local {
OldAvr3=AvrLn3;
if (Length!=1 && Length!=4)
{
if (Length==0 && Distance <= MaxDist3)
{
AvrLn3++;
AvrLn3 -= AvrLn3 >> 8;
}
else
if (AvrLn3 > 0)
{
AvrLn3--;
}
}
if (AvrLn3 > 0)
AvrLn3--;
Length+=3;
if (Distance >= MaxDist3)
{
Length++;
}
if (Distance <= 256)
{
Length+=8;
}
if (OldAvr3 > 0xb0 || AvrPlc >= 0x2a00 && OldAvr2 < 0x40)
{
MaxDist3=0x7f00;
}
else
{
MaxDist3=0x2001;
}
OldDist[OldDistPtr++]=Distance;
OldDistPtr = OldDistPtr & 3;
LastLength=Length;
@@ -408,37 +342,23 @@ internal static class Unpack15Local {
uint BitField=Inp.fgetbits();
if (AvrPlc > 0x75ff)
{
BytePlace=(int)DecodeNum(BitField,STARTHF4,DecHf4,PosHf4);
}
else
if (AvrPlc > 0x5dff)
{
BytePlace=(int)DecodeNum(BitField,STARTHF3,DecHf3,PosHf3);
}
else
if (AvrPlc > 0x35ff)
{
BytePlace=(int)DecodeNum(BitField,STARTHF2,DecHf2,PosHf2);
}
else
if (AvrPlc > 0x0dff)
{
BytePlace=(int)DecodeNum(BitField,STARTHF1,DecHf1,PosHf1);
}
else
{
BytePlace=(int)DecodeNum(BitField,STARTHF0,DecHf0,PosHf0);
}
BytePlace&=0xff;
if (StMode != 0)
{
if (BytePlace==0 && BitField > 0xfff)
{
BytePlace=0x100;
}
if (--BytePlace==-1)
{
BitField=Inp.fgetbits();
@@ -462,10 +382,7 @@ internal static class Unpack15Local {
}
else
if (NumHuf++ >= 16 && FlagsCnt==0)
{
StMode=1;
}
AvrPlc += (uint)BytePlace;
AvrPlc -= AvrPlc >> 8;
Nhfb+=16;
@@ -483,13 +400,9 @@ internal static class Unpack15Local {
CurByte=ChSet[BytePlace];
NewBytePlace=NToPl[CurByte++ & 0xff]++;
if ((CurByte & 0xff) > 0xa1)
{
CorrHuff(ChSet,NToPl);
}
else
{
break;
}
}
ChSet[BytePlace]=ChSet[NewBytePlace];
@@ -507,9 +420,7 @@ internal static class Unpack15Local {
// we need to check for value 256 when unpacking in case we unpack
// a corrupt archive.
if (FlagsPlace>=ChSetC.Length)
{
return;
}
while (true)
{
@@ -517,10 +428,7 @@ internal static class Unpack15Local {
FlagBuf=Flags>>8;
NewFlagsPlace=NToPlC[Flags++ & 0xff]++;
if ((Flags & 0xff) != 0)
{
break;
}
CorrHuff(ChSetC,NToPlC);
}
@@ -553,9 +461,9 @@ internal static class Unpack15Local {
ChSetA[I]=(ushort)I;
ChSetC[I]=(ushort)(((~I+1) & 0xff)<<8);
}
new Span<byte>(NToPl).Clear();
new Span<byte>(NToPlB).Clear();
new Span<byte>(NToPlC).Clear();
Utility.Memset(NToPl,0,NToPl.Length);
Utility.Memset(NToPlB,0,NToPlB.Length);
Utility.Memset(NToPlC,0,NToPlC.Length);
CorrHuff(ChSetB,NToPlB);
}
@@ -564,15 +472,10 @@ internal static class Unpack15Local {
int I,J;
for (I=7;I>=0;I--)
for (J=0;J<32;J++)
{
CharSet[J]=(ushort)((CharSet[J] & ~0xff) | I);
}
new Span<byte>(NumToPlace, 0, NToPl.Length).Clear();
Utility.Memset(NumToPlace,0,NToPl.Length);
for (I=6;I>=0;I--)
{
NumToPlace[I]=(byte)((7-I)*32);
}
}
private void CopyString15(uint Distance,uint Length)
@@ -589,10 +492,7 @@ internal static class Unpack15Local {
{
int I;
for (Num&=0xfff0,I=0;DecTab[I]<=Num;I++)
{
StartPos++;
}
Inp.faddbits(StartPos);
return(((Num-(I != 0 ? DecTab[I-1]:0))>>(int)(16-StartPos))+PosTab[StartPos]);
}

View File

@@ -40,22 +40,14 @@ internal static class Unpack20Local {
uint Bits;
if (Suspended)
{
UnpPtr=WrPtr;
}
else
{
UnpInitData(Solid);
if (!UnpReadBuf())
{
return;
}
if ((!Solid || !TablesRead2) && !ReadTables20())
{
return;
}
--DestUnpSize;
}
@@ -64,20 +56,13 @@ internal static class Unpack20Local {
UnpPtr&=MaxWinMask;
if (Inp.InAddr>ReadTop-30)
{
if (!UnpReadBuf())
{
break;
}
}
if (((WrPtr-UnpPtr) & MaxWinMask)<270 && WrPtr!=UnpPtr)
{
UnpWriteBuf20();
if (Suspended)
{
return;
}
}
if (UnpAudioBlock)
{
@@ -86,18 +71,12 @@ internal static class Unpack20Local {
if (AudioNumber==256)
{
if (!ReadTables20())
{
break;
}
continue;
}
Window[UnpPtr++]=DecodeAudio((int)AudioNumber);
if (++UnpCurChannel==UnpChannels)
{
UnpCurChannel=0;
}
--DestUnpSize;
continue;
}
@@ -130,9 +109,7 @@ internal static class Unpack20Local {
{
Length++;
if (Distance>=0x40000L)
{
Length++;
}
}
CopyString20(Length,Distance);
@@ -141,10 +118,7 @@ internal static class Unpack20Local {
if (Number==269)
{
if (!ReadTables20())
{
break;
}
continue;
}
if (Number==256)
@@ -169,9 +143,7 @@ internal static class Unpack20Local {
{
Length++;
if (Distance>=0x40000)
{
Length++;
}
}
}
CopyString20(Length,Distance);
@@ -196,10 +168,7 @@ internal static class Unpack20Local {
private void UnpWriteBuf20()
{
if (UnpPtr!=WrPtr)
{
UnpSomeRead=true;
}
if (UnpPtr<WrPtr)
{
UnpIO_UnpWrite(Window, WrPtr,(uint)(-(int)WrPtr & MaxWinMask));
@@ -207,10 +176,7 @@ internal static class Unpack20Local {
UnpAllBuf=true;
}
else
{
UnpIO_UnpWrite(Window,WrPtr,UnpPtr-WrPtr);
}
WrPtr=UnpPtr;
}
@@ -219,21 +185,13 @@ internal static class Unpack20Local {
byte[] BitLength = new byte[BC20];
byte[] Table = new byte[MC20*4];
if (Inp.InAddr>ReadTop-25)
{
if (!UnpReadBuf())
{
return false;
}
}
uint BitField=Inp.getbits();
UnpAudioBlock=(BitField & 0x8000)!=0;
if ((BitField & 0x4000) != 0)
{
new Span<byte>(UnpOldTable20).Clear();
}
Utility.Memset(UnpOldTable20,0,UnpOldTable20.Length);
Inp.addbits(2);
uint TableSize;
@@ -241,17 +199,12 @@ internal static class Unpack20Local {
{
UnpChannels=((BitField>>12) & 3)+1;
if (UnpCurChannel>=UnpChannels)
{
UnpCurChannel=0;
}
Inp.addbits(2);
TableSize=MC20*UnpChannels;
}
else
{
TableSize=NC20+DC20+RC20;
}
for (uint I=0;I<BC20;I++)
{
@@ -262,13 +215,8 @@ internal static class Unpack20Local {
for (uint I=0;I<TableSize;)
{
if (Inp.InAddr>ReadTop-5)
{
if (!UnpReadBuf())
{
return false;
}
}
uint Number=DecodeNumber(Inp,BlockTables.BD);
if (Number<16)
{
@@ -281,17 +229,13 @@ internal static class Unpack20Local {
uint N=(Inp.getbits() >> 14)+3;
Inp.addbits(2);
if (I==0)
{
return false; // We cannot have "repeat previous" code at the first position.
}
else
{
while (N-- > 0 && I<TableSize)
{
Table[I]=Table[I-1];
I++;
}
}
}
else
{
@@ -307,24 +251,15 @@ internal static class Unpack20Local {
Inp.addbits(7);
}
while (N-- > 0 && I<TableSize)
{
Table[I++]=0;
}
}
}
TablesRead2=true;
if (Inp.InAddr>ReadTop)
{
return true;
}
if (UnpAudioBlock)
{
for (uint I=0;I<UnpChannels;I++)
{
MakeDecodeTables(Table,(int)(I*MC20),MD[I],MC20);
}
}
else
{
MakeDecodeTables(Table,0,BlockTables.LD,NC20);
@@ -332,27 +267,21 @@ internal static class Unpack20Local {
MakeDecodeTables(Table,(int)(NC20+DC20),BlockTables.RD,RC20);
}
//x memcpy(UnpOldTable20,Table,sizeof(UnpOldTable20));
Array.Copy(Table,UnpOldTable20,UnpOldTable20.Length);
Array.Copy(Table,0,UnpOldTable20,0,UnpOldTable20.Length);
return true;
}
private void ReadLastTables()
{
if (ReadTop>=Inp.InAddr+5)
{
if (UnpAudioBlock)
{
if (DecodeNumber(Inp,MD[UnpCurChannel])==256)
{
ReadTables20();
}
}
else
if (DecodeNumber(Inp,BlockTables.LD)==269)
{
ReadTables20();
}
}
if (DecodeNumber(Inp,BlockTables.LD)==269)
ReadTables20();
}
private void UnpInitData20(bool Solid)
@@ -367,7 +296,7 @@ internal static class Unpack20Local {
//memset(AudV,0,sizeof(AudV));
AudV = new AudioVariables[4];
new Span<byte>(UnpOldTable20).Clear();
Utility.Memset(UnpOldTable20, 0, UnpOldTable20.Length);
//memset(MD,0,sizeof(MD));
MD = new DecodeTable[4];
}
@@ -423,73 +352,43 @@ internal static class Unpack20Local {
{
case 1:
if (V.K1>=-16)
{
V.K1--;
}
break;
case 2:
if (V.K1<16)
{
V.K1++;
}
break;
case 3:
if (V.K2>=-16)
{
V.K2--;
}
break;
case 4:
if (V.K2<16)
{
V.K2++;
}
break;
case 5:
if (V.K3>=-16)
{
V.K3--;
}
break;
case 6:
if (V.K3<16)
{
V.K3++;
}
break;
case 7:
if (V.K4>=-16)
{
V.K4--;
}
break;
case 8:
if (V.K4<16)
{
V.K4++;
}
break;
case 9:
if (V.K5>=-16)
{
V.K5--;
}
break;
case 10:
if (V.K5<16)
{
V.K5++;
}
break;
}
}

View File

@@ -9,6 +9,9 @@ using size_t = System.UInt64;
#endif
using int64 = System.Int64;
using System;
using static SharpCompress.Compressors.Rar.UnpackV2017.PackDef;
using static SharpCompress.Compressors.Rar.UnpackV2017.UnpackGlobal;
//using static SharpCompress.Compressors.Rar.UnpackV2017.Unpack.Unpack30Local;
/*
namespace SharpCompress.Compressors.Rar.UnpackV2017

View File

@@ -25,18 +25,14 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
{
UnpInitData(Solid);
if (!UnpReadBuf())
{
return;
}
// Check TablesRead5 to be sure that we read tables at least once
// regardless of current block header TablePresent flag.
// So we can safely use these tables below.
if (!ReadBlockHeader(Inp,ref BlockHeader) ||
!ReadTables(Inp,ref BlockHeader, ref BlockTables) || !TablesRead5)
{
return;
}
}
while (true)
@@ -49,8 +45,8 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
// We use 'while', because for empty block containing only Huffman table,
// we'll be on the block border once again just after reading the table.
while (Inp.InAddr>BlockHeader.BlockStart+BlockHeader.BlockSize-1 ||
Inp.InAddr==BlockHeader.BlockStart+BlockHeader.BlockSize-1 &&
Inp.InBit>=BlockHeader.BlockBitSize)
{
if (BlockHeader.LastBlockInFile)
@@ -59,24 +55,17 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
break;
}
if (!ReadBlockHeader(Inp,ref BlockHeader) || !ReadTables(Inp, ref BlockHeader, ref BlockTables))
{
return;
}
}
if (FileDone || !UnpReadBuf())
{
break;
}
}
if (((WriteBorder-UnpPtr) & MaxWinMask)<MAX_LZ_MATCH+3 && WriteBorder!=UnpPtr)
{
UnpWriteBuf();
if (WrittenFileSize>DestUnpSize)
{
return;
}
if (Suspended)
{
FileExtracted=false;
@@ -88,14 +77,9 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
if (MainSlot<256)
{
if (Fragmented)
{
FragWindow[UnpPtr++]=(byte)MainSlot;
}
else
{
Window[UnpPtr++]=(byte)MainSlot;
}
continue;
}
if (MainSlot>=262)
@@ -140,49 +124,32 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
{
Length++;
if (Distance>0x40000)
{
Length++;
}
}
}
InsertOldDist(Distance);
LastLength=Length;
if (Fragmented)
{
FragWindow.CopyString(Length,Distance,ref UnpPtr,MaxWinMask);
}
else
{
CopyString(Length,Distance);
}
continue;
}
if (MainSlot==256)
{
UnpackFilter Filter = new UnpackFilter();
if (!ReadFilter(Inp,Filter) || !AddFilter(Filter))
{
break;
}
continue;
}
if (MainSlot==257)
{
if (LastLength!=0)
{
if (Fragmented)
{
FragWindow.CopyString(LastLength,OldDist[0],ref UnpPtr,MaxWinMask);
}
else
{
CopyString(LastLength,OldDist[0]);
}
}
continue;
}
if (MainSlot<262)
@@ -190,24 +157,16 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
uint DistNum=MainSlot-258;
uint Distance=OldDist[DistNum];
for (uint I=DistNum;I>0;I--)
{
OldDist[I]=OldDist[I-1];
}
OldDist[0]=Distance;
uint LengthSlot=DecodeNumber(Inp,BlockTables.RD);
uint Length=SlotToLength(Inp,LengthSlot);
LastLength=Length;
if (Fragmented)
{
FragWindow.CopyString(Length,Distance,ref UnpPtr,MaxWinMask);
}
else
{
CopyString(Length,Distance);
}
continue;
}
}
@@ -231,19 +190,13 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
private bool ReadFilter(BitInput Inp,UnpackFilter Filter)
{
if (!Inp.ExternalBuffer && Inp.InAddr>ReadTop-16)
{
if (!UnpReadBuf())
{
return false;
}
}
Filter.BlockStart=ReadFilterData(Inp);
Filter.BlockLength=ReadFilterData(Inp);
if (Filter.BlockLength>MAX_FILTER_BLOCK_SIZE)
{
Filter.BlockLength=0;
}
Filter.Type=(byte)(Inp.fgetbits()>>13);
Inp.faddbits(3);
@@ -263,9 +216,7 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
{
UnpWriteBuf(); // Write data, apply and flush filters.
if (Filters.Count>=MAX_UNPACK_FILTERS)
{
InitFilters(); // Still too many filters, prevent excessive memory use.
}
}
// If distance to filter start is that large that due to circular dictionary
@@ -282,10 +233,7 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
{
int DataSize=ReadTop-Inp.InAddr; // Data left to process.
if (DataSize<0)
{
return false;
}
BlockHeader.BlockSize-=Inp.InAddr-BlockHeader.BlockStart;
if (Inp.InAddr>MAX_SIZE/2)
{
@@ -297,29 +245,17 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
// to make it zero.
if (DataSize>0)
//x memmove(Inp.InBuf,Inp.InBuf+Inp.InAddr,DataSize);
{
Buffer.BlockCopy(Inp.InBuf, Inp.InAddr, Inp.InBuf, 0, DataSize);
}
Inp.InAddr=0;
ReadTop=DataSize;
}
else
{
DataSize=ReadTop;
}
int ReadCode=0;
if (MAX_SIZE!=DataSize)
{
ReadCode=UnpIO_UnpRead(Inp.InBuf,DataSize,MAX_SIZE-DataSize);
}
if (ReadCode>0) // Can be also -1.
{
ReadTop+=ReadCode;
}
ReadBorder=ReadTop-30;
BlockHeader.BlockStart=Inp.InAddr;
if (BlockHeader.BlockSize!=-1) // '-1' means not defined yet.
@@ -349,10 +285,7 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
UnpackFilter flt=Filters[I];
if (flt.Type==FILTER_NONE)
{
continue;
}
if (flt.NextWindow)
{
// Here we skip filters which have block start in current data range
@@ -368,10 +301,7 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
// our write here, we can safely assume that filter is applicable
// to next block on no further wrap arounds is possible.
if (((flt.BlockStart-WrPtr)&MaxWinMask)<=FullWriteSize)
{
flt.NextWindow=false;
}
continue;
}
uint BlockStart=flt.BlockStart;
@@ -396,14 +326,10 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
if (BlockStart<BlockEnd || BlockEnd==0)
{
if (Fragmented)
{
FragWindow.CopyData(Mem,0,BlockStart,BlockLength);
}
else
//x memcpy(Mem,Window+BlockStart,BlockLength);
{
Utility.Copy(Window, BlockStart, Mem, 0, BlockLength);
}
}
else
{
@@ -427,9 +353,7 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
Filters[I].Type=FILTER_NONE;
if (OutMem!=null)
{
UnpIO_UnpWrite(OutMem,0,BlockLength);
}
UnpSomeRead=true;
WrittenFileSize+=BlockLength;
@@ -452,9 +376,7 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
{
UnpackFilter _flt=Filters[J];
if (_flt.Type!=FILTER_NONE)
{
_flt.NextWindow=false;
}
}
// Do not write data left after current filter now.
@@ -471,20 +393,13 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
for (int I=0;I<Filters.Count;I++)
{
if (EmptyCount>0)
{
Filters[I-EmptyCount]=Filters[I];
}
if (Filters[I].Type==FILTER_NONE)
{
EmptyCount++;
}
}
if (EmptyCount>0)
//Filters.Alloc(Filters.Count-EmptyCount);
{
Filters.RemoveRange(Filters.Count-EmptyCount, EmptyCount);
}
if (!NotAllFiltersProcessed) // Only if all filters are processed.
{
@@ -500,11 +415,9 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
// Choose the nearest among WriteBorder and WrPtr actual written border.
// If border is equal to UnpPtr, it means that we have MaxWinSize data ahead.
if (WriteBorder==UnpPtr ||
WrPtr!=UnpPtr && ((WrPtr-UnpPtr)&MaxWinMask)<((WriteBorder-UnpPtr)&MaxWinMask))
{
WriteBorder=WrPtr;
}
}
private byte[] ApplyFilter(byte[] __d,uint DataSize,UnpackFilter Flt)
@@ -537,15 +450,11 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
if ((Addr & 0x80000000)!=0) // Addr<0
{
if (((Addr+Offset) & 0x80000000)==0) // Addr+Offset>=0
{
RawPut4(Addr+FileSize,__d,Data);
}
}
else
if (((Addr-FileSize) & 0x80000000)!=0) // Addr<FileSize
{
RawPut4(Addr-Offset,__d,Data);
}
Data+=4;
CurPos+=4;
@@ -589,9 +498,7 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
{
byte PrevByte=0;
for (uint DestPos=CurChannel;DestPos<DataSize;DestPos+=Channels)
{
DstData[DestPos]=(PrevByte-=__d[Data+SrcPos++]);
}
}
return DstData;
}
@@ -603,14 +510,9 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
private void UnpWriteArea(size_t StartPtr,size_t EndPtr)
{
if (EndPtr!=StartPtr)
{
UnpSomeRead=true;
}
if (EndPtr<StartPtr)
{
UnpAllBuf=true;
}
if (Fragmented)
{
@@ -632,25 +534,17 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
UnpWriteData(Window,0,EndPtr);
}
else
{
UnpWriteData(Window,StartPtr,EndPtr-StartPtr);
}
}
private void UnpWriteData(byte[] Data, size_t offset, size_t Size)
{
if (WrittenFileSize>=DestUnpSize)
{
return;
}
size_t WriteSize=Size;
int64 LeftToWrite=DestUnpSize-WrittenFileSize;
if ((int64)WriteSize>LeftToWrite)
{
WriteSize=(size_t)LeftToWrite;
}
UnpIO_UnpWrite(Data, offset, WriteSize);
WrittenFileSize+=Size;
}
@@ -658,9 +552,7 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
private void UnpInitData50(bool Solid)
{
if (!Solid)
{
TablesRead5=false;
}
}
private bool ReadBlockHeader(BitInput Inp,ref UnpackBlockHeader Header)
@@ -668,23 +560,16 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
Header.HeaderSize=0;
if (!Inp.ExternalBuffer && Inp.InAddr>ReadTop-7)
{
if (!UnpReadBuf())
{
return false;
}
}
Inp.faddbits((uint)((8-Inp.InBit)&7));
byte BlockFlags=(byte)(Inp.fgetbits()>>8);
Inp.faddbits(8);
uint ByteCount=(uint)(((BlockFlags>>3)&3)+1); // Block size byte count.
if (ByteCount==4)
{
return false;
}
Header.HeaderSize=(int)(2+ByteCount);
@@ -703,9 +588,7 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
Header.BlockSize=BlockSize;
byte CheckSum=(byte)(0x5a^BlockFlags^BlockSize^(BlockSize>>8)^(BlockSize>>16));
if (CheckSum!=SavedCheckSum)
{
return false;
}
Header.BlockStart=Inp.InAddr;
ReadBorder=Math.Min(ReadBorder,Header.BlockStart+Header.BlockSize-1);
@@ -718,17 +601,11 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
private bool ReadTables(BitInput Inp,ref UnpackBlockHeader Header, ref UnpackBlockTables Tables)
{
if (!Header.TablePresent)
{
return true;
}
if (!Inp.ExternalBuffer && Inp.InAddr>ReadTop-25)
{
if (!UnpReadBuf())
{
return false;
}
}
byte[] BitLength = new byte[BC];
for (uint I=0;I<BC;I++)
@@ -740,24 +617,17 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
uint ZeroCount=(byte)(Inp.fgetbits() >> 12);
Inp.faddbits(4);
if (ZeroCount==0)
{
BitLength[I]=15;
}
else
{
ZeroCount+=2;
while (ZeroCount-- > 0 && I<BitLength.Length)
{
BitLength[I++]=0;
}
I--;
}
}
else
{
BitLength[I]=(byte)Length;
}
}
MakeDecodeTables(BitLength,0,Tables.BD,BC);
@@ -767,13 +637,8 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
for (uint I=0;I<TableSize;)
{
if (!Inp.ExternalBuffer && Inp.InAddr>ReadTop-5)
{
if (!UnpReadBuf())
{
return false;
}
}
uint Number=DecodeNumber(Inp,Tables.BD);
if (Number<16)
{
@@ -804,13 +669,11 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
return false;
}
else
{
while (N-- > 0 && I<TableSize)
{
Table[I]=Table[I-1];
I++;
}
}
}
else
{
@@ -826,17 +689,12 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
Inp.faddbits(7);
}
while (N-- > 0 && I<TableSize)
{
Table[I++]=0;
}
}
}
TablesRead5=true;
if (!Inp.ExternalBuffer && Inp.InAddr>ReadTop)
{
return false;
}
MakeDecodeTables(Table, 0, Tables.LD,NC);
MakeDecodeTables(Table, (int)NC,Tables.DD,DC);
MakeDecodeTables(Table, (int)(NC+DC),Tables.LDD,LDC);

View File

@@ -70,9 +70,7 @@ public Unpack(/* ComprDataIO *DataIO */)
// will be 0 because of size_t overflow. Let's issue the memory error.
if (WinSize==0)
//ErrHandler.MemoryError();
{
throw new InvalidFormatException("invalid window size (possibly due to a rar file with a 4GB being unpacked on a 32-bit platform)");
}
// Minimum window size must be at least twice more than maximum possible
// size of filter block, which is 0x10000 in RAR now. If window size is
@@ -81,19 +79,12 @@ public Unpack(/* ComprDataIO *DataIO */)
// use 0x40000 for extra safety and possible filter area size expansion.
const size_t MinAllocSize=0x40000;
if (WinSize<MinAllocSize)
{
WinSize=MinAllocSize;
}
if (WinSize<=MaxWinSize) // Use the already allocated window.
{
return;
}
if ((WinSize>>16)>0x10000) // Window size must not exceed 4 GB.
{
return;
}
// Archiving code guarantees that window size does not grow in the same
// solid stream. So if we are here, we are either creating a new window
@@ -105,14 +96,11 @@ public Unpack(/* ComprDataIO *DataIO */)
// We do not handle growth for existing fragmented window.
if (Grow && Fragmented)
//throw std::bad_alloc();
{
throw new InvalidFormatException("Grow && Fragmented");
}
byte[] NewWindow=Fragmented ? null : new byte[WinSize];
if (NewWindow==null)
{
if (Grow || WinSize<0x1000000)
{
// We do not support growth for new fragmented window.
@@ -130,7 +118,6 @@ public Unpack(/* ComprDataIO *DataIO */)
FragWindow.Init(WinSize);
Fragmented=true;
}
}
if (!Fragmented)
{
@@ -145,12 +132,8 @@ public Unpack(/* ComprDataIO *DataIO */)
// RAR archiving code does not allow it in solid streams now,
// but let's implement it anyway just in case we'll change it sometimes.
if (Grow)
{
for (size_t I=1;I<=MaxWinSize;I++)
{
NewWindow[(UnpPtr-I)&(WinSize-1)]=Window[(UnpPtr-I)&(MaxWinSize-1)];
}
}
//if (Window!=null)
// free(Window);
@@ -171,27 +154,18 @@ public Unpack(/* ComprDataIO *DataIO */)
#if !RarV2017_SFX_MODULE
case 15: // rar 1.5 compression
if (!Fragmented)
{
Unpack15(Solid);
}
break;
case 20: // rar 2.x compression
case 26: // files larger than 2GB
if (!Fragmented)
{
Unpack20(Solid);
}
break;
#endif
#if !RarV2017_RAR5ONLY
case 29: // rar 3.x compression
if (!Fragmented)
{
throw new NotImplementedException();
}
break;
#endif
case 50: // RAR 5.0 compression algorithm.
@@ -222,7 +196,7 @@ public Unpack(/* ComprDataIO *DataIO */)
{
if (!Solid)
{
new Span<uint>(OldDist).Clear();
Utility.Memset<uint>(OldDist, 0, OldDist.Length);
OldDistPtr=0;
LastDist=LastLength=0;
// memset(Window,0,MaxWinSize);
@@ -265,16 +239,14 @@ public Unpack(/* ComprDataIO *DataIO */)
uint[] LengthCount = new uint[16];
//memset(LengthCount,0,sizeof(LengthCount));
for (size_t I=0;I<Size;I++)
{
LengthCount[LengthTable[offset+I] & 0xf]++;
}
// We must not calculate the number of zero length codes.
LengthCount[0]=0;
// Set the entire DecodeNum to zero.
//memset(Dec->DecodeNum,0,Size*sizeof(*Dec->DecodeNum));
new Span<ushort>(Dec.DecodeNum).Clear();
Utility.FillFast<ushort>(Dec.DecodeNum, 0);
// Initialize not really used entry for zero length code.
Dec.DecodePos[0]=0;
@@ -300,7 +272,7 @@ public Unpack(/* ComprDataIO *DataIO */)
Dec.DecodeLen[I]=(uint)LeftAligned;
// Every item of this array contains the sum of all preceding items.
// So it contains the start position in code list for every bit length.
Dec.DecodePos[I]=Dec.DecodePos[I-1]+LengthCount[I-1];
}
@@ -308,7 +280,7 @@ public Unpack(/* ComprDataIO *DataIO */)
// so we cannot use the original DecodePos.
uint[] CopyDecodePos = new uint[Dec.DecodePos.Length];
//memcpy(CopyDecodePos,Dec->DecodePos,sizeof(CopyDecodePos));
Array.Copy(Dec.DecodePos, CopyDecodePos, CopyDecodePos.Length);
Array.Copy(Dec.DecodePos, 0, CopyDecodePos, 0, CopyDecodePos.Length);
// For every bit length in the bit length table and so for every item
// of alphabet.
@@ -365,13 +337,11 @@ public Unpack(/* ComprDataIO *DataIO */)
uint BitField=Code<<(int)(16-Dec.QuickBits);
// Prepare the table for quick decoding of bit lengths.
// Find the upper limit for current bit field and adjust the bit length
// accordingly if necessary.
while (CurBitLength<Dec.DecodeLen.Length && BitField>=Dec.DecodeLen[CurBitLength])
{
CurBitLength++;
}
// Translation of right aligned bit string to bit length.
Dec.QuickLen[Code]=CurBitLength;
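
MakeDecodeTables above first counts how many codes exist per bit length and then turns the counts into start positions with a running sum (DecodePos[I] = DecodePos[I-1] + LengthCount[I-1]). A compact sketch of just that counting and prefix-sum step, with an illustrative length table:

using System;

internal static class DecodePosSketch
{
    internal static void Main()
    {
        // Bit length per alphabet symbol (0 = symbol not used), illustrative values.
        byte[] lengthTable = { 2, 3, 3, 0, 2, 3 };

        uint[] lengthCount = new uint[16];
        foreach (byte len in lengthTable)
            lengthCount[len & 0xf]++;
        lengthCount[0] = 0;             // zero-length codes are not real codes

        // Start position in the code list for every bit length.
        uint[] decodePos = new uint[16];
        decodePos[0] = 0;
        for (int i = 1; i < 16; i++)
            decodePos[i] = decodePos[i - 1] + lengthCount[i - 1];

        Console.WriteLine(decodePos[2]); // 0: the 2-bit codes open the code list
        Console.WriteLine(decodePos[3]); // 2: 3-bit codes start after the two 2-bit codes
    }
}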

View File

@@ -94,7 +94,6 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
// if (Length>6) { Dest[6]=Src[6]; } } } } } } } // Close all nested "if"s.
}
else
{
while (Length-- > 0) // Slow copying with all possible precautions.
{
Window[UnpPtr]=Window[SrcPtr++ & MaxWinMask];
@@ -102,7 +101,6 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
// be replaced with 'Window[UnpPtr++ & MaxWinMask]'
UnpPtr=(UnpPtr+1) & MaxWinMask;
}
}
}
private uint DecodeNumber(BitInput Inp,DecodeTable Dec)
@@ -120,13 +118,11 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
// Detect the real bit length for current code.
uint Bits=15;
for (uint I=Dec.QuickBits+1;I<15;I++)
{
if (BitField<Dec.DecodeLen[I])
{
Bits=I;
break;
}
}
Inp.addbits(Bits);
@@ -144,9 +140,7 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
// Out of bounds safety check required for damaged archives.
if (Pos>=Dec.MaxNum)
{
Pos=0;
}
// Convert the position in the code list to position in alphabet
// and return it.

View File

@@ -432,10 +432,7 @@ internal partial class Unpack
private uint GetChar()
{
if (Inp.InAddr>MAX_SIZE-30)
{
UnpReadBuf();
}
UnpReadBuf();
return(Inp.InBuf[Inp.InAddr++]);
}

View File

@@ -1,6 +1,6 @@
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using SharpCompress.Converters;
namespace SharpCompress.Compressors.Rar.VM
{
@@ -72,9 +72,9 @@ namespace SharpCompress.Compressors.Rar.VM
}
if (IsVMMem(mem))
{
return BinaryPrimitives.ReadInt32LittleEndian(mem.AsSpan(offset));
return DataConverter.LittleEndian.GetInt32(mem, offset);
}
return BinaryPrimitives.ReadInt32BigEndian(mem.AsSpan(offset));
return DataConverter.BigEndian.GetInt32(mem, offset);
}
private void SetValue(bool byteMode, byte[] mem, int offset, int value)
@@ -94,11 +94,11 @@ namespace SharpCompress.Compressors.Rar.VM
{
if (IsVMMem(mem))
{
BinaryPrimitives.WriteInt32LittleEndian(mem.AsSpan(offset), value);
DataConverter.LittleEndian.PutBytes(mem, offset, value);
}
else
{
BinaryPrimitives.WriteInt32BigEndian(mem.AsSpan(offset), value);
DataConverter.BigEndian.PutBytes(mem, offset, value);
}
}
@@ -120,12 +120,12 @@ namespace SharpCompress.Compressors.Rar.VM
if (cmdOp.Type == VMOpType.VM_OPREGMEM)
{
int pos = (cmdOp.Offset + cmdOp.Base) & VM_MEMMASK;
ret = BinaryPrimitives.ReadInt32LittleEndian(Mem.AsSpan(pos));
ret = DataConverter.LittleEndian.GetInt32(Mem, pos);
}
else
{
int pos = cmdOp.Offset;
ret = BinaryPrimitives.ReadInt32LittleEndian(Mem.AsSpan(pos));
ret = DataConverter.LittleEndian.GetInt32(Mem, pos);
}
return ret;
}
@@ -190,12 +190,12 @@ namespace SharpCompress.Compressors.Rar.VM
{
//prg.GlobalData.Clear();
// ->GlobalData.Add(dataSize+VM_FIXEDGLOBALSIZE);
prg.GlobalData.Capacity = dataSize + VM_FIXEDGLOBALSIZE;
prg.GlobalData.SetSize(dataSize + VM_FIXEDGLOBALSIZE);
for (int i = 0; i < dataSize + VM_FIXEDGLOBALSIZE; i++)
// memcpy(&Prg->GlobalData[0],&Mem[VM_GLOBALMEMADDR],DataSize+VM_FIXEDGLOBALSIZE);
{
prg.GlobalData.Add(Mem[VM_GLOBALMEMADDR + i]);
prg.GlobalData[i] = Mem[VM_GLOBALMEMADDR + i];
}
}
}
@@ -1449,4 +1449,4 @@ namespace SharpCompress.Compressors.Rar.VM
}
//
}
}

View File

@@ -36,9 +36,7 @@ namespace SharpCompress.Compressors.Xz
var result = BitConverter.GetBytes(uint32);
if (BitConverter.IsLittleEndian)
{
Array.Reverse(result);
}
return result;
}
@@ -48,9 +46,7 @@ namespace SharpCompress.Compressors.Xz
var result = BitConverter.GetBytes(uint32);
if (!BitConverter.IsLittleEndian)
{
Array.Reverse(result);
}
return result;
}
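
The two helpers above produce big- and little-endian byte arrays by reversing BitConverter output depending on the host's byte order. As a comparison only (not what the XZ code does), the same big-endian result can come from the span-based BinaryPrimitives API without the endianness branch:

using System;
using System.Buffers.Binary;

internal static class EndianBytesSketch
{
    // Produces the value's bytes in big-endian order regardless of host endianness.
    internal static byte[] BigEndianBytes(uint value)
    {
        var result = new byte[4];
        BinaryPrimitives.WriteUInt32BigEndian(result, value);
        return result;
    }

    internal static void Main()
    {
        Console.WriteLine(BitConverter.ToString(BigEndianBytes(0x01020304))); // 01-02-03-04
    }
}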

View File

@@ -28,33 +28,22 @@ namespace SharpCompress.Compressors.Xz
private static UInt32[] InitializeTable(UInt32 polynomial)
{
if (polynomial == DefaultPolynomial && defaultTable != null)
{
return defaultTable;
}
var createTable = new UInt32[256];
for (var i = 0; i < 256; i++)
{
var entry = (UInt32)i;
for (var j = 0; j < 8; j++)
{
if ((entry & 1) == 1)
{
entry = (entry >> 1) ^ polynomial;
}
else
{
entry = entry >> 1;
}
}
createTable[i] = entry;
}
if (polynomial == DefaultPolynomial)
{
defaultTable = createTable;
}
return createTable;
}
@@ -63,10 +52,7 @@ namespace SharpCompress.Compressors.Xz
{
var crc = seed;
for (var i = start; i < size - start; i++)
{
crc = (crc >> 8) ^ table[buffer[i] ^ crc & 0xff];
}
return crc;
}
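
The table construction and CalculateHash loop above are a standard reflected, table-driven CRC-32. A self-contained sketch of the same technique; the 0xEDB88320 polynomial and the 0xFFFFFFFF seed/final complement are the conventional CRC-32 parameters and are assumed here, since DefaultPolynomial's value sits outside this hunk:

using System;

internal static class Crc32Sketch
{
    private static readonly uint[] Table = BuildTable(0xEDB88320u);

    private static uint[] BuildTable(uint polynomial)
    {
        var table = new uint[256];
        for (uint i = 0; i < 256; i++)
        {
            uint entry = i;
            for (int j = 0; j < 8; j++)
                entry = (entry & 1) == 1 ? (entry >> 1) ^ polynomial : entry >> 1;
            table[i] = entry;
        }
        return table;
    }

    internal static uint Compute(byte[] buffer)
    {
        uint crc = 0xFFFFFFFF;                       // seed
        foreach (byte b in buffer)
            crc = (crc >> 8) ^ Table[(b ^ crc) & 0xff];
        return ~crc;                                 // final complement
    }

    internal static void Main()
    {
        byte[] data = System.Text.Encoding.ASCII.GetBytes("123456789");
        Console.WriteLine(Compute(data).ToString("x8")); // cbf43926, the CRC-32 check value
    }
}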

View File

@@ -19,9 +19,7 @@ namespace SharpCompress.Compressors.Xz
public static UInt64 Compute(UInt64 seed, byte[] buffer)
{
if (Table == null)
{
Table = CreateTable(Iso3309Polynomial);
}
return CalculateHash(seed, Table, buffer, 0, buffer.Length);
}
@@ -31,12 +29,10 @@ namespace SharpCompress.Compressors.Xz
var crc = seed;
for (var i = start; i < size; i++)
{
unchecked
{
crc = (crc >> 8) ^ table[(buffer[i] ^ crc) & 0xff];
}
}
return crc;
}
@@ -48,17 +44,10 @@ namespace SharpCompress.Compressors.Xz
{
var entry = (UInt64)i;
for (var j = 0; j < 8; ++j)
{
if ((entry & 1) == 1)
{
entry = (entry >> 1) ^ polynomial;
}
else
{
entry = entry >> 1;
}
}
createTable[i] = entry;
}
return createTable;

View File

@@ -35,18 +35,12 @@ namespace SharpCompress.Compressors.Xz.Filters
{
var filterType = (FilterTypes)reader.ReadXZInteger();
if (!FilterMap.ContainsKey(filterType))
{
throw new NotImplementedException($"Filter {filterType} has not yet been implemented");
}
var filter = Activator.CreateInstance(FilterMap[filterType]) as BlockFilter;
var sizeOfProperties = reader.ReadXZInteger();
if (sizeOfProperties > int.MaxValue)
{
throw new InvalidDataException("Block filter information too large");
}
byte[] properties = reader.ReadBytes((int)sizeOfProperties);
filter.Init(properties);
return filter;

View File

@@ -16,10 +16,7 @@ namespace SharpCompress.Compressors.Xz.Filters
get
{
if (_dictionarySize > 40)
{
throw new OverflowException("Dictionary size greater than UInt32.Max");
}
if (_dictionarySize == 40)
{
return uint.MaxValue;
@@ -33,16 +30,12 @@ namespace SharpCompress.Compressors.Xz.Filters
public override void Init(byte[] properties)
{
if (properties.Length != 1)
{
throw new InvalidDataException("LZMA properties unexpected length");
}
_dictionarySize = (byte)(properties[0] & 0x3F);
var reserved = properties[0] & 0xC0;
if (reserved != 0)
{
throw new InvalidDataException("Reserved bits used in LZMA properties");
}
}
public override void ValidateFilter()

View File

@@ -8,14 +8,9 @@ namespace SharpCompress.Compressors.Xz
public static ulong ReadXZInteger(this BinaryReader reader, int MaxBytes = 9)
{
if (MaxBytes <= 0)
{
throw new ArgumentOutOfRangeException();
}
if (MaxBytes > 9)
{
MaxBytes = 9;
}
byte LastByte = reader.ReadByte();
ulong Output = (ulong)LastByte & 0x7F;
@@ -24,15 +19,10 @@ namespace SharpCompress.Compressors.Xz
while ((LastByte & 0x80) != 0)
{
if (++i >= MaxBytes)
{
throw new InvalidDataException();
}
LastByte = reader.ReadByte();
if (LastByte == 0)
{
throw new InvalidDataException();
}
Output |= ((ulong)(LastByte & 0x7F)) << (i * 7);
}
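
ReadXZInteger above decodes the XZ multibyte integer format: seven payload bits per byte, the high bit set on every byte except the last, at most nine bytes, and a trailing zero byte treated as invalid. A small encoder sketch for the same layout plus one worked value; the encoder is not part of this diff and is shown only to make the byte layout concrete:

using System;
using System.Collections.Generic;

internal static class XZIntegerSketch
{
    // Encodes a value into the XZ variable-length layout decoded by ReadXZInteger.
    internal static byte[] Encode(ulong value)
    {
        var bytes = new List<byte>();
        while (value >= 0x80)
        {
            bytes.Add((byte)(value | 0x80)); // low 7 bits, continuation flag set
            value >>= 7;
        }
        bytes.Add((byte)value);              // final byte, high bit clear
        return bytes.ToArray();
    }

    internal static void Main()
    {
        // 0x1234 -> B4 24: low seven bits 0x34 with the 0x80 flag, then the remaining 0x24.
        Console.WriteLine(BitConverter.ToString(Encode(0x1234))); // B4-24
    }
}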

View File

@@ -33,35 +33,17 @@ namespace SharpCompress.Compressors.Xz
{
int bytesRead = 0;
if (!HeaderIsLoaded)
{
LoadHeader();
}
if (!_streamConnected)
{
ConnectStream();
}
if (!_endOfStream)
{
bytesRead = _decomStream.Read(buffer, offset, count);
}
if (bytesRead != count)
{
_endOfStream = true;
}
if (_endOfStream && !_paddingSkipped)
{
SkipPadding();
}
if (_endOfStream && !_crcChecked)
{
CheckCrc();
}
_bytesRead += (ulong)bytesRead;
return bytesRead;
}
@@ -74,9 +56,7 @@ namespace SharpCompress.Compressors.Xz
byte[] paddingBytes = new byte[4 - bytes];
BaseStream.Read(paddingBytes, 0, paddingBytes.Length);
if (paddingBytes.Any(b => b != 0))
{
throw new InvalidDataException("Padding bytes were non-null");
}
}
_paddingSkipped = true;
}
@@ -121,9 +101,7 @@ namespace SharpCompress.Compressors.Xz
{
_blockHeaderSizeByte = (byte)BaseStream.ReadByte();
if (_blockHeaderSizeByte == 0)
{
throw new XZIndexMarkerReachedException();
}
}
private byte[] CacheHeader()
@@ -132,16 +110,12 @@ namespace SharpCompress.Compressors.Xz
blockHeaderWithoutCrc[0] = _blockHeaderSizeByte;
var read = BaseStream.Read(blockHeaderWithoutCrc, 1, BlockHeaderSize - 5);
if (read != BlockHeaderSize - 5)
{
throw new EndOfStreamException("Reached end of stream unexpectedly");
}
uint crc = BaseStream.ReadLittleEndianUInt32();
uint calcCrc = Crc32.Compute(blockHeaderWithoutCrc);
if (crc != calcCrc)
{
throw new InvalidDataException("Block header corrupt");
}
return blockHeaderWithoutCrc;
}
@@ -153,22 +127,15 @@ namespace SharpCompress.Compressors.Xz
byte reserved = (byte)(blockFlags & 0x3C);
if (reserved != 0)
{
throw new InvalidDataException("Reserved bytes used, perhaps an unknown XZ implementation");
}
bool compressedSizePresent = (blockFlags & 0x40) != 0;
bool uncompressedSizePresent = (blockFlags & 0x80) != 0;
if (compressedSizePresent)
{
CompressedSize = reader.ReadXZInteger();
}
if (uncompressedSizePresent)
{
UncompressedSize = reader.ReadXZInteger();
}
}
private void ReadFilters(BinaryReader reader, long baseStreamOffset = 0)
@@ -179,30 +146,20 @@ namespace SharpCompress.Compressors.Xz
var filter = BlockFilter.Read(reader);
if ((i + 1 == _numFilters && !filter.AllowAsLast)
|| (i + 1 < _numFilters && !filter.AllowAsNonLast))
{
throw new InvalidDataException("Block Filters in bad order");
}
if (filter.ChangesDataSize && i + 1 < _numFilters)
{
nonLastSizeChangers++;
}
filter.ValidateFilter();
Filters.Push(filter);
}
if (nonLastSizeChangers > 2)
{
throw new InvalidDataException("More than two non-last block filters cannot change stream size");
}
int blockHeaderPaddingSize = BlockHeaderSize -
(4 + (int)(reader.BaseStream.Position - baseStreamOffset));
byte[] blockHeaderPadding = reader.ReadBytes(blockHeaderPaddingSize);
if (!blockHeaderPadding.All(b => b == 0))
{
throw new InvalidDataException("Block header contains unknown fields");
}
}
}
}

View File

@@ -32,10 +32,7 @@ namespace SharpCompress.Compressors.Xz
byte[] footerBytes = _reader.ReadBytes(6);
uint myCrc = Crc32.Compute(footerBytes);
if (crc != myCrc)
{
throw new InvalidDataException("Footer corrupt");
}
using (var stream = new MemoryStream(footerBytes))
using (var reader = new BinaryReader(stream))
{

View File

@@ -38,24 +38,18 @@ namespace SharpCompress.Compressors.Xz
UInt32 crc = _reader.ReadLittleEndianUInt32();
UInt32 calcCrc = Crc32.Compute(streamFlags);
if (crc != calcCrc)
{
throw new InvalidDataException("Stream header corrupt");
}
BlockCheckType = (CheckType)(streamFlags[1] & 0x0F);
byte futureUse = (byte)(streamFlags[1] & 0xF0);
if (futureUse != 0 || streamFlags[0] != 0)
{
throw new InvalidDataException("Unknown XZ Stream Version");
}
}
private void CheckMagicBytes(byte[] header)
{
if (!header.SequenceEqual(MagicHeader))
{
throw new InvalidDataException("Invalid XZ Stream");
}
}
}
}

View File

@@ -23,9 +23,7 @@ namespace SharpCompress.Compressors.Xz
_indexMarkerAlreadyVerified = indexMarkerAlreadyVerified;
StreamStartPosition = reader.BaseStream.Position;
if (indexMarkerAlreadyVerified)
{
StreamStartPosition--;
}
}
public static XZIndex FromStream(Stream stream, bool indexMarkerAlreadyVerified)
@@ -38,10 +36,7 @@ namespace SharpCompress.Compressors.Xz
public void Process()
{
if (!_indexMarkerAlreadyVerified)
{
VerifyIndexMarker();
}
NumberOfRecords = _reader.ReadXZInteger();
for (ulong i = 0; i < NumberOfRecords; i++)
{
@@ -55,9 +50,7 @@ namespace SharpCompress.Compressors.Xz
{
byte marker = _reader.ReadByte();
if (marker != 0)
{
throw new InvalidDataException("Not an index block");
}
}
private void SkipPadding()
@@ -67,9 +60,7 @@ namespace SharpCompress.Compressors.Xz
{
byte[] paddingBytes = _reader.ReadBytes(4 - bytes);
if (paddingBytes.Any(b => b != 0))
{
throw new InvalidDataException("Padding bytes were non-null");
}
}
}

View File

@@ -8,9 +8,7 @@ namespace SharpCompress.Compressors.Xz
{
BaseStream = stream;
if (!BaseStream.CanRead)
{
throw new InvalidDataException("Must be able to read from stream");
}
}
}
}

View File

@@ -50,15 +50,9 @@ namespace SharpCompress.Compressors.Xz
{
int bytesRead = 0;
if (_endOfStream)
{
return bytesRead;
}
if (!HeaderIsRead)
{
ReadHeader();
}
bytesRead = ReadBlocks(buffer, offset, count);
if (bytesRead < count)
{
@@ -92,27 +86,18 @@ namespace SharpCompress.Compressors.Xz
{
int bytesRead = 0;
if (_currentBlock == null)
{
NextBlock();
}
for (;;)
{
try
{
if (bytesRead >= count)
{
break;
}
int remaining = count - bytesRead;
int newOffset = offset + bytesRead;
int justRead = _currentBlock.Read(buffer, newOffset, remaining);
if (justRead < remaining)
{
NextBlock();
}
bytesRead += justRead;
}
catch (XZIndexMarkerReachedException)

File diff suppressed because it is too large

View File

@@ -1,4 +1,5 @@
using System;
using System.Collections.Generic;
using System.IO;
namespace SharpCompress.Crypto
@@ -78,33 +79,22 @@ namespace SharpCompress.Crypto
private static uint[] InitializeTable(uint polynomial)
{
if (polynomial == DefaultPolynomial && defaultTable != null)
{
return defaultTable;
}
var createTable = new uint[256];
for (var i = 0; i < 256; i++)
{
var entry = (uint)i;
for (var j = 0; j < 8; j++)
{
if ((entry & 1) == 1)
{
entry = (entry >> 1) ^ polynomial;
}
else
{
entry = entry >> 1;
}
}
createTable[i] = entry;
}
if (polynomial == DefaultPolynomial)
{
defaultTable = createTable;
}
return createTable;
}

View File

@@ -9,7 +9,7 @@ namespace SharpCompress.Crypto
private static readonly int MAXKC = (256 / 4);
private static ReadOnlySpan<byte> Logtable => new byte[]
private static readonly byte[] Logtable =
{
0, 0, 25, 1, 50, 2, 26, 198,
75, 199, 27, 104, 51, 238, 223, 3,
@@ -45,7 +45,7 @@ namespace SharpCompress.Crypto
13, 99, 140, 128, 192, 247, 112, 7
};
private static ReadOnlySpan<byte> Alogtable => new byte[]
private static readonly byte[] Alogtable =
{
0, 3, 5, 15, 17, 51, 85, 255, 26, 46, 114, 150, 161, 248, 19, 53,
95, 225, 56, 72, 216, 115, 149, 164, 247, 2, 6, 10, 30, 34, 102, 170,
@@ -121,7 +121,7 @@ namespace SharpCompress.Crypto
23, 43, 4, 126, 186, 119, 214, 38, 225, 105, 20, 99, 85, 33, 12, 125
};
private static ReadOnlySpan<byte> rcon => new byte[]
private static readonly byte[] rcon =
{
0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36, 0x6c, 0xd8, 0xab, 0x4d, 0x9a,
0x2f, 0x5e, 0xbc, 0x63, 0xc6, 0x97, 0x35, 0x6a, 0xd4, 0xb3, 0x7d, 0xfa, 0xef, 0xc5, 0x91

View File

@@ -1,6 +1,6 @@
using System;
using System.Buffers.Binary;
using System.IO;
using SharpCompress.Converters;
namespace SharpCompress.IO
{
@@ -81,17 +81,17 @@ namespace SharpCompress.IO
public override short ReadInt16()
{
return BinaryPrimitives.ReadInt16LittleEndian(ReadBytes(2));
return DataConverter.LittleEndian.GetInt16(ReadBytes(2), 0);
}
public override int ReadInt32()
{
return BinaryPrimitives.ReadInt32LittleEndian(ReadBytes(4));
return DataConverter.LittleEndian.GetInt32(ReadBytes(4), 0);
}
public override long ReadInt64()
{
return BinaryPrimitives.ReadInt64LittleEndian(ReadBytes(8));
return DataConverter.LittleEndian.GetInt64(ReadBytes(8), 0);
}
public override sbyte ReadSByte()
@@ -111,17 +111,17 @@ namespace SharpCompress.IO
public override ushort ReadUInt16()
{
return BinaryPrimitives.ReadUInt16LittleEndian(ReadBytes(2));
return DataConverter.LittleEndian.GetUInt16(ReadBytes(2), 0);
}
public override uint ReadUInt32()
{
return BinaryPrimitives.ReadUInt32LittleEndian(ReadBytes(4));
return DataConverter.LittleEndian.GetUInt32(ReadBytes(4), 0);
}
public override ulong ReadUInt64()
{
return BinaryPrimitives.ReadUInt64LittleEndian(ReadBytes(8));
return DataConverter.LittleEndian.GetUInt64(ReadBytes(8), 0);
}
// RAR5 style variable length encoded value
@@ -196,4 +196,4 @@ namespace SharpCompress.IO
throw new FormatException("malformed vint");
}
}
}
}

Some files were not shown because too many files have changed in this diff