Compare commits

...

31 Commits

Author SHA1 Message Date
Adam Hathcock
8bfc9ef4de Update for 0.25.1 2020-05-22 13:46:36 +01:00
Adam Hathcock
fa949e089e Merge pull request #512 from adamhathcock/fix-codepages
Attempt Windows reference fix
2020-05-22 13:44:10 +01:00
Adam Hathcock
c296ca7660 Merge pull request #513 from RealOrko/symbolic-link-default-write
Added default implementation with warning for symbolic links
2020-05-22 13:43:56 +01:00
RealOrko
538b38869f Added a warning for the writing of symbolic links with a link to the original GitHub issue for the DOTNET runtime 2020-05-21 13:00:25 +01:00
Adam Hathcock
ce328ed90b Merge branch 'master' into fix-codepages 2020-05-15 08:28:25 +01:00
Adam Hathcock
632bae725d Add braces for clarity 2020-05-14 13:47:21 +01:00
Adam Hathcock
4f824b1d9a Add build flags for Core targets 2020-05-14 13:16:00 +01:00
Adam Hathcock
120aee8039 See if windows reference is fixed 2020-05-14 13:09:17 +01:00
Adam Hathcock
3b2b341c4d Merge pull request #508 from turbedi/minor_optimizations
Minor optimizations
2020-04-11 09:08:07 +01:00
Berkan Diler
4cad40f637 Minor string optimizations 2020-04-10 12:33:40 +02:00
Berkan Diler
2c64380019 Use 3 argument Array.Copy when possible 2020-04-10 12:06:38 +02:00
Berkan Diler
ccb9593de2 Replace Span<T>.Fill(0) with Span<T>.Clear() 2020-04-10 12:03:32 +02:00
Berkan Diler
921a99fc32 Replace static readonly byte[] fields with static ReadOnlySpan<byte> properties 2020-04-10 11:54:58 +02:00
Adam Hathcock
400d2c1774 Fix usings and add braces for better merging 2020-04-03 08:47:30 +01:00
Adam Hathcock
762497b1c1 Tag for 0.25.0 and update packages 2020-04-03 08:25:43 +01:00
Adam Hathcock
be9edc7512 Merge pull request #500 from Erior/Issue_86
ZipReader/StreamingZipReaderFactory fails for archive entries which are uncompressed files in ZIP format #86
2020-01-17 09:38:19 +00:00
Lars Vahlenberg
9bf9d34d94 Issue86 Proposal 2020-01-16 22:08:48 +01:00
Adam Hathcock
df8405006c Fix workflow name 2020-01-03 09:24:08 +00:00
Adam Hathcock
d135fdce58 Give github actions build a name and use badge 2020-01-03 09:22:51 +00:00
Adam Hathcock
ba570b93bb Merge pull request #496 from Bond-009/allocations
Reduce the amount of allocations
2020-01-03 09:18:17 +00:00
Adam Hathcock
6dfe0c7a96 Merge branch 'master' into allocations 2020-01-03 09:16:46 +00:00
Adam Hathcock
73d4430a65 Merge pull request #498 from adamhathcock/build-netcore3
Build netcore3
2020-01-03 09:15:14 +00:00
Adam Hathcock
ce6fd9b976 Just one target 2020-01-03 09:12:10 +00:00
Adam Hathcock
ae7e8c03f2 Put wrong SDK 2020-01-03 09:07:34 +00:00
Adam Hathcock
22e2526f4c Update cake and dependencies 2020-01-03 09:06:13 +00:00
Adam Hathcock
50283d9411 Add new build targets for netcore3 2020-01-03 09:02:04 +00:00
Bond-009
d2c2b58f3b Fix language version and add netstandard2.1 2020-01-02 17:43:58 +01:00
Bond_009
50d4b39ca0 Fix test 2019-12-30 22:17:45 +01:00
Bond_009
1ed675e960 Minor improvement 2019-12-30 19:19:05 +01:00
Bond_009
80b0671844 Reduce the amount of allocations
* Directly fill an array instead of filling a List and copying that to
an array
* Use own buffer when writing bytes to a stream
* Remove DataConverter class, replaced by BinaryPrimitives
2019-12-30 18:58:25 +01:00
Bond-009
6f387336c0 Use functions from System.Memory instead of self-made ones (#495)
* Use functions from System.Memory instead of self-made ones

* Update SharpCompress.Test.csproj
2019-12-30 15:19:46 +00:00
114 changed files with 1349 additions and 1940 deletions

View File

@@ -1,16 +0,0 @@
version: 2
jobs:
build:
docker:
- image: microsoft/dotnet:2.2.104-sdk
steps:
- checkout
- run:
name: Install Cake
command: |
dotnet tool install -g Cake.Tool
echo 'export PATH=$PATH:/root/.dotnet/tools' >> $BASH_ENV
source $BASH_ENV
- run:
name: Build
command: dotnet cake build.cake

17
.github/workflows/dotnetcore.yml vendored Normal file
View File

@@ -0,0 +1,17 @@
name: SharpCompress
on: [push]
jobs:
build:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [windows-latest, ubuntu-latest]
steps:
- uses: actions/checkout@v1
- uses: actions/setup-dotnet@v1
with:
dotnet-version: 3.1.202
- name: Run the Cake script
uses: ecampidoglio/cake-action@master

View File

@@ -7,8 +7,8 @@ The major feature is support for non-seekable streams so large files can be proc
AppVeyor Build -
[![Build status](https://ci.appveyor.com/api/projects/status/voxg971oemmvxh1e/branch/master?svg=true)](https://ci.appveyor.com/project/adamhathcock/sharpcompress/branch/master)
Circle CI Build -
[![CircleCI](https://circleci.com/gh/adamhathcock/sharpcompress.svg?style=svg)](https://circleci.com/gh/adamhathcock/sharpcompress)
GitHub Actions Build -
[![GitHubActions](https://github.com/adamhathcock/sharpcompress/workflows/SharpCompress/badge.svg)](https://circleci.com/gh/adamhathcock/sharpcompress)
## Need Help?

View File

@@ -126,4 +126,7 @@
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EAddAccessorOwnerDeclarationBracesMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002ECSharpPlaceAttributeOnSameLineMigration/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateBlankLinesAroundFieldToBlankLinesAroundProperty/@EntryIndexedValue">True</s:Boolean>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateThisQualifierSettings/@EntryIndexedValue">True</s:Boolean></wpf:ResourceDictionary>
<s:Boolean x:Key="/Default/Environment/SettingsMigration/IsMigratorApplied/=JetBrains_002EReSharper_002EPsi_002ECSharp_002ECodeStyle_002ESettingsUpgrade_002EMigrateThisQualifierSettings/@EntryIndexedValue">True</s:Boolean>
<s:String x:Key="/Default/Environment/UnitTesting/UnitTestSessionStore/Sessions/=6af8f80e_002D9fdd_002D4223_002D8e02_002D473db916f9b2/@EntryIndexedValue">&lt;SessionState ContinuousTestingIsOn="False" ContinuousTestingMode="0" FrameworkVersion="{x:Null}" IsLocked="False" Name="All tests from Solution" PlatformMonoPreference="{x:Null}" PlatformType="{x:Null}" xmlns="urn:schemas-jetbrains-com:jetbrains-ut-session" xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"&gt;
&lt;Solution /&gt;
&lt;/SessionState&gt;</s:String></wpf:ResourceDictionary>

View File

@@ -1,5 +1,5 @@
version: '{build}'
image: Visual Studio 2017
image: Visual Studio 2019
pull_requests:
do_not_increment_build_number: true

View File

@@ -17,7 +17,7 @@ Task("Build")
{
c.SetConfiguration("Release")
.SetVerbosity(Verbosity.Minimal)
.UseToolVersion(MSBuildToolVersion.VS2017);
.UseToolVersion(MSBuildToolVersion.VS2019);
});
}
else
@@ -33,6 +33,9 @@ Task("Build")
settings.Framework = "netstandard2.0";
DotNetCoreBuild("./src/SharpCompress/SharpCompress.csproj", settings);
settings.Framework = "netstandard2.1";
DotNetCoreBuild("./src/SharpCompress/SharpCompress.csproj", settings);
}
});
@@ -46,7 +49,7 @@ Task("Test")
var settings = new DotNetCoreTestSettings
{
Configuration = "Release",
Framework = "netcoreapp2.2"
Framework = "netcoreapp3.1"
};
DotNetCoreTest(file.ToString(), settings);
}
@@ -61,7 +64,7 @@ Task("Pack")
MSBuild("src/SharpCompress/SharpCompress.csproj", c => c
.SetConfiguration("Release")
.SetVerbosity(Verbosity.Minimal)
.UseToolVersion(MSBuildToolVersion.VS2017)
.UseToolVersion(MSBuildToolVersion.VS2019)
.WithProperty("NoBuild", "true")
.WithTarget("Pack"));
}

View File

@@ -23,7 +23,7 @@ namespace SharpCompress.Archives
protected ReaderOptions ReaderOptions { get; }
private bool disposed;
internal AbstractArchive(ArchiveType type, FileInfo fileInfo, ReaderOptions readerOptions)
{
Type = type;
@@ -140,12 +140,12 @@ namespace SharpCompress.Archives
/// <summary>
/// Use this method to extract all entries in an archive in order.
/// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
/// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
/// extracted sequentially for the best performance.
///
///
/// This method will load all entry information from the archive.
///
/// WARNING: this will reuse the underlying stream for the archive. Errors may
///
/// WARNING: this will reuse the underlying stream for the archive. Errors may
/// occur if this is used at the same time as other extraction methods on this instance.
/// </summary>
/// <returns></returns>
@@ -174,4 +174,4 @@ namespace SharpCompress.Archives
}
}
}
}
}

View File

@@ -142,4 +142,4 @@ namespace SharpCompress.Archives
modifiedEntries.Cast<Entry>().ForEach(x => x.Close());
}
}
}
}

View File

@@ -129,13 +129,13 @@ namespace SharpCompress.Archives.SevenZip
}
}
private static readonly byte[] SIGNATURE = {(byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C};
private static ReadOnlySpan<byte> SIGNATURE => new byte[] {(byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C};
private static bool SignatureMatch(Stream stream)
{
BinaryReader reader = new BinaryReader(stream);
byte[] signatureBytes = reader.ReadBytes(6);
return signatureBytes.BinaryEquals(SIGNATURE);
ReadOnlySpan<byte> signatureBytes = reader.ReadBytes(6);
return signatureBytes.SequenceEqual(SIGNATURE);
}
protected override IReader CreateReaderForSolidExtraction()
@@ -201,7 +201,7 @@ namespace SharpCompress.Archives.SevenZip
return CreateEntryStream(new ReadOnlySubStream(currentStream, currentItem.Size));
}
}
private class PasswordProvider : IPasswordProvider
{
private readonly string _password;
@@ -209,7 +209,6 @@ namespace SharpCompress.Archives.SevenZip
public PasswordProvider(string password)
{
_password = password;
}
public string CryptoGetTextPassword()

View File

@@ -5,7 +5,6 @@ using System.Runtime.CompilerServices;
[assembly: AssemblyTitle("SharpCompress")]
[assembly: AssemblyProduct("SharpCompress")]
[assembly: InternalsVisibleTo("SharpCompress.Test" + SharpCompress.AssemblyInfo.PublicKeySuffix)]
[assembly: InternalsVisibleTo("SharpCompress.Test.Portable" + SharpCompress.AssemblyInfo.PublicKeySuffix)]
[assembly: CLSCompliant(true)]
namespace SharpCompress

View File

@@ -32,10 +32,12 @@ namespace SharpCompress.Common
Password = Encoding.GetEncoding(437);
}
#if NETSTANDARD1_3 || NETSTANDARD2_0 || NETSTANDARD2_1
static ArchiveEncoding()
{
Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
}
#endif
public string Decode(byte[] bytes)
{
@@ -67,4 +69,4 @@ namespace SharpCompress.Common
return CustomDecoder ?? ((bytes, index, count) => GetEncoding().GetString(bytes, index, count));
}
}
}
}

View File

@@ -1,4 +1,6 @@
namespace SharpCompress.Common
using System;
namespace SharpCompress.Common
{
public class ExtractionOptions
{
@@ -29,6 +31,10 @@
/// </summary>
public delegate void SymbolicLinkWriterDelegate(string sourcePath, string targetPath);
public SymbolicLinkWriterDelegate WriteSymbolicLink;
public SymbolicLinkWriterDelegate WriteSymbolicLink =
(sourcePath, targetPath) =>
{
Console.WriteLine($"Could not write symlink {sourcePath} -> {targetPath}, for more information please see https://github.com/dotnet/runtime/issues/24271");
};
}
}

View File

@@ -1,7 +1,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
namespace SharpCompress.Common.GZip
{

View File

@@ -1,11 +1,10 @@
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Converters;
using System.Text;
namespace SharpCompress.Common.GZip
{
@@ -60,7 +59,7 @@ namespace SharpCompress.Common.GZip
throw new ZlibException("Bad GZIP header.");
}
Int32 timet = DataConverter.LittleEndian.GetInt32(header, 4);
int timet = BinaryPrimitives.ReadInt32LittleEndian(header.AsSpan(4));
DateModified = TarHeader.EPOCH.AddSeconds(timet);
if ((header[3] & 0x04) == 0x04)
{
@@ -69,7 +68,7 @@ namespace SharpCompress.Common.GZip
Int16 extraLength = (Int16)(header[0] + header[1] * 256);
byte[] extra = new byte[extraLength];
if (!stream.ReadFully(extra))
{
throw new ZlibException("Unexpected end-of-file reading GZIP header.");
@@ -117,4 +116,4 @@ namespace SharpCompress.Common.GZip
return ArchiveEncoding.Decode(buffer);
}
}
}
}

View File

@@ -1,5 +1,4 @@
using System.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common
{

View File

@@ -7,8 +7,10 @@ namespace SharpCompress.Common.Rar.Headers
public AvHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Av)
{
if (IsRar5)
if (IsRar5)
{
throw new InvalidFormatException("unexpected rar5 record");
}
}
protected override void ReadFinish(MarkingBinaryReader reader)

View File

@@ -38,7 +38,11 @@ namespace SharpCompress.Common.Rar.Headers
private void ReadLocator(MarkingBinaryReader reader) {
var size = reader.ReadRarVIntUInt16();
var type = reader.ReadRarVIntUInt16();
if (type != 1) throw new InvalidFormatException("expected locator record");
if (type != 1)
{
throw new InvalidFormatException("expected locator record");
}
var flags = reader.ReadRarVIntUInt16();
const ushort hasQuickOpenOffset = 0x01;
const ushort hasRecoveryOffset = 0x02;

View File

@@ -7,7 +7,10 @@ namespace SharpCompress.Common.Rar.Headers
protected CommentHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Comment)
{
if (IsRar5) throw new InvalidFormatException("unexpected rar5 record");
if (IsRar5)
{
throw new InvalidFormatException("unexpected rar5 record");
}
}
protected override void ReadFinish(MarkingBinaryReader reader)

View File

@@ -46,19 +46,38 @@ namespace SharpCompress.Common.Rar.Headers
if (b == 0x61)
{
b = GetByte(stream); start++;
if (b != 0x72) continue;
if (b != 0x72)
{
continue;
}
b = GetByte(stream); start++;
if (b != 0x21) continue;
if (b != 0x21)
{
continue;
}
b = GetByte(stream); start++;
if (b != 0x1a) continue;
if (b != 0x1a)
{
continue;
}
b = GetByte(stream); start++;
if (b != 0x07) continue;
if (b != 0x07)
{
continue;
}
b = GetByte(stream); start++;
if (b == 1)
{
b = GetByte(stream); start++;
if (b != 0) continue;
if (b != 0)
{
continue;
}
return new MarkHeader(true); // Rar5
}
else if (b == 0)
@@ -69,9 +88,17 @@ namespace SharpCompress.Common.Rar.Headers
else if (b == 0x45)
{
b = GetByte(stream); start++;
if (b != 0x7e) continue;
if (b != 0x7e)
{
continue;
}
b = GetByte(stream); start++;
if (b != 0x5e) continue;
if (b != 0x5e)
{
continue;
}
throw new InvalidFormatException("Rar format version pre-4 is unsupported.");
}
}

View File

@@ -8,7 +8,10 @@ namespace SharpCompress.Common.Rar.Headers
public ProtectHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Protect)
{
if (IsRar5) throw new InvalidFormatException("unexpected rar5 record");
if (IsRar5)
{
throw new InvalidFormatException("unexpected rar5 record");
}
}
protected override void ReadFinish(MarkingBinaryReader reader)

View File

@@ -1,4 +1,3 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.IO;

View File

@@ -7,7 +7,10 @@ namespace SharpCompress.Common.Rar.Headers
protected SignHeader(RarHeader header, RarCrcBinaryReader reader)
: base(header, reader, HeaderType.Sign)
{
if (IsRar5) throw new InvalidFormatException("unexpected rar5 record");
if (IsRar5)
{
throw new InvalidFormatException("unexpected rar5 record");
}
}
protected override void ReadFinish(MarkingBinaryReader reader)

View File

@@ -81,7 +81,9 @@ namespace SharpCompress.Common.Rar
byte[] cipherText = ReadBytesNoCrc(16);
var readBytes = _rijndael.ProcessBlock(cipherText);
foreach (var readByte in readBytes)
{
_data.Enqueue(readByte);
}
}
}

View File

@@ -50,20 +50,23 @@ namespace SharpCompress.Common.Rar
if (sizeToRead > 0)
{
int alignedSize = sizeToRead + ((~sizeToRead + 1) & 0xf);
byte[] cipherText = new byte[RarRijndael.CRYPTO_BLOCK_SIZE];
for (int i = 0; i < alignedSize / 16; i++)
{
//long ax = System.currentTimeMillis();
byte[] cipherText = new byte[RarRijndael.CRYPTO_BLOCK_SIZE];
_actualStream.Read(cipherText, 0, RarRijndael.CRYPTO_BLOCK_SIZE);
var readBytes = _rijndael.ProcessBlock(cipherText);
foreach (var readByte in readBytes)
{
_data.Enqueue(readByte);
}
}
for (int i = 0; i < count; i++)
{
buffer[offset + i] = _data.Dequeue();
}
}
return count;
}
@@ -93,4 +96,4 @@ namespace SharpCompress.Common.Rar
base.Dispose(disposing);
}
}
}
}

View File

@@ -1,5 +1,4 @@
using System;
using System.Collections.Generic;
using System.Security.Cryptography;
using System.Text;
using SharpCompress.Crypto;
@@ -89,19 +88,20 @@ namespace SharpCompress.Common.Rar
public byte[] ProcessBlock(byte[] cipherText)
{
var plainText = new byte[CRYPTO_BLOCK_SIZE];
var decryptedBytes = new List<byte>();
byte[] decryptedBytes = new byte[CRYPTO_BLOCK_SIZE];
_rijndael.ProcessBlock(cipherText, 0, plainText, 0);
for (int j = 0; j < plainText.Length; j++)
for (int j = 0; j < CRYPTO_BLOCK_SIZE; j++)
{
decryptedBytes.Add((byte) (plainText[j] ^ _aesInitializationVector[j%16])); //32:114, 33:101
decryptedBytes[j] = (byte)(plainText[j] ^ _aesInitializationVector[j % 16]); //32:114, 33:101
}
for (int j = 0; j < _aesInitializationVector.Length; j++)
{
_aesInitializationVector[j] = cipherText[j];
}
return decryptedBytes.ToArray();
return decryptedBytes;
}
public void Dispose()

View File

@@ -22,7 +22,7 @@ namespace SharpCompress.Common.SevenZip
internal List<long> _packStreamStartPositions = new List<long>();
internal List<int> _folderStartFileIndex = new List<int>();
internal List<int> _fileIndexToFolderIndexMap = new List<int>();
internal IPasswordProvider PasswordProvider { get; }
public ArchiveDatabase(IPasswordProvider passwordProvider)
@@ -152,13 +152,14 @@ namespace SharpCompress.Common.SevenZip
{
int packStreamIndex = folder._firstPackStreamId;
long folderStartPackPos = GetFolderStreamPos(folder, 0);
List<long> packSizes = new List<long>();
for (int j = 0; j < folder._packStreams.Count; j++)
int count = folder._packStreams.Count;
long[] packSizes = new long[count];
for (int j = 0; j < count; j++)
{
packSizes.Add(_packSizes[packStreamIndex + j]);
packSizes[j] = _packSizes[packStreamIndex + j];
}
return DecoderStreamHelper.CreateDecoderStream(stream, folderStartPackPos, packSizes.ToArray(), folder, pw);
return DecoderStreamHelper.CreateDecoderStream(stream, folderStartPackPos, packSizes, folder, pw);
}
private long GetFolderPackStreamSize(int folderIndex, int streamIndex)
@@ -179,4 +180,4 @@ namespace SharpCompress.Common.SevenZip
return 0;
}
}
}
}

View File

@@ -1449,13 +1449,14 @@ namespace SharpCompress.Common.SevenZip
CFolder folderInfo = db._folders[folderIndex];
int packStreamIndex = db._folders[folderIndex]._firstPackStreamId;
long folderStartPackPos = db.GetFolderStreamPos(folderInfo, 0);
List<long> packSizes = new List<long>();
for (int j = 0; j < folderInfo._packStreams.Count; j++)
var count = folderInfo._packStreams.Count;
long[] packSizes = new long[count];
for (int j = 0; j < count; j++)
{
packSizes.Add(db._packSizes[packStreamIndex + j]);
packSizes[j] = db._packSizes[packStreamIndex + j];
}
s = DecoderStreamHelper.CreateDecoderStream(_stream, folderStartPackPos, packSizes.ToArray(), folderInfo,
s = DecoderStreamHelper.CreateDecoderStream(_stream, folderStartPackPos, packSizes, folderInfo,
db.PasswordProvider);
_cachedStreams.Add(folderIndex, s);
}
@@ -1553,15 +1554,16 @@ namespace SharpCompress.Common.SevenZip
int packStreamIndex = db._folders[folderIndex]._firstPackStreamId;
long folderStartPackPos = db.GetFolderStreamPos(folderInfo, 0);
List<long> packSizes = new List<long>();
for (int j = 0; j < folderInfo._packStreams.Count; j++)
var count = folderInfo._packStreams.Count;
long[] packSizes = new long[count];
for (int j = 0; j < count; j++)
{
packSizes.Add(db._packSizes[packStreamIndex + j]);
packSizes[j] = db._packSizes[packStreamIndex + j];
}
// TODO: If the decoding fails the last file may be extracted incompletely. Delete it?
Stream s = DecoderStreamHelper.CreateDecoderStream(_stream, folderStartPackPos, packSizes.ToArray(),
Stream s = DecoderStreamHelper.CreateDecoderStream(_stream, folderStartPackPos, packSizes,
folderInfo, db.PasswordProvider);
byte[] buffer = new byte[4 << 10];
for (;;)

View File

@@ -1,5 +1,4 @@
using System.IO;
using SharpCompress.Archives;
using SharpCompress.Readers;
namespace SharpCompress.Common.SevenZip

View File

@@ -1,7 +1,7 @@
using System;
using System.Buffers.Binary;
using System.IO;
using System.Text;
using SharpCompress.Converters;
namespace SharpCompress.Common.Tar.Headers
{
@@ -49,7 +49,7 @@ namespace SharpCompress.Common.Tar.Headers
}
else
{
WriteStringBytes(ArchiveEncoding.Encode(Name), buffer, 0, 100);
WriteStringBytes(ArchiveEncoding.Encode(Name), buffer, 100);
WriteOctalBytes(Size, buffer, 124, 12);
var time = (long)(LastModifiedTime.ToUniversalTime() - EPOCH).TotalSeconds;
WriteOctalBytes(time, buffer, 136, 12);
@@ -57,11 +57,10 @@ namespace SharpCompress.Common.Tar.Headers
if (Size >= 0x1FFFFFFFF)
{
byte[] bytes = DataConverter.BigEndian.GetBytes(Size);
var bytes12 = new byte[12];
bytes.CopyTo(bytes12, 12 - bytes.Length);
Span<byte> bytes12 = stackalloc byte[12];
BinaryPrimitives.WriteInt64BigEndian(bytes12.Slice(4), Size);
bytes12[0] |= 0x80;
bytes12.CopyTo(buffer, 124);
bytes12.CopyTo(buffer.AsSpan(124));
}
}
@@ -176,8 +175,9 @@ namespace SharpCompress.Common.Tar.Headers
{
if ((buffer[124] & 0x80) == 0x80) // if size in binary
{
return DataConverter.BigEndian.GetInt64(buffer, 0x80);
return BinaryPrimitives.ReadInt64BigEndian(buffer.AsSpan(0x80));
}
return ReadAsciiInt64Base8(buffer, 124, 11);
}
@@ -192,15 +192,11 @@ namespace SharpCompress.Common.Tar.Headers
return buffer;
}
private static void WriteStringBytes(byte[] name, byte[] buffer, int offset, int length)
private static void WriteStringBytes(ReadOnlySpan<byte> name, Span<byte> buffer, int length)
{
name.CopyTo(buffer);
int i = Math.Min(length, name.Length);
Buffer.BlockCopy(name, 0, buffer, offset, i);
// if Span<byte>.Fill can be used, it is more efficient
for (; i < length; ++i)
{
buffer[offset + i] = 0;
}
buffer.Slice(i, length - i).Clear();
}
private static void WriteStringBytes(string name, byte[] buffer, int offset, int length)

View File

@@ -3,7 +3,6 @@ using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
using System.Text;
namespace SharpCompress.Common.Tar
{

View File

@@ -2,7 +2,6 @@
using System.IO;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
using System.Text;
namespace SharpCompress.Common.Tar
{

View File

@@ -1,6 +1,5 @@
using System.IO;
using System.Linq;
using System.Text;
namespace SharpCompress.Common.Zip.Headers
{

View File

@@ -1,5 +1,4 @@
using System;
using System.IO;
using System.IO;
namespace SharpCompress.Common.Zip.Headers
{

View File

@@ -1,6 +1,5 @@
using System.IO;
using System.Linq;
using System.Text;
namespace SharpCompress.Common.Zip.Headers
{

View File

@@ -1,6 +1,6 @@
using System;
using System.Buffers.Binary;
using System.Text;
using SharpCompress.Converters;
namespace SharpCompress.Common.Zip.Headers
{
@@ -76,34 +76,34 @@ namespace SharpCompress.Common.Zip.Headers
switch (DataBytes.Length)
{
case 4:
VolumeNumber = DataConverter.LittleEndian.GetUInt32(DataBytes, 0);
VolumeNumber = BinaryPrimitives.ReadUInt32LittleEndian(DataBytes);
return;
case 8:
RelativeOffsetOfEntryHeader = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 0);
RelativeOffsetOfEntryHeader = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
return;
case 12:
RelativeOffsetOfEntryHeader = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 0);
VolumeNumber = DataConverter.LittleEndian.GetUInt32(DataBytes, 8);
RelativeOffsetOfEntryHeader = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
VolumeNumber = BinaryPrimitives.ReadUInt32LittleEndian(DataBytes.AsSpan(8));
return;
case 16:
UncompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 0);
CompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 8);
UncompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
CompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(8));
return;
case 20:
UncompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 0);
CompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 8);
VolumeNumber = DataConverter.LittleEndian.GetUInt32(DataBytes, 16);
UncompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
CompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(8));
VolumeNumber = BinaryPrimitives.ReadUInt32LittleEndian(DataBytes.AsSpan(16));
return;
case 24:
UncompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 0);
CompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 8);
RelativeOffsetOfEntryHeader = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 16);
UncompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
CompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(8));
RelativeOffsetOfEntryHeader = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(16));
return;
case 28:
UncompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 0);
CompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 8);
RelativeOffsetOfEntryHeader = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 16);
VolumeNumber = DataConverter.LittleEndian.GetUInt32(DataBytes, 24);
UncompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
CompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(8));
RelativeOffsetOfEntryHeader = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(16));
VolumeNumber = BinaryPrimitives.ReadUInt32LittleEndian(DataBytes.AsSpan(24));
return;
default:
throw new ArchiveException("Unexpected size of of Zip64 extended information extra field");
@@ -132,7 +132,7 @@ namespace SharpCompress.Common.Zip.Headers
case ExtraDataType.Zip64ExtendedInformationExtraField:
return new Zip64ExtendedInformationExtraField
(
type,
type,
length,
extraData
);
@@ -146,4 +146,4 @@ namespace SharpCompress.Common.Zip.Headers
}
}
}
}
}

View File

@@ -1,5 +1,4 @@
using System;
using System.IO;
using System.IO;
namespace SharpCompress.Common.Zip.Headers
{

View File

@@ -1,8 +1,7 @@
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.Text;
using SharpCompress.Converters;
namespace SharpCompress.Common.Zip.Headers
{
@@ -30,7 +29,7 @@ namespace SharpCompress.Common.Zip.Headers
&& Name.EndsWith("\\");
}
}
internal Stream PackedStream { get; set; }
internal ArchiveEncoding ArchiveEncoding { get; }
@@ -65,7 +64,7 @@ namespace SharpCompress.Common.Zip.Headers
return encryptionData;
}
internal WinzipAesEncryptionData WinzipAesEncryptionData { get; set; }
internal ushort LastModifiedDate { get; set; }
@@ -78,13 +77,13 @@ namespace SharpCompress.Common.Zip.Headers
{
for (int i = 0; i < extra.Length - 4;)
{
ExtraDataType type = (ExtraDataType)DataConverter.LittleEndian.GetUInt16(extra, i);
ExtraDataType type = (ExtraDataType)BinaryPrimitives.ReadUInt16LittleEndian(extra.AsSpan(i));
if (!Enum.IsDefined(typeof(ExtraDataType), type))
{
type = ExtraDataType.NotImplementedExtraData;
}
ushort length = DataConverter.LittleEndian.GetUInt16(extra, i + 2);
ushort length = BinaryPrimitives.ReadUInt16LittleEndian(extra.AsSpan(i + 2));
// 7zip has this same kind of check to ignore extras blocks that don't conform to the standard 2-byte ID, 2-byte length, N-byte value.
// CPP/7Zip/Zip/ZipIn.cpp: CInArchive::ReadExtra

View File

@@ -1,5 +1,4 @@
using System;
using System.Text;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Compressors.Deflate;

View File

@@ -3,7 +3,6 @@ using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
using System.Text;
namespace SharpCompress.Common.Zip
{
@@ -35,7 +34,9 @@ namespace SharpCompress.Common.Zip
stream.Seek(zip64Locator.RelativeOffsetOfTheEndOfDirectoryRecord, SeekOrigin.Begin);
uint zip64Signature = reader.ReadUInt32();
if (zip64Signature != ZIP64_END_OF_CENTRAL_DIRECTORY)
{
throw new ArchiveException("Failed to locate the Zip64 Header");
}
var zip64Entry = new Zip64DirectoryEndHeader();
zip64Entry.Read(reader);
@@ -55,7 +56,9 @@ namespace SharpCompress.Common.Zip
position = stream.Position;
if (nextHeader == null)
{
yield break;
}
if (nextHeader is DirectoryEntryHeader entryHeader)
{

View File

@@ -2,7 +2,6 @@
using System.IO;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
using System.Text;
namespace SharpCompress.Common.Zip
{
@@ -54,14 +53,30 @@ namespace SharpCompress.Common.Zip
//entry could be zero bytes so we need to know that.
if (header.ZipHeaderType == ZipHeaderType.LocalEntry)
{
bool isRecording = rewindableStream.IsRecording;
if (!isRecording)
var local_header = ((LocalEntryHeader)header);
// If we have CompressedSize, there is data to be read
if( local_header.CompressedSize > 0 )
{
rewindableStream.StartRecording();
header.HasData = true;
} // Check if zip is streaming ( Length is 0 and is declared in PostDataDescriptor )
else if( local_header.Flags.HasFlag(HeaderFlags.UsePostDataDescriptor) )
{
bool isRecording = rewindableStream.IsRecording;
if (!isRecording)
{
rewindableStream.StartRecording();
}
uint nextHeaderBytes = reader.ReadUInt32();
// Check if next data is PostDataDescriptor, streamed file with 0 length
header.HasData = !IsHeader(nextHeaderBytes);
rewindableStream.Rewind(!isRecording);
}
else // We are not streaming and compressed size is 0, we have no data
{
header.HasData = false;
}
uint nextHeaderBytes = reader.ReadUInt32();
header.HasData = !IsHeader(nextHeaderBytes);
rewindableStream.Rewind(!isRecording);
}
yield return header;
}

View File

@@ -1,7 +1,7 @@
using System;
using System.Buffers.Binary;
using System.IO;
using System.Security.Cryptography;
using SharpCompress.Converters;
namespace SharpCompress.Common.Zip
{
@@ -118,7 +118,7 @@ namespace SharpCompress.Common.Zip
: bytesRemaining;
// update the counter
DataConverter.LittleEndian.PutBytes(_counter, 0, _nonce++);
BinaryPrimitives.WriteInt32LittleEndian(_counter, _nonce++);
// Determine if this is the final block
if ((bytesToRead == bytesRemaining) && (_totalBytesLeftToRead == 0))

View File

@@ -1,6 +1,6 @@
using System;
using System.Buffers.Binary;
using System.Security.Cryptography;
using SharpCompress.Converters;
namespace SharpCompress.Common.Zip
{
@@ -62,10 +62,10 @@ namespace SharpCompress.Common.Zip
IvBytes = rfc2898.GetBytes(KeySizeInBytes);
_generatedVerifyValue = rfc2898.GetBytes(2);
short verify = DataConverter.LittleEndian.GetInt16(_passwordVerifyValue, 0);
short verify = BinaryPrimitives.ReadInt16LittleEndian(_passwordVerifyValue);
if (_password != null)
{
short generated = DataConverter.LittleEndian.GetInt16(_generatedVerifyValue, 0);
short generated = BinaryPrimitives.ReadInt16LittleEndian(_generatedVerifyValue);
if (verify != generated)
{
throw new InvalidFormatException("bad password");

View File

@@ -1,4 +1,5 @@
using System;
using System.Buffers.Binary;
using System.IO;
using System.Linq;
using SharpCompress.Common.Zip.Headers;
@@ -8,7 +9,6 @@ using SharpCompress.Compressors.Deflate;
using SharpCompress.Compressors.Deflate64;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.PPMd;
using SharpCompress.Converters;
using SharpCompress.IO;
namespace SharpCompress.Common.Zip
@@ -108,19 +108,19 @@ namespace SharpCompress.Common.Zip
{
throw new InvalidFormatException("Winzip data length is not 7.");
}
ushort compressedMethod = DataConverter.LittleEndian.GetUInt16(data.DataBytes, 0);
ushort compressedMethod = BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes);
if (compressedMethod != 0x01 && compressedMethod != 0x02)
{
throw new InvalidFormatException("Unexpected vendor version number for WinZip AES metadata");
}
ushort vendorId = DataConverter.LittleEndian.GetUInt16(data.DataBytes, 2);
ushort vendorId = BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes.AsSpan(2));
if (vendorId != 0x4541)
{
throw new InvalidFormatException("Unexpected vendor ID for WinZip AES metadata");
}
return CreateDecompressionStream(stream, (ZipCompressionMethod)DataConverter.LittleEndian.GetUInt16(data.DataBytes, 5));
return CreateDecompressionStream(stream, (ZipCompressionMethod)BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes.AsSpan(5)));
}
default:
{
@@ -142,7 +142,7 @@ namespace SharpCompress.Common.Zip
&& FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor))
|| Header.IsZip64)
{
plainStream = new NonDisposingStream(plainStream); //make sure AES doesn't close
plainStream = new NonDisposingStream(plainStream); //make sure AES doesn't close
}
else
{
@@ -182,4 +182,4 @@ namespace SharpCompress.Common.Zip
return plainStream;
}
}
}
}

View File

@@ -3,7 +3,6 @@ using System.IO;
using System.Linq;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
using System.Text;
namespace SharpCompress.Common.Zip
{

View File

@@ -65,16 +65,16 @@ namespace SharpCompress.Compressors.ADC
}
}
private static int GetOffset(byte[] chunk, int position)
private static int GetOffset(ReadOnlySpan<byte> chunk)
{
switch (GetChunkType(chunk[position]))
switch (GetChunkType(chunk[0]))
{
case PLAIN:
return 0;
case TWO_BYTE:
return ((chunk[position] & 0x03) << 8) + chunk[position + 1];
return ((chunk[0] & 0x03) << 8) + chunk[1];
case THREE_BYTE:
return (chunk[position + 1] << 8) + chunk[position + 2];
return (chunk[1] << 8) + chunk[2];
default:
return -1;
}
@@ -116,7 +116,7 @@ namespace SharpCompress.Compressors.ADC
byte[] buffer = new byte[bufferSize];
int outPosition = 0;
bool full = false;
MemoryStream tempMs;
Span<byte> temp = stackalloc byte[3];
while (position < input.Length)
{
@@ -142,11 +142,10 @@ namespace SharpCompress.Compressors.ADC
position += chunkSize + 1;
break;
case TWO_BYTE:
tempMs = new MemoryStream();
chunkSize = GetChunkSize((byte)readByte);
tempMs.WriteByte((byte)readByte);
tempMs.WriteByte((byte)input.ReadByte());
offset = GetOffset(tempMs.ToArray(), 0);
temp[0] = (byte)readByte;
temp[1] = (byte)input.ReadByte();
offset = GetOffset(temp);
if (outPosition + chunkSize > bufferSize)
{
full = true;
@@ -173,12 +172,11 @@ namespace SharpCompress.Compressors.ADC
}
break;
case THREE_BYTE:
tempMs = new MemoryStream();
chunkSize = GetChunkSize((byte)readByte);
tempMs.WriteByte((byte)readByte);
tempMs.WriteByte((byte)input.ReadByte());
tempMs.WriteByte((byte)input.ReadByte());
offset = GetOffset(tempMs.ToArray(), 0);
temp[0] = (byte)readByte;
temp[1] = (byte)input.ReadByte();
temp[2] = (byte)input.ReadByte();
offset = GetOffset(temp);
if (outPosition + chunkSize > bufferSize)
{
full = true;
@@ -213,8 +211,8 @@ namespace SharpCompress.Compressors.ADC
}
output = new byte[outPosition];
Array.Copy(buffer, 0, output, 0, outPosition);
Array.Copy(buffer, output, outPosition);
return position - start;
}
}
}
}

View File

@@ -27,9 +27,8 @@
// ------------------------------------------------------------------
using System;
using System.Buffers.Binary;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Converters;
using System.Text;
namespace SharpCompress.Compressors.Deflate
@@ -373,17 +372,16 @@ namespace SharpCompress.Compressors.Deflate
{
return;
}
if (_fileName.IndexOf("/") != -1)
if (_fileName.Contains("/"))
{
_fileName = _fileName.Replace("/", "\\");
_fileName = _fileName.Replace('/', '\\');
}
if (_fileName.EndsWith("\\"))
{
throw new InvalidOperationException("Illegal filename");
}
var index = _fileName.IndexOf("\\");
if (index != -1)
if (_fileName.Contains("\\"))
{
// trim any leading path
int length = _fileName.Length;
@@ -442,7 +440,7 @@ namespace SharpCompress.Compressors.Deflate
}
TimeSpan delta = LastModified.Value - UNIX_EPOCH;
var timet = (Int32)delta.TotalSeconds;
DataConverter.LittleEndian.PutBytes(header, i, timet);
BinaryPrimitives.WriteInt32LittleEndian(header.AsSpan(i), timet);
i += 4;
// xflg
@@ -476,4 +474,4 @@ namespace SharpCompress.Compressors.Deflate
return header.Length; // bytes written
}
}
}
}

View File

@@ -25,11 +25,10 @@
// ------------------------------------------------------------------
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Converters;
using System.Text;
namespace SharpCompress.Compressors.Deflate
@@ -244,10 +243,12 @@ namespace SharpCompress.Compressors.Deflate
if (_wantCompress)
{
// Emit the GZIP trailer: CRC32 and size mod 2^32
int c1 = crc.Crc32Result;
_stream.Write(DataConverter.LittleEndian.GetBytes(c1), 0, 4);
byte[] intBuf = new byte[4];
BinaryPrimitives.WriteInt32LittleEndian(intBuf, crc.Crc32Result);
_stream.Write(intBuf, 0, 4);
int c2 = (Int32)(crc.TotalBytesRead & 0x00000000FFFFFFFF);
_stream.Write(DataConverter.LittleEndian.GetBytes(c2), 0, 4);
BinaryPrimitives.WriteInt32LittleEndian(intBuf, c2);
_stream.Write(intBuf, 0, 4);
}
else
{
@@ -293,9 +294,9 @@ namespace SharpCompress.Compressors.Deflate
Array.Copy(_z.InputBuffer, _z.NextIn, trailer, 0, trailer.Length);
}
Int32 crc32_expected = DataConverter.LittleEndian.GetInt32(trailer, 0);
Int32 crc32_expected = BinaryPrimitives.ReadInt32LittleEndian(trailer);
Int32 crc32_actual = crc.Crc32Result;
Int32 isize_expected = DataConverter.LittleEndian.GetInt32(trailer, 4);
Int32 isize_expected = BinaryPrimitives.ReadInt32LittleEndian(trailer.AsSpan(4));
Int32 isize_actual = (Int32)(_z.TotalBytesOut & 0x00000000FFFFFFFF);
if (crc32_actual != crc32_expected)
@@ -446,7 +447,7 @@ namespace SharpCompress.Compressors.Deflate
throw new ZlibException("Bad GZIP header.");
}
Int32 timet = DataConverter.LittleEndian.GetInt32(header, 4);
Int32 timet = BinaryPrimitives.ReadInt32LittleEndian(header.AsSpan(4));
_GzipMtime = TarHeader.EPOCH.AddSeconds(timet);
totalBytesRead += n;
if ((header[3] & 0x04) == 0x04)
@@ -647,4 +648,4 @@ namespace SharpCompress.Compressors.Deflate
Undefined
}
}
}
}

View File

@@ -3,7 +3,6 @@
// See the LICENSE file in the project root for more information.
using SharpCompress.Common.Zip;
using SharpCompress.Compressors.Deflate;
using System;
using System.Diagnostics;
using System.IO;
@@ -23,11 +22,19 @@ namespace SharpCompress.Compressors.Deflate64
public Deflate64Stream(Stream stream, CompressionMode mode)
{
if (stream == null)
{
throw new ArgumentNullException(nameof(stream));
}
if (mode != CompressionMode.Decompress)
{
throw new NotImplementedException("Deflate64: this implementation only supports decompression");
}
if (!stream.CanRead)
{
throw new ArgumentException("Deflate64: input stream is not readable", nameof(stream));
}
InitializeInflater(stream, ZipCompressionMethod.Deflate64);
}
@@ -40,7 +47,9 @@ namespace SharpCompress.Compressors.Deflate64
Debug.Assert(stream != null);
Debug.Assert(method == ZipCompressionMethod.Deflate || method == ZipCompressionMethod.Deflate64);
if (!stream.CanRead)
{
throw new ArgumentException("Deflate64: input stream is not readable", nameof(stream));
}
_inflater = new InflaterManaged(method == ZipCompressionMethod.Deflate64);
@@ -152,22 +161,32 @@ namespace SharpCompress.Compressors.Deflate64
private void ValidateParameters(byte[] array, int offset, int count)
{
if (array == null)
{
throw new ArgumentNullException(nameof(array));
}
if (offset < 0)
{
throw new ArgumentOutOfRangeException(nameof(offset));
}
if (count < 0)
{
throw new ArgumentOutOfRangeException(nameof(count));
}
if (array.Length - offset < count)
{
throw new ArgumentException("Deflate64: invalid offset/count combination");
}
}
private void EnsureNotDisposed()
{
if (_stream == null)
{
ThrowStreamClosedException();
}
}
[MethodImpl(MethodImplOptions.NoInlining)]
@@ -179,7 +198,9 @@ namespace SharpCompress.Compressors.Deflate64
private void EnsureDecompressionMode()
{
if (_mode != CompressionMode.Decompress)
{
ThrowCannotReadFromDeflateManagedStreamException();
}
}
[MethodImpl(MethodImplOptions.NoInlining)]
@@ -191,7 +212,9 @@ namespace SharpCompress.Compressors.Deflate64
private void EnsureCompressionMode()
{
if (_mode != CompressionMode.Compress)
{
ThrowCannotWriteToDeflateManagedStreamException();
}
}
[MethodImpl(MethodImplOptions.NoInlining)]
@@ -209,10 +232,14 @@ namespace SharpCompress.Compressors.Deflate64
private void PurgeBuffers(bool disposing)
{
if (!disposing)
{
return;
}
if (_stream == null)
{
return;
}
Flush();
}

View File

@@ -208,7 +208,9 @@ namespace SharpCompress.Compressors.Deflate64
for (code = 0; code < 16; code++)
{
for (int n = 0; n < (1 << EXTRA_DISTANCE_BITS[code]); n++)
{
result[dist++] = (byte)code;
}
}
dist >>= 7; // from now on, all distances are divided by 128
@@ -216,7 +218,9 @@ namespace SharpCompress.Compressors.Deflate64
for (; code < NUM_DIST_BASE_CODES; code++)
{
for (int n = 0; n < (1 << (EXTRA_DISTANCE_BITS[code] - 7)); n++)
{
result[256 + dist++] = (byte)code;
}
}
return result;

View File

@@ -82,16 +82,24 @@ namespace SharpCompress.Compressors.Deflate64
{
byte[] literalTreeLength = new byte[MAX_LITERAL_TREE_ELEMENTS];
for (int i = 0; i <= 143; i++)
{
literalTreeLength[i] = 8;
}
for (int i = 144; i <= 255; i++)
{
literalTreeLength[i] = 9;
}
for (int i = 256; i <= 279; i++)
{
literalTreeLength[i] = 7;
}
for (int i = 280; i <= 287; i++)
{
literalTreeLength[i] = 8;
}
return literalTreeLength;
}
@@ -277,9 +285,14 @@ namespace SharpCompress.Compressors.Deflate64
{
symbol = -symbol;
if ((bitBuffer & mask) == 0)
{
symbol = _left[symbol];
}
else
{
symbol = _right[symbol];
}
mask <<= 1;
} while (symbol < 0);
}

View File

@@ -37,7 +37,7 @@ namespace SharpCompress.Compressors.Deflate64
// const tables used in decoding:
// Extra bits for length code 257 - 285.
private static readonly byte[] S_EXTRA_LENGTH_BITS =
private static ReadOnlySpan<byte> S_EXTRA_LENGTH_BITS => new byte[]
{ 0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,16 };
// The base length for length code 257 - 285.
@@ -51,9 +51,9 @@ namespace SharpCompress.Compressors.Deflate64
{ 1,2,3,4,5,7,9,13,17,25,33,49,65,97,129,193,257,385,513,769,1025,1537,2049,3073,4097,6145,8193,12289,16385,24577,32769,49153 };
// code lengths for code length alphabet is stored in following order
private static readonly byte[] S_CODE_ORDER = { 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 };
private static ReadOnlySpan<byte> S_CODE_ORDER => new byte[] { 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 };
private static readonly byte[] S_STATIC_DISTANCE_TREE_TABLE =
private static ReadOnlySpan<byte> S_STATIC_DISTANCE_TREE_TABLE => new byte[]
{
0x00,0x10,0x08,0x18,0x04,0x14,0x0c,0x1c,0x02,0x12,0x0a,0x1a,
0x06,0x16,0x0e,0x1e,0x01,0x11,0x09,0x19,0x05,0x15,0x0d,0x1d,
@@ -220,7 +220,9 @@ namespace SharpCompress.Compressors.Deflate64
// reading bfinal bit
// Need 1 bit
if (!_input.EnsureBitsAvailable(1))
{
return false;
}
_bfinal = _input.GetBits(1);
_state = InflaterState.ReadingBType;
@@ -718,7 +720,7 @@ namespace SharpCompress.Compressors.Deflate64
byte[] distanceTreeCodeLength = new byte[HuffmanTree.MAX_DIST_TREE_ELEMENTS];
// Create literal and distance tables
Array.Copy(_codeList, 0, literalTreeCodeLength, 0, _literalLengthCodeCount);
Array.Copy(_codeList, literalTreeCodeLength, _literalLengthCodeCount);
Array.Copy(_codeList, _literalLengthCodeCount, distanceTreeCodeLength, 0, _distanceCodeCount);
// Make sure there is an end-of-block code, otherwise how could we ever end?

View File

@@ -30,7 +30,9 @@ namespace SharpCompress.Compressors.LZMA
mLimit = limit;
if (((uint) input.Length & 15) != 0)
{
throw new NotSupportedException("AES decoder does not support padding.");
}
int numCyclesPower;
byte[] salt, seed;
@@ -90,10 +92,14 @@ namespace SharpCompress.Compressors.LZMA
{
if (count == 0
|| mWritten == mLimit)
{
return 0;
}
if (mUnderflow > 0)
{
return HandleUnderflow(buffer, offset, count);
}
// Need at least 16 bytes to proceed.
if (mEnding - mOffset < 16)
@@ -120,16 +126,22 @@ namespace SharpCompress.Compressors.LZMA
// Currently this is handled by forcing an underflow if
// the stream length is not a multiple of the block size.
if (count > mLimit - mWritten)
{
count = (int) (mLimit - mWritten);
}
// We cannot transform less than 16 bytes into the target buffer,
// but we also cannot return zero, so we need to handle this.
// We transform the data locally and use our own buffer as cache.
if (count < 16)
{
return HandleUnderflow(buffer, offset, count);
}
if (count > mEnding - mOffset)
{
count = mEnding - mOffset;
}
// Otherwise we transform directly into the target buffer.
int processed = mDecoder.TransformBlock(mBuffer, mOffset, count & ~15, buffer, offset);
@@ -157,24 +169,34 @@ namespace SharpCompress.Compressors.LZMA
int saltSize = (bt >> 7) & 1;
int ivSize = (bt >> 6) & 1;
if (info.Length == 1)
{
throw new InvalidOperationException();
}
byte bt2 = info[1];
saltSize += (bt2 >> 4);
ivSize += (bt2 & 15);
if (info.Length < 2 + saltSize + ivSize)
{
throw new InvalidOperationException();
}
salt = new byte[saltSize];
for (int i = 0; i < saltSize; i++)
{
salt[i] = info[i + 2];
}
iv = new byte[16];
for (int i = 0; i < ivSize; i++)
{
iv[i] = info[i + saltSize + 2];
}
if (numCyclesPower > 24)
{
throw new NotSupportedException();
}
}
private byte[] InitKey(int mNumCyclesPower, byte[] salt, byte[] pass)
@@ -185,9 +207,14 @@ namespace SharpCompress.Compressors.LZMA
int pos;
for (pos = 0; pos < salt.Length; pos++)
{
key[pos] = salt[pos];
}
for (int i = 0; i < pass.Length && pos < 32; i++)
{
key[pos++] = pass[i];
}
return key;
}
@@ -207,8 +234,12 @@ namespace SharpCompress.Compressors.LZMA
// This mirrors the counter so we don't have to convert long to byte[] each round.
// (It also ensures the counter is little endian, which BitConverter does not.)
for (int i = 0; i < 8; i++)
{
if (++counter[i] != 0)
{
break;
}
}
}
return sha.GetHashAndReset();
}
@@ -226,8 +257,12 @@ namespace SharpCompress.Compressors.LZMA
// This mirrors the counter so we don't have to convert long to byte[] each round.
// (It also ensures the counter is little endian, which BitConverter does not.)
for (int i = 0; i < 8; i++)
{
if (++counter[i] != 0)
{
break;
}
}
}
sha.TransformFinalBlock(counter, 0, 0);
@@ -248,7 +283,9 @@ namespace SharpCompress.Compressors.LZMA
}
if (count > mUnderflow)
{
count = mUnderflow;
}
Buffer.BlockCopy(mBuffer, mOffset, buffer, offset, count);
mWritten += count;

View File

@@ -1,6 +1,6 @@
using System;
using System.Buffers.Binary;
using System.IO;
using SharpCompress.Converters;
using SharpCompress.Crypto;
using SharpCompress.IO;
@@ -58,16 +58,17 @@ namespace SharpCompress.Compressors.LZMA
crc32Stream.WrappedStream.Dispose();
crc32Stream.Dispose();
var compressedCount = _countingWritableSubStream.Count;
var bytes = DataConverter.LittleEndian.GetBytes(crc32Stream.Crc);
_countingWritableSubStream.Write(bytes, 0, bytes.Length);
bytes = DataConverter.LittleEndian.GetBytes(_writeCount);
_countingWritableSubStream.Write(bytes, 0, bytes.Length);
byte[] intBuf = new byte[8];
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, crc32Stream.Crc);
_countingWritableSubStream.Write(intBuf, 0, 4);
BinaryPrimitives.WriteInt64LittleEndian(intBuf, _writeCount);
_countingWritableSubStream.Write(intBuf, 0, 8);
//total with headers
bytes = DataConverter.LittleEndian.GetBytes(compressedCount + 6 + 20);
_countingWritableSubStream.Write(bytes, 0, bytes.Length);
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, compressedCount + 6 + 20);
_countingWritableSubStream.Write(intBuf, 0, 8);
}
_finished = true;
}
@@ -101,7 +102,7 @@ namespace SharpCompress.Compressors.LZMA
{
_stream.Flush();
}
// TODO: Both Length and Position are sometimes feasible, but would require
// reading the output length when we initialize.
public override long Length => throw new NotImplementedException();

View File

@@ -996,7 +996,7 @@ namespace SharpCompress.Compressors.LZMA
}
}
UInt32 startLen = 2; // speed optimization
UInt32 startLen = 2; // speed optimization
for (UInt32 repIndex = 0; repIndex < Base.K_NUM_REP_DISTANCES; repIndex++)
{
@@ -1571,12 +1571,17 @@ namespace SharpCompress.Compressors.LZMA
public void WriteCoderProperties(Stream outStream)
{
_properties[0] = (Byte)((_posStateBits * 5 + _numLiteralPosStateBits) * 9 + _numLiteralContextBits);
WriteCoderProperties(_properties);
outStream.Write(_properties, 0, K_PROP_SIZE);
}
public void WriteCoderProperties(Span<byte> span)
{
span[0] = (byte)((_posStateBits * 5 + _numLiteralPosStateBits) * 9 + _numLiteralContextBits);
for (int i = 0; i < 4; i++)
{
_properties[1 + i] = (Byte)((_dictionarySize >> (8 * i)) & 0xFF);
span[1 + i] = (byte)((_dictionarySize >> (8 * i)) & 0xFF);
}
outStream.Write(_properties, 0, K_PROP_SIZE);
}
private readonly UInt32[] _tempPrices = new UInt32[Base.K_NUM_FULL_DISTANCES];
@@ -1794,4 +1799,4 @@ namespace SharpCompress.Compressors.LZMA
_trainSize = trainSize;
}
}
}
}

View File

@@ -1,7 +1,7 @@
using System;
using System.Buffers.Binary;
using System.IO;
using SharpCompress.Compressors.LZMA.LZ;
using SharpCompress.Converters;
namespace SharpCompress.Compressors.LZMA
{
@@ -56,7 +56,7 @@ namespace SharpCompress.Compressors.LZMA
if (!isLzma2)
{
_dictionarySize = DataConverter.LittleEndian.GetInt32(properties, 1);
_dictionarySize = BinaryPrimitives.ReadInt32LittleEndian(properties.AsSpan(1));
_outWindow.Create(_dictionarySize);
if (presetDictionary != null)
{
@@ -107,9 +107,9 @@ namespace SharpCompress.Compressors.LZMA
_encoder = new Encoder();
_encoder.SetCoderProperties(properties._propIDs, properties._properties);
MemoryStream propStream = new MemoryStream(5);
_encoder.WriteCoderProperties(propStream);
Properties = propStream.ToArray();
byte[] prop = new byte[5];
_encoder.WriteCoderProperties(prop);
Properties = prop;
_encoder.SetStreams(null, outputStream, -1, -1);
if (presetDictionary != null)
@@ -315,4 +315,4 @@ namespace SharpCompress.Compressors.LZMA
public byte[] Properties { get; } = new byte[5];
}
}
}

View File

@@ -1,6 +1,6 @@
using System;
using System.Buffers.Binary;
using System.Text;
using SharpCompress.Converters;
namespace SharpCompress.Compressors.PPMd.H
{
@@ -19,7 +19,11 @@ namespace SharpCompress.Compressors.PPMd.H
{
}
internal int SummFreq { get => DataConverter.LittleEndian.GetInt16(Memory, Address) & 0xffff; set => DataConverter.LittleEndian.PutBytes(Memory, Address, (short)value); }
internal int SummFreq
{
get => BinaryPrimitives.ReadInt16LittleEndian(Memory.AsSpan(Address)) & 0xffff;
set => BinaryPrimitives.WriteInt16LittleEndian(Memory.AsSpan(Address), (short)value);
}
internal FreqData Initialize(byte[] mem)
{
@@ -28,14 +32,12 @@ namespace SharpCompress.Compressors.PPMd.H
internal void IncrementSummFreq(int dSummFreq)
{
short summFreq = DataConverter.LittleEndian.GetInt16(Memory, Address);
summFreq += (short)dSummFreq;
DataConverter.LittleEndian.PutBytes(Memory, Address, summFreq);
SummFreq += (short)dSummFreq;
}
internal int GetStats()
{
return DataConverter.LittleEndian.GetInt32(Memory, Address + 2);
return BinaryPrimitives.ReadInt32LittleEndian(Memory.AsSpan(Address + 2));
}
internal virtual void SetStats(State state)
@@ -45,7 +47,7 @@ namespace SharpCompress.Compressors.PPMd.H
internal void SetStats(int state)
{
DataConverter.LittleEndian.PutBytes(Memory, Address + 2, state);
BinaryPrimitives.WriteInt32LittleEndian(Memory.AsSpan(Address + 2), state);
}
public override String ToString()
@@ -64,4 +66,4 @@ namespace SharpCompress.Compressors.PPMd.H
return buffer.ToString();
}
}
}
}

View File

@@ -137,7 +137,7 @@ namespace SharpCompress.Compressors.PPMd.H
private void RestartModelRare()
{
Utility.Fill(_charMask, 0);
new Span<int>(_charMask).Clear();
SubAlloc.InitSubAllocator();
_initRl = -(_maxOrder < 12 ? _maxOrder : 12) - 1;
int addr = SubAlloc.AllocContext();
@@ -228,7 +228,7 @@ namespace SharpCompress.Compressors.PPMd.H
private void ClearMask()
{
_escCount = 1;
Utility.Fill(_charMask, 0);
new Span<int>(_charMask).Clear();
}
internal bool DecodeInit(IRarUnpack unpackRead, int escChar)
@@ -912,4 +912,4 @@ namespace SharpCompress.Compressors.PPMd.H
}
}
}
}
}

View File

@@ -1,6 +1,6 @@
using System;
using System.Buffers.Binary;
using System.Text;
using SharpCompress.Converters;
namespace SharpCompress.Compressors.PPMd.H
{
@@ -22,7 +22,7 @@ namespace SharpCompress.Compressors.PPMd.H
{
if (Memory != null)
{
_numStats = DataConverter.LittleEndian.GetInt16(Memory, Address) & 0xffff;
_numStats = BinaryPrimitives.ReadInt16LittleEndian(Memory.AsSpan(Address)) & 0xffff;
}
return _numStats;
}
@@ -32,7 +32,7 @@ namespace SharpCompress.Compressors.PPMd.H
_numStats = value & 0xffff;
if (Memory != null)
{
DataConverter.LittleEndian.PutBytes(Memory, Address, (short)value);
BinaryPrimitives.WriteInt16LittleEndian(Memory.AsSpan(Address), (short)value);
}
}
}
@@ -109,7 +109,7 @@ namespace SharpCompress.Compressors.PPMd.H
{
if (Memory != null)
{
_suffix = DataConverter.LittleEndian.GetInt32(Memory, Address + 8);
_suffix = BinaryPrimitives.ReadInt32LittleEndian(Memory.AsSpan(Address + 8));
}
return _suffix;
}
@@ -124,7 +124,7 @@ namespace SharpCompress.Compressors.PPMd.H
_suffix = suffix;
if (Memory != null)
{
DataConverter.LittleEndian.PutBytes(Memory, Address + 8, suffix);
BinaryPrimitives.WriteInt32LittleEndian(Memory.AsSpan(Address + 8), suffix);
}
}
@@ -307,7 +307,7 @@ namespace SharpCompress.Compressors.PPMd.H
// byte[] bytes = model.getSubAlloc().getHeap();
// int p1 = state1.Address;
// int p2 = state2.Address;
//
//
// for (int i = 0; i < StatePtr.size; i++) {
// byte temp = bytes[p1+i];
// bytes[p1+i] = bytes[p2+i];
@@ -564,4 +564,4 @@ namespace SharpCompress.Compressors.PPMd.H
UNION_SIZE = Math.Max(FreqData.SIZE, State.SIZE);
}
}
}
}

View File

@@ -1,4 +1,5 @@
using SharpCompress.Converters;
using System;
using System.Buffers.Binary;
namespace SharpCompress.Compressors.PPMd.H
{
@@ -21,7 +22,7 @@ namespace SharpCompress.Compressors.PPMd.H
{
if (Memory != null)
{
_stamp = DataConverter.LittleEndian.GetInt16(Memory, Address) & 0xffff;
_stamp = BinaryPrimitives.ReadInt16LittleEndian(Memory.AsSpan(Address)) & 0xffff;
}
return _stamp;
}
@@ -31,7 +32,7 @@ namespace SharpCompress.Compressors.PPMd.H
_stamp = value;
if (Memory != null)
{
DataConverter.LittleEndian.PutBytes(Memory, Address, (short)value);
BinaryPrimitives.WriteInt16LittleEndian(Memory.AsSpan(Address), (short)value);
}
}
}
@@ -63,7 +64,7 @@ namespace SharpCompress.Compressors.PPMd.H
{
if (Memory != null)
{
_next = DataConverter.LittleEndian.GetInt32(Memory, Address + 4);
_next = BinaryPrimitives.ReadInt32LittleEndian(Memory.AsSpan(Address + 4));
}
return _next;
}
@@ -78,7 +79,7 @@ namespace SharpCompress.Compressors.PPMd.H
_next = next;
if (Memory != null)
{
DataConverter.LittleEndian.PutBytes(Memory, Address + 4, next);
BinaryPrimitives.WriteInt32LittleEndian(Memory.AsSpan(Address + 4), next);
}
}
@@ -86,7 +87,7 @@ namespace SharpCompress.Compressors.PPMd.H
{
if (Memory != null)
{
_nu = DataConverter.LittleEndian.GetInt16(Memory, Address + 2) & 0xffff;
_nu = BinaryPrimitives.ReadInt16LittleEndian(Memory.AsSpan(Address + 2)) & 0xffff;
}
return _nu;
}
@@ -96,7 +97,7 @@ namespace SharpCompress.Compressors.PPMd.H
_nu = nu & 0xffff;
if (Memory != null)
{
DataConverter.LittleEndian.PutBytes(Memory, Address + 2, (short)nu);
BinaryPrimitives.WriteInt16LittleEndian(Memory.AsSpan(Address + 2), (short)nu);
}
}
@@ -104,7 +105,7 @@ namespace SharpCompress.Compressors.PPMd.H
{
if (Memory != null)
{
_prev = DataConverter.LittleEndian.GetInt32(Memory, Address + 8);
_prev = BinaryPrimitives.ReadInt32LittleEndian(Memory.AsSpan(Address + 8));
}
return _prev;
}
@@ -119,8 +120,8 @@ namespace SharpCompress.Compressors.PPMd.H
_prev = prev;
if (Memory != null)
{
DataConverter.LittleEndian.PutBytes(Memory, Address + 8, prev);
BinaryPrimitives.WriteInt32LittleEndian(Memory.AsSpan(Address + 8), prev);
}
}
}
}
}

View File

@@ -1,5 +1,6 @@
using System;
using System.Buffers.Binary;
using System.Text;
using SharpCompress.Converters;
namespace SharpCompress.Compressors.PPMd.H
{
@@ -18,7 +19,7 @@ namespace SharpCompress.Compressors.PPMd.H
{
if (Memory != null)
{
_next = DataConverter.LittleEndian.GetInt32(Memory, Address);
_next = BinaryPrimitives.ReadInt32LittleEndian(Memory.AsSpan(Address));
}
return _next;
}
@@ -33,7 +34,7 @@ namespace SharpCompress.Compressors.PPMd.H
_next = next;
if (Memory != null)
{
DataConverter.LittleEndian.PutBytes(Memory, Address, next);
BinaryPrimitives.WriteInt32LittleEndian(Memory.AsSpan(Address), next);
}
}
@@ -51,4 +52,4 @@ namespace SharpCompress.Compressors.PPMd.H
return buffer.ToString();
}
}
}
}

View File

@@ -1,6 +1,6 @@
using System;
using System.Buffers.Binary;
using System.Text;
using SharpCompress.Converters;
namespace SharpCompress.Compressors.PPMd.H
{
@@ -29,7 +29,7 @@ namespace SharpCompress.Compressors.PPMd.H
internal int GetSuccessor()
{
return DataConverter.LittleEndian.GetInt32(Memory, Address + 2);
return BinaryPrimitives.ReadInt32LittleEndian(Memory.AsSpan(Address + 2));
}
internal void SetSuccessor(PpmContext successor)
@@ -39,7 +39,7 @@ namespace SharpCompress.Compressors.PPMd.H
internal void SetSuccessor(int successor)
{
DataConverter.LittleEndian.PutBytes(Memory, Address + 2, successor);
BinaryPrimitives.WriteInt32LittleEndian(Memory.AsSpan(Address + 2), successor);
}
internal void SetValues(StateRef state)
@@ -95,4 +95,4 @@ namespace SharpCompress.Compressors.PPMd.H
return buffer.ToString();
}
}
}
}

View File

@@ -166,7 +166,7 @@ namespace SharpCompress.Compressors.PPMd.H
_freeListPos = _heapStart + allocSize;
//UPGRADE_ISSUE: The following fragment of code could not be parsed and was not converted. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1156'"
//assert(realAllocSize - tempMemBlockPos == RarMemBlock.size): realAllocSize
//assert(realAllocSize - tempMemBlockPos == RarMemBlock.size): realAllocSize
//+ + tempMemBlockPos + + RarMemBlock.size;
// Init freeList
@@ -360,7 +360,7 @@ namespace SharpCompress.Compressors.PPMd.H
public virtual void InitSubAllocator()
{
int i, k;
Utility.Fill(_heap, _freeListPos, _freeListPos + SizeOfFreeList(), (byte)0);
new Span<byte>(_heap, _freeListPos, SizeOfFreeList()).Clear();
_pText = _heapStart;
@@ -448,4 +448,4 @@ namespace SharpCompress.Compressors.PPMd.H
UNIT_SIZE = Math.Max(PpmContext.SIZE, RarMemBlock.SIZE);
}
}
}
}

View File

@@ -58,7 +58,7 @@ namespace SharpCompress.Compressors.PPMd.I1
0x6051
};
private static readonly byte[] EXPONENTIAL_ESCAPES = {25, 14, 9, 7, 5, 5, 4, 4, 4, 3, 3, 3, 2, 2, 2, 2};
private static ReadOnlySpan<byte> EXPONENTIAL_ESCAPES => new byte[] {25, 14, 9, 7, 5, 5, 4, 4, 4, 3, 3, 3, 2, 2, 2, 2};
#region Public Methods

View File

@@ -1,5 +1,6 @@
using SharpCompress.Compressors.PPMd.I1;
using SharpCompress.Converters;
using System;
using System.Buffers.Binary;
using SharpCompress.Compressors.PPMd.I1;
namespace SharpCompress.Compressors.PPMd
{
@@ -25,7 +26,7 @@ namespace SharpCompress.Compressors.PPMd
ModelOrder = modelOrder;
RestorationMethod = modelRestorationMethod;
}
public int ModelOrder { get; }
public PpmdVersion Version { get; } = PpmdVersion.I1;
internal ModelRestorationMethod RestorationMethod { get; }
@@ -34,7 +35,7 @@ namespace SharpCompress.Compressors.PPMd
{
if (properties.Length == 2)
{
ushort props = DataConverter.LittleEndian.GetUInt16(properties, 0);
ushort props = BinaryPrimitives.ReadUInt16LittleEndian(properties);
AllocatorSize = (((props >> 4) & 0xff) + 1) << 20;
ModelOrder = (props & 0x0f) + 1;
RestorationMethod = (ModelRestorationMethod)(props >> 12);
@@ -42,7 +43,7 @@ namespace SharpCompress.Compressors.PPMd
else if (properties.Length == 5)
{
Version = PpmdVersion.H7Z;
AllocatorSize = DataConverter.LittleEndian.GetInt32(properties, 1);
AllocatorSize = BinaryPrimitives.ReadInt32LittleEndian(properties.AsSpan(1));
ModelOrder = properties[0];
}
}
@@ -64,8 +65,16 @@ namespace SharpCompress.Compressors.PPMd
}
}
public byte[] Properties => DataConverter.LittleEndian.GetBytes(
(ushort)
((ModelOrder - 1) + (((AllocatorSize >> 20) - 1) << 4) + ((ushort)RestorationMethod << 12)));
public byte[] Properties
{
get
{
byte[] bytes = new byte[2];
BinaryPrimitives.WriteUInt16LittleEndian(
bytes,
(ushort)((ModelOrder - 1) + (((AllocatorSize >> 20) - 1) << 4) + ((ushort)RestorationMethod << 12)));
return bytes;
}
}
}
}
}

View File

@@ -1,4 +1,3 @@
using System.IO;
using SharpCompress.Common;
using SharpCompress.Common.Rar.Headers;

View File

@@ -32,9 +32,9 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
}
}
public bool Suspended {
public bool Suspended {
get => suspended;
set => suspended = value;
set => suspended = value;
}
public int Char
@@ -139,12 +139,12 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
case 36: // alternative hash
Unpack29(fileHeader.IsSolid);
break;
case 50: // rar 5.x compression
Unpack5(fileHeader.IsSolid);
break;
default:
default:
throw new InvalidFormatException("unknown rar compression version " + fileHeader.CompressionAlgorithm);
}
}
@@ -729,13 +729,13 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
if (!solid)
{
tablesRead = false;
Utility.Fill(oldDist, 0); // memset(oldDist,0,sizeof(OldDist));
new Span<int>(oldDist).Clear(); // memset(oldDist,0,sizeof(OldDist));
oldDistPtr = 0;
lastDist = 0;
lastLength = 0;
Utility.Fill(unpOldTable, (byte)0); // memset(UnpOldTable,0,sizeof(UnpOldTable));
new Span<byte>(unpOldTable).Clear(); // memset(UnpOldTable,0,sizeof(UnpOldTable));
unpPtr = 0;
wrPtr = 0;
@@ -837,7 +837,7 @@ WriteBorder=Math.Min(MaxWinSize,UNPACK_MAX_WRITE)&MaxWinMask;
if ((bitField & 0x4000) == 0)
{
Utility.Fill(unpOldTable, (byte)0); // memset(UnpOldTable,0,sizeof(UnpOldTable));
new Span<byte>(unpOldTable).Clear(); // memset(UnpOldTable,0,sizeof(UnpOldTable));
}
AddBits(2);
@@ -1109,7 +1109,7 @@ WriteBorder=Math.Min(MaxWinSize,UNPACK_MAX_WRITE)&MaxWinMask;
oldFilterLengths[FiltPos] = StackFilter.BlockLength;
// memset(StackFilter->Prg.InitR,0,sizeof(StackFilter->Prg.InitR));
Utility.Fill(StackFilter.Program.InitR, 0);
new Span<int>(StackFilter.Program.InitR).Clear();
StackFilter.Program.InitR[3] = RarVM.VM_GLOBALMEMADDR; // StackFilter->Prg.InitR[3]=VM_GLOBALMEMADDR;
StackFilter.Program.InitR[4] = StackFilter.BlockLength;
@@ -1267,4 +1267,4 @@ WriteBorder=Math.Min(MaxWinSize,UNPACK_MAX_WRITE)&MaxWinMask;
}
}
}
}
}

View File

@@ -3,7 +3,7 @@
* Original author: Edmund Wagner
* Creation date: 21.06.2007
*
* the unrar licence applies to all junrar source and binary distributions
* the unrar licence applies to all junrar source and binary distributions
* you are not allowed to use this source to re-create the RAR compression algorithm
*/
@@ -652,9 +652,9 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
ChSetC[I] = ((~I + 1) & 0xff) << 8;
}
Utility.Fill(NToPl, 0); // memset(NToPl,0,sizeof(NToPl));
Utility.Fill(NToPlB, 0); // memset(NToPlB,0,sizeof(NToPlB));
Utility.Fill(NToPlC, 0); // memset(NToPlC,0,sizeof(NToPlC));
new Span<int>(NToPl).Clear(); // memset(NToPl,0,sizeof(NToPl));
new Span<int>(NToPlB).Clear(); // memset(NToPlB,0,sizeof(NToPlB));
new Span<int>(NToPlC).Clear(); // memset(NToPlC,0,sizeof(NToPlC));
corrHuff(ChSetB, NToPlB);
}
@@ -670,7 +670,7 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
// & ~0xff) | I;
}
}
Utility.Fill(NumToPlace, 0); // memset(NumToPlace,0,sizeof(NToPl));
new Span<int>(NumToPlace).Clear(); // memset(NumToPlace,0,sizeof(NToPl));
for (I = 6; I >= 0; I--)
{
NumToPlace[I] = (7 - I) * 32;
@@ -717,4 +717,4 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
wrPtr = unpPtr;
}
}
}
}

View File

@@ -3,7 +3,7 @@
* Original author: Edmund Wagner
* Creation date: 21.06.2007
*
* the unrar licence applies to all junrar source and binary distributions
* the unrar licence applies to all junrar source and binary distributions
* you are not allowed to use this source to re-create the RAR compression algorithm
*/
@@ -38,7 +38,7 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
56, 64, 80, 96, 112, 128, 160, 192, 224
};
private static readonly byte[] LBits =
private static ReadOnlySpan<byte> LBits => new byte[]
{
0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4,
4, 5, 5, 5, 5
@@ -263,7 +263,7 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
if (0 == (BitField & 0x4000))
{
// memset(UnpOldTable20,0,sizeof(UnpOldTable20));
Utility.Fill(UnpOldTable20, (byte)0);
new Span<byte>(UnpOldTable20).Clear();
}
AddBits(2);
@@ -371,7 +371,7 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
AudV[3] = new AudioVariables();
// memset(UnpOldTable20,0,sizeof(UnpOldTable20));
Utility.Fill(UnpOldTable20, (byte)0);
new Span<byte>(UnpOldTable20).Clear();
}
}
@@ -521,4 +521,4 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
return ((byte)Ch);
}
}
}
}

View File

@@ -2,8 +2,6 @@
using System;
using System.Collections.Generic;
using SharpCompress.Compressors.Rar.UnpackV1.Decode;
using SharpCompress.Compressors.Rar.VM;
using size_t=System.UInt32;
using UnpackBlockHeader = SharpCompress.Compressors.Rar.UnpackV1;
@@ -139,14 +137,18 @@ public bool TablePresent;
{
UnpInitData(Solid);
if (!UnpReadBuf())
return;
{
return;
}
// Check TablesRead5 to be sure that we read tables at least once
// regardless of current block header TablePresent flag.
// So we can safefly use these tables below.
if (!ReadBlockHeader() ||
!ReadTables() || !TablesRead5)
return;
{
return;
}
}
while (true)
@@ -169,17 +171,24 @@ public bool TablePresent;
break;
}
if (!ReadBlockHeader() || !ReadTables())
{
return;
}
}
if (FileDone || !UnpReadBuf())
{
break;
}
}
if (((WriteBorder-UnpPtr) & MaxWinMask)<PackDef.MAX_LZ_MATCH+3 && WriteBorder!=UnpPtr)
{
UnpWriteBuf();
if (WrittenFileSize>DestUnpSize)
{
return;
}
if (Suspended)
{
FileExtracted=false;
@@ -243,7 +252,9 @@ public bool TablePresent;
{
Length++;
if (Distance>0x40000)
{
Length++;
}
}
}
@@ -259,7 +270,10 @@ public bool TablePresent;
{
UnpackFilter Filter = new UnpackFilter();
if (!ReadFilter(Filter) || !AddFilter(Filter))
{
break;
}
continue;
}
if (MainSlot==257)
@@ -269,7 +283,10 @@ public bool TablePresent;
// FragWindow.CopyString(LastLength,OldDist[0],UnpPtr,MaxWinMask);
// else
//CopyString(LastLength,OldDist[0]);
CopyString(LastLength,OldDistN(0));
{
CopyString(LastLength,OldDistN(0));
}
continue;
}
if (MainSlot<262)
@@ -281,7 +298,10 @@ public bool TablePresent;
//for (uint I=DistNum;I>0;I--)
for (int I=DistNum;I>0;I--)
//OldDistN[I]=OldDistN(I-1);
{
SetOldDistN(I, OldDistN(I-1));
}
//OldDistN[0]=Distance;
SetOldDistN(0, Distance);
@@ -316,13 +336,19 @@ public bool TablePresent;
private bool ReadFilter(UnpackFilter Filter)
{
if (!Inp.ExternalBuffer && Inp.InAddr>ReadTop-16)
{
if (!UnpReadBuf())
{
return false;
}
}
Filter.uBlockStart=ReadFilterData();
Filter.uBlockLength=ReadFilterData();
if (Filter.BlockLength>MAX_FILTER_BLOCK_SIZE)
{
Filter.BlockLength=0;
}
//Filter.Type=Inp.fgetbits()>>13;
Filter.Type=(byte)(Inp.fgetbits()>>13);
@@ -344,7 +370,9 @@ public bool TablePresent;
{
UnpWriteBuf(); // Write data, apply and flush filters.
if (Filters.Count>=MAX_UNPACK_FILTERS)
{
InitFilters(); // Still too many filters, prevent excessive memory use.
}
}
// If distance to filter start is that large that due to circular dictionary
@@ -361,7 +389,10 @@ public bool TablePresent;
{
int DataSize=ReadTop-Inp.InAddr; // Data left to process.
if (DataSize<0)
{
return false;
}
BlockHeader.BlockSize-=Inp.InAddr-BlockHeader.BlockStart;
if (Inp.InAddr>MAX_SIZE/2)
{
@@ -373,21 +404,33 @@ public bool TablePresent;
// to make it zero.
if (DataSize>0)
//memmove(Inp.InBuf,Inp.InBuf+Inp.InAddr,DataSize);
{
Array.Copy(InBuf, inAddr, InBuf, 0, DataSize);
// TODO: perf
}
// TODO: perf
//Buffer.BlockCopy(InBuf, inAddr, InBuf, 0, DataSize);
Inp.InAddr=0;
ReadTop=DataSize;
}
else
{
DataSize=ReadTop;
}
int ReadCode=0;
if (MAX_SIZE!=DataSize)
//ReadCode=UnpIO->UnpRead(Inp.InBuf+DataSize,BitInput.MAX_SIZE-DataSize);
{
ReadCode = readStream.Read(InBuf, DataSize, MAX_SIZE-DataSize);
}
if (ReadCode>0) // Can be also -1.
{
ReadTop+=ReadCode;
}
ReadBorder=ReadTop-30;
BlockHeader.BlockStart=Inp.InAddr;
if (BlockHeader.BlockSize!=-1) // '-1' means not defined yet.
@@ -674,7 +717,9 @@ public bool TablePresent;
private void UnpInitData50(bool Solid)
{
if (!Solid)
{
TablesRead5=false;
}
}
private bool ReadBlockHeader()
@@ -682,8 +727,13 @@ public bool TablePresent;
Header.HeaderSize=0;
if (!Inp.ExternalBuffer && Inp.InAddr>ReadTop-7)
{
if (!UnpReadBuf())
{
return false;
}
}
//Inp.faddbits((8-Inp.InBit)&7);
Inp.faddbits((uint)((8-Inp.InBit)&7));
@@ -693,7 +743,9 @@ public bool TablePresent;
uint ByteCount=(uint)(((BlockFlags>>3)&3)+1); // Block size byte count.
if (ByteCount==4)
{
return false;
}
//Header.HeaderSize=2+ByteCount;
Header.HeaderSize=(int)(2+ByteCount);
@@ -715,7 +767,9 @@ public bool TablePresent;
Header.BlockSize=BlockSize;
byte CheckSum=(byte)(0x5a^BlockFlags^BlockSize^(BlockSize>>8)^(BlockSize>>16));
if (CheckSum!=SavedCheckSum)
{
return false;
}
Header.BlockStart=Inp.InAddr;
ReadBorder=Math.Min(ReadBorder,Header.BlockStart+Header.BlockSize-1);

View File

@@ -1,4 +1,5 @@
using SharpCompress.Compressors.Rar.VM;
using System;
using SharpCompress.Compressors.Rar.VM;
namespace SharpCompress.Compressors.Rar.UnpackV1
{
@@ -186,9 +187,7 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
int i;
long M, N;
Utility.Fill(lenCount, 0); // memset(LenCount,0,sizeof(LenCount));
Utility.Fill(dec.DecodeNum, 0); // memset(Dec->DecodeNum,0,Size*sizeof(*Dec->DecodeNum));
new Span<int>(dec.DecodeNum).Clear(); // memset(Dec->DecodeNum,0,Size*sizeof(*Dec->DecodeNum));
for (i = 0; i < size; i++)
{
@@ -217,4 +216,4 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
dec.MaxNum = size;
}
}
}
}

View File

@@ -30,7 +30,9 @@ public BitInput(bool AllocBuffer)
//memset(InBuf,0,BufSize);
}
else
InBuf=null;
{
InBuf=null;
}
}

View File

@@ -31,11 +31,13 @@ public FragmentedWindow()
private void Reset()
{
for (uint I=0;I<Mem.Length;I++)
{
if (Mem[I]!=null)
{
//free(Mem[I]);
Mem[I]=null;
}
}
}
@@ -60,13 +62,18 @@ public void Init(size_t WinSize)
{
NewMem=new byte[Size];
if (NewMem!=null)
{
break;
}
Size-=Size/32;
}
if (NewMem==null)
//throw std::bad_alloc();
{
throw new InvalidOperationException();
}
// Clean the window to generate the same output when unpacking corrupt
// RAR files, which may access to unused areas of sliding dictionary.
// sharpcompress: don't need this, freshly allocated above
@@ -79,17 +86,27 @@ public void Init(size_t WinSize)
}
if (TotalSize<WinSize) // Not found enough free blocks.
//throw std::bad_alloc();
{
throw new InvalidOperationException();
}
}
public byte this[size_t Item] {
get {
if (Item<MemSize[0])
{
return Mem[0][Item];
}
for (uint I=1;I<MemSize.Length;I++)
{
if (Item<MemSize[I])
{
return Mem[I][Item-MemSize[I-1]];
}
}
return Mem[0][0]; // Must never happen;
}
set {
@@ -98,10 +115,13 @@ set {
return;
}
for (uint I=1;I<MemSize.Length;I++)
{
if (Item<MemSize[I]) {
Mem[I][Item-MemSize[I-1]] = value;
return;
}
}
}
Mem[0][0] = value; // Must never happen;
}
}
@@ -138,15 +158,22 @@ public void CopyString(uint Length,uint Distance,ref size_t UnpPtr,size_t MaxWin
public void CopyData(byte[] Dest, size_t destOffset, size_t WinPos,size_t Size)
{
for (size_t I=0;I<Size;I++)
{
Dest[destOffset+I]=this[WinPos+I];
}
}
public size_t GetBlockSize(size_t StartPos,size_t RequiredSize)
{
for (uint I=0;I<MemSize.Length;I++)
{
if (StartPos<MemSize[I])
{
return Math.Min(MemSize[I]-StartPos,RequiredSize);
}
}
return 0; // Must never be here.
}

View File

@@ -12,7 +12,6 @@ using int64 = System.Int64;
using System;
using System.IO;
using SharpCompress.Common.Rar.Headers;
using static SharpCompress.Compressors.Rar.UnpackV2017.PackDef;
namespace SharpCompress.Compressors.Rar.UnpackV2017
{

View File

@@ -10,10 +10,6 @@ using size_t = System.UInt64;
using int64 = System.Int64;
using uint32 = System.UInt32;
using System;
using System.Collections.Generic;
using System.Text;
namespace SharpCompress.Compressors.Rar.UnpackV2017
{
internal partial class Unpack

View File

@@ -1,4 +1,5 @@
using static SharpCompress.Compressors.Rar.UnpackV2017.Unpack.Unpack15Local;
using System;
using static SharpCompress.Compressors.Rar.UnpackV2017.Unpack.Unpack15Local;
namespace SharpCompress.Compressors.Rar.UnpackV2017
{
@@ -61,7 +62,10 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
UnpPtr=0;
}
else
{
UnpPtr=WrPtr;
}
--DestUnpSize;
if (DestUnpSize>=0)
{
@@ -74,9 +78,15 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
UnpPtr&=MaxWinMask;
if (Inp.InAddr>ReadTop-30 && !UnpReadBuf())
{
break;
}
if (((WrPtr-UnpPtr) & MaxWinMask)<270 && WrPtr!=UnpPtr)
{
UnpWriteBuf20();
}
if (StMode != 0)
{
HuffDecode();
@@ -93,9 +103,13 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
{
FlagBuf<<=1;
if (Nlzb > Nhfb)
{
LongLZ();
}
else
{
HuffDecode();
}
}
else
{
@@ -109,9 +123,13 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
{
FlagBuf<<=1;
if (Nlzb > Nhfb)
{
HuffDecode();
}
else
{
LongLZ();
}
}
else
{
@@ -167,15 +185,25 @@ internal static class Unpack15Local {
if (AvrLn1<37)
{
for (Length=0;;Length++)
{
if (((BitField^ShortXor1[Length]) & (~(0xff>>(int)GetShortLen1(Length))))==0)
{
break;
}
}
Inp.faddbits(GetShortLen1(Length));
}
else
{
for (Length=0;;Length++)
{
if (((BitField^ShortXor2[Length]) & (~(0xff>>(int)GetShortLen2(Length))))==0)
{
break;
}
}
Inp.faddbits(GetShortLen2(Length));
}
@@ -209,9 +237,14 @@ internal static class Unpack15Local {
return;
}
if (Distance > 256)
{
Length++;
}
if (Distance >= MaxDist3)
{
Length++;
}
OldDist[OldDistPtr++]=Distance;
OldDistPtr = OldDistPtr & 3;
@@ -259,10 +292,14 @@ internal static class Unpack15Local {
uint BitField=Inp.fgetbits();
if (AvrLn2 >= 122)
{
Length=DecodeNum(BitField,STARTL2,DecL2,PosL2);
}
else
if (AvrLn2 >= 64)
{
Length=DecodeNum(BitField,STARTL1,DecL1,PosL1);
}
else
if (BitField < 0x100)
{
@@ -272,7 +309,10 @@ internal static class Unpack15Local {
else
{
for (Length=0;((BitField<<(int)Length)&0x8000)==0;Length++)
{
;
}
Inp.faddbits(Length+1);
}
@@ -281,12 +321,18 @@ internal static class Unpack15Local {
BitField=Inp.fgetbits();
if (AvrPlcB > 0x28ff)
{
DistancePlace=DecodeNum(BitField,STARTHF2,DecHf2,PosHf2);
}
else
if (AvrPlcB > 0x6ff)
{
DistancePlace=DecodeNum(BitField,STARTHF1,DecHf1,PosHf1);
}
else
{
DistancePlace=DecodeNum(BitField,STARTHF0,DecHf0,PosHf0);
}
AvrPlcB += DistancePlace;
AvrPlcB -= AvrPlcB >> 8;
@@ -295,9 +341,13 @@ internal static class Unpack15Local {
Distance = ChSetB[DistancePlace & 0xff];
NewDistancePlace = NToPlB[Distance++ & 0xff]++;
if ((Distance & 0xff) != 0)
{
CorrHuff(ChSetB,NToPlB);
}
else
{
break;
}
}
ChSetB[DistancePlace & 0xff]=ChSetB[NewDistancePlace];
@@ -308,23 +358,39 @@ internal static class Unpack15Local {
OldAvr3=AvrLn3;
if (Length!=1 && Length!=4)
{
if (Length==0 && Distance <= MaxDist3)
{
AvrLn3++;
AvrLn3 -= AvrLn3 >> 8;
}
else
if (AvrLn3 > 0)
AvrLn3--;
if (AvrLn3 > 0)
{
AvrLn3--;
}
}
Length+=3;
if (Distance >= MaxDist3)
{
Length++;
}
if (Distance <= 256)
{
Length+=8;
}
if (OldAvr3 > 0xb0 || AvrPlc >= 0x2a00 && OldAvr2 < 0x40)
{
MaxDist3=0x7f00;
}
else
{
MaxDist3=0x2001;
}
OldDist[OldDistPtr++]=Distance;
OldDistPtr = OldDistPtr & 3;
LastLength=Length;
@@ -342,23 +408,37 @@ internal static class Unpack15Local {
uint BitField=Inp.fgetbits();
if (AvrPlc > 0x75ff)
{
BytePlace=(int)DecodeNum(BitField,STARTHF4,DecHf4,PosHf4);
}
else
if (AvrPlc > 0x5dff)
{
BytePlace=(int)DecodeNum(BitField,STARTHF3,DecHf3,PosHf3);
}
else
if (AvrPlc > 0x35ff)
{
BytePlace=(int)DecodeNum(BitField,STARTHF2,DecHf2,PosHf2);
}
else
if (AvrPlc > 0x0dff)
{
BytePlace=(int)DecodeNum(BitField,STARTHF1,DecHf1,PosHf1);
}
else
{
BytePlace=(int)DecodeNum(BitField,STARTHF0,DecHf0,PosHf0);
}
BytePlace&=0xff;
if (StMode != 0)
{
if (BytePlace==0 && BitField > 0xfff)
{
BytePlace=0x100;
}
if (--BytePlace==-1)
{
BitField=Inp.fgetbits();
@@ -382,7 +462,10 @@ internal static class Unpack15Local {
}
else
if (NumHuf++ >= 16 && FlagsCnt==0)
{
StMode=1;
}
AvrPlc += (uint)BytePlace;
AvrPlc -= AvrPlc >> 8;
Nhfb+=16;
@@ -400,9 +483,13 @@ internal static class Unpack15Local {
CurByte=ChSet[BytePlace];
NewBytePlace=NToPl[CurByte++ & 0xff]++;
if ((CurByte & 0xff) > 0xa1)
{
CorrHuff(ChSet,NToPl);
}
else
{
break;
}
}
ChSet[BytePlace]=ChSet[NewBytePlace];
@@ -420,7 +507,9 @@ internal static class Unpack15Local {
// we need to check for value 256 when unpacking in case we unpack
// a corrupt archive.
if (FlagsPlace>=ChSetC.Length)
{
return;
}
while (true)
{
@@ -428,7 +517,10 @@ internal static class Unpack15Local {
FlagBuf=Flags>>8;
NewFlagsPlace=NToPlC[Flags++ & 0xff]++;
if ((Flags & 0xff) != 0)
{
break;
}
CorrHuff(ChSetC,NToPlC);
}
@@ -461,9 +553,9 @@ internal static class Unpack15Local {
ChSetA[I]=(ushort)I;
ChSetC[I]=(ushort)(((~I+1) & 0xff)<<8);
}
Utility.Memset(NToPl,0,NToPl.Length);
Utility.Memset(NToPlB,0,NToPlB.Length);
Utility.Memset(NToPlC,0,NToPlC.Length);
new Span<byte>(NToPl).Clear();
new Span<byte>(NToPlB).Clear();
new Span<byte>(NToPlC).Clear();
CorrHuff(ChSetB,NToPlB);
}
@@ -472,10 +564,15 @@ internal static class Unpack15Local {
int I,J;
for (I=7;I>=0;I--)
for (J=0;J<32;J++)
{
CharSet[J]=(ushort)((CharSet[J] & ~0xff) | I);
Utility.Memset(NumToPlace,0,NToPl.Length);
}
new Span<byte>(NumToPlace, 0, NToPl.Length).Clear();
for (I=6;I>=0;I--)
{
NumToPlace[I]=(byte)((7-I)*32);
}
}
private void CopyString15(uint Distance,uint Length)
@@ -492,7 +589,10 @@ internal static class Unpack15Local {
{
int I;
for (Num&=0xfff0,I=0;DecTab[I]<=Num;I++)
{
StartPos++;
}
Inp.faddbits(StartPos);
return(((Num-(I != 0 ? DecTab[I-1]:0))>>(int)(16-StartPos))+PosTab[StartPos]);
}

View File

@@ -40,14 +40,22 @@ internal static class Unpack20Local {
uint Bits;
if (Suspended)
{
UnpPtr=WrPtr;
}
else
{
UnpInitData(Solid);
if (!UnpReadBuf())
{
return;
}
if ((!Solid || !TablesRead2) && !ReadTables20())
{
return;
}
--DestUnpSize;
}
@@ -56,13 +64,20 @@ internal static class Unpack20Local {
UnpPtr&=MaxWinMask;
if (Inp.InAddr>ReadTop-30)
{
if (!UnpReadBuf())
{
break;
}
}
if (((WrPtr-UnpPtr) & MaxWinMask)<270 && WrPtr!=UnpPtr)
{
UnpWriteBuf20();
if (Suspended)
{
return;
}
}
if (UnpAudioBlock)
{
@@ -71,12 +86,18 @@ internal static class Unpack20Local {
if (AudioNumber==256)
{
if (!ReadTables20())
{
break;
}
continue;
}
Window[UnpPtr++]=DecodeAudio((int)AudioNumber);
if (++UnpCurChannel==UnpChannels)
{
UnpCurChannel=0;
}
--DestUnpSize;
continue;
}
@@ -109,7 +130,9 @@ internal static class Unpack20Local {
{
Length++;
if (Distance>=0x40000L)
{
Length++;
}
}
CopyString20(Length,Distance);
@@ -118,7 +141,10 @@ internal static class Unpack20Local {
if (Number==269)
{
if (!ReadTables20())
{
break;
}
continue;
}
if (Number==256)
@@ -143,7 +169,9 @@ internal static class Unpack20Local {
{
Length++;
if (Distance>=0x40000)
{
Length++;
}
}
}
CopyString20(Length,Distance);
@@ -168,7 +196,10 @@ internal static class Unpack20Local {
private void UnpWriteBuf20()
{
if (UnpPtr!=WrPtr)
{
UnpSomeRead=true;
}
if (UnpPtr<WrPtr)
{
UnpIO_UnpWrite(Window, WrPtr,(uint)(-(int)WrPtr & MaxWinMask));
@@ -176,7 +207,10 @@ internal static class Unpack20Local {
UnpAllBuf=true;
}
else
{
UnpIO_UnpWrite(Window,WrPtr,UnpPtr-WrPtr);
}
WrPtr=UnpPtr;
}
@@ -185,13 +219,21 @@ internal static class Unpack20Local {
byte[] BitLength = new byte[BC20];
byte[] Table = new byte[MC20*4];
if (Inp.InAddr>ReadTop-25)
{
if (!UnpReadBuf())
{
return false;
}
}
uint BitField=Inp.getbits();
UnpAudioBlock=(BitField & 0x8000)!=0;
if ((BitField & 0x4000) != 0)
Utility.Memset(UnpOldTable20,0,UnpOldTable20.Length);
{
new Span<byte>(UnpOldTable20).Clear();
}
Inp.addbits(2);
uint TableSize;
@@ -199,12 +241,17 @@ internal static class Unpack20Local {
{
UnpChannels=((BitField>>12) & 3)+1;
if (UnpCurChannel>=UnpChannels)
{
UnpCurChannel=0;
}
Inp.addbits(2);
TableSize=MC20*UnpChannels;
}
else
{
TableSize=NC20+DC20+RC20;
}
for (uint I=0;I<BC20;I++)
{
@@ -215,8 +262,13 @@ internal static class Unpack20Local {
for (uint I=0;I<TableSize;)
{
if (Inp.InAddr>ReadTop-5)
{
if (!UnpReadBuf())
{
return false;
}
}
uint Number=DecodeNumber(Inp,BlockTables.BD);
if (Number<16)
{
@@ -229,13 +281,17 @@ internal static class Unpack20Local {
uint N=(Inp.getbits() >> 14)+3;
Inp.addbits(2);
if (I==0)
{
return false; // We cannot have "repeat previous" code at the first position.
}
else
{
while (N-- > 0 && I<TableSize)
{
Table[I]=Table[I-1];
I++;
}
}
}
else
{
@@ -251,15 +307,24 @@ internal static class Unpack20Local {
Inp.addbits(7);
}
while (N-- > 0 && I<TableSize)
{
Table[I++]=0;
}
}
}
TablesRead2=true;
if (Inp.InAddr>ReadTop)
{
return true;
}
if (UnpAudioBlock)
{
for (uint I=0;I<UnpChannels;I++)
{
MakeDecodeTables(Table,(int)(I*MC20),MD[I],MC20);
}
}
else
{
MakeDecodeTables(Table,0,BlockTables.LD,NC20);
@@ -267,21 +332,27 @@ internal static class Unpack20Local {
MakeDecodeTables(Table,(int)(NC20+DC20),BlockTables.RD,RC20);
}
//x memcpy(UnpOldTable20,Table,sizeof(UnpOldTable20));
Array.Copy(Table,0,UnpOldTable20,0,UnpOldTable20.Length);
Array.Copy(Table,UnpOldTable20,UnpOldTable20.Length);
return true;
}
private void ReadLastTables()
{
if (ReadTop>=Inp.InAddr+5)
{
if (UnpAudioBlock)
{
if (DecodeNumber(Inp,MD[UnpCurChannel])==256)
{
ReadTables20();
}
}
else
if (DecodeNumber(Inp,BlockTables.LD)==269)
ReadTables20();
if (DecodeNumber(Inp,BlockTables.LD)==269)
{
ReadTables20();
}
}
}
private void UnpInitData20(bool Solid)
@@ -296,7 +367,7 @@ internal static class Unpack20Local {
//memset(AudV,0,sizeof(AudV));
AudV = new AudioVariables[4];
Utility.Memset(UnpOldTable20, 0, UnpOldTable20.Length);
new Span<byte>(UnpOldTable20).Clear();
//memset(MD,0,sizeof(MD));
MD = new DecodeTable[4];
}
@@ -352,43 +423,73 @@ internal static class Unpack20Local {
{
case 1:
if (V.K1>=-16)
{
V.K1--;
}
break;
case 2:
if (V.K1<16)
{
V.K1++;
}
break;
case 3:
if (V.K2>=-16)
{
V.K2--;
}
break;
case 4:
if (V.K2<16)
{
V.K2++;
}
break;
case 5:
if (V.K3>=-16)
{
V.K3--;
}
break;
case 6:
if (V.K3<16)
{
V.K3++;
}
break;
case 7:
if (V.K4>=-16)
{
V.K4--;
}
break;
case 8:
if (V.K4<16)
{
V.K4++;
}
break;
case 9:
if (V.K5>=-16)
{
V.K5--;
}
break;
case 10:
if (V.K5<16)
{
V.K5++;
}
break;
}
}

View File

@@ -9,9 +9,6 @@ using size_t = System.UInt64;
#endif
using int64 = System.Int64;
using System;
using static SharpCompress.Compressors.Rar.UnpackV2017.PackDef;
using static SharpCompress.Compressors.Rar.UnpackV2017.UnpackGlobal;
//using static SharpCompress.Compressors.Rar.UnpackV2017.Unpack.Unpack30Local;
/*
namespace SharpCompress.Compressors.Rar.UnpackV2017

View File

@@ -25,14 +25,18 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
{
UnpInitData(Solid);
if (!UnpReadBuf())
{
return;
}
// Check TablesRead5 to be sure that we read tables at least once
// regardless of current block header TablePresent flag.
// So we can safefly use these tables below.
if (!ReadBlockHeader(Inp,ref BlockHeader) ||
if (!ReadBlockHeader(Inp,ref BlockHeader) ||
!ReadTables(Inp,ref BlockHeader, ref BlockTables) || !TablesRead5)
{
return;
}
}
while (true)
@@ -45,8 +49,8 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
// We use 'while', because for empty block containing only Huffman table,
// we'll be on the block border once again just after reading the table.
while (Inp.InAddr>BlockHeader.BlockStart+BlockHeader.BlockSize-1 ||
Inp.InAddr==BlockHeader.BlockStart+BlockHeader.BlockSize-1 &&
while (Inp.InAddr>BlockHeader.BlockStart+BlockHeader.BlockSize-1 ||
Inp.InAddr==BlockHeader.BlockStart+BlockHeader.BlockSize-1 &&
Inp.InBit>=BlockHeader.BlockBitSize)
{
if (BlockHeader.LastBlockInFile)
@@ -55,17 +59,24 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
break;
}
if (!ReadBlockHeader(Inp,ref BlockHeader) || !ReadTables(Inp, ref BlockHeader, ref BlockTables))
{
return;
}
}
if (FileDone || !UnpReadBuf())
{
break;
}
}
if (((WriteBorder-UnpPtr) & MaxWinMask)<MAX_LZ_MATCH+3 && WriteBorder!=UnpPtr)
{
UnpWriteBuf();
if (WrittenFileSize>DestUnpSize)
{
return;
}
if (Suspended)
{
FileExtracted=false;
@@ -77,9 +88,14 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
if (MainSlot<256)
{
if (Fragmented)
{
FragWindow[UnpPtr++]=(byte)MainSlot;
}
else
{
Window[UnpPtr++]=(byte)MainSlot;
}
continue;
}
if (MainSlot>=262)
@@ -124,32 +140,49 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
{
Length++;
if (Distance>0x40000)
{
Length++;
}
}
}
InsertOldDist(Distance);
LastLength=Length;
if (Fragmented)
{
FragWindow.CopyString(Length,Distance,ref UnpPtr,MaxWinMask);
}
else
{
CopyString(Length,Distance);
}
continue;
}
if (MainSlot==256)
{
UnpackFilter Filter = new UnpackFilter();
if (!ReadFilter(Inp,Filter) || !AddFilter(Filter))
{
break;
}
continue;
}
if (MainSlot==257)
{
if (LastLength!=0)
{
if (Fragmented)
{
FragWindow.CopyString(LastLength,OldDist[0],ref UnpPtr,MaxWinMask);
}
else
{
CopyString(LastLength,OldDist[0]);
}
}
continue;
}
if (MainSlot<262)
@@ -157,16 +190,24 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
uint DistNum=MainSlot-258;
uint Distance=OldDist[DistNum];
for (uint I=DistNum;I>0;I--)
{
OldDist[I]=OldDist[I-1];
}
OldDist[0]=Distance;
uint LengthSlot=DecodeNumber(Inp,BlockTables.RD);
uint Length=SlotToLength(Inp,LengthSlot);
LastLength=Length;
if (Fragmented)
{
FragWindow.CopyString(Length,Distance,ref UnpPtr,MaxWinMask);
}
else
{
CopyString(Length,Distance);
}
continue;
}
}
@@ -190,13 +231,19 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
private bool ReadFilter(BitInput Inp,UnpackFilter Filter)
{
if (!Inp.ExternalBuffer && Inp.InAddr>ReadTop-16)
{
if (!UnpReadBuf())
{
return false;
}
}
Filter.BlockStart=ReadFilterData(Inp);
Filter.BlockLength=ReadFilterData(Inp);
if (Filter.BlockLength>MAX_FILTER_BLOCK_SIZE)
{
Filter.BlockLength=0;
}
Filter.Type=(byte)(Inp.fgetbits()>>13);
Inp.faddbits(3);
@@ -216,7 +263,9 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
{
UnpWriteBuf(); // Write data, apply and flush filters.
if (Filters.Count>=MAX_UNPACK_FILTERS)
{
InitFilters(); // Still too many filters, prevent excessive memory use.
}
}
// If distance to filter start is that large that due to circular dictionary
@@ -233,7 +282,10 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
{
int DataSize=ReadTop-Inp.InAddr; // Data left to process.
if (DataSize<0)
{
return false;
}
BlockHeader.BlockSize-=Inp.InAddr-BlockHeader.BlockStart;
if (Inp.InAddr>MAX_SIZE/2)
{
@@ -245,17 +297,29 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
// to make it zero.
if (DataSize>0)
//x memmove(Inp.InBuf,Inp.InBuf+Inp.InAddr,DataSize);
{
Buffer.BlockCopy(Inp.InBuf, Inp.InAddr, Inp.InBuf, 0, DataSize);
}
Inp.InAddr=0;
ReadTop=DataSize;
}
else
{
DataSize=ReadTop;
}
int ReadCode=0;
if (MAX_SIZE!=DataSize)
{
ReadCode=UnpIO_UnpRead(Inp.InBuf,DataSize,MAX_SIZE-DataSize);
}
if (ReadCode>0) // Can be also -1.
{
ReadTop+=ReadCode;
}
ReadBorder=ReadTop-30;
BlockHeader.BlockStart=Inp.InAddr;
if (BlockHeader.BlockSize!=-1) // '-1' means not defined yet.
@@ -285,7 +349,10 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
UnpackFilter flt=Filters[I];
if (flt.Type==FILTER_NONE)
{
continue;
}
if (flt.NextWindow)
{
// Here we skip filters which have block start in current data range
@@ -301,7 +368,10 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
// our write here, we can safely assume that filter is applicable
// to next block on no further wrap arounds is possible.
if (((flt.BlockStart-WrPtr)&MaxWinMask)<=FullWriteSize)
{
flt.NextWindow=false;
}
continue;
}
uint BlockStart=flt.BlockStart;
@@ -326,10 +396,14 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
if (BlockStart<BlockEnd || BlockEnd==0)
{
if (Fragmented)
{
FragWindow.CopyData(Mem,0,BlockStart,BlockLength);
}
else
//x memcpy(Mem,Window+BlockStart,BlockLength);
{
Utility.Copy(Window, BlockStart, Mem, 0, BlockLength);
}
}
else
{
@@ -353,7 +427,9 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
Filters[I].Type=FILTER_NONE;
if (OutMem!=null)
{
UnpIO_UnpWrite(OutMem,0,BlockLength);
}
UnpSomeRead=true;
WrittenFileSize+=BlockLength;
@@ -376,7 +452,9 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
{
UnpackFilter _flt=Filters[J];
if (_flt.Type!=FILTER_NONE)
{
_flt.NextWindow=false;
}
}
// Do not write data left after current filter now.
@@ -393,13 +471,20 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
for (int I=0;I<Filters.Count;I++)
{
if (EmptyCount>0)
{
Filters[I-EmptyCount]=Filters[I];
}
if (Filters[I].Type==FILTER_NONE)
{
EmptyCount++;
}
}
if (EmptyCount>0)
//Filters.Alloc(Filters.Count-EmptyCount);
{
Filters.RemoveRange(Filters.Count-EmptyCount, EmptyCount);
}
if (!NotAllFiltersProcessed) // Only if all filters are processed.
{
@@ -415,9 +500,11 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
// Choose the nearest among WriteBorder and WrPtr actual written border.
// If border is equal to UnpPtr, it means that we have MaxWinSize data ahead.
if (WriteBorder==UnpPtr ||
if (WriteBorder==UnpPtr ||
WrPtr!=UnpPtr && ((WrPtr-UnpPtr)&MaxWinMask)<((WriteBorder-UnpPtr)&MaxWinMask))
{
WriteBorder=WrPtr;
}
}
private byte[] ApplyFilter(byte[] __d,uint DataSize,UnpackFilter Flt)
@@ -450,11 +537,15 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
if ((Addr & 0x80000000)!=0) // Addr<0
{
if (((Addr+Offset) & 0x80000000)==0) // Addr+Offset>=0
{
RawPut4(Addr+FileSize,__d,Data);
}
}
else
if (((Addr-FileSize) & 0x80000000)!=0) // Addr<FileSize
{
RawPut4(Addr-Offset,__d,Data);
}
Data+=4;
CurPos+=4;
@@ -498,7 +589,9 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
{
byte PrevByte=0;
for (uint DestPos=CurChannel;DestPos<DataSize;DestPos+=Channels)
{
DstData[DestPos]=(PrevByte-=__d[Data+SrcPos++]);
}
}
return DstData;
}
@@ -510,9 +603,14 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
private void UnpWriteArea(size_t StartPtr,size_t EndPtr)
{
if (EndPtr!=StartPtr)
{
UnpSomeRead=true;
}
if (EndPtr<StartPtr)
{
UnpAllBuf=true;
}
if (Fragmented)
{
@@ -534,17 +632,25 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
UnpWriteData(Window,0,EndPtr);
}
else
{
UnpWriteData(Window,StartPtr,EndPtr-StartPtr);
}
}
private void UnpWriteData(byte[] Data, size_t offset, size_t Size)
{
if (WrittenFileSize>=DestUnpSize)
{
return;
}
size_t WriteSize=Size;
int64 LeftToWrite=DestUnpSize-WrittenFileSize;
if ((int64)WriteSize>LeftToWrite)
{
WriteSize=(size_t)LeftToWrite;
}
UnpIO_UnpWrite(Data, offset, WriteSize);
WrittenFileSize+=Size;
}
@@ -552,7 +658,9 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
private void UnpInitData50(bool Solid)
{
if (!Solid)
{
TablesRead5=false;
}
}
private bool ReadBlockHeader(BitInput Inp,ref UnpackBlockHeader Header)
@@ -560,16 +668,23 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
Header.HeaderSize=0;
if (!Inp.ExternalBuffer && Inp.InAddr>ReadTop-7)
{
if (!UnpReadBuf())
{
return false;
}
}
Inp.faddbits((uint)((8-Inp.InBit)&7));
byte BlockFlags=(byte)(Inp.fgetbits()>>8);
Inp.faddbits(8);
uint ByteCount=(uint)(((BlockFlags>>3)&3)+1); // Block size byte count.
if (ByteCount==4)
{
return false;
}
Header.HeaderSize=(int)(2+ByteCount);
@@ -588,7 +703,9 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
Header.BlockSize=BlockSize;
byte CheckSum=(byte)(0x5a^BlockFlags^BlockSize^(BlockSize>>8)^(BlockSize>>16));
if (CheckSum!=SavedCheckSum)
{
return false;
}
Header.BlockStart=Inp.InAddr;
ReadBorder=Math.Min(ReadBorder,Header.BlockStart+Header.BlockSize-1);
@@ -601,11 +718,17 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
private bool ReadTables(BitInput Inp,ref UnpackBlockHeader Header, ref UnpackBlockTables Tables)
{
if (!Header.TablePresent)
{
return true;
}
if (!Inp.ExternalBuffer && Inp.InAddr>ReadTop-25)
{
if (!UnpReadBuf())
{
return false;
}
}
byte[] BitLength = new byte[BC];
for (uint I=0;I<BC;I++)
@@ -617,17 +740,24 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
uint ZeroCount=(byte)(Inp.fgetbits() >> 12);
Inp.faddbits(4);
if (ZeroCount==0)
{
BitLength[I]=15;
}
else
{
ZeroCount+=2;
while (ZeroCount-- > 0 && I<BitLength.Length)
{
BitLength[I++]=0;
}
I--;
}
}
else
{
BitLength[I]=(byte)Length;
}
}
MakeDecodeTables(BitLength,0,Tables.BD,BC);
@@ -637,8 +767,13 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
for (uint I=0;I<TableSize;)
{
if (!Inp.ExternalBuffer && Inp.InAddr>ReadTop-5)
{
if (!UnpReadBuf())
{
return false;
}
}
uint Number=DecodeNumber(Inp,Tables.BD);
if (Number<16)
{
@@ -669,11 +804,13 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
return false;
}
else
{
while (N-- > 0 && I<TableSize)
{
Table[I]=Table[I-1];
I++;
}
}
}
else
{
@@ -689,12 +826,17 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
Inp.faddbits(7);
}
while (N-- > 0 && I<TableSize)
{
Table[I++]=0;
}
}
}
TablesRead5=true;
if (!Inp.ExternalBuffer && Inp.InAddr>ReadTop)
{
return false;
}
MakeDecodeTables(Table, 0, Tables.LD,NC);
MakeDecodeTables(Table, (int)NC,Tables.DD,DC);
MakeDecodeTables(Table, (int)(NC+DC),Tables.LDD,LDC);

View File

@@ -70,7 +70,9 @@ public Unpack(/* ComprDataIO *DataIO */)
// will be 0 because of size_t overflow. Let's issue the memory error.
if (WinSize==0)
//ErrHandler.MemoryError();
{
throw new InvalidFormatException("invalid window size (possibly due to a rar file with a 4GB being unpacked on a 32-bit platform)");
}
// Minimum window size must be at least twice more than maximum possible
// size of filter block, which is 0x10000 in RAR now. If window size is
@@ -79,12 +81,19 @@ public Unpack(/* ComprDataIO *DataIO */)
// use 0x40000 for extra safety and possible filter area size expansion.
const size_t MinAllocSize=0x40000;
if (WinSize<MinAllocSize)
{
WinSize=MinAllocSize;
}
if (WinSize<=MaxWinSize) // Use the already allocated window.
{
return;
}
if ((WinSize>>16)>0x10000) // Window size must not exceed 4 GB.
{
return;
}
// Archiving code guarantees that window size does not grow in the same
// solid stream. So if we are here, we are either creating a new window
@@ -96,11 +105,14 @@ public Unpack(/* ComprDataIO *DataIO */)
// We do not handle growth for existing fragmented window.
if (Grow && Fragmented)
//throw std::bad_alloc();
{
throw new InvalidFormatException("Grow && Fragmented");
}
byte[] NewWindow=Fragmented ? null : new byte[WinSize];
if (NewWindow==null)
{
if (Grow || WinSize<0x1000000)
{
// We do not support growth for new fragmented window.
@@ -118,6 +130,7 @@ public Unpack(/* ComprDataIO *DataIO */)
FragWindow.Init(WinSize);
Fragmented=true;
}
}
if (!Fragmented)
{
@@ -132,8 +145,12 @@ public Unpack(/* ComprDataIO *DataIO */)
// RAR archiving code does not allow it in solid streams now,
// but let's implement it anyway just in case we'll change it sometimes.
if (Grow)
{
for (size_t I=1;I<=MaxWinSize;I++)
{
NewWindow[(UnpPtr-I)&(WinSize-1)]=Window[(UnpPtr-I)&(MaxWinSize-1)];
}
}
//if (Window!=null)
// free(Window);
@@ -154,18 +171,27 @@ public Unpack(/* ComprDataIO *DataIO */)
#if !RarV2017_SFX_MODULE
case 15: // rar 1.5 compression
if (!Fragmented)
{
Unpack15(Solid);
}
break;
case 20: // rar 2.x compression
case 26: // files larger than 2GB
if (!Fragmented)
{
Unpack20(Solid);
}
break;
#endif
#if !RarV2017_RAR5ONLY
case 29: // rar 3.x compression
if (!Fragmented)
{
throw new NotImplementedException();
}
break;
#endif
case 50: // RAR 5.0 compression algorithm.
@@ -196,7 +222,7 @@ public Unpack(/* ComprDataIO *DataIO */)
{
if (!Solid)
{
Utility.Memset<uint>(OldDist, 0, OldDist.Length);
new Span<uint>(OldDist).Clear();
OldDistPtr=0;
LastDist=LastLength=0;
// memset(Window,0,MaxWinSize);
@@ -239,14 +265,16 @@ public Unpack(/* ComprDataIO *DataIO */)
uint[] LengthCount = new uint[16];
//memset(LengthCount,0,sizeof(LengthCount));
for (size_t I=0;I<Size;I++)
{
LengthCount[LengthTable[offset+I] & 0xf]++;
}
// We must not calculate the number of zero length codes.
LengthCount[0]=0;
// Set the entire DecodeNum to zero.
//memset(Dec->DecodeNum,0,Size*sizeof(*Dec->DecodeNum));
Utility.FillFast<ushort>(Dec.DecodeNum, 0);
new Span<ushort>(Dec.DecodeNum).Clear();
// Initialize not really used entry for zero length code.
Dec.DecodePos[0]=0;
@@ -272,7 +300,7 @@ public Unpack(/* ComprDataIO *DataIO */)
Dec.DecodeLen[I]=(uint)LeftAligned;
// Every item of this array contains the sum of all preceding items.
// So it contains the start position in code list for every bit length.
// So it contains the start position in code list for every bit length.
Dec.DecodePos[I]=Dec.DecodePos[I-1]+LengthCount[I-1];
}
@@ -280,7 +308,7 @@ public Unpack(/* ComprDataIO *DataIO */)
// so we cannot use the original DecodePos.
uint[] CopyDecodePos = new uint[Dec.DecodePos.Length];
//memcpy(CopyDecodePos,Dec->DecodePos,sizeof(CopyDecodePos));
Array.Copy(Dec.DecodePos, 0, CopyDecodePos, 0, CopyDecodePos.Length);
Array.Copy(Dec.DecodePos, CopyDecodePos, CopyDecodePos.Length);
// For every bit length in the bit length table and so for every item
// of alphabet.
@@ -337,11 +365,13 @@ public Unpack(/* ComprDataIO *DataIO */)
uint BitField=Code<<(int)(16-Dec.QuickBits);
// Prepare the table for quick decoding of bit lengths.
// Find the upper limit for current bit field and adjust the bit length
// accordingly if necessary.
while (CurBitLength<Dec.DecodeLen.Length && BitField>=Dec.DecodeLen[CurBitLength])
{
CurBitLength++;
}
// Translation of right aligned bit string to bit length.
Dec.QuickLen[Code]=CurBitLength;

View File

@@ -94,6 +94,7 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
// if (Length>6) { Dest[6]=Src[6]; } } } } } } } // Close all nested "if"s.
}
else
{
while (Length-- > 0) // Slow copying with all possible precautions.
{
Window[UnpPtr]=Window[SrcPtr++ & MaxWinMask];
@@ -101,6 +102,7 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
// be replaced with 'Window[UnpPtr++ & MaxWinMask]'
UnpPtr=(UnpPtr+1) & MaxWinMask;
}
}
}
private uint DecodeNumber(BitInput Inp,DecodeTable Dec)
@@ -118,11 +120,13 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
// Detect the real bit length for current code.
uint Bits=15;
for (uint I=Dec.QuickBits+1;I<15;I++)
{
if (BitField<Dec.DecodeLen[I])
{
Bits=I;
break;
}
}
Inp.addbits(Bits);
@@ -140,7 +144,9 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
// Out of bounds safety check required for damaged archives.
if (Pos>=Dec.MaxNum)
{
Pos=0;
}
// Convert the position in the code list to position in alphabet
// and return it.

View File

@@ -432,7 +432,10 @@ internal partial class Unpack
private uint GetChar()
{
if (Inp.InAddr>MAX_SIZE-30)
UnpReadBuf();
{
UnpReadBuf();
}
return(Inp.InBuf[Inp.InAddr++]);
}

View File

@@ -1,6 +1,6 @@
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using SharpCompress.Converters;
namespace SharpCompress.Compressors.Rar.VM
{
@@ -72,9 +72,9 @@ namespace SharpCompress.Compressors.Rar.VM
}
if (IsVMMem(mem))
{
return DataConverter.LittleEndian.GetInt32(mem, offset);
return BinaryPrimitives.ReadInt32LittleEndian(mem.AsSpan(offset));
}
return DataConverter.BigEndian.GetInt32(mem, offset);
return BinaryPrimitives.ReadInt32BigEndian(mem.AsSpan(offset));
}
private void SetValue(bool byteMode, byte[] mem, int offset, int value)
@@ -94,11 +94,11 @@ namespace SharpCompress.Compressors.Rar.VM
{
if (IsVMMem(mem))
{
DataConverter.LittleEndian.PutBytes(mem, offset, value);
BinaryPrimitives.WriteInt32LittleEndian(mem.AsSpan(offset), value);
}
else
{
DataConverter.BigEndian.PutBytes(mem, offset, value);
BinaryPrimitives.WriteInt32BigEndian(mem.AsSpan(offset), value);
}
}
@@ -120,12 +120,12 @@ namespace SharpCompress.Compressors.Rar.VM
if (cmdOp.Type == VMOpType.VM_OPREGMEM)
{
int pos = (cmdOp.Offset + cmdOp.Base) & VM_MEMMASK;
ret = DataConverter.LittleEndian.GetInt32(Mem, pos);
ret = BinaryPrimitives.ReadInt32LittleEndian(Mem.AsSpan(pos));
}
else
{
int pos = cmdOp.Offset;
ret = DataConverter.LittleEndian.GetInt32(Mem, pos);
ret = BinaryPrimitives.ReadInt32LittleEndian(Mem.AsSpan(pos));
}
return ret;
}
@@ -190,12 +190,12 @@ namespace SharpCompress.Compressors.Rar.VM
{
//prg.GlobalData.Clear();
// ->GlobalData.Add(dataSize+VM_FIXEDGLOBALSIZE);
prg.GlobalData.SetSize(dataSize + VM_FIXEDGLOBALSIZE);
prg.GlobalData.Capacity = dataSize + VM_FIXEDGLOBALSIZE;
for (int i = 0; i < dataSize + VM_FIXEDGLOBALSIZE; i++)
// memcpy(&Prg->GlobalData[0],&Mem[VM_GLOBALMEMADDR],DataSize+VM_FIXEDGLOBALSIZE);
{
prg.GlobalData[i] = Mem[VM_GLOBALMEMADDR + i];
prg.GlobalData.Add(Mem[VM_GLOBALMEMADDR + i]);
}
}
}
@@ -1449,4 +1449,4 @@ namespace SharpCompress.Compressors.Rar.VM
}
//
}
}

View File

@@ -36,7 +36,9 @@ namespace SharpCompress.Compressors.Xz
var result = BitConverter.GetBytes(uint32);
if (BitConverter.IsLittleEndian)
{
Array.Reverse(result);
}
return result;
}
@@ -46,7 +48,9 @@ namespace SharpCompress.Compressors.Xz
var result = BitConverter.GetBytes(uint32);
if (!BitConverter.IsLittleEndian)
{
Array.Reverse(result);
}
return result;
}

View File

@@ -28,22 +28,33 @@ namespace SharpCompress.Compressors.Xz
private static UInt32[] InitializeTable(UInt32 polynomial)
{
if (polynomial == DefaultPolynomial && defaultTable != null)
{
return defaultTable;
}
var createTable = new UInt32[256];
for (var i = 0; i < 256; i++)
{
var entry = (UInt32)i;
for (var j = 0; j < 8; j++)
{
if ((entry & 1) == 1)
{
entry = (entry >> 1) ^ polynomial;
}
else
{
entry = entry >> 1;
}
}
createTable[i] = entry;
}
if (polynomial == DefaultPolynomial)
{
defaultTable = createTable;
}
return createTable;
}
@@ -52,7 +63,10 @@ namespace SharpCompress.Compressors.Xz
{
var crc = seed;
for (var i = start; i < size - start; i++)
{
crc = (crc >> 8) ^ table[buffer[i] ^ crc & 0xff];
}
return crc;
}

View File

@@ -19,7 +19,9 @@ namespace SharpCompress.Compressors.Xz
public static UInt64 Compute(UInt64 seed, byte[] buffer)
{
if (Table == null)
{
Table = CreateTable(Iso3309Polynomial);
}
return CalculateHash(seed, Table, buffer, 0, buffer.Length);
}
@@ -29,10 +31,12 @@ namespace SharpCompress.Compressors.Xz
var crc = seed;
for (var i = start; i < size; i++)
{
unchecked
{
crc = (crc >> 8) ^ table[(buffer[i] ^ crc) & 0xff];
}
}
return crc;
}
@@ -44,10 +48,17 @@ namespace SharpCompress.Compressors.Xz
{
var entry = (UInt64)i;
for (var j = 0; j < 8; ++j)
{
if ((entry & 1) == 1)
{
entry = (entry >> 1) ^ polynomial;
}
else
{
entry = entry >> 1;
}
}
createTable[i] = entry;
}
return createTable;

View File

@@ -35,12 +35,18 @@ namespace SharpCompress.Compressors.Xz.Filters
{
var filterType = (FilterTypes)reader.ReadXZInteger();
if (!FilterMap.ContainsKey(filterType))
{
throw new NotImplementedException($"Filter {filterType} has not yet been implemented");
}
var filter = Activator.CreateInstance(FilterMap[filterType]) as BlockFilter;
var sizeOfProperties = reader.ReadXZInteger();
if (sizeOfProperties > int.MaxValue)
{
throw new InvalidDataException("Block filter information too large");
}
byte[] properties = reader.ReadBytes((int)sizeOfProperties);
filter.Init(properties);
return filter;

View File

@@ -16,7 +16,10 @@ namespace SharpCompress.Compressors.Xz.Filters
get
{
if (_dictionarySize > 40)
{
throw new OverflowException("Dictionary size greater than UInt32.Max");
}
if (_dictionarySize == 40)
{
return uint.MaxValue;
@@ -30,12 +33,16 @@ namespace SharpCompress.Compressors.Xz.Filters
public override void Init(byte[] properties)
{
if (properties.Length != 1)
{
throw new InvalidDataException("LZMA properties unexpected length");
}
_dictionarySize = (byte)(properties[0] & 0x3F);
var reserved = properties[0] & 0xC0;
if (reserved != 0)
{
throw new InvalidDataException("Reserved bits used in LZMA properties");
}
}
public override void ValidateFilter()

View File

@@ -8,9 +8,14 @@ namespace SharpCompress.Compressors.Xz
public static ulong ReadXZInteger(this BinaryReader reader, int MaxBytes = 9)
{
if (MaxBytes <= 0)
{
throw new ArgumentOutOfRangeException();
}
if (MaxBytes > 9)
{
MaxBytes = 9;
}
byte LastByte = reader.ReadByte();
ulong Output = (ulong)LastByte & 0x7F;
@@ -19,10 +24,15 @@ namespace SharpCompress.Compressors.Xz
while ((LastByte & 0x80) != 0)
{
if (++i >= MaxBytes)
{
throw new InvalidDataException();
}
LastByte = reader.ReadByte();
if (LastByte == 0)
{
throw new InvalidDataException();
}
Output |= ((ulong)(LastByte & 0x7F)) << (i * 7);
}

View File

@@ -33,17 +33,35 @@ namespace SharpCompress.Compressors.Xz
{
int bytesRead = 0;
if (!HeaderIsLoaded)
{
LoadHeader();
}
if (!_streamConnected)
{
ConnectStream();
}
if (!_endOfStream)
{
bytesRead = _decomStream.Read(buffer, offset, count);
}
if (bytesRead != count)
{
_endOfStream = true;
}
if (_endOfStream && !_paddingSkipped)
{
SkipPadding();
}
if (_endOfStream && !_crcChecked)
{
CheckCrc();
}
_bytesRead += (ulong)bytesRead;
return bytesRead;
}
@@ -56,7 +74,9 @@ namespace SharpCompress.Compressors.Xz
byte[] paddingBytes = new byte[4 - bytes];
BaseStream.Read(paddingBytes, 0, paddingBytes.Length);
if (paddingBytes.Any(b => b != 0))
{
throw new InvalidDataException("Padding bytes were non-null");
}
}
_paddingSkipped = true;
}
@@ -101,7 +121,9 @@ namespace SharpCompress.Compressors.Xz
{
_blockHeaderSizeByte = (byte)BaseStream.ReadByte();
if (_blockHeaderSizeByte == 0)
{
throw new XZIndexMarkerReachedException();
}
}
private byte[] CacheHeader()
@@ -110,12 +132,16 @@ namespace SharpCompress.Compressors.Xz
blockHeaderWithoutCrc[0] = _blockHeaderSizeByte;
var read = BaseStream.Read(blockHeaderWithoutCrc, 1, BlockHeaderSize - 5);
if (read != BlockHeaderSize - 5)
{
throw new EndOfStreamException("Reached end of stream unexectedly");
}
uint crc = BaseStream.ReadLittleEndianUInt32();
uint calcCrc = Crc32.Compute(blockHeaderWithoutCrc);
if (crc != calcCrc)
{
throw new InvalidDataException("Block header corrupt");
}
return blockHeaderWithoutCrc;
}
@@ -127,15 +153,22 @@ namespace SharpCompress.Compressors.Xz
byte reserved = (byte)(blockFlags & 0x3C);
if (reserved != 0)
{
throw new InvalidDataException("Reserved bytes used, perhaps an unknown XZ implementation");
}
bool compressedSizePresent = (blockFlags & 0x40) != 0;
bool uncompressedSizePresent = (blockFlags & 0x80) != 0;
if (compressedSizePresent)
{
CompressedSize = reader.ReadXZInteger();
}
if (uncompressedSizePresent)
{
UncompressedSize = reader.ReadXZInteger();
}
}
private void ReadFilters(BinaryReader reader, long baseStreamOffset = 0)
@@ -146,20 +179,30 @@ namespace SharpCompress.Compressors.Xz
var filter = BlockFilter.Read(reader);
if ((i + 1 == _numFilters && !filter.AllowAsLast)
|| (i + 1 < _numFilters && !filter.AllowAsNonLast))
{
throw new InvalidDataException("Block Filters in bad order");
}
if (filter.ChangesDataSize && i + 1 < _numFilters)
{
nonLastSizeChangers++;
}
filter.ValidateFilter();
Filters.Push(filter);
}
if (nonLastSizeChangers > 2)
{
throw new InvalidDataException("More than two non-last block filters cannot change stream size");
}
int blockHeaderPaddingSize = BlockHeaderSize -
(4 + (int)(reader.BaseStream.Position - baseStreamOffset));
(4 + (int)(reader.BaseStream.Position - baseStreamOffset));
byte[] blockHeaderPadding = reader.ReadBytes(blockHeaderPaddingSize);
if (!blockHeaderPadding.All(b => b == 0))
{
throw new InvalidDataException("Block header contains unknown fields");
}
}
}
}

View File

@@ -32,7 +32,10 @@ namespace SharpCompress.Compressors.Xz
byte[] footerBytes = _reader.ReadBytes(6);
uint myCrc = Crc32.Compute(footerBytes);
if (crc != myCrc)
{
throw new InvalidDataException("Footer corrupt");
}
using (var stream = new MemoryStream(footerBytes))
using (var reader = new BinaryReader(stream))
{

View File

@@ -38,18 +38,24 @@ namespace SharpCompress.Compressors.Xz
UInt32 crc = _reader.ReadLittleEndianUInt32();
UInt32 calcCrc = Crc32.Compute(streamFlags);
if (crc != calcCrc)
{
throw new InvalidDataException("Stream header corrupt");
}
BlockCheckType = (CheckType)(streamFlags[1] & 0x0F);
byte futureUse = (byte)(streamFlags[1] & 0xF0);
if (futureUse != 0 || streamFlags[0] != 0)
{
throw new InvalidDataException("Unknown XZ Stream Version");
}
}
private void CheckMagicBytes(byte[] header)
{
if (!header.SequenceEqual(MagicHeader))
{
throw new InvalidDataException("Invalid XZ Stream");
}
}
}
}

View File

@@ -23,7 +23,9 @@ namespace SharpCompress.Compressors.Xz
_indexMarkerAlreadyVerified = indexMarkerAlreadyVerified;
StreamStartPosition = reader.BaseStream.Position;
if (indexMarkerAlreadyVerified)
{
StreamStartPosition--;
}
}
public static XZIndex FromStream(Stream stream, bool indexMarkerAlreadyVerified)
@@ -36,7 +38,10 @@ namespace SharpCompress.Compressors.Xz
public void Process()
{
if (!_indexMarkerAlreadyVerified)
{
VerifyIndexMarker();
}
NumberOfRecords = _reader.ReadXZInteger();
for (ulong i = 0; i < NumberOfRecords; i++)
{
@@ -50,7 +55,9 @@ namespace SharpCompress.Compressors.Xz
{
byte marker = _reader.ReadByte();
if (marker != 0)
{
throw new InvalidDataException("Not an index block");
}
}
private void SkipPadding()
@@ -60,7 +67,9 @@ namespace SharpCompress.Compressors.Xz
{
byte[] paddingBytes = _reader.ReadBytes(4 - bytes);
if (paddingBytes.Any(b => b != 0))
{
throw new InvalidDataException("Padding bytes were non-null");
}
}
}

View File

@@ -8,7 +8,9 @@ namespace SharpCompress.Compressors.Xz
{
BaseStream = stream;
if (!BaseStream.CanRead)
{
throw new InvalidDataException("Must be able to read from stream");
}
}
}
}

View File

@@ -50,9 +50,15 @@ namespace SharpCompress.Compressors.Xz
{
int bytesRead = 0;
if (_endOfStream)
{
return bytesRead;
}
if (!HeaderIsRead)
{
ReadHeader();
}
bytesRead = ReadBlocks(buffer, offset, count);
if (bytesRead < count)
{
@@ -86,18 +92,27 @@ namespace SharpCompress.Compressors.Xz
{
int bytesRead = 0;
if (_currentBlock == null)
{
NextBlock();
}
for (;;)
{
try
{
if (bytesRead >= count)
{
break;
}
int remaining = count - bytesRead;
int newOffset = offset + bytesRead;
int justRead = _currentBlock.Read(buffer, newOffset, remaining);
if (justRead < remaining)
{
NextBlock();
}
bytesRead += justRead;
}
catch (XZIndexMarkerReachedException)

File diff suppressed because it is too large Load Diff

View File

@@ -1,5 +1,4 @@
using System;
using System.Collections.Generic;
using System.IO;
namespace SharpCompress.Crypto
@@ -79,22 +78,33 @@ namespace SharpCompress.Crypto
private static uint[] InitializeTable(uint polynomial)
{
if (polynomial == DefaultPolynomial && defaultTable != null)
{
return defaultTable;
}
var createTable = new uint[256];
for (var i = 0; i < 256; i++)
{
var entry = (uint)i;
for (var j = 0; j < 8; j++)
{
if ((entry & 1) == 1)
{
entry = (entry >> 1) ^ polynomial;
}
else
{
entry = entry >> 1;
}
}
createTable[i] = entry;
}
if (polynomial == DefaultPolynomial)
{
defaultTable = createTable;
}
return createTable;
}

View File

@@ -9,7 +9,7 @@ namespace SharpCompress.Crypto
private static readonly int MAXKC = (256 / 4);
private static readonly byte[] Logtable =
private static ReadOnlySpan<byte> Logtable => new byte[]
{
0, 0, 25, 1, 50, 2, 26, 198,
75, 199, 27, 104, 51, 238, 223, 3,
@@ -45,7 +45,7 @@ namespace SharpCompress.Crypto
13, 99, 140, 128, 192, 247, 112, 7
};
private static readonly byte[] Alogtable =
private static ReadOnlySpan<byte> Alogtable => new byte[]
{
0, 3, 5, 15, 17, 51, 85, 255, 26, 46, 114, 150, 161, 248, 19, 53,
95, 225, 56, 72, 216, 115, 149, 164, 247, 2, 6, 10, 30, 34, 102, 170,
@@ -121,7 +121,7 @@ namespace SharpCompress.Crypto
23, 43, 4, 126, 186, 119, 214, 38, 225, 105, 20, 99, 85, 33, 12, 125
};
private static readonly byte[] rcon =
private static ReadOnlySpan<byte> rcon => new byte[]
{
0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36, 0x6c, 0xd8, 0xab, 0x4d, 0x9a,
0x2f, 0x5e, 0xbc, 0x63, 0xc6, 0x97, 0x35, 0x6a, 0xd4, 0xb3, 0x7d, 0xfa, 0xef, 0xc5, 0x91

View File

@@ -1,6 +1,6 @@
using System;
using System.Buffers.Binary;
using System.IO;
using SharpCompress.Converters;
namespace SharpCompress.IO
{
@@ -81,17 +81,17 @@ namespace SharpCompress.IO
public override short ReadInt16()
{
return DataConverter.LittleEndian.GetInt16(ReadBytes(2), 0);
return BinaryPrimitives.ReadInt16LittleEndian(ReadBytes(2));
}
public override int ReadInt32()
{
return DataConverter.LittleEndian.GetInt32(ReadBytes(4), 0);
return BinaryPrimitives.ReadInt32LittleEndian(ReadBytes(4));
}
public override long ReadInt64()
{
return DataConverter.LittleEndian.GetInt64(ReadBytes(8), 0);
return BinaryPrimitives.ReadInt64LittleEndian(ReadBytes(8));
}
public override sbyte ReadSByte()
@@ -111,17 +111,17 @@ namespace SharpCompress.IO
public override ushort ReadUInt16()
{
return DataConverter.LittleEndian.GetUInt16(ReadBytes(2), 0);
return BinaryPrimitives.ReadUInt16LittleEndian(ReadBytes(2));
}
public override uint ReadUInt32()
{
return DataConverter.LittleEndian.GetUInt32(ReadBytes(4), 0);
return BinaryPrimitives.ReadUInt32LittleEndian(ReadBytes(4));
}
public override ulong ReadUInt64()
{
return DataConverter.LittleEndian.GetUInt64(ReadBytes(8), 0);
return BinaryPrimitives.ReadUInt64LittleEndian(ReadBytes(8));
}
// RAR5 style variable length encoded value
@@ -196,4 +196,4 @@ namespace SharpCompress.IO
throw new FormatException("malformed vint");
}
}
}
}

View File

@@ -1,6 +1,5 @@
using System;
using System.IO;
using SharpCompress.Compressors.Filters;
namespace SharpCompress.IO
{

View File

@@ -148,4 +148,4 @@ namespace SharpCompress
#endregion
}
}
}

Some files were not shown because too many files have changed in this diff Show More