Compare commits

...

43 Commits
0.23 ... 0.25

Author SHA1 Message Date
Adam Hathcock
762497b1c1 Tag for 0.25.0 and update packages 2020-04-03 08:25:43 +01:00
Adam Hathcock
be9edc7512 Merge pull request #500 from Erior/Issue_86
ZipReader/StreamingZipReaderFactory fails for archive entries which are uncompressed files in ZIP format #86
2020-01-17 09:38:19 +00:00
Lars Vahlenberg
9bf9d34d94 Issue86 Proposal 2020-01-16 22:08:48 +01:00
Adam Hathcock
df8405006c Fix workflow name 2020-01-03 09:24:08 +00:00
Adam Hathcock
d135fdce58 Give github actions build a name and use badge 2020-01-03 09:22:51 +00:00
Adam Hathcock
ba570b93bb Merge pull request #496 from Bond-009/allocations
Reduce the amount of allocations
2020-01-03 09:18:17 +00:00
Adam Hathcock
6dfe0c7a96 Merge branch 'master' into allocations 2020-01-03 09:16:46 +00:00
Adam Hathcock
73d4430a65 Merge pull request #498 from adamhathcock/build-netcore3
Build netcore3
2020-01-03 09:15:14 +00:00
Adam Hathcock
ce6fd9b976 JUst one target 2020-01-03 09:12:10 +00:00
Adam Hathcock
ae7e8c03f2 Put wrong SDK 2020-01-03 09:07:34 +00:00
Adam Hathcock
22e2526f4c Update cake and dependencies 2020-01-03 09:06:13 +00:00
Adam Hathcock
50283d9411 Add new build targets for netcore3 2020-01-03 09:02:04 +00:00
Bond-009
d2c2b58f3b Fix language version and add netstandard2.1 2020-01-02 17:43:58 +01:00
Bond_009
50d4b39ca0 Fix test 2019-12-30 22:17:45 +01:00
Bond_009
1ed675e960 Minor improvement 2019-12-30 19:19:05 +01:00
Bond_009
80b0671844 Reduce the amount of allocations
* Directly fill an array instead of filling a List and copying that to
an array
* Use own buffer when writing bytes to a stream
* Remove DataConverter class, replaced by BinaryPrimitives
2019-12-30 18:58:25 +01:00
Bond-009
6f387336c0 Use functions from System.Memory instead of selfmade ones (#495)
* Use functions from System.Memory instead of selfmade ones

* Update SharpCompress.Test.csproj
2019-12-30 15:19:46 +00:00
Adam Hathcock
9540b01bcc NET Standard 1.3 and 2.0 only (#482)
* Remove NET35, NET45 and NET Standard 1.0

* Update README and memset

* Remove NETCORE build flag

* NET 46 too?

* Update packages and usage
2019-10-10 09:24:41 +01:00
Adam Hathcock
446d6914c1 Merge pull request #483 from Bond-009/nameof
Use nameof for param names
2019-09-17 14:21:04 +01:00
Bond_009
637223aa53 Use nameof for param names 2019-09-17 13:28:44 +02:00
Adam Hathcock
17d5565120 Merge pull request #478 from Bond-009/buffers
Use System.Buffers Nuget package
2019-09-17 10:05:29 +01:00
Bond_009
4b54187b4c Fix build 2019-09-11 21:33:57 +02:00
Bond_009
cfb1421367 Use System.Buffers Nuget package 2019-09-11 20:06:50 +02:00
Adam Hathcock
5072a0f6f5 Merge pull request #471 from adamhathcock/release-024
Bump version and dependencies
2019-08-20 20:36:38 +01:00
Adam Hathcock
357dff1403 Bump version and dependencies 2019-08-20 14:29:47 -05:00
Adam Hathcock
a2bd66ded8 Merge pull request #460 from itn3000/tar-fix-garbled2
fix filename garbling in tar(#414)
2019-06-27 12:16:53 +01:00
itn3000
6bfa3c25a4 add more comments 2019-06-27 20:01:40 +09:00
itn3000
1ea9ab72c1 add comment for subtracting 2019-06-27 19:59:16 +09:00
itn3000
07c42b8725 replace magic number 2019-06-27 10:59:21 +09:00
itn3000
70392c32e2 use Buffer.BlockCopy for performance 2019-06-27 09:47:26 +09:00
itn3000
9b4b2a9f7c considering encoding in processing filename(#414)
modify test tar archive because it was not expected one.
(expected "тест.txt" in encoding 866, but actual is omitted upper byte)
2019-06-26 17:34:12 +09:00
Adam Hathcock
d3dd708b58 Merge pull request #457 from DannyBoyk/issue_456_zip_bounded_substreams_data_descriptors
Return a bounded substream when data descriptors are used in seekable zips
2019-06-04 13:42:24 +01:00
Daniel Nash
af264cdc58 Return a bounded substream when data descriptors are used in seekable zips 2019-06-04 08:31:42 -04:00
Adam Hathcock
cfd6df976f Merge pull request #455 from DannyBoyk/issue_454_zip_bad_extra_field
Handle a bad extra field in a local file header in zip files
2019-06-04 09:24:55 +01:00
Daniel Nash
b2bd20b47e Handle a bad extra field in a local file header in zip files 2019-06-03 13:02:28 -04:00
Adam Hathcock
ffea093e95 Merge pull request #453 from Lssikkes/master
Fix for clients failing on just having a 64 bit offset in ZIP64
2019-05-24 19:33:59 +01:00
Leroy Sikkes
78eb8fcf92 Fix for clients that don't support ZIP64 standard correctly in case headers are only pointed to in ZIP64 directory structure 2019-05-24 18:27:49 +02:00
Adam Hathcock
a052956881 Merge pull request #452 from Lssikkes/master
Various fixes for ZIP64 writer (zero byte entries, 32 bit where supported)
2019-05-24 16:17:48 +01:00
Lssikkes
9319ea6992 Updated ZIP64 writer to write 32 bit values to directory entries for better compatibility.
Support for zero byte files without corruption errors from WinRAR/7-zip
2019-05-24 16:14:30 +02:00
Adam Hathcock
4e5b70dbfa Merge pull request #444 from eugeny-trunin/mem-opt
Memory and speed optimization
2019-03-20 15:13:00 +00:00
evgeny
c68eaa8397 Memory and speed optimization 2019-03-20 17:46:57 +03:00
Adam Hathcock
bbb7c85ba7 Merge pull request #442 from turbolocust/master
Fix: ArchiveEncoding was ignored in TarWriterOptions
2019-03-19 08:31:31 +00:00
Matthias Fussenegger
8174359228 Fix: ArchiveEncoding was ignored in TarWriterOptions 2019-03-18 18:25:00 +01:00
101 changed files with 697 additions and 2605 deletions

View File

@@ -1,16 +0,0 @@
version: 2
jobs:
build:
docker:
- image: microsoft/dotnet:2.2.104-sdk
steps:
- checkout
- run:
name: Install Cake
command: |
dotnet tool install -g Cake.Tool
echo 'export PATH=$PATH:/root/.dotnet/tools' >> $BASH_ENV
source $BASH_ENV
- run:
name: Build
command: dotnet cake build.cake

17
.github/workflows/dotnetcore.yml vendored Normal file
View File

@@ -0,0 +1,17 @@
name: SharpCompress
on: [push]
jobs:
build:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [windows-latest, ubuntu-latest]
steps:
- uses: actions/checkout@v1
- uses: actions/setup-dotnet@v1
with:
dotnet-version: 3.1.100
- name: Run the Cake script
uses: ecampidoglio/cake-action@master

View File

@@ -1,14 +1,14 @@
# SharpCompress
SharpCompress is a compression library in pure C# for .NET 3.5, 4.5, .NET Standard 1.0, 1.3 that can unrar, un7zip, unzip, untar unbzip2 and ungzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip are implemented.
SharpCompress is a compression library in pure C# for .NET Standard 1.3 and 2.0 that can unrar, un7zip, unzip, untar unbzip2 and ungzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip are implemented.
The major feature is support for non-seekable streams so large files can be processed on the fly (i.e. download stream).
AppVeyor Build -
[![Build status](https://ci.appveyor.com/api/projects/status/voxg971oemmvxh1e/branch/master?svg=true)](https://ci.appveyor.com/project/adamhathcock/sharpcompress/branch/master)
Circle CI Build -
[![CircleCI](https://circleci.com/gh/adamhathcock/sharpcompress.svg?style=svg)](https://circleci.com/gh/adamhathcock/sharpcompress)
GitHub Actions Build -
[![GitHubActions](https://github.com/adamhathcock/sharpcompress/workflows/SharpCompress/badge.svg)](https://circleci.com/gh/adamhathcock/sharpcompress)
## Need Help?

View File

@@ -1,5 +1,5 @@
version: '{build}'
image: Visual Studio 2017
image: Visual Studio 2019
pull_requests:
do_not_increment_build_number: true

View File

@@ -17,24 +17,24 @@ Task("Build")
{
c.SetConfiguration("Release")
.SetVerbosity(Verbosity.Minimal)
.UseToolVersion(MSBuildToolVersion.VS2017);
.UseToolVersion(MSBuildToolVersion.VS2019);
});
}
else
{
var settings = new DotNetCoreBuildSettings
{
Framework = "netstandard1.0",
Framework = "netstandard1.3",
Configuration = "Release",
NoRestore = true
};
DotNetCoreBuild("./src/SharpCompress/SharpCompress.csproj", settings);
settings.Framework = "netstandard1.3";
settings.Framework = "netstandard2.0";
DotNetCoreBuild("./src/SharpCompress/SharpCompress.csproj", settings);
settings.Framework = "netstandard2.0";
settings.Framework = "netstandard2.1";
DotNetCoreBuild("./src/SharpCompress/SharpCompress.csproj", settings);
}
});
@@ -49,7 +49,7 @@ Task("Test")
var settings = new DotNetCoreTestSettings
{
Configuration = "Release",
Framework = "netcoreapp2.2"
Framework = "netcoreapp3.1"
};
DotNetCoreTest(file.ToString(), settings);
}
@@ -64,7 +64,7 @@ Task("Pack")
MSBuild("src/SharpCompress/SharpCompress.csproj", c => c
.SetConfiguration("Release")
.SetVerbosity(Verbosity.Minimal)
.UseToolVersion(MSBuildToolVersion.VS2017)
.UseToolVersion(MSBuildToolVersion.VS2019)
.WithProperty("NoBuild", "true")
.WithTarget("Pack"));
}

View File

@@ -24,7 +24,6 @@ namespace SharpCompress.Archives
private bool disposed;
#if !NO_FILE
internal AbstractArchive(ArchiveType type, FileInfo fileInfo, ReaderOptions readerOptions)
{
Type = type;
@@ -40,7 +39,6 @@ namespace SharpCompress.Archives
protected abstract IEnumerable<TVolume> LoadVolumes(FileInfo file);
#endif
internal AbstractArchive(ArchiveType type, IEnumerable<Stream> streams, ReaderOptions readerOptions)
{
@@ -142,12 +140,12 @@ namespace SharpCompress.Archives
/// <summary>
/// Use this method to extract all entries in an archive in order.
/// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
/// This is primarily for SOLID Rar Archives or 7Zip Archives as they need to be
/// extracted sequentially for the best performance.
///
///
/// This method will load all entry information from the archive.
///
/// WARNING: this will reuse the underlying stream for the archive. Errors may
///
/// WARNING: this will reuse the underlying stream for the archive. Errors may
/// occur if this is used at the same time as other extraction methods on this instance.
/// </summary>
/// <returns></returns>
@@ -176,4 +174,4 @@ namespace SharpCompress.Archives
}
}
}
}
}

View File

@@ -28,12 +28,10 @@ namespace SharpCompress.Archives
{
}
#if !NO_FILE
internal AbstractWritableArchive(ArchiveType type, FileInfo fileInfo, ReaderOptions readerFactoryOptions)
: base(type, fileInfo, readerFactoryOptions)
{
}
#endif
public override ICollection<TEntry> Entries
{
@@ -144,4 +142,4 @@ namespace SharpCompress.Archives
modifiedEntries.Cast<Entry>().ForEach(x => x.Close());
}
}
}
}

View File

@@ -6,7 +6,6 @@ using SharpCompress.Archives.SevenZip;
using SharpCompress.Archives.Tar;
using SharpCompress.Archives.Zip;
using SharpCompress.Common;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Readers;
namespace SharpCompress.Archives
@@ -21,7 +20,7 @@ namespace SharpCompress.Archives
/// <returns></returns>
public static IArchive Open(Stream stream, ReaderOptions readerOptions = null)
{
stream.CheckNotNull("stream");
stream.CheckNotNull(nameof(stream));
if (!stream.CanRead || !stream.CanSeek)
{
throw new ArgumentException("Stream should be readable and seekable");
@@ -82,8 +81,6 @@ namespace SharpCompress.Archives
}
}
#if !NO_FILE
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
@@ -91,7 +88,7 @@ namespace SharpCompress.Archives
/// <param name="options"></param>
public static IArchive Open(string filePath, ReaderOptions options = null)
{
filePath.CheckNotNullOrEmpty("filePath");
filePath.CheckNotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), options);
}
@@ -102,7 +99,7 @@ namespace SharpCompress.Archives
/// <param name="options"></param>
public static IArchive Open(FileInfo fileInfo, ReaderOptions options = null)
{
fileInfo.CheckNotNull("fileInfo");
fileInfo.CheckNotNull(nameof(fileInfo));
options = options ?? new ReaderOptions { LeaveStreamOpen = false };
using (var stream = fileInfo.OpenRead())
{
@@ -148,6 +145,5 @@ namespace SharpCompress.Archives
}
}
}
#endif
}
}
}

View File

@@ -13,8 +13,6 @@ namespace SharpCompress.Archives.GZip
{
public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
{
#if !NO_FILE
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
@@ -22,7 +20,7 @@ namespace SharpCompress.Archives.GZip
/// <param name="readerOptions"></param>
public static GZipArchive Open(string filePath, ReaderOptions readerOptions = null)
{
filePath.CheckNotNullOrEmpty("filePath");
filePath.CheckNotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
@@ -33,10 +31,9 @@ namespace SharpCompress.Archives.GZip
/// <param name="readerOptions"></param>
public static GZipArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
{
fileInfo.CheckNotNull("fileInfo");
fileInfo.CheckNotNull(nameof(fileInfo));
return new GZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
}
#endif
/// <summary>
/// Takes a seekable Stream as a source
@@ -45,7 +42,7 @@ namespace SharpCompress.Archives.GZip
/// <param name="readerOptions"></param>
public static GZipArchive Open(Stream stream, ReaderOptions readerOptions = null)
{
stream.CheckNotNull("stream");
stream.CheckNotNull(nameof(stream));
return new GZipArchive(stream, readerOptions ?? new ReaderOptions());
}
@@ -54,8 +51,6 @@ namespace SharpCompress.Archives.GZip
return new GZipArchive();
}
#if !NO_FILE
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
@@ -100,7 +95,6 @@ namespace SharpCompress.Archives.GZip
SaveTo(stream, new WriterOptions(CompressionType.GZip));
}
}
#endif
public static bool IsGZipFile(Stream stream)
{
@@ -185,4 +179,4 @@ namespace SharpCompress.Archives.GZip
return GZipReader.Open(stream);
}
}
}
}

View File

@@ -36,12 +36,10 @@ namespace SharpCompress.Archives
}
streamListener.FireEntryExtractionEnd(archiveEntry);
}
#if !NO_FILE
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteToDirectory(this IArchiveEntry entry, string destinationDirectory,
ExtractionOptions options = null)
{
@@ -65,6 +63,5 @@ namespace SharpCompress.Archives
}
});
}
#endif
}
}

View File

@@ -1,18 +1,13 @@
#if !NO_FILE
using System.Linq;
using System.Linq;
using SharpCompress.Common;
#endif
namespace SharpCompress.Archives
{
public static class IArchiveExtensions
{
#if !NO_FILE
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
public static void WriteToDirectory(this IArchive archive, string destinationDirectory,
ExtractionOptions options = null)
{
@@ -21,6 +16,5 @@ namespace SharpCompress.Archives
entry.WriteToDirectory(destinationDirectory, options);
}
}
#endif
}
}

View File

@@ -1,6 +1,4 @@
#if !NO_FILE
using System;
#endif
using System;
using System.IO;
using SharpCompress.Writers;
@@ -8,8 +6,6 @@ namespace SharpCompress.Archives
{
public static class IWritableArchiveExtensions
{
#if !NO_FILE
public static void AddEntry(this IWritableArchive writableArchive,
string entryPath, string filePath)
{
@@ -39,11 +35,7 @@ namespace SharpCompress.Archives
this IWritableArchive writableArchive,
string filePath, string searchPattern = "*.*", SearchOption searchOption = SearchOption.AllDirectories)
{
#if NET35
foreach (var path in Directory.GetFiles(filePath, searchPattern, searchOption))
#else
foreach (var path in Directory.EnumerateFiles(filePath, searchPattern, searchOption))
#endif
{
var fileInfo = new FileInfo(path);
writableArchive.AddEntry(path.Substring(filePath.Length), fileInfo.OpenRead(), true, fileInfo.Length,
@@ -58,6 +50,5 @@ namespace SharpCompress.Archives
}
return writableArchive.AddEntry(key, fileInfo.OpenRead(), true, fileInfo.Length, fileInfo.LastWriteTime);
}
#endif
}
}

View File

@@ -1,7 +1,6 @@
#if !NO_FILE
using System.Collections.Generic;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.IO;
@@ -18,7 +17,7 @@ namespace SharpCompress.Archives.Rar
: base(StreamingMode.Seekable, fileInfo.OpenRead(), FixOptions(options))
{
FileInfo = fileInfo;
FileParts = GetVolumeFileParts().ToReadOnly();
FileParts = GetVolumeFileParts().ToArray().ToReadOnly();
}
private static ReaderOptions FixOptions(ReaderOptions options)
@@ -43,4 +42,3 @@ namespace SharpCompress.Archives.Rar
}
}
}
#endif

View File

@@ -1,6 +1,4 @@
#if !NO_FILE
using System.IO;
using System.IO;
using SharpCompress.Common.Rar.Headers;
namespace SharpCompress.Archives.Rar
@@ -25,4 +23,3 @@ namespace SharpCompress.Archives.Rar
}
}
}
#endif

View File

@@ -15,8 +15,6 @@ namespace SharpCompress.Archives.Rar
internal Lazy<IRarUnpack> UnpackV2017 { get; } = new Lazy<IRarUnpack>(() => new SharpCompress.Compressors.Rar.UnpackV2017.Unpack());
internal Lazy<IRarUnpack> UnpackV1 { get; } = new Lazy<IRarUnpack>(() => new SharpCompress.Compressors.Rar.UnpackV1.Unpack());
#if !NO_FILE
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
@@ -31,7 +29,6 @@ namespace SharpCompress.Archives.Rar
{
return RarArchiveVolumeFactory.GetParts(file, ReaderOptions);
}
#endif
/// <summary>
/// Takes multiple seekable Streams for a multi-part archive
@@ -63,9 +60,6 @@ namespace SharpCompress.Archives.Rar
public override bool IsSolid => Volumes.First().IsSolidArchive;
#region Creation
#if !NO_FILE
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
@@ -73,7 +67,7 @@ namespace SharpCompress.Archives.Rar
/// <param name="options"></param>
public static RarArchive Open(string filePath, ReaderOptions options = null)
{
filePath.CheckNotNullOrEmpty("filePath");
filePath.CheckNotNullOrEmpty(nameof(filePath));
return new RarArchive(new FileInfo(filePath), options ?? new ReaderOptions());
}
@@ -84,10 +78,9 @@ namespace SharpCompress.Archives.Rar
/// <param name="options"></param>
public static RarArchive Open(FileInfo fileInfo, ReaderOptions options = null)
{
fileInfo.CheckNotNull("fileInfo");
fileInfo.CheckNotNull(nameof(fileInfo));
return new RarArchive(fileInfo, options ?? new ReaderOptions());
}
#endif
/// <summary>
/// Takes a seekable Stream as a source
@@ -96,7 +89,7 @@ namespace SharpCompress.Archives.Rar
/// <param name="options"></param>
public static RarArchive Open(Stream stream, ReaderOptions options = null)
{
stream.CheckNotNull("stream");
stream.CheckNotNull(nameof(stream));
return Open(stream.AsEnumerable(), options ?? new ReaderOptions());
}
@@ -107,11 +100,10 @@ namespace SharpCompress.Archives.Rar
/// <param name="options"></param>
public static RarArchive Open(IEnumerable<Stream> streams, ReaderOptions options = null)
{
streams.CheckNotNull("streams");
streams.CheckNotNull(nameof(streams));
return new RarArchive(streams, options ?? new ReaderOptions());
}
#if !NO_FILE
public static bool IsRarFile(string filePath)
{
return IsRarFile(new FileInfo(filePath));
@@ -128,7 +120,6 @@ namespace SharpCompress.Archives.Rar
return IsRarFile(stream);
}
}
#endif
public static bool IsRarFile(Stream stream, ReaderOptions options = null)
{
@@ -145,4 +136,4 @@ namespace SharpCompress.Archives.Rar
#endregion
}
}
}

View File

@@ -3,11 +3,9 @@ using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Rar;
using SharpCompress.Readers;
#if !NO_FILE
using System.Linq;
using System.Text;
using SharpCompress.Common.Rar.Headers;
#endif
namespace SharpCompress.Archives.Rar
{
@@ -25,8 +23,7 @@ namespace SharpCompress.Archives.Rar
yield return part;
}
}
#if !NO_FILE
internal static IEnumerable<RarVolume> GetParts(FileInfo fileInfo, ReaderOptions options)
{
FileInfoRarArchiveVolume part = new FileInfoRarArchiveVolume(fileInfo, options);
@@ -141,7 +138,5 @@ namespace SharpCompress.Archives.Rar
throw new ArgumentException("Filename invalid or next archive could not be found:"
+ fileInfo.FullName);
}
#endif
}
}

View File

@@ -19,12 +19,10 @@ namespace SharpCompress.Archives.Rar
internal override Stream GetCompressedStream()
{
stream.Position = FileHeader.DataStartPosition;
#if !NO_CRYPTO
if (FileHeader.R4Salt != null)
{
return new RarCryptoWrapper(stream, password, FileHeader.R4Salt);
}
#endif
return stream;
}

View File

@@ -13,8 +13,6 @@ namespace SharpCompress.Archives.SevenZip
public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVolume>
{
private ArchiveDatabase database;
#if !NO_FILE
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
@@ -36,7 +34,6 @@ namespace SharpCompress.Archives.SevenZip
fileInfo.CheckNotNull("fileInfo");
return new SevenZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
}
#endif
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
@@ -48,7 +45,6 @@ namespace SharpCompress.Archives.SevenZip
return new SevenZipArchive(stream, readerOptions ?? new ReaderOptions());
}
#if !NO_FILE
internal SevenZipArchive(FileInfo fileInfo, ReaderOptions readerOptions)
: base(ArchiveType.SevenZip, fileInfo, readerOptions)
{
@@ -75,7 +71,6 @@ namespace SharpCompress.Archives.SevenZip
return IsSevenZipFile(stream);
}
}
#endif
internal SevenZipArchive(Stream stream, ReaderOptions readerOptions)
: base(ArchiveType.SevenZip, stream.AsEnumerable(), readerOptions)
@@ -140,7 +135,7 @@ namespace SharpCompress.Archives.SevenZip
{
BinaryReader reader = new BinaryReader(stream);
byte[] signatureBytes = reader.ReadBytes(6);
return signatureBytes.BinaryEquals(SIGNATURE);
return signatureBytes.SequenceEqual(SIGNATURE);
}
protected override IReader CreateReaderForSolidExtraction()
@@ -206,7 +201,7 @@ namespace SharpCompress.Archives.SevenZip
return CreateEntryStream(new ReadOnlySubStream(currentStream, currentItem.Size));
}
}
private class PasswordProvider : IPasswordProvider
{
private readonly string _password;
@@ -214,7 +209,6 @@ namespace SharpCompress.Archives.SevenZip
public PasswordProvider(string password)
{
_password = password;
}
public string CryptoGetTextPassword()

View File

@@ -15,8 +15,6 @@ namespace SharpCompress.Archives.Tar
{
public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
{
#if !NO_FILE
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
@@ -24,7 +22,7 @@ namespace SharpCompress.Archives.Tar
/// <param name="readerOptions"></param>
public static TarArchive Open(string filePath, ReaderOptions readerOptions = null)
{
filePath.CheckNotNullOrEmpty("filePath");
filePath.CheckNotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
@@ -35,10 +33,9 @@ namespace SharpCompress.Archives.Tar
/// <param name="readerOptions"></param>
public static TarArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
{
fileInfo.CheckNotNull("fileInfo");
fileInfo.CheckNotNull(nameof(fileInfo));
return new TarArchive(fileInfo, readerOptions ?? new ReaderOptions());
}
#endif
/// <summary>
/// Takes a seekable Stream as a source
@@ -47,12 +44,10 @@ namespace SharpCompress.Archives.Tar
/// <param name="readerOptions"></param>
public static TarArchive Open(Stream stream, ReaderOptions readerOptions = null)
{
stream.CheckNotNull("stream");
stream.CheckNotNull(nameof(stream));
return new TarArchive(stream, readerOptions ?? new ReaderOptions());
}
#if !NO_FILE
public static bool IsTarFile(string filePath)
{
return IsTarFile(new FileInfo(filePath));
@@ -69,7 +64,6 @@ namespace SharpCompress.Archives.Tar
return IsTarFile(stream);
}
}
#endif
public static bool IsTarFile(Stream stream)
{
@@ -85,9 +79,7 @@ namespace SharpCompress.Archives.Tar
}
return false;
}
#if !NO_FILE
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
@@ -102,7 +94,6 @@ namespace SharpCompress.Archives.Tar
{
return new TarVolume(file.OpenRead(), ReaderOptions).AsEnumerable();
}
#endif
/// <summary>
/// Takes multiple seekable Streams for a multi-part archive
@@ -203,4 +194,4 @@ namespace SharpCompress.Archives.Tar
return TarReader.Open(stream);
}
}
}
}

View File

@@ -22,9 +22,7 @@ namespace SharpCompress.Archives.Zip
/// if the compression method is set to deflate
/// </summary>
public CompressionLevel DeflateCompressionLevel { get; set; }
#if !NO_FILE
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
@@ -32,7 +30,7 @@ namespace SharpCompress.Archives.Zip
/// <param name="readerOptions"></param>
public static ZipArchive Open(string filePath, ReaderOptions readerOptions = null)
{
filePath.CheckNotNullOrEmpty("filePath");
filePath.CheckNotNullOrEmpty(nameof(filePath));
return Open(new FileInfo(filePath), readerOptions ?? new ReaderOptions());
}
@@ -43,10 +41,9 @@ namespace SharpCompress.Archives.Zip
/// <param name="readerOptions"></param>
public static ZipArchive Open(FileInfo fileInfo, ReaderOptions readerOptions = null)
{
fileInfo.CheckNotNull("fileInfo");
fileInfo.CheckNotNull(nameof(fileInfo));
return new ZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
}
#endif
/// <summary>
/// Takes a seekable Stream as a source
@@ -55,12 +52,10 @@ namespace SharpCompress.Archives.Zip
/// <param name="readerOptions"></param>
public static ZipArchive Open(Stream stream, ReaderOptions readerOptions = null)
{
stream.CheckNotNull("stream");
stream.CheckNotNull(nameof(stream));
return new ZipArchive(stream, readerOptions ?? new ReaderOptions());
}
#if !NO_FILE
public static bool IsZipFile(string filePath, string password = null)
{
return IsZipFile(new FileInfo(filePath), password);
@@ -77,7 +72,6 @@ namespace SharpCompress.Archives.Zip
return IsZipFile(stream, password);
}
}
#endif
public static bool IsZipFile(Stream stream, string password = null)
{
@@ -101,9 +95,7 @@ namespace SharpCompress.Archives.Zip
return false;
}
}
#if !NO_FILE
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
@@ -119,7 +111,6 @@ namespace SharpCompress.Archives.Zip
{
return new ZipVolume(file.OpenRead(), ReaderOptions).AsEnumerable();
}
#endif
internal ZipArchive()
: base(ArchiveType.Zip)
@@ -211,4 +202,4 @@ namespace SharpCompress.Archives.Zip
return ZipReader.Open(stream, ReaderOptions);
}
}
}
}

View File

@@ -5,7 +5,6 @@ using System.Runtime.CompilerServices;
[assembly: AssemblyTitle("SharpCompress")]
[assembly: AssemblyProduct("SharpCompress")]
[assembly: InternalsVisibleTo("SharpCompress.Test" + SharpCompress.AssemblyInfo.PublicKeySuffix)]
[assembly: InternalsVisibleTo("SharpCompress.Test.Portable" + SharpCompress.AssemblyInfo.PublicKeySuffix)]
[assembly: CLSCompliant(true)]
namespace SharpCompress

View File

@@ -1,119 +0,0 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
#if NETCORE
using System.Runtime.CompilerServices;
using System.Threading;
namespace SharpCompress.Buffers
{
/// <summary>
/// Provides a resource pool that enables reusing instances of type <see cref="T:T[]"/>.
/// </summary>
/// <remarks>
/// <para>
/// Renting and returning buffers with an <see cref="ArrayPool{T}"/> can increase performance
/// in situations where arrays are created and destroyed frequently, resulting in significant
/// memory pressure on the garbage collector.
/// </para>
/// <para>
/// This class is thread-safe. All members may be used by multiple threads concurrently.
/// </para>
/// </remarks>
internal abstract class ArrayPool<T>
{
/// <summary>The lazily-initialized shared pool instance.</summary>
private static ArrayPool<T> s_sharedInstance = null;
/// <summary>
/// Retrieves a shared <see cref="ArrayPool{T}"/> instance.
/// </summary>
/// <remarks>
/// The shared pool provides a default implementation of <see cref="ArrayPool{T}"/>
/// that's intended for general applicability. It maintains arrays of multiple sizes, and
/// may hand back a larger array than was actually requested, but will never hand back a smaller
/// array than was requested. Renting a buffer from it with <see cref="Rent"/> will result in an
/// existing buffer being taken from the pool if an appropriate buffer is available or in a new
/// buffer being allocated if one is not available.
/// </remarks>
public static ArrayPool<T> Shared
{
[MethodImpl(MethodImplOptions.AggressiveInlining)]
get { return Volatile.Read(ref s_sharedInstance) ?? EnsureSharedCreated(); }
}
/// <summary>Ensures that <see cref="s_sharedInstance"/> has been initialized to a pool and returns it.</summary>
[MethodImpl(MethodImplOptions.NoInlining)]
private static ArrayPool<T> EnsureSharedCreated()
{
Interlocked.CompareExchange(ref s_sharedInstance, Create(), null);
return s_sharedInstance;
}
/// <summary>
/// Creates a new <see cref="ArrayPool{T}"/> instance using default configuration options.
/// </summary>
/// <returns>A new <see cref="ArrayPool{T}"/> instance.</returns>
public static ArrayPool<T> Create()
{
return new DefaultArrayPool<T>();
}
/// <summary>
/// Creates a new <see cref="ArrayPool{T}"/> instance using custom configuration options.
/// </summary>
/// <param name="maxArrayLength">The maximum length of array instances that may be stored in the pool.</param>
/// <param name="maxArraysPerBucket">
/// The maximum number of array instances that may be stored in each bucket in the pool. The pool
/// groups arrays of similar lengths into buckets for faster access.
/// </param>
/// <returns>A new <see cref="ArrayPool{T}"/> instance with the specified configuration options.</returns>
/// <remarks>
/// The created pool will group arrays into buckets, with no more than <paramref name="maxArraysPerBucket"/>
/// in each bucket and with those arrays not exceeding <paramref name="maxArrayLength"/> in length.
/// </remarks>
public static ArrayPool<T> Create(int maxArrayLength, int maxArraysPerBucket)
{
return new DefaultArrayPool<T>(maxArrayLength, maxArraysPerBucket);
}
/// <summary>
/// Retrieves a buffer that is at least the requested length.
/// </summary>
/// <param name="minimumLength">The minimum length of the array needed.</param>
/// <returns>
/// An <see cref="T:T[]"/> that is at least <paramref name="minimumLength"/> in length.
/// </returns>
/// <remarks>
/// This buffer is loaned to the caller and should be returned to the same pool via
/// <see cref="Return"/> so that it may be reused in subsequent usage of <see cref="Rent"/>.
/// It is not a fatal error to not return a rented buffer, but failure to do so may lead to
/// decreased application performance, as the pool may need to create a new buffer to replace
/// the one lost.
/// </remarks>
public abstract T[] Rent(int minimumLength);
/// <summary>
/// Returns to the pool an array that was previously obtained via <see cref="Rent"/> on the same
/// <see cref="ArrayPool{T}"/> instance.
/// </summary>
/// <param name="array">
/// The buffer previously obtained from <see cref="Rent"/> to return to the pool.
/// </param>
/// <param name="clearArray">
/// If <c>true</c> and if the pool will store the buffer to enable subsequent reuse, <see cref="Return"/>
/// will clear <paramref name="array"/> of its contents so that a subsequent consumer via <see cref="Rent"/>
/// will not see the previous consumer's content. If <c>false</c> or if the pool will release the buffer,
/// the array's contents are left unchanged.
/// </param>
/// <remarks>
/// Once a buffer has been returned to the pool, the caller gives up all ownership of the buffer
/// and must not use it. The reference returned from a given call to <see cref="Rent"/> must only be
/// returned via <see cref="Return"/> once. The default <see cref="ArrayPool{T}"/>
/// may hold onto the returned buffer in order to rent it again, or it may release the returned buffer
/// if it's determined that the pool already has enough buffers stored.
/// </remarks>
public abstract void Return(T[] array, bool clearArray = false);
}
}
#endif

View File

@@ -1,144 +0,0 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
#if NETCORE
using System;
namespace SharpCompress.Buffers
{
internal sealed partial class DefaultArrayPool<T> : ArrayPool<T>
{
    /// <summary>The default maximum length of each array in the pool (2^20).</summary>
    private const int DefaultMaxArrayLength = 1024 * 1024;

    /// <summary>The default maximum number of arrays per bucket that are available for rent.</summary>
    private const int DefaultMaxNumberOfArraysPerBucket = 50;

    /// <summary>Lazily-allocated empty array used when arrays of length 0 are requested.</summary>
    private static T[] s_emptyArray; // we support contracts earlier than those with Array.Empty<T>()

    /// <summary>One bucket per power-of-two size class, indexed by Utilities.SelectBucketIndex.</summary>
    private readonly Bucket[] _buckets;

    internal DefaultArrayPool() : this(DefaultMaxArrayLength, DefaultMaxNumberOfArraysPerBucket)
    {
    }

    /// <summary>
    /// Creates a pool whose largest pooled array is at least <paramref name="maxArrayLength"/>
    /// (clamped to [2^4, 2^30]) and whose buckets each hold up to
    /// <paramref name="maxArraysPerBucket"/> arrays.
    /// </summary>
    /// <exception cref="ArgumentOutOfRangeException">Either argument is not positive.</exception>
    internal DefaultArrayPool(int maxArrayLength, int maxArraysPerBucket)
    {
        if (maxArrayLength <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(maxArrayLength));
        }
        if (maxArraysPerBucket <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(maxArraysPerBucket));
        }

        // The bucketing scheme only supports lengths from 2^4 up to 2^30; clamp the
        // requested maximum into that range rather than rejecting it.
        const int MinimumArrayLength = 0x10, MaximumArrayLength = 0x40000000;
        int clampedMax = maxArrayLength;
        if (clampedMax > MaximumArrayLength)
        {
            clampedMax = MaximumArrayLength;
        }
        else if (clampedMax < MinimumArrayLength)
        {
            clampedMax = MinimumArrayLength;
        }

        // Create one bucket per size class; each bucket's buffer length doubles.
        int poolId = Id;
        int bucketCount = Utilities.SelectBucketIndex(clampedMax) + 1;
        var buckets = new Bucket[bucketCount];
        for (int bucketIndex = 0; bucketIndex < buckets.Length; bucketIndex++)
        {
            buckets[bucketIndex] = new Bucket(Utilities.GetMaxSizeForBucket(bucketIndex), maxArraysPerBucket, poolId);
        }
        _buckets = buckets;
    }

    /// <summary>Gets an ID for the pool to use with events.</summary>
    private int Id => GetHashCode();

    /// <summary>
    /// Rents an array of at least <paramref name="minimumLength"/> elements, preferring a
    /// pooled array from the matching bucket (or at most one larger bucket) before allocating.
    /// </summary>
    /// <exception cref="ArgumentOutOfRangeException"><paramref name="minimumLength"/> is negative.</exception>
    public override T[] Rent(int minimumLength)
    {
        // Arrays can't be smaller than zero. Zero-length requests are valid and are
        // served from a shared singleton so the pool stays usable for computed lengths.
        if (minimumLength < 0)
        {
            throw new ArgumentOutOfRangeException(nameof(minimumLength));
        }
        if (minimumLength == 0)
        {
            // No allocation for rents of the empty array, and returns of it are ignored.
            return s_emptyArray ?? (s_emptyArray = new T[0]);
        }

        int startIndex = Utilities.SelectBucketIndex(minimumLength);
        if (startIndex >= _buckets.Length)
        {
            // Too large for any bucket: hand out an exact-size array; Return will drop it.
            return new T[minimumLength];
        }

        // Probe the matching bucket and, if empty, at most one higher (larger) bucket.
        const int MaxBucketsToTry = 2;
        for (int probe = startIndex; probe < _buckets.Length && probe < startIndex + MaxBucketsToTry; probe++)
        {
            T[] rented = _buckets[probe].Rent();
            if (rented != null)
            {
                return rented;
            }
        }

        // All probed buckets were exhausted; allocate a fresh array sized for the
        // original bucket so it can be pooled when returned.
        return new T[_buckets[startIndex]._bufferLength];
    }

    /// <summary>
    /// Returns <paramref name="array"/> to its size-class bucket, optionally clearing it first.
    /// Arrays larger than the largest bucket, and empty arrays, are silently dropped.
    /// </summary>
    /// <exception cref="ArgumentNullException"><paramref name="array"/> is null.</exception>
    public override void Return(T[] array, bool clearArray = false)
    {
        if (array == null)
        {
            throw new ArgumentNullException(nameof(array));
        }
        if (array.Length == 0)
        {
            // When a zero-length array is rented we hand out a singleton rather than a
            // bucket entry, so there is nothing to store here.
            return;
        }

        // Determine which bucket this array length is associated with.
        int bucketIndex = Utilities.SelectBucketIndex(array.Length);
        if (bucketIndex >= _buckets.Length)
        {
            // The array came from the "too large to pool" path in Rent; throw it away.
            return;
        }

        // Clear the array if the caller requests it, so the next renter cannot
        // observe the previous consumer's contents.
        if (clearArray)
        {
            Array.Clear(array, 0, array.Length);
        }

        // Return the buffer to its bucket. In the future, we might consider having
        // Return report failure so a lower-sized bucket could be tried, mirroring how
        // Rent may serve from a higher-sized bucket.
        _buckets[bucketIndex].Return(array);
    }
}
}
#endif

View File

@@ -1,111 +0,0 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
#if NETCORE
using System;
using System.Diagnostics;
using System.Threading;
namespace SharpCompress.Buffers
{
internal sealed partial class DefaultArrayPool<T> : ArrayPool<T>
{
/// <summary>Provides a thread-safe bucket containing buffers that can be Rent'd and Return'd.</summary>
private sealed class Bucket
{
/// <summary>Fixed length of every array stored by this bucket.</summary>
internal readonly int _bufferLength;
/// <summary>Stack of pooled arrays; slots below <see cref="_index"/> are currently rented out (null).</summary>
private readonly T[][] _buffers;
/// <summary>ID of the owning pool, captured for use with events.</summary>
private readonly int _poolId;
private SpinLock _lock; // do not make this readonly; it's a mutable struct
/// <summary>Next slot to rent from; Rent advances it, Return retreats it (LIFO discipline).</summary>
private int _index;
/// <summary>
/// Creates the pool with numberOfBuffers arrays where each buffer is of bufferLength length.
/// </summary>
internal Bucket(int bufferLength, int numberOfBuffers, int poolId)
{
_lock = new SpinLock(Debugger.IsAttached); // only enable thread tracking if debugger is attached; it adds non-trivial overheads to Enter/Exit
_buffers = new T[numberOfBuffers][];
_bufferLength = bufferLength;
_poolId = poolId;
}
/// <summary>Gets an ID for the bucket to use with events.</summary>
internal int Id => GetHashCode();
/// <summary>
/// Takes an array from the bucket. If the bucket is empty, returns null.
/// Slots are filled lazily: a null slot means that position was never allocated,
/// in which case a fresh array is created outside the lock.
/// </summary>
internal T[] Rent()
{
T[][] buffers = _buffers;
T[] buffer = null;
// While holding the lock, grab whatever is at the next available index and
// update the index. We do as little work as possible while holding the spin
// lock to minimize contention with other threads. The try/finally is
// necessary to properly handle thread aborts on platforms which have them.
bool lockTaken = false, allocateBuffer = false;
try
{
_lock.Enter(ref lockTaken);
if (_index < buffers.Length)
{
buffer = buffers[_index];
buffers[_index++] = null;
allocateBuffer = buffer == null;
}
}
finally
{
if (lockTaken) _lock.Exit(false);
}
// While we were holding the lock, we grabbed whatever was at the next available index, if
// there was one. If we tried and if we got back null, that means we hadn't yet allocated
// for that slot, in which case we should do so now.
if (allocateBuffer)
{
buffer = new T[_bufferLength];
}
return buffer;
}
/// <summary>
/// Attempts to return the buffer to the bucket. If there is a free slot the buffer
/// is stored for reuse; if the bucket is already full the buffer is silently dropped.
/// </summary>
/// <exception cref="ArgumentException"><paramref name="array"/>'s length does not match this bucket's buffer length.</exception>
internal void Return(T[] array)
{
// Check to see if the buffer is the correct size for this bucket
if (array.Length != _bufferLength)
{
throw new ArgumentException("Buffer not from pool", nameof(array));
}
// While holding the spin lock, if there's room available in the bucket,
// put the buffer into the next available slot. Otherwise, we just drop it.
// The try/finally is necessary to properly handle thread aborts on platforms
// which have them.
bool lockTaken = false;
try
{
_lock.Enter(ref lockTaken);
if (_index != 0)
{
_buffers[--_index] = array;
}
}
finally
{
if (lockTaken) _lock.Exit(false);
}
}
}
}
}
#endif

View File

@@ -1,38 +0,0 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
#if NETCORE
using System.Diagnostics;
using System.Runtime.CompilerServices;
namespace SharpCompress.Buffers
{
internal static class Utilities
{
    /// <summary>
    /// Maps a requested buffer size to the index of the bucket that serves it.
    /// Bucket 0 covers sizes up to 16 elements and each subsequent bucket doubles
    /// the maximum, so sizes of 17-32 map to bucket 1, 33-64 to bucket 2, and so on.
    /// </summary>
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    internal static int SelectBucketIndex(int bufferSize)
    {
        Debug.Assert(bufferSize > 0);

        // Scale down so that any size up to 16 lands in bucket 0, then count the
        // remaining doublings. The loop computes floor(log2(remaining)) in `bucket`
        // and leaves `remaining` as 0 or 1, which fixes up the rounding — exactly
        // matching the branch cascade this replaces.
        uint remaining = ((uint)bufferSize - 1) >> 4;
        int bucket = 0;
        while (remaining > 1)
        {
            remaining >>= 1;
            bucket++;
        }
        return bucket + (int)remaining;
    }

    /// <summary>
    /// Returns the array length served by the bucket at <paramref name="binIndex"/>:
    /// 16 elements for bucket 0, doubling per bucket (16 &lt;&lt; binIndex).
    /// </summary>
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    internal static int GetMaxSizeForBucket(int binIndex)
    {
        int maxSize = 16 << binIndex;
        Debug.Assert(maxSize >= 0);
        return maxSize;
    }
}
}
#endif

View File

@@ -28,21 +28,14 @@ namespace SharpCompress.Common
public ArchiveEncoding()
{
#if NETSTANDARD1_0
Default = Encoding.GetEncoding("cp437");
Password = Encoding.GetEncoding("cp437");
#else
Default = Encoding.GetEncoding(437);
Password = Encoding.GetEncoding(437);
#endif
}
#if NETSTANDARD1_3 || NETSTANDARD2_0
static ArchiveEncoding()
{
Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
}
#endif
public string Decode(byte[] bytes)
{
@@ -74,4 +67,4 @@ namespace SharpCompress.Common
return CustomDecoder ?? ((bytes, index, count) => GetEncoding().GetString(bytes, index, count));
}
}
}
}

View File

@@ -1,14 +1,10 @@
#if !NO_FILE
using System;
using System;
using System.IO;
#endif
namespace SharpCompress.Common
{
internal static class ExtractionMethods
{
#if !NO_FILE
/// <summary>
/// Extract to specific directory, retaining filename
/// </summary>
@@ -93,6 +89,5 @@ namespace SharpCompress.Common
entry.PreserveExtractionOptions(destinationFileName, options);
}
}
#endif
}
}

View File

@@ -10,11 +10,11 @@ namespace SharpCompress.Common
}
internal ArchiveEncoding ArchiveEncoding { get; }
internal abstract string FilePartName { get; }
internal abstract Stream GetCompressedStream();
internal abstract Stream GetRawStream();
internal bool Skipped { get; set; }
}
}
}

View File

@@ -1,11 +1,10 @@
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Converters;
using System.Text;
namespace SharpCompress.Common.GZip
{
@@ -60,7 +59,7 @@ namespace SharpCompress.Common.GZip
throw new ZlibException("Bad GZIP header.");
}
Int32 timet = DataConverter.LittleEndian.GetInt32(header, 4);
int timet = BinaryPrimitives.ReadInt32LittleEndian(header.AsSpan(4));
DateModified = TarHeader.EPOCH.AddSeconds(timet);
if ((header[3] & 0x04) == 0x04)
{
@@ -69,7 +68,7 @@ namespace SharpCompress.Common.GZip
Int16 extraLength = (Int16)(header[0] + header[1] * 256);
byte[] extra = new byte[extraLength];
if (!stream.ReadFully(extra))
{
throw new ZlibException("Unexpected end-of-file reading GZIP header.");
@@ -117,4 +116,4 @@ namespace SharpCompress.Common.GZip
return ArchiveEncoding.Decode(buffer);
}
}
}
}

View File

@@ -10,13 +10,11 @@ namespace SharpCompress.Common.GZip
{
}
#if !NO_FILE
public GZipVolume(FileInfo fileInfo, ReaderOptions options)
: base(fileInfo.OpenRead(), options)
{
options.LeaveStreamOpen = false;
}
#endif
public override bool IsFirstVolume => true;

View File

@@ -1,6 +1,4 @@
#if !NO_FILE
using System.IO;
using System.IO;
using SharpCompress.Readers;
namespace SharpCompress.Common
@@ -48,4 +46,3 @@ namespace SharpCompress.Common
}
}
}
#endif

View File

@@ -1,9 +1,5 @@
using System;
#if !NO_FILE
using System.IO;
#endif
namespace SharpCompress.Common
{
public interface IVolume : IDisposable

View File

@@ -195,17 +195,12 @@ namespace SharpCompress.Common.Rar.Headers
private static string ConvertPathV5(string path)
{
#if NO_FILE
// not sure what to do here
throw new NotImplementedException("TODO");
#else
if (Path.DirectorySeparatorChar == '\\')
{
// replace embedded \\ with valid filename char
return path.Replace('\\', '-').Replace('/', '\\');
}
return path;
#endif
}
@@ -361,9 +356,6 @@ namespace SharpCompress.Common.Rar.Headers
private static string ConvertPathV4(string path)
{
#if NO_FILE
return path.Replace('\\', '/');
#else
if (Path.DirectorySeparatorChar == '/')
{
return path.Replace('\\', '/');
@@ -373,7 +365,6 @@ namespace SharpCompress.Common.Rar.Headers
return path.Replace('/', '\\');
}
return path;
#endif
}
public override string ToString()

View File

@@ -48,15 +48,11 @@ namespace SharpCompress.Common.Rar.Headers
}
else
{
#if !NO_CRYPTO
if (Options.Password == null)
{
throw new CryptographicException("Encrypted Rar archive has no password specified.");
}
reader = new RarCryptoBinaryReader(stream, Options.Password);
#else
throw new CryptographicException("Rar encryption unsupported on this platform");
#endif
}
var header = RarHeader.TryReadBase(reader, _isRar5, Options.ArchiveEncoding);
@@ -138,11 +134,7 @@ namespace SharpCompress.Common.Rar.Headers
}
else
{
#if !NO_CRYPTO
fh.PackedStream = new RarCryptoWrapper(ms, Options.Password, fh.R4Salt);
#else
throw new NotSupportedException("RarCrypto not supported");
#endif
}
}
break;

View File

@@ -1,5 +1,4 @@
#if !NO_CRYPTO
using System.Collections.Generic;
using System.Collections.Generic;
using System.IO;
namespace SharpCompress.Common.Rar
@@ -111,5 +110,4 @@ namespace SharpCompress.Common.Rar
ClearQueue();
}
}
}
#endif
}

View File

@@ -1,5 +1,3 @@
#if !NO_CRYPTO
using System;
using System.Collections.Generic;
using System.IO;
@@ -52,10 +50,10 @@ namespace SharpCompress.Common.Rar
if (sizeToRead > 0)
{
int alignedSize = sizeToRead + ((~sizeToRead + 1) & 0xf);
byte[] cipherText = new byte[RarRijndael.CRYPTO_BLOCK_SIZE];
for (int i = 0; i < alignedSize / 16; i++)
{
//long ax = System.currentTimeMillis();
byte[] cipherText = new byte[RarRijndael.CRYPTO_BLOCK_SIZE];
_actualStream.Read(cipherText, 0, RarRijndael.CRYPTO_BLOCK_SIZE);
var readBytes = _rijndael.ProcessBlock(cipherText);
@@ -96,4 +94,3 @@ namespace SharpCompress.Common.Rar
}
}
}
#endif

View File

@@ -1,7 +1,4 @@
#if !NO_CRYPTO
using System;
using System.Collections.Generic;
using System.Linq;
using System;
using System.Security.Cryptography;
using System.Text;
using SharpCompress.Crypto;
@@ -23,12 +20,6 @@ namespace SharpCompress.Common.Rar
_salt = salt;
}
private byte[] ComputeHash(byte[] input)
{
var sha = SHA1.Create();
return sha.ComputeHash(input);
}
private void Initialize()
{
@@ -47,28 +38,27 @@ namespace SharpCompress.Common.Rar
rawPassword[i + rawLength] = _salt[i];
}
const int noOfRounds = (1 << 18);
IList<byte> bytes = new List<byte>();
const int iblock = 3;
byte[] digest;
byte[] data = new byte[(rawPassword.Length + iblock) * noOfRounds];
//TODO slow code below, find ways to optimize
for (int i = 0; i < noOfRounds; i++)
{
bytes.AddRange(rawPassword);
rawPassword.CopyTo(data, i * (rawPassword.Length + iblock));
bytes.AddRange(new[]
data[i * (rawPassword.Length + iblock) + rawPassword.Length + 0] = (byte)i;
data[i * (rawPassword.Length + iblock) + rawPassword.Length + 1] = (byte)(i >> 8);
data[i * (rawPassword.Length + iblock) + rawPassword.Length + 2] = (byte)(i >> CRYPTO_BLOCK_SIZE);
if (i % (noOfRounds / CRYPTO_BLOCK_SIZE) == 0)
{
(byte) i, (byte) (i >> 8), (byte) (i >> CRYPTO_BLOCK_SIZE)
});
if (i%(noOfRounds/CRYPTO_BLOCK_SIZE) == 0)
{
digest = ComputeHash(bytes.ToArray());
_aesInitializationVector[i/(noOfRounds/CRYPTO_BLOCK_SIZE)] = digest[19];
digest = SHA1.Create().ComputeHash(data, 0, (i + 1) * (rawPassword.Length + iblock));
_aesInitializationVector[i / (noOfRounds / CRYPTO_BLOCK_SIZE)] = digest[19];
}
}
digest = ComputeHash(bytes.ToArray());
digest = SHA1.Create().ComputeHash(data);
//slow code ends
byte[] aesKey = new byte[CRYPTO_BLOCK_SIZE];
@@ -98,19 +88,20 @@ namespace SharpCompress.Common.Rar
public byte[] ProcessBlock(byte[] cipherText)
{
var plainText = new byte[CRYPTO_BLOCK_SIZE];
var decryptedBytes = new List<byte>();
byte[] decryptedBytes = new byte[CRYPTO_BLOCK_SIZE];
_rijndael.ProcessBlock(cipherText, 0, plainText, 0);
for (int j = 0; j < plainText.Length; j++)
for (int j = 0; j < CRYPTO_BLOCK_SIZE; j++)
{
decryptedBytes.Add((byte) (plainText[j] ^ _aesInitializationVector[j%16])); //32:114, 33:101
decryptedBytes[j] = (byte)(plainText[j] ^ _aesInitializationVector[j % 16]); //32:114, 33:101
}
for (int j = 0; j < _aesInitializationVector.Length; j++)
{
_aesInitializationVector[j] = cipherText[j];
}
return decryptedBytes.ToArray();
return decryptedBytes;
}
public void Dispose()
@@ -118,4 +109,3 @@ namespace SharpCompress.Common.Rar
}
}
}
#endif

View File

@@ -22,7 +22,7 @@ namespace SharpCompress.Common.SevenZip
internal List<long> _packStreamStartPositions = new List<long>();
internal List<int> _folderStartFileIndex = new List<int>();
internal List<int> _fileIndexToFolderIndexMap = new List<int>();
internal IPasswordProvider PasswordProvider { get; }
public ArchiveDatabase(IPasswordProvider passwordProvider)
@@ -152,13 +152,14 @@ namespace SharpCompress.Common.SevenZip
{
int packStreamIndex = folder._firstPackStreamId;
long folderStartPackPos = GetFolderStreamPos(folder, 0);
List<long> packSizes = new List<long>();
for (int j = 0; j < folder._packStreams.Count; j++)
int count = folder._packStreams.Count;
long[] packSizes = new long[count];
for (int j = 0; j < count; j++)
{
packSizes.Add(_packSizes[packStreamIndex + j]);
packSizes[j] = _packSizes[packStreamIndex + j];
}
return DecoderStreamHelper.CreateDecoderStream(stream, folderStartPackPos, packSizes.ToArray(), folder, pw);
return DecoderStreamHelper.CreateDecoderStream(stream, folderStartPackPos, packSizes, folder, pw);
}
private long GetFolderPackStreamSize(int folderIndex, int streamIndex)
@@ -179,4 +180,4 @@ namespace SharpCompress.Common.SevenZip
return 0;
}
}
}
}

View File

@@ -1449,13 +1449,14 @@ namespace SharpCompress.Common.SevenZip
CFolder folderInfo = db._folders[folderIndex];
int packStreamIndex = db._folders[folderIndex]._firstPackStreamId;
long folderStartPackPos = db.GetFolderStreamPos(folderInfo, 0);
List<long> packSizes = new List<long>();
for (int j = 0; j < folderInfo._packStreams.Count; j++)
var count = folderInfo._packStreams.Count;
long[] packSizes = new long[count];
for (int j = 0; j < count; j++)
{
packSizes.Add(db._packSizes[packStreamIndex + j]);
packSizes[j] = db._packSizes[packStreamIndex + j];
}
s = DecoderStreamHelper.CreateDecoderStream(_stream, folderStartPackPos, packSizes.ToArray(), folderInfo,
s = DecoderStreamHelper.CreateDecoderStream(_stream, folderStartPackPos, packSizes, folderInfo,
db.PasswordProvider);
_cachedStreams.Add(folderIndex, s);
}
@@ -1553,15 +1554,16 @@ namespace SharpCompress.Common.SevenZip
int packStreamIndex = db._folders[folderIndex]._firstPackStreamId;
long folderStartPackPos = db.GetFolderStreamPos(folderInfo, 0);
List<long> packSizes = new List<long>();
for (int j = 0; j < folderInfo._packStreams.Count; j++)
var count = folderInfo._packStreams.Count;
long[] packSizes = new long[count];
for (int j = 0; j < count; j++)
{
packSizes.Add(db._packSizes[packStreamIndex + j]);
packSizes[j] = db._packSizes[packStreamIndex + j];
}
// TODO: If the decoding fails the last file may be extracted incompletely. Delete it?
Stream s = DecoderStreamHelper.CreateDecoderStream(_stream, folderStartPackPos, packSizes.ToArray(),
Stream s = DecoderStreamHelper.CreateDecoderStream(_stream, folderStartPackPos, packSizes,
folderInfo, db.PasswordProvider);
byte[] buffer = new byte[4 << 10];
for (;;)

View File

@@ -1,7 +1,7 @@
using System;
using System.Buffers.Binary;
using System.IO;
using System.Text;
using SharpCompress.Converters;
namespace SharpCompress.Common.Tar.Headers
{
@@ -39,16 +39,17 @@ namespace SharpCompress.Common.Tar.Headers
WriteOctalBytes(0, buffer, 116, 8); // group ID
//ArchiveEncoding.UTF8.GetBytes("magic").CopyTo(buffer, 257);
if (Name.Length > 100)
var nameByteCount = ArchiveEncoding.GetEncoding().GetByteCount(Name);
if (nameByteCount > 100)
{
// Set mock filename and filetype to indicate the next block is the actual name of the file
WriteStringBytes("././@LongLink", buffer, 0, 100);
buffer[156] = (byte)EntryType.LongName;
WriteOctalBytes(Name.Length + 1, buffer, 124, 12);
WriteOctalBytes(nameByteCount + 1, buffer, 124, 12);
}
else
{
WriteStringBytes(Name, buffer, 0, 100);
WriteStringBytes(ArchiveEncoding.Encode(Name), buffer, 100);
WriteOctalBytes(Size, buffer, 124, 12);
var time = (long)(LastModifiedTime.ToUniversalTime() - EPOCH).TotalSeconds;
WriteOctalBytes(time, buffer, 136, 12);
@@ -56,11 +57,10 @@ namespace SharpCompress.Common.Tar.Headers
if (Size >= 0x1FFFFFFFF)
{
byte[] bytes = DataConverter.BigEndian.GetBytes(Size);
var bytes12 = new byte[12];
bytes.CopyTo(bytes12, 12 - bytes.Length);
Span<byte> bytes12 = stackalloc byte[12];
BinaryPrimitives.WriteInt64BigEndian(bytes12.Slice(4), Size);
bytes12[0] |= 0x80;
bytes12.CopyTo(buffer, 124);
bytes12.CopyTo(buffer.AsSpan(124));
}
}
@@ -69,10 +69,17 @@ namespace SharpCompress.Common.Tar.Headers
output.Write(buffer, 0, buffer.Length);
if (Name.Length > 100)
if (nameByteCount > 100)
{
WriteLongFilenameHeader(output);
Name = Name.Substring(0, 100);
// update to short name lower than 100 - [max bytes of one character].
// subtracting bytes is needed because preventing infinite loop(example code is here).
//
// var bytes = Encoding.UTF8.GetBytes(new string(0x3042, 100));
// var truncated = Encoding.UTF8.GetBytes(Encoding.UTF8.GetString(bytes, 0, 100));
//
// and then infinite recursion is occured in WriteLongFilenameHeader because truncated.Length is 102.
Name = ArchiveEncoding.Decode(ArchiveEncoding.Encode(Name), 0, 100 - ArchiveEncoding.GetEncoding().GetMaxByteCount(1));
Write(output);
}
}
@@ -168,8 +175,9 @@ namespace SharpCompress.Common.Tar.Headers
{
if ((buffer[124] & 0x80) == 0x80) // if size in binary
{
return DataConverter.BigEndian.GetInt64(buffer, 0x80);
return BinaryPrimitives.ReadInt64BigEndian(buffer.AsSpan(0x80));
}
return ReadAsciiInt64Base8(buffer, 124, 11);
}
@@ -184,6 +192,13 @@ namespace SharpCompress.Common.Tar.Headers
return buffer;
}
private static void WriteStringBytes(ReadOnlySpan<byte> name, Span<byte> buffer, int length)
{
name.CopyTo(buffer);
int i = Math.Min(length, name.Length);
buffer.Slice(i, length - i).Fill(0);
}
private static void WriteStringBytes(string name, byte[] buffer, int offset, int length)
{
int i;

View File

@@ -1,6 +1,6 @@
using System;
using System.Buffers.Binary;
using System.Text;
using SharpCompress.Converters;
namespace SharpCompress.Common.Zip.Headers
{
@@ -76,34 +76,34 @@ namespace SharpCompress.Common.Zip.Headers
switch (DataBytes.Length)
{
case 4:
VolumeNumber = DataConverter.LittleEndian.GetUInt32(DataBytes, 0);
VolumeNumber = BinaryPrimitives.ReadUInt32LittleEndian(DataBytes);
return;
case 8:
RelativeOffsetOfEntryHeader = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 0);
RelativeOffsetOfEntryHeader = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
return;
case 12:
RelativeOffsetOfEntryHeader = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 0);
VolumeNumber = DataConverter.LittleEndian.GetUInt32(DataBytes, 8);
RelativeOffsetOfEntryHeader = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
VolumeNumber = BinaryPrimitives.ReadUInt32LittleEndian(DataBytes.AsSpan(8));
return;
case 16:
UncompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 0);
CompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 8);
UncompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
CompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(8));
return;
case 20:
UncompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 0);
CompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 8);
VolumeNumber = DataConverter.LittleEndian.GetUInt32(DataBytes, 16);
UncompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
CompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(8));
VolumeNumber = BinaryPrimitives.ReadUInt32LittleEndian(DataBytes.AsSpan(16));
return;
case 24:
UncompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 0);
CompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 8);
RelativeOffsetOfEntryHeader = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 16);
UncompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
CompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(8));
RelativeOffsetOfEntryHeader = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(16));
return;
case 28:
UncompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 0);
CompressedSize = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 8);
RelativeOffsetOfEntryHeader = (long)DataConverter.LittleEndian.GetUInt64(DataBytes, 16);
VolumeNumber = DataConverter.LittleEndian.GetUInt32(DataBytes, 24);
UncompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
CompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(8));
RelativeOffsetOfEntryHeader = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(16));
VolumeNumber = BinaryPrimitives.ReadUInt32LittleEndian(DataBytes.AsSpan(24));
return;
default:
throw new ArchiveException("Unexpected size of of Zip64 extended information extra field");
@@ -132,7 +132,7 @@ namespace SharpCompress.Common.Zip.Headers
case ExtraDataType.Zip64ExtendedInformationExtraField:
return new Zip64ExtendedInformationExtraField
(
type,
type,
length,
extraData
);
@@ -146,4 +146,4 @@ namespace SharpCompress.Common.Zip.Headers
}
}
}
}
}

View File

@@ -1,8 +1,7 @@
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.Text;
using SharpCompress.Converters;
namespace SharpCompress.Common.Zip.Headers
{
@@ -30,7 +29,7 @@ namespace SharpCompress.Common.Zip.Headers
&& Name.EndsWith("\\");
}
}
internal Stream PackedStream { get; set; }
internal ArchiveEncoding ArchiveEncoding { get; }
@@ -66,9 +65,7 @@ namespace SharpCompress.Common.Zip.Headers
return encryptionData;
}
#if !NO_CRYPTO
internal WinzipAesEncryptionData WinzipAesEncryptionData { get; set; }
#endif
internal ushort LastModifiedDate { get; set; }
@@ -80,13 +77,22 @@ namespace SharpCompress.Common.Zip.Headers
{
for (int i = 0; i < extra.Length - 4;)
{
ExtraDataType type = (ExtraDataType)DataConverter.LittleEndian.GetUInt16(extra, i);
ExtraDataType type = (ExtraDataType)BinaryPrimitives.ReadUInt16LittleEndian(extra.AsSpan(i));
if (!Enum.IsDefined(typeof(ExtraDataType), type))
{
type = ExtraDataType.NotImplementedExtraData;
}
ushort length = DataConverter.LittleEndian.GetUInt16(extra, i + 2);
ushort length = BinaryPrimitives.ReadUInt16LittleEndian(extra.AsSpan(i + 2));
// 7zip has this same kind of check to ignore extras blocks that don't conform to the standard 2-byte ID, 2-byte length, N-byte value.
// CPP/7Zip/Zip/ZipIn.cpp: CInArchive::ReadExtra
if (length > extra.Length)
{
// bad extras block
return;
}
byte[] data = new byte[length];
Buffer.BlockCopy(extra, i + 4, data, 0, length);
Extra.Add(LocalEntryHeaderExtraFactory.Create(type, length, data));
@@ -99,4 +105,4 @@ namespace SharpCompress.Common.Zip.Headers
internal bool IsZip64 => CompressedSize == uint.MaxValue;
}
}
}

View File

@@ -1,5 +1,6 @@
using System.IO;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
namespace SharpCompress.Common.Zip
{
@@ -7,11 +8,13 @@ namespace SharpCompress.Common.Zip
{
private bool _isLocalHeaderLoaded;
private readonly SeekableZipHeaderFactory _headerFactory;
private readonly DirectoryEntryHeader _directoryEntryHeader;
internal SeekableZipFilePart(SeekableZipHeaderFactory headerFactory, DirectoryEntryHeader header, Stream stream)
: base(header, stream)
{
_headerFactory = headerFactory;
_directoryEntryHeader = header;
}
internal override Stream GetCompressedStream()
@@ -36,6 +39,15 @@ namespace SharpCompress.Common.Zip
protected override Stream CreateBaseStream()
{
BaseStream.Position = Header.DataStartPosition.Value;
if ((Header.CompressedSize == 0)
&& FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor)
&& (_directoryEntryHeader?.HasData == true)
&& (_directoryEntryHeader?.CompressedSize != 0))
{
return new ReadOnlySubStream(BaseStream, _directoryEntryHeader.CompressedSize);
}
return BaseStream;
}
}

View File

@@ -54,14 +54,30 @@ namespace SharpCompress.Common.Zip
//entry could be zero bytes so we need to know that.
if (header.ZipHeaderType == ZipHeaderType.LocalEntry)
{
bool isRecording = rewindableStream.IsRecording;
if (!isRecording)
var local_header = ((LocalEntryHeader)header);
// If we have CompressedSize, there is data to be read
if( local_header.CompressedSize > 0 )
{
rewindableStream.StartRecording();
header.HasData = true;
} // Check if zip is streaming ( Length is 0 and is declared in PostDataDescriptor )
else if( local_header.Flags.HasFlag(HeaderFlags.UsePostDataDescriptor) )
{
bool isRecording = rewindableStream.IsRecording;
if (!isRecording)
{
rewindableStream.StartRecording();
}
uint nextHeaderBytes = reader.ReadUInt32();
// Check if next data is PostDataDescriptor, streamed file with 0 length
header.HasData = !IsHeader(nextHeaderBytes);
rewindableStream.Rewind(!isRecording);
}
else // We are not streaming and compressed size is 0, we have no data
{
header.HasData = false;
}
uint nextHeaderBytes = reader.ReadUInt32();
header.HasData = !IsHeader(nextHeaderBytes);
rewindableStream.Rewind(!isRecording);
}
yield return header;
}

View File

@@ -1,9 +1,7 @@
#if !NO_CRYPTO
using System;
using System.Buffers.Binary;
using System.IO;
using System.Security.Cryptography;
using SharpCompress.Converters;
namespace SharpCompress.Common.Zip
{
@@ -120,7 +118,7 @@ namespace SharpCompress.Common.Zip
: bytesRemaining;
// update the counter
DataConverter.LittleEndian.PutBytes(_counter, 0, _nonce++);
BinaryPrimitives.WriteInt32LittleEndian(_counter, _nonce++);
// Determine if this is the final block
if ((bytesToRead == bytesRemaining) && (_totalBytesLeftToRead == 0))
@@ -181,4 +179,3 @@ namespace SharpCompress.Common.Zip
}
}
}
#endif

View File

@@ -1,8 +1,6 @@
#if !NO_CRYPTO
using System;
using System.Buffers.Binary;
using System.Security.Cryptography;
using SharpCompress.Converters;
namespace SharpCompress.Common.Zip
{
@@ -64,10 +62,10 @@ namespace SharpCompress.Common.Zip
IvBytes = rfc2898.GetBytes(KeySizeInBytes);
_generatedVerifyValue = rfc2898.GetBytes(2);
short verify = DataConverter.LittleEndian.GetInt16(_passwordVerifyValue, 0);
short verify = BinaryPrimitives.ReadInt16LittleEndian(_passwordVerifyValue);
if (_password != null)
{
short generated = DataConverter.LittleEndian.GetInt16(_generatedVerifyValue, 0);
short generated = BinaryPrimitives.ReadInt16LittleEndian(_generatedVerifyValue);
if (verify != generated)
{
throw new InvalidFormatException("bad password");
@@ -76,4 +74,3 @@ namespace SharpCompress.Common.Zip
}
}
}
#endif

View File

@@ -1,4 +1,5 @@
using System;
using System.Buffers.Binary;
using System.IO;
using System.Linq;
using SharpCompress.Common.Zip.Headers;
@@ -8,7 +9,6 @@ using SharpCompress.Compressors.Deflate;
using SharpCompress.Compressors.Deflate64;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.PPMd;
using SharpCompress.Converters;
using SharpCompress.IO;
namespace SharpCompress.Common.Zip
@@ -108,19 +108,19 @@ namespace SharpCompress.Common.Zip
{
throw new InvalidFormatException("Winzip data length is not 7.");
}
ushort compressedMethod = DataConverter.LittleEndian.GetUInt16(data.DataBytes, 0);
ushort compressedMethod = BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes);
if (compressedMethod != 0x01 && compressedMethod != 0x02)
{
throw new InvalidFormatException("Unexpected vendor version number for WinZip AES metadata");
}
ushort vendorId = DataConverter.LittleEndian.GetUInt16(data.DataBytes, 2);
ushort vendorId = BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes.AsSpan(2));
if (vendorId != 0x4541)
{
throw new InvalidFormatException("Unexpected vendor ID for WinZip AES metadata");
}
return CreateDecompressionStream(stream, (ZipCompressionMethod)DataConverter.LittleEndian.GetUInt16(data.DataBytes, 5));
return CreateDecompressionStream(stream, (ZipCompressionMethod)BinaryPrimitives.ReadUInt16LittleEndian(data.DataBytes.AsSpan(5)));
}
default:
{
@@ -142,7 +142,7 @@ namespace SharpCompress.Common.Zip
&& FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor))
|| Header.IsZip64)
{
plainStream = new NonDisposingStream(plainStream); //make sure AES doesn't close
plainStream = new NonDisposingStream(plainStream); //make sure AES doesn't close
}
else
{
@@ -165,12 +165,10 @@ namespace SharpCompress.Common.Zip
case ZipCompressionMethod.WinzipAes:
{
#if !NO_FILE
if (Header.WinzipAesEncryptionData != null)
{
return new WinzipAesCryptoStream(plainStream, Header.WinzipAesEncryptionData, Header.CompressedSize - 10);
}
#endif
return plainStream;
}
@@ -184,4 +182,4 @@ namespace SharpCompress.Common.Zip
return plainStream;
}
}
}
}

View File

@@ -1,8 +1,6 @@
using System;
using System.IO;
#if !NO_CRYPTO
using System.Linq;
#endif
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
using System.Text;
@@ -132,10 +130,6 @@ namespace SharpCompress.Common.Zip
if (entryHeader.CompressionMethod == ZipCompressionMethod.WinzipAes)
{
#if NO_CRYPTO
throw new NotSupportedException("Cannot decrypt Winzip AES with Silverlight or WP7.");
#else
ExtraData data = entryHeader.Extra.SingleOrDefault(x => x.Type == ExtraDataType.WinZipAes);
if (data != null)
{
@@ -150,7 +144,6 @@ namespace SharpCompress.Common.Zip
entryHeader.CompressedSize -= (uint)(salt.Length + 2);
}
#endif
}
}

View File

@@ -65,16 +65,16 @@ namespace SharpCompress.Compressors.ADC
}
}
private static int GetOffset(byte[] chunk, int position)
private static int GetOffset(ReadOnlySpan<byte> chunk)
{
switch (GetChunkType(chunk[position]))
switch (GetChunkType(chunk[0]))
{
case PLAIN:
return 0;
case TWO_BYTE:
return ((chunk[position] & 0x03) << 8) + chunk[position + 1];
return ((chunk[0] & 0x03) << 8) + chunk[1];
case THREE_BYTE:
return (chunk[position + 1] << 8) + chunk[position + 2];
return (chunk[1] << 8) + chunk[2];
default:
return -1;
}
@@ -116,7 +116,7 @@ namespace SharpCompress.Compressors.ADC
byte[] buffer = new byte[bufferSize];
int outPosition = 0;
bool full = false;
MemoryStream tempMs;
Span<byte> temp = stackalloc byte[3];
while (position < input.Length)
{
@@ -142,11 +142,10 @@ namespace SharpCompress.Compressors.ADC
position += chunkSize + 1;
break;
case TWO_BYTE:
tempMs = new MemoryStream();
chunkSize = GetChunkSize((byte)readByte);
tempMs.WriteByte((byte)readByte);
tempMs.WriteByte((byte)input.ReadByte());
offset = GetOffset(tempMs.ToArray(), 0);
temp[0] = (byte)readByte;
temp[1] = (byte)input.ReadByte();
offset = GetOffset(temp);
if (outPosition + chunkSize > bufferSize)
{
full = true;
@@ -173,12 +172,11 @@ namespace SharpCompress.Compressors.ADC
}
break;
case THREE_BYTE:
tempMs = new MemoryStream();
chunkSize = GetChunkSize((byte)readByte);
tempMs.WriteByte((byte)readByte);
tempMs.WriteByte((byte)input.ReadByte());
tempMs.WriteByte((byte)input.ReadByte());
offset = GetOffset(tempMs.ToArray(), 0);
temp[0] = (byte)readByte;
temp[1] = (byte)input.ReadByte();
temp[2] = (byte)input.ReadByte();
offset = GetOffset(temp);
if (outPosition + chunkSize > bufferSize)
{
full = true;
@@ -217,4 +215,4 @@ namespace SharpCompress.Compressors.ADC
return position - start;
}
}
}
}

View File

@@ -27,9 +27,8 @@
// ------------------------------------------------------------------
using System;
using System.Buffers.Binary;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Converters;
using System.Text;
namespace SharpCompress.Compressors.Deflate
@@ -442,7 +441,7 @@ namespace SharpCompress.Compressors.Deflate
}
TimeSpan delta = LastModified.Value - UNIX_EPOCH;
var timet = (Int32)delta.TotalSeconds;
DataConverter.LittleEndian.PutBytes(header, i, timet);
BinaryPrimitives.WriteInt32LittleEndian(header.AsSpan(i), timet);
i += 4;
// xflg
@@ -476,4 +475,4 @@ namespace SharpCompress.Compressors.Deflate
return header.Length; // bytes written
}
}
}
}

View File

@@ -25,11 +25,10 @@
// ------------------------------------------------------------------
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Converters;
using System.Text;
namespace SharpCompress.Compressors.Deflate
@@ -244,10 +243,12 @@ namespace SharpCompress.Compressors.Deflate
if (_wantCompress)
{
// Emit the GZIP trailer: CRC32 and size mod 2^32
int c1 = crc.Crc32Result;
_stream.Write(DataConverter.LittleEndian.GetBytes(c1), 0, 4);
byte[] intBuf = new byte[4];
BinaryPrimitives.WriteInt32LittleEndian(intBuf, crc.Crc32Result);
_stream.Write(intBuf, 0, 4);
int c2 = (Int32)(crc.TotalBytesRead & 0x00000000FFFFFFFF);
_stream.Write(DataConverter.LittleEndian.GetBytes(c2), 0, 4);
BinaryPrimitives.WriteInt32LittleEndian(intBuf, c2);
_stream.Write(intBuf, 0, 4);
}
else
{
@@ -293,9 +294,9 @@ namespace SharpCompress.Compressors.Deflate
Array.Copy(_z.InputBuffer, _z.NextIn, trailer, 0, trailer.Length);
}
Int32 crc32_expected = DataConverter.LittleEndian.GetInt32(trailer, 0);
Int32 crc32_expected = BinaryPrimitives.ReadInt32LittleEndian(trailer);
Int32 crc32_actual = crc.Crc32Result;
Int32 isize_expected = DataConverter.LittleEndian.GetInt32(trailer, 4);
Int32 isize_expected = BinaryPrimitives.ReadInt32LittleEndian(trailer.AsSpan(4));
Int32 isize_actual = (Int32)(_z.TotalBytesOut & 0x00000000FFFFFFFF);
if (crc32_actual != crc32_expected)
@@ -446,7 +447,7 @@ namespace SharpCompress.Compressors.Deflate
throw new ZlibException("Bad GZIP header.");
}
Int32 timet = DataConverter.LittleEndian.GetInt32(header, 4);
Int32 timet = BinaryPrimitives.ReadInt32LittleEndian(header.AsSpan(4));
_GzipMtime = TarHeader.EPOCH.AddSeconds(timet);
totalBytesRead += n;
if ((header[3] & 0x04) == 0x04)
@@ -647,4 +648,4 @@ namespace SharpCompress.Compressors.Deflate
Undefined
}
}
}
}

View File

@@ -1,6 +1,4 @@
#if !NO_CRYPTO
using System;
using System;
using System.IO;
using System.Security.Cryptography;
using System.Text;
@@ -195,7 +193,7 @@ namespace SharpCompress.Compressors.LZMA
}
else
{
#if NETSTANDARD1_3
#if NETSTANDARD1_3 || NETSTANDARD2_0
using (IncrementalHash sha = IncrementalHash.CreateHash(HashAlgorithmName.SHA256))
{
byte[] counter = new byte[8];
@@ -262,5 +260,3 @@ namespace SharpCompress.Compressors.LZMA
#endregion
}
}
#endif

View File

@@ -1,6 +1,6 @@
using System;
using System.Buffers.Binary;
using System.IO;
using SharpCompress.Converters;
using SharpCompress.Crypto;
using SharpCompress.IO;
@@ -58,16 +58,17 @@ namespace SharpCompress.Compressors.LZMA
crc32Stream.WrappedStream.Dispose();
crc32Stream.Dispose();
var compressedCount = _countingWritableSubStream.Count;
var bytes = DataConverter.LittleEndian.GetBytes(crc32Stream.Crc);
_countingWritableSubStream.Write(bytes, 0, bytes.Length);
bytes = DataConverter.LittleEndian.GetBytes(_writeCount);
_countingWritableSubStream.Write(bytes, 0, bytes.Length);
byte[] intBuf = new byte[8];
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, crc32Stream.Crc);
_countingWritableSubStream.Write(intBuf, 0, 4);
BinaryPrimitives.WriteInt64LittleEndian(intBuf, _writeCount);
_countingWritableSubStream.Write(intBuf, 0, 8);
//total with headers
bytes = DataConverter.LittleEndian.GetBytes(compressedCount + 6 + 20);
_countingWritableSubStream.Write(bytes, 0, bytes.Length);
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, compressedCount + 6 + 20);
_countingWritableSubStream.Write(intBuf, 0, 8);
}
_finished = true;
}
@@ -101,7 +102,7 @@ namespace SharpCompress.Compressors.LZMA
{
_stream.Flush();
}
// TODO: Both Length and Position are sometimes feasible, but would require
// reading the output length when we initialize.
public override long Length => throw new NotImplementedException();

View File

@@ -34,34 +34,26 @@ namespace SharpCompress.Compressors.LZMA
if (NEEDS_INDENT)
{
NEEDS_INDENT = false;
#if !NO_FILE
Debug.Write(INDENT.Peek());
#endif
}
}
public static void Write(object value)
{
EnsureIndent();
#if !NO_FILE
Debug.Write(value);
#endif
}
public static void Write(string text)
{
EnsureIndent();
#if !NO_FILE
Debug.Write(text);
#endif
}
public static void Write(string format, params object[] args)
{
EnsureIndent();
#if !NO_FILE
Debug.Write(string.Format(format, args));
#endif
}
public static void WriteLine()

View File

@@ -996,7 +996,7 @@ namespace SharpCompress.Compressors.LZMA
}
}
UInt32 startLen = 2; // speed optimization
UInt32 startLen = 2; // speed optimization
for (UInt32 repIndex = 0; repIndex < Base.K_NUM_REP_DISTANCES; repIndex++)
{
@@ -1571,12 +1571,17 @@ namespace SharpCompress.Compressors.LZMA
public void WriteCoderProperties(Stream outStream)
{
_properties[0] = (Byte)((_posStateBits * 5 + _numLiteralPosStateBits) * 9 + _numLiteralContextBits);
WriteCoderProperties(_properties);
outStream.Write(_properties, 0, K_PROP_SIZE);
}
public void WriteCoderProperties(Span<byte> span)
{
span[0] = (byte)((_posStateBits * 5 + _numLiteralPosStateBits) * 9 + _numLiteralContextBits);
for (int i = 0; i < 4; i++)
{
_properties[1 + i] = (Byte)((_dictionarySize >> (8 * i)) & 0xFF);
span[1 + i] = (byte)((_dictionarySize >> (8 * i)) & 0xFF);
}
outStream.Write(_properties, 0, K_PROP_SIZE);
}
private readonly UInt32[] _tempPrices = new UInt32[Base.K_NUM_FULL_DISTANCES];
@@ -1794,4 +1799,4 @@ namespace SharpCompress.Compressors.LZMA
_trainSize = trainSize;
}
}
}
}

View File

@@ -1,7 +1,7 @@
using System;
using System.Buffers.Binary;
using System.IO;
using SharpCompress.Compressors.LZMA.LZ;
using SharpCompress.Converters;
namespace SharpCompress.Compressors.LZMA
{
@@ -56,7 +56,7 @@ namespace SharpCompress.Compressors.LZMA
if (!isLzma2)
{
_dictionarySize = DataConverter.LittleEndian.GetInt32(properties, 1);
_dictionarySize = BinaryPrimitives.ReadInt32LittleEndian(properties.AsSpan(1));
_outWindow.Create(_dictionarySize);
if (presetDictionary != null)
{
@@ -107,9 +107,9 @@ namespace SharpCompress.Compressors.LZMA
_encoder = new Encoder();
_encoder.SetCoderProperties(properties._propIDs, properties._properties);
MemoryStream propStream = new MemoryStream(5);
_encoder.WriteCoderProperties(propStream);
Properties = propStream.ToArray();
byte[] prop = new byte[5];
_encoder.WriteCoderProperties(prop);
Properties = prop;
_encoder.SetStreams(null, outputStream, -1, -1);
if (presetDictionary != null)
@@ -315,4 +315,4 @@ namespace SharpCompress.Compressors.LZMA
public byte[] Properties { get; } = new byte[5];
}
}
}

View File

@@ -36,10 +36,8 @@ namespace SharpCompress.Compressors.LZMA
case K_LZMA:
case K_LZMA2:
return new LzmaStream(info, inStreams.Single(), -1, limit);
#if !NO_CRYPTO
case CMethodId.K_AES_ID:
return new AesDecoderStream(inStreams.Single(), info, pass, limit);
#endif
case K_BCJ:
return new BCJFilter(false, inStreams.Single());
case K_BCJ2:

View File

@@ -1,6 +1,6 @@
using System;
using System.Buffers.Binary;
using System.Text;
using SharpCompress.Converters;
namespace SharpCompress.Compressors.PPMd.H
{
@@ -19,7 +19,11 @@ namespace SharpCompress.Compressors.PPMd.H
{
}
internal int SummFreq { get => DataConverter.LittleEndian.GetInt16(Memory, Address) & 0xffff; set => DataConverter.LittleEndian.PutBytes(Memory, Address, (short)value); }
internal int SummFreq
{
get => BinaryPrimitives.ReadInt16LittleEndian(Memory.AsSpan(Address)) & 0xffff;
set => BinaryPrimitives.WriteInt16LittleEndian(Memory.AsSpan(Address), (short)value);
}
internal FreqData Initialize(byte[] mem)
{
@@ -28,14 +32,12 @@ namespace SharpCompress.Compressors.PPMd.H
internal void IncrementSummFreq(int dSummFreq)
{
short summFreq = DataConverter.LittleEndian.GetInt16(Memory, Address);
summFreq += (short)dSummFreq;
DataConverter.LittleEndian.PutBytes(Memory, Address, summFreq);
SummFreq += (short)dSummFreq;
}
internal int GetStats()
{
return DataConverter.LittleEndian.GetInt32(Memory, Address + 2);
return BinaryPrimitives.ReadInt32LittleEndian(Memory.AsSpan(Address + 2));
}
internal virtual void SetStats(State state)
@@ -45,7 +47,7 @@ namespace SharpCompress.Compressors.PPMd.H
internal void SetStats(int state)
{
DataConverter.LittleEndian.PutBytes(Memory, Address + 2, state);
BinaryPrimitives.WriteInt32LittleEndian(Memory.AsSpan(Address + 2), state);
}
public override String ToString()
@@ -64,4 +66,4 @@ namespace SharpCompress.Compressors.PPMd.H
return buffer.ToString();
}
}
}
}

View File

@@ -137,7 +137,7 @@ namespace SharpCompress.Compressors.PPMd.H
private void RestartModelRare()
{
Utility.Fill(_charMask, 0);
new Span<int>(_charMask).Fill(0);
SubAlloc.InitSubAllocator();
_initRl = -(_maxOrder < 12 ? _maxOrder : 12) - 1;
int addr = SubAlloc.AllocContext();
@@ -228,7 +228,7 @@ namespace SharpCompress.Compressors.PPMd.H
private void ClearMask()
{
_escCount = 1;
Utility.Fill(_charMask, 0);
new Span<int>(_charMask).Fill(0);
}
internal bool DecodeInit(IRarUnpack unpackRead, int escChar)
@@ -912,4 +912,4 @@ namespace SharpCompress.Compressors.PPMd.H
}
}
}
}
}

View File

@@ -1,6 +1,6 @@
using System;
using System.Buffers.Binary;
using System.Text;
using SharpCompress.Converters;
namespace SharpCompress.Compressors.PPMd.H
{
@@ -22,7 +22,7 @@ namespace SharpCompress.Compressors.PPMd.H
{
if (Memory != null)
{
_numStats = DataConverter.LittleEndian.GetInt16(Memory, Address) & 0xffff;
_numStats = BinaryPrimitives.ReadInt16LittleEndian(Memory.AsSpan(Address)) & 0xffff;
}
return _numStats;
}
@@ -32,7 +32,7 @@ namespace SharpCompress.Compressors.PPMd.H
_numStats = value & 0xffff;
if (Memory != null)
{
DataConverter.LittleEndian.PutBytes(Memory, Address, (short)value);
BinaryPrimitives.WriteInt16LittleEndian(Memory.AsSpan(Address), (short)value);
}
}
}
@@ -109,7 +109,7 @@ namespace SharpCompress.Compressors.PPMd.H
{
if (Memory != null)
{
_suffix = DataConverter.LittleEndian.GetInt32(Memory, Address + 8);
_suffix = BinaryPrimitives.ReadInt32LittleEndian(Memory.AsSpan(Address + 8));
}
return _suffix;
}
@@ -124,7 +124,7 @@ namespace SharpCompress.Compressors.PPMd.H
_suffix = suffix;
if (Memory != null)
{
DataConverter.LittleEndian.PutBytes(Memory, Address + 8, suffix);
BinaryPrimitives.WriteInt32LittleEndian(Memory.AsSpan(Address + 8), suffix);
}
}
@@ -307,7 +307,7 @@ namespace SharpCompress.Compressors.PPMd.H
// byte[] bytes = model.getSubAlloc().getHeap();
// int p1 = state1.Address;
// int p2 = state2.Address;
//
//
// for (int i = 0; i < StatePtr.size; i++) {
// byte temp = bytes[p1+i];
// bytes[p1+i] = bytes[p2+i];
@@ -564,4 +564,4 @@ namespace SharpCompress.Compressors.PPMd.H
UNION_SIZE = Math.Max(FreqData.SIZE, State.SIZE);
}
}
}
}

View File

@@ -1,4 +1,5 @@
using SharpCompress.Converters;
using System;
using System.Buffers.Binary;
namespace SharpCompress.Compressors.PPMd.H
{
@@ -21,7 +22,7 @@ namespace SharpCompress.Compressors.PPMd.H
{
if (Memory != null)
{
_stamp = DataConverter.LittleEndian.GetInt16(Memory, Address) & 0xffff;
_stamp = BinaryPrimitives.ReadInt16LittleEndian(Memory.AsSpan(Address)) & 0xffff;
}
return _stamp;
}
@@ -31,7 +32,7 @@ namespace SharpCompress.Compressors.PPMd.H
_stamp = value;
if (Memory != null)
{
DataConverter.LittleEndian.PutBytes(Memory, Address, (short)value);
BinaryPrimitives.WriteInt16LittleEndian(Memory.AsSpan(Address), (short)value);
}
}
}
@@ -63,7 +64,7 @@ namespace SharpCompress.Compressors.PPMd.H
{
if (Memory != null)
{
_next = DataConverter.LittleEndian.GetInt32(Memory, Address + 4);
_next = BinaryPrimitives.ReadInt32LittleEndian(Memory.AsSpan(Address + 4));
}
return _next;
}
@@ -78,7 +79,7 @@ namespace SharpCompress.Compressors.PPMd.H
_next = next;
if (Memory != null)
{
DataConverter.LittleEndian.PutBytes(Memory, Address + 4, next);
BinaryPrimitives.WriteInt32LittleEndian(Memory.AsSpan(Address + 4), next);
}
}
@@ -86,7 +87,7 @@ namespace SharpCompress.Compressors.PPMd.H
{
if (Memory != null)
{
_nu = DataConverter.LittleEndian.GetInt16(Memory, Address + 2) & 0xffff;
_nu = BinaryPrimitives.ReadInt16LittleEndian(Memory.AsSpan(Address + 2)) & 0xffff;
}
return _nu;
}
@@ -96,7 +97,7 @@ namespace SharpCompress.Compressors.PPMd.H
_nu = nu & 0xffff;
if (Memory != null)
{
DataConverter.LittleEndian.PutBytes(Memory, Address + 2, (short)nu);
BinaryPrimitives.WriteInt16LittleEndian(Memory.AsSpan(Address + 2), (short)nu);
}
}
@@ -104,7 +105,7 @@ namespace SharpCompress.Compressors.PPMd.H
{
if (Memory != null)
{
_prev = DataConverter.LittleEndian.GetInt32(Memory, Address + 8);
_prev = BinaryPrimitives.ReadInt32LittleEndian(Memory.AsSpan(Address + 8));
}
return _prev;
}
@@ -119,8 +120,8 @@ namespace SharpCompress.Compressors.PPMd.H
_prev = prev;
if (Memory != null)
{
DataConverter.LittleEndian.PutBytes(Memory, Address + 8, prev);
BinaryPrimitives.WriteInt32LittleEndian(Memory.AsSpan(Address + 8), prev);
}
}
}
}
}

View File

@@ -1,5 +1,6 @@
using System;
using System.Buffers.Binary;
using System.Text;
using SharpCompress.Converters;
namespace SharpCompress.Compressors.PPMd.H
{
@@ -18,7 +19,7 @@ namespace SharpCompress.Compressors.PPMd.H
{
if (Memory != null)
{
_next = DataConverter.LittleEndian.GetInt32(Memory, Address);
_next = BinaryPrimitives.ReadInt32LittleEndian(Memory.AsSpan(Address));
}
return _next;
}
@@ -33,7 +34,7 @@ namespace SharpCompress.Compressors.PPMd.H
_next = next;
if (Memory != null)
{
DataConverter.LittleEndian.PutBytes(Memory, Address, next);
BinaryPrimitives.WriteInt32LittleEndian(Memory.AsSpan(Address), next);
}
}
@@ -51,4 +52,4 @@ namespace SharpCompress.Compressors.PPMd.H
return buffer.ToString();
}
}
}
}

View File

@@ -1,6 +1,6 @@
using System;
using System.Buffers.Binary;
using System.Text;
using SharpCompress.Converters;
namespace SharpCompress.Compressors.PPMd.H
{
@@ -29,7 +29,7 @@ namespace SharpCompress.Compressors.PPMd.H
internal int GetSuccessor()
{
return DataConverter.LittleEndian.GetInt32(Memory, Address + 2);
return BinaryPrimitives.ReadInt32LittleEndian(Memory.AsSpan(Address + 2));
}
internal void SetSuccessor(PpmContext successor)
@@ -39,7 +39,7 @@ namespace SharpCompress.Compressors.PPMd.H
internal void SetSuccessor(int successor)
{
DataConverter.LittleEndian.PutBytes(Memory, Address + 2, successor);
BinaryPrimitives.WriteInt32LittleEndian(Memory.AsSpan(Address + 2), successor);
}
internal void SetValues(StateRef state)
@@ -95,4 +95,4 @@ namespace SharpCompress.Compressors.PPMd.H
return buffer.ToString();
}
}
}
}

View File

@@ -166,7 +166,7 @@ namespace SharpCompress.Compressors.PPMd.H
_freeListPos = _heapStart + allocSize;
//UPGRADE_ISSUE: The following fragment of code could not be parsed and was not converted. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1156'"
//assert(realAllocSize - tempMemBlockPos == RarMemBlock.size): realAllocSize
//assert(realAllocSize - tempMemBlockPos == RarMemBlock.size): realAllocSize
//+ + tempMemBlockPos + + RarMemBlock.size;
// Init freeList
@@ -360,7 +360,7 @@ namespace SharpCompress.Compressors.PPMd.H
public virtual void InitSubAllocator()
{
int i, k;
Utility.Fill(_heap, _freeListPos, _freeListPos + SizeOfFreeList(), (byte)0);
new Span<byte>(_heap, _freeListPos, SizeOfFreeList()).Fill(0);
_pText = _heapStart;
@@ -448,4 +448,4 @@ namespace SharpCompress.Compressors.PPMd.H
UNIT_SIZE = Math.Max(PpmContext.SIZE, RarMemBlock.SIZE);
}
}
}
}

View File

@@ -1,5 +1,6 @@
using SharpCompress.Compressors.PPMd.I1;
using SharpCompress.Converters;
using System;
using System.Buffers.Binary;
using SharpCompress.Compressors.PPMd.I1;
namespace SharpCompress.Compressors.PPMd
{
@@ -25,7 +26,7 @@ namespace SharpCompress.Compressors.PPMd
ModelOrder = modelOrder;
RestorationMethod = modelRestorationMethod;
}
public int ModelOrder { get; }
public PpmdVersion Version { get; } = PpmdVersion.I1;
internal ModelRestorationMethod RestorationMethod { get; }
@@ -34,7 +35,7 @@ namespace SharpCompress.Compressors.PPMd
{
if (properties.Length == 2)
{
ushort props = DataConverter.LittleEndian.GetUInt16(properties, 0);
ushort props = BinaryPrimitives.ReadUInt16LittleEndian(properties);
AllocatorSize = (((props >> 4) & 0xff) + 1) << 20;
ModelOrder = (props & 0x0f) + 1;
RestorationMethod = (ModelRestorationMethod)(props >> 12);
@@ -42,7 +43,7 @@ namespace SharpCompress.Compressors.PPMd
else if (properties.Length == 5)
{
Version = PpmdVersion.H7Z;
AllocatorSize = DataConverter.LittleEndian.GetInt32(properties, 1);
AllocatorSize = BinaryPrimitives.ReadInt32LittleEndian(properties.AsSpan(1));
ModelOrder = properties[0];
}
}
@@ -64,8 +65,16 @@ namespace SharpCompress.Compressors.PPMd
}
}
public byte[] Properties => DataConverter.LittleEndian.GetBytes(
(ushort)
((ModelOrder - 1) + (((AllocatorSize >> 20) - 1) << 4) + ((ushort)RestorationMethod << 12)));
public byte[] Properties
{
get
{
byte[] bytes = new byte[2];
BinaryPrimitives.WriteUInt16LittleEndian(
bytes,
(ushort)((ModelOrder - 1) + (((AllocatorSize >> 20) - 1) << 4) + ((ushort)RestorationMethod << 12)));
return bytes;
}
}
}
}
}

View File

@@ -32,9 +32,9 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
}
}
public bool Suspended {
public bool Suspended {
get => suspended;
set => suspended = value;
set => suspended = value;
}
public int Char
@@ -139,12 +139,12 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
case 36: // alternative hash
Unpack29(fileHeader.IsSolid);
break;
case 50: // rar 5.x compression
Unpack5(fileHeader.IsSolid);
break;
default:
default:
throw new InvalidFormatException("unknown rar compression version " + fileHeader.CompressionAlgorithm);
}
}
@@ -729,13 +729,13 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
if (!solid)
{
tablesRead = false;
Utility.Fill(oldDist, 0); // memset(oldDist,0,sizeof(OldDist));
new Span<int>(oldDist).Fill(0); // memset(oldDist,0,sizeof(OldDist));
oldDistPtr = 0;
lastDist = 0;
lastLength = 0;
Utility.Fill(unpOldTable, (byte)0); // memset(UnpOldTable,0,sizeof(UnpOldTable));
new Span<byte>(unpOldTable).Fill(0); // memset(UnpOldTable,0,sizeof(UnpOldTable));
unpPtr = 0;
wrPtr = 0;
@@ -837,7 +837,7 @@ WriteBorder=Math.Min(MaxWinSize,UNPACK_MAX_WRITE)&MaxWinMask;
if ((bitField & 0x4000) == 0)
{
Utility.Fill(unpOldTable, (byte)0); // memset(UnpOldTable,0,sizeof(UnpOldTable));
new Span<byte>(unpOldTable).Fill(0); // memset(UnpOldTable,0,sizeof(UnpOldTable));
}
AddBits(2);
@@ -1109,7 +1109,7 @@ WriteBorder=Math.Min(MaxWinSize,UNPACK_MAX_WRITE)&MaxWinMask;
oldFilterLengths[FiltPos] = StackFilter.BlockLength;
// memset(StackFilter->Prg.InitR,0,sizeof(StackFilter->Prg.InitR));
Utility.Fill(StackFilter.Program.InitR, 0);
new Span<int>(StackFilter.Program.InitR).Fill(0);
StackFilter.Program.InitR[3] = RarVM.VM_GLOBALMEMADDR; // StackFilter->Prg.InitR[3]=VM_GLOBALMEMADDR;
StackFilter.Program.InitR[4] = StackFilter.BlockLength;
@@ -1267,4 +1267,4 @@ WriteBorder=Math.Min(MaxWinSize,UNPACK_MAX_WRITE)&MaxWinMask;
}
}
}
}
}

View File

@@ -3,7 +3,7 @@
* Original author: Edmund Wagner
* Creation date: 21.06.2007
*
* the unrar licence applies to all junrar source and binary distributions
* the unrar licence applies to all junrar source and binary distributions
* you are not allowed to use this source to re-create the RAR compression algorithm
*/
@@ -652,9 +652,9 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
ChSetC[I] = ((~I + 1) & 0xff) << 8;
}
Utility.Fill(NToPl, 0); // memset(NToPl,0,sizeof(NToPl));
Utility.Fill(NToPlB, 0); // memset(NToPlB,0,sizeof(NToPlB));
Utility.Fill(NToPlC, 0); // memset(NToPlC,0,sizeof(NToPlC));
new Span<int>(NToPl).Fill(0); // memset(NToPl,0,sizeof(NToPl));
new Span<int>(NToPlB).Fill(0); // memset(NToPlB,0,sizeof(NToPlB));
new Span<int>(NToPlC).Fill(0); // memset(NToPlC,0,sizeof(NToPlC));
corrHuff(ChSetB, NToPlB);
}
@@ -670,7 +670,7 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
// & ~0xff) | I;
}
}
Utility.Fill(NumToPlace, 0); // memset(NumToPlace,0,sizeof(NToPl));
new Span<int>(NumToPlace).Fill(0); // memset(NumToPlace,0,sizeof(NToPl));
for (I = 6; I >= 0; I--)
{
NumToPlace[I] = (7 - I) * 32;
@@ -717,4 +717,4 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
wrPtr = unpPtr;
}
}
}
}

View File

@@ -3,7 +3,7 @@
* Original author: Edmund Wagner
* Creation date: 21.06.2007
*
* the unrar licence applies to all junrar source and binary distributions
* the unrar licence applies to all junrar source and binary distributions
* you are not allowed to use this source to re-create the RAR compression algorithm
*/
@@ -263,7 +263,7 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
if (0 == (BitField & 0x4000))
{
// memset(UnpOldTable20,0,sizeof(UnpOldTable20));
Utility.Fill(UnpOldTable20, (byte)0);
new Span<byte>(UnpOldTable20).Fill(0);
}
AddBits(2);
@@ -371,7 +371,7 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
AudV[3] = new AudioVariables();
// memset(UnpOldTable20,0,sizeof(UnpOldTable20));
Utility.Fill(UnpOldTable20, (byte)0);
new Span<byte>(UnpOldTable20).Fill(0);
}
}
@@ -521,4 +521,4 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
return ((byte)Ch);
}
}
}
}

View File

@@ -1,4 +1,5 @@
using SharpCompress.Compressors.Rar.VM;
using System;
using SharpCompress.Compressors.Rar.VM;
namespace SharpCompress.Compressors.Rar.UnpackV1
{
@@ -186,9 +187,7 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
int i;
long M, N;
Utility.Fill(lenCount, 0); // memset(LenCount,0,sizeof(LenCount));
Utility.Fill(dec.DecodeNum, 0); // memset(Dec->DecodeNum,0,Size*sizeof(*Dec->DecodeNum));
new Span<int>(dec.DecodeNum).Fill(0); // memset(Dec->DecodeNum,0,Size*sizeof(*Dec->DecodeNum));
for (i = 0; i < size; i++)
{
@@ -217,4 +216,4 @@ namespace SharpCompress.Compressors.Rar.UnpackV1
dec.MaxNum = size;
}
}
}
}

View File

@@ -1,4 +1,5 @@
using static SharpCompress.Compressors.Rar.UnpackV2017.Unpack.Unpack15Local;
using System;
using static SharpCompress.Compressors.Rar.UnpackV2017.Unpack.Unpack15Local;
namespace SharpCompress.Compressors.Rar.UnpackV2017
{
@@ -461,9 +462,9 @@ internal static class Unpack15Local {
ChSetA[I]=(ushort)I;
ChSetC[I]=(ushort)(((~I+1) & 0xff)<<8);
}
Utility.Memset(NToPl,0,NToPl.Length);
Utility.Memset(NToPlB,0,NToPlB.Length);
Utility.Memset(NToPlC,0,NToPlC.Length);
new Span<byte>(NToPl).Fill(0);
new Span<byte>(NToPlB).Fill(0);
new Span<byte>(NToPlC).Fill(0);
CorrHuff(ChSetB,NToPlB);
}
@@ -473,7 +474,7 @@ internal static class Unpack15Local {
for (I=7;I>=0;I--)
for (J=0;J<32;J++)
CharSet[J]=(ushort)((CharSet[J] & ~0xff) | I);
Utility.Memset(NumToPlace,0,NToPl.Length);
new Span<byte>(NumToPlace, 0, NToPl.Length).Fill(0);
for (I=6;I>=0;I--)
NumToPlace[I]=(byte)((7-I)*32);
}

View File

@@ -191,7 +191,7 @@ internal static class Unpack20Local {
UnpAudioBlock=(BitField & 0x8000)!=0;
if ((BitField & 0x4000) != 0)
Utility.Memset(UnpOldTable20,0,UnpOldTable20.Length);
new Span<byte>(UnpOldTable20).Fill(0);
Inp.addbits(2);
uint TableSize;
@@ -296,7 +296,7 @@ internal static class Unpack20Local {
//memset(AudV,0,sizeof(AudV));
AudV = new AudioVariables[4];
Utility.Memset(UnpOldTable20, 0, UnpOldTable20.Length);
new Span<byte>(UnpOldTable20).Fill(0);
//memset(MD,0,sizeof(MD));
MD = new DecodeTable[4];
}

View File

@@ -30,7 +30,7 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
// Check TablesRead5 to be sure that we read tables at least once
// regardless of current block header TablePresent flag.
// So we can safefly use these tables below.
if (!ReadBlockHeader(Inp,ref BlockHeader) ||
if (!ReadBlockHeader(Inp,ref BlockHeader) ||
!ReadTables(Inp,ref BlockHeader, ref BlockTables) || !TablesRead5)
return;
}
@@ -45,8 +45,8 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
// We use 'while', because for empty block containing only Huffman table,
// we'll be on the block border once again just after reading the table.
while (Inp.InAddr>BlockHeader.BlockStart+BlockHeader.BlockSize-1 ||
Inp.InAddr==BlockHeader.BlockStart+BlockHeader.BlockSize-1 &&
while (Inp.InAddr>BlockHeader.BlockStart+BlockHeader.BlockSize-1 ||
Inp.InAddr==BlockHeader.BlockStart+BlockHeader.BlockSize-1 &&
Inp.InBit>=BlockHeader.BlockBitSize)
{
if (BlockHeader.LastBlockInFile)
@@ -415,7 +415,7 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
// Choose the nearest among WriteBorder and WrPtr actual written border.
// If border is equal to UnpPtr, it means that we have MaxWinSize data ahead.
if (WriteBorder==UnpPtr ||
if (WriteBorder==UnpPtr ||
WrPtr!=UnpPtr && ((WrPtr-UnpPtr)&MaxWinMask)<((WriteBorder-UnpPtr)&MaxWinMask))
WriteBorder=WrPtr;
}
@@ -563,11 +563,11 @@ namespace SharpCompress.Compressors.Rar.UnpackV2017
if (!UnpReadBuf())
return false;
Inp.faddbits((uint)((8-Inp.InBit)&7));
byte BlockFlags=(byte)(Inp.fgetbits()>>8);
Inp.faddbits(8);
uint ByteCount=(uint)(((BlockFlags>>3)&3)+1); // Block size byte count.
if (ByteCount==4)
return false;

View File

@@ -196,7 +196,7 @@ public Unpack(/* ComprDataIO *DataIO */)
{
if (!Solid)
{
Utility.Memset<uint>(OldDist, 0, OldDist.Length);
new Span<uint>(OldDist).Fill(0);
OldDistPtr=0;
LastDist=LastLength=0;
// memset(Window,0,MaxWinSize);
@@ -246,7 +246,7 @@ public Unpack(/* ComprDataIO *DataIO */)
// Set the entire DecodeNum to zero.
//memset(Dec->DecodeNum,0,Size*sizeof(*Dec->DecodeNum));
Utility.FillFast<ushort>(Dec.DecodeNum, 0);
new Span<ushort>(Dec.DecodeNum).Fill(0);
// Initialize not really used entry for zero length code.
Dec.DecodePos[0]=0;
@@ -272,7 +272,7 @@ public Unpack(/* ComprDataIO *DataIO */)
Dec.DecodeLen[I]=(uint)LeftAligned;
// Every item of this array contains the sum of all preceding items.
// So it contains the start position in code list for every bit length.
// So it contains the start position in code list for every bit length.
Dec.DecodePos[I]=Dec.DecodePos[I-1]+LengthCount[I-1];
}
@@ -337,7 +337,7 @@ public Unpack(/* ComprDataIO *DataIO */)
uint BitField=Code<<(int)(16-Dec.QuickBits);
// Prepare the table for quick decoding of bit lengths.
// Find the upper limit for current bit field and adjust the bit length
// accordingly if necessary.
while (CurBitLength<Dec.DecodeLen.Length && BitField>=Dec.DecodeLen[CurBitLength])

View File

@@ -1,6 +1,6 @@
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using SharpCompress.Converters;
namespace SharpCompress.Compressors.Rar.VM
{
@@ -72,9 +72,9 @@ namespace SharpCompress.Compressors.Rar.VM
}
if (IsVMMem(mem))
{
return DataConverter.LittleEndian.GetInt32(mem, offset);
return BinaryPrimitives.ReadInt32LittleEndian(mem.AsSpan(offset));
}
return DataConverter.BigEndian.GetInt32(mem, offset);
return BinaryPrimitives.ReadInt32BigEndian(mem.AsSpan(offset));
}
private void SetValue(bool byteMode, byte[] mem, int offset, int value)
@@ -94,11 +94,11 @@ namespace SharpCompress.Compressors.Rar.VM
{
if (IsVMMem(mem))
{
DataConverter.LittleEndian.PutBytes(mem, offset, value);
BinaryPrimitives.WriteInt32LittleEndian(mem.AsSpan(offset), value);
}
else
{
DataConverter.BigEndian.PutBytes(mem, offset, value);
BinaryPrimitives.WriteInt32BigEndian(mem.AsSpan(offset), value);
}
}
@@ -120,12 +120,12 @@ namespace SharpCompress.Compressors.Rar.VM
if (cmdOp.Type == VMOpType.VM_OPREGMEM)
{
int pos = (cmdOp.Offset + cmdOp.Base) & VM_MEMMASK;
ret = DataConverter.LittleEndian.GetInt32(Mem, pos);
ret = BinaryPrimitives.ReadInt32LittleEndian(Mem.AsSpan(pos));
}
else
{
int pos = cmdOp.Offset;
ret = DataConverter.LittleEndian.GetInt32(Mem, pos);
ret = BinaryPrimitives.ReadInt32LittleEndian(Mem.AsSpan(pos));
}
return ret;
}
@@ -190,12 +190,12 @@ namespace SharpCompress.Compressors.Rar.VM
{
//prg.GlobalData.Clear();
// ->GlobalData.Add(dataSize+VM_FIXEDGLOBALSIZE);
prg.GlobalData.SetSize(dataSize + VM_FIXEDGLOBALSIZE);
prg.GlobalData.Capacity = dataSize + VM_FIXEDGLOBALSIZE;
for (int i = 0; i < dataSize + VM_FIXEDGLOBALSIZE; i++)
// memcpy(&Prg->GlobalData[0],&Mem[VM_GLOBALMEMADDR],DataSize+VM_FIXEDGLOBALSIZE);
{
prg.GlobalData[i] = Mem[VM_GLOBALMEMADDR + i];
prg.GlobalData.Add(Mem[VM_GLOBALMEMADDR + i]);
}
}
}
@@ -1449,4 +1449,4 @@ namespace SharpCompress.Compressors.Rar.VM
}
//
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,18 +0,0 @@
#if NET35
using System;
namespace SharpCompress
{
internal static class EnumExtensions
{
public static bool HasFlag(this Enum enumRef, Enum flag)
{
long value = Convert.ToInt64(enumRef);
long flagVal = Convert.ToInt64(flag);
return (value & flagVal) == flagVal;
}
}
}
#endif

View File

@@ -1,6 +1,6 @@
using System;
using System.Buffers.Binary;
using System.IO;
using SharpCompress.Converters;
namespace SharpCompress.IO
{
@@ -74,13 +74,6 @@ namespace SharpCompress.IO
throw new NotSupportedException();
}
#if !SILVERLIGHT
public override decimal ReadDecimal()
{
throw new NotSupportedException();
}
#endif
public override double ReadDouble()
{
throw new NotSupportedException();
@@ -88,17 +81,17 @@ namespace SharpCompress.IO
public override short ReadInt16()
{
return DataConverter.LittleEndian.GetInt16(ReadBytes(2), 0);
return BinaryPrimitives.ReadInt16LittleEndian(ReadBytes(2));
}
public override int ReadInt32()
{
return DataConverter.LittleEndian.GetInt32(ReadBytes(4), 0);
return BinaryPrimitives.ReadInt32LittleEndian(ReadBytes(4));
}
public override long ReadInt64()
{
return DataConverter.LittleEndian.GetInt64(ReadBytes(8), 0);
return BinaryPrimitives.ReadInt64LittleEndian(ReadBytes(8));
}
public override sbyte ReadSByte()
@@ -118,17 +111,17 @@ namespace SharpCompress.IO
public override ushort ReadUInt16()
{
return DataConverter.LittleEndian.GetUInt16(ReadBytes(2), 0);
return BinaryPrimitives.ReadUInt16LittleEndian(ReadBytes(2));
}
public override uint ReadUInt32()
{
return DataConverter.LittleEndian.GetUInt32(ReadBytes(4), 0);
return BinaryPrimitives.ReadUInt32LittleEndian(ReadBytes(4));
}
public override ulong ReadUInt64()
{
return DataConverter.LittleEndian.GetUInt64(ReadBytes(8), 0);
return BinaryPrimitives.ReadUInt64LittleEndian(ReadBytes(8));
}
// RAR5 style variable length encoded value
@@ -203,4 +196,4 @@ namespace SharpCompress.IO
throw new FormatException("malformed vint");
}
}
}
}

View File

@@ -148,4 +148,4 @@ namespace SharpCompress
#endregion
}
}
}

View File

@@ -25,7 +25,7 @@ namespace SharpCompress.Readers.GZip
/// <returns></returns>
public static GZipReader Open(Stream stream, ReaderOptions options = null)
{
stream.CheckNotNull("stream");
stream.CheckNotNull(nameof(stream));
return new GZipReader(stream, options ?? new ReaderOptions());
}
@@ -36,4 +36,4 @@ namespace SharpCompress.Readers.GZip
return GZipEntry.GetEntries(stream, Options);
}
}
}
}

View File

@@ -1,13 +1,10 @@
#if !NO_FILE
using System.IO;
using System.IO;
using SharpCompress.Common;
#endif
namespace SharpCompress.Readers
{
public static class IReaderExtensions
{
#if !NO_FILE
public static void WriteEntryTo(this IReader reader, string filePath)
{
using (Stream stream = File.Open(filePath, FileMode.Create, FileAccess.Write))
@@ -61,6 +58,5 @@ namespace SharpCompress.Readers
}
});
}
#endif
}
}

View File

@@ -33,7 +33,7 @@ namespace SharpCompress.Readers.Rar
/// <returns></returns>
public static RarReader Open(Stream stream, ReaderOptions options = null)
{
stream.CheckNotNull("stream");
stream.CheckNotNull(nameof(stream));
return new SingleVolumeRarReader(stream, options ?? new ReaderOptions());
}
@@ -45,7 +45,7 @@ namespace SharpCompress.Readers.Rar
/// <returns></returns>
public static RarReader Open(IEnumerable<Stream> streams, ReaderOptions options = null)
{
streams.CheckNotNull("streams");
streams.CheckNotNull(nameof(streams));
return new MultiVolumeRarReader(streams, options ?? new ReaderOptions());
}
@@ -64,7 +64,7 @@ namespace SharpCompress.Readers.Rar
return Entry.Parts;
}
protected override EntryStream GetEntryStream()
protected override EntryStream GetEntryStream()
{
var stream = new MultiVolumeReadOnlyStream(CreateFilePartEnumerableForCurrentEntry().Cast<RarFilePart>(), this);
if (Entry.IsRarV3)

View File

@@ -28,7 +28,7 @@ namespace SharpCompress.Readers
/// <returns></returns>
public static IReader Open(Stream stream, ReaderOptions options = null)
{
stream.CheckNotNull("stream");
stream.CheckNotNull(nameof(stream));
options = options ?? new ReaderOptions()
{
LeaveStreamOpen = false
@@ -104,4 +104,4 @@ namespace SharpCompress.Readers
throw new InvalidOperationException("Cannot determine compressed stream type. Supported Reader Formats: Zip, GZip, BZip2, Tar, Rar, LZip, XZ");
}
}
}
}

View File

@@ -69,7 +69,7 @@ namespace SharpCompress.Readers.Tar
/// <returns></returns>
public static TarReader Open(Stream stream, ReaderOptions options = null)
{
stream.CheckNotNull("stream");
stream.CheckNotNull(nameof(stream));
options = options ?? new ReaderOptions();
RewindableStream rewindableStream = new RewindableStream(stream);
rewindableStream.StartRecording();
@@ -121,4 +121,4 @@ namespace SharpCompress.Readers.Tar
return TarEntry.GetEntries(StreamingMode.Streaming, stream, compressionType, Options.ArchiveEncoding);
}
}
}
}

View File

@@ -29,7 +29,7 @@ namespace SharpCompress.Readers.Zip
/// <returns></returns>
public static ZipReader Open(Stream stream, ReaderOptions options = null)
{
stream.CheckNotNull("stream");
stream.CheckNotNull(nameof(stream));
return new ZipReader(stream, options ?? new ReaderOptions());
}
@@ -58,4 +58,4 @@ namespace SharpCompress.Readers.Zip
}
}
}
}
}

View File

@@ -2,13 +2,14 @@
<PropertyGroup>
<AssemblyTitle>SharpCompress - Pure C# Decompression/Compression</AssemblyTitle>
<NeutralLanguage>en-US</NeutralLanguage>
<VersionPrefix>0.23.0</VersionPrefix>
<AssemblyVersion>0.23.0</AssemblyVersion>
<FileVersion>0.23.0</FileVersion>
<VersionPrefix>0.25.0</VersionPrefix>
<AssemblyVersion>0.25.0</AssemblyVersion>
<FileVersion>0.25.0</FileVersion>
<Authors>Adam Hathcock</Authors>
<TargetFrameworks Condition="'$(LibraryFrameworks)'==''">net45;net35;netstandard1.0;netstandard1.3;netstandard2.0</TargetFrameworks>
<TargetFrameworks Condition="'$(LibraryFrameworks)'==''">netstandard1.3;netstandard2.0;netstandard2.1;net46</TargetFrameworks>
<TargetFrameworks Condition=" '$(OS)' != 'Windows_NT' ">netstandard1.3;netstandard2.0;netstandard2.1</TargetFrameworks>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
<AllowUnsafeBlocks>false</AllowUnsafeBlocks>
<AssemblyName>SharpCompress</AssemblyName>
<AssemblyOriginatorKeyFile>../../SharpCompress.snk</AssemblyOriginatorKeyFile>
<SignAssembly>true</SignAssembly>
@@ -19,21 +20,15 @@
<PackageLicenseUrl>https://github.com/adamhathcock/sharpcompress/blob/master/LICENSE.txt</PackageLicenseUrl>
<GenerateAssemblyTitleAttribute>false</GenerateAssemblyTitleAttribute>
<GenerateAssemblyProductAttribute>false</GenerateAssemblyProductAttribute>
<Description>SharpCompress is a compression library for NET Standard 1.0 that can unrar, decompress 7zip, decompress xz, zip/unzip, tar/untar lzip/unlzip, bzip2/unbzip2 and gzip/ungzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip is implemented.</Description>
<Description>SharpCompress is a compression library for NET Standard 1.3/2.0 that can unrar, decompress 7zip, decompress xz, zip/unzip, tar/untar lzip/unlzip, bzip2/unbzip2 and gzip/ungzip with forward-only reading and file random access APIs. Write support for zip/tar/bzip2/gzip is implemented.</Description>
<LangVersion>latest</LangVersion>
</PropertyGroup>
<PropertyGroup Condition=" '$(TargetFramework)' == 'netstandard1.0' ">
<DefineConstants>$(DefineConstants);NO_FILE;NO_CRYPTO;SILVERLIGHT</DefineConstants>
</PropertyGroup>
<PropertyGroup Condition=" '$(TargetFramework)' == 'netstandard1.3' ">
<DefineConstants>$(DefineConstants);NETCORE</DefineConstants>
</PropertyGroup>
<ItemGroup Condition=" '$(TargetFramework)' == 'netstandard1.3' ">
<PackageReference Include="System.Text.Encoding.CodePages" Version="4.5.1" />
<ItemGroup>
<PackageReference Include="System.Text.Encoding.CodePages" Version="4.7.0" />
</ItemGroup>
<PropertyGroup Condition=" '$(TargetFramework)' == 'netstandard2.0' ">
<DefineConstants>$(DefineConstants);NETCORE</DefineConstants>
</PropertyGroup>
<ItemGroup Condition=" '$(TargetFramework)' == 'netstandard2.0' ">
<PackageReference Include="System.Text.Encoding.CodePages" Version="4.5.1" />
<ItemGroup Condition=" '$(TargetFramework)' != 'netstandard2.1' ">
<PackageReference Include="System.Memory" Version="4.5.4" />
</ItemGroup>
</Project>
</Project>

View File

@@ -1,29 +1,23 @@
using System;
using System.Buffers;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Reflection.Emit;
using System.Runtime.InteropServices;
#if NETCORE
using SharpCompress.Buffers;
#endif
using SharpCompress.Readers;
namespace SharpCompress
{
internal static class Utility
{
public static ReadOnlyCollection<T> ToReadOnly<T>(this IEnumerable<T> items)
public static ReadOnlyCollection<T> ToReadOnly<T>(this ICollection<T> items)
{
return new ReadOnlyCollection<T>(items.ToList());
return new ReadOnlyCollection<T>(items);
}
/// <summary>
/// Performs an unsigned bitwise right shift with the specified number
/// </summary>
/// <param name="number">Number to operate on</param>
/// <param name="bits">Ammount of bits to shift</param>
/// <param name="bits">Amount of bits to shift</param>
/// <returns>The resulting number from the shift operation</returns>
public static int URShift(int number, int bits)
{
@@ -38,7 +32,7 @@ namespace SharpCompress
/// Performs an unsigned bitwise right shift with the specified number
/// </summary>
/// <param name="number">Number to operate on</param>
/// <param name="bits">Ammount of bits to shift</param>
/// <param name="bits">Amount of bits to shift</param>
/// <returns>The resulting number from the shift operation</returns>
public static long URShift(long number, int bits)
{
@@ -49,112 +43,12 @@ namespace SharpCompress
return (number >> bits) + (2L << ~bits);
}
/// <summary>
/// Fills the array with an specific value from an specific index to an specific index.
/// </summary>
/// <param name="array">The array to be filled.</param>
/// <param name="fromindex">The first index to be filled.</param>
/// <param name="toindex">The last index to be filled.</param>
/// <param name="val">The value to fill the array with.</param>
public static void Fill<T>(T[] array, int fromindex, int toindex, T val) where T : struct
{
if (array.Length == 0)
{
throw new NullReferenceException();
}
if (fromindex > toindex)
{
throw new ArgumentException();
}
if ((fromindex < 0) || array.Length < toindex)
{
throw new IndexOutOfRangeException();
}
for (int index = (fromindex > 0) ? fromindex-- : fromindex; index < toindex; index++)
{
array[index] = val;
}
}
#if NET45
// super fast memset, up to 40x faster than for loop on large arrays
// see https://stackoverflow.com/questions/1897555/what-is-the-equivalent-of-memset-in-c
private static readonly Action<IntPtr, byte, uint> MemsetDelegate = CreateMemsetDelegate();
private static Action<IntPtr, byte, uint> CreateMemsetDelegate() {
var dynamicMethod = new DynamicMethod(
"Memset",
MethodAttributes.Public | MethodAttributes.Static,
CallingConventions.Standard,
null,
new[] { typeof(IntPtr), typeof(byte), typeof(uint) },
typeof(Utility),
true);
var generator = dynamicMethod.GetILGenerator();
generator.Emit(OpCodes.Ldarg_0);
generator.Emit(OpCodes.Ldarg_1);
generator.Emit(OpCodes.Ldarg_2);
generator.Emit(OpCodes.Initblk);
generator.Emit(OpCodes.Ret);
return (Action<IntPtr, byte, uint>)dynamicMethod.CreateDelegate(typeof(Action<IntPtr, byte, uint>));
}
public static void Memset(byte[] array, byte what, int length)
{
var gcHandle = GCHandle.Alloc(array, GCHandleType.Pinned);
MemsetDelegate(gcHandle.AddrOfPinnedObject(), what, (uint)length);
gcHandle.Free();
}
#else
public static void Memset(byte[] array, byte what, int length)
{
for(var i = 0; i < length; i++)
{
array[i] = what;
}
}
#endif
public static void Memset<T>(T[] array, T what, int length)
{
for(var i = 0; i < length; i++)
{
array[i] = what;
}
}
public static void FillFast<T>(T[] array, T val) where T : struct
{
for (int i = 0; i < array.Length; i++)
{
array[i] = val;
}
}
public static void FillFast<T>(T[] array, int start, int length, T val) where T : struct
{
int toIndex = start + length;
for (int i = start; i < toIndex; i++)
{
array[i] = val;
}
}
/// <summary>
/// Fills the array with an specific value.
/// </summary>
/// <param name="array">The array to be filled.</param>
/// <param name="val">The value to fill the array with.</param>
public static void Fill<T>(T[] array, T val) where T : struct
{
Fill(array, 0, array.Length, val);
}
public static void SetSize(this List<byte> list, int count)
{
if (count > list.Count)
{
// Ensure the list only needs to grow once
list.Capacity = count;
for (int i = list.Count; i < count; i++)
{
list.Add(0x0);
@@ -162,18 +56,7 @@ namespace SharpCompress
}
else
{
byte[] temp = new byte[count];
list.CopyTo(temp, 0);
list.Clear();
list.AddRange(temp);
}
}
public static void AddRange<T>(this ICollection<T> destination, IEnumerable<T> source)
{
foreach (T item in source)
{
destination.Add(item);
list.RemoveRange(count, list.Count - count);
}
}
@@ -184,7 +67,7 @@ namespace SharpCompress
action(item);
}
}
public static void Copy(Array sourceArray, long sourceIndex, Array destinationArray, long destinationIndex, long length)
{
if (sourceIndex > Int32.MaxValue || sourceIndex < Int32.MinValue)
@@ -215,7 +98,7 @@ namespace SharpCompress
obj.CheckNotNull(name);
if (obj.Length == 0)
{
throw new ArgumentException("String is empty.");
throw new ArgumentException("String is empty.", name);
}
}
@@ -254,9 +137,7 @@ namespace SharpCompress
}
finally
{
#if NETCORE
ArrayPool<byte>.Shared.Return(buffer);
#endif
}
}
@@ -272,9 +153,7 @@ namespace SharpCompress
}
finally
{
#if NETCORE
ArrayPool<byte>.Shared.Return(buffer);
#endif
}
}
@@ -359,9 +238,7 @@ namespace SharpCompress
}
finally
{
#if NETCORE
ArrayPool<byte>.Shared.Return(array);
#endif
}
}
@@ -384,9 +261,7 @@ namespace SharpCompress
}
finally
{
#if NETCORE
ArrayPool<byte>.Shared.Return(array);
#endif
}
}
@@ -397,11 +272,7 @@ namespace SharpCompress
private static byte[] GetTransferByteArray()
{
#if NETCORE
return ArrayPool<byte>.Shared.Rent(81920);
#else
return new byte[81920];
#endif
}
public static bool ReadFully(this Stream stream, byte[] buffer)
@@ -423,21 +294,5 @@ namespace SharpCompress
{
return source.Replace('\0', ' ').Trim();
}
public static bool BinaryEquals(this byte[] source, byte[] target)
{
if (source.Length != target.Length)
{
return false;
}
for (int i = 0; i < source.Length; ++i)
{
if (source[i] != target[i])
{
return false;
}
}
return true;
}
}
}
}

View File

@@ -1,6 +1,4 @@
#if !NO_FILE
using System;
#endif
using System;
using System.IO;
using System.Linq;
using System.Linq.Expressions;
@@ -14,7 +12,6 @@ namespace SharpCompress.Writers
writer.Write(entryPath, source, null);
}
#if !NO_FILE
public static void Write(this IWriter writer, string entryPath, FileInfo source)
{
if (!source.Exists)
@@ -37,7 +34,10 @@ namespace SharpCompress.Writers
writer.WriteAll(directory, searchPattern, null, option);
}
public static void WriteAll(this IWriter writer, string directory, string searchPattern = "*", Expression<Func<string, bool>> fileSearchFunc = null,
public static void WriteAll(this IWriter writer,
string directory,
string searchPattern = "*",
Expression<Func<string, bool>> fileSearchFunc = null,
SearchOption option = SearchOption.TopDirectoryOnly)
{
if (!Directory.Exists(directory))
@@ -49,16 +49,10 @@ namespace SharpCompress.Writers
{
fileSearchFunc = n => true;
}
#if NET35
foreach (var file in Directory.GetDirectories(directory, searchPattern, option).Where(fileSearchFunc.Compile()))
#else
foreach (var file in Directory.EnumerateFiles(directory, searchPattern, option).Where(fileSearchFunc.Compile()))
#endif
{
writer.Write(file.Substring(directory.Length), file);
}
}
#endif
}
}

View File

@@ -1,4 +1,3 @@
using SharpCompress.Archives;
using SharpCompress.Common;
namespace SharpCompress.Writers.Tar
@@ -18,6 +17,7 @@ namespace SharpCompress.Writers.Tar
internal TarWriterOptions(WriterOptions options) : this(options.CompressionType, true)
{
ArchiveEncoding = options.ArchiveEncoding;
}
}
}

View File

@@ -1,10 +1,10 @@
using System;
using System.Buffers.Binary;
using System.IO;
using System.Text;
using SharpCompress.Common;
using SharpCompress.Common.Zip;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Converters;
namespace SharpCompress.Writers.Zip
{
@@ -36,7 +36,8 @@ namespace SharpCompress.Writers.Zip
byte[] encodedComment = archiveEncoding.Encode(Comment);
var zip64_stream = Compressed >= uint.MaxValue || Decompressed >= uint.MaxValue;
var zip64 = zip64_stream || HeaderOffset >= uint.MaxValue || Zip64HeaderOffset != 0;
var zip64 = zip64_stream || HeaderOffset >= uint.MaxValue;
var usedCompression = compression;
var compressedvalue = zip64 ? uint.MaxValue : (uint)Compressed;
var decompressedvalue = zip64 ? uint.MaxValue : (uint)Decompressed;
@@ -56,45 +57,70 @@ namespace SharpCompress.Writers.Zip
if (!zip64_stream)
flags |= HeaderFlags.UsePostDataDescriptor;
if (compression == ZipCompressionMethod.LZMA)
if (usedCompression == ZipCompressionMethod.LZMA)
{
flags |= HeaderFlags.Bit1; // eos marker
}
}
//constant sig, then version made by, then version to extract
outputStream.Write(new byte[] { 80, 75, 1, 2, version, 0, version, 0 }, 0, 8);
// Support for zero byte files
if (Decompressed == 0 && Compressed == 0)
usedCompression = ZipCompressionMethod.None;
outputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)flags), 0, 2);
outputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)compression), 0, 2); // zipping method
outputStream.Write(DataConverter.LittleEndian.GetBytes(ModificationTime.DateTimeToDosTime()), 0, 4);
byte[] intBuf = new byte[] { 80, 75, 1, 2, version, 0, version, 0 };
//constant sig, then version made by, then version to extract
outputStream.Write(intBuf, 0, 8);
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)flags);
outputStream.Write(intBuf, 0, 2);
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)usedCompression);
outputStream.Write(intBuf, 0, 2); // zipping method
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, ModificationTime.DateTimeToDosTime());
outputStream.Write(intBuf, 0, 4);
// zipping date and time
outputStream.Write(DataConverter.LittleEndian.GetBytes(Crc), 0, 4); // file CRC
outputStream.Write(DataConverter.LittleEndian.GetBytes(compressedvalue), 0, 4); // compressed file size
outputStream.Write(DataConverter.LittleEndian.GetBytes(decompressedvalue), 0, 4); // uncompressed file size
outputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)encodedFilename.Length), 0, 2); // Filename in zip
outputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)extralength), 0, 2); // extra length
outputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)encodedComment.Length), 0, 2);
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, Crc);
outputStream.Write(intBuf, 0, 4); // file CRC
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, compressedvalue);
outputStream.Write(intBuf, 0, 4); // compressed file size
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, decompressedvalue);
outputStream.Write(intBuf, 0, 4); // uncompressed file size
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)encodedFilename.Length);
outputStream.Write(intBuf, 0, 2); // Filename in zip
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)extralength);
outputStream.Write(intBuf, 0, 2); // extra length
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)encodedComment.Length);
outputStream.Write(intBuf, 0, 2);
outputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)0), 0, 2); // disk=0
outputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)0), 0, 2); // file type: binary
outputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)0), 0, 2); // Internal file attributes
outputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)0x8100), 0, 2);
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 0);
outputStream.Write(intBuf, 0, 2); // disk=0
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)flags);
outputStream.Write(intBuf, 0, 2); // file type: binary
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)flags);
outputStream.Write(intBuf, 0, 2); // Internal file attributes
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 0x8100);
outputStream.Write(intBuf, 0, 2);
// External file attributes (normal/readable)
outputStream.Write(DataConverter.LittleEndian.GetBytes(headeroffsetvalue), 0, 4); // Offset of header
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, headeroffsetvalue);
outputStream.Write(intBuf, 0, 4); // Offset of header
outputStream.Write(encodedFilename, 0, encodedFilename.Length);
if (zip64)
{
outputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)0x0001), 0, 2);
outputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)(extralength - 4)), 0, 2);
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 0x0001);
outputStream.Write(intBuf, 0, 2);
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)(extralength - 4));
outputStream.Write(intBuf, 0, 2);
outputStream.Write(DataConverter.LittleEndian.GetBytes(Decompressed), 0, 8);
outputStream.Write(DataConverter.LittleEndian.GetBytes(Compressed), 0, 8);
outputStream.Write(DataConverter.LittleEndian.GetBytes(HeaderOffset), 0, 8);
outputStream.Write(DataConverter.LittleEndian.GetBytes(0), 0, 4); // VolumeNumber = 0
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, Decompressed);
outputStream.Write(intBuf, 0, 8);
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, Compressed);
outputStream.Write(intBuf, 0, 8);
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, HeaderOffset);
outputStream.Write(intBuf, 0, 8);
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, 0);
outputStream.Write(intBuf, 0, 4); // VolumeNumber = 0
}
outputStream.Write(encodedComment, 0, encodedComment.Length);
@@ -103,4 +129,4 @@ namespace SharpCompress.Writers.Zip
+ 2 + 2 + 2 + 2 + 4 + encodedFilename.Length + extralength + encodedComment.Length);
}
}
}
}

View File

@@ -1,4 +1,5 @@
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.IO;
using System.Text;
@@ -10,7 +11,6 @@ using SharpCompress.Compressors.BZip2;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.PPMd;
using SharpCompress.Converters;
using SharpCompress.IO;
namespace SharpCompress.Writers.Zip
@@ -37,7 +37,7 @@ namespace SharpCompress.Writers.Zip
compressionType = zipWriterOptions.CompressionType;
compressionLevel = zipWriterOptions.DeflateCompressionLevel;
if (WriterOptions.LeaveStreamOpen)
{
destination = new NonDisposingStream(destination);
@@ -162,7 +162,10 @@ namespace SharpCompress.Writers.Zip
var explicitZipCompressionInfo = ToZipCompressionMethod(zipWriterEntryOptions.CompressionType ?? compressionType);
byte[] encodedFilename = WriterOptions.ArchiveEncoding.Encode(filename);
OutputStream.Write(DataConverter.LittleEndian.GetBytes(ZipHeaderFactory.ENTRY_HEADER_BYTES), 0, 4);
// TODO: Use stackalloc when we exclusively support netstandard2.1 or higher
byte[] intBuf = new byte[4];
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, ZipHeaderFactory.ENTRY_HEADER_BYTES);
OutputStream.Write(intBuf, 0, 4);
if (explicitZipCompressionInfo == ZipCompressionMethod.Deflate)
{
if (OutputStream.CanSeek && useZip64)
@@ -184,21 +187,27 @@ namespace SharpCompress.Writers.Zip
flags |= HeaderFlags.Bit1; // eos marker
}
}
OutputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)flags), 0, 2);
OutputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)explicitZipCompressionInfo), 0, 2); // zipping method
OutputStream.Write(DataConverter.LittleEndian.GetBytes(zipWriterEntryOptions.ModificationDateTime.DateTimeToDosTime()), 0, 4);
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)flags);
OutputStream.Write(intBuf, 0, 2);
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)explicitZipCompressionInfo);
OutputStream.Write(intBuf, 0, 2); // zipping method
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, zipWriterEntryOptions.ModificationDateTime.DateTimeToDosTime());
OutputStream.Write(intBuf, 0, 4);
// zipping date and time
OutputStream.Write(new byte[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, 0, 12);
// unused CRC, un/compressed size, updated later
OutputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)encodedFilename.Length), 0, 2); // filename length
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)encodedFilename.Length);
OutputStream.Write(intBuf, 0, 2); // filename length
var extralength = 0;
if (OutputStream.CanSeek && useZip64)
extralength = 2 + 2 + 8 + 8;
OutputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)extralength), 0, 2); // extra length
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)extralength);
OutputStream.Write(intBuf, 0, 2); // extra length
OutputStream.Write(encodedFilename, 0, encodedFilename.Length);
if (extralength != 0)
@@ -212,44 +221,60 @@ namespace SharpCompress.Writers.Zip
private void WriteFooter(uint crc, uint compressed, uint uncompressed)
{
OutputStream.Write(DataConverter.LittleEndian.GetBytes(crc), 0, 4);
OutputStream.Write(DataConverter.LittleEndian.GetBytes(compressed), 0, 4);
OutputStream.Write(DataConverter.LittleEndian.GetBytes(uncompressed), 0, 4);
byte[] intBuf = new byte[4];
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, crc);
OutputStream.Write(intBuf, 0, 4);
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, compressed);
OutputStream.Write(intBuf, 0, 4);
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, uncompressed);
OutputStream.Write(intBuf, 0, 4);
}
private void WriteEndRecord(ulong size)
{
byte[] encodedComment = WriterOptions.ArchiveEncoding.Encode(zipComment);
var zip64 = isZip64 || entries.Count > ushort.MaxValue || streamPosition >= uint.MaxValue || size >= uint.MaxValue;
var sizevalue = size >= uint.MaxValue ? uint.MaxValue : (uint)size;
var streampositionvalue = streamPosition >= uint.MaxValue ? uint.MaxValue : (uint)streamPosition;
byte[] intBuf = new byte[8];
if (zip64)
{
var recordlen = 2 + 2 + 4 + 4 + 8 + 8 + 8 + 8;
// Write zip64 end of central directory record
OutputStream.Write(new byte[] { 80, 75, 6, 6 }, 0, 4);
OutputStream.Write(DataConverter.LittleEndian.GetBytes((ulong)recordlen), 0, 8); // Size of zip64 end of central directory record
OutputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)0), 0, 2); // Made by
OutputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)45), 0, 2); // Version needed
OutputStream.Write(DataConverter.LittleEndian.GetBytes((uint)0), 0, 4); // Disk number
OutputStream.Write(DataConverter.LittleEndian.GetBytes((uint)0), 0, 4); // Central dir disk
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)recordlen);
OutputStream.Write(intBuf, 0, 8); // Size of zip64 end of central directory record
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 0);
OutputStream.Write(intBuf, 0, 2); // Made by
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 45);
OutputStream.Write(intBuf, 0, 2); // Version needed
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, 0);
OutputStream.Write(intBuf, 0, 4); // Disk number
OutputStream.Write(intBuf, 0, 4); // Central dir disk
// TODO: entries.Count is int, so max 2^31 files
OutputStream.Write(DataConverter.LittleEndian.GetBytes((ulong)entries.Count), 0, 8); // Entries in this disk
OutputStream.Write(DataConverter.LittleEndian.GetBytes((ulong)entries.Count), 0, 8); // Total entries
OutputStream.Write(DataConverter.LittleEndian.GetBytes(size), 0, 8); // Central Directory size
OutputStream.Write(DataConverter.LittleEndian.GetBytes((ulong)streamPosition), 0, 8); // Disk offset
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)entries.Count);
OutputStream.Write(intBuf, 0, 8); // Entries in this disk
OutputStream.Write(intBuf, 0, 8); // Total entries
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, size);
OutputStream.Write(intBuf, 0, 8); // Central Directory size
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)streamPosition);
OutputStream.Write(intBuf, 0, 8); // Disk offset
// Write zip64 end of central directory locator
OutputStream.Write(new byte[] { 80, 75, 6, 7 }, 0, 4);
OutputStream.Write(DataConverter.LittleEndian.GetBytes(0uL), 0, 4); // Entry disk
OutputStream.Write(DataConverter.LittleEndian.GetBytes((ulong)streamPosition + size), 0, 8); // Offset to the zip64 central directory
OutputStream.Write(DataConverter.LittleEndian.GetBytes(0u), 0, 4); // Number of disks
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, 0);
OutputStream.Write(intBuf, 0, 4); // Entry disk
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)streamPosition + size);
OutputStream.Write(intBuf, 0, 8); // Offset to the zip64 central directory
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, 0);
OutputStream.Write(intBuf, 0, 4); // Number of disks
streamPosition += recordlen + (4 + 4 + 8 + 4);
streampositionvalue = streamPosition >= uint.MaxValue ? uint.MaxValue : (uint)streampositionvalue;
@@ -257,11 +282,16 @@ namespace SharpCompress.Writers.Zip
// Write normal end of central directory record
OutputStream.Write(new byte[] { 80, 75, 5, 6, 0, 0, 0, 0 }, 0, 8);
OutputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)entries.Count), 0, 2);
OutputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)entries.Count), 0, 2);
OutputStream.Write(DataConverter.LittleEndian.GetBytes(sizevalue), 0, 4);
OutputStream.Write(DataConverter.LittleEndian.GetBytes((uint)streampositionvalue), 0, 4);
OutputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)encodedComment.Length), 0, 2);
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)entries.Count);
OutputStream.Write(intBuf, 0, 2);
OutputStream.Write(intBuf, 0, 2);
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, sizevalue);
OutputStream.Write(intBuf, 0, 4);
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, streampositionvalue);
OutputStream.Write(intBuf, 0, 4);
byte[] encodedComment = WriterOptions.ArchiveEncoding.Encode(zipComment);
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)encodedComment.Length);
OutputStream.Write(intBuf, 0, 2);
OutputStream.Write(encodedComment, 0, encodedComment.Length);
}
@@ -383,6 +413,14 @@ namespace SharpCompress.Writers.Zip
originalStream.Position = (long)(entry.HeaderOffset + 6);
originalStream.WriteByte(0);
if (counting.Count == 0 && entry.Decompressed == 0)
{
// set compression to STORED for zero byte files (no compression data)
originalStream.Position = (long)(entry.HeaderOffset + 8);
originalStream.WriteByte(0);
originalStream.WriteByte(0);
}
originalStream.Position = (long)(entry.HeaderOffset + 14);
writer.WriteFooter(entry.Crc, compressedvalue, decompressedvalue);
@@ -397,11 +435,16 @@ namespace SharpCompress.Writers.Zip
if (entry.Zip64HeaderOffset != 0)
{
originalStream.Position = (long)(entry.HeaderOffset + entry.Zip64HeaderOffset);
originalStream.Write(DataConverter.LittleEndian.GetBytes((ushort)0x0001), 0, 2);
originalStream.Write(DataConverter.LittleEndian.GetBytes((ushort)(8 + 8)), 0, 2);
byte[] intBuf = new byte[8];
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 0x0001);
originalStream.Write(intBuf, 0, 2);
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 8 + 8);
originalStream.Write(intBuf, 0, 2);
originalStream.Write(DataConverter.LittleEndian.GetBytes(entry.Decompressed), 0, 8);
originalStream.Write(DataConverter.LittleEndian.GetBytes(entry.Compressed), 0, 8);
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, entry.Decompressed);
originalStream.Write(intBuf, 0, 8);
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, entry.Compressed);
originalStream.Write(intBuf, 0, 8);
}
originalStream.Position = writer.streamPosition + (long)entry.Compressed;
@@ -418,10 +461,12 @@ namespace SharpCompress.Writers.Zip
if (zip64)
throw new NotSupportedException("Streams larger than 4GiB are not supported for non-seekable streams");
originalStream.Write(DataConverter.LittleEndian.GetBytes(ZipHeaderFactory.POST_DATA_DESCRIPTOR), 0, 4);
byte[] intBuf = new byte[4];
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, ZipHeaderFactory.POST_DATA_DESCRIPTOR);
originalStream.Write(intBuf, 0, 4);
writer.WriteFooter(entry.Crc,
(uint)compressedvalue,
(uint)decompressedvalue);
compressedvalue,
decompressedvalue);
writer.streamPosition += (long)entry.Compressed + 16;
}
writer.entries.Add(entry);
@@ -481,4 +526,4 @@ namespace SharpCompress.Writers.Zip
#endregion Nested type: ZipWritingStream
}
}
}

View File

@@ -1,6 +1,6 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>netcoreapp2.2</TargetFrameworks>
<TargetFramework>netcoreapp3.1</TargetFramework>
<AssemblyName>SharpCompress.Test</AssemblyName>
<AssemblyOriginatorKeyFile>../../SharpCompress.snk</AssemblyOriginatorKeyFile>
<SignAssembly>true</SignAssembly>
@@ -12,10 +12,10 @@
<ProjectReference Include="..\..\src\SharpCompress\SharpCompress.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="15.9.0" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="16.5.0" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.4.1" />
<PackageReference Include="xunit" Version="2.4.1" />
<PackageReference Include="Xunit.SkippableFact" Version="1.3.12" />
<PackageReference Include="Mono.Posix.NETStandard" Version="1.0.0" />
</ItemGroup>
</Project>
</Project>

View File

@@ -5,6 +5,10 @@ using SharpCompress.Archives.Tar;
using SharpCompress.Common;
using SharpCompress.Writers;
using Xunit;
using System.Text;
using SharpCompress.Readers;
using SharpCompress.Writers.Tar;
using SharpCompress.Readers.Tar;
namespace SharpCompress.Test.Tar
{
@@ -26,7 +30,7 @@ namespace SharpCompress.Test.Tar
{
ArchiveFileRead("Tar.tar");
}
[Fact]
public void Tar_FileName_Exactly_100_Characters()
{
@@ -135,10 +139,17 @@ namespace SharpCompress.Test.Tar
string scratchPath = Path.Combine(SCRATCH_FILES_PATH, "Tar.tar");
string unmodified = Path.Combine(TEST_ARCHIVES_PATH, "Tar.noEmptyDirs.tar");
// var aropt = new Ar
using (var archive = TarArchive.Create())
{
archive.AddAllFromDirectory(ORIGINAL_FILES_PATH);
archive.SaveTo(scratchPath, CompressionType.None);
var twopt = new TarWriterOptions(CompressionType.None, true);
twopt.ArchiveEncoding = new ArchiveEncoding()
{
Default = Encoding.GetEncoding(866)
};
archive.SaveTo(scratchPath, twopt);
}
CompareArchivesByPath(unmodified, scratchPath);
}
@@ -195,5 +206,40 @@ namespace SharpCompress.Test.Tar
Assert.True(archive.Type == ArchiveType.Tar);
}
}
[Theory]
[InlineData(10)]
[InlineData(128)]
public void Tar_Japanese_Name(int length)
{
    // Round-trip a tar entry whose name is a run of U+3042 (Japanese hiragana "a")
    // of the given length, and verify the name survives write -> read under UTF-8.
    using (var archiveStream = new MemoryStream())
    {
        var encoding = new ArchiveEncoding()
        {
            Default = Encoding.UTF8
        };
        var writerOptions = new TarWriterOptions(CompressionType.None, true)
        {
            ArchiveEncoding = encoding
        };
        var entryName = new string((char)0x3042, length);

        // Write a single 32-byte entry under the multi-byte name.
        using (var tarWriter = new TarWriter(archiveStream, writerOptions))
        using (var entryContent = new MemoryStream(new byte[32]))
        {
            tarWriter.Write(entryName, entryContent, null);
        }

        // Re-open the produced bytes and confirm every entry key matches exactly.
        using (var readBack = new MemoryStream(archiveStream.ToArray()))
        {
            var readerOptions = new ReaderOptions()
            {
                ArchiveEncoding = encoding
            };
            using (var tarReader = TarReader.Open(readBack, readerOptions))
            {
                while (tarReader.MoveToNextEntry())
                {
                    Assert.Equal(entryName, tarReader.Entry.Key);
                }
            }
        }
    }
}
}
}

View File

@@ -512,5 +512,48 @@ namespace SharpCompress.Test.Zip
}
}
}
}
[Fact]
public void Zip_BadLocalExtra_Read()
{
    // Regression test: an archive whose local file header carries a malformed
    // "extra" field must still be readable; decompressing the entry must not throw.
    string zipPath = Path.Combine(TEST_ARCHIVES_PATH, "Zip.badlocalextra.zip");

    using (ZipArchive za = ZipArchive.Open(zipPath))
    {
        var ex = Record.Exception(() =>
        {
            var firstEntry = za.Entries.First(x => x.Key == "first.txt");

            using (var memoryStream = new MemoryStream())
            using (var firstStream = firstEntry.OpenEntryStream())
            {
                firstStream.CopyTo(memoryStream);
                // The known plaintext payload of first.txt is 199 bytes.
                Assert.Equal(199, memoryStream.Length);
            }
        });

        // No exception means the malformed extra field was tolerated.
        Assert.Null(ex);
    }
}
[Fact]
public void Zip_NoCompression_DataDescriptors_Read()
{
    // Verifies that a STORED (no compression) archive using post-data
    // descriptors can be opened and its entry fully extracted.
    string zipPath = Path.Combine(TEST_ARCHIVES_PATH, "Zip.none.datadescriptors.zip");

    using (ZipArchive za = ZipArchive.Open(zipPath))
    {
        var firstEntry = za.Entries.First(x => x.Key == "first.txt");

        using (var memoryStream = new MemoryStream())
        using (var firstStream = firstEntry.OpenEntryStream())
        {
            firstStream.CopyTo(memoryStream);
            // The known plaintext payload of first.txt is 199 bytes.
            Assert.Equal(199, memoryStream.Length);
        }
    }
}
}
}

View File

@@ -315,5 +315,30 @@ namespace SharpCompress.Test.Zip
}
}
}
[Fact]
public void Zip_None_Issue86_Streamed_Read()
{
    // Issue #86: streaming ZipReader failed on archives containing uncompressed
    // (STORED) entries. Walk the known entry list in order and check each
    // non-directory entry really reports CompressionType.None.
    var keys = new string[] { "Empty1", "Empty2", "Dir1/", "Dir2/", "Fake1", "Fake2", "Internal.zip" };

    using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Zip.none.issue86.zip")))
    using (var reader = ZipReader.Open(stream))
    {
        foreach (var key in keys)
        {
            reader.MoveToNextEntry();
            // xUnit convention: expected value first, actual second
            // (the original call had the arguments reversed).
            Assert.Equal(key, reader.Entry.Key);

            if (!reader.Entry.IsDirectory)
            {
                Assert.Equal(CompressionType.None, reader.Entry.CompressionType);
            }
        }

        // The archive must contain exactly the entries listed above.
        Assert.False(reader.MoveToNextEntry());
    }
}
}
}

Binary file not shown.

Some files were not shown because too many files have changed in this diff Show More