Compare commits

...

40 Commits

Author SHA1 Message Date
Adam Hathcock
7fe27ac310 Mark for 0.38 2024-09-02 09:09:57 +01:00
Adam Hathcock
1e300349ce Merge pull request #868 from kikaragyozov/patch-1
Fix small typo in USAGE.md
2024-09-02 07:43:30 +01:00
Kiril Karagyozov
6b01a7b08e Fix small typo in USAGE.md 2024-08-29 12:11:19 +03:00
Adam Hathcock
34d948df18 Merge pull request #866 from TwanVanDongen/master
Added shrink, reduce and implode to FORMATS
2024-08-22 16:07:23 +01:00
Twan
27091c4f1d Update FORMATS.md 2024-08-21 19:09:14 +02:00
Twan
970a3d7f2a Update FORMATS.md 2024-08-21 19:08:40 +02:00
Twan
2bedbbfc54 Update FORMATS.md 2024-08-21 19:06:14 +02:00
Adam Hathcock
8de33f0db3 Merge pull request #864 from adamhathcock/update-csproj
Update csproj to get green marks and update deps
2024-08-12 16:08:28 +01:00
Adam Hathcock
df4eab67dc Update csproj to get green marks and update deps 2024-08-08 08:41:51 +01:00
Adam Hathcock
2d13bc0046 Merge pull request #860 from lostmsu/7zSFX
Added support for 7zip SFX archives
2024-08-06 08:54:12 +01:00
Victor Nova
704a0cb35d added support for 7zip SFX archives by handling ReaderOptions.LookForHeader 2024-08-05 23:11:15 -07:00
Adam Hathcock
06a983e445 Merge pull request #859 from DineshSolanki/#858-fix-invalid-character-in-filename
Fix #858 - Replaces invalid filename characters
2024-07-30 08:22:01 +01:00
Dinesh Solanki
2d10df8b87 Fix #858 - Replaces invalid filename characters
Added a method to replace invalid characters in file names with underscores during file extraction. This prevents errors related to invalid file names.
2024-07-26 00:16:44 +05:30
Adam Hathcock
baf66db9dc format 2024-07-24 08:31:44 +01:00
GordonJ
3545693999 Added Tests and supporting Files. 2024-07-23 14:05:07 -05:00
gjefferyes
84fb99c2c8 Merge branch 'adamhathcock:master' into master 2024-07-23 13:58:48 -05:00
Adam Hathcock
21e2983ae1 Merge pull request #857 from alexprabhat99/master
Fix for missing empty directories when using ExtractToDirectory
2024-07-18 08:34:20 +01:00
Alex Prabhat Bara
004e0941d5 code formatted using csharpier 2024-07-16 20:12:01 +05:30
Alex Prabhat Bara
188a426dde fix for missing empty directories when using ExtractToDirectory 2024-07-16 16:20:04 +05:30
Adam Hathcock
6fcfae8bfe Merge pull request #855 from Erior/feature/Check-tar-crc-on-header
Check crc on tar header
2024-07-12 08:35:27 +01:00
Lars Vahlenberg
9515350f52 Remove using directive 2024-07-11 19:56:46 +02:00
Lars Vahlenberg
6b88f82656 Handle special case, empty file 2024-07-11 19:52:33 +02:00
Lars Vahlenberg
e42d953f47 Check crc on tar header 2024-07-10 19:53:32 +02:00
gjefferyes
9c257faf26 Merge branch 'master' into master 2024-06-26 06:28:55 -05:00
Adam Hathcock
d18cad6d76 Merge pull request #852 from LANCommander/fix-post-zip64-entry-subsequent-extractions
Fixed extractions after first ZIP64 entry is read from stream
2024-06-26 08:31:58 +01:00
GordonJ
061273be22 Added Export and (un)Reduce to sharpCompress 2024-06-25 11:35:11 -05:00
Pat Hartl
b89de6caad Fix formatting 2024-06-24 17:19:53 -05:00
Pat Hartl
9bc0a1d7c7 Null reference checking
Reorders this null reference check to avoid throwing a null reference exception.
2024-06-23 22:30:34 -05:00
Pat Hartl
eee518b7fa Reworked ZIP64 handling to separate block
The last commit made in this branch messed up some ZIP reading and caused a bunch of tests to fail. These changes branch off ZIP64 logic into its own block so that data is read correctly for 64 and non-64 entries.
2024-06-23 22:29:33 -05:00
Pat Hartl
b7b78edaa3 Fixed extractions after first ZIP64 entry is read from stream 2024-06-22 00:09:25 -05:00
Adam Hathcock
3eaac68ab4 Merge pull request #850 from Erior/feature/Issue-842
Issue 842
2024-06-18 13:45:53 +01:00
Adam Hathcock
a7672190e9 Merge branch 'master' into feature/Issue-842 2024-06-18 13:43:22 +01:00
Adam Hathcock
4e4e89b6eb Merge pull request #849 from Erior/develop
Fix for issue #844
2024-06-18 13:41:52 +01:00
Lars Vahlenberg
33dd519f56 Throw exception when bzip2 is corrupt 2024-06-08 18:26:12 +02:00
Lars Vahlenberg
5c1149aa8b #844 2024-06-08 17:22:20 +02:00
Adam Hathcock
9061e92af6 Merge pull request #848 from Morilli/fix-gzip-archivetype
Fix gzip archives having a `Type` of `ArchiveType.Tar` instead of `ArchiveType.Gzip`
2024-06-06 08:21:14 +01:00
Morilli
49f5ceaa9b Fix GZipArchive getting Type set to ArchiveType.Tar 2024-06-04 10:34:06 +02:00
Morilli
525b309d37 Add failing test 2024-06-04 10:33:32 +02:00
Adam Hathcock
bdb3a787fc Merge pull request #847 from DannyBoyk/846_tar_longlinkname
Tar: Add processing for the LongLink header type
2024-06-04 08:47:57 +01:00
Daniel Nash
a9601ef848 Tar: Add processing for the LongLink header type
Fixes #846
2024-06-03 12:54:19 -04:00
40 changed files with 1850 additions and 169 deletions

View File

@@ -4,15 +4,15 @@
<PackageVersion Include="FluentAssertions" Version="6.12.0" />
<PackageVersion Include="Glob" Version="1.1.9" />
<PackageVersion Include="Microsoft.Bcl.AsyncInterfaces" Version="8.0.0" />
<PackageVersion Include="Microsoft.NET.Test.Sdk" Version="17.9.0" />
<PackageVersion Include="Microsoft.SourceLink.GitHub" Version="8.0.0" />
<PackageVersion Include="Microsoft.NET.Test.Sdk" Version="17.10.0" />
<PackageVersion Include="Mono.Posix.NETStandard" Version="1.0.0" />
<PackageVersion Include="SimpleExec" Version="12.0.0" />
<PackageVersion Include="System.Memory" Version="4.5.5" />
<PackageVersion Include="System.Text.Encoding.CodePages" Version="8.0.0" />
<PackageVersion Include="xunit" Version="2.7.1" />
<PackageVersion Include="xunit.runner.visualstudio" Version="2.5.8" />
<PackageVersion Include="xunit" Version="2.9.0" />
<PackageVersion Include="xunit.runner.visualstudio" Version="2.8.2" />
<PackageVersion Include="xunit.SkippableFact" Version="1.4.13" />
<PackageVersion Include="ZstdSharp.Port" Version="0.8.0" />
<PackageVersion Include="ZstdSharp.Port" Version="0.8.1" />
<GlobalPackageReference Include="Microsoft.SourceLink.GitHub" Version="8.0.0" />
</ItemGroup>
</Project>

View File

@@ -11,7 +11,7 @@
| Archive Format | Compression Format(s) | Compress/Decompress | Archive API | Reader API | Writer API |
| ---------------------- | ------------------------------------------------- | ------------------- | --------------- | ---------- | ------------- |
| Rar | Rar | Decompress (1) | RarArchive | RarReader | N/A |
| Zip (2) | None, DEFLATE, Deflate64, BZip2, LZMA/LZMA2, PPMd | Both | ZipArchive | ZipReader | ZipWriter |
| Zip (2) | None, Shrink, Reduce, Implode, DEFLATE, Deflate64, BZip2, LZMA/LZMA2, PPMd | Both | ZipArchive | ZipReader | ZipWriter |
| Tar | None | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.GZip | DEFLATE | Both | TarArchive | TarReader | TarWriter (3) |
| Tar.BZip2 | BZip2 | Both | TarArchive | TarReader | TarWriter (3) |

View File

@@ -27,7 +27,7 @@ To deal with the "correct" rules as well as the expectations of users, I've deci
To be explicit though, consider always using the overloads that use `ReaderOptions` or `WriterOptions` and explicitly set `LeaveStreamOpen` the way you want.
If using Compression Stream classes directly and you don't want the wrapped stream to be closed. Use the `NonDisposingStream` as a wrapped to prevent the stream being disposed. The change in 0.21 simplified a lot even though the usage is a bit more convoluted.
If using Compression Stream classes directly and you don't want the wrapped stream to be closed. Use the `NonDisposingStream` as a wrapper to prevent the stream being disposed. The change in 0.21 simplified a lot even though the usage is a bit more convoluted.
## Samples

View File

@@ -14,11 +14,31 @@
"resolved": "1.1.9",
"contentHash": "AfK5+ECWYTP7G3AAdnU8IfVj+QpGjrh9GC2mpdcJzCvtQ4pnerAGwHsxJ9D4/RnhDUz2DSzd951O/lQjQby2Sw=="
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==",
"dependencies": {
"Microsoft.Build.Tasks.Git": "8.0.0",
"Microsoft.SourceLink.Common": "8.0.0"
}
},
"SimpleExec": {
"type": "Direct",
"requested": "[12.0.0, )",
"resolved": "12.0.0",
"contentHash": "ptxlWtxC8vM6Y6e3h9ZTxBBkOWnWrm/Sa1HT+2i1xcXY3Hx2hmKDZP5RShPf8Xr9D+ivlrXNy57ktzyH8kyt+Q=="
},
"Microsoft.Build.Tasks.Git": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
}
}
}

View File

@@ -239,4 +239,6 @@ public static class ArchiveFactory
}
}
}
public static IArchiveFactory AutoFactory { get; } = new AutoArchiveFactory();
}

View File

@@ -0,0 +1,27 @@
using System;
using System.Collections.Generic;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Readers;
namespace SharpCompress.Archives;
class AutoArchiveFactory : IArchiveFactory
{
public string Name => nameof(AutoArchiveFactory);
public ArchiveType? KnownArchiveType => null;
public IEnumerable<string> GetSupportedExtensions() => throw new NotSupportedException();
public bool IsArchive(Stream stream, string? password = null) =>
throw new NotSupportedException();
public FileInfo? GetFilePart(int index, FileInfo part1) => throw new NotSupportedException();
public IArchive Open(Stream stream, ReaderOptions? readerOptions = null) =>
ArchiveFactory.Open(stream, readerOptions);
public IArchive Open(FileInfo fileInfo, ReaderOptions? readerOptions = null) =>
ArchiveFactory.Open(fileInfo, readerOptions);
}

View File

@@ -101,7 +101,7 @@ public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
/// </summary>
/// <param name="sourceStream"></param>
private GZipArchive(SourceStream sourceStream)
: base(ArchiveType.Tar, sourceStream) { }
: base(ArchiveType.GZip, sourceStream) { }
protected override IEnumerable<GZipVolume> LoadVolumes(SourceStream sourceStream)
{

View File

@@ -54,14 +54,26 @@ public static class IArchiveExtensions
var entry = entries.Entry;
if (entry.IsDirectory)
{
var dirPath = Path.Combine(destination, entry.Key.NotNull("Entry Key is null"));
if (
Path.GetDirectoryName(dirPath + "/") is { } emptyDirectory
&& seenDirectories.Add(dirPath)
)
{
Directory.CreateDirectory(emptyDirectory);
}
continue;
}
// Create each directory
// Create each directory if not already created
var path = Path.Combine(destination, entry.Key.NotNull("Entry Key is null"));
if (Path.GetDirectoryName(path) is { } directory && seenDirectories.Add(path))
if (Path.GetDirectoryName(path) is { } directory)
{
Directory.CreateDirectory(directory);
if (!Directory.Exists(directory) && !seenDirectories.Contains(directory))
{
Directory.CreateDirectory(directory);
seenDirectories.Add(directory);
}
}
// Write file

View File

@@ -163,7 +163,7 @@ public class SevenZipArchive : AbstractArchive<SevenZipArchiveEntry, SevenZipVol
{
stream.Position = 0;
var reader = new ArchiveReader();
reader.Open(stream);
reader.Open(stream, lookForHeader: ReaderOptions.LookForHeader);
_database = reader.ReadDatabase(new PasswordProvider(ReaderOptions.Password));
}
}

View File

@@ -16,5 +16,10 @@ public enum CompressionType
Unknown,
Deflate64,
Shrink,
Lzw
Lzw,
Reduce1,
Reduce2,
Reduce3,
Reduce4,
Explode
}

View File

@@ -37,6 +37,7 @@ internal static class ExtractionMethods
options ??= new ExtractionOptions() { Overwrite = true };
var file = Path.GetFileName(entry.Key.NotNull("Entry Key is null")).NotNull("File is null");
file = Utility.ReplaceInvalidFileNameChars(file);
if (options.ExtractFullPath)
{
var folder = Path.GetDirectoryName(entry.Key.NotNull("Entry Key is null"))

View File

@@ -61,9 +61,8 @@ public abstract class RarVolume : Volume
var fh = (FileHeader)header;
if (fh.FileName == "CMT")
{
var part = CreateFilePart(lastMarkHeader!, fh);
var buffer = new byte[fh.CompressedSize];
part.GetCompressedStream().Read(buffer, 0, buffer.Length);
fh.PackedStream.Read(buffer, 0, buffer.Length);
Comment = Encoding.UTF8.GetString(buffer, 0, buffer.Length - 1);
}
}

View File

@@ -1220,23 +1220,46 @@ internal class ArchiveReader
#region Public Methods
public void Open(Stream stream)
public void Open(Stream stream, bool lookForHeader)
{
Close();
_streamOrigin = stream.Position;
_streamEnding = stream.Length;
// TODO: Check Signature!
_header = new byte[0x20];
for (var offset = 0; offset < 0x20; )
var canScan = lookForHeader ? 0x80000 - 20 : 0;
while (true)
{
var delta = stream.Read(_header, offset, 0x20 - offset);
if (delta == 0)
// TODO: Check Signature!
_header = new byte[0x20];
for (var offset = 0; offset < 0x20; )
{
throw new EndOfStreamException();
var delta = stream.Read(_header, offset, 0x20 - offset);
if (delta == 0)
{
throw new EndOfStreamException();
}
offset += delta;
}
offset += delta;
if (
!lookForHeader
|| _header
.AsSpan(0, length: 6)
.SequenceEqual<byte>([0x37, 0x7A, 0xBC, 0xAF, 0x27, 0x1C])
)
{
break;
}
if (canScan == 0)
{
throw new InvalidFormatException("Unable to find 7z signature");
}
canScan--;
stream.Position = ++_streamOrigin;
}
_stream = stream;

View File

@@ -101,57 +101,85 @@ internal sealed class TarHeader
internal bool Read(BinaryReader reader)
{
var buffer = ReadBlock(reader);
if (buffer.Length == 0)
string? longName = null;
string? longLinkName = null;
var hasLongValue = true;
byte[] buffer;
EntryType entryType;
do
{
buffer = ReadBlock(reader);
if (buffer.Length == 0)
{
return false;
}
entryType = ReadEntryType(buffer);
// LongName and LongLink headers can follow each other and need
// to apply to the header that follows them.
if (entryType == EntryType.LongName)
{
longName = ReadLongName(reader, buffer);
continue;
}
else if (entryType == EntryType.LongLink)
{
longLinkName = ReadLongName(reader, buffer);
continue;
}
hasLongValue = false;
} while (hasLongValue);
// Check header checksum
if (!checkChecksum(buffer))
{
return false;
}
// for symlinks, additionally read the linkname
if (ReadEntryType(buffer) == EntryType.SymLink)
{
LinkName = ArchiveEncoding.Decode(buffer, 157, 100).TrimNulls();
}
if (ReadEntryType(buffer) == EntryType.LongName)
{
Name = ReadLongName(reader, buffer);
buffer = ReadBlock(reader);
}
else
{
Name = ArchiveEncoding.Decode(buffer, 0, 100).TrimNulls();
}
EntryType = ReadEntryType(buffer);
Name = longName ?? ArchiveEncoding.Decode(buffer, 0, 100).TrimNulls();
EntryType = entryType;
Size = ReadSize(buffer);
// for symlinks, additionally read the linkname
if (entryType == EntryType.SymLink || entryType == EntryType.HardLink)
{
LinkName = longLinkName ?? ArchiveEncoding.Decode(buffer, 157, 100).TrimNulls();
}
Mode = ReadAsciiInt64Base8(buffer, 100, 7);
if (EntryType == EntryType.Directory)
if (entryType == EntryType.Directory)
{
Mode |= 0b1_000_000_000;
}
UserId = ReadAsciiInt64Base8oldGnu(buffer, 108, 7);
GroupId = ReadAsciiInt64Base8oldGnu(buffer, 116, 7);
var unixTimeStamp = ReadAsciiInt64Base8(buffer, 136, 11);
LastModifiedTime = EPOCH.AddSeconds(unixTimeStamp).ToLocalTime();
var unixTimeStamp = ReadAsciiInt64Base8(buffer, 136, 11);
LastModifiedTime = EPOCH.AddSeconds(unixTimeStamp).ToLocalTime();
Magic = ArchiveEncoding.Decode(buffer, 257, 6).TrimNulls();
if (!string.IsNullOrEmpty(Magic) && "ustar".Equals(Magic))
{
var namePrefix = ArchiveEncoding.Decode(buffer, 345, 157);
namePrefix = namePrefix.TrimNulls();
var namePrefix = ArchiveEncoding.Decode(buffer, 345, 157).TrimNulls();
if (!string.IsNullOrEmpty(namePrefix))
{
Name = namePrefix + "/" + Name;
}
}
if (EntryType != EntryType.LongName && Name.Length == 0)
if (entryType != EntryType.LongName && Name.Length == 0)
{
return false;
}
return true;
}
@@ -289,6 +317,42 @@ internal sealed class TarHeader
(byte)' '
};
internal static bool checkChecksum(byte[] buf)
{
const int eightSpacesChksum = 256;
var buffer = new Span<byte>(buf).Slice(0, 512);
int posix_sum = eightSpacesChksum;
int sun_sum = eightSpacesChksum;
foreach (byte b in buffer)
{
posix_sum += b;
sun_sum += unchecked((sbyte)b);
}
// Special case, empty file header
if (posix_sum == eightSpacesChksum)
{
return true;
}
// Remove current checksum from calculation
foreach (byte b in buffer.Slice(148, 8))
{
posix_sum -= b;
sun_sum -= unchecked((sbyte)b);
}
// Read and compare checksum for header
var crc = ReadAsciiInt64Base8(buf, 148, 7);
if (crc != posix_sum && crc != sun_sum)
{
return false;
}
return true;
}
internal static int RecalculateChecksum(byte[] buf)
{
// Set default value for checksum. That is 8 spaces.

View File

@@ -36,10 +36,7 @@ internal class StreamingZipHeaderFactory : ZipHeaderFactory
uint headerBytes = 0;
if (
_lastEntryHeader != null
&& (
FlagUtility.HasFlag(_lastEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor)
|| _lastEntryHeader.IsZip64
)
&& FlagUtility.HasFlag(_lastEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor)
)
{
if (_lastEntryHeader.Part is null)
@@ -49,7 +46,9 @@ internal class StreamingZipHeaderFactory : ZipHeaderFactory
reader = ((StreamingZipFilePart)_lastEntryHeader.Part).FixStreamedFileLocation(
ref rewindableStream
);
var pos = rewindableStream.CanSeek ? (long?)rewindableStream.Position : null;
var crc = reader.ReadUInt32();
if (crc == POST_DATA_DESCRIPTOR)
{
@@ -82,6 +81,60 @@ internal class StreamingZipHeaderFactory : ZipHeaderFactory
_lastEntryHeader.DataStartPosition = pos - _lastEntryHeader.CompressedSize;
}
}
else if (_lastEntryHeader != null && _lastEntryHeader.IsZip64)
{
if (_lastEntryHeader.Part is null)
continue;
reader = ((StreamingZipFilePart)_lastEntryHeader.Part).FixStreamedFileLocation(
ref rewindableStream
);
var pos = rewindableStream.CanSeek ? (long?)rewindableStream.Position : null;
headerBytes = reader.ReadUInt32();
var version = reader.ReadUInt16();
var flags = (HeaderFlags)reader.ReadUInt16();
var compressionMethod = (ZipCompressionMethod)reader.ReadUInt16();
var lastModifiedDate = reader.ReadUInt16();
var lastModifiedTime = reader.ReadUInt16();
var crc = reader.ReadUInt32();
if (crc == POST_DATA_DESCRIPTOR)
{
crc = reader.ReadUInt32();
}
_lastEntryHeader.Crc = crc;
// The DataDescriptor can be either 64bit or 32bit
var compressed_size = reader.ReadUInt32();
var uncompressed_size = reader.ReadUInt32();
// Check if we have header or 64bit DataDescriptor
var test_header = !(headerBytes == 0x04034b50 || headerBytes == 0x02014b50);
var test_64bit = ((long)uncompressed_size << 32) | compressed_size;
if (test_64bit == _lastEntryHeader.CompressedSize && test_header)
{
_lastEntryHeader.UncompressedSize =
((long)reader.ReadUInt32() << 32) | headerBytes;
headerBytes = reader.ReadUInt32();
}
else
{
_lastEntryHeader.UncompressedSize = uncompressed_size;
}
if (pos.HasValue)
{
_lastEntryHeader.DataStartPosition = pos - _lastEntryHeader.CompressedSize;
// 4 = First 4 bytes of the entry header (i.e. 50 4B 03 04)
rewindableStream.Position = pos.Value + 4;
}
}
else
{
headerBytes = reader.ReadUInt32();

View File

@@ -4,6 +4,11 @@ internal enum ZipCompressionMethod
{
None = 0,
Shrink = 1,
Reduce1 = 2,
Reduce2 = 3,
Reduce3 = 4,
Reduce4 = 5,
Explode = 6,
Deflate = 8,
Deflate64 = 9,
BZip2 = 12,

View File

@@ -31,6 +31,11 @@ public class ZipEntry : Entry
ZipCompressionMethod.PPMd => CompressionType.PPMd,
ZipCompressionMethod.None => CompressionType.None,
ZipCompressionMethod.Shrink => CompressionType.Shrink,
ZipCompressionMethod.Reduce1 => CompressionType.Reduce1,
ZipCompressionMethod.Reduce2 => CompressionType.Reduce2,
ZipCompressionMethod.Reduce3 => CompressionType.Reduce3,
ZipCompressionMethod.Reduce4 => CompressionType.Reduce4,
ZipCompressionMethod.Explode => CompressionType.Explode,
_ => CompressionType.Unknown
};

View File

@@ -7,8 +7,10 @@ using SharpCompress.Compressors;
using SharpCompress.Compressors.BZip2;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Compressors.Deflate64;
using SharpCompress.Compressors.Explode;
using SharpCompress.Compressors.LZMA;
using SharpCompress.Compressors.PPMd;
using SharpCompress.Compressors.Reduce;
using SharpCompress.Compressors.Shrink;
using SharpCompress.Compressors.Xz;
using SharpCompress.IO;
@@ -89,6 +91,32 @@ internal abstract class ZipFilePart : FilePart
Header.UncompressedSize
);
}
case ZipCompressionMethod.Reduce1:
{
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 1);
}
case ZipCompressionMethod.Reduce2:
{
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 2);
}
case ZipCompressionMethod.Reduce3:
{
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 3);
}
case ZipCompressionMethod.Reduce4:
{
return new ReduceStream(stream, Header.CompressedSize, Header.UncompressedSize, 4);
}
case ZipCompressionMethod.Explode:
{
return new ExplodeStream(
stream,
Header.CompressedSize,
Header.UncompressedSize,
Header.Flags
);
}
case ZipCompressionMethod.Deflate:
{
return new DeflateStream(stream, CompressionMode.Decompress);
@@ -203,6 +231,10 @@ internal abstract class ZipFilePart : FilePart
{
case ZipCompressionMethod.None:
case ZipCompressionMethod.Shrink:
case ZipCompressionMethod.Reduce1:
case ZipCompressionMethod.Reduce2:
case ZipCompressionMethod.Reduce3:
case ZipCompressionMethod.Reduce4:
case ZipCompressionMethod.Deflate:
case ZipCompressionMethod.Deflate64:
case ZipCompressionMethod.BZip2:

View File

@@ -56,11 +56,11 @@ internal class ZipHeaderFactory
case POST_DATA_DESCRIPTOR:
{
if (
FlagUtility.HasFlag(
_lastEntryHeader != null
&& FlagUtility.HasFlag(
_lastEntryHeader.NotNull().Flags,
HeaderFlags.UsePostDataDescriptor
)
&& _lastEntryHeader != null
)
{
_lastEntryHeader.Crc = reader.ReadUInt32();

View File

@@ -1,4 +1,4 @@
#nullable disable
#nullable disable
using System;
using System.IO;
@@ -42,14 +42,17 @@ internal class CBZip2InputStream : Stream
private static void Cadvise()
{
//System.out.Println("CRC Error");
//throw new CCoruptionError();
throw new InvalidOperationException("BZip2 error");
}
private static void BadBGLengths() => Cadvise();
private static void BitStreamEOF() => Cadvise();
private static void CompressedStreamEOF() => Cadvise();
private static void CompressedStreamEOF()
{
throw new InvalidOperationException("BZip2 compressed file ends unexpectedly");
}
private void MakeMaps()
{

View File

@@ -0,0 +1,746 @@
using System;
using System.IO;
using SharpCompress.Common.Zip.Headers;
namespace SharpCompress.Compressors.Explode;
public class ExplodeStream : Stream
{
private const int INVALID_CODE = 99;
private const int WSIZE = 64 * 1024;
private readonly long unCompressedSize;
private readonly int compressedSize;
private readonly HeaderFlags generalPurposeBitFlag;
private readonly Stream inStream;
private huftNode[]? hufLiteralCodeTable; /* literal code table */
private huftNode[] hufLengthCodeTable = []; /* length code table */
private huftNode[] hufDistanceCodeTable = []; /* distance code table */
private int bitsForLiteralCodeTable;
private int bitsForLengthCodeTable;
private int bitsForDistanceCodeTable;
private int numOfUncodedLowerDistanceBits; /* number of uncoded lower distance bits */
private ulong bitBuffer;
private int bitBufferCount;
private readonly byte[] windowsBuffer;
private uint maskForLiteralCodeTable;
private uint maskForLengthCodeTable;
private uint maskForDistanceCodeTable;
private uint maskForDistanceLowBits;
private long outBytesCount;
private int windowIndex;
private int distance;
private int length;
internal ExplodeStream(
Stream inStr,
long compressedSize,
long uncompressedSize,
HeaderFlags generalPurposeBitFlag
)
{
inStream = inStr;
this.compressedSize = (int)compressedSize;
unCompressedSize = (long)uncompressedSize;
this.generalPurposeBitFlag = generalPurposeBitFlag;
explode_SetTables();
windowsBuffer = new byte[WSIZE];
explode_var_init();
}
public override void Flush()
{
throw new NotImplementedException();
}
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotImplementedException();
}
public override void SetLength(long value)
{
throw new NotImplementedException();
}
public override void Write(byte[] buffer, int offset, int count)
{
throw new NotImplementedException();
}
public override bool CanRead => true;
public override bool CanSeek => false;
public override bool CanWrite => false;
public override long Length => unCompressedSize;
public override long Position
{
get => outBytesCount;
set { }
}
static uint[] mask_bits = new uint[]
{
0x0000,
0x0001,
0x0003,
0x0007,
0x000f,
0x001f,
0x003f,
0x007f,
0x00ff,
0x01ff,
0x03ff,
0x07ff,
0x0fff,
0x1fff,
0x3fff,
0x7fff,
0xffff
};
/* Tables for length and distance */
static int[] cplen2 = new int[]
{
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
46,
47,
48,
49,
50,
51,
52,
53,
54,
55,
56,
57,
58,
59,
60,
61,
62,
63,
64,
65
};
static int[] cplen3 = new int[]
{
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
26,
27,
28,
29,
30,
31,
32,
33,
34,
35,
36,
37,
38,
39,
40,
41,
42,
43,
44,
45,
46,
47,
48,
49,
50,
51,
52,
53,
54,
55,
56,
57,
58,
59,
60,
61,
62,
63,
64,
65,
66
};
static int[] extra = new int[]
{
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
8
};
static int[] cpdist4 = new int[]
{
1,
65,
129,
193,
257,
321,
385,
449,
513,
577,
641,
705,
769,
833,
897,
961,
1025,
1089,
1153,
1217,
1281,
1345,
1409,
1473,
1537,
1601,
1665,
1729,
1793,
1857,
1921,
1985,
2049,
2113,
2177,
2241,
2305,
2369,
2433,
2497,
2561,
2625,
2689,
2753,
2817,
2881,
2945,
3009,
3073,
3137,
3201,
3265,
3329,
3393,
3457,
3521,
3585,
3649,
3713,
3777,
3841,
3905,
3969,
4033
};
static int[] cpdist8 = new int[]
{
1,
129,
257,
385,
513,
641,
769,
897,
1025,
1153,
1281,
1409,
1537,
1665,
1793,
1921,
2049,
2177,
2305,
2433,
2561,
2689,
2817,
2945,
3073,
3201,
3329,
3457,
3585,
3713,
3841,
3969,
4097,
4225,
4353,
4481,
4609,
4737,
4865,
4993,
5121,
5249,
5377,
5505,
5633,
5761,
5889,
6017,
6145,
6273,
6401,
6529,
6657,
6785,
6913,
7041,
7169,
7297,
7425,
7553,
7681,
7809,
7937,
8065
};
private int get_tree(int[] arrBitLengths, int numberExpected)
/* Get the bit lengths for a code representation from the compressed
stream. If get_tree() returns 4, then there is an error in the data.
Otherwise zero is returned. */
{
/* get bit lengths */
int inIndex = inStream.ReadByte() + 1; /* length/count pairs to read */
int outIndex = 0; /* next code */
do
{
int nextByte = inStream.ReadByte();
int bitLengthOfCodes = (nextByte & 0xf) + 1; /* bits in code (1..16) */
int numOfCodes = ((nextByte & 0xf0) >> 4) + 1; /* codes with those bits (1..16) */
if (outIndex + numOfCodes > numberExpected)
return 4; /* don't overflow arrBitLengths[] */
do
{
arrBitLengths[outIndex++] = bitLengthOfCodes;
} while ((--numOfCodes) != 0);
} while ((--inIndex) != 0);
return outIndex != numberExpected ? 4 : 0; /* should have read numberExpected of them */
}
private int explode_SetTables()
{
int returnCode; /* return codes */
int[] arrBitLengthsForCodes = new int[256]; /* bit lengths for codes */
bitsForLiteralCodeTable = 0; /* bits for tb */
bitsForLengthCodeTable = 7;
bitsForDistanceCodeTable = (compressedSize) > 200000 ? 8 : 7;
if ((generalPurposeBitFlag & HeaderFlags.Bit2) != 0)
/* With literal tree--minimum match length is 3 */
{
bitsForLiteralCodeTable = 9; /* base table size for literals */
if ((returnCode = get_tree(arrBitLengthsForCodes, 256)) != 0)
return returnCode;
if (
(
returnCode = HuftTree.huftbuid(
arrBitLengthsForCodes,
256,
256,
[],
[],
out hufLiteralCodeTable,
ref bitsForLiteralCodeTable
)
) != 0
)
return returnCode;
if ((returnCode = get_tree(arrBitLengthsForCodes, 64)) != 0)
return returnCode;
if (
(
returnCode = HuftTree.huftbuid(
arrBitLengthsForCodes,
64,
0,
cplen3,
extra,
out hufLengthCodeTable,
ref bitsForLengthCodeTable
)
) != 0
)
return returnCode;
}
else
/* No literal tree--minimum match length is 2 */
{
if ((returnCode = get_tree(arrBitLengthsForCodes, 64)) != 0)
return returnCode;
hufLiteralCodeTable = null;
if (
(
returnCode = HuftTree.huftbuid(
arrBitLengthsForCodes,
64,
0,
cplen2,
extra,
out hufLengthCodeTable,
ref bitsForLengthCodeTable
)
) != 0
)
return returnCode;
}
if ((returnCode = get_tree(arrBitLengthsForCodes, 64)) != 0)
return (int)returnCode;
if ((generalPurposeBitFlag & HeaderFlags.Bit1) != 0) /* true if 8K */
{
numOfUncodedLowerDistanceBits = 7;
returnCode = HuftTree.huftbuid(
arrBitLengthsForCodes,
64,
0,
cpdist8,
extra,
out hufDistanceCodeTable,
ref bitsForDistanceCodeTable
);
}
else /* else 4K */
{
numOfUncodedLowerDistanceBits = 6;
returnCode = HuftTree.huftbuid(
arrBitLengthsForCodes,
64,
0,
cpdist4,
extra,
out hufDistanceCodeTable,
ref bitsForDistanceCodeTable
);
}
return returnCode;
}
private void NeedBits(int numberOfBits)
{
while (bitBufferCount < (numberOfBits))
{
bitBuffer |= (uint)inStream.ReadByte() << bitBufferCount;
bitBufferCount += 8;
}
}
private void DumpBits(int numberOfBits)
{
bitBuffer >>= numberOfBits;
bitBufferCount -= numberOfBits;
}
int DecodeHuft(huftNode[] htab, int bits, uint mask, out huftNode huftPointer, out int e)
{
NeedBits(bits);
int tabOffset = (int)(~bitBuffer & mask);
huftPointer = htab[tabOffset];
while (true)
{
DumpBits(huftPointer.NumberOfBitsUsed);
e = huftPointer.NumberOfExtraBits;
if (e <= 32)
break;
if (e == INVALID_CODE)
return 1;
e &= 31;
NeedBits(e);
tabOffset = (int)(~bitBuffer & mask_bits[e]);
huftPointer = huftPointer.ChildNodes[tabOffset];
}
return 0;
}
private void explode_var_init()
{
/* explode the coded data */
bitBuffer = 0;
bitBufferCount = 0;
maskForLiteralCodeTable = mask_bits[bitsForLiteralCodeTable]; //only used in explode_lit
maskForLengthCodeTable = mask_bits[bitsForLengthCodeTable];
maskForDistanceCodeTable = mask_bits[bitsForDistanceCodeTable];
maskForDistanceLowBits = mask_bits[numOfUncodedLowerDistanceBits];
outBytesCount = 0;
windowIndex = 0; /* initialize bit buffer, window */
}
public override int Read(byte[] buffer, int offset, int count)
{
int countIndex = 0;
while (countIndex < count && outBytesCount < unCompressedSize) /* do until unCompressedSize bytes uncompressed */
{
if (length == 0)
{
NeedBits(1);
bool literal = (bitBuffer & 1) == 1;
DumpBits(1);
huftNode huftPointer;
if (literal) /* then literal--decode it */
{
byte nextByte;
if (hufLiteralCodeTable != null)
{
/* get coded literal */
if (
DecodeHuft(
hufLiteralCodeTable,
bitsForLiteralCodeTable,
maskForLiteralCodeTable,
out huftPointer,
out _
) != 0
)
throw new Exception("Error decoding literal value");
nextByte = (byte)huftPointer.Value;
}
else
{
NeedBits(8);
nextByte = (byte)bitBuffer;
DumpBits(8);
}
buffer[offset + (countIndex++)] = nextByte;
windowsBuffer[windowIndex++] = nextByte;
outBytesCount++;
if (windowIndex == WSIZE)
windowIndex = 0;
continue;
}
NeedBits(numOfUncodedLowerDistanceBits); /* get distance low bits */
distance = (int)(bitBuffer & maskForDistanceLowBits);
DumpBits(numOfUncodedLowerDistanceBits);
/* get coded distance high bits */
if (
DecodeHuft(
hufDistanceCodeTable,
bitsForDistanceCodeTable,
maskForDistanceCodeTable,
out huftPointer,
out _
) != 0
)
throw new Exception("Error decoding distance high bits");
distance = windowIndex - (distance + huftPointer.Value); /* construct offset */
/* get coded length */
if (
DecodeHuft(
hufLengthCodeTable,
bitsForLengthCodeTable,
maskForLengthCodeTable,
out huftPointer,
out int extraBitLength
) != 0
)
throw new Exception("Error decoding coded length");
length = huftPointer.Value;
if (extraBitLength != 0) /* get length extra bits */
{
NeedBits(8);
length += (int)(bitBuffer & 0xff);
DumpBits(8);
}
if (length > (unCompressedSize - outBytesCount))
length = (int)(unCompressedSize - outBytesCount);
distance &= WSIZE - 1;
}
while (length != 0 && countIndex < count)
{
byte nextByte = windowsBuffer[distance++];
buffer[offset + (countIndex++)] = nextByte;
windowsBuffer[windowIndex++] = nextByte;
outBytesCount++;
if (distance == WSIZE)
distance = 0;
if (windowIndex == WSIZE)
windowIndex = 0;
length--;
}
}
return countIndex;
}
}

View File

@@ -0,0 +1,269 @@
/*
* This code has been converted to C# based on the original huft_tree code found in
* inflate.c -- by Mark Adler version c17e, 30 Mar 2007
*/
namespace SharpCompress.Compressors.Explode;
/// <summary>
/// One entry of a linked Huffman decode table as built by <c>HuftTree.huftbuid</c>.
/// Mirrors the C <c>struct huft</c> from inflate.c.
/// </summary>
public class huftNode
{
    // Operation marker / extra-bit count. HuftTree stores:
    //   32 or 31  -> simple literal code (31 marks the end-of-block value 256),
    //   32 + n    -> link to an n-bit sub-table (ChildNodes),
    //   99        -> unused/invalid entry (HuftTree.INVALID_CODE).
    public int NumberOfExtraBits; /* number of extra bits or operation */
    public int NumberOfBitsUsed; /* number of bits in this code or subcode */
    public int Value; /* literal, length base, or distance base */
    public huftNode[] ChildNodes = []; /* next level of table */
}
/// <summary>
/// Builds the linked Huffman decode tables used by the PKZIP "explode"
/// decompressor. Converted from the C <c>huft_build</c> routine in
/// inflate.c by Mark Adler.
/// </summary>
public static class HuftTree
{
    // Marker stored in huftNode.NumberOfExtraBits for an unused table entry.
    private const int INVALID_CODE = 99;

    /* If BMAX needs to be larger than 16, then h and x[] should be ulg. */
    private const int BMAX = 16; /* maximum bit length of any code (16 for explode) */
    private const int N_MAX = 288; /* maximum number of codes in any set */

    /// <param name="arrBitLengthForCodes">Code length (in bits) for each code value; 0 means unused.</param>
    /// <param name="numberOfCodes">Number of entries in <paramref name="arrBitLengthForCodes"/>.</param>
    /// <param name="numberOfSimpleValueCodes">Values below this decode to themselves ("simple" codes).</param>
    /// <param name="arrBaseValuesForNonSimpleCodes">Base values for non-simple (length/distance) codes.</param>
    /// <param name="arrExtraBitsForNonSimpleCodes">Extra-bit counts for non-simple codes.</param>
    /// <param name="outHufTable">Receives the root decode table.</param>
    /// <param name="outBitsForTable">In: requested root-table bits; out: actual bits used.</param>
    // NOTE(review): "huftbuid" looks like a typo of "huftbuild" (C: huft_build);
    // kept unchanged because renaming would break the public API.
    public static int huftbuid(
        int[] arrBitLengthForCodes,
        int numberOfCodes,
        int numberOfSimpleValueCodes,
        int[] arrBaseValuesForNonSimpleCodes,
        int[] arrExtraBitsForNonSimpleCodes,
        out huftNode[] outHufTable,
        ref int outBitsForTable
    )
    /* Given a list of code lengths and a maximum table size, make a set of
       tables to decode that set of codes. Return zero on success, one if
       the given code set is incomplete (the tables are still built in this
       case), two if the input is invalid (all zero length codes or an
       oversubscribed set of lengths), and three if not enough memory.
       The code with value 256 is special, and the tables are constructed
       so that no bits beyond that code are fetched when that code is
       decoded. */
    {
        outHufTable = [];
        /* Generate counts for each bit length */
        int lengthOfEOBcode = numberOfCodes > 256 ? arrBitLengthForCodes[256] : BMAX; /* set length of EOB code, if any */
        int[] arrBitLengthCount = new int[BMAX + 1];
        for (int i = 0; i < BMAX + 1; i++)
            arrBitLengthCount[i] = 0;
        int pIndex = 0;
        int counterCurrentCode = numberOfCodes;
        do
        {
            arrBitLengthCount[arrBitLengthForCodes[pIndex]]++;
            pIndex++; /* assume all entries <= BMAX */
        } while ((--counterCurrentCode) != 0);
        if (arrBitLengthCount[0] == numberOfCodes) /* null input--all zero length codes */
        {
            // NOTE(review): the header comment says all-zero-length input returns 2,
            // but this returns 0 (empty table treated as success), matching the
            // original C code -- kept as-is.
            return 0;
        }
        /* Find minimum and maximum length, bound *outBitsForTable by those */
        int counter;
        for (counter = 1; counter <= BMAX; counter++)
            if (arrBitLengthCount[counter] != 0)
                break;
        int numberOfBitsInCurrentCode = counter; /* minimum code length */
        if (outBitsForTable < counter)
            outBitsForTable = counter;
        for (counterCurrentCode = BMAX; counterCurrentCode != 0; counterCurrentCode--)
            if (arrBitLengthCount[counterCurrentCode] != 0)
                break;
        int maximumCodeLength = counterCurrentCode; /* maximum code length */
        if (outBitsForTable > counterCurrentCode)
            outBitsForTable = counterCurrentCode;
        /* Adjust last length count to fill out codes, if needed */
        int numberOfDummyCodesAdded;
        for (
            numberOfDummyCodesAdded = 1 << counter;
            counter < counterCurrentCode;
            counter++, numberOfDummyCodesAdded <<= 1
        )
            if ((numberOfDummyCodesAdded -= arrBitLengthCount[counter]) < 0)
                return 2; /* bad input: more codes than bits */
        if ((numberOfDummyCodesAdded -= arrBitLengthCount[counterCurrentCode]) < 0)
            return 2;
        arrBitLengthCount[counterCurrentCode] += numberOfDummyCodesAdded;
        /* Generate starting offsets into the value table for each length */
        int[] bitOffset = new int[BMAX + 1];
        bitOffset[1] = 0;
        counter = 0;
        pIndex = 1;
        int xIndex = 2;
        while ((--counterCurrentCode) != 0)
        { /* note that i == g from above */
            bitOffset[xIndex++] = (counter += arrBitLengthCount[pIndex++]);
        }
        /* Make a table of values in order of bit lengths */
        int[] arrValuesInOrderOfBitLength = new int[N_MAX];
        for (int i = 0; i < N_MAX; i++)
            arrValuesInOrderOfBitLength[i] = 0;
        pIndex = 0;
        counterCurrentCode = 0;
        do
        {
            if ((counter = arrBitLengthForCodes[pIndex++]) != 0)
                arrValuesInOrderOfBitLength[bitOffset[counter]++] = counterCurrentCode;
        } while (++counterCurrentCode < numberOfCodes);
        numberOfCodes = bitOffset[maximumCodeLength]; /* set numberOfCodes to length of v */
        /* Generate the Huffman codes and for each, make the table entries */
        bitOffset[0] = counterCurrentCode = 0; /* first Huffman code is zero */
        pIndex = 0; /* grab values in bit order */
        int tableLevel = -1; /* no tables yet--level -1 */
        int bitsBeforeThisTable = 0;
        int[] arrLX = new int[BMAX + 1];
        int stackOfBitsPerTable = 1; /* stack of bits per table */
        arrLX[stackOfBitsPerTable - 1] = 0; /* no bits decoded yet */
        huftNode[][] arrHufTableStack = new huftNode[BMAX][];
        huftNode[] pointerToCurrentTable = [];
        int numberOfEntriesInCurrentTable = 0;
        bool first = true;
        /* go through the bit lengths (k already is bits in shortest code) */
        for (; numberOfBitsInCurrentCode <= maximumCodeLength; numberOfBitsInCurrentCode++)
        {
            int counterForCodes = arrBitLengthCount[numberOfBitsInCurrentCode];
            while ((counterForCodes--) != 0)
            {
                /* here i is the Huffman code of length k bits for value *p */
                /* make tables up to required level */
                while (
                    numberOfBitsInCurrentCode
                    > bitsBeforeThisTable + arrLX[stackOfBitsPerTable + tableLevel]
                )
                {
                    bitsBeforeThisTable += arrLX[stackOfBitsPerTable + (tableLevel++)]; /* add bits already decoded */
                    /* compute minimum size table less than or equal to *outBitsForTable bits */
                    numberOfEntriesInCurrentTable =
                        (numberOfEntriesInCurrentTable = maximumCodeLength - bitsBeforeThisTable)
                        > outBitsForTable
                            ? outBitsForTable
                            : numberOfEntriesInCurrentTable; /* upper limit */
                    int fBitCounter1 =
                        1 << (counter = numberOfBitsInCurrentCode - bitsBeforeThisTable);
                    if (fBitCounter1 > counterForCodes + 1) /* try a k-w bit table */
                    { /* too few codes for k-w bit table */
                        fBitCounter1 -= counterForCodes + 1; /* deduct codes from patterns left */
                        xIndex = numberOfBitsInCurrentCode;
                        while (++counter < numberOfEntriesInCurrentTable) /* try smaller tables up to z bits */
                        {
                            if ((fBitCounter1 <<= 1) <= arrBitLengthCount[++xIndex])
                                break; /* enough codes to use up j bits */
                            fBitCounter1 -= arrBitLengthCount[xIndex]; /* else deduct codes from patterns */
                        }
                    }
                    if (
                        bitsBeforeThisTable + counter > lengthOfEOBcode
                        && bitsBeforeThisTable < lengthOfEOBcode
                    )
                        counter = lengthOfEOBcode - bitsBeforeThisTable; /* make EOB code end at table */
                    numberOfEntriesInCurrentTable = 1 << counter; /* table entries for j-bit table */
                    arrLX[stackOfBitsPerTable + tableLevel] = counter; /* set table size in stack */
                    /* allocate and link in new table */
                    pointerToCurrentTable = new huftNode[numberOfEntriesInCurrentTable];
                    // publish the first table allocated as the root table in outHufTable
                    if (first)
                    {
                        outHufTable = pointerToCurrentTable; /* link to list for huft_free() */
                        first = false;
                    }
                    arrHufTableStack[tableLevel] = pointerToCurrentTable; /* table starts after link */
                    /* connect to last table, if there is one */
                    if (tableLevel != 0)
                    {
                        bitOffset[tableLevel] = counterCurrentCode; /* save pattern for backing up */
                        huftNode vHuft = new huftNode
                        {
                            NumberOfBitsUsed = arrLX[stackOfBitsPerTable + tableLevel - 1], /* bits to dump before this table */
                            NumberOfExtraBits = 32 + counter, /* bits in this table */
                            ChildNodes = pointerToCurrentTable /* pointer to this table */
                        };
                        counter =
                            (counterCurrentCode & ((1 << bitsBeforeThisTable) - 1))
                            >> (bitsBeforeThisTable - arrLX[stackOfBitsPerTable + tableLevel - 1]);
                        arrHufTableStack[tableLevel - 1][counter] = vHuft; /* connect to last table */
                    }
                }
                /* set up table entry in r */
                huftNode vHuft1 = new huftNode
                {
                    NumberOfBitsUsed = numberOfBitsInCurrentCode - bitsBeforeThisTable
                };
                if (pIndex >= numberOfCodes)
                    vHuft1.NumberOfExtraBits = INVALID_CODE; /* out of values--invalid code */
                else if (arrValuesInOrderOfBitLength[pIndex] < numberOfSimpleValueCodes)
                {
                    vHuft1.NumberOfExtraBits = (
                        arrValuesInOrderOfBitLength[pIndex] < 256 ? 32 : 31
                    ); /* 256 is end-of-block code */
                    vHuft1.Value = arrValuesInOrderOfBitLength[pIndex++]; /* simple code is just the value */
                }
                else
                {
                    vHuft1.NumberOfExtraBits = arrExtraBitsForNonSimpleCodes[
                        arrValuesInOrderOfBitLength[pIndex] - numberOfSimpleValueCodes
                    ]; /* non-simple--look up in lists */
                    vHuft1.Value = arrBaseValuesForNonSimpleCodes[
                        arrValuesInOrderOfBitLength[pIndex++] - numberOfSimpleValueCodes
                    ];
                }
                /* fill code-like entries with r */
                int fBitCounter2 = 1 << (numberOfBitsInCurrentCode - bitsBeforeThisTable);
                for (
                    counter = counterCurrentCode >> bitsBeforeThisTable;
                    counter < numberOfEntriesInCurrentTable;
                    counter += fBitCounter2
                )
                    pointerToCurrentTable[counter] = vHuft1;
                /* backwards increment the k-bit code i */
                for (
                    counter = 1 << (numberOfBitsInCurrentCode - 1);
                    (counterCurrentCode & counter) != 0;
                    counter >>= 1
                )
                    counterCurrentCode ^= counter;
                counterCurrentCode ^= counter;
                /* backup over finished tables */
                while (
                    (counterCurrentCode & ((1 << bitsBeforeThisTable) - 1)) != bitOffset[tableLevel]
                )
                    bitsBeforeThisTable -= arrLX[stackOfBitsPerTable + (--tableLevel)];
            }
        }
        /* return actual size of base table */
        outBitsForTable = arrLX[stackOfBitsPerTable];
        /* Return true (1) if we were given an incomplete table */
        return (numberOfDummyCodesAdded != 0 && maximumCodeLength != 1) ? 1 : 0;
    }
}

View File

@@ -0,0 +1,249 @@
using System;
using System.IO;
namespace SharpCompress.Compressors.Reduce;
public class ReduceStream : Stream
{
    private readonly long unCompressedSize; // total size the stream decompresses to
    private readonly long compressedSize; // number of compressed bytes available in inStream
    private readonly Stream inStream;
    private long inByteCount; // compressed bytes consumed so far
    private const int EOF = 1234; // sentinel returned by NEXTBYTE() once compressedSize bytes are consumed
    private readonly int factor; // "reduce" compression factor from the zip entry
    private readonly int distanceMask;
    private readonly int lengthMask;
    private long outBytesCount; // decompressed bytes produced so far
    private readonly byte[] windowsBuffer; // sliding window used to resolve back-references
    private int windowIndex;
    private int length; // remaining bytes of the back-reference currently being copied
    private int distance;

    /// <summary>
    /// Read-only stream that decompresses data stored with the PKZIP "reduce" method.
    /// Reads the follower-set tables from the head of the compressed data on construction.
    /// </summary>
    /// <param name="inStr">Stream positioned at the compressed data.</param>
    /// <param name="compsize">Number of compressed bytes to read from <paramref name="inStr"/>.</param>
    /// <param name="unCompSize">Expected size of the decompressed data.</param>
    /// <param name="factor">Reduce factor taken from the zip entry's compression method.</param>
    public ReduceStream(Stream inStr, long compsize, long unCompSize, int factor)
    {
        inStream = inStr;
        compressedSize = compsize;
        unCompressedSize = unCompSize;
        inByteCount = 0;
        outBytesCount = 0;
        this.factor = factor;
        distanceMask = (int)mask_bits[factor] << 8;
        lengthMask = 0xff >> factor;
        windowIndex = 0;
        length = 0;
        distance = 0;
        windowsBuffer = new byte[WSIZE];
        outByte = 0;
        LoadBitLengthTable();
        LoadNextByteTable();
    }

    /// <summary>
    /// No-op. This stream is read-only, so there is nothing to flush.
    /// (Previously threw NotImplementedException, which broke callers that
    /// flush as part of disposing a wrapping stream.)
    /// </summary>
    public override void Flush() { }

    /// <summary>Not supported: the stream is forward-only.</summary>
    public override long Seek(long offset, SeekOrigin origin) =>
        throw new NotSupportedException("ReduceStream does not support seeking.");

    /// <summary>Not supported: the stream is read-only.</summary>
    public override void SetLength(long value) =>
        throw new NotSupportedException("ReduceStream does not support SetLength.");

    /// <summary>Not supported: the stream is read-only.</summary>
    public override void Write(byte[] buffer, int offset, int count) =>
        throw new NotSupportedException("ReduceStream does not support writing.");

    public override bool CanRead => true;
    public override bool CanSeek => false;
    public override bool CanWrite => false;
    public override long Length => unCompressedSize;

    public override long Position
    {
        get => outBytesCount;
        // NOTE(review): setter silently ignores writes; kept for compatibility,
        // though NotSupportedException would be the conventional behaviour.
        set { }
    }

    private const int RunLengthCode = 144; // escape byte that introduces a length/distance pair
    private const int WSIZE = 0x4000; // sliding window size

    // mask_bits[n] masks the low n bits of a value.
    private readonly uint[] mask_bits = new uint[]
    {
        0x0000,
        0x0001,
        0x0003,
        0x0007,
        0x000f,
        0x001f,
        0x003f,
        0x007f,
        0x00ff,
        0x01ff,
        0x03ff,
        0x07ff,
        0x0fff,
        0x1fff,
        0x3fff,
        0x7fff,
        0xffff
    };

    private int bitBufferCount; // number of valid bits currently held in bitBuffer
    private ulong bitBuffer; // bit accumulator; least-significant bits are consumed first

    // Returns the next compressed byte, or EOF once compressedSize bytes were consumed.
    private int NEXTBYTE()
    {
        if (inByteCount == compressedSize)
            return EOF;
        inByteCount++;
        return inStream.ReadByte();
    }

    // Reads nbits (at most 8, since the result is a byte) from the LSB-first
    // bit buffer into zdest, refilling from the input while the buffer holds
    // 24 bits or fewer.
    private void READBITS(int nbits, out byte zdest)
    {
        if (nbits > bitBufferCount)
        {
            int temp;
            // keep at most 32 bits buffered: refill while <= 24 bits are held
            while (bitBufferCount <= 8 * (int)(4 - 1) && (temp = NEXTBYTE()) != EOF)
            {
                bitBuffer |= (ulong)temp << bitBufferCount;
                bitBufferCount += 8;
            }
        }
        // NOTE(review): if EOF is hit with fewer than nbits buffered, the
        // missing bits read as zero and bitBufferCount goes negative -- same
        // best-effort behaviour as the original unzip code.
        zdest = (byte)(bitBuffer & (ulong)mask_bits[nbits]);
        bitBuffer >>= nbits;
        bitBufferCount -= nbits;
    }

    private byte[] bitCountTable = [];

    // Builds a lookup from follower-set size (1..255) to the number of bits
    // needed to index into a set of that size.
    private void LoadBitLengthTable()
    {
        byte[] bitPos = { 0, 2, 4, 8, 16, 32, 64, 128, 255 };
        bitCountTable = new byte[256];
        for (byte i = 1; i <= 8; i++)
        {
            int vMin = bitPos[i - 1] + 1;
            int vMax = bitPos[i];
            for (int j = vMin; j <= vMax; j++)
            {
                bitCountTable[j] = i;
            }
        }
    }

    private byte[][] nextByteTable = [];

    // Reads the 256 follower sets (stored for byte values 255 down to 0)
    // that prefix the compressed data: a 6-bit set length, then that many
    // 8-bit follower bytes.
    private void LoadNextByteTable()
    {
        nextByteTable = new byte[256][];
        for (int x = 255; x >= 0; x--)
        {
            READBITS(6, out byte Slen);
            nextByteTable[x] = new byte[Slen];
            for (int i = 0; i < Slen; i++)
            {
                READBITS(8, out nextByteTable[x][i]);
            }
        }
    }

    private byte outByte; // last byte produced; selects the follower set for the next one

    // Decodes the next byte using the follower-set tables: an empty set means
    // a raw 8-bit byte; otherwise a 1-bit flag selects raw byte vs. an index
    // into the previous byte's follower set.
    private byte GetNextByte()
    {
        if (nextByteTable[outByte].Length == 0)
        {
            READBITS(8, out outByte);
            return outByte;
        }
        READBITS(1, out byte nextBit);
        if (nextBit == 1)
        {
            READBITS(8, out outByte);
            return outByte;
        }
        READBITS(bitCountTable[nextByteTable[outByte].Length], out byte nextByteIndex);
        outByte = nextByteTable[outByte][nextByteIndex];
        return outByte;
    }

    /// <summary>
    /// Decompresses up to <paramref name="count"/> bytes into <paramref name="buffer"/>.
    /// </summary>
    /// <returns>Number of bytes written; 0 once unCompressedSize bytes were produced.</returns>
    public override int Read(byte[] buffer, int offset, int count)
    {
        int countIndex = 0;
        while (countIndex < count && outBytesCount < unCompressedSize)
        {
            if (length == 0) // not currently inside a back-reference copy
            {
                byte nextByte = GetNextByte();
                if (nextByte != RunLengthCode)
                {
                    // plain literal byte
                    buffer[offset + (countIndex++)] = nextByte;
                    windowsBuffer[windowIndex++] = nextByte;
                    outBytesCount++;
                    if (windowIndex == WSIZE)
                        windowIndex = 0;
                    continue;
                }
                nextByte = GetNextByte();
                if (nextByte == 0)
                {
                    // escaped literal: RunLengthCode followed by 0 emits 144 itself
                    buffer[offset + (countIndex++)] = RunLengthCode;
                    windowsBuffer[windowIndex++] = RunLengthCode;
                    outBytesCount++;
                    if (windowIndex == WSIZE)
                        windowIndex = 0;
                    continue;
                }
                // length/distance pair: low bits hold the length, high bits
                // contribute to the distance
                int lengthDistanceByte = nextByte;
                length = lengthDistanceByte & lengthMask;
                if (length == lengthMask)
                {
                    length += GetNextByte(); // extended length byte
                }
                length += 3;
                int distanceHighByte = (lengthDistanceByte << factor) & distanceMask;
                distance = windowIndex - (distanceHighByte + GetNextByte() + 1);
                distance &= WSIZE - 1; // wrap into the window
            }
            // copy the back-reference; resumes across Read() calls via this.length
            while (length != 0 && countIndex < count)
            {
                byte nextByte = windowsBuffer[distance++];
                buffer[offset + (countIndex++)] = nextByte;
                windowsBuffer[windowIndex++] = nextByte;
                outBytesCount++;
                if (distance == WSIZE)
                    distance = 0;
                if (windowIndex == WSIZE)
                    windowIndex = 0;
                length--;
            }
        }
        return countIndex;
    }
}

View File

@@ -2,9 +2,9 @@
<PropertyGroup>
<AssemblyTitle>SharpCompress - Pure C# Decompression/Compression</AssemblyTitle>
<NeutralLanguage>en-US</NeutralLanguage>
<VersionPrefix>0.37.2</VersionPrefix>
<AssemblyVersion>0.37.2</AssemblyVersion>
<FileVersion>0.37.2</FileVersion>
<VersionPrefix>0.38.0</VersionPrefix>
<AssemblyVersion>0.38.0</AssemblyVersion>
<FileVersion>0.38.0</FileVersion>
<Authors>Adam Hathcock</Authors>
<TargetFrameworks>net462;netstandard2.0;netstandard2.1;net6.0;net8.0</TargetFrameworks>
<AssemblyName>SharpCompress</AssemblyName>
@@ -24,15 +24,15 @@
<LangVersion>latest</LangVersion>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
<PackageReadmeFile>README.md</PackageReadmeFile>
<ContinuousIntegrationBuild>true</ContinuousIntegrationBuild>
<EmbedUntrackedSources>true</EmbedUntrackedSources>
<AllowedOutputExtensionsInPackageBuildOutputFolder>$(AllowedOutputExtensionsInPackageBuildOutputFolder);.pdb</AllowedOutputExtensionsInPackageBuildOutputFolder>
</PropertyGroup>
<PropertyGroup Condition=" '$(TargetFramework)' == 'net8.0' ">
<IsTrimmable>true</IsTrimmable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.SourceLink.GitHub">
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="ZstdSharp.Port" />
</ItemGroup>
<ItemGroup Condition=" '$(TargetFramework)' == 'netstandard2.1' ">

View File

@@ -2,6 +2,7 @@ using System;
using System.Buffers;
using System.Collections.Generic;
using System.IO;
using System.Text;
using SharpCompress.Readers;
namespace SharpCompress;
@@ -434,4 +435,17 @@ public static class Utility
buffer[offset + 2] = (byte)(number >> 8);
buffer[offset + 3] = (byte)number;
}
public static string ReplaceInvalidFileNameChars(string fileName)
{
    // Replace every character that is illegal in a file name on the
    // current platform with an underscore; all other characters pass
    // through unchanged.
    var invalid = Path.GetInvalidFileNameChars();
    var chars = fileName.ToCharArray();
    for (var i = 0; i < chars.Length; i++)
    {
        if (Array.IndexOf(invalid, chars[i]) >= 0)
        {
            chars[i] = '_';
        }
    }
    return new string(chars);
}
}

View File

@@ -53,9 +53,9 @@
},
"ZstdSharp.Port": {
"type": "Direct",
"requested": "[0.8.0, )",
"resolved": "0.8.0",
"contentHash": "Z62eNBIu8E8YtbqlMy57tK3dV1+m2b9NhPeaYovB5exmLKvrGCqOhJTzrEUH5VyUWU6vwX3c1XHJGhW5HVs8dA==",
"requested": "[0.8.1, )",
"resolved": "0.8.1",
"contentHash": "19tNz33kn2EkyViFXuxfVn338UJaRmkwBphVqP2dVJIYQUQgFrgG5h061mxkRRg1Ax6r+6WOj1FxaFZ5qaWqqg==",
"dependencies": {
"Microsoft.Bcl.AsyncInterfaces": "5.0.0",
"System.Memory": "4.5.5"
@@ -152,9 +152,9 @@
},
"ZstdSharp.Port": {
"type": "Direct",
"requested": "[0.8.0, )",
"resolved": "0.8.0",
"contentHash": "Z62eNBIu8E8YtbqlMy57tK3dV1+m2b9NhPeaYovB5exmLKvrGCqOhJTzrEUH5VyUWU6vwX3c1XHJGhW5HVs8dA==",
"requested": "[0.8.1, )",
"resolved": "0.8.1",
"contentHash": "19tNz33kn2EkyViFXuxfVn338UJaRmkwBphVqP2dVJIYQUQgFrgG5h061mxkRRg1Ax6r+6WOj1FxaFZ5qaWqqg==",
"dependencies": {
"Microsoft.Bcl.AsyncInterfaces": "5.0.0",
"System.Memory": "4.5.5"
@@ -228,9 +228,9 @@
},
"ZstdSharp.Port": {
"type": "Direct",
"requested": "[0.8.0, )",
"resolved": "0.8.0",
"contentHash": "Z62eNBIu8E8YtbqlMy57tK3dV1+m2b9NhPeaYovB5exmLKvrGCqOhJTzrEUH5VyUWU6vwX3c1XHJGhW5HVs8dA==",
"requested": "[0.8.1, )",
"resolved": "0.8.1",
"contentHash": "19tNz33kn2EkyViFXuxfVn338UJaRmkwBphVqP2dVJIYQUQgFrgG5h061mxkRRg1Ax6r+6WOj1FxaFZ5qaWqqg==",
"dependencies": {
"System.Runtime.CompilerServices.Unsafe": "6.0.0"
}
@@ -285,9 +285,9 @@
},
"ZstdSharp.Port": {
"type": "Direct",
"requested": "[0.8.0, )",
"resolved": "0.8.0",
"contentHash": "Z62eNBIu8E8YtbqlMy57tK3dV1+m2b9NhPeaYovB5exmLKvrGCqOhJTzrEUH5VyUWU6vwX3c1XHJGhW5HVs8dA=="
"requested": "[0.8.1, )",
"resolved": "0.8.1",
"contentHash": "19tNz33kn2EkyViFXuxfVn338UJaRmkwBphVqP2dVJIYQUQgFrgG5h061mxkRRg1Ax6r+6WOj1FxaFZ5qaWqqg=="
},
"Microsoft.Build.Tasks.Git": {
"type": "Transitive",
@@ -303,9 +303,9 @@
"net8.0": {
"Microsoft.NET.ILLink.Tasks": {
"type": "Direct",
"requested": "[8.0.4, )",
"resolved": "8.0.4",
"contentHash": "PZb5nfQ+U19nhnmnR9T1jw+LTmozhuG2eeuzuW5A7DqxD/UXW2ucjmNJqnqOuh8rdPzM3MQXoF8AfFCedJdCUw=="
"requested": "[8.0.7, )",
"resolved": "8.0.7",
"contentHash": "iI52ptEKby2ymQ6B7h4TWbFmm85T4VvLgc/HvS45Yr3lgi4IIFbQtjON3bQbX/Vc94jXNSLvrDOp5Kh7SJyFYQ=="
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
@@ -319,9 +319,9 @@
},
"ZstdSharp.Port": {
"type": "Direct",
"requested": "[0.8.0, )",
"resolved": "0.8.0",
"contentHash": "Z62eNBIu8E8YtbqlMy57tK3dV1+m2b9NhPeaYovB5exmLKvrGCqOhJTzrEUH5VyUWU6vwX3c1XHJGhW5HVs8dA=="
"requested": "[0.8.1, )",
"resolved": "0.8.1",
"contentHash": "19tNz33kn2EkyViFXuxfVn338UJaRmkwBphVqP2dVJIYQUQgFrgG5h061mxkRRg1Ax6r+6WOj1FxaFZ5qaWqqg=="
},
"Microsoft.Build.Tasks.Git": {
"type": "Transitive",

View File

@@ -73,27 +73,45 @@ public class ArchiveTests : ReaderTests
}
}
protected void ArchiveStreamRead(string testArchive, ReaderOptions? readerOptions = null)
protected void ArchiveStreamRead(string testArchive, ReaderOptions? readerOptions = null) =>
ArchiveStreamRead(ArchiveFactory.AutoFactory, testArchive, readerOptions);
protected void ArchiveStreamRead(
IArchiveFactory archiveFactory,
string testArchive,
ReaderOptions? readerOptions = null
)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
ArchiveStreamRead(readerOptions, testArchive);
ArchiveStreamRead(archiveFactory, readerOptions, testArchive);
}
protected void ArchiveStreamRead(
ReaderOptions? readerOptions = null,
params string[] testArchives
) => ArchiveStreamRead(ArchiveFactory.AutoFactory, readerOptions, testArchives);
protected void ArchiveStreamRead(
IArchiveFactory archiveFactory,
ReaderOptions? readerOptions = null,
params string[] testArchives
) =>
ArchiveStreamRead(
archiveFactory,
readerOptions,
testArchives.Select(x => Path.Combine(TEST_ARCHIVES_PATH, x))
);
protected void ArchiveStreamRead(ReaderOptions? readerOptions, IEnumerable<string> testArchives)
protected void ArchiveStreamRead(
IArchiveFactory archiveFactory,
ReaderOptions? readerOptions,
IEnumerable<string> testArchives
)
{
foreach (var path in testArchives)
{
using (var stream = NonDisposingStream.Create(File.OpenRead(path), true))
using (var archive = ArchiveFactory.Open(stream, readerOptions))
using (var archive = archiveFactory.Open(stream, readerOptions))
{
try
{
@@ -218,10 +236,14 @@ public class ArchiveTests : ReaderTests
}
}
protected void ArchiveFileRead(string testArchive, ReaderOptions? readerOptions = null)
protected void ArchiveFileRead(
IArchiveFactory archiveFactory,
string testArchive,
ReaderOptions? readerOptions = null
)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
using (var archive = ArchiveFactory.Open(testArchive, readerOptions))
using (var archive = archiveFactory.Open(new FileInfo(testArchive), readerOptions))
{
foreach (var entry in archive.Entries.Where(entry => !entry.IsDirectory))
{
@@ -234,6 +256,9 @@ public class ArchiveTests : ReaderTests
VerifyFiles();
}
protected void ArchiveFileRead(string testArchive, ReaderOptions? readerOptions = null) =>
ArchiveFileRead(ArchiveFactory.AutoFactory, testArchive, readerOptions);
protected void ArchiveFileSkip(
string testArchive,
string fileOrder,

View File

@@ -0,0 +1,21 @@
using System;
using System.IO;
using SharpCompress.Common;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.GZip;
using Xunit;
namespace SharpCompress.Test.BZip2;
public class BZip2ReaderTests : ReaderTests
{
    /// <summary>
    /// A stream starting with the BZip2 magic (0x42 0x5a 0x68 = "BZh") but
    /// truncated should make ReaderFactory.Open throw rather than return a reader.
    /// </summary>
    [Fact]
    public void BZip2_Reader_Factory()
    {
        Stream stream = new MemoryStream(
            new byte[] { 0x42, 0x5a, 0x68, 0x34, 0x31, 0x41, 0x59, 0x26, 0x53, 0x59, 0x35 }
        );
        // Generic overload is the idiomatic xunit form (Assert.Throws<T>)
        // and verifies the same exception type as typeof(...) did.
        Assert.Throws<InvalidOperationException>(() => ReaderFactory.Open(stream));
    }
}

View File

@@ -4,6 +4,7 @@ using System.Linq;
using SharpCompress.Archives;
using SharpCompress.Archives.GZip;
using SharpCompress.Archives.Tar;
using SharpCompress.Common;
using Xunit;
namespace SharpCompress.Test.GZip;
@@ -106,7 +107,7 @@ public class GZipArchiveTests : ArchiveTests
}
[Fact]
public void TestGzCrcWithMostSignificaltBitNotNegative()
public void TestGzCrcWithMostSignificantBitNotNegative()
{
using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"));
using var archive = GZipArchive.Open(stream);
@@ -116,4 +117,12 @@ public class GZipArchiveTests : ArchiveTests
Assert.InRange(entry.Crc, 0L, 0xFFFFFFFFL);
}
}
[Fact]
public void TestGzArchiveTypeGzip()
{
using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"));
using var archive = GZipArchive.Open(stream);
Assert.Equal(archive.Type, ArchiveType.GZip);
}
}

View File

@@ -4,6 +4,7 @@ using System.Linq;
using SharpCompress.Archives;
using SharpCompress.Archives.SevenZip;
using SharpCompress.Common;
using SharpCompress.Factories;
using SharpCompress.Readers;
using Xunit;
@@ -54,6 +55,14 @@ public class SevenZipArchiveTests : ArchiveTests
[Fact]
public void SevenZipArchive_LZMA2_PathRead() => ArchiveFileRead("7Zip.LZMA2.7z");
[Fact]
public void SevenZipArchive_LZMA2_EXE_StreamRead() =>
ArchiveStreamRead(new SevenZipFactory(), "7Zip.LZMA2.exe", new() { LookForHeader = true });
[Fact]
public void SevenZipArchive_LZMA2_EXE_PathRead() =>
ArchiveFileRead(new SevenZipFactory(), "7Zip.LZMA2.exe", new() { LookForHeader = true });
[Fact]
public void SevenZipArchive_LZMA2AES_StreamRead() =>
ArchiveStreamRead("7Zip.LZMA2.Aes.7z", new ReaderOptions { Password = "testpassword" });

View File

@@ -287,4 +287,12 @@ public class TarArchiveTests : ArchiveTests
Assert.Equal(2, numberOfEntries);
}
[Fact]
public void Tar_Detect_Test()
{
var isTar = TarArchive.IsTarFile(Path.Combine(TEST_ARCHIVES_PATH, "false.positive.tar"));
Assert.False(isTar);
}
}

View File

@@ -16,14 +16,6 @@ public class ZipArchiveTests : ArchiveTests
{
public ZipArchiveTests() => UseExtensionInsteadOfNameToVerify = true;
[Fact]
public void Zip_Shrink_ArchiveStreamRead()
{
UseExtensionInsteadOfNameToVerify = true;
UseCaseInsensitiveToVerify = true;
ArchiveStreamRead("Zip.shrink.zip");
}
[Fact]
public void Zip_ZipX_ArchiveStreamRead() => ArchiveStreamRead("Zip.zipx");
@@ -183,6 +175,54 @@ public class ZipArchiveTests : ArchiveTests
[Fact]
public void Zip_Zip64_ArchiveFileRead() => ArchiveFileRead("Zip.zip64.zip");
[Fact]
public void Zip_Shrink_ArchiveStreamRead()
{
UseExtensionInsteadOfNameToVerify = true;
UseCaseInsensitiveToVerify = true;
ArchiveStreamRead("Zip.shrink.zip");
}
[Fact]
public void Zip_Implode_ArchiveStreamRead()
{
UseExtensionInsteadOfNameToVerify = true;
UseCaseInsensitiveToVerify = true;
ArchiveStreamRead("Zip.implode.zip");
}
[Fact]
public void Zip_Reduce1_ArchiveStreamRead()
{
UseExtensionInsteadOfNameToVerify = true;
UseCaseInsensitiveToVerify = true;
ArchiveStreamRead("Zip.reduce1.zip");
}
[Fact]
public void Zip_Reduce2_ArchiveStreamRead()
{
UseExtensionInsteadOfNameToVerify = true;
UseCaseInsensitiveToVerify = true;
ArchiveStreamRead("Zip.reduce2.zip");
}
[Fact]
public void Zip_Reduce3_ArchiveStreamRead()
{
UseExtensionInsteadOfNameToVerify = true;
UseCaseInsensitiveToVerify = true;
ArchiveStreamRead("Zip.reduce3.zip");
}
[Fact]
public void Zip_Reduce4_ArchiveStreamRead()
{
UseExtensionInsteadOfNameToVerify = true;
UseCaseInsensitiveToVerify = true;
ArchiveStreamRead("Zip.reduce4.zip");
}
[Fact]
public void Zip_Random_Write_Remove()
{

View File

@@ -13,11 +13,11 @@
},
"Microsoft.NET.Test.Sdk": {
"type": "Direct",
"requested": "[17.9.0, )",
"resolved": "17.9.0",
"contentHash": "7GUNAUbJYn644jzwLm5BD3a2p9C1dmP8Hr6fDPDxgItQk9hBs1Svdxzz07KQ/UphMSmgza9AbijBJGmw5D658A==",
"requested": "[17.10.0, )",
"resolved": "17.10.0",
"contentHash": "0/2HeACkaHEYU3wc83YlcD2Fi4LMtECJjqrtvw0lPi9DCEa35zSPt1j4fuvM8NagjDqJuh1Ja35WcRtn1Um6/A==",
"dependencies": {
"Microsoft.CodeCoverage": "17.9.0"
"Microsoft.CodeCoverage": "17.10.0"
}
},
"Microsoft.NETFramework.ReferenceAssemblies": {
@@ -29,24 +29,34 @@
"Microsoft.NETFramework.ReferenceAssemblies.net462": "1.0.3"
}
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==",
"dependencies": {
"Microsoft.Build.Tasks.Git": "8.0.0",
"Microsoft.SourceLink.Common": "8.0.0"
}
},
"xunit": {
"type": "Direct",
"requested": "[2.7.1, )",
"resolved": "2.7.1",
"contentHash": "9o050yCLzPvHxbrAkaHnI1j+YfPvRw+/ncvKbTfzIhO4JvQA0rPgoICJxXHMkscfgXmLFwZ8107ehnMUVzE23A==",
"requested": "[2.9.0, )",
"resolved": "2.9.0",
"contentHash": "PtU3rZ0ThdmdJqTbK7GkgFf6iBaCR6Q0uvJHznID+XEYk2v6O/b7sRxqnbi3B2gRDXxjTqMkVNayzwsqsFUxRw==",
"dependencies": {
"xunit.analyzers": "1.12.0",
"xunit.assert": "2.7.1",
"xunit.core": "[2.7.1]"
"xunit.analyzers": "1.15.0",
"xunit.assert": "2.9.0",
"xunit.core": "[2.9.0]"
}
},
"xunit.runner.visualstudio": {
"type": "Direct",
"requested": "[2.5.8, )",
"resolved": "2.5.8",
"contentHash": "ZJTm71neOfZcUnqdyY0A0Qgcg1162DoOq6+VpCCsOaD9rwCK5alcjOEHeu17sEekzq4qNv3kyelx6lUMsAt/eA==",
"requested": "[2.8.2, )",
"resolved": "2.8.2",
"contentHash": "vm1tbfXhFmjFMUmS4M0J0ASXz3/U5XvXBa6DOQUL3fEz4Vt6YPhv+ESCarx6M6D+9kJkJYZKCNvJMas1+nVfmQ==",
"dependencies": {
"Microsoft.TestPlatform.ObjectModel": "17.9.0"
"Microsoft.TestPlatform.ObjectModel": "17.10.0"
}
},
"Xunit.SkippableFact": {
@@ -59,20 +69,30 @@
"xunit.extensibility.execution": "2.4.0"
}
},
"Microsoft.Build.Tasks.Git": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
},
"Microsoft.CodeCoverage": {
"type": "Transitive",
"resolved": "17.9.0",
"contentHash": "RGD37ZSrratfScYXm7M0HjvxMxZyWZL4jm+XgMZbkIY1UPgjUpbNA/t+WTGj/rC/0Hm9A3IrH3ywbKZkOCnoZA=="
"resolved": "17.10.0",
"contentHash": "yC7oSlnR54XO5kOuHlVOKtxomNNN1BWXX8lK1G2jaPXT9sUok7kCOoA4Pgs0qyFaCtMrNsprztYMeoEGqCm4uA=="
},
"Microsoft.NETFramework.ReferenceAssemblies.net462": {
"type": "Transitive",
"resolved": "1.0.3",
"contentHash": "IzAV30z22ESCeQfxP29oVf4qEo8fBGXLXSU6oacv/9Iqe6PzgHDKCaWfwMBak7bSJQM0F5boXWoZS+kChztRIQ=="
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
},
"Microsoft.TestPlatform.ObjectModel": {
"type": "Transitive",
"resolved": "17.9.0",
"contentHash": "1ilw/8vgmjLyKU+2SKXKXaOqpYFJCQfGqGz+x0cosl981VzjrY74Sv6qAJv+neZMZ9ZMxF3ArN6kotaQ4uvEBw==",
"resolved": "17.10.0",
"contentHash": "KkwhjQevuDj0aBRoPLY6OLAhGqbPUEBuKLbaCs0kUVw29qiOYncdORd4mLVJbn9vGZ7/iFGQ/+AoJl0Tu5Umdg==",
"dependencies": {
"System.Reflection.Metadata": "1.6.0"
}
@@ -125,37 +145,37 @@
},
"xunit.analyzers": {
"type": "Transitive",
"resolved": "1.12.0",
"contentHash": "w23LH3aXade2WXKvXi0oA/uV15fpgUMjsPq1x91iQckzgWApgAiijNHmfFQtqNPm41wfrdbRl7nSJRd0yux/dw=="
"resolved": "1.15.0",
"contentHash": "s+M8K/Rtlgr6CmD7AYQKrNTvT5sh0l0ZKDoZ3Z/ExhlIwfV9mGAMR4f7KqIB7SSK7ZOhqDTgTUMYPmKfmvWUWQ=="
},
"xunit.assert": {
"type": "Transitive",
"resolved": "2.7.1",
"contentHash": "JqvXS4yX+PtJn5BuqoKkYav7I0g4nXcxRbGTomDwVQjFccOdyfYKpuPOHX/DqrPCcL+MIHrGVdP3bveUXlvdnA=="
"resolved": "2.9.0",
"contentHash": "Z/1pyia//860wEYTKn6Q5dmgikJdRjgE4t5AoxJkK8oTmidzPLEPG574kmm7LFkMLbH6Frwmgb750kcyR+hwoA=="
},
"xunit.core": {
"type": "Transitive",
"resolved": "2.7.1",
"contentHash": "hcODgS+RXdjmXq0zQzmRbERQgY+bAGGx1bdH3370t/8CTGmIEU2qAc1dQAjIRpARsacR0cj6LLJDUF5BNQNKTQ==",
"resolved": "2.9.0",
"contentHash": "uRaop9tZsZMCaUS4AfbSPGYHtvywWnm8XXFNUqII7ShWyDBgdchY6gyDNgO4AK1Lv/1NNW61Zq63CsDV6oH6Jg==",
"dependencies": {
"xunit.extensibility.core": "[2.7.1]",
"xunit.extensibility.execution": "[2.7.1]"
"xunit.extensibility.core": "[2.9.0]",
"xunit.extensibility.execution": "[2.9.0]"
}
},
"xunit.extensibility.core": {
"type": "Transitive",
"resolved": "2.7.1",
"contentHash": "yLX4XlBFkvNYCzf+DEzlNk45KsSlu9W93IJHBmtUP96qZ9XyRYDFlwMj6BCcOhDKVNrZxSM8bqu4F/Qud4ehxA==",
"resolved": "2.9.0",
"contentHash": "zjDEUSxsr6UNij4gIwCgMqQox+oLDPRZ+mubwWLci+SssPBFQD1xeRR4SvgBuXqbE0QXCJ/STVTp+lxiB5NLVA==",
"dependencies": {
"xunit.abstractions": "2.0.3"
}
},
"xunit.extensibility.execution": {
"type": "Transitive",
"resolved": "2.7.1",
"contentHash": "ei3dKF8agj4UKkJ6KkaZ5/Gcif3if6hBsyzegIQJonZDSKZFvb0AyKtyOhDfggBaXVL5iXZExITdRkfjC95yhw==",
"resolved": "2.9.0",
"contentHash": "5ZTQZvmPLlBw6QzCOwM0KnMsZw6eGjbmC176QHZlcbQoMhGIeGcYzYwn5w9yXxf+4phtplMuVqTpTbFDQh2bqQ==",
"dependencies": {
"xunit.extensibility.core": "[2.7.1]"
"xunit.extensibility.core": "[2.9.0]"
}
},
"sharpcompress": {
@@ -164,7 +184,7 @@
"Microsoft.Bcl.AsyncInterfaces": "[8.0.0, )",
"System.Memory": "[4.5.5, )",
"System.Text.Encoding.CodePages": "[8.0.0, )",
"ZstdSharp.Port": "[0.8.0, )"
"ZstdSharp.Port": "[0.8.1, )"
}
},
"Microsoft.Bcl.AsyncInterfaces": {
@@ -199,9 +219,9 @@
},
"ZstdSharp.Port": {
"type": "CentralTransitive",
"requested": "[0.8.0, )",
"resolved": "0.8.0",
"contentHash": "Z62eNBIu8E8YtbqlMy57tK3dV1+m2b9NhPeaYovB5exmLKvrGCqOhJTzrEUH5VyUWU6vwX3c1XHJGhW5HVs8dA==",
"requested": "[0.8.1, )",
"resolved": "0.8.1",
"contentHash": "19tNz33kn2EkyViFXuxfVn338UJaRmkwBphVqP2dVJIYQUQgFrgG5h061mxkRRg1Ax6r+6WOj1FxaFZ5qaWqqg==",
"dependencies": {
"Microsoft.Bcl.AsyncInterfaces": "5.0.0",
"System.Memory": "4.5.5"
@@ -220,12 +240,22 @@
},
"Microsoft.NET.Test.Sdk": {
"type": "Direct",
"requested": "[17.9.0, )",
"resolved": "17.9.0",
"contentHash": "7GUNAUbJYn644jzwLm5BD3a2p9C1dmP8Hr6fDPDxgItQk9hBs1Svdxzz07KQ/UphMSmgza9AbijBJGmw5D658A==",
"requested": "[17.10.0, )",
"resolved": "17.10.0",
"contentHash": "0/2HeACkaHEYU3wc83YlcD2Fi4LMtECJjqrtvw0lPi9DCEa35zSPt1j4fuvM8NagjDqJuh1Ja35WcRtn1Um6/A==",
"dependencies": {
"Microsoft.CodeCoverage": "17.9.0",
"Microsoft.TestPlatform.TestHost": "17.9.0"
"Microsoft.CodeCoverage": "17.10.0",
"Microsoft.TestPlatform.TestHost": "17.10.0"
}
},
"Microsoft.SourceLink.GitHub": {
"type": "Direct",
"requested": "[8.0.0, )",
"resolved": "8.0.0",
"contentHash": "G5q7OqtwIyGTkeIOAc3u2ZuV/kicQaec5EaRnc0pIeSnh9LUjj+PYQrJYBURvDt7twGl2PKA7nSN0kz1Zw5bnQ==",
"dependencies": {
"Microsoft.Build.Tasks.Git": "8.0.0",
"Microsoft.SourceLink.Common": "8.0.0"
}
},
"Mono.Posix.NETStandard": {
@@ -236,20 +266,20 @@
},
"xunit": {
"type": "Direct",
"requested": "[2.7.1, )",
"resolved": "2.7.1",
"contentHash": "9o050yCLzPvHxbrAkaHnI1j+YfPvRw+/ncvKbTfzIhO4JvQA0rPgoICJxXHMkscfgXmLFwZ8107ehnMUVzE23A==",
"requested": "[2.9.0, )",
"resolved": "2.9.0",
"contentHash": "PtU3rZ0ThdmdJqTbK7GkgFf6iBaCR6Q0uvJHznID+XEYk2v6O/b7sRxqnbi3B2gRDXxjTqMkVNayzwsqsFUxRw==",
"dependencies": {
"xunit.analyzers": "1.12.0",
"xunit.assert": "2.7.1",
"xunit.core": "[2.7.1]"
"xunit.analyzers": "1.15.0",
"xunit.assert": "2.9.0",
"xunit.core": "[2.9.0]"
}
},
"xunit.runner.visualstudio": {
"type": "Direct",
"requested": "[2.5.8, )",
"resolved": "2.5.8",
"contentHash": "ZJTm71neOfZcUnqdyY0A0Qgcg1162DoOq6+VpCCsOaD9rwCK5alcjOEHeu17sEekzq4qNv3kyelx6lUMsAt/eA=="
"requested": "[2.8.2, )",
"resolved": "2.8.2",
"contentHash": "vm1tbfXhFmjFMUmS4M0J0ASXz3/U5XvXBa6DOQUL3fEz4Vt6YPhv+ESCarx6M6D+9kJkJYZKCNvJMas1+nVfmQ=="
},
"Xunit.SkippableFact": {
"type": "Direct",
@@ -261,25 +291,35 @@
"xunit.extensibility.execution": "2.4.0"
}
},
"Microsoft.Build.Tasks.Git": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "bZKfSIKJRXLTuSzLudMFte/8CempWjVamNUR5eHJizsy+iuOuO/k2gnh7W0dHJmYY0tBf+gUErfluCv5mySAOQ=="
},
"Microsoft.CodeCoverage": {
"type": "Transitive",
"resolved": "17.9.0",
"contentHash": "RGD37ZSrratfScYXm7M0HjvxMxZyWZL4jm+XgMZbkIY1UPgjUpbNA/t+WTGj/rC/0Hm9A3IrH3ywbKZkOCnoZA=="
"resolved": "17.10.0",
"contentHash": "yC7oSlnR54XO5kOuHlVOKtxomNNN1BWXX8lK1G2jaPXT9sUok7kCOoA4Pgs0qyFaCtMrNsprztYMeoEGqCm4uA=="
},
"Microsoft.SourceLink.Common": {
"type": "Transitive",
"resolved": "8.0.0",
"contentHash": "dk9JPxTCIevS75HyEQ0E4OVAFhB2N+V9ShCXf8Q6FkUQZDkgLI12y679Nym1YqsiSysuQskT7Z+6nUf3yab6Vw=="
},
"Microsoft.TestPlatform.ObjectModel": {
"type": "Transitive",
"resolved": "17.9.0",
"contentHash": "1ilw/8vgmjLyKU+2SKXKXaOqpYFJCQfGqGz+x0cosl981VzjrY74Sv6qAJv+neZMZ9ZMxF3ArN6kotaQ4uvEBw==",
"resolved": "17.10.0",
"contentHash": "KkwhjQevuDj0aBRoPLY6OLAhGqbPUEBuKLbaCs0kUVw29qiOYncdORd4mLVJbn9vGZ7/iFGQ/+AoJl0Tu5Umdg==",
"dependencies": {
"System.Reflection.Metadata": "1.6.0"
}
},
"Microsoft.TestPlatform.TestHost": {
"type": "Transitive",
"resolved": "17.9.0",
"contentHash": "Spmg7Wx49Ya3SxBjyeAR+nQpjMTKZwTwpZ7KyeOTIqI/WHNPnBU4HUvl5kuHPQAwGWqMy4FGZja1HvEwvoaDiA==",
"resolved": "17.10.0",
"contentHash": "LWpMdfqhHvcUkeMCvNYJO8QlPLlYz9XPPb+ZbaXIKhdmjAV0wqTSrTiW5FLaf7RRZT50AQADDOYMOe0HxDxNgA==",
"dependencies": {
"Microsoft.TestPlatform.ObjectModel": "17.9.0",
"Microsoft.TestPlatform.ObjectModel": "17.10.0",
"Newtonsoft.Json": "13.0.1"
}
},
@@ -318,50 +358,50 @@
},
"xunit.analyzers": {
"type": "Transitive",
"resolved": "1.12.0",
"contentHash": "w23LH3aXade2WXKvXi0oA/uV15fpgUMjsPq1x91iQckzgWApgAiijNHmfFQtqNPm41wfrdbRl7nSJRd0yux/dw=="
"resolved": "1.15.0",
"contentHash": "s+M8K/Rtlgr6CmD7AYQKrNTvT5sh0l0ZKDoZ3Z/ExhlIwfV9mGAMR4f7KqIB7SSK7ZOhqDTgTUMYPmKfmvWUWQ=="
},
"xunit.assert": {
"type": "Transitive",
"resolved": "2.7.1",
"contentHash": "JqvXS4yX+PtJn5BuqoKkYav7I0g4nXcxRbGTomDwVQjFccOdyfYKpuPOHX/DqrPCcL+MIHrGVdP3bveUXlvdnA=="
"resolved": "2.9.0",
"contentHash": "Z/1pyia//860wEYTKn6Q5dmgikJdRjgE4t5AoxJkK8oTmidzPLEPG574kmm7LFkMLbH6Frwmgb750kcyR+hwoA=="
},
"xunit.core": {
"type": "Transitive",
"resolved": "2.7.1",
"contentHash": "hcODgS+RXdjmXq0zQzmRbERQgY+bAGGx1bdH3370t/8CTGmIEU2qAc1dQAjIRpARsacR0cj6LLJDUF5BNQNKTQ==",
"resolved": "2.9.0",
"contentHash": "uRaop9tZsZMCaUS4AfbSPGYHtvywWnm8XXFNUqII7ShWyDBgdchY6gyDNgO4AK1Lv/1NNW61Zq63CsDV6oH6Jg==",
"dependencies": {
"xunit.extensibility.core": "[2.7.1]",
"xunit.extensibility.execution": "[2.7.1]"
"xunit.extensibility.core": "[2.9.0]",
"xunit.extensibility.execution": "[2.9.0]"
}
},
"xunit.extensibility.core": {
"type": "Transitive",
"resolved": "2.7.1",
"contentHash": "yLX4XlBFkvNYCzf+DEzlNk45KsSlu9W93IJHBmtUP96qZ9XyRYDFlwMj6BCcOhDKVNrZxSM8bqu4F/Qud4ehxA==",
"resolved": "2.9.0",
"contentHash": "zjDEUSxsr6UNij4gIwCgMqQox+oLDPRZ+mubwWLci+SssPBFQD1xeRR4SvgBuXqbE0QXCJ/STVTp+lxiB5NLVA==",
"dependencies": {
"xunit.abstractions": "2.0.3"
}
},
"xunit.extensibility.execution": {
"type": "Transitive",
"resolved": "2.7.1",
"contentHash": "ei3dKF8agj4UKkJ6KkaZ5/Gcif3if6hBsyzegIQJonZDSKZFvb0AyKtyOhDfggBaXVL5iXZExITdRkfjC95yhw==",
"resolved": "2.9.0",
"contentHash": "5ZTQZvmPLlBw6QzCOwM0KnMsZw6eGjbmC176QHZlcbQoMhGIeGcYzYwn5w9yXxf+4phtplMuVqTpTbFDQh2bqQ==",
"dependencies": {
"xunit.extensibility.core": "[2.7.1]"
"xunit.extensibility.core": "[2.9.0]"
}
},
"sharpcompress": {
"type": "Project",
"dependencies": {
"ZstdSharp.Port": "[0.8.0, )"
"ZstdSharp.Port": "[0.8.1, )"
}
},
"ZstdSharp.Port": {
"type": "CentralTransitive",
"requested": "[0.8.0, )",
"resolved": "0.8.0",
"contentHash": "Z62eNBIu8E8YtbqlMy57tK3dV1+m2b9NhPeaYovB5exmLKvrGCqOhJTzrEUH5VyUWU6vwX3c1XHJGhW5HVs8dA=="
"requested": "[0.8.1, )",
"resolved": "0.8.1",
"contentHash": "19tNz33kn2EkyViFXuxfVn338UJaRmkwBphVqP2dVJIYQUQgFrgG5h061mxkRRg1Ax6r+6WOj1FxaFZ5qaWqqg=="
}
}
}

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.