Compare commits


19 Commits
0.27 ... 0.28

Author SHA1 Message Date
Adam Hathcock
403baf05a6 Mark for 0.28 2021-02-14 13:07:35 +00:00
Adam Hathcock
a51b56339a Fix complete entry check for RAR files. 2021-02-14 13:00:43 +00:00
Adam Hathcock
f48a6d47dc Merge pull request #571 from Erior/feature/540
Proposal fixing Extra bytes written when setting zip64
2021-02-14 12:54:17 +00:00
Adam Hathcock
5b52463e4c Merge pull request #570 from Erior/feature/555
Proposal for handling Zip with long comment
2021-02-14 12:52:42 +00:00
Adam Hathcock
6f08bb72d8 Merge pull request #569 from BrendanGrant/improve_how_missing_parts_are_handled
Improve how missing parts are handled
2021-02-14 12:49:49 +00:00
Lars Vahlenberg
045093f453 Linux is case sensitive with file names 2021-02-14 10:26:26 +01:00
Lars Vahlenberg
566c49ce53 Proposal
Zip64 requires version 4.5
Number of disks is 4 bytes and not 8
2021-02-14 02:42:32 +01:00
Lars Vahlenberg
d1d2758ee0 Proposal for handling Zip with long comment 2021-02-13 23:57:03 +01:00
Brendan Grant
5b86c40d5b Properly detect if RAR is complete at the end or not 2021-02-13 13:34:57 -06:00
Brendan Grant
53393e744e Supporting reading contents of incomplete files 2021-02-13 13:33:43 -06:00
Adam Hathcock
2dd17e3882 Be explicit about zip64 extra field sizes. Formatting 2021-02-13 07:05:53 +00:00
Adam Hathcock
c4f7433584 Merge pull request #567 from Nanook/master
Zip64 Header and Size fix
2021-02-13 06:58:41 +00:00
Adam Hathcock
9405a7cf4b Merge pull request #568 from Bond-009/stackalloc
Use stackallocs where possible/sensible
2021-02-13 06:39:32 +00:00
Bond_009
cd677440ce Use stackallocs where possible/sensible 2021-02-12 20:20:15 +01:00
Craig Greenhill
c06f4bc5a8 Zip64 Header and Size fix 2021-02-11 09:37:59 +00:00
Adam Hathcock
4a7337b223 Merge pull request #563 from adamhathcock/add-reader-test-gzip
Fix Rewindable stream Length and add GZip Reader tests
2021-01-13 15:13:34 +00:00
Adam Hathcock
1d8afb817e Bump version 2021-01-13 14:41:25 +00:00
Adam Hathcock
0f06c3d934 Fix rewindable stream to expose length 2021-01-13 14:40:36 +00:00
Adam Hathcock
9d5cb8d119 Add GZip Reader tests 2021-01-13 10:42:59 +00:00
30 changed files with 251 additions and 171 deletions

View File

@@ -98,7 +98,7 @@ namespace SharpCompress.Archives.GZip
public static bool IsGZipFile(Stream stream)
{
// read the header on the first read
byte[] header = new byte[10];
Span<byte> header = stackalloc byte[10];
// workitem 8501: handle edge case (decompress empty stream)
if (!stream.ReadFully(header))
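
A recurring pattern in this release replaces small, short-lived byte[] buffers with stackalloc spans. As a rough illustration of that pattern (a sketch, not the library's code; the Stream.Read(Span&lt;byte&gt;) overload assumed here exists on netstandard2.1/.NET Core targets, while older targets rely on the extension shim changed later in this diff):

using System;
using System.IO;

static class GZipProbe
{
    // Hypothetical helper showing the stackalloc pattern: the 10-byte GZIP
    // header is read into a stack buffer, avoiding a heap allocation per call.
    public static bool LooksLikeGZip(Stream stream)
    {
        Span<byte> header = stackalloc byte[10];
        int total = 0;
        while (total < header.Length)
        {
            int read = stream.Read(header.Slice(total));
            if (read == 0)
            {
                return false; // stream shorter than a GZIP header
            }
            total += read;
        }
        return header[0] == 0x1F && header[1] == 0x8B; // GZIP magic bytes
    }
}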

View File

@@ -10,7 +10,8 @@ using SharpCompress.Readers.Rar;
namespace SharpCompress.Archives.Rar
{
public class RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
public class
RarArchive : AbstractArchive<RarArchiveEntry, RarVolume>
{
internal Lazy<IRarUnpack> UnpackV2017 { get; } = new Lazy<IRarUnpack>(() => new SharpCompress.Compressors.Rar.UnpackV2017.Unpack());
internal Lazy<IRarUnpack> UnpackV1 { get; } = new Lazy<IRarUnpack>(() => new SharpCompress.Compressors.Rar.UnpackV1.Unpack());
@@ -42,7 +43,7 @@ namespace SharpCompress.Archives.Rar
protected override IEnumerable<RarArchiveEntry> LoadEntries(IEnumerable<RarVolume> volumes)
{
return RarArchiveEntryFactory.GetEntries(this, volumes);
return RarArchiveEntryFactory.GetEntries(this, volumes, ReaderOptions);
}
protected override IEnumerable<RarVolume> LoadVolumes(IEnumerable<Stream> streams)

View File

@@ -6,6 +6,7 @@ using SharpCompress.Common;
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Compressors.Rar;
using SharpCompress.Readers;
namespace SharpCompress.Archives.Rar
{
@@ -13,11 +14,13 @@ namespace SharpCompress.Archives.Rar
{
private readonly ICollection<RarFilePart> parts;
private readonly RarArchive archive;
private readonly ReaderOptions readerOptions;
internal RarArchiveEntry(RarArchive archive, IEnumerable<RarFilePart> parts)
internal RarArchiveEntry(RarArchive archive, IEnumerable<RarFilePart> parts, ReaderOptions readerOptions)
{
this.parts = parts.ToList();
this.archive = archive;
this.readerOptions = readerOptions;
}
public override CompressionType CompressionType => CompressionType.Rar;
@@ -69,13 +72,14 @@ namespace SharpCompress.Archives.Rar
{
get
{
return parts.Select(fp => fp.FileHeader).Any(fh => !fh.IsSplitAfter);
var headers = parts.Select(x => x.FileHeader);
return !headers.First().IsSplitBefore && !headers.Last().IsSplitAfter;
}
}
private void CheckIncomplete()
{
if (!IsComplete)
if (!readerOptions.DisableCheckIncomplete && !IsComplete)
{
throw new IncompleteArchiveException("ArchiveEntry is incomplete and cannot perform this operation.");
}
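
For context, the completeness check above now requires the first part of a multi-volume entry not to be a continuation and the last part not to continue into a missing volume, replacing the earlier Any(!IsSplitAfter) test. A minimal sketch of that rule with simplified types (not the library's exact signature):

using System.Collections.Generic;

static bool IsEntryComplete(IReadOnlyList<(bool IsSplitBefore, bool IsSplitAfter)> parts) =>
    parts.Count > 0                          // at least one part is present
    && !parts[0].IsSplitBefore               // first part is not a continuation
    && !parts[parts.Count - 1].IsSplitAfter; // last part does not continue elsewhere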

View File

@@ -1,5 +1,6 @@
using System.Collections.Generic;
using SharpCompress.Common.Rar;
using SharpCompress.Readers;
namespace SharpCompress.Archives.Rar
{
@@ -36,11 +37,12 @@ namespace SharpCompress.Archives.Rar
}
internal static IEnumerable<RarArchiveEntry> GetEntries(RarArchive archive,
IEnumerable<RarVolume> rarParts)
IEnumerable<RarVolume> rarParts,
ReaderOptions readerOptions)
{
foreach (var groupedParts in GetMatchedFileParts(rarParts))
{
yield return new RarArchiveEntry(archive, groupedParts);
yield return new RarArchiveEntry(archive, groupedParts, readerOptions);
}
}
}

View File

@@ -110,13 +110,13 @@ namespace SharpCompress.Common.GZip
private string ReadZeroTerminatedString(Stream stream)
{
byte[] buf1 = new byte[1];
Span<byte> buf1 = stackalloc byte[1];
var list = new List<byte>();
bool done = false;
do
{
// workitem 7740
int n = stream.Read(buf1, 0, 1);
int n = stream.Read(buf1);
if (n != 1)
{
throw new ZlibException("Unexpected EOF reading GZIP header.");

View File

@@ -437,6 +437,7 @@ namespace SharpCompress.Common.Rar.Headers
internal long DataStartPosition { get; set; }
public Stream PackedStream { get; set; }
public bool IsSplitBefore => IsRar5 ? HasHeaderFlag(HeaderFlagsV5.SPLIT_BEFORE) : HasFlag(FileFlagsV4.SPLIT_BEFORE);
public bool IsSplitAfter => IsRar5 ? HasHeaderFlag(HeaderFlagsV5.SPLIT_AFTER) : HasFlag(FileFlagsV4.SPLIT_AFTER);
public bool IsDirectory => HasFlag(IsRar5 ? FileFlagsV5.DIRECTORY : FileFlagsV4.DIRECTORY);

View File

@@ -50,11 +50,11 @@ namespace SharpCompress.Common.Rar
if (sizeToRead > 0)
{
int alignedSize = sizeToRead + ((~sizeToRead + 1) & 0xf);
byte[] cipherText = new byte[RarRijndael.CRYPTO_BLOCK_SIZE];
Span<byte> cipherText = stackalloc byte[RarRijndael.CRYPTO_BLOCK_SIZE];
for (int i = 0; i < alignedSize / 16; i++)
{
//long ax = System.currentTimeMillis();
_actualStream.Read(cipherText, 0, RarRijndael.CRYPTO_BLOCK_SIZE);
_actualStream.Read(cipherText);
var readBytes = _rijndael.ProcessBlock(cipherText);
foreach (var readByte in readBytes)

View File

@@ -1517,6 +1517,7 @@ namespace SharpCompress.Common.SevenZip
}
}
byte[] buffer = null;
foreach (CExtractFolderInfo efi in extractFolderInfoVector)
{
int startIndex;
@@ -1553,7 +1554,7 @@ namespace SharpCompress.Common.SevenZip
Stream s = DecoderStreamHelper.CreateDecoderStream(_stream, folderStartPackPos, packSizes,
folderInfo, db.PasswordProvider);
byte[] buffer = new byte[4 << 10];
buffer ??= new byte[4 << 10];
for (; ; )
{
int processed = s.Read(buffer, 0, buffer.Length);

View File

@@ -97,7 +97,7 @@ namespace SharpCompress.Common.Tar.Headers
{
numPaddingBytes = BLOCK_SIZE;
}
output.Write(new byte[numPaddingBytes], 0, numPaddingBytes);
output.Write(stackalloc byte[numPaddingBytes]);
}
internal bool Read(BinaryReader reader)

View File

@@ -69,53 +69,37 @@ namespace SharpCompress.Common.Zip.Headers
Process();
}
//From the spec values are only in the extradata if the standard
//value is set to 0xFFFF, but if one of the sizes are present, both are.
//Hence if length == 4 volume only
// if length == 8 offset only
// if length == 12 offset + volume
// if length == 16 sizes only
// if length == 20 sizes + volume
// if length == 24 sizes + offset
// if length == 28 everything.
//It is unclear how many of these are used in the wild.
private void Process()
{
if (DataBytes.Length >= 8)
{
UncompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
}
if (DataBytes.Length >= 16)
{
CompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(8));
}
if (DataBytes.Length >= 24)
{
RelativeOffsetOfEntryHeader = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(16));
}
if (DataBytes.Length >= 28)
{
VolumeNumber = BinaryPrimitives.ReadUInt32LittleEndian(DataBytes.AsSpan(24));
}
switch (DataBytes.Length)
{
case 4:
VolumeNumber = BinaryPrimitives.ReadUInt32LittleEndian(DataBytes);
return;
case 8:
RelativeOffsetOfEntryHeader = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
return;
case 12:
RelativeOffsetOfEntryHeader = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
VolumeNumber = BinaryPrimitives.ReadUInt32LittleEndian(DataBytes.AsSpan(8));
return;
case 16:
UncompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
CompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(8));
return;
case 20:
UncompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
CompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(8));
VolumeNumber = BinaryPrimitives.ReadUInt32LittleEndian(DataBytes.AsSpan(16));
return;
case 24:
UncompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
CompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(8));
RelativeOffsetOfEntryHeader = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(16));
return;
case 28:
UncompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes);
CompressedSize = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(8));
RelativeOffsetOfEntryHeader = BinaryPrimitives.ReadInt64LittleEndian(DataBytes.AsSpan(16));
VolumeNumber = BinaryPrimitives.ReadUInt32LittleEndian(DataBytes.AsSpan(24));
return;
break;
default:
throw new ArchiveException("Unexpected size of of Zip64 extended information extra field");
throw new ArchiveException($"Unexpected size of of Zip64 extended information extra field: {DataBytes.Length}");
}
}
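
As a worked example of the length-driven layout described in the comment above (illustrative values, not taken from this change): a 12-byte Zip64 extra field carries only the 8-byte header offset followed by the 4-byte volume number.

using System;
using System.Buffers.Binary;

byte[] data = new byte[12];
BinaryPrimitives.WriteInt64LittleEndian(data, 0x1_0000_0000);         // header offset beyond 4 GiB
BinaryPrimitives.WriteUInt32LittleEndian(data.AsSpan(8), 0);          // volume (disk) number 0

long offset = BinaryPrimitives.ReadInt64LittleEndian(data);
uint volume = BinaryPrimitives.ReadUInt32LittleEndian(data.AsSpan(8));
Console.WriteLine($"offset=0x{offset:X}, volume={volume}");           // offset=0x100000000, volume=0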

View File

@@ -8,7 +8,10 @@ namespace SharpCompress.Common.Zip
{
internal sealed class SeekableZipHeaderFactory : ZipHeaderFactory
{
private const int MAX_ITERATIONS_FOR_DIRECTORY_HEADER = 4096;
private const int MINIMUM_EOCD_LENGTH = 22;
private const int ZIP64_EOCD_LENGTH = 20;
// Comment may be within 64kb + structure 22 bytes
private const int MAX_SEARCH_LENGTH_FOR_EOCD = 65557;
private bool _zip64;
internal SeekableZipHeaderFactory(string? password, ArchiveEncoding archiveEncoding)
@@ -20,14 +23,24 @@ namespace SharpCompress.Common.Zip
{
var reader = new BinaryReader(stream);
SeekBackToHeader(stream, reader, DIRECTORY_END_HEADER_BYTES);
SeekBackToHeader(stream, reader);
var eocd_location = stream.Position;
var entry = new DirectoryEndHeader();
entry.Read(reader);
if (entry.IsZip64)
{
_zip64 = true;
SeekBackToHeader(stream, reader, ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR);
// ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR should be before the EOCD
stream.Seek(eocd_location - ZIP64_EOCD_LENGTH - 4, SeekOrigin.Begin);
uint zip64_locator = reader.ReadUInt32();
if( zip64_locator != ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR )
{
throw new ArchiveException("Failed to locate the Zip64 Directory Locator");
}
var zip64Locator = new Zip64DirectoryEndLocatorHeader();
zip64Locator.Read(reader);
@@ -73,27 +86,49 @@ namespace SharpCompress.Common.Zip
}
}
private static void SeekBackToHeader(Stream stream, BinaryReader reader, uint headerSignature)
private static bool IsMatch( byte[] haystack, int position, byte[] needle)
{
long offset = 0;
uint signature;
int iterationCount = 0;
do
for( int i = 0; i < needle.Length; i++ )
{
if ((stream.Length + offset) - 4 < 0)
if( haystack[ position + i ] != needle[ i ] )
{
throw new ArchiveException("Failed to locate the Zip Header");
}
stream.Seek(offset - 4, SeekOrigin.End);
signature = reader.ReadUInt32();
offset--;
iterationCount++;
if (iterationCount > MAX_ITERATIONS_FOR_DIRECTORY_HEADER)
{
throw new ArchiveException("Could not find Zip file Directory at the end of the file. File may be corrupted.");
return false;
}
}
while (signature != headerSignature);
return true;
}
private static void SeekBackToHeader(Stream stream, BinaryReader reader)
{
// Minimum EOCD length
if (stream.Length < MINIMUM_EOCD_LENGTH)
{
throw new ArchiveException("Could not find Zip file Directory at the end of the file. File may be corrupted.");
}
int len = stream.Length < MAX_SEARCH_LENGTH_FOR_EOCD ? (int)stream.Length : MAX_SEARCH_LENGTH_FOR_EOCD;
// We search for marker in reverse to find the first occurance
byte[] needle = { 0x06, 0x05, 0x4b, 0x50 };
stream.Seek(-len, SeekOrigin.End);
byte[] seek = reader.ReadBytes(len);
// Search in reverse
Array.Reverse(seek);
var max_search_area = len - MINIMUM_EOCD_LENGTH;
for( int pos_from_end = 0; pos_from_end < max_search_area; ++pos_from_end)
{
if( IsMatch(seek, pos_from_end, needle) )
{
stream.Seek(-pos_from_end, SeekOrigin.End);
return;
}
}
throw new ArchiveException("Failed to locate the Zip Header");
}
internal LocalEntryHeader GetLocalHeader(Stream stream, DirectoryEntryHeader directoryEntryHeader)
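
The new lookup reflects the ZIP layout: the end-of-central-directory (EOCD) record is at least 22 bytes and may be preceded by a comment of up to 65,535 bytes, hence the 65,557-byte search window. The reversed needle (0x06 0x05 0x4b 0x50) is simply the PK\x05\x06 signature matched against a reversed buffer. A rough equivalent that scans the tail backwards without reversing it (a sketch under those assumptions, not the library's method):

using System;
using System.IO;

static long FindEndOfCentralDirectory(Stream stream)
{
    const int MinEocd = 22;                 // fixed portion of the EOCD record
    const int MaxSearch = 65_535 + MinEocd; // comment may be up to 64 KiB - 1
    if (stream.Length < MinEocd)
    {
        throw new InvalidDataException("Too small to contain an EOCD record.");
    }
    int len = (int)Math.Min(stream.Length, MaxSearch);
    stream.Seek(-len, SeekOrigin.End);
    byte[] tail = new byte[len];
    int total = 0;
    while (total < len)
    {
        int read = stream.Read(tail, total, len - total);
        if (read <= 0) { break; }
        total += read;
    }
    // Scan from the end so the match closest to end-of-file wins, skipping any
    // PK\x05\x06 bytes that merely happen to appear inside the comment.
    for (int i = len - MinEocd; i >= 0; i--)
    {
        if (tail[i] == 0x50 && tail[i + 1] == 0x4B && tail[i + 2] == 0x05 && tail[i + 3] == 0x06)
        {
            return stream.Length - len + i; // absolute offset of the EOCD signature
        }
    }
    throw new InvalidDataException("End of central directory record not found.");
}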

View File

@@ -75,7 +75,7 @@ namespace SharpCompress.Common.Zip
if (disposing)
{
//read out last 10 auth bytes
var ten = new byte[10];
Span<byte> ten = stackalloc byte[10];
_stream.ReadFully(ten);
_stream.Dispose();
}

View File

@@ -93,7 +93,7 @@ namespace SharpCompress.Common.Zip
}
case ZipCompressionMethod.PPMd:
{
var props = new byte[2];
Span<byte> props = stackalloc byte[2];
stream.ReadFully(props);
return new PpmdStream(new PpmdProperties(props), stream, false);
}

View File

@@ -256,17 +256,15 @@ namespace SharpCompress.Compressors.Deflate
}
// Read and potentially verify the GZIP trailer: CRC32 and size mod 2^32
byte[] trailer = new byte[8];
Span<byte> trailer = stackalloc byte[8];
// workitem 8679
if (_z.AvailableBytesIn != 8)
{
// Make sure we have read to the end of the stream
Array.Copy(_z.InputBuffer, _z.NextIn, trailer, 0, _z.AvailableBytesIn);
_z.InputBuffer.AsSpan(_z.NextIn, _z.AvailableBytesIn).CopyTo(trailer);
int bytesNeeded = 8 - _z.AvailableBytesIn;
int bytesRead = _stream.Read(trailer,
_z.AvailableBytesIn,
bytesNeeded);
int bytesRead = _stream.Read(trailer.Slice(_z.AvailableBytesIn, bytesNeeded));
if (bytesNeeded != bytesRead)
{
throw new ZlibException(String.Format(
@@ -276,12 +274,12 @@ namespace SharpCompress.Compressors.Deflate
}
else
{
Array.Copy(_z.InputBuffer, _z.NextIn, trailer, 0, trailer.Length);
_z.InputBuffer.AsSpan(_z.NextIn, trailer.Length).CopyTo(trailer);
}
Int32 crc32_expected = BinaryPrimitives.ReadInt32LittleEndian(trailer);
Int32 crc32_actual = crc.Crc32Result;
Int32 isize_expected = BinaryPrimitives.ReadInt32LittleEndian(trailer.AsSpan(4));
Int32 isize_expected = BinaryPrimitives.ReadInt32LittleEndian(trailer.Slice(4));
Int32 isize_actual = (Int32)(_z.TotalBytesOut & 0x00000000FFFFFFFF);
if (crc32_actual != crc32_expected)
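
For reference, the 8-byte GZIP trailer read above is the CRC-32 of the uncompressed data followed by ISIZE, the uncompressed length modulo 2^32, both little-endian. A small worked example with illustrative values:

using System;
using System.Buffers.Binary;

ReadOnlySpan<byte> trailer = stackalloc byte[8]
{
    0x78, 0x56, 0x34, 0x12,   // CRC-32 = 0x12345678
    0x0A, 0x00, 0x00, 0x00    // ISIZE  = 10 bytes of uncompressed data
};
int crc32 = BinaryPrimitives.ReadInt32LittleEndian(trailer);
int isize = BinaryPrimitives.ReadInt32LittleEndian(trailer.Slice(4));
Console.WriteLine($"crc32=0x{crc32:X8}, isize={isize}");   // crc32=0x12345678, isize=10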

View File

@@ -59,16 +59,16 @@ namespace SharpCompress.Compressors.LZMA
crc32Stream.Dispose();
var compressedCount = _countingWritableSubStream!.Count;
byte[] intBuf = new byte[8];
Span<byte> intBuf = stackalloc byte[8];
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, crc32Stream.Crc);
_countingWritableSubStream.Write(intBuf, 0, 4);
_countingWritableSubStream.Write(intBuf.Slice(0, 4));
BinaryPrimitives.WriteInt64LittleEndian(intBuf, _writeCount);
_countingWritableSubStream.Write(intBuf, 0, 8);
_countingWritableSubStream.Write(intBuf);
//total with headers
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, compressedCount + 6 + 20);
_countingWritableSubStream.Write(intBuf, 0, 8);
_countingWritableSubStream.Write(intBuf);
}
_finished = true;
}

View File

@@ -31,7 +31,11 @@ namespace SharpCompress.Compressors.PPMd
public PpmdVersion Version { get; } = PpmdVersion.I1;
internal ModelRestorationMethod RestorationMethod { get; }
public PpmdProperties(byte[] properties)
public PpmdProperties(byte[] properties) : this(properties.AsSpan())
{
}
public PpmdProperties(ReadOnlySpan<byte> properties)
{
if (properties.Length == 2)
{
@@ -43,7 +47,7 @@ namespace SharpCompress.Compressors.PPMd
else if (properties.Length == 5)
{
Version = PpmdVersion.H7Z;
AllocatorSize = BinaryPrimitives.ReadInt32LittleEndian(properties.AsSpan(1));
AllocatorSize = BinaryPrimitives.ReadInt32LittleEndian(properties.Slice(1));
ModelOrder = properties[0];
}
}

View File

@@ -1,4 +1,5 @@
using System;
using System.Buffers.Binary;
using System.IO;
namespace SharpCompress.Compressors.Xz
@@ -8,7 +9,7 @@ namespace SharpCompress.Compressors.Xz
public static int ReadLittleEndianInt32(this BinaryReader reader)
{
byte[] bytes = reader.ReadBytes(4);
return (bytes[0] + (bytes[1] << 8) + (bytes[2] << 16) + (bytes[3] << 24));
return BinaryPrimitives.ReadInt32LittleEndian(bytes);
}
internal static uint ReadLittleEndianUInt32(this BinaryReader reader)
@@ -17,13 +18,13 @@ namespace SharpCompress.Compressors.Xz
}
public static int ReadLittleEndianInt32(this Stream stream)
{
byte[] bytes = new byte[4];
Span<byte> bytes = stackalloc byte[4];
var read = stream.ReadFully(bytes);
if (!read)
{
throw new EndOfStreamException();
}
return (bytes[0] + (bytes[1] << 8) + (bytes[2] << 16) + (bytes[3] << 24));
return BinaryPrimitives.ReadInt32LittleEndian(bytes);
}
internal static uint ReadLittleEndianUInt32(this Stream stream)
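
The shift-and-add expression and BinaryPrimitives.ReadInt32LittleEndian used above decode the same little-endian value; a quick check with an arbitrary 4-byte sequence:

using System;
using System.Buffers.Binary;

byte[] bytes = { 0x78, 0x56, 0x34, 0x12 };
int manual = bytes[0] + (bytes[1] << 8) + (bytes[2] << 16) + (bytes[3] << 24);
int primitives = BinaryPrimitives.ReadInt32LittleEndian(bytes);
Console.WriteLine($"0x{manual:X8} == 0x{primitives:X8}");   // 0x12345678 == 0x12345678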

View File

@@ -1,7 +1,6 @@
#nullable disable
using System;
using System.Collections.Generic;
namespace SharpCompress.Compressors.Xz
{
@@ -24,7 +23,7 @@ namespace SharpCompress.Compressors.Xz
public static UInt32 Compute(UInt32 polynomial, UInt32 seed, byte[] buffer)
{
return ~CalculateHash(InitializeTable(polynomial), seed, buffer, 0, buffer.Length);
return ~CalculateHash(InitializeTable(polynomial), seed, buffer);
}
private static UInt32[] InitializeTable(UInt32 polynomial)
@@ -61,16 +60,16 @@ namespace SharpCompress.Compressors.Xz
return createTable;
}
private static UInt32 CalculateHash(UInt32[] table, UInt32 seed, IList<byte> buffer, int start, int size)
private static UInt32 CalculateHash(UInt32[] table, UInt32 seed, ReadOnlySpan<byte> buffer)
{
var crc = seed;
for (var i = start; i < size - start; i++)
int len = buffer.Length;
for (var i = 0; i < len; i++)
{
crc = (crc >> 8) ^ table[buffer[i] ^ crc & 0xff];
crc = (crc >> 8) ^ table[(buffer[i] ^ crc) & 0xff];
}
return crc;
}
}
}
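
The loop above is the standard table-driven, reflected CRC-32 update. A self-contained sketch of the same computation over a span (polynomial 0xEDB88320, seed and final XOR 0xFFFFFFFF), checked against the well-known value for "123456789"; this is an illustration, not the library's Crc32 class:

using System;
using System.Text;

static uint Crc32(ReadOnlySpan<byte> data)
{
    Span<uint> table = stackalloc uint[256];
    for (uint n = 0; n < 256; n++)
    {
        uint c = n;
        for (int k = 0; k < 8; k++)
        {
            c = (c & 1) != 0 ? 0xEDB88320u ^ (c >> 1) : c >> 1;
        }
        table[(int)n] = c;
    }
    uint crc = 0xFFFFFFFFu;
    foreach (byte b in data)
    {
        crc = (crc >> 8) ^ table[(int)((b ^ crc) & 0xff)];   // the update step shown above
    }
    return ~crc;
}

Console.WriteLine(Crc32(Encoding.ASCII.GetBytes("123456789")).ToString("X8"));   // CBF43926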

View File

@@ -22,14 +22,14 @@ namespace SharpCompress.Compressors.Xz
{
Table ??= CreateTable(Iso3309Polynomial);
return CalculateHash(seed, Table, buffer, 0, buffer.Length);
return CalculateHash(seed, Table, buffer);
}
public static UInt64 CalculateHash(UInt64 seed, UInt64[] table, IList<byte> buffer, int start, int size)
public static UInt64 CalculateHash(UInt64 seed, UInt64[] table, ReadOnlySpan<byte> buffer)
{
var crc = seed;
for (var i = start; i < size; i++)
int len = buffer.Length;
for (var i = 0; i < len; i++)
{
unchecked
{

View File

@@ -84,11 +84,11 @@ namespace SharpCompress.IO
throw new NotSupportedException();
}
public override long Length => throw new NotSupportedException();
public override long Length => stream.Length;
public override long Position
{
get { return stream.Position + bufferStream.Position - bufferStream.Length; }
get => stream.Position + bufferStream.Position - bufferStream.Length;
set
{
if (!isRewound)

View File

@@ -12,7 +12,7 @@ namespace System.IO
try
{
int read = stream.Read(temp, 0, buffer.Length);
int read = stream.Read(buffer);
temp.AsSpan(0, read).CopyTo(buffer);
@@ -42,4 +42,4 @@ namespace System.IO
}
}
#endif
#endif

View File

@@ -10,5 +10,7 @@ namespace SharpCompress.Readers
public bool LookForHeader { get; set; }
public string? Password { get; set; }
public bool DisableCheckIncomplete { get; set; }
}
}
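
A hedged usage sketch of the new option: opening a RAR set whose later volumes are missing without the completeness check throwing. The RarArchive.Open(string, ReaderOptions) overload and entry.Key property are assumed from the library's public API; the file name is hypothetical.

using System;
using SharpCompress.Archives.Rar;
using SharpCompress.Readers;

var options = new ReaderOptions { DisableCheckIncomplete = true };   // new in this release
using var archive = RarArchive.Open("archive.part1.rar", options);
foreach (var entry in archive.Entries)
{
    // With the check disabled, entries whose trailing parts are missing can
    // still be enumerated and partially read instead of throwing
    // IncompleteArchiveException.
    Console.WriteLine(entry.Key);
}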

View File

@@ -2,9 +2,9 @@
<PropertyGroup>
<AssemblyTitle>SharpCompress - Pure C# Decompression/Compression</AssemblyTitle>
<NeutralLanguage>en-US</NeutralLanguage>
<VersionPrefix>0.27.0</VersionPrefix>
<AssemblyVersion>0.27.0</AssemblyVersion>
<FileVersion>0.27.0</FileVersion>
<VersionPrefix>0.28.0</VersionPrefix>
<AssemblyVersion>0.28.0</AssemblyVersion>
<FileVersion>0.28.0</FileVersion>
<Authors>Adam Hathcock</Authors>
<TargetFrameworks>netstandard2.0;netstandard2.1;netcoreapp3.1;net5.0</TargetFrameworks>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>

View File

@@ -281,11 +281,11 @@ namespace SharpCompress
return ArrayPool<byte>.Shared.Rent(81920);
}
public static bool ReadFully(this Stream stream, byte[] buffer)
public static bool ReadFully(this Stream stream, Span<byte> buffer)
{
int total = 0;
int read;
while ((read = stream.Read(buffer, total, buffer.Length - total)) > 0)
while ((read = stream.Read(buffer.Slice(total, buffer.Length - total))) > 0)
{
total += read;
if (total >= buffer.Length)
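
The hunk above is truncated; for clarity, here is a completed sketch of the span-based ReadFully loop it modifies. Stream.Read may return fewer bytes than requested, so the loop keeps reading until the span is full or the stream ends (assuming the Span-taking Read overload, or the project's shim on older targets):

using System;
using System.IO;

static bool ReadFully(Stream stream, Span<byte> buffer)
{
    int total = 0;
    while (total < buffer.Length)
    {
        int read = stream.Read(buffer.Slice(total));
        if (read <= 0)
        {
            return false;   // end of stream before the buffer was filled
        }
        total += read;
    }
    return true;
}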

View File

@@ -99,7 +99,7 @@ namespace SharpCompress.Writers.Tar
return;
}
zeros = 512 - zeros;
OutputStream.Write(new byte[zeros], 0, zeros);
OutputStream.Write(stackalloc byte[zeros]);
}
protected override void Dispose(bool isDisposing)
@@ -128,4 +128,4 @@ namespace SharpCompress.Writers.Tar
base.Dispose(isDisposing);
}
}
}
}

View File

@@ -71,60 +71,60 @@ namespace SharpCompress.Writers.Zip
usedCompression = ZipCompressionMethod.None;
}
byte[] intBuf = new byte[] { 80, 75, 1, 2, version, 0, version, 0 };
Span<byte> intBuf = stackalloc byte[] { 80, 75, 1, 2, version, 0, version, 0 };
//constant sig, then version made by, then version to extract
outputStream.Write(intBuf, 0, 8);
outputStream.Write(intBuf);
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)flags);
outputStream.Write(intBuf, 0, 2);
outputStream.Write(intBuf.Slice(0, 2));
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)usedCompression);
outputStream.Write(intBuf, 0, 2); // zipping method
outputStream.Write(intBuf.Slice(0, 2)); // zipping method
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, ModificationTime.DateTimeToDosTime());
outputStream.Write(intBuf, 0, 4);
outputStream.Write(intBuf.Slice(0, 4));
// zipping date and time
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, Crc);
outputStream.Write(intBuf, 0, 4); // file CRC
outputStream.Write(intBuf.Slice(0, 4)); // file CRC
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, compressedvalue);
outputStream.Write(intBuf, 0, 4); // compressed file size
outputStream.Write(intBuf.Slice(0, 4)); // compressed file size
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, decompressedvalue);
outputStream.Write(intBuf, 0, 4); // uncompressed file size
outputStream.Write(intBuf.Slice(0, 4)); // uncompressed file size
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)encodedFilename.Length);
outputStream.Write(intBuf, 0, 2); // Filename in zip
outputStream.Write(intBuf.Slice(0, 2)); // Filename in zip
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)extralength);
outputStream.Write(intBuf, 0, 2); // extra length
outputStream.Write(intBuf.Slice(0, 2)); // extra length
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)encodedComment.Length);
outputStream.Write(intBuf, 0, 2);
outputStream.Write(intBuf.Slice(0, 2));
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 0);
outputStream.Write(intBuf, 0, 2); // disk=0
outputStream.Write(intBuf.Slice(0, 2)); // disk=0
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)flags);
outputStream.Write(intBuf, 0, 2); // file type: binary
outputStream.Write(intBuf.Slice(0, 2)); // file type: binary
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)flags);
outputStream.Write(intBuf, 0, 2); // Internal file attributes
outputStream.Write(intBuf.Slice(0, 2)); // Internal file attributes
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 0x8100);
outputStream.Write(intBuf, 0, 2);
outputStream.Write(intBuf.Slice(0, 2));
// External file attributes (normal/readable)
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, headeroffsetvalue);
outputStream.Write(intBuf, 0, 4); // Offset of header
outputStream.Write(intBuf.Slice(0, 4)); // Offset of header
outputStream.Write(encodedFilename, 0, encodedFilename.Length);
if (zip64)
{
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 0x0001);
outputStream.Write(intBuf, 0, 2);
outputStream.Write(intBuf.Slice(0, 2));
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)(extralength - 4));
outputStream.Write(intBuf, 0, 2);
outputStream.Write(intBuf.Slice(0, 2));
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, Decompressed);
outputStream.Write(intBuf, 0, 8);
outputStream.Write(intBuf);
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, Compressed);
outputStream.Write(intBuf, 0, 8);
outputStream.Write(intBuf);
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, HeaderOffset);
outputStream.Write(intBuf, 0, 8);
outputStream.Write(intBuf);
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, 0);
outputStream.Write(intBuf, 0, 4); // VolumeNumber = 0
outputStream.Write(intBuf.Slice(0, 4)); // VolumeNumber = 0
}
outputStream.Write(encodedComment, 0, encodedComment.Length);

View File

@@ -162,10 +162,9 @@ namespace SharpCompress.Writers.Zip
var explicitZipCompressionInfo = ToZipCompressionMethod(zipWriterEntryOptions.CompressionType ?? compressionType);
byte[] encodedFilename = WriterOptions.ArchiveEncoding.Encode(filename);
// TODO: Use stackalloc when we exclusively support netstandard2.1 or higher
byte[] intBuf = new byte[4];
Span<byte> intBuf = stackalloc byte[4];
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, ZipHeaderFactory.ENTRY_HEADER_BYTES);
OutputStream.Write(intBuf, 0, 4);
OutputStream.Write(intBuf);
if (explicitZipCompressionInfo == ZipCompressionMethod.Deflate)
{
if (OutputStream.CanSeek && useZip64)
@@ -193,18 +192,18 @@ namespace SharpCompress.Writers.Zip
}
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)flags);
OutputStream.Write(intBuf, 0, 2);
OutputStream.Write(intBuf.Slice(0, 2));
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)explicitZipCompressionInfo);
OutputStream.Write(intBuf, 0, 2); // zipping method
OutputStream.Write(intBuf.Slice(0, 2)); // zipping method
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, zipWriterEntryOptions.ModificationDateTime.DateTimeToDosTime());
OutputStream.Write(intBuf, 0, 4);
OutputStream.Write(intBuf);
// zipping date and time
OutputStream.Write(stackalloc byte[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 });
// unused CRC, un/compressed size, updated later
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)encodedFilename.Length);
OutputStream.Write(intBuf, 0, 2); // filename length
OutputStream.Write(intBuf.Slice(0, 2)); // filename length
var extralength = 0;
if (OutputStream.CanSeek && useZip64)
@@ -213,7 +212,7 @@ namespace SharpCompress.Writers.Zip
}
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)extralength);
OutputStream.Write(intBuf, 0, 2); // extra length
OutputStream.Write(intBuf.Slice(0, 2)); // extra length
OutputStream.Write(encodedFilename, 0, encodedFilename.Length);
if (extralength != 0)
@@ -227,13 +226,13 @@ namespace SharpCompress.Writers.Zip
private void WriteFooter(uint crc, uint compressed, uint uncompressed)
{
byte[] intBuf = new byte[4];
Span<byte> intBuf = stackalloc byte[4];
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, crc);
OutputStream.Write(intBuf, 0, 4);
OutputStream.Write(intBuf);
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, compressed);
OutputStream.Write(intBuf, 0, 4);
OutputStream.Write(intBuf);
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, uncompressed);
OutputStream.Write(intBuf, 0, 4);
OutputStream.Write(intBuf);
}
private void WriteEndRecord(ulong size)
@@ -244,7 +243,7 @@ namespace SharpCompress.Writers.Zip
var sizevalue = size >= uint.MaxValue ? uint.MaxValue : (uint)size;
var streampositionvalue = streamPosition >= uint.MaxValue ? uint.MaxValue : (uint)streamPosition;
byte[] intBuf = new byte[8];
Span<byte> intBuf = stackalloc byte[8];
if (zip64)
{
var recordlen = 2 + 2 + 4 + 4 + 8 + 8 + 8 + 8;
@@ -253,34 +252,34 @@ namespace SharpCompress.Writers.Zip
OutputStream.Write(stackalloc byte[] { 80, 75, 6, 6 });
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)recordlen);
OutputStream.Write(intBuf, 0, 8); // Size of zip64 end of central directory record
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 0);
OutputStream.Write(intBuf, 0, 2); // Made by
OutputStream.Write(intBuf); // Size of zip64 end of central directory record
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 45);
OutputStream.Write(intBuf, 0, 2); // Version needed
OutputStream.Write(intBuf.Slice(0, 2)); // Made by
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 45);
OutputStream.Write(intBuf.Slice(0, 2)); // Version needed
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, 0);
OutputStream.Write(intBuf, 0, 4); // Disk number
OutputStream.Write(intBuf, 0, 4); // Central dir disk
OutputStream.Write(intBuf.Slice(0, 4)); // Disk number
OutputStream.Write(intBuf.Slice(0, 4)); // Central dir disk
// TODO: entries.Count is int, so max 2^31 files
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)entries.Count);
OutputStream.Write(intBuf, 0, 8); // Entries in this disk
OutputStream.Write(intBuf, 0, 8); // Total entries
OutputStream.Write(intBuf); // Entries in this disk
OutputStream.Write(intBuf); // Total entries
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, size);
OutputStream.Write(intBuf, 0, 8); // Central Directory size
OutputStream.Write(intBuf); // Central Directory size
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)streamPosition);
OutputStream.Write(intBuf, 0, 8); // Disk offset
OutputStream.Write(intBuf); // Disk offset
// Write zip64 end of central directory locator
OutputStream.Write(stackalloc byte[] { 80, 75, 6, 7 });
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, 0);
OutputStream.Write(intBuf, 0, 4); // Entry disk
OutputStream.Write(intBuf.Slice(0, 4)); // Entry disk
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)streamPosition + size);
OutputStream.Write(intBuf, 0, 8); // Offset to the zip64 central directory
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, 0);
OutputStream.Write(intBuf, 0, 4); // Number of disks
OutputStream.Write(intBuf); // Offset to the zip64 central directory
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, 1);
OutputStream.Write(intBuf.Slice(0, 4)); // Number of disks
streamPosition += recordlen + (4 + 4 + 8 + 4);
streampositionvalue = streamPosition >= uint.MaxValue ? uint.MaxValue : (uint)streampositionvalue;
@@ -289,15 +288,15 @@ namespace SharpCompress.Writers.Zip
// Write normal end of central directory record
OutputStream.Write(stackalloc byte[] { 80, 75, 5, 6, 0, 0, 0, 0 });
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)entries.Count);
OutputStream.Write(intBuf, 0, 2);
OutputStream.Write(intBuf, 0, 2);
OutputStream.Write(intBuf.Slice(0, 2));
OutputStream.Write(intBuf.Slice(0, 2));
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, sizevalue);
OutputStream.Write(intBuf, 0, 4);
OutputStream.Write(intBuf.Slice(0, 4));
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, streampositionvalue);
OutputStream.Write(intBuf, 0, 4);
OutputStream.Write(intBuf.Slice(0, 4));
byte[] encodedComment = WriterOptions.ArchiveEncoding.Encode(zipComment);
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)encodedComment.Length);
OutputStream.Write(intBuf, 0, 2);
OutputStream.Write(intBuf.Slice(0, 2));
OutputStream.Write(encodedComment, 0, encodedComment.Length);
}
@@ -443,16 +442,16 @@ namespace SharpCompress.Writers.Zip
if (entry.Zip64HeaderOffset != 0)
{
originalStream.Position = (long)(entry.HeaderOffset + entry.Zip64HeaderOffset);
byte[] intBuf = new byte[8];
Span<byte> intBuf = stackalloc byte[8];
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 0x0001);
originalStream.Write(intBuf, 0, 2);
originalStream.Write(intBuf.Slice(0, 2));
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, 8 + 8);
originalStream.Write(intBuf, 0, 2);
originalStream.Write(intBuf.Slice(0, 2));
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, entry.Decompressed);
originalStream.Write(intBuf, 0, 8);
originalStream.Write(intBuf);
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, entry.Compressed);
originalStream.Write(intBuf, 0, 8);
originalStream.Write(intBuf);
}
originalStream.Position = writer.streamPosition + (long)entry.Compressed;
@@ -471,9 +470,9 @@ namespace SharpCompress.Writers.Zip
throw new NotSupportedException("Streams larger than 4GiB are not supported for non-seekable streams");
}
byte[] intBuf = new byte[4];
Span<byte> intBuf = stackalloc byte[4];
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, ZipHeaderFactory.POST_DATA_DESCRIPTOR);
originalStream.Write(intBuf, 0, 4);
originalStream.Write(intBuf);
writer.WriteFooter(entry.Crc,
compressedvalue,
decompressedvalue);
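
Among the writer changes above, the zip64 end-of-central-directory locator now records 1 as the total number of disks. For reference, that locator is a fixed 20-byte structure; a sketch of writing it in isolation (a hypothetical helper, not the writer's method):

using System;
using System.Buffers.Binary;
using System.IO;

static void WriteZip64EocdLocator(Stream output, ulong zip64EocdOffset)
{
    Span<byte> buf = stackalloc byte[20];
    buf[0] = 0x50; buf[1] = 0x4B; buf[2] = 0x06; buf[3] = 0x07;              // "PK\x06\x07" signature
    BinaryPrimitives.WriteUInt32LittleEndian(buf.Slice(4), 0);               // disk holding the zip64 EOCD
    BinaryPrimitives.WriteUInt64LittleEndian(buf.Slice(8), zip64EocdOffset); // offset of the zip64 EOCD record
    BinaryPrimitives.WriteUInt32LittleEndian(buf.Slice(16), 1);              // total number of disks (this fix)
    output.Write(buf);                                                       // Span overload on modern targets
}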

View File

@@ -0,0 +1,37 @@
using System.IO;
using SharpCompress.Common;
using SharpCompress.IO;
using Xunit;
namespace SharpCompress.Test.GZip
{
public class GZipReaderTests : ReaderTests
{
public GZipReaderTests()
{
UseExtensionInsteadOfNameToVerify = true;
}
[Fact]
public void GZip_Reader_Generic()
{
Read("Tar.tar.gz", CompressionType.GZip);
}
[Fact]
public void GZip_Reader_Generic2()
{
//read only as GZip itme
using (Stream stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz")))
using (var reader = SharpCompress.Readers.GZip.GZipReader.Open(new RewindableStream(stream)))
{
while (reader.MoveToNextEntry()) // Crash here
{
Assert.NotEqual(0, reader.Entry.Size);
Assert.NotEqual(0, reader.Entry.Crc);
}
}
}
}
}

View File

@@ -564,5 +564,17 @@ namespace SharpCompress.Test.Zip
}
}
}
[Fact]
public void Zip_LongComment_Read()
{
string zipPath = Path.Combine(TEST_ARCHIVES_PATH, "Zip.LongComment.zip");
using(ZipArchive za = ZipArchive.Open(zipPath))
{
var count = za.Entries.Count;
Assert.Equal(1, count);
}
}
}
}

Binary file not shown.