Compare commits


3 Commits

Author  SHA1  Message  Date
Adam Hathcock  9d24c53cfd  Intermediate commit. Was trying out a progress bar  2017-06-09 08:25:28 +01:00
Adam Hathcock  e7720ccc4e  small refactor  2017-06-09 08:25:28 +01:00
Adam Hathcock  723f4dc83f  Start making a dotnet tool  2017-06-09 08:25:28 +01:00
76 changed files with 802 additions and 1168 deletions

View File

@@ -44,14 +44,6 @@ I'm always looking for help or ideas. Please submit code or email with ideas. Un
## Version Log
### Version 0.18
* [Now on Github releases](https://github.com/adamhathcock/sharpcompress/releases/tag/0.18)
### Version 0.17.1
* Fix - [Bug Fix for .NET Core on Windows](https://github.com/adamhathcock/sharpcompress/pull/257)
### Version 0.17.0
* New - Full LZip support! Can read and write LZip files and Tars inside LZip files. [Make LZip a first class citizen. #241](https://github.com/adamhathcock/sharpcompress/issues/241)

View File

@@ -13,6 +13,8 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SharpCompress", "src\SharpC
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SharpCompress.Test", "tests\SharpCompress.Test\SharpCompress.Test.csproj", "{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "dotnet-sharpcompress", "src\dotnet-sharpcompress\dotnet-sharpcompress.csproj", "{CC08976E-8E3B-44EE-BDA7-6A9D2FDDDB02}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -27,6 +29,10 @@ Global
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}.Debug|Any CPU.Build.0 = Debug|Any CPU
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}.Release|Any CPU.ActiveCfg = Release|Any CPU
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F}.Release|Any CPU.Build.0 = Release|Any CPU
{CC08976E-8E3B-44EE-BDA7-6A9D2FDDDB02}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{CC08976E-8E3B-44EE-BDA7-6A9D2FDDDB02}.Debug|Any CPU.Build.0 = Debug|Any CPU
{CC08976E-8E3B-44EE-BDA7-6A9D2FDDDB02}.Release|Any CPU.ActiveCfg = Release|Any CPU
{CC08976E-8E3B-44EE-BDA7-6A9D2FDDDB02}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
@@ -34,5 +40,6 @@ Global
GlobalSection(NestedProjects) = preSolution
{FD19DDD8-72B2-4024-8665-0D1F7A2AA998} = {3C5BE746-03E5-4895-9988-0B57F162F86C}
{F2B1A1EB-0FA6-40D0-8908-E13247C7226F} = {0F0901FF-E8D9-426A-B5A2-17C7F47C1529}
{CC08976E-8E3B-44EE-BDA7-6A9D2FDDDB02} = {3C5BE746-03E5-4895-9988-0B57F162F86C}
EndGlobalSection
EndGlobal
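
The hunks above add a dotnet-sharpcompress project to the solution, matching the "Start making a dotnet tool" commit. The project's own files are not part of this comparison; as a rough idea only, a console entry point for such a tool might look like the following sketch (class name, argument handling, and output format are assumptions, not the actual project contents):

    using System;
    using SharpCompress.Archives;

    namespace SharpCompress.Cli
    {
        // Hypothetical entry point; the real dotnet-sharpcompress sources are not shown here.
        public static class Program
        {
            public static int Main(string[] args)
            {
                if (args.Length != 1)
                {
                    Console.Error.WriteLine("usage: dotnet-sharpcompress <archive>");
                    return 1;
                }
                using (var archive = ArchiveFactory.Open(args[0]))
                {
                    foreach (var entry in archive.Entries)
                    {
                        Console.WriteLine($"{entry.Size,12} {entry.Key}");
                    }
                }
                return 0;
            }
        }
    }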

View File

@@ -14,7 +14,6 @@ namespace SharpCompress.Archives.GZip
public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
{
#if !NO_FILE
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
@@ -37,7 +36,6 @@ namespace SharpCompress.Archives.GZip
return new GZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
}
#endif
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
@@ -56,11 +54,11 @@ namespace SharpCompress.Archives.GZip
#if !NO_FILE
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
internal GZipArchive(FileInfo fileInfo, ReaderOptions options)
: base(ArchiveType.GZip, fileInfo, options)
{
@@ -106,9 +104,15 @@ namespace SharpCompress.Archives.GZip
{
// read the header on the first read
byte[] header = new byte[10];
int n = stream.Read(header, 0, header.Length);
// workitem 8501: handle edge case (decompress empty stream)
if (!stream.ReadFully(header))
if (n == 0)
{
return false;
}
if (n != 10)
{
return false;
}
@@ -154,7 +158,7 @@ namespace SharpCompress.Archives.GZip
{
throw new InvalidOperationException("Only one entry is allowed in a GZip Archive");
}
using (var writer = new GZipWriter(stream, new GZipWriterOptions(options)))
using (var writer = new GZipWriter(stream))
{
foreach (var entry in oldEntries.Concat(newEntries)
.Where(x => !x.IsDirectory))
@@ -175,7 +179,7 @@ namespace SharpCompress.Archives.GZip
protected override IEnumerable<GZipArchiveEntry> LoadEntries(IEnumerable<GZipVolume> volumes)
{
Stream stream = volumes.Single().Stream;
yield return new GZipArchiveEntry(this, new GZipFilePart(stream, ReaderOptions.ArchiveEncoding));
yield return new GZipArchiveEntry(this, new GZipFilePart(stream));
}
protected override IReader CreateReaderForSolidExtraction()
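
In the GZipArchive hunk above, one side fills the 10-byte gzip header with stream.ReadFully(header) while the other issues a single stream.Read call and checks n == 0 and n != 10. Stream.Read is allowed to return fewer bytes than requested even before end-of-stream, so a loop is the robust way to fill a fixed-size header. A minimal sketch of such a helper, assuming an extension method named ReadFully (the actual SharpCompress utility is not shown in this diff and may differ):

    using System.IO;

    internal static class StreamExtensions
    {
        // Fills the buffer completely or reports failure; a single Stream.Read call
        // may legally return fewer bytes than requested even before end-of-stream.
        public static bool ReadFully(this Stream stream, byte[] buffer)
        {
            int total = 0;
            while (total < buffer.Length)
            {
                int read = stream.Read(buffer, total, buffer.Length - total);
                if (read == 0)
                {
                    return false; // end of stream before the buffer was filled
                }
                total += read;
            }
            return true;
        }
    }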

View File

@@ -106,7 +106,7 @@ namespace SharpCompress.Archives.SevenZip
for (int i = 0; i < database.Files.Count; i++)
{
var file = database.Files[i];
yield return new SevenZipArchiveEntry(this, new SevenZipFilePart(stream, database, i, file, ReaderOptions.ArchiveEncoding));
yield return new SevenZipArchiveEntry(this, new SevenZipFilePart(stream, database, i, file));
}
}

View File

@@ -16,7 +16,7 @@ namespace SharpCompress.Archives.Tar
public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
{
#if !NO_FILE
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
@@ -39,7 +39,7 @@ namespace SharpCompress.Archives.Tar
return new TarArchive(fileInfo, readerOptions ?? new ReaderOptions());
}
#endif
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
@@ -52,7 +52,6 @@ namespace SharpCompress.Archives.Tar
}
#if !NO_FILE
public static bool IsTarFile(string filePath)
{
return IsTarFile(new FileInfo(filePath));
@@ -75,7 +74,7 @@ namespace SharpCompress.Archives.Tar
{
try
{
TarHeader tar = new TarHeader(new ArchiveEncoding());
TarHeader tar = new TarHeader();
tar.Read(new BinaryReader(stream));
return tar.Name.Length > 0 && Enum.IsDefined(typeof(EntryType), tar.EntryType);
}
@@ -99,6 +98,7 @@ namespace SharpCompress.Archives.Tar
protected override IEnumerable<TarVolume> LoadVolumes(FileInfo file)
{
return new TarVolume(file.OpenRead(), ReaderOptions).AsEnumerable();
}
#endif
@@ -127,7 +127,7 @@ namespace SharpCompress.Archives.Tar
{
Stream stream = volumes.Single().Stream;
TarHeader previousHeader = null;
foreach (TarHeader header in TarHeaderFactory.ReadHeader(StreamingMode.Seekable, stream, ReaderOptions.ArchiveEncoding))
foreach (TarHeader header in TarHeaderFactory.ReadHeader(StreamingMode.Seekable, stream))
{
if (header != null)
{
@@ -152,7 +152,7 @@ namespace SharpCompress.Archives.Tar
memoryStream.Position = 0;
var bytes = memoryStream.ToArray();
header.Name = ReaderOptions.ArchiveEncoding.Decode(bytes).TrimNulls();
header.Name = ArchiveEncoding.Default.GetString(bytes, 0, bytes.Length).TrimNulls();
}
}

View File

@@ -24,7 +24,6 @@ namespace SharpCompress.Archives.Zip
public CompressionLevel DeflateCompressionLevel { get; set; }
#if !NO_FILE
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
@@ -47,7 +46,6 @@ namespace SharpCompress.Archives.Zip
return new ZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
}
#endif
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
@@ -60,7 +58,6 @@ namespace SharpCompress.Archives.Zip
}
#if !NO_FILE
public static bool IsZipFile(string filePath, string password = null)
{
return IsZipFile(new FileInfo(filePath), password);
@@ -81,7 +78,7 @@ namespace SharpCompress.Archives.Zip
public static bool IsZipFile(Stream stream, string password = null)
{
StreamingZipHeaderFactory headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding());
StreamingZipHeaderFactory headerFactory = new StreamingZipHeaderFactory(password);
try
{
ZipHeader header =
@@ -112,7 +109,7 @@ namespace SharpCompress.Archives.Zip
internal ZipArchive(FileInfo fileInfo, ReaderOptions readerOptions)
: base(ArchiveType.Zip, fileInfo, readerOptions)
{
headerFactory = new SeekableZipHeaderFactory(readerOptions.Password, readerOptions.ArchiveEncoding);
headerFactory = new SeekableZipHeaderFactory(readerOptions.Password);
}
protected override IEnumerable<ZipVolume> LoadVolumes(FileInfo file)
@@ -134,7 +131,7 @@ namespace SharpCompress.Archives.Zip
internal ZipArchive(Stream stream, ReaderOptions readerOptions)
: base(ArchiveType.Zip, stream, readerOptions)
{
headerFactory = new SeekableZipHeaderFactory(readerOptions.Password, readerOptions.ArchiveEncoding);
headerFactory = new SeekableZipHeaderFactory(readerOptions.Password);
}
protected override IEnumerable<ZipVolume> LoadVolumes(IEnumerable<Stream> streams)
@@ -153,19 +150,19 @@ namespace SharpCompress.Archives.Zip
switch (h.ZipHeaderType)
{
case ZipHeaderType.DirectoryEntry:
{
yield return new ZipArchiveEntry(this,
new SeekableZipFilePart(headerFactory,
h as DirectoryEntryHeader,
stream));
}
{
yield return new ZipArchiveEntry(this,
new SeekableZipFilePart(headerFactory,
h as DirectoryEntryHeader,
stream));
}
break;
case ZipHeaderType.DirectoryEnd:
{
byte[] bytes = (h as DirectoryEndHeader).Comment;
volume.Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
yield break;
}
{
byte[] bytes = (h as DirectoryEndHeader).Comment;
volume.Comment = ArchiveEncoding.Default.GetString(bytes, 0, bytes.Length);
yield break;
}
}
}
}
@@ -208,7 +205,7 @@ namespace SharpCompress.Archives.Zip
{
var stream = Volumes.Single().Stream;
stream.Position = 0;
return ZipReader.Open(stream, ReaderOptions);
return ZipReader.Open(stream);
}
}
}

View File

@@ -1,60 +1,23 @@
using System;
using System.Text;
using System.Text;
namespace SharpCompress.Common
{
public class ArchiveEncoding
public static class ArchiveEncoding
{
/// <summary>
/// Default encoding to use when archive format doesn't specify one.
/// </summary>
public Encoding Default { get; set; }
public static Encoding Default { get; set; }
/// <summary>
/// ArchiveEncoding used by encryption schemes which don't comply with RFC 2898.
/// Encoding used by encryption schemes which don't comply with RFC 2898.
/// </summary>
public Encoding Password { get; set; }
public static Encoding Password { get; set; }
/// <summary>
/// Set this encoding when you want to force it for all encoding operations.
/// </summary>
public Encoding Forced { get; set; }
/// <summary>
/// Set this when you want to use a custom method for all decoding operations.
/// </summary>
/// <returns>string Func(bytes, index, length)</returns>
public Func<byte[], int, int, string> CustomDecoder { get; set; }
public ArchiveEncoding()
static ArchiveEncoding()
{
Default = Encoding.UTF8;
Password = Encoding.UTF8;
}
public string Decode(byte[] bytes)
{
return Decode(bytes, 0, bytes.Length);
}
public string Decode(byte[] bytes, int start, int length)
{
return GetDecoder().Invoke(bytes, start, length);
}
public byte[] Encode(string str)
{
return GetEncoding().GetBytes(str);
}
public Encoding GetEncoding()
{
return Forced ?? Default ?? Encoding.UTF8;
}
public Func<byte[], int, int, string> GetDecoder()
{
return CustomDecoder ?? ((bytes, index, count) => (Default ?? Encoding.UTF8).GetString(bytes, index, count));
}
}
}
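
The two variants of ArchiveEncoding shown above differ in where the encoding lives: a process-wide static class versus an instance carried on the options object (see the OptionsBase hunk further down, where one side exposes an ArchiveEncoding property on reader/writer options). A hedged sketch of how a caller would use the instance-based variant to read entry names in a legacy code page; the file name and code page are illustrative:

    using System;
    using System.Text;
    using SharpCompress.Archives.Zip;
    using SharpCompress.Common;
    using SharpCompress.Readers;

    internal static class EncodingExample
    {
        public static void ListEntries()
        {
            // On .NET Core, code-page encodings such as 866 require registering
            // CodePagesEncodingProvider (System.Text.Encoding.CodePages) first.
            var options = new ReaderOptions
            {
                ArchiveEncoding = new ArchiveEncoding
                {
                    Default = Encoding.GetEncoding(866) // e.g. DOS Cyrillic entry names
                }
            };

            using (var archive = ZipArchive.Open("legacy.zip", options))
            {
                foreach (var entry in archive.Entries)
                {
                    Console.WriteLine(entry.Key);
                }
            }
        }
    }

The static variant forces a single encoding per process, which is the practical difference between the two designs in this diff.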

View File

@@ -4,17 +4,9 @@ namespace SharpCompress.Common
{
public abstract class FilePart
{
protected FilePart(ArchiveEncoding archiveEncoding)
{
ArchiveEncoding = archiveEncoding;
}
internal ArchiveEncoding ArchiveEncoding { get; }
internal abstract string FilePartName { get; }
internal abstract Stream GetCompressedStream();
internal abstract Stream GetRawStream();
internal bool Skipped { get; set; }
}
}

View File

@@ -1,7 +1,6 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
namespace SharpCompress.Common.GZip
{
@@ -40,9 +39,9 @@ namespace SharpCompress.Common.GZip
internal override IEnumerable<FilePart> Parts => filePart.AsEnumerable<FilePart>();
internal static IEnumerable<GZipEntry> GetEntries(Stream stream, OptionsBase options)
internal static IEnumerable<GZipEntry> GetEntries(Stream stream)
{
yield return new GZipEntry(new GZipFilePart(stream, options.ArchiveEncoding));
yield return new GZipEntry(new GZipFilePart(stream));
}
}
}

View File

@@ -5,37 +5,35 @@ using SharpCompress.Common.Tar.Headers;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Converters;
using System.Text;
namespace SharpCompress.Common.GZip
{
internal class GZipFilePart : FilePart
{
private string _name;
private readonly Stream _stream;
private string name;
private readonly Stream stream;
internal GZipFilePart(Stream stream, ArchiveEncoding archiveEncoding)
: base(archiveEncoding)
internal GZipFilePart(Stream stream)
{
ReadAndValidateGzipHeader(stream);
EntryStartPosition = stream.Position;
this._stream = stream;
this.stream = stream;
}
internal long EntryStartPosition { get; }
internal DateTime? DateModified { get; private set; }
internal override string FilePartName => _name;
internal override string FilePartName => name;
internal override Stream GetCompressedStream()
{
return new DeflateStream(_stream, CompressionMode.Decompress, CompressionLevel.Default, false);
return new DeflateStream(stream, CompressionMode.Decompress, CompressionLevel.Default, false);
}
internal override Stream GetRawStream()
{
return _stream;
return stream;
}
private void ReadAndValidateGzipHeader(Stream stream)
@@ -69,16 +67,15 @@ namespace SharpCompress.Common.GZip
Int16 extraLength = (Int16)(header[0] + header[1] * 256);
byte[] extra = new byte[extraLength];
if (!stream.ReadFully(extra))
n = stream.Read(extra, 0, extra.Length);
if (n != extraLength)
{
throw new ZlibException("Unexpected end-of-file reading GZIP header.");
}
n = extraLength;
}
if ((header[3] & 0x08) == 0x08)
{
_name = ReadZeroTerminatedString(stream);
name = ReadZeroTerminatedString(stream);
}
if ((header[3] & 0x10) == 0x010)
{
@@ -90,7 +87,7 @@ namespace SharpCompress.Common.GZip
}
}
private string ReadZeroTerminatedString(Stream stream)
private static string ReadZeroTerminatedString(Stream stream)
{
byte[] buf1 = new byte[1];
var list = new List<byte>();
@@ -113,8 +110,8 @@ namespace SharpCompress.Common.GZip
}
}
while (!done);
byte[] buffer = list.ToArray();
return ArchiveEncoding.Decode(buffer);
byte[] a = list.ToArray();
return ArchiveEncoding.Default.GetString(a, 0, a.Length);
}
}
}
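
For reference, the FLG-byte masks tested in ReadAndValidateGzipHeader above (header[3] & 0x08 for the file name, & 0x10 for the comment, plus the extra-field handling earlier in the hunk) are the flag bits defined in RFC 1952. A small illustrative enum naming them (not part of SharpCompress):

    using System;

    // RFC 1952 FLG bits; header[3] is the FLG byte of the 10-byte gzip header.
    [Flags]
    internal enum GzipFlags : byte
    {
        None     = 0,
        FTEXT    = 1 << 0,
        FHCRC    = 1 << 1,
        FEXTRA   = 1 << 2, // extra field, read via the extraLength logic above
        FNAME    = 1 << 3, // matches (header[3] & 0x08)
        FCOMMENT = 1 << 4  // matches (header[3] & 0x10)
    }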

View File

@@ -1,5 +1,4 @@
namespace SharpCompress.Common
namespace SharpCompress.Common
{
public class OptionsBase
{
@@ -7,7 +6,5 @@ namespace SharpCompress.Common
/// SharpCompress will keep the supplied streams open. Default is true.
/// </summary>
public bool LeaveStreamOpen { get; set; } = true;
public ArchiveEncoding ArchiveEncoding { get; set; } = new ArchiveEncoding();
}
}

View File

@@ -1,6 +1,6 @@
using SharpCompress.IO;
using System;
using System.IO;
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers
{
@@ -52,50 +52,50 @@ namespace SharpCompress.Common.Rar.Headers
switch (HeaderType)
{
case HeaderType.FileHeader:
{
if (FileFlags.HasFlag(FileFlags.UNICODE))
{
if (FileFlags.HasFlag(FileFlags.UNICODE))
int length = 0;
while (length < fileNameBytes.Length
&& fileNameBytes[length] != 0)
{
int length = 0;
while (length < fileNameBytes.Length
&& fileNameBytes[length] != 0)
{
length++;
}
if (length != nameSize)
{
length++;
FileName = FileNameDecoder.Decode(fileNameBytes, length);
}
else
{
FileName = ArchiveEncoding.Decode(fileNameBytes);
}
length++;
}
if (length != nameSize)
{
length++;
FileName = FileNameDecoder.Decode(fileNameBytes, length);
}
else
{
FileName = ArchiveEncoding.Decode(fileNameBytes);
FileName = DecodeDefault(fileNameBytes);
}
FileName = ConvertPath(FileName, HostOS);
}
else
{
FileName = DecodeDefault(fileNameBytes);
}
FileName = ConvertPath(FileName, HostOS);
}
break;
case HeaderType.NewSubHeader:
{
int datasize = HeaderSize - NEWLHD_SIZE - nameSize;
if (FileFlags.HasFlag(FileFlags.SALT))
{
int datasize = HeaderSize - NEWLHD_SIZE - nameSize;
if (FileFlags.HasFlag(FileFlags.SALT))
{
datasize -= SALT_SIZE;
}
if (datasize > 0)
{
SubData = reader.ReadBytes(datasize);
}
if (NewSubHeaderType.SUBHEAD_TYPE_RR.Equals(fileNameBytes))
{
RecoverySectors = SubData[8] + (SubData[9] << 8)
+ (SubData[10] << 16) + (SubData[11] << 24);
}
datasize -= SALT_SIZE;
}
if (datasize > 0)
{
SubData = reader.ReadBytes(datasize);
}
if (NewSubHeaderType.SUBHEAD_TYPE_RR.Equals(fileNameBytes))
{
RecoverySectors = SubData[8] + (SubData[9] << 8)
+ (SubData[10] << 16) + (SubData[11] << 24);
}
}
break;
}
@@ -118,6 +118,12 @@ namespace SharpCompress.Common.Rar.Headers
}
}
//only the full .net framework will do other code pages than unicode/utf8
private string DecodeDefault(byte[] bytes)
{
return ArchiveEncoding.Default.GetString(bytes, 0, bytes.Length);
}
private long UInt32To64(uint x, uint y)
{
long l = x;
@@ -172,7 +178,6 @@ namespace SharpCompress.Common.Rar.Headers
}
internal long DataStartPosition { get; set; }
internal HostOS HostOS { get; private set; }
internal uint FileCRC { get; private set; }
@@ -194,7 +199,6 @@ namespace SharpCompress.Common.Rar.Headers
internal FileFlags FileFlags => (FileFlags)Flags;
internal long CompressedSize { get; private set; }
internal long UncompressedSize { get; private set; }
internal string FileName { get; private set; }

View File

@@ -1,7 +1,6 @@
using System;
using System.IO;
using SharpCompress.IO;
using System.Text;
namespace SharpCompress.Common.Rar.Headers
{
@@ -18,16 +17,14 @@ namespace SharpCompress.Common.Rar.Headers
HeaderSize = baseHeader.HeaderSize;
AdditionalSize = baseHeader.AdditionalSize;
ReadBytes = baseHeader.ReadBytes;
ArchiveEncoding = baseHeader.ArchiveEncoding;
}
internal static RarHeader Create(RarCrcBinaryReader reader, ArchiveEncoding archiveEncoding)
internal static RarHeader Create(RarCrcBinaryReader reader)
{
try
{
RarHeader header = new RarHeader();
header.ArchiveEncoding = archiveEncoding;
reader.Mark();
header.ReadStartFromReader(reader);
header.ReadBytes += reader.CurrentReadByteCount;
@@ -53,8 +50,7 @@ namespace SharpCompress.Common.Rar.Headers
}
}
protected virtual void ReadFromReader(MarkingBinaryReader reader)
{
protected virtual void ReadFromReader(MarkingBinaryReader reader) {
throw new NotImplementedException();
}
@@ -80,11 +76,10 @@ namespace SharpCompress.Common.Rar.Headers
return header;
}
private void VerifyHeaderCrc(ushort crc)
{
if (HeaderType != HeaderType.MarkHeader)
private void VerifyHeaderCrc(ushort crc) {
if (HeaderType != HeaderType.MarkHeader)
{
if (crc != HeadCRC)
if (crc != HeadCRC)
{
throw new InvalidFormatException("rar header crc mismatch");
}
@@ -111,8 +106,6 @@ namespace SharpCompress.Common.Rar.Headers
protected short HeaderSize { get; private set; }
internal ArchiveEncoding ArchiveEncoding { get; private set; }
/// <summary>
/// This additional size of the header could be file data
/// </summary>

View File

@@ -117,7 +117,7 @@ namespace SharpCompress.Common.Rar.Headers
{
#if !NO_CRYPTO
var reader = new RarCryptoBinaryReader(stream, Options.Password);
if (IsEncrypted)
{
if (Options.Password == null)
@@ -133,7 +133,7 @@ namespace SharpCompress.Common.Rar.Headers
#endif
RarHeader header = RarHeader.Create(reader, Options.ArchiveEncoding);
RarHeader header = RarHeader.Create(reader);
if (header == null)
{
return null;
@@ -141,110 +141,110 @@ namespace SharpCompress.Common.Rar.Headers
switch (header.HeaderType)
{
case HeaderType.ArchiveHeader:
{
var ah = header.PromoteHeader<ArchiveHeader>(reader);
IsEncrypted = ah.HasPassword;
return ah;
}
{
var ah = header.PromoteHeader<ArchiveHeader>(reader);
IsEncrypted = ah.HasPassword;
return ah;
}
case HeaderType.MarkHeader:
{
return header.PromoteHeader<MarkHeader>(reader);
}
{
return header.PromoteHeader<MarkHeader>(reader);
}
case HeaderType.ProtectHeader:
{
ProtectHeader ph = header.PromoteHeader<ProtectHeader>(reader);
// skip the recovery record data, we do not use it.
switch (StreamingMode)
{
ProtectHeader ph = header.PromoteHeader<ProtectHeader>(reader);
// skip the recovery record data, we do not use it.
switch (StreamingMode)
case StreamingMode.Seekable:
{
case StreamingMode.Seekable:
{
reader.BaseStream.Position += ph.DataSize;
}
break;
case StreamingMode.Streaming:
{
reader.BaseStream.Skip(ph.DataSize);
}
break;
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
reader.BaseStream.Position += ph.DataSize;
}
break;
case StreamingMode.Streaming:
{
reader.BaseStream.Skip(ph.DataSize);
}
break;
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
return ph;
}
return ph;
}
case HeaderType.NewSubHeader:
{
FileHeader fh = header.PromoteHeader<FileHeader>(reader);
switch (StreamingMode)
{
FileHeader fh = header.PromoteHeader<FileHeader>(reader);
switch (StreamingMode)
case StreamingMode.Seekable:
{
case StreamingMode.Seekable:
{
fh.DataStartPosition = reader.BaseStream.Position;
reader.BaseStream.Position += fh.CompressedSize;
}
break;
case StreamingMode.Streaming:
{
//skip the data because it's useless?
reader.BaseStream.Skip(fh.CompressedSize);
}
break;
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
fh.DataStartPosition = reader.BaseStream.Position;
reader.BaseStream.Position += fh.CompressedSize;
}
return fh;
}
case HeaderType.FileHeader:
{
FileHeader fh = header.PromoteHeader<FileHeader>(reader);
switch (StreamingMode)
break;
case StreamingMode.Streaming:
{
case StreamingMode.Seekable:
{
fh.DataStartPosition = reader.BaseStream.Position;
reader.BaseStream.Position += fh.CompressedSize;
}
break;
case StreamingMode.Streaming:
{
var ms = new ReadOnlySubStream(reader.BaseStream, fh.CompressedSize);
if (fh.Salt == null)
{
fh.PackedStream = ms;
}
else
{
//skip the data because it's useless?
reader.BaseStream.Skip(fh.CompressedSize);
}
break;
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
}
return fh;
}
case HeaderType.FileHeader:
{
FileHeader fh = header.PromoteHeader<FileHeader>(reader);
switch (StreamingMode)
{
case StreamingMode.Seekable:
{
fh.DataStartPosition = reader.BaseStream.Position;
reader.BaseStream.Position += fh.CompressedSize;
}
break;
case StreamingMode.Streaming:
{
var ms = new ReadOnlySubStream(reader.BaseStream, fh.CompressedSize);
if (fh.Salt == null)
{
fh.PackedStream = ms;
}
else
{
#if !NO_CRYPTO
fh.PackedStream = new RarCryptoWrapper(ms, Options.Password, fh.Salt);
fh.PackedStream = new RarCryptoWrapper(ms, Options.Password, fh.Salt);
#else
throw new NotSupportedException("RarCrypto not supported");
#endif
}
}
break;
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
}
}
break;
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
return fh;
}
return fh;
}
case HeaderType.EndArchiveHeader:
{
return header.PromoteHeader<EndArchiveHeader>(reader);
}
{
return header.PromoteHeader<EndArchiveHeader>(reader);
}
default:
{
throw new InvalidFormatException("Invalid Rar Header: " + header.HeaderType);
}
{
throw new InvalidFormatException("Invalid Rar Header: " + header.HeaderType);
}
}
}
}
}
}

View File

@@ -9,7 +9,6 @@ namespace SharpCompress.Common.Rar
internal abstract class RarFilePart : FilePart
{
internal RarFilePart(MarkHeader mh, FileHeader fh)
: base(fh.ArchiveEncoding)
{
MarkHeader = mh;
FileHeader = fh;

View File

@@ -7,15 +7,14 @@ namespace SharpCompress.Common.SevenZip
{
internal class SevenZipFilePart : FilePart
{
private CompressionType? _type;
private readonly Stream _stream;
private readonly ArchiveDatabase _database;
private CompressionType? type;
private readonly Stream stream;
private readonly ArchiveDatabase database;
internal SevenZipFilePart(Stream stream, ArchiveDatabase database, int index, CFileItem fileEntry, ArchiveEncoding archiveEncoding)
: base(archiveEncoding)
internal SevenZipFilePart(Stream stream, ArchiveDatabase database, int index, CFileItem fileEntry)
{
this._stream = stream;
this._database = database;
this.stream = stream;
this.database = database;
Index = index;
Header = fileEntry;
if (Header.HasStream)
@@ -42,14 +41,14 @@ namespace SharpCompress.Common.SevenZip
{
return null;
}
var folderStream = _database.GetFolderStream(_stream, Folder, null);
var folderStream = database.GetFolderStream(stream, Folder, null);
int firstFileIndex = _database.FolderStartFileIndex[_database.Folders.IndexOf(Folder)];
int firstFileIndex = database.FolderStartFileIndex[database.Folders.IndexOf(Folder)];
int skipCount = Index - firstFileIndex;
long skipSize = 0;
for (int i = 0; i < skipCount; i++)
{
skipSize += _database.Files[firstFileIndex + i].Size;
skipSize += database.Files[firstFileIndex + i].Size;
}
if (skipSize > 0)
{
@@ -62,11 +61,11 @@ namespace SharpCompress.Common.SevenZip
{
get
{
if (_type == null)
if (type == null)
{
_type = GetCompression();
type = GetCompression();
}
return _type.Value;
return type.Value;
}
}
@@ -85,7 +84,7 @@ namespace SharpCompress.Common.SevenZip
{
var coder = Folder.Coders.First();
switch (coder.MethodId.Id)
{
{
case k_LZMA:
case k_LZMA2:
{

View File

@@ -9,11 +9,6 @@ namespace SharpCompress.Common.Tar.Headers
{
internal static readonly DateTime Epoch = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
public TarHeader(ArchiveEncoding archiveEncoding)
{
ArchiveEncoding = archiveEncoding;
}
internal string Name { get; set; }
//internal int Mode { get; set; }
@@ -25,7 +20,6 @@ namespace SharpCompress.Common.Tar.Headers
internal DateTime LastModifiedTime { get; set; }
internal EntryType EntryType { get; set; }
internal Stream PackedStream { get; set; }
internal ArchiveEncoding ArchiveEncoding { get; }
internal const int BlockSize = 512;
@@ -37,7 +31,7 @@ namespace SharpCompress.Common.Tar.Headers
WriteOctalBytes(0, buffer, 108, 8); // owner ID
WriteOctalBytes(0, buffer, 116, 8); // group ID
//ArchiveEncoding.UTF8.GetBytes("magic").CopyTo(buffer, 257);
//Encoding.UTF8.GetBytes("magic").CopyTo(buffer, 257);
if (Name.Length > 100)
{
// Set mock filename and filetype to indicate the next block is the actual name of the file
@@ -78,7 +72,7 @@ namespace SharpCompress.Common.Tar.Headers
private void WriteLongFilenameHeader(Stream output)
{
byte[] nameBytes = ArchiveEncoding.Encode(Name);
byte[] nameBytes = ArchiveEncoding.Default.GetBytes(Name);
output.Write(nameBytes, 0, nameBytes.Length);
// pad to multiple of BlockSize bytes, and make sure a terminating null is added
@@ -105,7 +99,7 @@ namespace SharpCompress.Common.Tar.Headers
}
else
{
Name = ArchiveEncoding.Decode(buffer, 0, 100).TrimNulls();
Name = ArchiveEncoding.Default.GetString(buffer, 0, 100).TrimNulls();
}
EntryType = ReadEntryType(buffer);
@@ -117,12 +111,12 @@ namespace SharpCompress.Common.Tar.Headers
long unixTimeStamp = ReadASCIIInt64Base8(buffer, 136, 11);
LastModifiedTime = Epoch.AddSeconds(unixTimeStamp).ToLocalTime();
Magic = ArchiveEncoding.Decode(buffer, 257, 6).TrimNulls();
Magic = ArchiveEncoding.Default.GetString(buffer, 257, 6).TrimNulls();
if (!string.IsNullOrEmpty(Magic)
&& "ustar".Equals(Magic))
{
string namePrefix = ArchiveEncoding.Decode(buffer, 345, 157);
string namePrefix = ArchiveEncoding.Default.GetString(buffer, 345, 157);
namePrefix = namePrefix.TrimNulls();
if (!string.IsNullOrEmpty(namePrefix))
{
@@ -149,7 +143,7 @@ namespace SharpCompress.Common.Tar.Headers
{
reader.ReadBytes(remainingBytesToRead);
}
return ArchiveEncoding.Decode(nameBytes, 0, nameBytes.Length).TrimNulls();
return ArchiveEncoding.Default.GetString(nameBytes, 0, nameBytes.Length).TrimNulls();
}
private static EntryType ReadEntryType(byte[] buffer)
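
The TarHeader hunk above reads the modification time with ReadASCIIInt64Base8(buffer, 136, 11): tar numeric fields are ASCII octal digits, padded with NULs or spaces, at fixed offsets in the 512-byte header block. A minimal sketch of such a parser (the helper name and trimming are assumptions, not SharpCompress's actual implementation):

    using System;
    using System.Text;

    internal static class TarFieldExample
    {
        // Parses an ASCII-octal numeric field from a tar header block.
        public static long ReadOctal(byte[] header, int offset, int length)
        {
            string text = Encoding.ASCII.GetString(header, offset, length).Trim('\0', ' ');
            return text.Length == 0 ? 0 : Convert.ToInt64(text, 8);
        }

        // e.g. long mtime = ReadOctal(block, 136, 11); // same offsets as the call above
    }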

View File

@@ -3,7 +3,6 @@ using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
using System.Text;
namespace SharpCompress.Common.Tar
{
@@ -44,9 +43,9 @@ namespace SharpCompress.Common.Tar
internal override IEnumerable<FilePart> Parts => filePart.AsEnumerable<FilePart>();
internal static IEnumerable<TarEntry> GetEntries(StreamingMode mode, Stream stream,
CompressionType compressionType, ArchiveEncoding archiveEncoding)
CompressionType compressionType)
{
foreach (TarHeader h in TarHeaderFactory.ReadHeader(mode, stream, archiveEncoding))
foreach (TarHeader h in TarHeaderFactory.ReadHeader(mode, stream))
{
if (h != null)
{

View File

@@ -6,12 +6,11 @@ namespace SharpCompress.Common.Tar
{
internal class TarFilePart : FilePart
{
private readonly Stream _seekableStream;
private readonly Stream seekableStream;
internal TarFilePart(TarHeader header, Stream seekableStream)
: base(header.ArchiveEncoding)
{
this._seekableStream = seekableStream;
this.seekableStream = seekableStream;
Header = header;
}
@@ -21,10 +20,10 @@ namespace SharpCompress.Common.Tar
internal override Stream GetCompressedStream()
{
if (_seekableStream != null)
if (seekableStream != null)
{
_seekableStream.Position = Header.DataStartPosition.Value;
return new ReadOnlySubStream(_seekableStream, Header.Size);
seekableStream.Position = Header.DataStartPosition.Value;
return new ReadOnlySubStream(seekableStream, Header.Size);
}
return Header.PackedStream;
}

View File

@@ -2,13 +2,12 @@
using System.IO;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
using System.Text;
namespace SharpCompress.Common.Tar
{
internal static class TarHeaderFactory
{
internal static IEnumerable<TarHeader> ReadHeader(StreamingMode mode, Stream stream, ArchiveEncoding archiveEncoding)
internal static IEnumerable<TarHeader> ReadHeader(StreamingMode mode, Stream stream)
{
while (true)
{
@@ -16,8 +15,7 @@ namespace SharpCompress.Common.Tar
try
{
BinaryReader reader = new BinaryReader(stream);
header = new TarHeader(archiveEncoding);
header = new TarHeader();
if (!header.Read(reader))
{
yield break;
@@ -25,22 +23,22 @@ namespace SharpCompress.Common.Tar
switch (mode)
{
case StreamingMode.Seekable:
{
header.DataStartPosition = reader.BaseStream.Position;
{
header.DataStartPosition = reader.BaseStream.Position;
//skip to nearest 512
reader.BaseStream.Position += PadTo512(header.Size);
}
//skip to nearest 512
reader.BaseStream.Position += PadTo512(header.Size);
}
break;
case StreamingMode.Streaming:
{
header.PackedStream = new TarReadOnlySubStream(stream, header.Size);
}
{
header.PackedStream = new TarReadOnlySubStream(stream, header.Size);
}
break;
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
{
throw new InvalidFormatException("Invalid StreamingMode");
}
}
}
catch

View File

@@ -6,8 +6,8 @@ namespace SharpCompress.Common.Zip.Headers
{
internal class DirectoryEntryHeader : ZipFileEntry
{
public DirectoryEntryHeader(ArchiveEncoding archiveEncoding)
: base(ZipHeaderType.DirectoryEntry, archiveEncoding)
public DirectoryEntryHeader()
: base(ZipHeaderType.DirectoryEntry)
{
}
@@ -31,10 +31,10 @@ namespace SharpCompress.Common.Zip.Headers
RelativeOffsetOfEntryHeader = reader.ReadUInt32();
byte[] name = reader.ReadBytes(nameLength);
Name = ArchiveEncoding.Decode(name);
Name = DecodeString(name);
byte[] extra = reader.ReadBytes(extraLength);
byte[] comment = reader.ReadBytes(commentLength);
Comment = ArchiveEncoding.Decode(comment);
Comment = DecodeString(comment);
LoadExtra(extra);
var unicodePathExtra = Extra.FirstOrDefault(u => u.Type == ExtraDataType.UnicodePathExtraField);

View File

@@ -5,7 +5,6 @@ namespace SharpCompress.Common.Zip.Headers
[Flags]
internal enum HeaderFlags : ushort
{
None = 0,
Encrypted = 1, // http://www.pkware.com/documents/casestudies/APPNOTE.TXT
Bit1 = 2,
Bit2 = 4,

View File

@@ -1,13 +1,12 @@
using System.IO;
using System.Linq;
using System.Text;
namespace SharpCompress.Common.Zip.Headers
{
internal class LocalEntryHeader : ZipFileEntry
{
public LocalEntryHeader(ArchiveEncoding archiveEncoding)
: base(ZipHeaderType.LocalEntry, archiveEncoding)
public LocalEntryHeader()
: base(ZipHeaderType.LocalEntry)
{
}
@@ -25,7 +24,7 @@ namespace SharpCompress.Common.Zip.Headers
ushort extraLength = reader.ReadUInt16();
byte[] name = reader.ReadBytes(nameLength);
byte[] extra = reader.ReadBytes(extraLength);
Name = ArchiveEncoding.Decode(name);
Name = DecodeString(name);
LoadExtra(extra);
var unicodePathExtra = Extra.FirstOrDefault(u => u.Type == ExtraDataType.UnicodePathExtraField);

View File

@@ -8,11 +8,10 @@ namespace SharpCompress.Common.Zip.Headers
{
internal abstract class ZipFileEntry : ZipHeader
{
protected ZipFileEntry(ZipHeaderType type, ArchiveEncoding archiveEncoding)
protected ZipFileEntry(ZipHeaderType type)
: base(type)
{
Extra = new List<ExtraData>();
ArchiveEncoding = archiveEncoding;
}
internal bool IsDirectory
@@ -30,10 +29,27 @@ namespace SharpCompress.Common.Zip.Headers
&& Name.EndsWith("\\");
}
}
internal Stream PackedStream { get; set; }
internal ArchiveEncoding ArchiveEncoding { get; }
protected string DecodeString(byte[] str)
{
if (FlagUtility.HasFlag(Flags, HeaderFlags.UTF8))
{
return Encoding.UTF8.GetString(str, 0, str.Length);
}
return ArchiveEncoding.Default.GetString(str, 0, str.Length);
}
protected byte[] EncodeString(string str)
{
if (FlagUtility.HasFlag(Flags, HeaderFlags.UTF8))
{
return Encoding.UTF8.GetBytes(str);
}
return ArchiveEncoding.Default.GetBytes(str);
}
internal Stream PackedStream { get; set; }
internal string Name { get; set; }
@@ -48,7 +64,7 @@ namespace SharpCompress.Common.Zip.Headers
internal long UncompressedSize { get; set; }
internal List<ExtraData> Extra { get; set; }
public string Password { get; set; }
internal PkwareTraditionalEncryptionData ComposeEncryptionData(Stream archiveStream)
@@ -59,10 +75,10 @@ namespace SharpCompress.Common.Zip.Headers
}
var buffer = new byte[12];
archiveStream.ReadFully(buffer);
archiveStream.Read(buffer, 0, 12);
PkwareTraditionalEncryptionData encryptionData = PkwareTraditionalEncryptionData.ForRead(Password, this, buffer);
return encryptionData;
}
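
The DecodeString/EncodeString helpers above switch on HeaderFlags.UTF8. In the zip APPNOTE this is the "language encoding flag", general purpose bit 11 (mask 0x0800); assuming HeaderFlags.UTF8 maps to that bit, the same decision written against the raw flags word looks roughly like this (the code page 437 fallback is the historical zip default, not something this diff specifies):

    using System.Text;

    internal static class ZipNameDecoding
    {
        private const ushort Utf8Flag = 0x0800; // APPNOTE general purpose bit 11

        public static string DecodeName(byte[] raw, ushort flags, Encoding fallback)
        {
            // UTF-8 when the language encoding flag is set, otherwise the caller's
            // fallback (historically code page 437 or the archive's configured encoding).
            var encoding = (flags & Utf8Flag) != 0 ? Encoding.UTF8 : fallback;
            return encoding.GetString(raw, 0, raw.Length);
        }
    }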

View File

@@ -42,7 +42,7 @@ namespace SharpCompress.Common.Zip
if (buffer == null)
{
throw new ArgumentNullException(nameof(buffer));
throw new ArgumentNullException("buffer");
}
byte[] temp = new byte[count];

View File

@@ -9,11 +9,9 @@ namespace SharpCompress.Common.Zip
{
private static readonly CRC32 crc32 = new CRC32();
private readonly UInt32[] _Keys = {0x12345678, 0x23456789, 0x34567890};
private readonly ArchiveEncoding _archiveEncoding;
private PkwareTraditionalEncryptionData(string password, ArchiveEncoding archiveEncoding)
private PkwareTraditionalEncryptionData(string password)
{
_archiveEncoding = archiveEncoding;
Initialize(password);
}
@@ -29,7 +27,7 @@ namespace SharpCompress.Common.Zip
public static PkwareTraditionalEncryptionData ForRead(string password, ZipFileEntry header,
byte[] encryptionHeader)
{
var encryptor = new PkwareTraditionalEncryptionData(password, header.ArchiveEncoding);
var encryptor = new PkwareTraditionalEncryptionData(password);
byte[] plainTextHeader = encryptor.Decrypt(encryptionHeader, encryptionHeader.Length);
if (plainTextHeader[11] != (byte)((header.Crc >> 24) & 0xff))
{
@@ -49,7 +47,7 @@ namespace SharpCompress.Common.Zip
{
if (length > cipherText.Length)
{
throw new ArgumentOutOfRangeException(nameof(length),
throw new ArgumentOutOfRangeException("length",
"Bad length during Decryption: the length parameter must be smaller than or equal to the size of the destination array.");
}
@@ -72,7 +70,7 @@ namespace SharpCompress.Common.Zip
if (length > plainText.Length)
{
throw new ArgumentOutOfRangeException(nameof(length),
throw new ArgumentOutOfRangeException("length",
"Bad length during Encryption: The length parameter must be smaller than or equal to the size of the destination array.");
}
@@ -95,12 +93,17 @@ namespace SharpCompress.Common.Zip
}
}
internal byte[] StringToByteArray(string value)
internal static byte[] StringToByteArray(string value, Encoding encoding)
{
byte[] a = _archiveEncoding.Password.GetBytes(value);
byte[] a = encoding.GetBytes(value);
return a;
}
internal static byte[] StringToByteArray(string value)
{
return StringToByteArray(value, ArchiveEncoding.Password);
}
private void UpdateKeys(byte byteValue)
{
_Keys[0] = (UInt32)crc32.ComputeCrc32((int)_Keys[0], byteValue);
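
The _Keys initializer and the UpdateKeys fragment above follow PKWARE's "traditional" (ZipCrypto) encryption, described in APPNOTE section 6.1: three running keys seeded with 0x12345678, 0x23456789, 0x34567890 and updated per byte with a CRC-32 step and a linear-congruential step. A self-contained sketch of that key schedule (class and helper names are illustrative):

    using System;

    internal static class ZipCryptoKeys
    {
        private static readonly uint[] CrcTable = BuildCrcTable();

        public static uint[] InitialKeys() => new uint[] { 0x12345678, 0x23456789, 0x34567890 };

        // One key-schedule step per input byte, per APPNOTE 6.1.
        public static void UpdateKeys(uint[] keys, byte b)
        {
            keys[0] = Crc32(keys[0], b);
            keys[1] = keys[1] + (keys[0] & 0xFF);
            keys[1] = keys[1] * 134775813 + 1;
            keys[2] = Crc32(keys[2], (byte)(keys[1] >> 24));
        }

        // Standard zip CRC-32 byte update (reflected polynomial 0xEDB88320, no final XOR).
        private static uint Crc32(uint crc, byte b) => CrcTable[(crc ^ b) & 0xFF] ^ (crc >> 8);

        private static uint[] BuildCrcTable()
        {
            var table = new uint[256];
            for (uint n = 0; n < 256; n++)
            {
                uint c = n;
                for (int k = 0; k < 8; k++)
                {
                    c = (c & 1) != 0 ? 0xEDB88320 ^ (c >> 1) : c >> 1;
                }
                table[n] = c;
            }
            return table;
        }
    }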

View File

@@ -5,21 +5,21 @@ namespace SharpCompress.Common.Zip
{
internal class SeekableZipFilePart : ZipFilePart
{
private bool _isLocalHeaderLoaded;
private readonly SeekableZipHeaderFactory _headerFactory;
private bool isLocalHeaderLoaded;
private readonly SeekableZipHeaderFactory headerFactory;
internal SeekableZipFilePart(SeekableZipHeaderFactory headerFactory, DirectoryEntryHeader header, Stream stream)
: base(header, stream)
{
this._headerFactory = headerFactory;
this.headerFactory = headerFactory;
}
internal override Stream GetCompressedStream()
{
if (!_isLocalHeaderLoaded)
if (!isLocalHeaderLoaded)
{
LoadLocalHeader();
_isLocalHeaderLoaded = true;
isLocalHeaderLoaded = true;
}
return base.GetCompressedStream();
}
@@ -29,7 +29,7 @@ namespace SharpCompress.Common.Zip
private void LoadLocalHeader()
{
bool hasData = Header.HasData;
Header = _headerFactory.GetLocalHeader(BaseStream, Header as DirectoryEntryHeader);
Header = headerFactory.GetLocalHeader(BaseStream, Header as DirectoryEntryHeader);
Header.HasData = hasData;
}

View File

@@ -3,17 +3,16 @@ using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
using System.Text;
namespace SharpCompress.Common.Zip
{
internal class SeekableZipHeaderFactory : ZipHeaderFactory
{
private const int MAX_ITERATIONS_FOR_DIRECTORY_HEADER = 4096;
private bool _zip64;
private bool zip64;
internal SeekableZipHeaderFactory(string password, ArchiveEncoding archiveEncoding)
: base(StreamingMode.Seekable, password, archiveEncoding)
internal SeekableZipHeaderFactory(string password)
: base(StreamingMode.Seekable, password)
{
}
@@ -27,14 +26,14 @@ namespace SharpCompress.Common.Zip
if (entry.IsZip64)
{
_zip64 = true;
zip64 = true;
SeekBackToHeader(stream, reader, ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR);
var zip64Locator = new Zip64DirectoryEndLocatorHeader();
zip64Locator.Read(reader);
stream.Seek(zip64Locator.RelativeOffsetOfTheEndOfDirectoryRecord, SeekOrigin.Begin);
uint zip64Signature = reader.ReadUInt32();
if (zip64Signature != ZIP64_END_OF_CENTRAL_DIRECTORY)
if(zip64Signature != ZIP64_END_OF_CENTRAL_DIRECTORY)
throw new ArchiveException("Failed to locate the Zip64 Header");
var zip64Entry = new Zip64DirectoryEndHeader();
@@ -51,7 +50,7 @@ namespace SharpCompress.Common.Zip
{
stream.Position = position;
uint signature = reader.ReadUInt32();
var directoryEntryHeader = ReadHeader(signature, reader, _zip64) as DirectoryEntryHeader;
var directoryEntryHeader = ReadHeader(signature, reader, zip64) as DirectoryEntryHeader;
position = stream.Position;
if (directoryEntryHeader == null)
{
@@ -92,7 +91,7 @@ namespace SharpCompress.Common.Zip
stream.Seek(directoryEntryHeader.RelativeOffsetOfEntryHeader, SeekOrigin.Begin);
BinaryReader reader = new BinaryReader(stream);
uint signature = reader.ReadUInt32();
var localEntryHeader = ReadHeader(signature, reader, _zip64) as LocalEntryHeader;
var localEntryHeader = ReadHeader(signature, reader, zip64) as LocalEntryHeader;
if (localEntryHeader == null)
{
throw new InvalidOperationException();
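
SeekableZipHeaderFactory above works backwards from the end of the stream: the zip end-of-central-directory record sits at the tail of the archive but can be preceded by a variable-length comment, so SeekBackToHeader scans backward for its signature (capped here by MAX_ITERATIONS_FOR_DIRECTORY_HEADER = 4096). A hedged sketch of such a scan; the method shape is an assumption, only the signature value 0x06054b50 ("PK\x05\x06") comes from the zip APPNOTE:

    using System.IO;

    internal static class SignatureScan
    {
        // Steps back one byte at a time from the end of the stream until the
        // little-endian signature is found, giving up after maxIterations bytes.
        public static bool SeekBackToSignature(Stream stream, uint signature, int maxIterations)
        {
            var reader = new BinaryReader(stream);
            long position = stream.Length - 4;
            for (int i = 0; i < maxIterations && position >= 0; i++, position--)
            {
                stream.Position = position;
                if (reader.ReadUInt32() == signature)
                {
                    return true; // stream is now positioned just past the signature
                }
            }
            return false;
        }
    }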

View File

@@ -39,20 +39,19 @@ namespace SharpCompress.Common.Zip
{
return new BinaryReader(rewindableStream);
}
if (Header.HasData && !Skipped)
if (Header.HasData)
{
if (decompressionStream == null)
{
decompressionStream = GetCompressedStream();
}
decompressionStream.Skip();
decompressionStream.SkipAll();
DeflateStream deflateStream = decompressionStream as DeflateStream;
if (deflateStream != null)
{
rewindableStream.Rewind(deflateStream.InputBuffer);
}
Skipped = true;
}
var reader = new BinaryReader(rewindableStream);
decompressionStream = null;

View File

@@ -2,14 +2,13 @@
using System.IO;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
using System.Text;
namespace SharpCompress.Common.Zip
{
internal class StreamingZipHeaderFactory : ZipHeaderFactory
{
internal StreamingZipHeaderFactory(string password, ArchiveEncoding archiveEncoding)
: base(StreamingMode.Streaming, password, archiveEncoding)
internal StreamingZipHeaderFactory(string password)
: base(StreamingMode.Streaming, password)
{
}

View File

@@ -78,7 +78,7 @@ namespace SharpCompress.Common.Zip
{
//read out last 10 auth bytes
var ten = new byte[10];
stream.ReadFully(ten);
stream.Read(ten, 0, 10);
stream.Dispose();
}
}

View File

@@ -15,7 +15,6 @@ namespace SharpCompress.Common.Zip
internal abstract class ZipFilePart : FilePart
{
internal ZipFilePart(ZipFileEntry header, Stream stream)
: base(header.ArchiveEncoding)
{
Header = header;
header.Part = this;
@@ -89,7 +88,7 @@ namespace SharpCompress.Common.Zip
case ZipCompressionMethod.PPMd:
{
var props = new byte[2];
stream.ReadFully(props);
stream.Read(props, 0, props.Length);
return new PpmdStream(new PpmdProperties(props), stream, false);
}
case ZipCompressionMethod.WinzipAes:
@@ -176,6 +175,7 @@ namespace SharpCompress.Common.Zip
}
}
return plainStream;
}
}

View File

@@ -5,7 +5,6 @@ using System.Linq;
#endif
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
using System.Text;
namespace SharpCompress.Common.Zip
{
@@ -24,13 +23,11 @@ namespace SharpCompress.Common.Zip
protected LocalEntryHeader lastEntryHeader;
private readonly string password;
private readonly StreamingMode mode;
private readonly ArchiveEncoding archiveEncoding;
protected ZipHeaderFactory(StreamingMode mode, string password, ArchiveEncoding archiveEncoding)
protected ZipHeaderFactory(StreamingMode mode, string password)
{
this.mode = mode;
this.password = password;
this.archiveEncoding = archiveEncoding;
}
protected ZipHeader ReadHeader(uint headerBytes, BinaryReader reader, bool zip64 = false)
@@ -39,7 +36,7 @@ namespace SharpCompress.Common.Zip
{
case ENTRY_HEADER_BYTES:
{
var entryHeader = new LocalEntryHeader(archiveEncoding);
var entryHeader = new LocalEntryHeader();
entryHeader.Read(reader);
LoadHeader(entryHeader, reader.BaseStream);
@@ -48,48 +45,48 @@ namespace SharpCompress.Common.Zip
}
case DIRECTORY_START_HEADER_BYTES:
{
var entry = new DirectoryEntryHeader(archiveEncoding);
var entry = new DirectoryEntryHeader();
entry.Read(reader);
return entry;
}
case POST_DATA_DESCRIPTOR:
{
if (FlagUtility.HasFlag(lastEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor))
{
if (FlagUtility.HasFlag(lastEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor))
{
lastEntryHeader.Crc = reader.ReadUInt32();
lastEntryHeader.CompressedSize = zip64 ? (long)reader.ReadUInt64() : reader.ReadUInt32();
lastEntryHeader.UncompressedSize = zip64 ? (long)reader.ReadUInt64() : reader.ReadUInt32();
}
else
{
reader.ReadBytes(zip64 ? 20 : 12);
}
return null;
lastEntryHeader.Crc = reader.ReadUInt32();
lastEntryHeader.CompressedSize = zip64 ? (long)reader.ReadUInt64() : reader.ReadUInt32();
lastEntryHeader.UncompressedSize = zip64 ? (long)reader.ReadUInt64() : reader.ReadUInt32();
}
else
{
reader.ReadBytes(zip64 ? 20 : 12);
}
return null;
}
case DIGITAL_SIGNATURE:
return null;
case DIRECTORY_END_HEADER_BYTES:
{
var entry = new DirectoryEndHeader();
entry.Read(reader);
return entry;
}
{
var entry = new DirectoryEndHeader();
entry.Read(reader);
return entry;
}
case SPLIT_ARCHIVE_HEADER_BYTES:
{
return new SplitHeader();
}
{
return new SplitHeader();
}
case ZIP64_END_OF_CENTRAL_DIRECTORY:
{
var entry = new Zip64DirectoryEndHeader();
entry.Read(reader);
return entry;
}
{
var entry = new Zip64DirectoryEndHeader();
entry.Read(reader);
return entry;
}
case ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR:
{
var entry = new Zip64DirectoryEndLocatorHeader();
entry.Read(reader);
return entry;
}
{
var entry = new Zip64DirectoryEndLocatorHeader();
entry.Read(reader);
return entry;
}
default:
throw new NotSupportedException("Unknown header: " + headerBytes);
}
@@ -168,22 +165,22 @@ namespace SharpCompress.Common.Zip
switch (mode)
{
case StreamingMode.Seekable:
{
entryHeader.DataStartPosition = stream.Position;
stream.Position += entryHeader.CompressedSize;
break;
}
{
entryHeader.DataStartPosition = stream.Position;
stream.Position += entryHeader.CompressedSize;
break;
}
case StreamingMode.Streaming:
{
entryHeader.PackedStream = stream;
break;
}
{
entryHeader.PackedStream = stream;
break;
}
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
{
throw new InvalidFormatException("Invalid StreamingMode");
}
}
//}

View File

@@ -105,19 +105,19 @@ namespace SharpCompress.Compressors.ADC
}
if (buffer == null)
{
throw new ArgumentNullException(nameof(buffer));
throw new ArgumentNullException("buffer");
}
if (count < 0)
{
throw new ArgumentOutOfRangeException(nameof(count));
throw new ArgumentOutOfRangeException("count");
}
if (offset < buffer.GetLowerBound(0))
{
throw new ArgumentOutOfRangeException(nameof(offset));
throw new ArgumentOutOfRangeException("offset");
}
if ((offset + count) > buffer.GetLength(0))
{
throw new ArgumentOutOfRangeException(nameof(count));
throw new ArgumentOutOfRangeException("count");
}
int size = -1;

View File

@@ -26,7 +26,6 @@
using System;
using System.IO;
using System.Text;
namespace SharpCompress.Compressors.Deflate
{
@@ -37,10 +36,9 @@ namespace SharpCompress.Compressors.Deflate
public DeflateStream(Stream stream, CompressionMode mode,
CompressionLevel level = CompressionLevel.Default,
bool leaveOpen = false,
Encoding forceEncoding = null)
bool leaveOpen = false)
{
_baseStream = new ZlibBaseStream(stream, mode, level, ZlibStreamFlavor.DEFLATE, leaveOpen, forceEncoding);
_baseStream = new ZlibBaseStream(stream, mode, level, ZlibStreamFlavor.DEFLATE, leaveOpen);
}
#region Zlib properties

View File

@@ -30,45 +30,41 @@ using System;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Converters;
using System.Text;
namespace SharpCompress.Compressors.Deflate
{
public class GZipStream : Stream
{
internal static readonly DateTime UNIX_EPOCH = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
internal static readonly DateTime UnixEpoch = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
public DateTime? LastModified { get; set; }
private string _comment;
private string _fileName;
private string comment;
private string fileName;
internal ZlibBaseStream BaseStream;
private bool _disposed;
private bool _firstReadDone;
private int _headerByteCount;
private readonly Encoding _encoding;
private bool disposed;
private bool firstReadDone;
private int headerByteCount;
public GZipStream(Stream stream, CompressionMode mode)
: this(stream, mode, CompressionLevel.Default, false, Encoding.UTF8)
: this(stream, mode, CompressionLevel.Default, false)
{
}
public GZipStream(Stream stream, CompressionMode mode, CompressionLevel level)
: this(stream, mode, level, false, Encoding.UTF8)
: this(stream, mode, level, false)
{
}
public GZipStream(Stream stream, CompressionMode mode, bool leaveOpen)
: this(stream, mode, CompressionLevel.Default, leaveOpen, Encoding.UTF8)
: this(stream, mode, CompressionLevel.Default, leaveOpen)
{
}
public GZipStream(Stream stream, CompressionMode mode, CompressionLevel level, bool leaveOpen, Encoding encoding)
public GZipStream(Stream stream, CompressionMode mode, CompressionLevel level, bool leaveOpen)
{
BaseStream = new ZlibBaseStream(stream, mode, level, ZlibStreamFlavor.GZIP, leaveOpen, encoding);
_encoding = encoding;
BaseStream = new ZlibBaseStream(stream, mode, level, ZlibStreamFlavor.GZIP, leaveOpen);
}
#region Zlib properties
@@ -78,7 +74,7 @@ namespace SharpCompress.Compressors.Deflate
get => (BaseStream._flushMode);
set
{
if (_disposed)
if (disposed)
{
throw new ObjectDisposedException("GZipStream");
}
@@ -91,7 +87,7 @@ namespace SharpCompress.Compressors.Deflate
get => BaseStream._bufferSize;
set
{
if (_disposed)
if (disposed)
{
throw new ObjectDisposedException("GZipStream");
}
@@ -127,7 +123,7 @@ namespace SharpCompress.Compressors.Deflate
{
get
{
if (_disposed)
if (disposed)
{
throw new ObjectDisposedException("GZipStream");
}
@@ -153,7 +149,7 @@ namespace SharpCompress.Compressors.Deflate
{
get
{
if (_disposed)
if (disposed)
{
throw new ObjectDisposedException("GZipStream");
}
@@ -183,7 +179,7 @@ namespace SharpCompress.Compressors.Deflate
{
if (BaseStream._streamMode == ZlibBaseStream.StreamMode.Writer)
{
return BaseStream._z.TotalBytesOut + _headerByteCount;
return BaseStream._z.TotalBytesOut + headerByteCount;
}
if (BaseStream._streamMode == ZlibBaseStream.StreamMode.Reader)
{
@@ -206,14 +202,14 @@ namespace SharpCompress.Compressors.Deflate
{
try
{
if (!_disposed)
if (!disposed)
{
if (disposing && (BaseStream != null))
{
BaseStream.Dispose();
Crc32 = BaseStream.Crc32;
}
_disposed = true;
disposed = true;
}
}
finally
@@ -227,7 +223,7 @@ namespace SharpCompress.Compressors.Deflate
/// </summary>
public override void Flush()
{
if (_disposed)
if (disposed)
{
throw new ObjectDisposedException("GZipStream");
}
@@ -267,7 +263,7 @@ namespace SharpCompress.Compressors.Deflate
/// <returns>the number of bytes actually read</returns>
public override int Read(byte[] buffer, int offset, int count)
{
if (_disposed)
if (disposed)
{
throw new ObjectDisposedException("GZipStream");
}
@@ -276,9 +272,9 @@ namespace SharpCompress.Compressors.Deflate
// Console.WriteLine("GZipStream::Read(buffer, off({0}), c({1}) = {2}", offset, count, n);
// Console.WriteLine( Util.FormatByteArray(buffer, offset, n) );
if (!_firstReadDone)
if (!firstReadDone)
{
_firstReadDone = true;
firstReadDone = true;
FileName = BaseStream._GzipFileName;
Comment = BaseStream._GzipComment;
}
@@ -329,7 +325,7 @@ namespace SharpCompress.Compressors.Deflate
/// <param name="count">the number of bytes to write.</param>
public override void Write(byte[] buffer, int offset, int count)
{
if (_disposed)
if (disposed)
{
throw new ObjectDisposedException("GZipStream");
}
@@ -339,7 +335,7 @@ namespace SharpCompress.Compressors.Deflate
if (BaseStream._wantCompress)
{
// first write in compression, therefore, emit the GZIP header
_headerByteCount = EmitHeader();
headerByteCount = EmitHeader();
}
else
{
@@ -350,56 +346,56 @@ namespace SharpCompress.Compressors.Deflate
BaseStream.Write(buffer, offset, count);
}
#endregion Stream methods
#endregion
public String Comment
{
get => _comment;
get => comment;
set
{
if (_disposed)
if (disposed)
{
throw new ObjectDisposedException("GZipStream");
}
_comment = value;
comment = value;
}
}
public string FileName
{
get => _fileName;
get => fileName;
set
{
if (_disposed)
if (disposed)
{
throw new ObjectDisposedException("GZipStream");
}
_fileName = value;
if (_fileName == null)
fileName = value;
if (fileName == null)
{
return;
}
if (_fileName.IndexOf("/") != -1)
if (fileName.IndexOf("/") != -1)
{
_fileName = _fileName.Replace("/", "\\");
fileName = fileName.Replace("/", "\\");
}
if (_fileName.EndsWith("\\"))
if (fileName.EndsWith("\\"))
{
throw new InvalidOperationException("Illegal filename");
}
var index = _fileName.IndexOf("\\");
var index = fileName.IndexOf("\\");
if (index != -1)
{
// trim any leading path
int length = _fileName.Length;
int length = fileName.Length;
int num = length;
while (--num >= 0)
{
char c = _fileName[num];
char c = fileName[num];
if (c == '\\')
{
_fileName = _fileName.Substring(num + 1, length - num - 1);
fileName = fileName.Substring(num + 1, length - num - 1);
}
}
}
@@ -410,10 +406,8 @@ namespace SharpCompress.Compressors.Deflate
private int EmitHeader()
{
byte[] commentBytes = (Comment == null) ? null
: _encoding.GetBytes(Comment);
byte[] filenameBytes = (FileName == null) ? null
: _encoding.GetBytes(FileName);
byte[] commentBytes = (Comment == null) ? null : ArchiveEncoding.Default.GetBytes(Comment);
byte[] filenameBytes = (FileName == null) ? null : ArchiveEncoding.Default.GetBytes(FileName);
int cbLength = (Comment == null) ? 0 : commentBytes.Length + 1;
int fnLength = (FileName == null) ? 0 : filenameBytes.Length + 1;
@@ -446,7 +440,7 @@ namespace SharpCompress.Compressors.Deflate
{
LastModified = DateTime.Now;
}
TimeSpan delta = LastModified.Value - UNIX_EPOCH;
TimeSpan delta = LastModified.Value - UnixEpoch;
var timet = (Int32)delta.TotalSeconds;
DataConverter.LittleEndian.PutBytes(header, i, timet);
i += 4;

View File

@@ -1,20 +1,20 @@
// ZlibBaseStream.cs
// ------------------------------------------------------------------
//
// Copyright (c) 2009 Dino Chiesa and Microsoft Corporation.
// Copyright (c) 2009 Dino Chiesa and Microsoft Corporation.
// All rights reserved.
//
// This code module is part of DotNetZip, a zipfile class library.
//
// ------------------------------------------------------------------
//
// This code is licensed under the Microsoft Public License.
// This code is licensed under the Microsoft Public License.
// See the file License.txt for the license details.
// More info on: http://dotnetzip.codeplex.com
//
// ------------------------------------------------------------------
//
// last saved (in emacs):
// last saved (in emacs):
// Time-stamp: <2009-October-28 15:45:15>
//
// ------------------------------------------------------------------
@@ -30,7 +30,6 @@ using System.IO;
using SharpCompress.Common;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Converters;
using System.Text;
namespace SharpCompress.Compressors.Deflate
{
@@ -65,8 +64,6 @@ namespace SharpCompress.Compressors.Deflate
protected internal DateTime _GzipMtime;
protected internal int _gzipHeaderByteCount;
private readonly Encoding _encoding;
internal int Crc32
{
get
@@ -83,8 +80,7 @@ namespace SharpCompress.Compressors.Deflate
CompressionMode compressionMode,
CompressionLevel level,
ZlibStreamFlavor flavor,
bool leaveOpen,
Encoding encoding)
bool leaveOpen)
{
_flushMode = FlushType.None;
@@ -95,8 +91,6 @@ namespace SharpCompress.Compressors.Deflate
_flavor = flavor;
_level = level;
_encoding = encoding;
// workitem 7159
if (flavor == ZlibStreamFlavor.GZIP)
{
@@ -424,8 +418,8 @@ namespace SharpCompress.Compressors.Deflate
}
}
while (!done);
byte[] buffer = list.ToArray();
return _encoding.GetString(buffer, 0, buffer.Length);
byte[] a = list.ToArray();
return ArchiveEncoding.Default.GetString(a, 0, a.Length);
}
private int _ReadAndValidateGzipHeader()
@@ -534,19 +528,19 @@ namespace SharpCompress.Compressors.Deflate
}
if (buffer == null)
{
throw new ArgumentNullException(nameof(buffer));
throw new ArgumentNullException("buffer");
}
if (count < 0)
{
throw new ArgumentOutOfRangeException(nameof(count));
throw new ArgumentOutOfRangeException("count");
}
if (offset < buffer.GetLowerBound(0))
{
throw new ArgumentOutOfRangeException(nameof(offset));
throw new ArgumentOutOfRangeException("offset");
}
if ((offset + count) > buffer.GetLength(0))
{
throw new ArgumentOutOfRangeException(nameof(count));
throw new ArgumentOutOfRangeException("count");
}
int rc = 0;
@@ -599,7 +593,7 @@ namespace SharpCompress.Compressors.Deflate
while (_z.AvailableBytesOut > 0 && !nomoreinput && rc == ZlibConstants.Z_OK);
// workitem 8557
// is there more room in output?
// is there more room in output?
if (_z.AvailableBytesOut > 0)
{
if (rc == ZlibConstants.Z_OK && _z.AvailableBytesIn == 0)

View File

@@ -27,7 +27,6 @@
using System;
using System.IO;
using System.Text;
namespace SharpCompress.Compressors.Deflate
{
@@ -37,23 +36,23 @@ namespace SharpCompress.Compressors.Deflate
private bool _disposed;
public ZlibStream(Stream stream, CompressionMode mode)
: this(stream, mode, CompressionLevel.Default, false, Encoding.UTF8)
: this(stream, mode, CompressionLevel.Default, false)
{
}
public ZlibStream(Stream stream, CompressionMode mode, CompressionLevel level)
: this(stream, mode, level, false, Encoding.UTF8)
: this(stream, mode, level, false)
{
}
public ZlibStream(Stream stream, CompressionMode mode, bool leaveOpen)
: this(stream, mode, CompressionLevel.Default, leaveOpen, Encoding.UTF8)
: this(stream, mode, CompressionLevel.Default, leaveOpen)
{
}
public ZlibStream(Stream stream, CompressionMode mode, CompressionLevel level, bool leaveOpen, Encoding encoding)
public ZlibStream(Stream stream, CompressionMode mode, CompressionLevel level, bool leaveOpen)
{
_baseStream = new ZlibBaseStream(stream, mode, level, ZlibStreamFlavor.ZLIB, leaveOpen, encoding);
_baseStream = new ZlibBaseStream(stream, mode, level, ZlibStreamFlavor.ZLIB, leaveOpen);
}
#region Zlib properties
@@ -327,6 +326,6 @@ namespace SharpCompress.Compressors.Deflate
_baseStream.Write(buffer, offset, count);
}
#endregion System.IO.Stream methods
#endregion
}
}

View File

@@ -58,7 +58,7 @@ namespace SharpCompress.Compressors.LZMA
{
if (index < 0 || index >= Length)
{
throw new ArgumentOutOfRangeException(nameof(index));
throw new ArgumentOutOfRangeException("index");
}
return (mBits[index >> 5] & (1u << (index & 31))) != 0;
@@ -69,7 +69,7 @@ namespace SharpCompress.Compressors.LZMA
{
if (index < 0 || index >= Length)
{
throw new ArgumentOutOfRangeException(nameof(index));
throw new ArgumentOutOfRangeException("index");
}
mBits[index >> 5] |= 1u << (index & 31);
@@ -79,7 +79,7 @@ namespace SharpCompress.Compressors.LZMA
{
if (index < 0 || index >= Length)
{
throw new ArgumentOutOfRangeException(nameof(index));
throw new ArgumentOutOfRangeException("index");
}
uint bits = mBits[index >> 5];

View File

@@ -58,22 +58,22 @@ namespace SharpCompress.Compressors.LZMA.Utilites
{
if (stream == null)
{
throw new ArgumentNullException(nameof(stream));
throw new ArgumentNullException("stream");
}
if (buffer == null)
{
throw new ArgumentNullException(nameof(buffer));
throw new ArgumentNullException("buffer");
}
if (offset < 0 || offset > buffer.Length)
{
throw new ArgumentOutOfRangeException(nameof(offset));
throw new ArgumentOutOfRangeException("offset");
}
if (length < 0 || length > buffer.Length - offset)
{
throw new ArgumentOutOfRangeException(nameof(length));
throw new ArgumentOutOfRangeException("length");
}
while (length > 0)

View File

@@ -146,12 +146,12 @@ namespace SharpCompress.Compressors.PPMd.I1
{
if (target == null)
{
throw new ArgumentNullException(nameof(target));
throw new ArgumentNullException("target");
}
if (source == null)
{
throw new ArgumentNullException(nameof(source));
throw new ArgumentNullException("source");
}
EncodeStart(properties);
@@ -235,12 +235,12 @@ namespace SharpCompress.Compressors.PPMd.I1
{
if (target == null)
{
throw new ArgumentNullException(nameof(target));
throw new ArgumentNullException("target");
}
if (source == null)
{
throw new ArgumentNullException(nameof(source));
throw new ArgumentNullException("source");
}
DecodeStart(source, properties);

View File

@@ -18,11 +18,9 @@ namespace SharpCompress.Compressors.Xz
public static int ReadLittleEndianInt32(this Stream stream)
{
byte[] bytes = new byte[4];
var read = stream.ReadFully(bytes);
if (!read)
{
var read = stream.Read(bytes, 0, 4);
if (read != 4)
throw new EndOfStreamException();
}
return (bytes[0] + (bytes[1] << 8) + (bytes[2] << 16) + (bytes[3] << 24));
}
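A note on the hunk above: Stream.Read may return fewer bytes than requested even when more data is coming, which is presumably why the other side of this change used a ReadFully helper. A minimal standalone sketch of a defensive variant, not part of this diff (the type and method names are illustrative, not the library's API):

    using System.IO;

    internal static class LittleEndianStreamReader
    {
        // Reads exactly four bytes, looping on short reads, then assembles
        // them as a little-endian Int32 (byte 0 is least significant).
        public static int ReadInt32LittleEndian(Stream stream)
        {
            var bytes = new byte[4];
            int total = 0;
            while (total < 4)
            {
                int read = stream.Read(bytes, total, 4 - total);
                if (read <= 0)
                {
                    throw new EndOfStreamException();
                }
                total += read;
            }
            return bytes[0] | (bytes[1] << 8) | (bytes[2] << 16) | (bytes[3] << 24);
        }
    }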

View File

@@ -156,7 +156,7 @@ namespace SharpCompress.Converters
{
if (dest == null)
{
throw new ArgumentNullException(nameof(dest));
throw new ArgumentNullException("dest");
}
if (destIdx < 0 || destIdx > dest.Length - size)
{
@@ -170,7 +170,7 @@ namespace SharpCompress.Converters
{
if (data == null)
{
throw new ArgumentNullException(nameof(data));
throw new ArgumentNullException("data");
}
if (data.Length - index < 8)
{
@@ -195,7 +195,7 @@ namespace SharpCompress.Converters
{
if (data == null)
{
throw new ArgumentNullException(nameof(data));
throw new ArgumentNullException("data");
}
if (data.Length - index < 8)
{
@@ -221,7 +221,7 @@ namespace SharpCompress.Converters
{
if (data == null)
{
throw new ArgumentNullException(nameof(data));
throw new ArgumentNullException("data");
}
if (data.Length - index < 8)
{
@@ -247,7 +247,7 @@ namespace SharpCompress.Converters
{
if (data == null)
{
throw new ArgumentNullException(nameof(data));
throw new ArgumentNullException("data");
}
if (data.Length - index < 4)
{
@@ -273,7 +273,7 @@ namespace SharpCompress.Converters
{
if (data == null)
{
throw new ArgumentNullException(nameof(data));
throw new ArgumentNullException("data");
}
if (data.Length - index < 4)
{
@@ -299,7 +299,7 @@ namespace SharpCompress.Converters
{
if (data == null)
{
throw new ArgumentNullException(nameof(data));
throw new ArgumentNullException("data");
}
if (data.Length - index < 4)
{
@@ -325,7 +325,7 @@ namespace SharpCompress.Converters
{
if (data == null)
{
throw new ArgumentNullException(nameof(data));
throw new ArgumentNullException("data");
}
if (data.Length - index < 2)
{
@@ -351,7 +351,7 @@ namespace SharpCompress.Converters
{
if (data == null)
{
throw new ArgumentNullException(nameof(data));
throw new ArgumentNullException("data");
}
if (data.Length - index < 2)
{
@@ -468,7 +468,7 @@ namespace SharpCompress.Converters
{
if (data == null)
{
throw new ArgumentNullException(nameof(data));
throw new ArgumentNullException("data");
}
if (data.Length - index < 8)
{
@@ -494,7 +494,7 @@ namespace SharpCompress.Converters
{
if (data == null)
{
throw new ArgumentNullException(nameof(data));
throw new ArgumentNullException("data");
}
if (data.Length - index < 8)
{
@@ -520,7 +520,7 @@ namespace SharpCompress.Converters
{
if (data == null)
{
throw new ArgumentNullException(nameof(data));
throw new ArgumentNullException("data");
}
if (data.Length - index < 8)
{
@@ -546,7 +546,7 @@ namespace SharpCompress.Converters
{
if (data == null)
{
throw new ArgumentNullException(nameof(data));
throw new ArgumentNullException("data");
}
if (data.Length - index < 4)
{
@@ -572,7 +572,7 @@ namespace SharpCompress.Converters
{
if (data == null)
{
throw new ArgumentNullException(nameof(data));
throw new ArgumentNullException("data");
}
if (data.Length - index < 4)
{
@@ -598,7 +598,7 @@ namespace SharpCompress.Converters
{
if (data == null)
{
throw new ArgumentNullException(nameof(data));
throw new ArgumentNullException("data");
}
if (data.Length - index < 4)
{
@@ -624,7 +624,7 @@ namespace SharpCompress.Converters
{
if (data == null)
{
throw new ArgumentNullException(nameof(data));
throw new ArgumentNullException("data");
}
if (data.Length - index < 2)
{
@@ -650,7 +650,7 @@ namespace SharpCompress.Converters
{
if (data == null)
{
throw new ArgumentNullException(nameof(data));
throw new ArgumentNullException("data");
}
if (data.Length - index < 2)
{

View File

@@ -12,7 +12,7 @@ namespace Org.BouncyCastle.Crypto.Parameters
{
if (key == null)
{
throw new ArgumentNullException(nameof(key));
throw new ArgumentNullException("key");
}
this.key = (byte[])key.Clone();
@@ -25,15 +25,15 @@ namespace Org.BouncyCastle.Crypto.Parameters
{
if (key == null)
{
throw new ArgumentNullException(nameof(key));
throw new ArgumentNullException("key");
}
if (keyOff < 0 || keyOff > key.Length)
{
throw new ArgumentOutOfRangeException(nameof(keyOff));
throw new ArgumentOutOfRangeException("keyOff");
}
if (keyLen < 0 || (keyOff + keyLen) > key.Length)
{
throw new ArgumentOutOfRangeException(nameof(keyLen));
throw new ArgumentOutOfRangeException("keyLen");
}
this.key = new byte[keyLen];

View File

@@ -139,6 +139,8 @@ namespace SharpCompress.Readers
}
}
private readonly byte[] skipBuffer = new byte[4096];
private void Skip()
{
if (ArchiveType != ArchiveType.Rar
@@ -146,21 +148,25 @@ namespace SharpCompress.Readers
&& Entry.CompressedSize > 0)
{
//not solid and has a known compressed size then we can skip raw bytes.
var part = Entry.Parts.First();
var rawStream = part.GetRawStream();
var rawStream = Entry.Parts.First().GetRawStream();
if (rawStream != null)
{
var bytesToAdvance = Entry.CompressedSize;
rawStream.Skip(bytesToAdvance);
part.Skipped = true;
for (var i = 0; i < bytesToAdvance / skipBuffer.Length; i++)
{
rawStream.Read(skipBuffer, 0, skipBuffer.Length);
}
rawStream.Read(skipBuffer, 0, (int)(bytesToAdvance % skipBuffer.Length));
return;
}
}
//don't know the size so we have to try to decompress to skip
using (var s = OpenEntryStream())
{
s.Skip();
while (s.Read(skipBuffer, 0, skipBuffer.Length) > 0)
{
}
}
}
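For reference, the loop above discards a known number of compressed bytes by reading them into a reusable 4 KiB buffer rather than calling a raw-stream Skip. A minimal standalone sketch of the same idea, not part of this diff (the extension name and buffer size are illustrative, not the library's API; unlike the hunk, it also checks Read's return value so a short read or early end of stream cannot derail the count):

    using System;
    using System.IO;

    internal static class SkipExtensions
    {
        // Advances a forward-only stream by discarding bytesToSkip bytes.
        public static void SkipByReading(this Stream source, long bytesToSkip)
        {
            var buffer = new byte[4096];
            while (bytesToSkip > 0)
            {
                int request = (int)Math.Min(buffer.Length, bytesToSkip);
                int read = source.Read(buffer, 0, request);
                if (read <= 0)
                {
                    break; // stream ended before the requested count was reached
                }
                bytesToSkip -= read;
            }
        }
    }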

View File

@@ -29,11 +29,11 @@ namespace SharpCompress.Readers.GZip
return new GZipReader(stream, options ?? new ReaderOptions());
}
#endregion Open
#endregion
internal override IEnumerable<GZipEntry> GetEntries(Stream stream)
{
return GZipEntry.GetEntries(stream, Options);
return GZipEntry.GetEntries(stream);
}
}
}

View File

@@ -8,7 +8,6 @@ namespace SharpCompress.Readers
/// Look for RarArchive (Check for self-extracting archives or cases where RarArchive isn't at the start of the file)
/// </summary>
public bool LookForHeader { get; set; }
public string Password { get; set; }
}
}

View File

@@ -114,11 +114,11 @@ namespace SharpCompress.Readers.Tar
return new TarReader(rewindableStream, options, CompressionType.None);
}
#endregion Open
#endregion
internal override IEnumerable<TarEntry> GetEntries(Stream stream)
{
return TarEntry.GetEntries(StreamingMode.Streaming, stream, compressionType, Options.ArchiveEncoding);
return TarEntry.GetEntries(StreamingMode.Streaming, stream, compressionType);
}
}
}

View File

@@ -8,13 +8,13 @@ namespace SharpCompress.Readers.Zip
{
public class ZipReader : AbstractReader<ZipEntry, ZipVolume>
{
private readonly StreamingZipHeaderFactory _headerFactory;
private readonly StreamingZipHeaderFactory headerFactory;
internal ZipReader(Stream stream, ReaderOptions options)
: base(options, ArchiveType.Zip)
{
Volume = new ZipVolume(stream, options);
_headerFactory = new StreamingZipHeaderFactory(options.Password, options.ArchiveEncoding);
headerFactory = new StreamingZipHeaderFactory(options.Password);
}
public override ZipVolume Volume { get; }
@@ -33,26 +33,26 @@ namespace SharpCompress.Readers.Zip
return new ZipReader(stream, options ?? new ReaderOptions());
}
#endregion Open
#endregion
internal override IEnumerable<ZipEntry> GetEntries(Stream stream)
{
foreach (ZipHeader h in _headerFactory.ReadStreamHeader(stream))
foreach (ZipHeader h in headerFactory.ReadStreamHeader(stream))
{
if (h != null)
{
switch (h.ZipHeaderType)
{
case ZipHeaderType.LocalEntry:
{
yield return new ZipEntry(new StreamingZipFilePart(h as LocalEntryHeader,
stream));
}
break;
case ZipHeaderType.DirectoryEnd:
{
yield break;
}
}
}
}

View File

@@ -2,9 +2,9 @@
<PropertyGroup>
<AssemblyTitle>SharpCompress - Pure C# Decompression/Compression</AssemblyTitle>
<NeutralLanguage>en-US</NeutralLanguage>
<VersionPrefix>0.18.1</VersionPrefix>
<AssemblyVersion>0.18.1.0</AssemblyVersion>
<FileVersion>0.18.1.0</FileVersion>
<VersionPrefix>0.17.0</VersionPrefix>
<AssemblyVersion>0.17.0.0</AssemblyVersion>
<FileVersion>0.17.0.0</FileVersion>
<Authors>Adam Hathcock</Authors>
<TargetFrameworks Condition="'$(LibraryFrameworks)'==''">net45;net35;netstandard1.0;netstandard1.3</TargetFrameworks>
<TargetFrameworks Condition="'$(LibraryFrameworks)'!=''">$(LibraryFrameworks)</TargetFrameworks>
@@ -25,4 +25,4 @@
<PropertyGroup Condition=" '$(TargetFramework)' == 'netstandard1.0' ">
<DefineConstants>$(DefineConstants);NO_FILE;NO_CRYPTO;SILVERLIGHT</DefineConstants>
</PropertyGroup>
</Project>

View File

@@ -7,7 +7,7 @@ using SharpCompress.Readers;
namespace SharpCompress
{
internal static class Utility
{
public static ReadOnlyCollection<T> ToReadOnly<T>(this IEnumerable<T> items)
{
return new ReadOnlyCollection<T>(items.ToList());
@@ -138,7 +138,7 @@ namespace SharpCompress
public static void Skip(this Stream source, long advanceAmount)
{
byte[] buffer = GetTransferByteArray();
byte[] buffer = new byte[32 * 1024];
int read = 0;
int readCount = 0;
do
@@ -162,9 +162,9 @@ namespace SharpCompress
while (true);
}
public static void Skip(this Stream source)
public static void SkipAll(this Stream source)
{
byte[] buffer = GetTransferByteArray();
byte[] buffer = new byte[32 * 1024];
do
{
}

View File

@@ -6,30 +6,29 @@ namespace SharpCompress.Writers
{
public abstract class AbstractWriter : IWriter
{
private bool closeStream;
private bool isDisposed;
protected AbstractWriter(ArchiveType type, WriterOptions writerOptions)
protected AbstractWriter(ArchiveType type)
{
WriterType = type;
WriterOptions = writerOptions;
}
protected void InitalizeStream(Stream stream)
protected void InitalizeStream(Stream stream, bool closeStream)
{
OutputStream = stream;
this.closeStream = closeStream;
}
protected Stream OutputStream { get; private set; }
public ArchiveType WriterType { get; }
protected WriterOptions WriterOptions { get; }
public abstract void Write(string filename, Stream source, DateTime? modificationTime);
protected virtual void Dispose(bool isDisposing)
{
if (isDisposing && !WriterOptions.LeaveStreamOpen)
if (isDisposing && closeStream)
{
OutputStream.Dispose();
}

View File

@@ -8,15 +8,12 @@ namespace SharpCompress.Writers.GZip
{
public class GZipWriter : AbstractWriter
{
private bool _wroteToStream;
private bool wroteToStream;
public GZipWriter(Stream destination, GZipWriterOptions options = null)
: base(ArchiveType.GZip, options ?? new GZipWriterOptions())
public GZipWriter(Stream destination, bool leaveOpen = false)
: base(ArchiveType.GZip)
{
InitalizeStream(new GZipStream(destination, CompressionMode.Compress,
options?.CompressionLevel ?? CompressionLevel.Default,
WriterOptions.LeaveStreamOpen,
WriterOptions.ArchiveEncoding.GetEncoding()));
InitalizeStream(new GZipStream(destination, CompressionMode.Compress, leaveOpen), !leaveOpen);
}
protected override void Dispose(bool isDisposing)
@@ -31,7 +28,7 @@ namespace SharpCompress.Writers.GZip
public override void Write(string filename, Stream source, DateTime? modificationTime)
{
if (_wroteToStream)
if (wroteToStream)
{
throw new ArgumentException("Can only write a single stream to a GZip file.");
}
@@ -39,7 +36,7 @@ namespace SharpCompress.Writers.GZip
stream.FileName = filename;
stream.LastModified = modificationTime;
source.TransferTo(stream);
_wroteToStream = true;
wroteToStream = true;
}
}
}

View File

@@ -1,28 +0,0 @@
using SharpCompress.Common;
using SharpCompress.Compressors.Deflate;
namespace SharpCompress.Writers.GZip
{
public class GZipWriterOptions : WriterOptions
{
public GZipWriterOptions()
: base(CompressionType.GZip)
{
}
internal GZipWriterOptions(WriterOptions options)
: base(options.CompressionType)
{
LeaveStreamOpen = options.LeaveStreamOpen;
ArchiveEncoding = options.ArchiveEncoding;
var writerOptions = options as GZipWriterOptions;
if (writerOptions != null)
{
CompressionLevel = writerOptions.CompressionLevel;
}
}
public CompressionLevel CompressionLevel { get; set; } = CompressionLevel.Default;
}
}

View File

@@ -12,7 +12,7 @@ namespace SharpCompress.Writers.Tar
public class TarWriter : AbstractWriter
{
public TarWriter(Stream destination, WriterOptions options)
: base(ArchiveType.Tar, options)
: base(ArchiveType.Tar)
{
if (!destination.CanWrite)
{
@@ -42,7 +42,7 @@ namespace SharpCompress.Writers.Tar
throw new InvalidFormatException("Tar does not support compression: " + options.CompressionType);
}
}
InitalizeStream(destination);
InitalizeStream(destination, true);
}
public override void Write(string filename, Stream source, DateTime? modificationTime)
@@ -72,8 +72,7 @@ namespace SharpCompress.Writers.Tar
long realSize = size ?? source.Length;
TarHeader header = new TarHeader(WriterOptions.ArchiveEncoding);
TarHeader header = new TarHeader();
header.LastModifiedTime = modificationTime ?? TarHeader.Epoch;
header.Name = NormalizeFilename(filename);
header.Size = realSize;

View File

@@ -19,7 +19,7 @@ namespace SharpCompress.Writers
{
throw new InvalidFormatException("GZip archives only support GZip compression type.");
}
return new GZipWriter(stream, new GZipWriterOptions(writerOptions));
return new GZipWriter(stream, writerOptions.LeaveStreamOpen);
}
case ArchiveType.Zip:
{

View File

@@ -1,7 +1,6 @@
using System;
using System.IO;
using System.Text;
using SharpCompress.Common;
using SharpCompress.Common.Zip;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Converters;
@@ -12,16 +11,14 @@ namespace SharpCompress.Writers.Zip
{
private readonly ZipCompressionMethod compression;
private readonly string fileName;
private readonly ArchiveEncoding archiveEncoding;
public ZipCentralDirectoryEntry(ZipCompressionMethod compression, string fileName, ulong headerOffset, ArchiveEncoding archiveEncoding)
public ZipCentralDirectoryEntry(ZipCompressionMethod compression, string fileName, ulong headerOffset)
{
this.compression = compression;
this.fileName = fileName;
HeaderOffset = headerOffset;
this.archiveEncoding = archiveEncoding;
}
internal DateTime? ModificationTime { get; set; }
internal string Comment { get; set; }
internal uint Crc { get; set; }
@@ -32,11 +29,11 @@ namespace SharpCompress.Writers.Zip
internal uint Write(Stream outputStream)
{
byte[] encodedFilename = archiveEncoding.Encode(fileName);
byte[] encodedComment = archiveEncoding.Encode(Comment);
byte[] encodedFilename = Encoding.UTF8.GetBytes(fileName);
byte[] encodedComment = Encoding.UTF8.GetBytes(Comment);
var zip64_stream = Compressed >= uint.MaxValue || Decompressed >= uint.MaxValue;
var zip64 = zip64_stream || HeaderOffset >= uint.MaxValue || Zip64HeaderOffset != 0;
var compressedvalue = zip64 ? uint.MaxValue : (uint)Compressed;
var decompressedvalue = zip64 ? uint.MaxValue : (uint)Decompressed;
@@ -44,18 +41,18 @@ namespace SharpCompress.Writers.Zip
var extralength = zip64 ? (2 + 2 + 8 + 8 + 8 + 4) : 0;
var version = (byte)(zip64 ? 45 : 20); // Version 20 required for deflate/encryption
HeaderFlags flags = Equals(archiveEncoding.GetEncoding(), Encoding.UTF8) ? HeaderFlags.UTF8 : HeaderFlags.None;
HeaderFlags flags = HeaderFlags.UTF8;
if (!outputStream.CanSeek)
{
// Cannot use data descriptors with zip64:
// https://blogs.oracle.com/xuemingshen/entry/is_zipinput_outputstream_handling_of
// We check that streams are not written too large in the ZipWritingStream,
// so this extra guard is not required, but kept to simplify changing the code
// once the zip64 post-data issue is resolved
if (!zip64_stream)
flags |= HeaderFlags.UsePostDataDescriptor;
if (compression == ZipCompressionMethod.LZMA)
{
flags |= HeaderFlags.Bit1; // eos marker

View File

@@ -26,7 +26,7 @@ namespace SharpCompress.Writers.Zip
private readonly bool isZip64;
public ZipWriter(Stream destination, ZipWriterOptions zipWriterOptions)
: base(ArchiveType.Zip, zipWriterOptions)
: base(ArchiveType.Zip)
{
zipComment = zipWriterOptions.ArchiveComment ?? string.Empty;
isZip64 = zipWriterOptions.UseZip64;
@@ -37,7 +37,7 @@ namespace SharpCompress.Writers.Zip
compressionType = zipWriterOptions.CompressionType;
compressionLevel = zipWriterOptions.DeflateCompressionLevel;
InitalizeStream(destination);
InitalizeStream(destination, !zipWriterOptions.LeaveStreamOpen);
}
private PpmdProperties PpmdProperties
@@ -65,7 +65,6 @@ namespace SharpCompress.Writers.Zip
}
base.Dispose(isDisposing);
}
private static ZipCompressionMethod ToZipCompressionMethod(CompressionType compressionType)
{
switch (compressionType)
@@ -98,9 +97,9 @@ namespace SharpCompress.Writers.Zip
public override void Write(string entryPath, Stream source, DateTime? modificationTime)
{
Write(entryPath, source, new ZipWriterEntryOptions()
{
ModificationDateTime = modificationTime
});
}
public void Write(string entryPath, Stream source, ZipWriterEntryOptions zipWriterEntryOptions)
@@ -118,11 +117,11 @@ namespace SharpCompress.Writers.Zip
entryPath = NormalizeFilename(entryPath);
options.ModificationDateTime = options.ModificationDateTime ?? DateTime.Now;
options.EntryComment = options.EntryComment ?? string.Empty;
var entry = new ZipCentralDirectoryEntry(compression, entryPath, (ulong)streamPosition, WriterOptions.ArchiveEncoding)
{
Comment = options.EntryComment,
ModificationTime = options.ModificationDateTime
};
var entry = new ZipCentralDirectoryEntry(compression, entryPath, (ulong)streamPosition)
{
Comment = options.EntryComment,
ModificationTime = options.ModificationDateTime
};
// Use the archive default setting for zip64 and allow overrides
var useZip64 = isZip64;
@@ -131,7 +130,7 @@ namespace SharpCompress.Writers.Zip
var headersize = (uint)WriteHeader(entryPath, options, entry, useZip64);
streamPosition += headersize;
return new ZipWritingStream(this, OutputStream, entry, compression,
options.DeflateCompressionLevel ?? compressionLevel);
}
@@ -150,12 +149,12 @@ namespace SharpCompress.Writers.Zip
private int WriteHeader(string filename, ZipWriterEntryOptions zipWriterEntryOptions, ZipCentralDirectoryEntry entry, bool useZip64)
{
// We err on the side of caution until the zip specification clarifies how to support this
if (!OutputStream.CanSeek && useZip64)
throw new NotSupportedException("Zip64 extensions are not supported on non-seekable streams");
var explicitZipCompressionInfo = ToZipCompressionMethod(zipWriterEntryOptions.CompressionType ?? compressionType);
byte[] encodedFilename = WriterOptions.ArchiveEncoding.Encode(filename);
byte[] encodedFilename = ArchiveEncoding.Default.GetBytes(filename);
OutputStream.Write(DataConverter.LittleEndian.GetBytes(ZipHeaderFactory.ENTRY_HEADER_BYTES), 0, 4);
if (explicitZipCompressionInfo == ZipCompressionMethod.Deflate)
@@ -163,17 +162,17 @@ namespace SharpCompress.Writers.Zip
if (OutputStream.CanSeek && useZip64)
OutputStream.Write(new byte[] { 45, 0 }, 0, 2); //smallest allowed version for zip64
else
OutputStream.Write(new byte[] { 20, 0 }, 0, 2); //older version which is more compatible
}
else
{
OutputStream.Write(new byte[] { 63, 0 }, 0, 2); //version says we used PPMd or LZMA
}
HeaderFlags flags = Equals(WriterOptions.ArchiveEncoding.GetEncoding(), Encoding.UTF8) ? HeaderFlags.UTF8 : 0;
HeaderFlags flags = ArchiveEncoding.Default == Encoding.UTF8 ? HeaderFlags.UTF8 : 0;
if (!OutputStream.CanSeek)
{
flags |= HeaderFlags.UsePostDataDescriptor;
if (explicitZipCompressionInfo == ZipCompressionMethod.LZMA)
{
flags |= HeaderFlags.Bit1; // eos marker
@@ -214,11 +213,11 @@ namespace SharpCompress.Writers.Zip
private void WriteEndRecord(ulong size)
{
byte[] encodedComment = WriterOptions.ArchiveEncoding.Encode(zipComment);
byte[] encodedComment = ArchiveEncoding.Default.GetBytes(zipComment);
var zip64 = isZip64 || entries.Count > ushort.MaxValue || streamPosition >= uint.MaxValue || size >= uint.MaxValue;
var sizevalue = size >= uint.MaxValue ? uint.MaxValue : (uint)size;
var streampositionvalue = streamPosition >= uint.MaxValue ? uint.MaxValue : (uint)streamPosition;
if (zip64)
{
@@ -251,7 +250,7 @@ namespace SharpCompress.Writers.Zip
}
// Write normal end of central directory record
OutputStream.Write(new byte[] { 80, 75, 5, 6, 0, 0, 0, 0 }, 0, 8);
OutputStream.Write(new byte[] {80, 75, 5, 6, 0, 0, 0, 0}, 0, 8);
OutputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)entries.Count), 0, 2);
OutputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)entries.Count), 0, 2);
OutputStream.Write(DataConverter.LittleEndian.GetBytes(sizevalue), 0, 4);
@@ -274,10 +273,10 @@ namespace SharpCompress.Writers.Zip
private CountingWritableSubStream counting;
private ulong decompressed;
// Flag to prevent throwing exceptions on Dispose
private bool limitsExceeded;
internal ZipWritingStream(ZipWriter writer, Stream originalStream, ZipCentralDirectoryEntry entry,
ZipCompressionMethod zipCompressionMethod, CompressionLevel compressionLevel)
{
this.writer = writer;
@@ -306,39 +305,39 @@ namespace SharpCompress.Writers.Zip
switch (zipCompressionMethod)
{
case ZipCompressionMethod.None:
{
return output;
}
case ZipCompressionMethod.Deflate:
{
return new DeflateStream(counting, CompressionMode.Compress, compressionLevel,
true);
}
case ZipCompressionMethod.BZip2:
{
return new BZip2Stream(counting, CompressionMode.Compress, true);
}
case ZipCompressionMethod.LZMA:
{
counting.WriteByte(9);
counting.WriteByte(20);
counting.WriteByte(5);
counting.WriteByte(0);
LzmaStream lzmaStream = new LzmaStream(new LzmaEncoderProperties(!originalStream.CanSeek),
false, counting);
counting.Write(lzmaStream.Properties, 0, lzmaStream.Properties.Length);
return lzmaStream;
}
case ZipCompressionMethod.PPMd:
{
counting.Write(writer.PpmdProperties.Properties, 0, 2);
return new PpmdStream(writer.PpmdProperties, counting, true);
}
default:
{
throw new NotSupportedException("CompressionMethod: " + zipCompressionMethod);
}
}
}
@@ -349,65 +348,65 @@ namespace SharpCompress.Writers.Zip
{
writeStream.Dispose();
if (limitsExceeded)
{
// We have written invalid data into the archive,
// so we destroy it now, instead of allowing the user to continue
// with a defunct archive
originalStream.Dispose();
return;
}
entry.Crc = (uint)crc.Crc32Result;
entry.Compressed = counting.Count;
entry.Decompressed = decompressed;
var zip64 = entry.Compressed >= uint.MaxValue || entry.Decompressed >= uint.MaxValue;
var compressedvalue = zip64 ? uint.MaxValue : (uint)counting.Count;
var decompressedvalue = zip64 ? uint.MaxValue : (uint)entry.Decompressed;
if (originalStream.CanSeek)
{
originalStream.Position = (long)(entry.HeaderOffset + 6);
originalStream.WriteByte(0);
originalStream.Position = (long)(entry.HeaderOffset + 14);
writer.WriteFooter(entry.Crc, compressedvalue, decompressedvalue);
// Ideally, we should not throw from Dispose()
// We should not get here as the Write call checks the limits
if (zip64 && entry.Zip64HeaderOffset == 0)
throw new NotSupportedException("Attempted to write a stream that is larger than 4GiB without setting the zip64 option");
// If we have pre-allocated space for zip64 data,
// fill it out, even if it is not required
if (entry.Zip64HeaderOffset != 0)
{
originalStream.Position = (long)(entry.HeaderOffset + entry.Zip64HeaderOffset);
originalStream.Write(DataConverter.LittleEndian.GetBytes((ushort)0x0001), 0, 2);
originalStream.Write(DataConverter.LittleEndian.GetBytes((ushort)(8 + 8)), 0, 2);
originalStream.Write(DataConverter.LittleEndian.GetBytes(entry.Decompressed), 0, 8);
originalStream.Write(DataConverter.LittleEndian.GetBytes(entry.Compressed), 0, 8);
}
originalStream.Position = writer.streamPosition + (long)entry.Compressed;
writer.streamPosition += (long)entry.Compressed;
}
else
{
// We have a streaming archive, so we should add a post-data-descriptor,
// but we cannot as it does not hold the zip64 values
// Throwing an exception until the zip specification is clarified
// Ideally, we should not throw from Dispose()
// We should not get here as the Write call checks the limits
if (zip64)
throw new NotSupportedException("Streams larger than 4GiB are not supported for non-seekable streams");
originalStream.Write(DataConverter.LittleEndian.GetBytes(ZipHeaderFactory.POST_DATA_DESCRIPTOR), 0, 4);
writer.WriteFooter(entry.Crc,
(uint)compressedvalue,
(uint)decompressedvalue);
writer.streamPosition += (long)entry.Compressed + 16;
@@ -438,35 +437,36 @@ namespace SharpCompress.Writers.Zip
public override void Write(byte[] buffer, int offset, int count)
{
// We check the limits first, because we can keep the archive consistent
// if we can prevent the writes from happening
if (entry.Zip64HeaderOffset == 0)
{
// Pre-check, the counting.Count is not exact, as we do not know the size before having actually compressed it
if (limitsExceeded || ((decompressed + (uint)count) > uint.MaxValue) || (counting.Count + (uint)count) > uint.MaxValue)
throw new NotSupportedException("Attempted to write a stream that is larger than 4GiB without setting the zip64 option");
}
decompressed += (uint)count;
crc.SlurpBlock(buffer, offset, count);
writeStream.Write(buffer, offset, count);
if (entry.Zip64HeaderOffset == 0)
{
// Post-check, this is accurate
if ((decompressed > uint.MaxValue) || counting.Count > uint.MaxValue)
{
// We have written the data, so the archive is now broken
// Throwing the exception here, allows us to avoid
// throwing an exception in Dispose() which is discouraged
// as it can mask other errors
limitsExceeded = true;
throw new NotSupportedException("Attempted to write a stream that is larger than 4GiB without setting the zip64 option");
}
}
}
}
#endregion Nested type: ZipWritingStream
#endregion
}
}

View File

@@ -15,15 +15,8 @@ namespace SharpCompress.Writers.Zip
: base(options.CompressionType)
{
LeaveStreamOpen = options.LeaveStreamOpen;
ArchiveEncoding = options.ArchiveEncoding;
var writerOptions = options as ZipWriterOptions;
if (writerOptions != null)
{
UseZip64 = writerOptions.UseZip64;
DeflateCompressionLevel = writerOptions.DeflateCompressionLevel;
ArchiveComment = writerOptions.ArchiveComment;
}
if (options is ZipWriterOptions)
UseZip64 = ((ZipWriterOptions)options).UseZip64;
}
/// <summary>
/// When CompressionType.Deflate is used, this property is referenced. Defaults to CompressionLevel.Default.

View File

@@ -0,0 +1,32 @@
using System;
using System.Collections.Generic;
using System.IO;
using CommandLine;
namespace SharpCompress
{
public class BaseOptions
{
[Value(0, Min = 1)]
public IEnumerable<string> Path { get; set; }
protected IEnumerable<FileInfo> GetFilesFromPath()
{
foreach (var s in Path)
{
var fileInfo = new FileInfo(s);
if (fileInfo.Exists)
{
yield return fileInfo;
}
else
{
using (ConsoleHelper.PushError())
{
Console.WriteLine($"{s} does not exist");
}
}
}
}
}
}

View File

@@ -0,0 +1,32 @@
using System;
namespace SharpCompress
{
public static class ConsoleHelper
{
private class ConsoleTextPush : IDisposable
{
private readonly ConsoleColor _restoreColor;
public ConsoleTextPush(ConsoleColor displayColor)
{
_restoreColor = Console.ForegroundColor;
Console.ForegroundColor = displayColor;
}
public void Dispose()
{
Console.ForegroundColor = _restoreColor;
}
}
public static IDisposable PushForeground(ConsoleColor color)
{
return new ConsoleTextPush(color);
}
public static IDisposable PushError()
{
return PushForeground(ConsoleColor.Red);
}
}
}

View File

@@ -0,0 +1,37 @@
using System;
using CommandLine;
using SharpCompress.Readers;
namespace SharpCompress
{
[Verb("x", HelpText = "Extract an archive")]
public class ExtractOptions : BaseOptions
{
[Option('p', HelpText = "Path to extract to")]
public string ExtractionPath { get; set; } = AppContext.BaseDirectory;
public int Process()
{
foreach (var fileInfo in GetFilesFromPath())
{
Console.WriteLine($"Extracting archive {fileInfo.FullName} to path: {ExtractionPath}");
using (var reader = ReaderFactory.Open(fileInfo.OpenRead()))
{
while (reader.MoveToNextEntry())
{
var progress = new ProgressBar();
reader.EntryExtractionProgress += (sender, args) =>
{
progress.Report(args.ReaderProgress.PercentageReadExact);
};
Console.Write($"Extracting entry {reader.Entry.Key}: ");
reader.WriteEntryToDirectory(ExtractionPath);
Console.WriteLine();
}
}
}
return 1;
}
}
}
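The ProgressBar used above presumably comes from the goblinfactory.konsole package referenced in the tool's project file further down. As a rough stand-in showing the reporting pattern, not part of this diff and not that package's API, a console reporter only needs a Report method that accepts the percentage:

    using System;

    internal sealed class ConsoleProgress
    {
        private int _lastPercent = -1;

        // Prints the percentage in place, skipping repeats to limit console writes.
        public void Report(double percentage)
        {
            var percent = (int)percentage;
            if (percent == _lastPercent)
            {
                return;
            }
            _lastPercent = percent;
            Console.Write($"\r{percent,3}%");
        }
    }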

View File

@@ -0,0 +1,56 @@
using System;
using CommandLine;
using SharpCompress.Archives;
using SharpCompress.Common;
namespace SharpCompress
{
[Verb("i", HelpText = "Information about an archive")]
public class InfoOptions : BaseOptions
{
[Option('e', HelpText = "Show Archive Entry Information")]
public bool ShowEntries { get; set; }
public int Process()
{
foreach (var fileInfo in GetFilesFromPath())
{
Console.WriteLine($"=== Archive: {fileInfo}");
try
{
using (var archive = ArchiveFactory.Open(fileInfo.OpenRead()))
{
Console.WriteLine($"Archive Type: {archive.Type}");
Console.WriteLine($"Size: {archive.TotalSize}");
Console.WriteLine($"Uncompressed Size: {archive.TotalUncompressSize}");
if (ShowEntries)
{
foreach (var archiveEntry in archive.Entries)
{
Console.WriteLine($"\tEntry: {archiveEntry.Key}");
}
}
}
}
catch (InvalidFormatException)
{
using (ConsoleHelper.PushError())
{
Console.WriteLine("Archive Type is unknown.");
}
}
catch (Exception e)
{
using (ConsoleHelper.PushError())
{
Console.WriteLine($"Unhandled Error: {e}");
return 1;
}
}
}
return 0;
}
}
}

View File

@@ -0,0 +1,16 @@
using CommandLine;
namespace SharpCompress
{
public class Program
{
public static int Main(string[] args)
{
return Parser.Default.ParseArguments<InfoOptions, ExtractOptions>(args)
.MapResult(
(InfoOptions opts) => opts.Process(),
(ExtractOptions opts) => opts.Process(),
errs => 1);
}
}
}

View File

@@ -0,0 +1,19 @@
<Project Sdk="Microsoft.NET.Sdk" ToolsVersion="15.0">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>netcoreapp1.0</TargetFramework>
<RootNamespace>SharpCompress</RootNamespace>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\SharpCompress\SharpCompress.csproj">
<Project>{fd19ddd8-72b2-4024-8665-0d1f7a2aa998}</Project>
<Name>SharpCompress</Name>
</ProjectReference>
</ItemGroup>
<ItemGroup>
<PackageReference Include="CommandLineParser">
<Version>2.1.1-beta</Version>
</PackageReference>
<PackageReference Include="goblinfactory.konsole" Version="2.0.2" />
</ItemGroup>
</Project>

View File

@@ -22,7 +22,4 @@
<ItemGroup>
<Service Include="{82a7f48d-3b50-4b1e-b82e-3ada8210c358}" />
</ItemGroup>
<ItemGroup>
<Folder Include="Xz\" />
</ItemGroup>
</Project>

View File

@@ -1,32 +0,0 @@
using SharpCompress.Compressors.Xz;
using System;
using System.Text;
using Xunit;
namespace SharpCompress.Test.Xz
{
public class Crc32Tests
{
private const string SimpleString = @"The quick brown fox jumps over the lazy dog.";
private readonly byte[] SimpleBytes = Encoding.ASCII.GetBytes(SimpleString);
private const string SimpleString2 = @"Life moves pretty fast. If you don't stop and look around once in a while, you could miss it.";
private readonly byte[] SimpleBytes2 = Encoding.ASCII.GetBytes(SimpleString2);
[Fact]
public void ShortAsciiString()
{
var actual = Crc32.Compute(SimpleBytes);
Assert.Equal((UInt32)0x519025e9, actual);
}
[Fact]
public void ShortAsciiString2()
{
var actual = Crc32.Compute(SimpleBytes2);
Assert.Equal((UInt32)0x6ee3ad88, actual);
}
}
}

View File

@@ -1,32 +0,0 @@
using SharpCompress.Compressors.Xz;
using System;
using System.Text;
using Xunit;
namespace SharpCompress.Test.Xz
{
public class Crc64Tests
{
private const string SimpleString = @"The quick brown fox jumps over the lazy dog.";
private readonly byte[] SimpleBytes = Encoding.ASCII.GetBytes(SimpleString);
private const string SimpleString2 = @"Life moves pretty fast. If you don't stop and look around once in a while, you could miss it.";
private readonly byte[] SimpleBytes2 = Encoding.ASCII.GetBytes(SimpleString2);
[Fact]
public void ShortAsciiString()
{
var actual = Crc64.Compute(SimpleBytes);
Assert.Equal((UInt64)0x7E210EB1B03E5A1D, actual);
}
[Fact]
public void ShortAsciiString2()
{
var actual = Crc64.Compute(SimpleBytes2);
Assert.Equal((UInt64)0x416B4150508661EE, actual);
}
}
}

View File

@@ -1,72 +0,0 @@
using System;
using Xunit;
using System.IO;
using SharpCompress.Compressors.Xz.Filters;
namespace SharpCompress.Test.Xz.Filters
{
public class Lzma2Tests : XZTestsBase
{
Lzma2Filter filter;
public Lzma2Tests()
{
filter = new Lzma2Filter();
}
[Fact]
public void IsOnlyAllowedLast()
{
Assert.True(filter.AllowAsLast);
Assert.False(filter.AllowAsNonLast);
}
[Fact]
public void ChangesStreamSize()
{
Assert.True(filter.ChangesDataSize);
}
[Theory]
[InlineData(0, (uint)4 * 1024)]
[InlineData(1, (uint)6 * 1024)]
[InlineData(2, (uint)8 * 1024)]
[InlineData(3, (uint)12 * 1024)]
[InlineData(38, (uint)2 * 1024 * 1024 * 1024)]
[InlineData(39, (uint)3 * 1024 * 1024 * 1024)]
[InlineData(40, (uint)(1024 * 1024 * 1024 - 1) * 4 + 3)]
public void CalculatesDictionarySize(byte inByte, uint dicSize)
{
filter.Init(new[] { inByte });
Assert.Equal(filter.DictionarySize, dicSize);
}
[Fact]
public void CalculatesDictionarySizeError()
{
uint temp;
filter.Init(new byte[] { 41 });
var ex = Assert.Throws<OverflowException>(() =>
{
temp = filter.DictionarySize;
});
Assert.Equal("Dictionary size greater than UInt32.Max", ex.Message);
}
[Theory]
[InlineData(new byte[] { })]
[InlineData(new byte[] { 0, 0 })]
public void OnlyAcceptsOneByte(byte[] bytes)
{
var ex = Assert.Throws<InvalidDataException>(() => filter.Init(bytes));
Assert.Equal("LZMA properties unexpected length", ex.Message);
}
[Fact]
public void ReservedBytesThrow()
{
var ex = Assert.Throws<InvalidDataException>(() => filter.Init(new byte[] { 0xC0 }));
Assert.Equal("Reserved bits used in LZMA properties", ex.Message);
}
}
}

View File

@@ -1,73 +0,0 @@
using System.Text;
using System.IO;
using SharpCompress.Compressors.Xz;
using Xunit;
namespace SharpCompress.Test.Xz
{
public class XZBlockTests : XZTestsBase
{
protected override void Rewind(Stream stream)
{
stream.Position = 12;
}
private byte[] ReadBytes(XZBlock block, int bytesToRead)
{
byte[] buffer = new byte[bytesToRead];
var read = block.Read(buffer, 0, bytesToRead);
if (read != bytesToRead)
throw new EndOfStreamException();
return buffer;
}
[Fact]
public void OnFindIndexBlockThrow()
{
var bytes = new byte[] { 0 };
using (Stream indexBlockStream = new MemoryStream(bytes))
{
var XZBlock = new XZBlock(indexBlockStream, CheckType.CRC64, 8);
Assert.Throws<XZIndexMarkerReachedException>(() => { ReadBytes(XZBlock, 1); });
}
}
[Fact]
public void CrcIncorrectThrows()
{
var bytes = Compressed.Clone() as byte[];
bytes[20]++;
using (Stream badCrcStream = new MemoryStream(bytes))
{
Rewind(badCrcStream);
var XZBlock = new XZBlock(badCrcStream, CheckType.CRC64, 8);
var ex = Assert.Throws<InvalidDataException>(() => { ReadBytes(XZBlock, 1); });
Assert.Equal("Block header corrupt", ex.Message);
}
}
[Fact]
public void CanReadM()
{
var XZBlock = new XZBlock(CompressedStream, CheckType.CRC64, 8);
Assert.Equal(Encoding.ASCII.GetBytes("M"), ReadBytes(XZBlock, 1));
}
[Fact]
public void CanReadMary()
{
var XZBlock = new XZBlock(CompressedStream, CheckType.CRC64, 8);
Assert.Equal(Encoding.ASCII.GetBytes("M"), ReadBytes(XZBlock, 1));
Assert.Equal(Encoding.ASCII.GetBytes("a"), ReadBytes(XZBlock, 1));
Assert.Equal(Encoding.ASCII.GetBytes("ry"), ReadBytes(XZBlock, 2));
}
[Fact]
public void CanReadPoemWithStreamReader()
{
var XZBlock = new XZBlock(CompressedStream, CheckType.CRC64, 8);
var sr = new StreamReader(XZBlock);
Assert.Equal(sr.ReadToEnd(), Original);
}
}
}

View File

@@ -1,78 +0,0 @@
using SharpCompress.Compressors.Xz;
using System.IO;
using Xunit;
namespace SharpCompress.Test.Xz
{
public class XZHeaderTests : XZTestsBase
{
[Fact]
public void ChecksMagicNumber()
{
var bytes = Compressed.Clone() as byte[];
bytes[3]++;
using (Stream badMagicNumberStream = new MemoryStream(bytes))
{
BinaryReader br = new BinaryReader(badMagicNumberStream);
var header = new XZHeader(br);
var ex = Assert.Throws<InvalidDataException>(() => { header.Process(); });
Assert.Equal("Invalid XZ Stream", ex.Message);
}
}
[Fact]
public void CorruptHeaderThrows()
{
var bytes = Compressed.Clone() as byte[];
bytes[8]++;
using (Stream badCrcStream = new MemoryStream(bytes))
{
BinaryReader br = new BinaryReader(badCrcStream);
var header = new XZHeader(br);
var ex = Assert.Throws<InvalidDataException>(() => { header.Process(); });
Assert.Equal("Stream header corrupt", ex.Message);
}
}
[Fact]
public void BadVersionIfCrcOkButStreamFlagUnknown() {
var bytes = Compressed.Clone() as byte[];
byte[] streamFlags = { 0x00, 0xF4 };
byte[] crc = Crc32.Compute(streamFlags).ToLittleEndianBytes();
streamFlags.CopyTo(bytes, 6);
crc.CopyTo(bytes, 8);
using (Stream badFlagStream = new MemoryStream(bytes))
{
BinaryReader br = new BinaryReader(badFlagStream);
var header = new XZHeader(br);
var ex = Assert.Throws<InvalidDataException>(() => { header.Process(); });
Assert.Equal("Unknown XZ Stream Version", ex.Message);
}
}
[Fact]
public void ProcessesBlockCheckType()
{
BinaryReader br = new BinaryReader(CompressedStream);
var header = new XZHeader(br);
header.Process();
Assert.Equal(CheckType.CRC64, header.BlockCheckType);
}
[Fact]
public void CanCalculateBlockCheckSize()
{
BinaryReader br = new BinaryReader(CompressedStream);
var header = new XZHeader(br);
header.Process();
Assert.Equal(8, header.BlockCheckSize);
}
[Fact]
public void ProcessesStreamHeaderFromFactory()
{
var header = XZHeader.FromStream(CompressedStream);
Assert.Equal(CheckType.CRC64, header.BlockCheckType);
}
}
}

View File

@@ -1,45 +0,0 @@
using SharpCompress.Compressors.Xz;
using System.IO;
using Xunit;
namespace SharpCompress.Test.Xz
{
public class XZIndexTests : XZTestsBase
{
protected override void Rewind(Stream stream)
{
stream.Position = 356;
}
[Fact]
public void RecordsStreamStartOnInit()
{
using (Stream badStream = new MemoryStream(new byte[] { 1, 2, 3, 4, 5 }))
{
BinaryReader br = new BinaryReader(badStream);
var index = new XZIndex(br, false);
Assert.Equal(0, index.StreamStartPosition);
}
}
[Fact]
public void ThrowsIfHasNoIndexMarker()
{
using (Stream badStream = new MemoryStream(new byte[] { 1, 2, 3, 4, 5 }))
{
BinaryReader br = new BinaryReader(badStream);
var index = new XZIndex(br, false);
Assert.Throws<InvalidDataException>( () => index.Process());
}
}
[Fact]
public void ReadsNumberOfRecords()
{
BinaryReader br = new BinaryReader(CompressedStream);
var index = new XZIndex(br, false);
index.Process();
Assert.Equal(index.NumberOfRecords, (ulong)1);
}
}
}

View File

@@ -1,20 +0,0 @@
using SharpCompress.Compressors.Xz;
using System.IO;
using Xunit;
namespace SharpCompress.Test.Xz
{
public class XZStreamReaderTests : XZTestsBase
{
[Fact]
public void CanReadStream()
{
XZStream xz = new XZStream(CompressedStream);
using (var sr = new StreamReader(xz))
{
string uncompressed = sr.ReadToEnd();
Assert.Equal(uncompressed, Original);
}
}
}
}

View File

@@ -1,69 +0,0 @@
using System.Text;
using System.IO;
namespace SharpCompress.Test.Xz
{
public abstract class XZTestsBase
{
public XZTestsBase()
{
Rewind(CompressedStream);
}
protected virtual void Rewind(Stream stream)
{
stream.Position = 0;
}
protected Stream CompressedStream { get; } = new MemoryStream(Compressed);
protected static byte[] Compressed { get; } = new byte[] {
0xfd, 0x37, 0x7a, 0x58, 0x5a, 0x00, 0x00, 0x04, 0xe6, 0xd6, 0xb4, 0x46, 0x02, 0x00, 0x21, 0x01,
0x16, 0x00, 0x00, 0x00, 0x74, 0x2f, 0xe5, 0xa3, 0xe0, 0x01, 0xe4, 0x01, 0x3c, 0x5d, 0x00, 0x26,
0x98, 0x4a, 0x47, 0xc6, 0x6a, 0x27, 0xd7, 0x36, 0x7a, 0x05, 0xb9, 0x4f, 0xd7, 0xde, 0x52, 0x4c,
0xca, 0x26, 0x4f, 0x23, 0x60, 0x4d, 0xf3, 0x1f, 0xa3, 0x67, 0x49, 0x53, 0xd0, 0xf5, 0xc7, 0xa9,
0x3e, 0xd6, 0xb5, 0x3d, 0x2b, 0x02, 0xbe, 0x83, 0x27, 0xe2, 0xa6, 0xc3, 0x13, 0x4a, 0x31, 0x14,
0x33, 0xed, 0x9a, 0x85, 0x1d, 0x05, 0x6e, 0x7e, 0xa4, 0x91, 0xbf, 0x46, 0x71, 0x7d, 0xa7, 0xfb,
0x12, 0x10, 0xdf, 0x21, 0x73, 0x75, 0xd8, 0xd9, 0xab, 0x8f, 0x1f, 0x8b, 0xb0, 0xb9, 0x3f, 0x9a,
0xa5, 0x1e, 0xd4, 0x2f, 0xdf, 0x09, 0xb3, 0xfe, 0x45, 0xef, 0x16, 0xec, 0x95, 0x68, 0x64, 0xbb,
0x42, 0x0c, 0x8b, 0x96, 0x27, 0x30, 0x62, 0x42, 0x91, 0x7c, 0xf3, 0x6e, 0x4d, 0x03, 0xc5, 0x00,
0x04, 0x73, 0xdd, 0xee, 0xb0, 0xaa, 0xd6, 0x0b, 0x11, 0x90, 0x81, 0xd4, 0xaa, 0x69, 0x63, 0xfa,
0x2f, 0xb4, 0x25, 0x0a, 0x7f, 0xf9, 0x47, 0x77, 0xb1, 0x1f, 0xc3, 0xb4, 0x4d, 0x51, 0xf8, 0x23,
0x3a, 0x7c, 0x44, 0xc8, 0xcc, 0xca, 0x72, 0x09, 0xae, 0xc9, 0x7b, 0x7e, 0x91, 0x5d, 0xff, 0xc4,
0xeb, 0xfd, 0xa1, 0x9b, 0xd4, 0x8d, 0xd7, 0xd3, 0x57, 0xac, 0x7e, 0x3b, 0x97, 0x2e, 0xe4, 0xc2,
0x2e, 0x93, 0x3d, 0xb0, 0x16, 0x64, 0x78, 0x45, 0xb1, 0xc9, 0x40, 0x96, 0xcf, 0x5b, 0xc2, 0x2f,
0xaa, 0xba, 0xcf, 0x98, 0x38, 0x21, 0x3d, 0x1a, 0x13, 0xe8, 0xa6, 0xa6, 0xdf, 0xf4, 0x3d, 0x01,
0xa1, 0x9d, 0xc1, 0x3e, 0x37, 0xac, 0x20, 0xc4, 0xef, 0x18, 0xb1, 0xeb, 0x35, 0xf4, 0x66, 0x9a,
0x47, 0x3c, 0xce, 0x7c, 0xad, 0xdb, 0x2e, 0x39, 0xf5, 0x8d, 0x4a, 0x1d, 0x65, 0xc2, 0x0f, 0xa4,
0x40, 0x7e, 0xe6, 0xa7, 0x17, 0xce, 0x75, 0x7f, 0xd9, 0xa3, 0xf9, 0x27, 0x42, 0xd7, 0x98, 0x54,
0x17, 0xa7, 0x7a, 0x7c, 0x82, 0xdf, 0xeb, 0x08, 0x28, 0x86, 0xdd, 0x57, 0x77, 0x92, 0x80, 0x5f,
0x7b, 0x3b, 0xce, 0x77, 0x72, 0xff, 0xa3, 0x85, 0xd8, 0x5c, 0x8a, 0xb7, 0x83, 0x58, 0xfa, 0xbd,
0x72, 0xe3, 0x66, 0x9d, 0x3b, 0xff, 0x13, 0x5b, 0x0b, 0xf1, 0x6c, 0xa6, 0xb1, 0x3b, 0x85, 0x3b,
0x47, 0x91, 0xc8, 0x7c, 0x38, 0xe2, 0xe5, 0x54, 0xf8, 0x27, 0xee, 0x00, 0xff, 0xd3, 0x68, 0xf1,
0xc6, 0xc7, 0xd7, 0x24, 0x00, 0x01, 0xd8, 0x02, 0xe5, 0x03, 0x00, 0x00, 0xac, 0x16, 0x1f, 0xa4,
0xb1, 0xc4, 0x67, 0xfb, 0x02, 0x00, 0x00, 0x00, 0x00, 0x04, 0x59, 0x5a
};
protected static byte[] OriginalBytes => Encoding.ASCII.GetBytes(Original);
protected static string Original { get; } =
"Mary had a little lamb,\r\n" +
"His fleece was white as snow,\r\n" +
"And everywhere that Mary went,\r\n" +
"The lamb was sure to go.\r\n" +
"\r\n" +
"He followed her to school one day,\r\n" +
"Which was against the rule,\r\n" +
"It made the children laugh and play\r\n" +
"To see a lamb at school.\r\n" +
"\r\n" +
"And so the teacher turned it out,\r\n" +
"But still it lingered near,\r\n" +
"And waited patiently about,\r\n" +
"Till Mary did appear.\r\n" +
"\r\n" +
"\"Why does the lamb love Mary so?\"\r\n" +
"The eager children cry.\r\n" +
"\"Why, Mary loves the lamb, you know.\"\r\n" +
"The teacher did reply.";
}
}

View File

@@ -14,29 +14,6 @@ namespace SharpCompress.Test.Zip
{
UseExtensionInsteadOfNameToVerify = true;
}
[Fact]
public void Issue_269_Double_Skip()
{
ResetScratch();
var path = Path.Combine(TEST_ARCHIVES_PATH, "PrePostHeaders.zip");
using (Stream stream = new ForwardOnlyStream(File.OpenRead(path)))
using (IReader reader = ReaderFactory.Open(stream))
{
int count = 0;
while (reader.MoveToNextEntry())
{
count++;
if (!reader.Entry.IsDirectory)
{
if (count % 2 != 0)
{
reader.WriteEntryTo(Stream.Null);
}
}
}
}
}
[Fact]
public void Zip_Zip64_Streamed_Read()