Compare commits

..

11 Commits

Author SHA1 Message Date
Adam Hathcock
703e4bd49e add pkware zip 2017-08-17 08:47:46 +01:00
Adam Hathcock
30d4251332 Trying to make this work. Conflicts with reading on post descriptor header style 2017-08-08 10:16:27 +01:00
Adam Hathcock
3983db08ff Use nameof 2017-07-27 11:05:33 -05:00
Adam Hathcock
72114bceea Add release link 2017-07-17 10:22:58 -05:00
Adam Hathcock
c303f96682 mark for 0.18 2017-07-17 10:11:27 -05:00
Adam Hathcock
0e785968c4 Rework usage of WriterOptions for writers since it was inconsistently used. (#271) 2017-07-17 11:05:42 -04:00
Adam Hathcock
15110e18e2 Don't skip ZipReader data twice. (#272)
* Don't skip ZipReader data twice.

* Add archive for a new test
2017-07-17 11:05:21 -04:00
Adam Hathcock
5465af041b Use Skip and ReadFully extension methods where possible. (#276) 2017-07-17 10:55:22 -04:00
Adam Hathcock
310d56fc16 Made ArchiveEncoding a non-static class that is used with options. (#274)
* Made ArchiveEncoding a non-static class that is used with options.

* Revert some formatting.

* Optional string decoder delegate (#278)
2017-07-17 10:53:20 -04:00
eklann
231258ef69 Force encoding (#266)
* Fixing build

* Fixing build

* Fixing build

* Fixed build (seems working now)

* Added support to force specific encoding when reading or writing an archive

* Minor fixed related to force encoding

* Removed obsolete project file not present in master
2017-07-05 10:15:49 -05:00
Sam Bott
16b7e3ffc8 Add XZ tests (#258)
* tests added and converted to xunit

* reordered two assertions
2017-06-11 13:44:00 +01:00
73 changed files with 1301 additions and 692 deletions

View File

@@ -44,6 +44,10 @@ I'm always looking for help or ideas. Please submit code or email with ideas. Un
## Version Log
### Version 0.18
* [Now on Github releases](https://github.com/adamhathcock/sharpcompress/releases/tag/0.18)
### Version 0.17.1
* Fix - [Bug Fix for .NET Core on Windows](https://github.com/adamhathcock/sharpcompress/pull/257)

View File

@@ -14,6 +14,7 @@ namespace SharpCompress.Archives.GZip
public class GZipArchive : AbstractWritableArchive<GZipArchiveEntry, GZipVolume>
{
#if !NO_FILE
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
@@ -36,6 +37,7 @@ namespace SharpCompress.Archives.GZip
return new GZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
}
#endif
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
@@ -54,11 +56,11 @@ namespace SharpCompress.Archives.GZip
#if !NO_FILE
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
/// <summary>
/// Constructor with a FileInfo object to an existing file.
/// </summary>
/// <param name="fileInfo"></param>
/// <param name="options"></param>
internal GZipArchive(FileInfo fileInfo, ReaderOptions options)
: base(ArchiveType.GZip, fileInfo, options)
{
@@ -104,15 +106,9 @@ namespace SharpCompress.Archives.GZip
{
// read the header on the first read
byte[] header = new byte[10];
int n = stream.Read(header, 0, header.Length);
// workitem 8501: handle edge case (decompress empty stream)
if (n == 0)
{
return false;
}
if (n != 10)
if (!stream.ReadFully(header))
{
return false;
}
@@ -158,7 +154,7 @@ namespace SharpCompress.Archives.GZip
{
throw new InvalidOperationException("Only one entry is allowed in a GZip Archive");
}
using (var writer = new GZipWriter(stream))
using (var writer = new GZipWriter(stream, new GZipWriterOptions(options)))
{
foreach (var entry in oldEntries.Concat(newEntries)
.Where(x => !x.IsDirectory))
@@ -179,7 +175,7 @@ namespace SharpCompress.Archives.GZip
protected override IEnumerable<GZipArchiveEntry> LoadEntries(IEnumerable<GZipVolume> volumes)
{
Stream stream = volumes.Single().Stream;
yield return new GZipArchiveEntry(this, new GZipFilePart(stream));
yield return new GZipArchiveEntry(this, new GZipFilePart(stream, ReaderOptions.ArchiveEncoding));
}
protected override IReader CreateReaderForSolidExtraction()

View File

@@ -106,7 +106,7 @@ namespace SharpCompress.Archives.SevenZip
for (int i = 0; i < database.Files.Count; i++)
{
var file = database.Files[i];
yield return new SevenZipArchiveEntry(this, new SevenZipFilePart(stream, database, i, file));
yield return new SevenZipArchiveEntry(this, new SevenZipFilePart(stream, database, i, file, ReaderOptions.ArchiveEncoding));
}
}

View File

@@ -16,7 +16,7 @@ namespace SharpCompress.Archives.Tar
public class TarArchive : AbstractWritableArchive<TarArchiveEntry, TarVolume>
{
#if !NO_FILE
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
@@ -39,7 +39,7 @@ namespace SharpCompress.Archives.Tar
return new TarArchive(fileInfo, readerOptions ?? new ReaderOptions());
}
#endif
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
@@ -52,6 +52,7 @@ namespace SharpCompress.Archives.Tar
}
#if !NO_FILE
public static bool IsTarFile(string filePath)
{
return IsTarFile(new FileInfo(filePath));
@@ -74,7 +75,7 @@ namespace SharpCompress.Archives.Tar
{
try
{
TarHeader tar = new TarHeader();
TarHeader tar = new TarHeader(new ArchiveEncoding());
tar.Read(new BinaryReader(stream));
return tar.Name.Length > 0 && Enum.IsDefined(typeof(EntryType), tar.EntryType);
}
@@ -98,7 +99,6 @@ namespace SharpCompress.Archives.Tar
protected override IEnumerable<TarVolume> LoadVolumes(FileInfo file)
{
return new TarVolume(file.OpenRead(), ReaderOptions).AsEnumerable();
}
#endif
@@ -127,7 +127,7 @@ namespace SharpCompress.Archives.Tar
{
Stream stream = volumes.Single().Stream;
TarHeader previousHeader = null;
foreach (TarHeader header in TarHeaderFactory.ReadHeader(StreamingMode.Seekable, stream))
foreach (TarHeader header in TarHeaderFactory.ReadHeader(StreamingMode.Seekable, stream, ReaderOptions.ArchiveEncoding))
{
if (header != null)
{
@@ -152,7 +152,7 @@ namespace SharpCompress.Archives.Tar
memoryStream.Position = 0;
var bytes = memoryStream.ToArray();
header.Name = ArchiveEncoding.Default.GetString(bytes, 0, bytes.Length).TrimNulls();
header.Name = ReaderOptions.ArchiveEncoding.Decode(bytes).TrimNulls();
}
}

View File

@@ -24,6 +24,7 @@ namespace SharpCompress.Archives.Zip
public CompressionLevel DeflateCompressionLevel { get; set; }
#if !NO_FILE
/// <summary>
/// Constructor expects a filepath to an existing file.
/// </summary>
@@ -46,6 +47,7 @@ namespace SharpCompress.Archives.Zip
return new ZipArchive(fileInfo, readerOptions ?? new ReaderOptions());
}
#endif
/// <summary>
/// Takes a seekable Stream as a source
/// </summary>
@@ -58,6 +60,7 @@ namespace SharpCompress.Archives.Zip
}
#if !NO_FILE
public static bool IsZipFile(string filePath, string password = null)
{
return IsZipFile(new FileInfo(filePath), password);
@@ -78,7 +81,7 @@ namespace SharpCompress.Archives.Zip
public static bool IsZipFile(Stream stream, string password = null)
{
StreamingZipHeaderFactory headerFactory = new StreamingZipHeaderFactory(password);
StreamingZipHeaderFactory headerFactory = new StreamingZipHeaderFactory(password, new ArchiveEncoding());
try
{
ZipHeader header =
@@ -109,7 +112,7 @@ namespace SharpCompress.Archives.Zip
internal ZipArchive(FileInfo fileInfo, ReaderOptions readerOptions)
: base(ArchiveType.Zip, fileInfo, readerOptions)
{
headerFactory = new SeekableZipHeaderFactory(readerOptions.Password);
headerFactory = new SeekableZipHeaderFactory(readerOptions.Password, readerOptions.ArchiveEncoding);
}
protected override IEnumerable<ZipVolume> LoadVolumes(FileInfo file)
@@ -131,7 +134,7 @@ namespace SharpCompress.Archives.Zip
internal ZipArchive(Stream stream, ReaderOptions readerOptions)
: base(ArchiveType.Zip, stream, readerOptions)
{
headerFactory = new SeekableZipHeaderFactory(readerOptions.Password);
headerFactory = new SeekableZipHeaderFactory(readerOptions.Password, readerOptions.ArchiveEncoding);
}
protected override IEnumerable<ZipVolume> LoadVolumes(IEnumerable<Stream> streams)
@@ -150,19 +153,19 @@ namespace SharpCompress.Archives.Zip
switch (h.ZipHeaderType)
{
case ZipHeaderType.DirectoryEntry:
{
yield return new ZipArchiveEntry(this,
new SeekableZipFilePart(headerFactory,
h as DirectoryEntryHeader,
stream));
}
{
yield return new ZipArchiveEntry(this,
new SeekableZipFilePart(headerFactory,
h as DirectoryEntryHeader,
stream));
}
break;
case ZipHeaderType.DirectoryEnd:
{
byte[] bytes = (h as DirectoryEndHeader).Comment;
volume.Comment = ArchiveEncoding.Default.GetString(bytes, 0, bytes.Length);
yield break;
}
{
byte[] bytes = (h as DirectoryEndHeader).Comment;
volume.Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
yield break;
}
}
}
}

View File

@@ -1,23 +1,60 @@
using System.Text;
using System;
using System.Text;
namespace SharpCompress.Common
{
public static class ArchiveEncoding
public class ArchiveEncoding
{
/// <summary>
/// Default encoding to use when archive format doesn't specify one.
/// </summary>
public static Encoding Default { get; set; }
public Encoding Default { get; set; }
/// <summary>
/// Encoding used by encryption schemes which don't comply with RFC 2898.
/// ArchiveEncoding used by encryption schemes which don't comply with RFC 2898.
/// </summary>
public static Encoding Password { get; set; }
public Encoding Password { get; set; }
static ArchiveEncoding()
/// <summary>
/// Set this encoding when you want to force it for all encoding operations.
/// </summary>
public Encoding Forced { get; set; }
/// <summary>
/// Set this when you want to use a custom method for all decoding operations.
/// </summary>
/// <returns>string Func(bytes, index, length)</returns>
public Func<byte[], int, int, string> CustomDecoder { get; set; }
public ArchiveEncoding()
{
Default = Encoding.UTF8;
Password = Encoding.UTF8;
}
public string Decode(byte[] bytes)
{
return Decode(bytes, 0, bytes.Length);
}
public string Decode(byte[] bytes, int start, int length)
{
return GetDecoder().Invoke(bytes, start, length);
}
public byte[] Encode(string str)
{
return GetEncoding().GetBytes(str);
}
public Encoding GetEncoding()
{
return Forced ?? Default ?? Encoding.UTF8;
}
public Func<byte[], int, int, string> GetDecoder()
{
return CustomDecoder ?? ((bytes, index, count) => (Default ?? Encoding.UTF8).GetString(bytes, index, count));
}
}
}

View File

@@ -4,9 +4,17 @@ namespace SharpCompress.Common
{
public abstract class FilePart
{
protected FilePart(ArchiveEncoding archiveEncoding)
{
ArchiveEncoding = archiveEncoding;
}
internal ArchiveEncoding ArchiveEncoding { get; }
internal abstract string FilePartName { get; }
internal abstract Stream GetCompressedStream();
internal abstract Stream GetRawStream();
internal bool Skipped { get; set; }
}
}

View File

@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
namespace SharpCompress.Common.GZip
{
@@ -39,9 +40,9 @@ namespace SharpCompress.Common.GZip
internal override IEnumerable<FilePart> Parts => filePart.AsEnumerable<FilePart>();
internal static IEnumerable<GZipEntry> GetEntries(Stream stream)
internal static IEnumerable<GZipEntry> GetEntries(Stream stream, OptionsBase options)
{
yield return new GZipEntry(new GZipFilePart(stream));
yield return new GZipEntry(new GZipFilePart(stream, options.ArchiveEncoding));
}
}
}

View File

@@ -5,35 +5,37 @@ using SharpCompress.Common.Tar.Headers;
using SharpCompress.Compressors;
using SharpCompress.Compressors.Deflate;
using SharpCompress.Converters;
using System.Text;
namespace SharpCompress.Common.GZip
{
internal class GZipFilePart : FilePart
{
private string name;
private readonly Stream stream;
private string _name;
private readonly Stream _stream;
internal GZipFilePart(Stream stream)
internal GZipFilePart(Stream stream, ArchiveEncoding archiveEncoding)
: base(archiveEncoding)
{
ReadAndValidateGzipHeader(stream);
EntryStartPosition = stream.Position;
this.stream = stream;
this._stream = stream;
}
internal long EntryStartPosition { get; }
internal DateTime? DateModified { get; private set; }
internal override string FilePartName => name;
internal override string FilePartName => _name;
internal override Stream GetCompressedStream()
{
return new DeflateStream(stream, CompressionMode.Decompress, CompressionLevel.Default, false);
return new DeflateStream(_stream, CompressionMode.Decompress, CompressionLevel.Default, false);
}
internal override Stream GetRawStream()
{
return stream;
return _stream;
}
private void ReadAndValidateGzipHeader(Stream stream)
@@ -67,15 +69,16 @@ namespace SharpCompress.Common.GZip
Int16 extraLength = (Int16)(header[0] + header[1] * 256);
byte[] extra = new byte[extraLength];
n = stream.Read(extra, 0, extra.Length);
if (n != extraLength)
if (!stream.ReadFully(extra))
{
throw new ZlibException("Unexpected end-of-file reading GZIP header.");
}
n = extraLength;
}
if ((header[3] & 0x08) == 0x08)
{
name = ReadZeroTerminatedString(stream);
_name = ReadZeroTerminatedString(stream);
}
if ((header[3] & 0x10) == 0x010)
{
@@ -87,7 +90,7 @@ namespace SharpCompress.Common.GZip
}
}
private static string ReadZeroTerminatedString(Stream stream)
private string ReadZeroTerminatedString(Stream stream)
{
byte[] buf1 = new byte[1];
var list = new List<byte>();
@@ -110,8 +113,8 @@ namespace SharpCompress.Common.GZip
}
}
while (!done);
byte[] a = list.ToArray();
return ArchiveEncoding.Default.GetString(a, 0, a.Length);
byte[] buffer = list.ToArray();
return ArchiveEncoding.Decode(buffer);
}
}
}

View File

@@ -1,4 +1,5 @@
namespace SharpCompress.Common
namespace SharpCompress.Common
{
public class OptionsBase
{
@@ -6,5 +7,7 @@
/// SharpCompress will keep the supplied streams open. Default is true.
/// </summary>
public bool LeaveStreamOpen { get; set; } = true;
public ArchiveEncoding ArchiveEncoding { get; set; } = new ArchiveEncoding();
}
}

View File

@@ -1,6 +1,6 @@
using SharpCompress.IO;
using System;
using System.IO;
using SharpCompress.IO;
namespace SharpCompress.Common.Rar.Headers
{
@@ -52,50 +52,50 @@ namespace SharpCompress.Common.Rar.Headers
switch (HeaderType)
{
case HeaderType.FileHeader:
{
if (FileFlags.HasFlag(FileFlags.UNICODE))
{
int length = 0;
while (length < fileNameBytes.Length
&& fileNameBytes[length] != 0)
if (FileFlags.HasFlag(FileFlags.UNICODE))
{
length++;
}
if (length != nameSize)
{
length++;
FileName = FileNameDecoder.Decode(fileNameBytes, length);
int length = 0;
while (length < fileNameBytes.Length
&& fileNameBytes[length] != 0)
{
length++;
}
if (length != nameSize)
{
length++;
FileName = FileNameDecoder.Decode(fileNameBytes, length);
}
else
{
FileName = ArchiveEncoding.Decode(fileNameBytes);
}
}
else
{
FileName = DecodeDefault(fileNameBytes);
FileName = ArchiveEncoding.Decode(fileNameBytes);
}
FileName = ConvertPath(FileName, HostOS);
}
else
{
FileName = DecodeDefault(fileNameBytes);
}
FileName = ConvertPath(FileName, HostOS);
}
break;
case HeaderType.NewSubHeader:
{
int datasize = HeaderSize - NEWLHD_SIZE - nameSize;
if (FileFlags.HasFlag(FileFlags.SALT))
{
datasize -= SALT_SIZE;
}
if (datasize > 0)
{
SubData = reader.ReadBytes(datasize);
}
int datasize = HeaderSize - NEWLHD_SIZE - nameSize;
if (FileFlags.HasFlag(FileFlags.SALT))
{
datasize -= SALT_SIZE;
}
if (datasize > 0)
{
SubData = reader.ReadBytes(datasize);
}
if (NewSubHeaderType.SUBHEAD_TYPE_RR.Equals(fileNameBytes))
{
RecoverySectors = SubData[8] + (SubData[9] << 8)
+ (SubData[10] << 16) + (SubData[11] << 24);
if (NewSubHeaderType.SUBHEAD_TYPE_RR.Equals(fileNameBytes))
{
RecoverySectors = SubData[8] + (SubData[9] << 8)
+ (SubData[10] << 16) + (SubData[11] << 24);
}
}
}
break;
}
@@ -118,12 +118,6 @@ namespace SharpCompress.Common.Rar.Headers
}
}
//only the full .net framework will do other code pages than unicode/utf8
private string DecodeDefault(byte[] bytes)
{
return ArchiveEncoding.Default.GetString(bytes, 0, bytes.Length);
}
private long UInt32To64(uint x, uint y)
{
long l = x;
@@ -178,6 +172,7 @@ namespace SharpCompress.Common.Rar.Headers
}
internal long DataStartPosition { get; set; }
internal HostOS HostOS { get; private set; }
internal uint FileCRC { get; private set; }
@@ -199,6 +194,7 @@ namespace SharpCompress.Common.Rar.Headers
internal FileFlags FileFlags => (FileFlags)Flags;
internal long CompressedSize { get; private set; }
internal long UncompressedSize { get; private set; }
internal string FileName { get; private set; }

View File

@@ -1,6 +1,7 @@
using System;
using System.IO;
using SharpCompress.IO;
using System.Text;
namespace SharpCompress.Common.Rar.Headers
{
@@ -17,14 +18,16 @@ namespace SharpCompress.Common.Rar.Headers
HeaderSize = baseHeader.HeaderSize;
AdditionalSize = baseHeader.AdditionalSize;
ReadBytes = baseHeader.ReadBytes;
ArchiveEncoding = baseHeader.ArchiveEncoding;
}
internal static RarHeader Create(RarCrcBinaryReader reader)
internal static RarHeader Create(RarCrcBinaryReader reader, ArchiveEncoding archiveEncoding)
{
try
{
RarHeader header = new RarHeader();
header.ArchiveEncoding = archiveEncoding;
reader.Mark();
header.ReadStartFromReader(reader);
header.ReadBytes += reader.CurrentReadByteCount;
@@ -50,7 +53,8 @@ namespace SharpCompress.Common.Rar.Headers
}
}
protected virtual void ReadFromReader(MarkingBinaryReader reader) {
protected virtual void ReadFromReader(MarkingBinaryReader reader)
{
throw new NotImplementedException();
}
@@ -76,10 +80,11 @@ namespace SharpCompress.Common.Rar.Headers
return header;
}
private void VerifyHeaderCrc(ushort crc) {
if (HeaderType != HeaderType.MarkHeader)
private void VerifyHeaderCrc(ushort crc)
{
if (HeaderType != HeaderType.MarkHeader)
{
if (crc != HeadCRC)
if (crc != HeadCRC)
{
throw new InvalidFormatException("rar header crc mismatch");
}
@@ -106,6 +111,8 @@ namespace SharpCompress.Common.Rar.Headers
protected short HeaderSize { get; private set; }
internal ArchiveEncoding ArchiveEncoding { get; private set; }
/// <summary>
/// This additional size of the header could be file data
/// </summary>

View File

@@ -117,7 +117,7 @@ namespace SharpCompress.Common.Rar.Headers
{
#if !NO_CRYPTO
var reader = new RarCryptoBinaryReader(stream, Options.Password);
if (IsEncrypted)
{
if (Options.Password == null)
@@ -133,7 +133,7 @@ namespace SharpCompress.Common.Rar.Headers
#endif
RarHeader header = RarHeader.Create(reader);
RarHeader header = RarHeader.Create(reader, Options.ArchiveEncoding);
if (header == null)
{
return null;
@@ -141,110 +141,110 @@ namespace SharpCompress.Common.Rar.Headers
switch (header.HeaderType)
{
case HeaderType.ArchiveHeader:
{
var ah = header.PromoteHeader<ArchiveHeader>(reader);
IsEncrypted = ah.HasPassword;
return ah;
}
{
var ah = header.PromoteHeader<ArchiveHeader>(reader);
IsEncrypted = ah.HasPassword;
return ah;
}
case HeaderType.MarkHeader:
{
return header.PromoteHeader<MarkHeader>(reader);
}
{
return header.PromoteHeader<MarkHeader>(reader);
}
case HeaderType.ProtectHeader:
{
ProtectHeader ph = header.PromoteHeader<ProtectHeader>(reader);
// skip the recovery record data, we do not use it.
switch (StreamingMode)
{
case StreamingMode.Seekable:
{
reader.BaseStream.Position += ph.DataSize;
}
break;
case StreamingMode.Streaming:
{
reader.BaseStream.Skip(ph.DataSize);
}
break;
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
}
ProtectHeader ph = header.PromoteHeader<ProtectHeader>(reader);
return ph;
}
// skip the recovery record data, we do not use it.
switch (StreamingMode)
{
case StreamingMode.Seekable:
{
reader.BaseStream.Position += ph.DataSize;
}
break;
case StreamingMode.Streaming:
{
reader.BaseStream.Skip(ph.DataSize);
}
break;
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
}
return ph;
}
case HeaderType.NewSubHeader:
{
FileHeader fh = header.PromoteHeader<FileHeader>(reader);
switch (StreamingMode)
{
case StreamingMode.Seekable:
FileHeader fh = header.PromoteHeader<FileHeader>(reader);
switch (StreamingMode)
{
fh.DataStartPosition = reader.BaseStream.Position;
reader.BaseStream.Position += fh.CompressedSize;
}
break;
case StreamingMode.Streaming:
{
//skip the data because it's useless?
reader.BaseStream.Skip(fh.CompressedSize);
}
break;
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
case StreamingMode.Seekable:
{
fh.DataStartPosition = reader.BaseStream.Position;
reader.BaseStream.Position += fh.CompressedSize;
}
break;
case StreamingMode.Streaming:
{
//skip the data because it's useless?
reader.BaseStream.Skip(fh.CompressedSize);
}
break;
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
}
return fh;
}
return fh;
}
case HeaderType.FileHeader:
{
FileHeader fh = header.PromoteHeader<FileHeader>(reader);
switch (StreamingMode)
{
case StreamingMode.Seekable:
FileHeader fh = header.PromoteHeader<FileHeader>(reader);
switch (StreamingMode)
{
fh.DataStartPosition = reader.BaseStream.Position;
reader.BaseStream.Position += fh.CompressedSize;
}
break;
case StreamingMode.Streaming:
{
var ms = new ReadOnlySubStream(reader.BaseStream, fh.CompressedSize);
if (fh.Salt == null)
{
fh.PackedStream = ms;
}
else
{
case StreamingMode.Seekable:
{
fh.DataStartPosition = reader.BaseStream.Position;
reader.BaseStream.Position += fh.CompressedSize;
}
break;
case StreamingMode.Streaming:
{
var ms = new ReadOnlySubStream(reader.BaseStream, fh.CompressedSize);
if (fh.Salt == null)
{
fh.PackedStream = ms;
}
else
{
#if !NO_CRYPTO
fh.PackedStream = new RarCryptoWrapper(ms, Options.Password, fh.Salt);
fh.PackedStream = new RarCryptoWrapper(ms, Options.Password, fh.Salt);
#else
throw new NotSupportedException("RarCrypto not supported");
#endif
}
}
break;
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
}
break;
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
}
return fh;
}
return fh;
}
case HeaderType.EndArchiveHeader:
{
return header.PromoteHeader<EndArchiveHeader>(reader);
}
{
return header.PromoteHeader<EndArchiveHeader>(reader);
}
default:
{
throw new InvalidFormatException("Invalid Rar Header: " + header.HeaderType);
}
{
throw new InvalidFormatException("Invalid Rar Header: " + header.HeaderType);
}
}
}
}
}
}

View File

@@ -9,6 +9,7 @@ namespace SharpCompress.Common.Rar
internal abstract class RarFilePart : FilePart
{
internal RarFilePart(MarkHeader mh, FileHeader fh)
: base(fh.ArchiveEncoding)
{
MarkHeader = mh;
FileHeader = fh;

View File

@@ -7,14 +7,15 @@ namespace SharpCompress.Common.SevenZip
{
internal class SevenZipFilePart : FilePart
{
private CompressionType? type;
private readonly Stream stream;
private readonly ArchiveDatabase database;
private CompressionType? _type;
private readonly Stream _stream;
private readonly ArchiveDatabase _database;
internal SevenZipFilePart(Stream stream, ArchiveDatabase database, int index, CFileItem fileEntry)
internal SevenZipFilePart(Stream stream, ArchiveDatabase database, int index, CFileItem fileEntry, ArchiveEncoding archiveEncoding)
: base(archiveEncoding)
{
this.stream = stream;
this.database = database;
this._stream = stream;
this._database = database;
Index = index;
Header = fileEntry;
if (Header.HasStream)
@@ -41,14 +42,14 @@ namespace SharpCompress.Common.SevenZip
{
return null;
}
var folderStream = database.GetFolderStream(stream, Folder, null);
var folderStream = _database.GetFolderStream(_stream, Folder, null);
int firstFileIndex = database.FolderStartFileIndex[database.Folders.IndexOf(Folder)];
int firstFileIndex = _database.FolderStartFileIndex[_database.Folders.IndexOf(Folder)];
int skipCount = Index - firstFileIndex;
long skipSize = 0;
for (int i = 0; i < skipCount; i++)
{
skipSize += database.Files[firstFileIndex + i].Size;
skipSize += _database.Files[firstFileIndex + i].Size;
}
if (skipSize > 0)
{
@@ -61,11 +62,11 @@ namespace SharpCompress.Common.SevenZip
{
get
{
if (type == null)
if (_type == null)
{
type = GetCompression();
_type = GetCompression();
}
return type.Value;
return _type.Value;
}
}
@@ -84,7 +85,7 @@ namespace SharpCompress.Common.SevenZip
{
var coder = Folder.Coders.First();
switch (coder.MethodId.Id)
{
{
case k_LZMA:
case k_LZMA2:
{

View File

@@ -9,6 +9,11 @@ namespace SharpCompress.Common.Tar.Headers
{
internal static readonly DateTime Epoch = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
public TarHeader(ArchiveEncoding archiveEncoding)
{
ArchiveEncoding = archiveEncoding;
}
internal string Name { get; set; }
//internal int Mode { get; set; }
@@ -20,6 +25,7 @@ namespace SharpCompress.Common.Tar.Headers
internal DateTime LastModifiedTime { get; set; }
internal EntryType EntryType { get; set; }
internal Stream PackedStream { get; set; }
internal ArchiveEncoding ArchiveEncoding { get; }
internal const int BlockSize = 512;
@@ -31,7 +37,7 @@ namespace SharpCompress.Common.Tar.Headers
WriteOctalBytes(0, buffer, 108, 8); // owner ID
WriteOctalBytes(0, buffer, 116, 8); // group ID
//Encoding.UTF8.GetBytes("magic").CopyTo(buffer, 257);
//ArchiveEncoding.UTF8.GetBytes("magic").CopyTo(buffer, 257);
if (Name.Length > 100)
{
// Set mock filename and filetype to indicate the next block is the actual name of the file
@@ -72,7 +78,7 @@ namespace SharpCompress.Common.Tar.Headers
private void WriteLongFilenameHeader(Stream output)
{
byte[] nameBytes = ArchiveEncoding.Default.GetBytes(Name);
byte[] nameBytes = ArchiveEncoding.Encode(Name);
output.Write(nameBytes, 0, nameBytes.Length);
// pad to multiple of BlockSize bytes, and make sure a terminating null is added
@@ -99,7 +105,7 @@ namespace SharpCompress.Common.Tar.Headers
}
else
{
Name = ArchiveEncoding.Default.GetString(buffer, 0, 100).TrimNulls();
Name = ArchiveEncoding.Decode(buffer, 0, 100).TrimNulls();
}
EntryType = ReadEntryType(buffer);
@@ -111,12 +117,12 @@ namespace SharpCompress.Common.Tar.Headers
long unixTimeStamp = ReadASCIIInt64Base8(buffer, 136, 11);
LastModifiedTime = Epoch.AddSeconds(unixTimeStamp).ToLocalTime();
Magic = ArchiveEncoding.Default.GetString(buffer, 257, 6).TrimNulls();
Magic = ArchiveEncoding.Decode(buffer, 257, 6).TrimNulls();
if (!string.IsNullOrEmpty(Magic)
&& "ustar".Equals(Magic))
{
string namePrefix = ArchiveEncoding.Default.GetString(buffer, 345, 157);
string namePrefix = ArchiveEncoding.Decode(buffer, 345, 157);
namePrefix = namePrefix.TrimNulls();
if (!string.IsNullOrEmpty(namePrefix))
{
@@ -143,7 +149,7 @@ namespace SharpCompress.Common.Tar.Headers
{
reader.ReadBytes(remainingBytesToRead);
}
return ArchiveEncoding.Default.GetString(nameBytes, 0, nameBytes.Length).TrimNulls();
return ArchiveEncoding.Decode(nameBytes, 0, nameBytes.Length).TrimNulls();
}
private static EntryType ReadEntryType(byte[] buffer)

View File

@@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
using System.Text;
namespace SharpCompress.Common.Tar
{
@@ -43,9 +44,9 @@ namespace SharpCompress.Common.Tar
internal override IEnumerable<FilePart> Parts => filePart.AsEnumerable<FilePart>();
internal static IEnumerable<TarEntry> GetEntries(StreamingMode mode, Stream stream,
CompressionType compressionType)
CompressionType compressionType, ArchiveEncoding archiveEncoding)
{
foreach (TarHeader h in TarHeaderFactory.ReadHeader(mode, stream))
foreach (TarHeader h in TarHeaderFactory.ReadHeader(mode, stream, archiveEncoding))
{
if (h != null)
{

View File

@@ -6,11 +6,12 @@ namespace SharpCompress.Common.Tar
{
internal class TarFilePart : FilePart
{
private readonly Stream seekableStream;
private readonly Stream _seekableStream;
internal TarFilePart(TarHeader header, Stream seekableStream)
: base(header.ArchiveEncoding)
{
this.seekableStream = seekableStream;
this._seekableStream = seekableStream;
Header = header;
}
@@ -20,10 +21,10 @@ namespace SharpCompress.Common.Tar
internal override Stream GetCompressedStream()
{
if (seekableStream != null)
if (_seekableStream != null)
{
seekableStream.Position = Header.DataStartPosition.Value;
return new ReadOnlySubStream(seekableStream, Header.Size);
_seekableStream.Position = Header.DataStartPosition.Value;
return new ReadOnlySubStream(_seekableStream, Header.Size);
}
return Header.PackedStream;
}

View File

@@ -2,12 +2,13 @@
using System.IO;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.IO;
using System.Text;
namespace SharpCompress.Common.Tar
{
internal static class TarHeaderFactory
{
internal static IEnumerable<TarHeader> ReadHeader(StreamingMode mode, Stream stream)
internal static IEnumerable<TarHeader> ReadHeader(StreamingMode mode, Stream stream, ArchiveEncoding archiveEncoding)
{
while (true)
{
@@ -15,7 +16,8 @@ namespace SharpCompress.Common.Tar
try
{
BinaryReader reader = new BinaryReader(stream);
header = new TarHeader();
header = new TarHeader(archiveEncoding);
if (!header.Read(reader))
{
yield break;
@@ -23,22 +25,22 @@ namespace SharpCompress.Common.Tar
switch (mode)
{
case StreamingMode.Seekable:
{
header.DataStartPosition = reader.BaseStream.Position;
{
header.DataStartPosition = reader.BaseStream.Position;
//skip to nearest 512
reader.BaseStream.Position += PadTo512(header.Size);
}
//skip to nearest 512
reader.BaseStream.Position += PadTo512(header.Size);
}
break;
case StreamingMode.Streaming:
{
header.PackedStream = new TarReadOnlySubStream(stream, header.Size);
}
{
header.PackedStream = new TarReadOnlySubStream(stream, header.Size);
}
break;
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
{
throw new InvalidFormatException("Invalid StreamingMode");
}
}
}
catch

View File

@@ -0,0 +1,8 @@
namespace SharpCompress.Common.Zip
{
internal enum CryptoMode
{
Encrypt,
Decrypt
}
}

View File

@@ -6,8 +6,8 @@ namespace SharpCompress.Common.Zip.Headers
{
internal class DirectoryEntryHeader : ZipFileEntry
{
public DirectoryEntryHeader()
: base(ZipHeaderType.DirectoryEntry)
public DirectoryEntryHeader(ArchiveEncoding archiveEncoding)
: base(ZipHeaderType.DirectoryEntry, archiveEncoding)
{
}
@@ -31,10 +31,10 @@ namespace SharpCompress.Common.Zip.Headers
RelativeOffsetOfEntryHeader = reader.ReadUInt32();
byte[] name = reader.ReadBytes(nameLength);
Name = DecodeString(name);
Name = ArchiveEncoding.Decode(name);
byte[] extra = reader.ReadBytes(extraLength);
byte[] comment = reader.ReadBytes(commentLength);
Comment = DecodeString(comment);
Comment = ArchiveEncoding.Decode(comment);
LoadExtra(extra);
var unicodePathExtra = Extra.FirstOrDefault(u => u.Type == ExtraDataType.UnicodePathExtraField);

View File

@@ -5,6 +5,7 @@ namespace SharpCompress.Common.Zip.Headers
[Flags]
internal enum HeaderFlags : ushort
{
None = 0,
Encrypted = 1, // http://www.pkware.com/documents/casestudies/APPNOTE.TXT
Bit1 = 2,
Bit2 = 4,

View File

@@ -1,12 +1,13 @@
using System.IO;
using System.Linq;
using System.Text;
namespace SharpCompress.Common.Zip.Headers
{
internal class LocalEntryHeader : ZipFileEntry
{
public LocalEntryHeader()
: base(ZipHeaderType.LocalEntry)
public LocalEntryHeader(ArchiveEncoding archiveEncoding)
: base(ZipHeaderType.LocalEntry, archiveEncoding)
{
}
@@ -24,7 +25,7 @@ namespace SharpCompress.Common.Zip.Headers
ushort extraLength = reader.ReadUInt16();
byte[] name = reader.ReadBytes(nameLength);
byte[] extra = reader.ReadBytes(extraLength);
Name = DecodeString(name);
Name = ArchiveEncoding.Decode(name);
LoadExtra(extra);
var unicodePathExtra = Extra.FirstOrDefault(u => u.Type == ExtraDataType.UnicodePathExtraField);

View File

@@ -8,10 +8,11 @@ namespace SharpCompress.Common.Zip.Headers
{
internal abstract class ZipFileEntry : ZipHeader
{
protected ZipFileEntry(ZipHeaderType type)
protected ZipFileEntry(ZipHeaderType type, ArchiveEncoding archiveEncoding)
: base(type)
{
Extra = new List<ExtraData>();
ArchiveEncoding = archiveEncoding;
}
internal bool IsDirectory
@@ -29,28 +30,11 @@ namespace SharpCompress.Common.Zip.Headers
&& Name.EndsWith("\\");
}
}
protected string DecodeString(byte[] str)
{
if (FlagUtility.HasFlag(Flags, HeaderFlags.UTF8))
{
return Encoding.UTF8.GetString(str, 0, str.Length);
}
return ArchiveEncoding.Default.GetString(str, 0, str.Length);
}
protected byte[] EncodeString(string str)
{
if (FlagUtility.HasFlag(Flags, HeaderFlags.UTF8))
{
return Encoding.UTF8.GetBytes(str);
}
return ArchiveEncoding.Default.GetBytes(str);
}
internal Stream PackedStream { get; set; }
internal ArchiveEncoding ArchiveEncoding { get; }
internal string Name { get; set; }
internal HeaderFlags Flags { get; set; }
@@ -64,7 +48,7 @@ namespace SharpCompress.Common.Zip.Headers
internal long UncompressedSize { get; set; }
internal List<ExtraData> Extra { get; set; }
public string Password { get; set; }
internal PkwareTraditionalEncryptionData ComposeEncryptionData(Stream archiveStream)
@@ -75,10 +59,10 @@ namespace SharpCompress.Common.Zip.Headers
}
var buffer = new byte[12];
archiveStream.Read(buffer, 0, 12);
archiveStream.ReadFully(buffer);
PkwareTraditionalEncryptionData encryptionData = PkwareTraditionalEncryptionData.ForRead(Password, this, buffer);
return encryptionData;
}

View File

@@ -3,12 +3,6 @@ using System.IO;
namespace SharpCompress.Common.Zip
{
internal enum CryptoMode
{
Encrypt,
Decrypt
}
internal class PkwareTraditionalCryptoStream : Stream
{
private readonly PkwareTraditionalEncryptionData encryptor;
@@ -42,7 +36,7 @@ namespace SharpCompress.Common.Zip
if (buffer == null)
{
throw new ArgumentNullException("buffer");
throw new ArgumentNullException(nameof(buffer));
}
byte[] temp = new byte[count];

View File

@@ -9,9 +9,11 @@ namespace SharpCompress.Common.Zip
{
private static readonly CRC32 crc32 = new CRC32();
private readonly UInt32[] _Keys = {0x12345678, 0x23456789, 0x34567890};
private readonly ArchiveEncoding _archiveEncoding;
private PkwareTraditionalEncryptionData(string password)
private PkwareTraditionalEncryptionData(string password, ArchiveEncoding archiveEncoding)
{
_archiveEncoding = archiveEncoding;
Initialize(password);
}
@@ -27,14 +29,10 @@ namespace SharpCompress.Common.Zip
public static PkwareTraditionalEncryptionData ForRead(string password, ZipFileEntry header,
byte[] encryptionHeader)
{
var encryptor = new PkwareTraditionalEncryptionData(password);
var encryptor = new PkwareTraditionalEncryptionData(password, header.ArchiveEncoding);
byte[] plainTextHeader = encryptor.Decrypt(encryptionHeader, encryptionHeader.Length);
if (plainTextHeader[11] != (byte)((header.Crc >> 24) & 0xff))
{
if (!FlagUtility.HasFlag(header.Flags, HeaderFlags.UsePostDataDescriptor))
{
throw new CryptographicException("The password did not match.");
}
if (plainTextHeader[11] != (byte)((header.LastModifiedTime >> 8) & 0xff))
{
throw new CryptographicException("The password did not match.");
@@ -47,7 +45,7 @@ namespace SharpCompress.Common.Zip
{
if (length > cipherText.Length)
{
throw new ArgumentOutOfRangeException("length",
throw new ArgumentOutOfRangeException(nameof(length),
"Bad length during Decryption: the length parameter must be smaller than or equal to the size of the destination array.");
}
@@ -70,7 +68,7 @@ namespace SharpCompress.Common.Zip
if (length > plainText.Length)
{
throw new ArgumentOutOfRangeException("length",
throw new ArgumentOutOfRangeException(nameof(length),
"Bad length during Encryption: The length parameter must be smaller than or equal to the size of the destination array.");
}
@@ -93,17 +91,12 @@ namespace SharpCompress.Common.Zip
}
}
internal static byte[] StringToByteArray(string value, Encoding encoding)
internal byte[] StringToByteArray(string value)
{
byte[] a = encoding.GetBytes(value);
byte[] a = _archiveEncoding.Password.GetBytes(value);
return a;
}
internal static byte[] StringToByteArray(string value)
{
return StringToByteArray(value, ArchiveEncoding.Password);
}
private void UpdateKeys(byte byteValue)
{
_Keys[0] = (UInt32)crc32.ComputeCrc32((int)_Keys[0], byteValue);
@@ -111,5 +104,10 @@ namespace SharpCompress.Common.Zip
_Keys[1] = _Keys[1] * 0x08088405 + 1;
_Keys[2] = (UInt32)crc32.ComputeCrc32((int)_Keys[2], (byte)(_Keys[1] >> 24));
}
public static PkwareTraditionalEncryptionData ForWrite(string password, ArchiveEncoding archiveEncoding)
{
return new PkwareTraditionalEncryptionData(password, archiveEncoding);
}
}
}

View File

@@ -5,21 +5,21 @@ namespace SharpCompress.Common.Zip
{
internal class SeekableZipFilePart : ZipFilePart
{
private bool isLocalHeaderLoaded;
private readonly SeekableZipHeaderFactory headerFactory;
private bool _isLocalHeaderLoaded;
private readonly SeekableZipHeaderFactory _headerFactory;
internal SeekableZipFilePart(SeekableZipHeaderFactory headerFactory, DirectoryEntryHeader header, Stream stream)
: base(header, stream)
{
this.headerFactory = headerFactory;
this._headerFactory = headerFactory;
}
internal override Stream GetCompressedStream()
{
if (!isLocalHeaderLoaded)
if (!_isLocalHeaderLoaded)
{
LoadLocalHeader();
isLocalHeaderLoaded = true;
_isLocalHeaderLoaded = true;
}
return base.GetCompressedStream();
}
@@ -29,7 +29,7 @@ namespace SharpCompress.Common.Zip
private void LoadLocalHeader()
{
bool hasData = Header.HasData;
Header = headerFactory.GetLocalHeader(BaseStream, Header as DirectoryEntryHeader);
Header = _headerFactory.GetLocalHeader(BaseStream, Header as DirectoryEntryHeader);
Header.HasData = hasData;
}

View File

@@ -3,16 +3,17 @@ using System.Collections.Generic;
using System.IO;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
using System.Text;
namespace SharpCompress.Common.Zip
{
internal class SeekableZipHeaderFactory : ZipHeaderFactory
{
private const int MAX_ITERATIONS_FOR_DIRECTORY_HEADER = 4096;
private bool zip64;
private bool _zip64;
internal SeekableZipHeaderFactory(string password)
: base(StreamingMode.Seekable, password)
internal SeekableZipHeaderFactory(string password, ArchiveEncoding archiveEncoding)
: base(StreamingMode.Seekable, password, archiveEncoding)
{
}
@@ -26,14 +27,14 @@ namespace SharpCompress.Common.Zip
if (entry.IsZip64)
{
zip64 = true;
_zip64 = true;
SeekBackToHeader(stream, reader, ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR);
var zip64Locator = new Zip64DirectoryEndLocatorHeader();
zip64Locator.Read(reader);
stream.Seek(zip64Locator.RelativeOffsetOfTheEndOfDirectoryRecord, SeekOrigin.Begin);
uint zip64Signature = reader.ReadUInt32();
if(zip64Signature != ZIP64_END_OF_CENTRAL_DIRECTORY)
if (zip64Signature != ZIP64_END_OF_CENTRAL_DIRECTORY)
throw new ArchiveException("Failed to locate the Zip64 Header");
var zip64Entry = new Zip64DirectoryEndHeader();
@@ -50,7 +51,7 @@ namespace SharpCompress.Common.Zip
{
stream.Position = position;
uint signature = reader.ReadUInt32();
var directoryEntryHeader = ReadHeader(signature, reader, zip64) as DirectoryEntryHeader;
var directoryEntryHeader = ReadHeader(signature, reader, _zip64) as DirectoryEntryHeader;
position = stream.Position;
if (directoryEntryHeader == null)
{
@@ -91,7 +92,7 @@ namespace SharpCompress.Common.Zip
stream.Seek(directoryEntryHeader.RelativeOffsetOfEntryHeader, SeekOrigin.Begin);
BinaryReader reader = new BinaryReader(stream);
uint signature = reader.ReadUInt32();
var localEntryHeader = ReadHeader(signature, reader, zip64) as LocalEntryHeader;
var localEntryHeader = ReadHeader(signature, reader, _zip64) as LocalEntryHeader;
if (localEntryHeader == null)
{
throw new InvalidOperationException();

View File

@@ -39,19 +39,20 @@ namespace SharpCompress.Common.Zip
{
return new BinaryReader(rewindableStream);
}
if (Header.HasData)
if (Header.HasData && !Skipped)
{
if (decompressionStream == null)
{
decompressionStream = GetCompressedStream();
}
decompressionStream.SkipAll();
decompressionStream.Skip();
DeflateStream deflateStream = decompressionStream as DeflateStream;
if (deflateStream != null)
{
rewindableStream.Rewind(deflateStream.InputBuffer);
}
Skipped = true;
}
var reader = new BinaryReader(rewindableStream);
decompressionStream = null;

View File

@@ -2,13 +2,14 @@
using System.IO;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
using System.Text;
namespace SharpCompress.Common.Zip
{
internal class StreamingZipHeaderFactory : ZipHeaderFactory
{
internal StreamingZipHeaderFactory(string password)
: base(StreamingMode.Streaming, password)
internal StreamingZipHeaderFactory(string password, ArchiveEncoding archiveEncoding)
: base(StreamingMode.Streaming, password, archiveEncoding)
{
}

View File

@@ -78,7 +78,7 @@ namespace SharpCompress.Common.Zip
{
//read out last 10 auth bytes
var ten = new byte[10];
stream.Read(ten, 0, 10);
stream.ReadFully(ten);
stream.Dispose();
}
}

View File

@@ -15,6 +15,7 @@ namespace SharpCompress.Common.Zip
internal abstract class ZipFilePart : FilePart
{
internal ZipFilePart(ZipFileEntry header, Stream stream)
: base(header.ArchiveEncoding)
{
Header = header;
header.Part = this;
@@ -88,7 +89,7 @@ namespace SharpCompress.Common.Zip
case ZipCompressionMethod.PPMd:
{
var props = new byte[2];
stream.Read(props, 0, props.Length);
stream.ReadFully(props);
return new PpmdStream(new PpmdProperties(props), stream, false);
}
case ZipCompressionMethod.WinzipAes:
@@ -127,11 +128,6 @@ namespace SharpCompress.Common.Zip
{
bool isFileEncrypted = FlagUtility.HasFlag(Header.Flags, HeaderFlags.Encrypted);
if (Header.CompressedSize == 0 && isFileEncrypted)
{
throw new NotSupportedException("Cannot encrypt file with unknown size at start.");
}
if ((Header.CompressedSize == 0
&& FlagUtility.HasFlag(Header.Flags, HeaderFlags.UsePostDataDescriptor))
|| Header.IsZip64)
@@ -175,7 +171,6 @@ namespace SharpCompress.Common.Zip
}
}
return plainStream;
}
}

View File

@@ -5,6 +5,7 @@ using System.Linq;
#endif
using SharpCompress.Common.Zip.Headers;
using SharpCompress.IO;
using System.Text;
namespace SharpCompress.Common.Zip
{
@@ -23,11 +24,13 @@ namespace SharpCompress.Common.Zip
protected LocalEntryHeader lastEntryHeader;
private readonly string password;
private readonly StreamingMode mode;
private readonly ArchiveEncoding archiveEncoding;
protected ZipHeaderFactory(StreamingMode mode, string password)
protected ZipHeaderFactory(StreamingMode mode, string password, ArchiveEncoding archiveEncoding)
{
this.mode = mode;
this.password = password;
this.archiveEncoding = archiveEncoding;
}
protected ZipHeader ReadHeader(uint headerBytes, BinaryReader reader, bool zip64 = false)
@@ -36,7 +39,7 @@ namespace SharpCompress.Common.Zip
{
case ENTRY_HEADER_BYTES:
{
var entryHeader = new LocalEntryHeader();
var entryHeader = new LocalEntryHeader(archiveEncoding);
entryHeader.Read(reader);
LoadHeader(entryHeader, reader.BaseStream);
@@ -45,48 +48,48 @@ namespace SharpCompress.Common.Zip
}
case DIRECTORY_START_HEADER_BYTES:
{
var entry = new DirectoryEntryHeader();
var entry = new DirectoryEntryHeader(archiveEncoding);
entry.Read(reader);
return entry;
}
case POST_DATA_DESCRIPTOR:
{
if (FlagUtility.HasFlag(lastEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor))
{
lastEntryHeader.Crc = reader.ReadUInt32();
lastEntryHeader.CompressedSize = zip64 ? (long)reader.ReadUInt64() : reader.ReadUInt32();
lastEntryHeader.UncompressedSize = zip64 ? (long)reader.ReadUInt64() : reader.ReadUInt32();
if (FlagUtility.HasFlag(lastEntryHeader.Flags, HeaderFlags.UsePostDataDescriptor))
{
lastEntryHeader.Crc = reader.ReadUInt32();
lastEntryHeader.CompressedSize = zip64 ? (long)reader.ReadUInt64() : reader.ReadUInt32();
lastEntryHeader.UncompressedSize = zip64 ? (long)reader.ReadUInt64() : reader.ReadUInt32();
}
else
{
reader.ReadBytes(zip64 ? 20 : 12);
}
return null;
}
else
{
reader.ReadBytes(zip64 ? 20 : 12);
}
return null;
}
case DIGITAL_SIGNATURE:
return null;
case DIRECTORY_END_HEADER_BYTES:
{
var entry = new DirectoryEndHeader();
entry.Read(reader);
return entry;
}
{
var entry = new DirectoryEndHeader();
entry.Read(reader);
return entry;
}
case SPLIT_ARCHIVE_HEADER_BYTES:
{
return new SplitHeader();
}
{
return new SplitHeader();
}
case ZIP64_END_OF_CENTRAL_DIRECTORY:
{
var entry = new Zip64DirectoryEndHeader();
entry.Read(reader);
return entry;
}
{
var entry = new Zip64DirectoryEndHeader();
entry.Read(reader);
return entry;
}
case ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR:
{
var entry = new Zip64DirectoryEndLocatorHeader();
entry.Read(reader);
return entry;
}
{
var entry = new Zip64DirectoryEndLocatorHeader();
entry.Read(reader);
return entry;
}
default:
throw new NotSupportedException("Unknown header: " + headerBytes);
}
@@ -165,22 +168,22 @@ namespace SharpCompress.Common.Zip
switch (mode)
{
case StreamingMode.Seekable:
{
entryHeader.DataStartPosition = stream.Position;
stream.Position += entryHeader.CompressedSize;
break;
}
{
entryHeader.DataStartPosition = stream.Position;
stream.Position += entryHeader.CompressedSize;
break;
}
case StreamingMode.Streaming:
{
entryHeader.PackedStream = stream;
break;
}
{
entryHeader.PackedStream = stream;
break;
}
default:
{
throw new InvalidFormatException("Invalid StreamingMode");
}
{
throw new InvalidFormatException("Invalid StreamingMode");
}
}
//}

View File

@@ -105,19 +105,19 @@ namespace SharpCompress.Compressors.ADC
}
if (buffer == null)
{
throw new ArgumentNullException("buffer");
throw new ArgumentNullException(nameof(buffer));
}
if (count < 0)
{
throw new ArgumentOutOfRangeException("count");
throw new ArgumentOutOfRangeException(nameof(count));
}
if (offset < buffer.GetLowerBound(0))
{
throw new ArgumentOutOfRangeException("offset");
throw new ArgumentOutOfRangeException(nameof(offset));
}
if ((offset + count) > buffer.GetLength(0))
{
throw new ArgumentOutOfRangeException("count");
throw new ArgumentOutOfRangeException(nameof(count));
}
int size = -1;

View File

@@ -26,6 +26,7 @@
using System;
using System.IO;
using System.Text;
namespace SharpCompress.Compressors.Deflate
{
@@ -36,9 +37,10 @@ namespace SharpCompress.Compressors.Deflate
public DeflateStream(Stream stream, CompressionMode mode,
CompressionLevel level = CompressionLevel.Default,
bool leaveOpen = false)
bool leaveOpen = false,
Encoding forceEncoding = null)
{
_baseStream = new ZlibBaseStream(stream, mode, level, ZlibStreamFlavor.DEFLATE, leaveOpen);
_baseStream = new ZlibBaseStream(stream, mode, level, ZlibStreamFlavor.DEFLATE, leaveOpen, forceEncoding);
}
#region Zlib properties

View File

@@ -30,41 +30,45 @@ using System;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Converters;
using System.Text;
namespace SharpCompress.Compressors.Deflate
{
public class GZipStream : Stream
{
internal static readonly DateTime UnixEpoch = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
internal static readonly DateTime UNIX_EPOCH = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
public DateTime? LastModified { get; set; }
private string comment;
private string fileName;
private string _comment;
private string _fileName;
internal ZlibBaseStream BaseStream;
private bool disposed;
private bool firstReadDone;
private int headerByteCount;
private bool _disposed;
private bool _firstReadDone;
private int _headerByteCount;
private readonly Encoding _encoding;
public GZipStream(Stream stream, CompressionMode mode)
: this(stream, mode, CompressionLevel.Default, false)
: this(stream, mode, CompressionLevel.Default, false, Encoding.UTF8)
{
}
public GZipStream(Stream stream, CompressionMode mode, CompressionLevel level)
: this(stream, mode, level, false)
: this(stream, mode, level, false, Encoding.UTF8)
{
}
public GZipStream(Stream stream, CompressionMode mode, bool leaveOpen)
: this(stream, mode, CompressionLevel.Default, leaveOpen)
: this(stream, mode, CompressionLevel.Default, leaveOpen, Encoding.UTF8)
{
}
public GZipStream(Stream stream, CompressionMode mode, CompressionLevel level, bool leaveOpen)
public GZipStream(Stream stream, CompressionMode mode, CompressionLevel level, bool leaveOpen, Encoding encoding)
{
BaseStream = new ZlibBaseStream(stream, mode, level, ZlibStreamFlavor.GZIP, leaveOpen);
BaseStream = new ZlibBaseStream(stream, mode, level, ZlibStreamFlavor.GZIP, leaveOpen, encoding);
_encoding = encoding;
}
#region Zlib properties
@@ -74,7 +78,7 @@ namespace SharpCompress.Compressors.Deflate
get => (BaseStream._flushMode);
set
{
if (disposed)
if (_disposed)
{
throw new ObjectDisposedException("GZipStream");
}
@@ -87,7 +91,7 @@ namespace SharpCompress.Compressors.Deflate
get => BaseStream._bufferSize;
set
{
if (disposed)
if (_disposed)
{
throw new ObjectDisposedException("GZipStream");
}
@@ -123,7 +127,7 @@ namespace SharpCompress.Compressors.Deflate
{
get
{
if (disposed)
if (_disposed)
{
throw new ObjectDisposedException("GZipStream");
}
@@ -149,7 +153,7 @@ namespace SharpCompress.Compressors.Deflate
{
get
{
if (disposed)
if (_disposed)
{
throw new ObjectDisposedException("GZipStream");
}
@@ -179,7 +183,7 @@ namespace SharpCompress.Compressors.Deflate
{
if (BaseStream._streamMode == ZlibBaseStream.StreamMode.Writer)
{
return BaseStream._z.TotalBytesOut + headerByteCount;
return BaseStream._z.TotalBytesOut + _headerByteCount;
}
if (BaseStream._streamMode == ZlibBaseStream.StreamMode.Reader)
{
@@ -202,14 +206,14 @@ namespace SharpCompress.Compressors.Deflate
{
try
{
if (!disposed)
if (!_disposed)
{
if (disposing && (BaseStream != null))
{
BaseStream.Dispose();
Crc32 = BaseStream.Crc32;
}
disposed = true;
_disposed = true;
}
}
finally
@@ -223,7 +227,7 @@ namespace SharpCompress.Compressors.Deflate
/// </summary>
public override void Flush()
{
if (disposed)
if (_disposed)
{
throw new ObjectDisposedException("GZipStream");
}
@@ -263,7 +267,7 @@ namespace SharpCompress.Compressors.Deflate
/// <returns>the number of bytes actually read</returns>
public override int Read(byte[] buffer, int offset, int count)
{
if (disposed)
if (_disposed)
{
throw new ObjectDisposedException("GZipStream");
}
@@ -272,9 +276,9 @@ namespace SharpCompress.Compressors.Deflate
// Console.WriteLine("GZipStream::Read(buffer, off({0}), c({1}) = {2}", offset, count, n);
// Console.WriteLine( Util.FormatByteArray(buffer, offset, n) );
if (!firstReadDone)
if (!_firstReadDone)
{
firstReadDone = true;
_firstReadDone = true;
FileName = BaseStream._GzipFileName;
Comment = BaseStream._GzipComment;
}
@@ -325,7 +329,7 @@ namespace SharpCompress.Compressors.Deflate
/// <param name="count">the number of bytes to write.</param>
public override void Write(byte[] buffer, int offset, int count)
{
if (disposed)
if (_disposed)
{
throw new ObjectDisposedException("GZipStream");
}
@@ -335,7 +339,7 @@ namespace SharpCompress.Compressors.Deflate
if (BaseStream._wantCompress)
{
// first write in compression, therefore, emit the GZIP header
headerByteCount = EmitHeader();
_headerByteCount = EmitHeader();
}
else
{
@@ -346,56 +350,56 @@ namespace SharpCompress.Compressors.Deflate
BaseStream.Write(buffer, offset, count);
}
#endregion
#endregion Stream methods
public String Comment
{
get => comment;
get => _comment;
set
{
if (disposed)
if (_disposed)
{
throw new ObjectDisposedException("GZipStream");
}
comment = value;
_comment = value;
}
}
public string FileName
{
get => fileName;
get => _fileName;
set
{
if (disposed)
if (_disposed)
{
throw new ObjectDisposedException("GZipStream");
}
fileName = value;
if (fileName == null)
_fileName = value;
if (_fileName == null)
{
return;
}
if (fileName.IndexOf("/") != -1)
if (_fileName.IndexOf("/") != -1)
{
fileName = fileName.Replace("/", "\\");
_fileName = _fileName.Replace("/", "\\");
}
if (fileName.EndsWith("\\"))
if (_fileName.EndsWith("\\"))
{
throw new InvalidOperationException("Illegal filename");
}
var index = fileName.IndexOf("\\");
var index = _fileName.IndexOf("\\");
if (index != -1)
{
// trim any leading path
int length = fileName.Length;
int length = _fileName.Length;
int num = length;
while (--num >= 0)
{
char c = fileName[num];
char c = _fileName[num];
if (c == '\\')
{
fileName = fileName.Substring(num + 1, length - num - 1);
_fileName = _fileName.Substring(num + 1, length - num - 1);
}
}
}
@@ -406,8 +410,10 @@ namespace SharpCompress.Compressors.Deflate
private int EmitHeader()
{
byte[] commentBytes = (Comment == null) ? null : ArchiveEncoding.Default.GetBytes(Comment);
byte[] filenameBytes = (FileName == null) ? null : ArchiveEncoding.Default.GetBytes(FileName);
byte[] commentBytes = (Comment == null) ? null
: _encoding.GetBytes(Comment);
byte[] filenameBytes = (FileName == null) ? null
: _encoding.GetBytes(FileName);
int cbLength = (Comment == null) ? 0 : commentBytes.Length + 1;
int fnLength = (FileName == null) ? 0 : filenameBytes.Length + 1;
@@ -440,7 +446,7 @@ namespace SharpCompress.Compressors.Deflate
{
LastModified = DateTime.Now;
}
TimeSpan delta = LastModified.Value - UnixEpoch;
TimeSpan delta = LastModified.Value - UNIX_EPOCH;
var timet = (Int32)delta.TotalSeconds;
DataConverter.LittleEndian.PutBytes(header, i, timet);
i += 4;

View File

@@ -1,20 +1,20 @@
// ZlibBaseStream.cs
// ------------------------------------------------------------------
//
// Copyright (c) 2009 Dino Chiesa and Microsoft Corporation.
// Copyright (c) 2009 Dino Chiesa and Microsoft Corporation.
// All rights reserved.
//
// This code module is part of DotNetZip, a zipfile class library.
//
// ------------------------------------------------------------------
//
// This code is licensed under the Microsoft Public License.
// This code is licensed under the Microsoft Public License.
// See the file License.txt for the license details.
// More info on: http://dotnetzip.codeplex.com
//
// ------------------------------------------------------------------
//
// last saved (in emacs):
// last saved (in emacs):
// Time-stamp: <2009-October-28 15:45:15>
//
// ------------------------------------------------------------------
@@ -30,6 +30,7 @@ using System.IO;
using SharpCompress.Common;
using SharpCompress.Common.Tar.Headers;
using SharpCompress.Converters;
using System.Text;
namespace SharpCompress.Compressors.Deflate
{
@@ -64,6 +65,8 @@ namespace SharpCompress.Compressors.Deflate
protected internal DateTime _GzipMtime;
protected internal int _gzipHeaderByteCount;
private readonly Encoding _encoding;
internal int Crc32
{
get
@@ -80,7 +83,8 @@ namespace SharpCompress.Compressors.Deflate
CompressionMode compressionMode,
CompressionLevel level,
ZlibStreamFlavor flavor,
bool leaveOpen)
bool leaveOpen,
Encoding encoding)
{
_flushMode = FlushType.None;
@@ -91,6 +95,8 @@ namespace SharpCompress.Compressors.Deflate
_flavor = flavor;
_level = level;
_encoding = encoding;
// workitem 7159
if (flavor == ZlibStreamFlavor.GZIP)
{
@@ -418,8 +424,8 @@ namespace SharpCompress.Compressors.Deflate
}
}
while (!done);
byte[] a = list.ToArray();
return ArchiveEncoding.Default.GetString(a, 0, a.Length);
byte[] buffer = list.ToArray();
return _encoding.GetString(buffer, 0, buffer.Length);
}
private int _ReadAndValidateGzipHeader()
@@ -528,19 +534,19 @@ namespace SharpCompress.Compressors.Deflate
}
if (buffer == null)
{
throw new ArgumentNullException("buffer");
throw new ArgumentNullException(nameof(buffer));
}
if (count < 0)
{
throw new ArgumentOutOfRangeException("count");
throw new ArgumentOutOfRangeException(nameof(count));
}
if (offset < buffer.GetLowerBound(0))
{
throw new ArgumentOutOfRangeException("offset");
throw new ArgumentOutOfRangeException(nameof(offset));
}
if ((offset + count) > buffer.GetLength(0))
{
throw new ArgumentOutOfRangeException("count");
throw new ArgumentOutOfRangeException(nameof(count));
}
int rc = 0;
@@ -593,7 +599,7 @@ namespace SharpCompress.Compressors.Deflate
while (_z.AvailableBytesOut > 0 && !nomoreinput && rc == ZlibConstants.Z_OK);
// workitem 8557
// is there more room in output?
// is there more room in output?
if (_z.AvailableBytesOut > 0)
{
if (rc == ZlibConstants.Z_OK && _z.AvailableBytesIn == 0)

View File

@@ -27,6 +27,7 @@
using System;
using System.IO;
using System.Text;
namespace SharpCompress.Compressors.Deflate
{
@@ -36,23 +37,23 @@ namespace SharpCompress.Compressors.Deflate
private bool _disposed;
public ZlibStream(Stream stream, CompressionMode mode)
: this(stream, mode, CompressionLevel.Default, false)
: this(stream, mode, CompressionLevel.Default, false, Encoding.UTF8)
{
}
public ZlibStream(Stream stream, CompressionMode mode, CompressionLevel level)
: this(stream, mode, level, false)
: this(stream, mode, level, false, Encoding.UTF8)
{
}
public ZlibStream(Stream stream, CompressionMode mode, bool leaveOpen)
: this(stream, mode, CompressionLevel.Default, leaveOpen)
: this(stream, mode, CompressionLevel.Default, leaveOpen, Encoding.UTF8)
{
}
public ZlibStream(Stream stream, CompressionMode mode, CompressionLevel level, bool leaveOpen)
public ZlibStream(Stream stream, CompressionMode mode, CompressionLevel level, bool leaveOpen, Encoding encoding)
{
_baseStream = new ZlibBaseStream(stream, mode, level, ZlibStreamFlavor.ZLIB, leaveOpen);
_baseStream = new ZlibBaseStream(stream, mode, level, ZlibStreamFlavor.ZLIB, leaveOpen, encoding);
}
#region Zlib properties
@@ -326,6 +327,6 @@ namespace SharpCompress.Compressors.Deflate
_baseStream.Write(buffer, offset, count);
}
#endregion
#endregion System.IO.Stream methods
}
}

View File

@@ -58,7 +58,7 @@ namespace SharpCompress.Compressors.LZMA
{
if (index < 0 || index >= Length)
{
throw new ArgumentOutOfRangeException("index");
throw new ArgumentOutOfRangeException(nameof(index));
}
return (mBits[index >> 5] & (1u << (index & 31))) != 0;
@@ -69,7 +69,7 @@ namespace SharpCompress.Compressors.LZMA
{
if (index < 0 || index >= Length)
{
throw new ArgumentOutOfRangeException("index");
throw new ArgumentOutOfRangeException(nameof(index));
}
mBits[index >> 5] |= 1u << (index & 31);
@@ -79,7 +79,7 @@ namespace SharpCompress.Compressors.LZMA
{
if (index < 0 || index >= Length)
{
throw new ArgumentOutOfRangeException("index");
throw new ArgumentOutOfRangeException(nameof(index));
}
uint bits = mBits[index >> 5];

View File

@@ -58,22 +58,22 @@ namespace SharpCompress.Compressors.LZMA.Utilites
{
if (stream == null)
{
throw new ArgumentNullException("stream");
throw new ArgumentNullException(nameof(stream));
}
if (buffer == null)
{
throw new ArgumentNullException("buffer");
throw new ArgumentNullException(nameof(buffer));
}
if (offset < 0 || offset > buffer.Length)
{
throw new ArgumentOutOfRangeException("offset");
throw new ArgumentOutOfRangeException(nameof(offset));
}
if (length < 0 || length > buffer.Length - offset)
{
throw new ArgumentOutOfRangeException("length");
throw new ArgumentOutOfRangeException(nameof(length));
}
while (length > 0)

View File

@@ -146,12 +146,12 @@ namespace SharpCompress.Compressors.PPMd.I1
{
if (target == null)
{
throw new ArgumentNullException("target");
throw new ArgumentNullException(nameof(target));
}
if (source == null)
{
throw new ArgumentNullException("source");
throw new ArgumentNullException(nameof(source));
}
EncodeStart(properties);
@@ -235,12 +235,12 @@ namespace SharpCompress.Compressors.PPMd.I1
{
if (target == null)
{
throw new ArgumentNullException("target");
throw new ArgumentNullException(nameof(target));
}
if (source == null)
{
throw new ArgumentNullException("source");
throw new ArgumentNullException(nameof(source));
}
DecodeStart(source, properties);

View File

@@ -18,9 +18,11 @@ namespace SharpCompress.Compressors.Xz
public static int ReadLittleEndianInt32(this Stream stream)
{
byte[] bytes = new byte[4];
var read = stream.Read(bytes, 0, 4);
if (read != 4)
var read = stream.ReadFully(bytes);
if (!read)
{
throw new EndOfStreamException();
}
return (bytes[0] + (bytes[1] << 8) + (bytes[2] << 16) + (bytes[3] << 24));
}

View File

@@ -156,7 +156,7 @@ namespace SharpCompress.Converters
{
if (dest == null)
{
throw new ArgumentNullException("dest");
throw new ArgumentNullException(nameof(dest));
}
if (destIdx < 0 || destIdx > dest.Length - size)
{
@@ -170,7 +170,7 @@ namespace SharpCompress.Converters
{
if (data == null)
{
throw new ArgumentNullException("data");
throw new ArgumentNullException(nameof(data));
}
if (data.Length - index < 8)
{
@@ -195,7 +195,7 @@ namespace SharpCompress.Converters
{
if (data == null)
{
throw new ArgumentNullException("data");
throw new ArgumentNullException(nameof(data));
}
if (data.Length - index < 8)
{
@@ -221,7 +221,7 @@ namespace SharpCompress.Converters
{
if (data == null)
{
throw new ArgumentNullException("data");
throw new ArgumentNullException(nameof(data));
}
if (data.Length - index < 8)
{
@@ -247,7 +247,7 @@ namespace SharpCompress.Converters
{
if (data == null)
{
throw new ArgumentNullException("data");
throw new ArgumentNullException(nameof(data));
}
if (data.Length - index < 4)
{
@@ -273,7 +273,7 @@ namespace SharpCompress.Converters
{
if (data == null)
{
throw new ArgumentNullException("data");
throw new ArgumentNullException(nameof(data));
}
if (data.Length - index < 4)
{
@@ -299,7 +299,7 @@ namespace SharpCompress.Converters
{
if (data == null)
{
throw new ArgumentNullException("data");
throw new ArgumentNullException(nameof(data));
}
if (data.Length - index < 4)
{
@@ -325,7 +325,7 @@ namespace SharpCompress.Converters
{
if (data == null)
{
throw new ArgumentNullException("data");
throw new ArgumentNullException(nameof(data));
}
if (data.Length - index < 2)
{
@@ -351,7 +351,7 @@ namespace SharpCompress.Converters
{
if (data == null)
{
throw new ArgumentNullException("data");
throw new ArgumentNullException(nameof(data));
}
if (data.Length - index < 2)
{
@@ -468,7 +468,7 @@ namespace SharpCompress.Converters
{
if (data == null)
{
throw new ArgumentNullException("data");
throw new ArgumentNullException(nameof(data));
}
if (data.Length - index < 8)
{
@@ -494,7 +494,7 @@ namespace SharpCompress.Converters
{
if (data == null)
{
throw new ArgumentNullException("data");
throw new ArgumentNullException(nameof(data));
}
if (data.Length - index < 8)
{
@@ -520,7 +520,7 @@ namespace SharpCompress.Converters
{
if (data == null)
{
throw new ArgumentNullException("data");
throw new ArgumentNullException(nameof(data));
}
if (data.Length - index < 8)
{
@@ -546,7 +546,7 @@ namespace SharpCompress.Converters
{
if (data == null)
{
throw new ArgumentNullException("data");
throw new ArgumentNullException(nameof(data));
}
if (data.Length - index < 4)
{
@@ -572,7 +572,7 @@ namespace SharpCompress.Converters
{
if (data == null)
{
throw new ArgumentNullException("data");
throw new ArgumentNullException(nameof(data));
}
if (data.Length - index < 4)
{
@@ -598,7 +598,7 @@ namespace SharpCompress.Converters
{
if (data == null)
{
throw new ArgumentNullException("data");
throw new ArgumentNullException(nameof(data));
}
if (data.Length - index < 4)
{
@@ -624,7 +624,7 @@ namespace SharpCompress.Converters
{
if (data == null)
{
throw new ArgumentNullException("data");
throw new ArgumentNullException(nameof(data));
}
if (data.Length - index < 2)
{
@@ -650,7 +650,7 @@ namespace SharpCompress.Converters
{
if (data == null)
{
throw new ArgumentNullException("data");
throw new ArgumentNullException(nameof(data));
}
if (data.Length - index < 2)
{

View File

@@ -12,7 +12,7 @@ namespace Org.BouncyCastle.Crypto.Parameters
{
if (key == null)
{
throw new ArgumentNullException("key");
throw new ArgumentNullException(nameof(key));
}
this.key = (byte[])key.Clone();
@@ -25,15 +25,15 @@ namespace Org.BouncyCastle.Crypto.Parameters
{
if (key == null)
{
throw new ArgumentNullException("key");
throw new ArgumentNullException(nameof(key));
}
if (keyOff < 0 || keyOff > key.Length)
{
throw new ArgumentOutOfRangeException("keyOff");
throw new ArgumentOutOfRangeException(nameof(keyOff));
}
if (keyLen < 0 || (keyOff + keyLen) > key.Length)
{
throw new ArgumentOutOfRangeException("keyLen");
throw new ArgumentOutOfRangeException(nameof(keyLen));
}
this.key = new byte[keyLen];

View File

@@ -139,8 +139,6 @@ namespace SharpCompress.Readers
}
}
private readonly byte[] skipBuffer = new byte[4096];
private void Skip()
{
if (ArchiveType != ArchiveType.Rar
@@ -148,25 +146,21 @@ namespace SharpCompress.Readers
&& Entry.CompressedSize > 0)
{
//not solid and has a known compressed size then we can skip raw bytes.
var rawStream = Entry.Parts.First().GetRawStream();
var part = Entry.Parts.First();
var rawStream = part.GetRawStream();
if (rawStream != null)
{
var bytesToAdvance = Entry.CompressedSize;
for (var i = 0; i < bytesToAdvance / skipBuffer.Length; i++)
{
rawStream.Read(skipBuffer, 0, skipBuffer.Length);
}
rawStream.Read(skipBuffer, 0, (int)(bytesToAdvance % skipBuffer.Length));
rawStream.Skip(bytesToAdvance);
part.Skipped = true;
return;
}
}
//don't know the size so we have to try to decompress to skip
using (var s = OpenEntryStream())
{
while (s.Read(skipBuffer, 0, skipBuffer.Length) > 0)
{
}
s.Skip();
}
}

View File

@@ -29,11 +29,11 @@ namespace SharpCompress.Readers.GZip
return new GZipReader(stream, options ?? new ReaderOptions());
}
#endregion
#endregion Open
internal override IEnumerable<GZipEntry> GetEntries(Stream stream)
{
return GZipEntry.GetEntries(stream);
return GZipEntry.GetEntries(stream, Options);
}
}
}

View File

@@ -93,10 +93,6 @@ namespace SharpCompress.Readers
using (FileStream fs = File.Open(destinationFileName, fm))
{
reader.WriteEntryTo(fs);
//using (Stream s = reader.OpenEntryStream())
//{
// s.TransferTo(fs);
//}
}
reader.Entry.PreserveExtractionOptions(destinationFileName, options);
}

View File

@@ -8,6 +8,7 @@ namespace SharpCompress.Readers
/// Look for RarArchive (Check for self-extracting archives or cases where RarArchive isn't at the start of the file)
/// </summary>
public bool LookForHeader { get; set; }
public string Password { get; set; }
}
}

View File

@@ -114,11 +114,11 @@ namespace SharpCompress.Readers.Tar
return new TarReader(rewindableStream, options, CompressionType.None);
}
#endregion
#endregion Open
internal override IEnumerable<TarEntry> GetEntries(Stream stream)
{
return TarEntry.GetEntries(StreamingMode.Streaming, stream, compressionType);
return TarEntry.GetEntries(StreamingMode.Streaming, stream, compressionType, Options.ArchiveEncoding);
}
}
}

View File

@@ -8,13 +8,13 @@ namespace SharpCompress.Readers.Zip
{
public class ZipReader : AbstractReader<ZipEntry, ZipVolume>
{
private readonly StreamingZipHeaderFactory headerFactory;
private readonly StreamingZipHeaderFactory _headerFactory;
internal ZipReader(Stream stream, ReaderOptions options)
: base(options, ArchiveType.Zip)
{
Volume = new ZipVolume(stream, options);
headerFactory = new StreamingZipHeaderFactory(options.Password);
_headerFactory = new StreamingZipHeaderFactory(options.Password, options.ArchiveEncoding);
}
public override ZipVolume Volume { get; }
@@ -33,26 +33,26 @@ namespace SharpCompress.Readers.Zip
return new ZipReader(stream, options ?? new ReaderOptions());
}
#endregion
#endregion Open
internal override IEnumerable<ZipEntry> GetEntries(Stream stream)
{
foreach (ZipHeader h in headerFactory.ReadStreamHeader(stream))
foreach (ZipHeader h in _headerFactory.ReadStreamHeader(stream))
{
if (h != null)
{
switch (h.ZipHeaderType)
{
case ZipHeaderType.LocalEntry:
{
yield return new ZipEntry(new StreamingZipFilePart(h as LocalEntryHeader,
stream));
}
{
yield return new ZipEntry(new StreamingZipFilePart(h as LocalEntryHeader,
stream));
}
break;
case ZipHeaderType.DirectoryEnd:
{
yield break;
}
{
yield break;
}
}
}
}

View File

@@ -2,9 +2,9 @@
<PropertyGroup>
<AssemblyTitle>SharpCompress - Pure C# Decompression/Compression</AssemblyTitle>
<NeutralLanguage>en-US</NeutralLanguage>
<VersionPrefix>0.17.1</VersionPrefix>
<AssemblyVersion>0.17.1.0</AssemblyVersion>
<FileVersion>0.17.1.0</FileVersion>
<VersionPrefix>0.18</VersionPrefix>
<AssemblyVersion>0.18.0.0</AssemblyVersion>
<FileVersion>0.18.0.0</FileVersion>
<Authors>Adam Hathcock</Authors>
<TargetFrameworks Condition="'$(LibraryFrameworks)'==''">net45;net35;netstandard1.0;netstandard1.3</TargetFrameworks>
<TargetFrameworks Condition="'$(LibraryFrameworks)'!=''">$(LibraryFrameworks)</TargetFrameworks>

View File

@@ -7,7 +7,7 @@ using SharpCompress.Readers;
namespace SharpCompress
{
internal static class Utility
{
{
public static ReadOnlyCollection<T> ToReadOnly<T>(this IEnumerable<T> items)
{
return new ReadOnlyCollection<T>(items.ToList());
@@ -138,7 +138,7 @@ namespace SharpCompress
public static void Skip(this Stream source, long advanceAmount)
{
byte[] buffer = new byte[32 * 1024];
byte[] buffer = GetTransferByteArray();
int read = 0;
int readCount = 0;
do
@@ -162,9 +162,9 @@ namespace SharpCompress
while (true);
}
public static void SkipAll(this Stream source)
public static void Skip(this Stream source)
{
byte[] buffer = new byte[32 * 1024];
byte[] buffer = GetTransferByteArray();
do
{
}

View File

@@ -6,29 +6,30 @@ namespace SharpCompress.Writers
{
public abstract class AbstractWriter : IWriter
{
private bool closeStream;
private bool isDisposed;
protected AbstractWriter(ArchiveType type)
protected AbstractWriter(ArchiveType type, WriterOptions writerOptions)
{
WriterType = type;
WriterOptions = writerOptions;
}
protected void InitalizeStream(Stream stream, bool closeStream)
protected void InitalizeStream(Stream stream)
{
OutputStream = stream;
this.closeStream = closeStream;
}
protected Stream OutputStream { get; private set; }
public ArchiveType WriterType { get; }
protected WriterOptions WriterOptions { get; }
public abstract void Write(string filename, Stream source, DateTime? modificationTime);
protected virtual void Dispose(bool isDisposing)
{
if (isDisposing && closeStream)
if (isDisposing && !WriterOptions.LeaveStreamOpen)
{
OutputStream.Dispose();
}

View File

@@ -8,12 +8,15 @@ namespace SharpCompress.Writers.GZip
{
public class GZipWriter : AbstractWriter
{
private bool wroteToStream;
private bool _wroteToStream;
public GZipWriter(Stream destination, bool leaveOpen = false)
: base(ArchiveType.GZip)
public GZipWriter(Stream destination, GZipWriterOptions options = null)
: base(ArchiveType.GZip, options ?? new GZipWriterOptions())
{
InitalizeStream(new GZipStream(destination, CompressionMode.Compress, leaveOpen), !leaveOpen);
InitalizeStream(new GZipStream(destination, CompressionMode.Compress,
options?.CompressionLevel ?? CompressionLevel.Default,
WriterOptions.LeaveStreamOpen,
WriterOptions.ArchiveEncoding.GetEncoding()));
}
protected override void Dispose(bool isDisposing)
@@ -28,7 +31,7 @@ namespace SharpCompress.Writers.GZip
public override void Write(string filename, Stream source, DateTime? modificationTime)
{
if (wroteToStream)
if (_wroteToStream)
{
throw new ArgumentException("Can only write a single stream to a GZip file.");
}
@@ -36,7 +39,7 @@ namespace SharpCompress.Writers.GZip
stream.FileName = filename;
stream.LastModified = modificationTime;
source.TransferTo(stream);
wroteToStream = true;
_wroteToStream = true;
}
}
}

View File

@@ -0,0 +1,28 @@
using SharpCompress.Common;
using SharpCompress.Compressors.Deflate;
namespace SharpCompress.Writers.GZip
{
public class GZipWriterOptions : WriterOptions
{
public GZipWriterOptions()
: base(CompressionType.GZip)
{
}
internal GZipWriterOptions(WriterOptions options)
: base(options.CompressionType)
{
LeaveStreamOpen = options.LeaveStreamOpen;
ArchiveEncoding = options.ArchiveEncoding;
var writerOptions = options as GZipWriterOptions;
if (writerOptions != null)
{
CompressionLevel = writerOptions.CompressionLevel;
}
}
public CompressionLevel CompressionLevel { get; set; } = CompressionLevel.Default;
}
}

View File

@@ -12,7 +12,7 @@ namespace SharpCompress.Writers.Tar
public class TarWriter : AbstractWriter
{
public TarWriter(Stream destination, WriterOptions options)
: base(ArchiveType.Tar)
: base(ArchiveType.Tar, options)
{
if (!destination.CanWrite)
{
@@ -42,7 +42,7 @@ namespace SharpCompress.Writers.Tar
throw new InvalidFormatException("Tar does not support compression: " + options.CompressionType);
}
}
InitalizeStream(destination, true);
InitalizeStream(destination);
}
public override void Write(string filename, Stream source, DateTime? modificationTime)
@@ -72,7 +72,8 @@ namespace SharpCompress.Writers.Tar
long realSize = size ?? source.Length;
TarHeader header = new TarHeader();
TarHeader header = new TarHeader(WriterOptions.ArchiveEncoding);
header.LastModifiedTime = modificationTime ?? TarHeader.Epoch;
header.Name = NormalizeFilename(filename);
header.Size = realSize;

View File

@@ -19,7 +19,7 @@ namespace SharpCompress.Writers
{
throw new InvalidFormatException("GZip archives only support GZip compression type.");
}
return new GZipWriter(stream, writerOptions.LeaveStreamOpen);
return new GZipWriter(stream, new GZipWriterOptions(writerOptions));
}
case ArchiveType.Zip:
{

View File

@@ -1,6 +1,7 @@
using System;
using System.IO;
using System.Text;
using SharpCompress.Common;
using SharpCompress.Common.Zip;
using SharpCompress.Common.Zip.Headers;
using SharpCompress.Converters;
@@ -11,14 +12,16 @@ namespace SharpCompress.Writers.Zip
{
private readonly ZipCompressionMethod compression;
private readonly string fileName;
private readonly ArchiveEncoding archiveEncoding;
public ZipCentralDirectoryEntry(ZipCompressionMethod compression, string fileName, ulong headerOffset)
public ZipCentralDirectoryEntry(ZipCompressionMethod compression, string fileName, ulong headerOffset, ArchiveEncoding archiveEncoding)
{
this.compression = compression;
this.fileName = fileName;
HeaderOffset = headerOffset;
this.archiveEncoding = archiveEncoding;
}
internal DateTime? ModificationTime { get; set; }
internal string Comment { get; set; }
internal uint Crc { get; set; }
@@ -29,11 +32,11 @@ namespace SharpCompress.Writers.Zip
internal uint Write(Stream outputStream)
{
byte[] encodedFilename = Encoding.UTF8.GetBytes(fileName);
byte[] encodedComment = Encoding.UTF8.GetBytes(Comment);
byte[] encodedFilename = archiveEncoding.Encode(fileName);
byte[] encodedComment = archiveEncoding.Encode(Comment);
var zip64_stream = Compressed >= uint.MaxValue || Decompressed >= uint.MaxValue;
var zip64 = zip64_stream || HeaderOffset >= uint.MaxValue || Zip64HeaderOffset != 0;
var zip64_stream = Compressed >= uint.MaxValue || Decompressed >= uint.MaxValue;
var zip64 = zip64_stream || HeaderOffset >= uint.MaxValue || Zip64HeaderOffset != 0;
var compressedvalue = zip64 ? uint.MaxValue : (uint)Compressed;
var decompressedvalue = zip64 ? uint.MaxValue : (uint)Decompressed;
@@ -41,18 +44,18 @@ namespace SharpCompress.Writers.Zip
var extralength = zip64 ? (2 + 2 + 8 + 8 + 8 + 4) : 0;
var version = (byte)(zip64 ? 45 : 20); // Version 20 required for deflate/encryption
HeaderFlags flags = HeaderFlags.UTF8;
HeaderFlags flags = Equals(archiveEncoding.GetEncoding(), Encoding.UTF8) ? HeaderFlags.UTF8 : HeaderFlags.None;
if (!outputStream.CanSeek)
{
// Cannot use data descriptors with zip64:
// https://blogs.oracle.com/xuemingshen/entry/is_zipinput_outputstream_handling_of
// We check that streams are not written too large in the ZipWritingStream,
// so this extra guard is not required, but kept to simplify changing the code
// once the zip64 post-data issue is resolved
// We check that streams are not written too large in the ZipWritingStream,
// so this extra guard is not required, but kept to simplify changing the code
// once the zip64 post-data issue is resolved
if (!zip64_stream)
flags |= HeaderFlags.UsePostDataDescriptor;
if (compression == ZipCompressionMethod.LZMA)
{
flags |= HeaderFlags.Bit1; // eos marker

View File

@@ -17,38 +17,31 @@ namespace SharpCompress.Writers.Zip
{
public class ZipWriter : AbstractWriter
{
private readonly CompressionType compressionType;
private readonly CompressionLevel compressionLevel;
private readonly List<ZipCentralDirectoryEntry> entries = new List<ZipCentralDirectoryEntry>();
private readonly string zipComment;
private long streamPosition;
private PpmdProperties ppmdProps;
private readonly bool isZip64;
private readonly ZipWriterOptions _zipWriterOptions;
private readonly List<ZipCentralDirectoryEntry> _entries = new List<ZipCentralDirectoryEntry>();
private long _streamPosition;
private PpmdProperties _ppmdProps;
public ZipWriter(Stream destination, ZipWriterOptions zipWriterOptions)
: base(ArchiveType.Zip)
: base(ArchiveType.Zip, zipWriterOptions)
{
zipComment = zipWriterOptions.ArchiveComment ?? string.Empty;
isZip64 = zipWriterOptions.UseZip64;
_zipWriterOptions = zipWriterOptions;
if (destination.CanSeek)
{
streamPosition = destination.Position;
_streamPosition = destination.Position;
}
compressionType = zipWriterOptions.CompressionType;
compressionLevel = zipWriterOptions.DeflateCompressionLevel;
InitalizeStream(destination, !zipWriterOptions.LeaveStreamOpen);
InitalizeStream(destination);
}
private PpmdProperties PpmdProperties
{
get
{
if (ppmdProps == null)
if (_ppmdProps == null)
{
ppmdProps = new PpmdProperties();
_ppmdProps = new PpmdProperties();
}
return ppmdProps;
return _ppmdProps;
}
}
@@ -57,7 +50,7 @@ namespace SharpCompress.Writers.Zip
if (isDisposing)
{
ulong size = 0;
foreach (ZipCentralDirectoryEntry entry in entries)
foreach (ZipCentralDirectoryEntry entry in _entries)
{
size += entry.Write(OutputStream);
}
@@ -65,6 +58,7 @@ namespace SharpCompress.Writers.Zip
}
base.Dispose(isDisposing);
}
private static ZipCompressionMethod ToZipCompressionMethod(CompressionType compressionType)
{
switch (compressionType)
@@ -97,9 +91,9 @@ namespace SharpCompress.Writers.Zip
public override void Write(string entryPath, Stream source, DateTime? modificationTime)
{
Write(entryPath, source, new ZipWriterEntryOptions()
{
ModificationDateTime = modificationTime
});
{
ModificationDateTime = modificationTime
});
}
public void Write(string entryPath, Stream source, ZipWriterEntryOptions zipWriterEntryOptions)
@@ -112,26 +106,27 @@ namespace SharpCompress.Writers.Zip
public Stream WriteToStream(string entryPath, ZipWriterEntryOptions options)
{
var compression = ToZipCompressionMethod(options.CompressionType ?? compressionType);
var compression = ToZipCompressionMethod(options.CompressionType ?? _zipWriterOptions.CompressionType);
entryPath = NormalizeFilename(entryPath);
options.ModificationDateTime = options.ModificationDateTime ?? DateTime.Now;
options.EntryComment = options.EntryComment ?? string.Empty;
var entry = new ZipCentralDirectoryEntry(compression, entryPath, (ulong)streamPosition)
{
Comment = options.EntryComment,
ModificationTime = options.ModificationDateTime
};
var entry = new ZipCentralDirectoryEntry(compression, entryPath, (ulong)_streamPosition, WriterOptions.ArchiveEncoding)
{
Comment = options.EntryComment,
ModificationTime = options.ModificationDateTime
};
// Use the archive default setting for zip64 and allow overrides
var useZip64 = isZip64;
var useZip64 = _zipWriterOptions.UseZip64;
if (options.EnableZip64.HasValue)
{
useZip64 = options.EnableZip64.Value;
}
var headersize = (uint)WriteHeader(entryPath, options, entry, useZip64);
streamPosition += headersize;
return new ZipWritingStream(this, OutputStream, entry, compression,
options.DeflateCompressionLevel ?? compressionLevel);
_streamPosition += headersize;
return new ZipWritingStream(this, OutputStream, entry, compression, _zipWriterOptions);
}
private string NormalizeFilename(string filename)
@@ -149,12 +144,12 @@ namespace SharpCompress.Writers.Zip
private int WriteHeader(string filename, ZipWriterEntryOptions zipWriterEntryOptions, ZipCentralDirectoryEntry entry, bool useZip64)
{
// We err on the side of caution until the zip specification clarifies how to support this
if (!OutputStream.CanSeek && useZip64)
throw new NotSupportedException("Zip64 extensions are not supported on non-seekable streams");
// We err on the side of caution until the zip specification clarifies how to support this
if (!OutputStream.CanSeek && useZip64)
throw new NotSupportedException("Zip64 extensions are not supported on non-seekable streams");
var explicitZipCompressionInfo = ToZipCompressionMethod(zipWriterEntryOptions.CompressionType ?? compressionType);
byte[] encodedFilename = ArchiveEncoding.Default.GetBytes(filename);
var explicitZipCompressionInfo = ToZipCompressionMethod(zipWriterEntryOptions.CompressionType ?? _zipWriterOptions.CompressionType);
byte[] encodedFilename = WriterOptions.ArchiveEncoding.Encode(filename);
OutputStream.Write(DataConverter.LittleEndian.GetBytes(ZipHeaderFactory.ENTRY_HEADER_BYTES), 0, 4);
if (explicitZipCompressionInfo == ZipCompressionMethod.Deflate)
@@ -162,22 +157,26 @@ namespace SharpCompress.Writers.Zip
if (OutputStream.CanSeek && useZip64)
OutputStream.Write(new byte[] { 45, 0 }, 0, 2); //smallest allowed version for zip64
else
OutputStream.Write(new byte[] { 20, 0 }, 0, 2); //older version which is more compatible
OutputStream.Write(new byte[] { 20, 0 }, 0, 2); //older version which is more compatible
}
else
{
OutputStream.Write(new byte[] { 63, 0 }, 0, 2); //version says we used PPMd or LZMA
}
HeaderFlags flags = ArchiveEncoding.Default == Encoding.UTF8 ? HeaderFlags.UTF8 : 0;
HeaderFlags flags = Equals(WriterOptions.ArchiveEncoding.GetEncoding(), Encoding.UTF8) ? HeaderFlags.UTF8 : 0;
if (!OutputStream.CanSeek)
{
flags |= HeaderFlags.UsePostDataDescriptor;
if (explicitZipCompressionInfo == ZipCompressionMethod.LZMA)
{
flags |= HeaderFlags.Bit1; // eos marker
}
}
if (!string.IsNullOrEmpty(_zipWriterOptions.Password))
{
flags |= HeaderFlags.Encrypted;
}
OutputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)flags), 0, 2);
OutputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)explicitZipCompressionInfo), 0, 2); // zipping method
OutputStream.Write(DataConverter.LittleEndian.GetBytes(zipWriterEntryOptions.ModificationDateTime.DateTimeToDosTime()), 0, 4);
@@ -213,11 +212,11 @@ namespace SharpCompress.Writers.Zip
private void WriteEndRecord(ulong size)
{
byte[] encodedComment = ArchiveEncoding.Default.GetBytes(zipComment);
var zip64 = isZip64 || entries.Count > ushort.MaxValue || streamPosition >= uint.MaxValue || size >= uint.MaxValue;
byte[] encodedComment = WriterOptions.ArchiveEncoding.Encode(_zipWriterOptions.ArchiveComment ?? string.Empty);
var zip64 = _zipWriterOptions.UseZip64 || _entries.Count > ushort.MaxValue || _streamPosition >= uint.MaxValue || size >= uint.MaxValue;
var sizevalue = size >= uint.MaxValue ? uint.MaxValue : (uint)size;
var streampositionvalue = streamPosition >= uint.MaxValue ? uint.MaxValue : (uint)streamPosition;
var streampositionvalue = _streamPosition >= uint.MaxValue ? uint.MaxValue : (uint)_streamPosition;
if (zip64)
{
@@ -233,26 +232,26 @@ namespace SharpCompress.Writers.Zip
OutputStream.Write(DataConverter.LittleEndian.GetBytes((uint)0), 0, 4); // Central dir disk
// TODO: entries.Count is int, so max 2^31 files
OutputStream.Write(DataConverter.LittleEndian.GetBytes((ulong)entries.Count), 0, 8); // Entries in this disk
OutputStream.Write(DataConverter.LittleEndian.GetBytes((ulong)entries.Count), 0, 8); // Total entries
OutputStream.Write(DataConverter.LittleEndian.GetBytes((ulong)_entries.Count), 0, 8); // Entries in this disk
OutputStream.Write(DataConverter.LittleEndian.GetBytes((ulong)_entries.Count), 0, 8); // Total entries
OutputStream.Write(DataConverter.LittleEndian.GetBytes(size), 0, 8); // Central Directory size
OutputStream.Write(DataConverter.LittleEndian.GetBytes((ulong)streamPosition), 0, 8); // Disk offset
OutputStream.Write(DataConverter.LittleEndian.GetBytes((ulong)_streamPosition), 0, 8); // Disk offset
// Write zip64 end of central directory locator
OutputStream.Write(new byte[] { 80, 75, 6, 7 }, 0, 4);
OutputStream.Write(DataConverter.LittleEndian.GetBytes(0uL), 0, 4); // Entry disk
OutputStream.Write(DataConverter.LittleEndian.GetBytes((ulong)streamPosition + size), 0, 8); // Offset to the zip64 central directory
OutputStream.Write(DataConverter.LittleEndian.GetBytes((ulong)_streamPosition + size), 0, 8); // Offset to the zip64 central directory
OutputStream.Write(DataConverter.LittleEndian.GetBytes(0u), 0, 4); // Number of disks
streamPosition += recordlen + (4 + 4 + 8 + 4);
streampositionvalue = streamPosition >= uint.MaxValue ? uint.MaxValue : (uint)streampositionvalue;
_streamPosition += recordlen + (4 + 4 + 8 + 4);
streampositionvalue = _streamPosition >= uint.MaxValue ? uint.MaxValue : (uint)streampositionvalue;
}
// Write normal end of central directory record
OutputStream.Write(new byte[] {80, 75, 5, 6, 0, 0, 0, 0}, 0, 8);
OutputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)entries.Count), 0, 2);
OutputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)entries.Count), 0, 2);
OutputStream.Write(new byte[] { 80, 75, 5, 6, 0, 0, 0, 0 }, 0, 8);
OutputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)_entries.Count), 0, 2);
OutputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)_entries.Count), 0, 2);
OutputStream.Write(DataConverter.LittleEndian.GetBytes(sizevalue), 0, 4);
OutputStream.Write(DataConverter.LittleEndian.GetBytes((uint)streampositionvalue), 0, 4);
OutputStream.Write(DataConverter.LittleEndian.GetBytes((ushort)encodedComment.Length), 0, 2);
@@ -263,29 +262,29 @@ namespace SharpCompress.Writers.Zip
internal class ZipWritingStream : Stream
{
private readonly CRC32 crc = new CRC32();
private readonly ZipCentralDirectoryEntry entry;
private readonly Stream originalStream;
private readonly Stream writeStream;
private readonly ZipWriter writer;
private readonly ZipCompressionMethod zipCompressionMethod;
private readonly CompressionLevel compressionLevel;
private CountingWritableSubStream counting;
private ulong decompressed;
private readonly CRC32 _crc = new CRC32();
private readonly ZipCentralDirectoryEntry _entry;
private readonly Stream _originalStream;
private readonly Stream _writeStream;
private readonly ZipWriter _writer;
private readonly ZipWriterOptions _zipWriterOptions;
private readonly ZipCompressionMethod _zipCompressionMethod;
private CountingWritableSubStream _counting;
private ulong _decompressed;
// Flag to prevent throwing exceptions on Dispose
private bool limitsExceeded;
// Flag to prevent throwing exceptions on Dispose
private bool _limitsExceeded;
internal ZipWritingStream(ZipWriter writer, Stream originalStream, ZipCentralDirectoryEntry entry,
ZipCompressionMethod zipCompressionMethod, CompressionLevel compressionLevel)
internal ZipWritingStream(ZipWriter writer, Stream originalStream, ZipCentralDirectoryEntry entry,
ZipCompressionMethod zipCompressionMethod,
ZipWriterOptions zipWriterOptions)
{
this.writer = writer;
this.originalStream = originalStream;
this.writer = writer;
this.entry = entry;
this.zipCompressionMethod = zipCompressionMethod;
this.compressionLevel = compressionLevel;
writeStream = GetWriteStream(originalStream);
this._writer = writer;
this._originalStream = originalStream;
this._entry = entry;
_zipWriterOptions = zipWriterOptions;
_zipCompressionMethod = zipCompressionMethod;
_writeStream = GetWriteStream(originalStream);
}
public override bool CanRead => false;
@@ -300,45 +299,60 @@ namespace SharpCompress.Writers.Zip
private Stream GetWriteStream(Stream writeStream)
{
counting = new CountingWritableSubStream(writeStream);
Stream output = counting;
switch (zipCompressionMethod)
_counting = new CountingWritableSubStream(writeStream);
Stream output = _counting;
Stream compressedStream;
switch (_zipCompressionMethod)
{
case ZipCompressionMethod.None:
{
return output;
}
{
compressedStream = output;
break;
}
case ZipCompressionMethod.Deflate:
{
return new DeflateStream(counting, CompressionMode.Compress, compressionLevel,
true);
}
{
compressedStream = new DeflateStream(_counting, CompressionMode.Compress,
_zipWriterOptions.DeflateCompressionLevel,
true);
break;
}
case ZipCompressionMethod.BZip2:
{
return new BZip2Stream(counting, CompressionMode.Compress, true);
}
{
compressedStream = new BZip2Stream(_counting, CompressionMode.Compress, true);
break;
}
case ZipCompressionMethod.LZMA:
{
counting.WriteByte(9);
counting.WriteByte(20);
counting.WriteByte(5);
counting.WriteByte(0);
{
_counting.WriteByte(9);
_counting.WriteByte(20);
_counting.WriteByte(5);
_counting.WriteByte(0);
LzmaStream lzmaStream = new LzmaStream(new LzmaEncoderProperties(!originalStream.CanSeek),
false, counting);
counting.Write(lzmaStream.Properties, 0, lzmaStream.Properties.Length);
return lzmaStream;
}
LzmaStream lzmaStream = new LzmaStream(new LzmaEncoderProperties(!_originalStream.CanSeek),
false, _counting);
_counting.Write(lzmaStream.Properties, 0, lzmaStream.Properties.Length);
compressedStream = lzmaStream;
break;
}
case ZipCompressionMethod.PPMd:
{
counting.Write(writer.PpmdProperties.Properties, 0, 2);
return new PpmdStream(writer.PpmdProperties, counting, true);
}
{
_counting.Write(_writer.PpmdProperties.Properties, 0, 2);
compressedStream = new PpmdStream(_writer.PpmdProperties, _counting, true);
break;
}
default:
{
throw new NotSupportedException("CompressionMethod: " + zipCompressionMethod);
}
{
throw new NotSupportedException("CompressionMethod: " + _zipCompressionMethod);
}
}
if (string.IsNullOrEmpty(_zipWriterOptions.Password))
{
return compressedStream;
}
var encryptionData = PkwareTraditionalEncryptionData.ForWrite(_zipWriterOptions.Password, _zipWriterOptions.ArchiveEncoding);
return new PkwareTraditionalCryptoStream(new NonDisposingStream(writeStream), encryptionData, CryptoMode.Encrypt);
}
protected override void Dispose(bool disposing)
@@ -346,78 +360,73 @@ namespace SharpCompress.Writers.Zip
base.Dispose(disposing);
if (disposing)
{
writeStream.Dispose();
_writeStream.Dispose();
if (limitsExceeded)
{
// We have written invalid data into the archive,
// so we destroy it now, instead of allowing the user to continue
// with a defunct archive
originalStream.Dispose();
return;
}
entry.Crc = (uint)crc.Crc32Result;
entry.Compressed = counting.Count;
entry.Decompressed = decompressed;
var zip64 = entry.Compressed >= uint.MaxValue || entry.Decompressed >= uint.MaxValue;
var compressedvalue = zip64 ? uint.MaxValue : (uint)counting.Count;
var decompressedvalue = zip64 ? uint.MaxValue : (uint)entry.Decompressed;
if (originalStream.CanSeek)
if (_limitsExceeded)
{
originalStream.Position = (long)(entry.HeaderOffset + 6);
originalStream.WriteByte(0);
originalStream.Position = (long)(entry.HeaderOffset + 14);
// We have written invalid data into the archive,
// so we destroy it now, instead of allowing the user to continue
// with a defunct archive
_originalStream.Dispose();
return;
}
writer.WriteFooter(entry.Crc, compressedvalue, decompressedvalue);
_entry.Crc = (uint)_crc.Crc32Result;
_entry.Compressed = _counting.Count;
_entry.Decompressed = _decompressed;
// Ideally, we should not throw from Dispose()
// We should not get here as the Write call checks the limits
if (zip64 && entry.Zip64HeaderOffset == 0)
throw new NotSupportedException("Attempted to write a stream that is larger than 4GiB without setting the zip64 option");
var zip64 = _entry.Compressed >= uint.MaxValue || _entry.Decompressed >= uint.MaxValue;
var compressedvalue = zip64 ? uint.MaxValue : (uint)_counting.Count;
var decompressedvalue = zip64 ? uint.MaxValue : (uint)_entry.Decompressed;
// If we have pre-allocated space for zip64 data,
// fill it out, even if it is not required
if (entry.Zip64HeaderOffset != 0)
{
originalStream.Position = (long)(entry.HeaderOffset + entry.Zip64HeaderOffset);
originalStream.Write(DataConverter.LittleEndian.GetBytes((ushort)0x0001), 0, 2);
originalStream.Write(DataConverter.LittleEndian.GetBytes((ushort)(8 + 8)), 0, 2);
if (_originalStream.CanSeek)
{
_writer.WriteFooter(_entry.Crc, compressedvalue, decompressedvalue);
originalStream.Write(DataConverter.LittleEndian.GetBytes(entry.Decompressed), 0, 8);
originalStream.Write(DataConverter.LittleEndian.GetBytes(entry.Compressed), 0, 8);
}
// Ideally, we should not throw from Dispose()
// We should not get here as the Write call checks the limits
if (zip64 && _entry.Zip64HeaderOffset == 0)
throw new NotSupportedException("Attempted to write a stream that is larger than 4GiB without setting the zip64 option");
originalStream.Position = writer.streamPosition + (long)entry.Compressed;
writer.streamPosition += (long)entry.Compressed;
// If we have pre-allocated space for zip64 data,
// fill it out, even if it is not required
if (_entry.Zip64HeaderOffset != 0)
{
_originalStream.Position = (long)(_entry.HeaderOffset + _entry.Zip64HeaderOffset);
_originalStream.Write(DataConverter.LittleEndian.GetBytes((ushort)0x0001), 0, 2);
_originalStream.Write(DataConverter.LittleEndian.GetBytes((ushort)(8 + 8)), 0, 2);
_originalStream.Write(DataConverter.LittleEndian.GetBytes(_entry.Decompressed), 0, 8);
_originalStream.Write(DataConverter.LittleEndian.GetBytes(_entry.Compressed), 0, 8);
}
_originalStream.Position = _writer._streamPosition + (long)_entry.Compressed;
_writer._streamPosition += (long)_entry.Compressed;
}
else
{
// We have a streaming archive, so we should add a post-data-descriptor,
// but we cannot as it does not hold the zip64 values
// Throwing an exception until the zip specification is clarified
// We have a streaming archive, so we should add a post-data-descriptor,
// but we cannot as it does not hold the zip64 values
// Throwing an exception until the zip specification is clarified
// Ideally, we should not throw from Dispose()
// We should not get here as the Write call checks the limits
if (zip64)
throw new NotSupportedException("Streams larger than 4GiB are not supported for non-seekable streams");
// Ideally, we should not throw from Dispose()
// We should not get here as the Write call checks the limits
if (zip64)
throw new NotSupportedException("Streams larger than 4GiB are not supported for non-seekable streams");
originalStream.Write(DataConverter.LittleEndian.GetBytes(ZipHeaderFactory.POST_DATA_DESCRIPTOR), 0, 4);
writer.WriteFooter(entry.Crc,
_originalStream.Write(DataConverter.LittleEndian.GetBytes(ZipHeaderFactory.POST_DATA_DESCRIPTOR), 0, 4);
_writer.WriteFooter(_entry.Crc,
(uint)compressedvalue,
(uint)decompressedvalue);
writer.streamPosition += (long)entry.Compressed + 16;
_writer._streamPosition += (long)_entry.Compressed + 16;
}
writer.entries.Add(entry);
_writer._entries.Add(_entry);
}
}
public override void Flush()
{
writeStream.Flush();
_writeStream.Flush();
}
public override int Read(byte[] buffer, int offset, int count)
@@ -437,36 +446,35 @@ namespace SharpCompress.Writers.Zip
public override void Write(byte[] buffer, int offset, int count)
{
// We check the limits first, because we can keep the archive consistent
// if we can prevent the writes from happening
if (entry.Zip64HeaderOffset == 0)
{
// Pre-check, the counting.Count is not exact, as we do not know the size before having actually compressed it
if (limitsExceeded || ((decompressed + (uint)count) > uint.MaxValue) || (counting.Count + (uint)count) > uint.MaxValue)
throw new NotSupportedException("Attempted to write a stream that is larger than 4GiB without setting the zip64 option");
}
// We check the limits first, because we can keep the archive consistent
// if we can prevent the writes from happening
if (_entry.Zip64HeaderOffset == 0)
{
// Pre-check, the counting.Count is not exact, as we do not know the size before having actually compressed it
if (_limitsExceeded || ((_decompressed + (uint)count) > uint.MaxValue) || (_counting.Count + (uint)count) > uint.MaxValue)
throw new NotSupportedException("Attempted to write a stream that is larger than 4GiB without setting the zip64 option");
}
decompressed += (uint)count;
crc.SlurpBlock(buffer, offset, count);
writeStream.Write(buffer, offset, count);
if (entry.Zip64HeaderOffset == 0)
{
// Post-check, this is accurate
if ((decompressed > uint.MaxValue) || counting.Count > uint.MaxValue)
{
// We have written the data, so the archive is now broken
// Throwing the exception here, allows us to avoid
// throwing an exception in Dispose() which is discouraged
// as it can mask other errors
limitsExceeded = true;
throw new NotSupportedException("Attempted to write a stream that is larger than 4GiB without setting the zip64 option");
}
}
_decompressed += (uint)count;
_crc.SlurpBlock(buffer, offset, count);
_writeStream.Write(buffer, offset, count);
if (_entry.Zip64HeaderOffset == 0)
{
// Post-check, this is accurate
if ((_decompressed > uint.MaxValue) || _counting.Count > uint.MaxValue)
{
// We have written the data, so the archive is now broken
// Throwing the exception here, allows us to avoid
// throwing an exception in Dispose() which is discouraged
// as it can mask other errors
_limitsExceeded = true;
throw new NotSupportedException("Attempted to write a stream that is larger than 4GiB without setting the zip64 option");
}
}
}
}
#endregion
#endregion Nested type: ZipWritingStream
}
}

View File

@@ -15,8 +15,15 @@ namespace SharpCompress.Writers.Zip
: base(options.CompressionType)
{
LeaveStreamOpen = options.LeaveStreamOpen;
if (options is ZipWriterOptions)
UseZip64 = ((ZipWriterOptions)options).UseZip64;
ArchiveEncoding = options.ArchiveEncoding;
var writerOptions = options as ZipWriterOptions;
if (writerOptions != null)
{
UseZip64 = writerOptions.UseZip64;
DeflateCompressionLevel = writerOptions.DeflateCompressionLevel;
ArchiveComment = writerOptions.ArchiveComment;
}
}
/// <summary>
/// When CompressionType.Deflate is used, this property is referenced. Defaults to CompressionLevel.Default.
@@ -33,5 +40,10 @@ namespace SharpCompress.Writers.Zip
/// are less than 4GiB in length.
/// </summary>
public bool UseZip64 { get; set; }
/// <summary>
/// Setting a password will encrypt the zip archive with the Pkware style.
/// </summary>
public string Password { get; set; }
}
}

View File

@@ -22,4 +22,7 @@
<ItemGroup>
<Service Include="{82a7f48d-3b50-4b1e-b82e-3ada8210c358}" />
</ItemGroup>
<ItemGroup>
<Folder Include="Xz\" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,32 @@
using SharpCompress.Compressors.Xz;
using System;
using System.Text;
using Xunit;
namespace SharpCompress.Test.Xz
{
/// <summary>
/// Verifies <c>Crc32.Compute</c> against known CRC-32 values for two ASCII test vectors.
/// </summary>
public class Crc32Tests
{
    private const string SimpleString = @"The quick brown fox jumps over the lazy dog.";
    // static readonly: the vectors never change, so encode them once rather than per test instance.
    private static readonly byte[] SimpleBytes = Encoding.ASCII.GetBytes(SimpleString);

    private const string SimpleString2 = @"Life moves pretty fast. If you don't stop and look around once in a while, you could miss it.";
    private static readonly byte[] SimpleBytes2 = Encoding.ASCII.GetBytes(SimpleString2);

    [Fact]
    public void ShortAsciiString()
    {
        var actual = Crc32.Compute(SimpleBytes);

        // Prefer the `uint` keyword over the BCL name `UInt32` (IDE0049).
        Assert.Equal(0x519025e9u, actual);
    }

    [Fact]
    public void ShortAsciiString2()
    {
        var actual = Crc32.Compute(SimpleBytes2);

        Assert.Equal(0x6ee3ad88u, actual);
    }
}
}

View File

@@ -0,0 +1,32 @@
using SharpCompress.Compressors.Xz;
using System;
using System.Text;
using Xunit;
namespace SharpCompress.Test.Xz
{
/// <summary>
/// Verifies <c>Crc64.Compute</c> against known CRC-64 values for two ASCII test vectors.
/// </summary>
public class Crc64Tests
{
    private const string SimpleString = @"The quick brown fox jumps over the lazy dog.";
    // static readonly: the vectors never change, so encode them once rather than per test instance.
    private static readonly byte[] SimpleBytes = Encoding.ASCII.GetBytes(SimpleString);

    private const string SimpleString2 = @"Life moves pretty fast. If you don't stop and look around once in a while, you could miss it.";
    private static readonly byte[] SimpleBytes2 = Encoding.ASCII.GetBytes(SimpleString2);

    [Fact]
    public void ShortAsciiString()
    {
        var actual = Crc64.Compute(SimpleBytes);

        // Prefer the `ulong` suffix over a cast through the BCL name `UInt64` (IDE0049).
        Assert.Equal(0x7E210EB1B03E5A1DUL, actual);
    }

    [Fact]
    public void ShortAsciiString2()
    {
        var actual = Crc64.Compute(SimpleBytes2);

        Assert.Equal(0x416B4150508661EEUL, actual);
    }
}
}

View File

@@ -0,0 +1,72 @@
using System;
using Xunit;
using System.IO;
using SharpCompress.Compressors.Xz.Filters;
namespace SharpCompress.Test.Xz.Filters
{
/// <summary>
/// Tests for <c>Lzma2Filter</c>: chain-position rules and dictionary-size decoding
/// from the single LZMA2 properties byte.
/// </summary>
public class Lzma2Tests : XZTestsBase
{
    // Fresh filter per test (xUnit constructs a new instance for every [Fact]/[Theory] case).
    private readonly Lzma2Filter filter;

    public Lzma2Tests()
    {
        filter = new Lzma2Filter();
    }

    [Fact]
    public void IsOnlyAllowedLast()
    {
        // LZMA2 may only appear as the last filter of an XZ filter chain.
        Assert.True(filter.AllowAsLast);
        Assert.False(filter.AllowAsNonLast);
    }

    [Fact]
    public void ChangesStreamSize()
    {
        Assert.True(filter.ChangesDataSize);
    }

    // The property byte encodes the dictionary size as a 2-or-3 mantissa scaled by
    // powers of two; 40 is the special maximum (4 GiB - 1), as the cases below show.
    [Theory]
    [InlineData(0, (uint)4 * 1024)]
    [InlineData(1, (uint)6 * 1024)]
    [InlineData(2, (uint)8 * 1024)]
    [InlineData(3, (uint)12 * 1024)]
    [InlineData(38, (uint)2 * 1024 * 1024 * 1024)]
    [InlineData(39, (uint)3 * 1024 * 1024 * 1024)]
    [InlineData(40, (uint)(1024 * 1024 * 1024 - 1) * 4 + 3)]
    public void CalculatesDictionarySize(byte inByte, uint dicSize)
    {
        filter.Init(new[] { inByte });

        // xUnit convention: expected value first.
        Assert.Equal(dicSize, filter.DictionarySize);
    }

    [Fact]
    public void CalculatesDictionarySizeError()
    {
        // 41 is outside the valid 0..40 range, so reading the property must overflow.
        filter.Init(new byte[] { 41 });

        var ex = Assert.Throws<OverflowException>(() =>
        {
            var unused = filter.DictionarySize;
        });
        Assert.Equal("Dictionary size greater than UInt32.Max", ex.Message);
    }

    [Theory]
    [InlineData(new byte[] { })]
    [InlineData(new byte[] { 0, 0 })]
    public void OnlyAcceptsOneByte(byte[] bytes)
    {
        // LZMA2 properties are exactly one byte; anything else is rejected.
        var ex = Assert.Throws<InvalidDataException>(() => filter.Init(bytes));
        Assert.Equal("LZMA properties unexpected length", ex.Message);
    }

    [Fact]
    public void ReservedBytesThrow()
    {
        // The two high bits of the properties byte are reserved and must be zero.
        var ex = Assert.Throws<InvalidDataException>(() => filter.Init(new byte[] { 0xC0 }));
        Assert.Equal("Reserved bits used in LZMA properties", ex.Message);
    }
}
}

View File

@@ -0,0 +1,73 @@
using System.Text;
using System.IO;
using SharpCompress.Compressors.Xz;
using Xunit;
namespace SharpCompress.Test.Xz
{
/// <summary>
/// Tests reading decompressed data out of an <c>XZBlock</c>.
/// </summary>
public class XZBlockTests : XZTestsBase
{
    /// <summary>
    /// Positions the shared sample at offset 12, i.e. just past the XZ stream
    /// header, so the first read hits the block header.
    /// </summary>
    protected override void Rewind(Stream stream)
    {
        stream.Position = 12;
    }

    // Reads exactly bytesToRead bytes from the block, throwing if it comes up short.
    private byte[] ReadBytes(XZBlock block, int bytesToRead)
    {
        byte[] buffer = new byte[bytesToRead];
        var read = block.Read(buffer, 0, bytesToRead);
        if (read != bytesToRead)
            throw new EndOfStreamException();
        return buffer;
    }

    [Fact]
    public void OnFindIndexBlockThrow()
    {
        // A leading 0x00 byte is the index indicator, not a block header.
        var bytes = new byte[] { 0 };
        using (Stream indexBlockStream = new MemoryStream(bytes))
        {
            var xzBlock = new XZBlock(indexBlockStream, CheckType.CRC64, 8);
            Assert.Throws<XZIndexMarkerReachedException>(() => { ReadBytes(xzBlock, 1); });
        }
    }

    [Fact]
    public void CrcIncorrectThrows()
    {
        // Corrupt one byte inside the block header so its CRC no longer matches.
        var bytes = Compressed.Clone() as byte[];
        bytes[20]++;
        using (Stream badCrcStream = new MemoryStream(bytes))
        {
            Rewind(badCrcStream);
            var xzBlock = new XZBlock(badCrcStream, CheckType.CRC64, 8);
            var ex = Assert.Throws<InvalidDataException>(() => { ReadBytes(xzBlock, 1); });
            Assert.Equal("Block header corrupt", ex.Message);
        }
    }

    [Fact]
    public void CanReadM()
    {
        // Local renamed from "XZBlock" — a local must not shadow its own type name.
        var xzBlock = new XZBlock(CompressedStream, CheckType.CRC64, 8);
        Assert.Equal(Encoding.ASCII.GetBytes("M"), ReadBytes(xzBlock, 1));
    }

    [Fact]
    public void CanReadMary()
    {
        // Successive partial reads must continue where the previous one stopped.
        var xzBlock = new XZBlock(CompressedStream, CheckType.CRC64, 8);
        Assert.Equal(Encoding.ASCII.GetBytes("M"), ReadBytes(xzBlock, 1));
        Assert.Equal(Encoding.ASCII.GetBytes("a"), ReadBytes(xzBlock, 1));
        Assert.Equal(Encoding.ASCII.GetBytes("ry"), ReadBytes(xzBlock, 2));
    }

    [Fact]
    public void CanReadPoemWithStreamReader()
    {
        var xzBlock = new XZBlock(CompressedStream, CheckType.CRC64, 8);
        var sr = new StreamReader(xzBlock);

        // Fixed argument order: Assert.Equal takes (expected, actual).
        Assert.Equal(Original, sr.ReadToEnd());
    }
}
}

View File

@@ -0,0 +1,78 @@
using SharpCompress.Compressors.Xz;
using System.IO;
using Xunit;
namespace SharpCompress.Test.Xz
{
/// <summary>
/// Tests parsing of the 12-byte XZ stream header (magic, stream flags, CRC32).
/// </summary>
public class XZHeaderTests : XZTestsBase
{
    [Fact]
    public void ChecksMagicNumber()
    {
        // Corrupt a byte inside the six-byte magic number at the start of the stream.
        var bytes = Compressed.Clone() as byte[];
        bytes[3]++;
        using (Stream badMagicNumberStream = new MemoryStream(bytes))
        {
            BinaryReader br = new BinaryReader(badMagicNumberStream);
            var header = new XZHeader(br);
            var ex = Assert.Throws<InvalidDataException>(() => { header.Process(); });
            Assert.Equal("Invalid XZ Stream", ex.Message);
        }
    }

    [Fact]
    public void CorruptHeaderThrows()
    {
        // Corrupt a byte of the stored header CRC32 (offset 8).
        var bytes = Compressed.Clone() as byte[];
        bytes[8]++;
        using (Stream badCrcStream = new MemoryStream(bytes))
        {
            BinaryReader br = new BinaryReader(badCrcStream);
            var header = new XZHeader(br);
            var ex = Assert.Throws<InvalidDataException>(() => { header.Process(); });
            Assert.Equal("Stream header corrupt", ex.Message);
        }
    }

    // Brace style normalized to Allman to match the rest of the file.
    [Fact]
    public void BadVersionIfCrcOkButStreamFlagUnknown()
    {
        // Write unknown stream flags but recompute a matching CRC32, so the
        // corruption check passes and the version check is what fails.
        var bytes = Compressed.Clone() as byte[];
        byte[] streamFlags = { 0x00, 0xF4 };
        byte[] crc = Crc32.Compute(streamFlags).ToLittleEndianBytes();
        streamFlags.CopyTo(bytes, 6);
        crc.CopyTo(bytes, 8);
        using (Stream badFlagStream = new MemoryStream(bytes))
        {
            BinaryReader br = new BinaryReader(badFlagStream);
            var header = new XZHeader(br);
            var ex = Assert.Throws<InvalidDataException>(() => { header.Process(); });
            Assert.Equal("Unknown XZ Stream Version", ex.Message);
        }
    }

    [Fact]
    public void ProcessesBlockCheckType()
    {
        BinaryReader br = new BinaryReader(CompressedStream);
        var header = new XZHeader(br);
        header.Process();

        Assert.Equal(CheckType.CRC64, header.BlockCheckType);
    }

    [Fact]
    public void CanCalculateBlockCheckSize()
    {
        BinaryReader br = new BinaryReader(CompressedStream);
        var header = new XZHeader(br);
        header.Process();

        // A CRC64 check value occupies 8 bytes.
        Assert.Equal(8, header.BlockCheckSize);
    }

    [Fact]
    public void ProcessesStreamHeaderFromFactory()
    {
        var header = XZHeader.FromStream(CompressedStream);

        Assert.Equal(CheckType.CRC64, header.BlockCheckType);
    }
}
}

View File

@@ -0,0 +1,45 @@
using SharpCompress.Compressors.Xz;
using System.IO;
using Xunit;
namespace SharpCompress.Test.Xz
{
/// <summary>
/// Tests parsing of the XZ index section of the sample archive.
/// </summary>
public class XZIndexTests : XZTestsBase
{
    /// <summary>
    /// Positions the shared sample at the start of its index section
    /// (byte offset 356 in this particular archive).
    /// </summary>
    protected override void Rewind(Stream stream)
    {
        stream.Position = 356;
    }

    [Fact]
    public void RecordsStreamStartOnInit()
    {
        using (Stream badStream = new MemoryStream(new byte[] { 1, 2, 3, 4, 5 }))
        {
            BinaryReader br = new BinaryReader(badStream);
            var index = new XZIndex(br, false);
            Assert.Equal(0, index.StreamStartPosition);
        }
    }

    [Fact]
    public void ThrowsIfHasNoIndexMarker()
    {
        // The data does not begin with the 0x00 index indicator byte.
        using (Stream badStream = new MemoryStream(new byte[] { 1, 2, 3, 4, 5 }))
        {
            BinaryReader br = new BinaryReader(badStream);
            var index = new XZIndex(br, false);
            Assert.Throws<InvalidDataException>(() => index.Process());
        }
    }

    [Fact]
    public void ReadsNumberOfRecords()
    {
        BinaryReader br = new BinaryReader(CompressedStream);
        var index = new XZIndex(br, false);
        index.Process();

        // Fixed argument order: Assert.Equal takes (expected, actual).
        Assert.Equal((ulong)1, index.NumberOfRecords);
    }
}
}

View File

@@ -0,0 +1,20 @@
using SharpCompress.Compressors.Xz;
using System.IO;
using Xunit;
namespace SharpCompress.Test.Xz
{
/// <summary>
/// End-to-end test: decompressing the whole sample via <c>XZStream</c> yields the plaintext.
/// </summary>
public class XZStreamReaderTests : XZTestsBase
{
    [Fact]
    public void CanReadStream()
    {
        // The StreamReader takes ownership of xz and disposes it (and the inner stream).
        XZStream xz = new XZStream(CompressedStream);
        using (var sr = new StreamReader(xz))
        {
            string uncompressed = sr.ReadToEnd();

            // Fixed argument order: Assert.Equal takes (expected, actual).
            Assert.Equal(Original, uncompressed);
        }
    }
}
}

View File

@@ -0,0 +1,69 @@
using System.Text;
using System.IO;
namespace SharpCompress.Test.Xz
{
/// <summary>
/// Shared fixture for the XZ tests: a small xz-compressed sample (it begins with
/// the XZ magic bytes FD '7zXZ' 00) plus the plaintext it decodes to.
/// xUnit creates a fresh instance per test, so each test gets its own stream.
/// </summary>
public abstract class XZTestsBase
{
    public XZTestsBase()
    {
        // Position the shared stream wherever the derived fixture needs it.
        // NOTE(review): virtual call from a constructor — the override runs before the
        // derived constructor body. Harmless here because overrides only set Position.
        Rewind(CompressedStream);
    }

    /// <summary>
    /// Repositions <paramref name="stream"/> to where the tests should start reading.
    /// Defaults to the beginning; derived fixtures override it (e.g. XZBlockTests
    /// seeks to 12, XZIndexTests to 356).
    /// </summary>
    protected virtual void Rewind(Stream stream)
    {
        stream.Position = 0;
    }

    // One MemoryStream per fixture instance, wrapping the sample bytes below.
    protected Stream CompressedStream { get; } = new MemoryStream(Compressed);

    // The xz-compressed encoding of Original. Tests poke specific offsets of a
    // clone of this array to simulate corruption.
    protected static byte[] Compressed { get; } = new byte[] {
        0xfd, 0x37, 0x7a, 0x58, 0x5a, 0x00, 0x00, 0x04, 0xe6, 0xd6, 0xb4, 0x46, 0x02, 0x00, 0x21, 0x01,
        0x16, 0x00, 0x00, 0x00, 0x74, 0x2f, 0xe5, 0xa3, 0xe0, 0x01, 0xe4, 0x01, 0x3c, 0x5d, 0x00, 0x26,
        0x98, 0x4a, 0x47, 0xc6, 0x6a, 0x27, 0xd7, 0x36, 0x7a, 0x05, 0xb9, 0x4f, 0xd7, 0xde, 0x52, 0x4c,
        0xca, 0x26, 0x4f, 0x23, 0x60, 0x4d, 0xf3, 0x1f, 0xa3, 0x67, 0x49, 0x53, 0xd0, 0xf5, 0xc7, 0xa9,
        0x3e, 0xd6, 0xb5, 0x3d, 0x2b, 0x02, 0xbe, 0x83, 0x27, 0xe2, 0xa6, 0xc3, 0x13, 0x4a, 0x31, 0x14,
        0x33, 0xed, 0x9a, 0x85, 0x1d, 0x05, 0x6e, 0x7e, 0xa4, 0x91, 0xbf, 0x46, 0x71, 0x7d, 0xa7, 0xfb,
        0x12, 0x10, 0xdf, 0x21, 0x73, 0x75, 0xd8, 0xd9, 0xab, 0x8f, 0x1f, 0x8b, 0xb0, 0xb9, 0x3f, 0x9a,
        0xa5, 0x1e, 0xd4, 0x2f, 0xdf, 0x09, 0xb3, 0xfe, 0x45, 0xef, 0x16, 0xec, 0x95, 0x68, 0x64, 0xbb,
        0x42, 0x0c, 0x8b, 0x96, 0x27, 0x30, 0x62, 0x42, 0x91, 0x7c, 0xf3, 0x6e, 0x4d, 0x03, 0xc5, 0x00,
        0x04, 0x73, 0xdd, 0xee, 0xb0, 0xaa, 0xd6, 0x0b, 0x11, 0x90, 0x81, 0xd4, 0xaa, 0x69, 0x63, 0xfa,
        0x2f, 0xb4, 0x25, 0x0a, 0x7f, 0xf9, 0x47, 0x77, 0xb1, 0x1f, 0xc3, 0xb4, 0x4d, 0x51, 0xf8, 0x23,
        0x3a, 0x7c, 0x44, 0xc8, 0xcc, 0xca, 0x72, 0x09, 0xae, 0xc9, 0x7b, 0x7e, 0x91, 0x5d, 0xff, 0xc4,
        0xeb, 0xfd, 0xa1, 0x9b, 0xd4, 0x8d, 0xd7, 0xd3, 0x57, 0xac, 0x7e, 0x3b, 0x97, 0x2e, 0xe4, 0xc2,
        0x2e, 0x93, 0x3d, 0xb0, 0x16, 0x64, 0x78, 0x45, 0xb1, 0xc9, 0x40, 0x96, 0xcf, 0x5b, 0xc2, 0x2f,
        0xaa, 0xba, 0xcf, 0x98, 0x38, 0x21, 0x3d, 0x1a, 0x13, 0xe8, 0xa6, 0xa6, 0xdf, 0xf4, 0x3d, 0x01,
        0xa1, 0x9d, 0xc1, 0x3e, 0x37, 0xac, 0x20, 0xc4, 0xef, 0x18, 0xb1, 0xeb, 0x35, 0xf4, 0x66, 0x9a,
        0x47, 0x3c, 0xce, 0x7c, 0xad, 0xdb, 0x2e, 0x39, 0xf5, 0x8d, 0x4a, 0x1d, 0x65, 0xc2, 0x0f, 0xa4,
        0x40, 0x7e, 0xe6, 0xa7, 0x17, 0xce, 0x75, 0x7f, 0xd9, 0xa3, 0xf9, 0x27, 0x42, 0xd7, 0x98, 0x54,
        0x17, 0xa7, 0x7a, 0x7c, 0x82, 0xdf, 0xeb, 0x08, 0x28, 0x86, 0xdd, 0x57, 0x77, 0x92, 0x80, 0x5f,
        0x7b, 0x3b, 0xce, 0x77, 0x72, 0xff, 0xa3, 0x85, 0xd8, 0x5c, 0x8a, 0xb7, 0x83, 0x58, 0xfa, 0xbd,
        0x72, 0xe3, 0x66, 0x9d, 0x3b, 0xff, 0x13, 0x5b, 0x0b, 0xf1, 0x6c, 0xa6, 0xb1, 0x3b, 0x85, 0x3b,
        0x47, 0x91, 0xc8, 0x7c, 0x38, 0xe2, 0xe5, 0x54, 0xf8, 0x27, 0xee, 0x00, 0xff, 0xd3, 0x68, 0xf1,
        0xc6, 0xc7, 0xd7, 0x24, 0x00, 0x01, 0xd8, 0x02, 0xe5, 0x03, 0x00, 0x00, 0xac, 0x16, 0x1f, 0xa4,
        0xb1, 0xc4, 0x67, 0xfb, 0x02, 0x00, 0x00, 0x00, 0x00, 0x04, 0x59, 0x5a
    };

    // ASCII bytes of the plaintext. Note: this allocates a new array on every access.
    protected static byte[] OriginalBytes => Encoding.ASCII.GetBytes(Original);

    // The plaintext the sample decompresses to (CRLF line endings are significant).
    protected static string Original { get; } =
        "Mary had a little lamb,\r\n" +
        "His fleece was white as snow,\r\n" +
        "And everywhere that Mary went,\r\n" +
        "The lamb was sure to go.\r\n" +
        "\r\n" +
        "He followed her to school one day,\r\n" +
        "Which was against the rule,\r\n" +
        "It made the children laugh and play\r\n" +
        "To see a lamb at school.\r\n" +
        "\r\n" +
        "And so the teacher turned it out,\r\n" +
        "But still it lingered near,\r\n" +
        "And waited patiently about,\r\n" +
        "Till Mary did appear.\r\n" +
        "\r\n" +
        "\"Why does the lamb love Mary so?\"\r\n" +
        "The eager children cry.\r\n" +
        "\"Why, Mary loves the lamb, you know.\"\r\n" +
        "The teacher did reply.";
}
}

View File

@@ -14,6 +14,29 @@ namespace SharpCompress.Test.Zip
{
UseExtensionInsteadOfNameToVerify = true;
}
[Fact]
public void Issue_269_Double_Skip()
{
    // Regression test for issue #269: entries that are not extracted must only
    // be skipped once by the ZipReader.
    ResetScratch();
    var path = Path.Combine(TEST_ARCHIVES_PATH, "PrePostHeaders.zip");
    using (Stream stream = new ForwardOnlyStream(File.OpenRead(path)))
    using (IReader reader = ReaderFactory.Open(stream))
    {
        var entryIndex = 0;
        while (reader.MoveToNextEntry())
        {
            entryIndex++;

            // Extract every odd-numbered file entry; everything else is skipped,
            // which is what exercised the double-skip bug.
            var extractThisEntry = !reader.Entry.IsDirectory && entryIndex % 2 != 0;
            if (extractThisEntry)
            {
                reader.WriteEntryTo(Stream.Null);
            }
        }
    }
}
[Fact]
public void Zip_Zip64_Streamed_Read()

View File

@@ -1,4 +1,9 @@
using SharpCompress.Common;
using System.IO;
using SharpCompress.Common;
using SharpCompress.Readers;
using SharpCompress.Readers.Zip;
using SharpCompress.Writers;
using SharpCompress.Writers.Zip;
using Xunit;
namespace SharpCompress.Test.Zip
@@ -48,5 +53,41 @@ namespace SharpCompress.Test.Zip
{
Assert.Throws<InvalidFormatException>(() => Write(CompressionType.Rar, "Zip.ppmd.noEmptyDirs.zip", "Zip.ppmd.noEmptyDirs.zip"));
}
[Fact]
public void Zip_BZip2_PkwareEncryption_Write()
{
    // Round trip: write a PKWARE-encrypted zip with BZip2-compressed entries,
    // then read it back with the same password and verify the extracted files.
    ResetScratch();
    var archivePath = Path.Combine(SCRATCH_FILES_PATH, "Zip.pkware.zip");

    var writerOptions = new ZipWriterOptions(CompressionType.BZip2)
    {
        Password = "test"
    };
    using (Stream stream = File.OpenWrite(archivePath))
    using (var writer = new ZipWriter(stream, writerOptions))
    {
        writer.WriteAll(ORIGINAL_FILES_PATH, "*", SearchOption.AllDirectories);
    }

    var readerOptions = new ReaderOptions()
    {
        Password = "test"
    };
    using (Stream stream = File.OpenRead(archivePath))
    using (var reader = ZipReader.Open(stream, readerOptions))
    {
        while (reader.MoveToNextEntry())
        {
            if (reader.Entry.IsDirectory)
            {
                continue;
            }

            Assert.Equal(CompressionType.BZip2, reader.Entry.CompressionType);
            reader.WriteEntryToDirectory(SCRATCH_FILES_PATH, new ExtractionOptions()
            {
                ExtractFullPath = true,
                Overwrite = true
            });
        }
    }
    VerifyFiles();
}
}
}

Binary file not shown.

Binary file not shown.